2018-10-24
Agglomerative clustering
test_grammar updated 2018-10-19, Link Grammar 5.4.4; server 94.130.238.118.
poc-turtle-parses-gold.txt updated 2018-10-24.
This notebook is shared as POC-Turtle-2018-10-24.html.
The "All tests" table is shared as 'short_table.txt' in POC-Turtle-2018-10-24 folder.
Previous (reference) tests:
POC-Turtle-2018-10-21.html,
POC-Turtle-2018-08-05.html.
import os, sys, time
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path: sys.path.append(module_path)
from src.grammar_learner.utl import UTC
from src.grammar_learner.read_files import check_dir
from src.grammar_learner.write_files import list2file
from src.grammar_learner.widgets import html_table
from src.grammar_learner.pqa_table import table_rows
tmpath = module_path + '/tmp/'
check_dir(tmpath, True, 'none')
table = []
start = time.time()
print(UTC(), ':: module_path =', module_path)
corpus = 'POC-Turtle'
out_dir = module_path + '/output/' + corpus + '-' + str(UTC())[:10]
runs = (1,1) # (attempts to learn grammar per line, grammar tests per attempt)
if runs != (1,1): out_dir += '-multi'
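# e.g. out_dir == module_path + '/output/POC-Turtle-2018-10-24' for this run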
kwargs = {
'left_wall' : '' ,
'period' : False ,
'context' : 1 ,
'word_space' : 'vectors' ,
'clustering' : ('kmeans', 'kmeans++', 10),
'cluster_range' : (2,9,1,5),
'grammar_rules' : 1 ,
'tmpath' : tmpath ,
'template_path' : 'poc-turtle',
'linkage_limit' : 1000 ,
'verbose' : 'min' }
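# Baseline kwargs; the test cells below override 'context', 'grammar_rules',
# 'word_space', 'clustering', 'clustering_metric' and 'cluster_range'.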
lines = [
#[5, 'POC-Turtle' , 'MST-fixed-2018-05-05' , 0 , 0 , 'none' ],
[5, 'POC-Turtle' , 'MST-fixed-manually' , 0 , 0 , 'none' ],
[6, 'POC-Turtle' , 'R=6-Weight=6:R-mst-weight=+1:R' , 0 , 0 , 'none' ],
[7, 'POC-Turtle' , 'R=6-Weight=1-no-mst-weighting' , 0 , 0 , 'none' ],
[8, 'POC-Turtle' , 'LG-ANY-all-parses' , 0 , 0 , 'none' ]]
# cp,rp :: (test) corpus_path and reference_path:
# cp = module_path + '/data/POC-Turtle/poc-turtle-corpus.txt'
# rp = module_path + '/data/POC-Turtle/MST-fixed-2018-05-05/poc-turtle-parses-gold.txt'
rp = module_path + '/data/POC-Turtle/MST-fixed-manually/poc-turtle-parses-gold.txt'
cp = rp
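Optional sanity check (not part of the original pipeline): confirm the gold parses file and the input parse directories exist before the long run; the data/<corpus>/<dataset> layout is assumed from the cp and rp paths above.
assert os.path.isfile(rp), 'missing reference parses: ' + rp
for row in lines:
    input_dir = module_path + '/data/' + row[1] + '/' + row[2]
    assert os.path.isdir(input_dir), 'missing input parses: ' + input_dir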
%%capture
kwargs['context'] = 1
kwargs['grammar_rules'] = 1
average21, _, header = table_rows(lines, out_dir, cp, rp, runs, **kwargs)
table.extend(average21)
display(html_table([header]+average21))
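This and the next two test cells reuse the baseline 'clustering': ('kmeans', 'kmeans++', 10) setting. As a rough illustration (scikit-learn, not the grammar_learner code), that presumably corresponds to k-means with k-means++ initialization and 10 restarts:
import numpy as np
from sklearn.cluster import KMeans
X = np.random.rand(20, 5)                    # stand-in for the word vectors
labels = KMeans(n_clusters=5, init='k-means++', n_init=10).fit_predict(X)
print(labels)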
%%capture
kwargs['context'] = 1
kwargs['grammar_rules'] = 2
average22, _, header = table_rows(lines, out_dir, cp, rp, runs, **kwargs)
table.extend(average22)
display(html_table([header]+average22))
%%capture
kwargs['context'] = 2
kwargs['grammar_rules'] = 2
average23, _, header = table_rows(lines, out_dir, cp, rp, runs, **kwargs)
table.extend(average23)
display(html_table([header]+average23))
%%capture
kwargs['context'] = 2
kwargs['word_space'] = 'discrete'
kwargs['clustering'] = 'group'
kwargs['grammar_rules'] = 2
average24, _, header = table_rows(lines, out_dir, cp, rp, runs, **kwargs)
table.extend(average24)
display(html_table([header]+average24))
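In this cell, 'word_space': 'discrete' with 'clustering': 'group' is assumed here to mean grouping words that share identical context sets; a minimal sketch of that idea (an illustration, not the grammar_learner implementation), using a few POC-Turtle words:
from collections import defaultdict

def group_identical(word_contexts):          # {word: frozenset of observed contexts}
    groups = defaultdict(list)
    for word, contexts in word_contexts.items():
        groups[contexts].append(word)
    return list(groups.values())

print(group_identical({'tuna': frozenset({'isa fish'}),
                       'herring': frozenset({'isa fish'}),
                       'parrot': frozenset({'isa bird'})}))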
%%capture
kwargs['context'] = 2
kwargs['cluster_range'] = (2,12,1,1)
kwargs['word_space'] = 'sparse'
kwargs['clustering'] = ('agglomerative', 'ward')
kwargs['clustering_metric'] = ('silhouette', 'cosine')
kwargs['grammar_rules'] = 2
average25, _, header = table_rows(lines, out_dir, cp, rp, runs, **kwargs)
table.extend(average25)
display(html_table([header]+average25))
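This is the cell this notebook is about: ward-linkage agglomerative clustering over the sparse word space, with the cluster count scanned over 'cluster_range' and scored by silhouette on cosine distances. A rough scikit-learn sketch of such a selection loop (an illustration only; the grammar_learner's actual selection logic may differ):
import numpy as np
from sklearn.cluster import AgglomerativeClustering
from sklearn.metrics import silhouette_score

def best_ward_clustering(X, n_min=2, n_max=12):
    best = (None, -1.0)                      # (n_clusters, silhouette score)
    for n in range(n_min, n_max + 1):
        labels = AgglomerativeClustering(n_clusters=n, linkage='ward').fit_predict(X)
        score = silhouette_score(X, labels, metric='cosine')
        if score > best[1]:
            best = (n, score)
    return best

X = np.random.rand(20, 5)                    # stand-in for the sparse word vectors
print(best_ward_clustering(X))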
%%capture
kwargs['context'] = 1
kwargs['grammar_rules'] = 2
average26, _, header = table_rows(lines, out_dir, cp, rp, runs, **kwargs)
table.extend(average26)
display(html_table([header]+average26))
display(html_table([header] + table))
table_str = list2file(table, out_dir+'/all_tests_table.txt')
print(UTC(), ':: finished, elapsed', str(round((time.time()-start))),
'seconds. Results saved to\n', out_dir + '/all_tests_table.txt')
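A quick way to re-check the saved table afterwards (plain read, no assumptions about the file format):
with open(out_dir + '/all_tests_table.txt') as f:
    print(f.read())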