2019-02-17
MSL=5 beta
LG-E-clean corpus, ALE clustering, 2000/1000/500/50/20 clusters, server 94.130.238.118
trash filter off, min_word_count = 1, max_sentence_length = 5
This notebook is shared as static GCB-LG-E-clean-ALE-MWC=1-MSL=5-2019-02-17.html.
Output data shared via GCB-LG-E-clean-ALE-MWC=1-MSL=5-2019-02-17 directory.
import os, sys, time
from IPython.display import display  # used for the result tables below
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path: sys.path.append(module_path)
from src.grammar_learner.utl import UTC, test_stats
from src.grammar_learner.read_files import check_dir, check_corpus
from src.grammar_learner.write_files import list2file
from src.grammar_learner.widgets import html_table
from src.grammar_learner.pqa_table import table_rows, params, wide_rows
tmpath = module_path + '/tmp/'
check_dir(tmpath, True, 'none')
start = time.time()
runs = (1,1)
print(UTC(), ':: module_path:', module_path)
corpus = 'GCB' # 'Gutenberg-Children-Books-Caps'
dataset = 'LG-E-clean'
kwargs = {
'max_sentence_length' : 5 ,
'max_unparsed_words' : 0 ,
'left_wall' : '' ,
'period' : False ,
'context' : 1 ,
'min_word_count': 1 ,
'word_space' : 'sparse' ,
'clustering' : ['agglomerative', 'ward'],
'clustering_metric' : ['silhouette', 'cosine'],
'cluster_range' : 2000 , # 2000/1000/500/50/20
'top_level' : 0.01 ,
'grammar_rules' : 2 ,
'max_disjuncts' : 1000000 , # off
'stop_words' : [] ,
'tmpath' : tmpath ,
'verbose' : 'log+' ,
'template_path' : 'poc-turtle',
'linkage_limit' : 1000 }
rp = module_path + '/data/' + corpus + '/LG-E-clean/GCB-LG-English-clean.ull'
cp = rp # corpus path = reference_path
runs = (1,1)
out_dir = module_path + '/output/' + 'GCB-LG-E-clean-MWC=1-MSL=5-' + str(UTC())[:10]
print(UTC(), '\n', out_dir)
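For reference, a minimal sketch of what the 'clustering' = ['agglomerative', 'ward'] and 'clustering_metric' = ['silhouette', 'cosine'] settings above correspond to, using scikit-learn on toy vectors; this only illustrates Ward-linkage agglomerative clustering scored by cosine silhouette and is not the grammar learner's internal code path.
import numpy as np                                    # illustration only, not part of this run
from sklearn.cluster import AgglomerativeClustering
from sklearn.metrics import silhouette_score
rng = np.random.RandomState(0)
X = rng.rand(200, 50)                                 # hypothetical stand-in for the word space
for n_clusters in (20, 10, 5):                        # stand-in for the 2000/1000/500/50/20 range
    labels = AgglomerativeClustering(n_clusters=n_clusters, linkage='ward').fit_predict(X)
    print(n_clusters, 'clusters, cosine silhouette:',
          round(silhouette_score(X, labels, metric='cosine'), 3))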
min_word_count = 1; 2000/1000/500/50/20 clusters
%%capture
table = []
kwargs['cluster_range'] = 2000
line = [['ALE2000', corpus, dataset, 0, 0, 'none']]
a, _, header, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
header[0] = 'Cell'
table.extend(a)
display(html_table([header] + a)); print(test_stats(log))
%%capture
kwargs['cluster_range'] = 1000
line = [['ALE1000', corpus, dataset, 0, 0, 'none']]
a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
table.extend(a)
display(html_table([header] + a)); print(test_stats(log))
%%capture
kwargs['cluster_range'] = 500
line = [['ALE500', corpus, dataset, 0, 0, 'none']]
a, _, header, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
header[0] = 'Cell'
table.extend(a)
display(html_table([header] + a)); print(test_stats(log))
%%capture
kwargs['cluster_range'] = 50
line = [['ALE50', corpus, dataset, 0, 0, 'none']]
a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
table.extend(a)
display(html_table([header] + a)); print(test_stats(log))
%%capture
kwargs['cluster_range'] = 20
line = [['ALE20', corpus, dataset, 0, 0, 'none']]
a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
table.extend(a)
display(html_table([header] + a)); print(test_stats(log))
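The five %%capture cells above repeat a single pattern; an equivalent loop over the cluster_range values (a sketch only, not part of the recorded run) would be:
for n, label in [(2000, 'ALE2000'), (1000, 'ALE1000'), (500, 'ALE500'),
                 (50, 'ALE50'), (20, 'ALE20')]:
    kwargs['cluster_range'] = n
    line = [[label, corpus, dataset, 0, 0, 'none']]
    a, _, header, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
    header[0] = 'Cell'
    table.extend(a)
    display(html_table([header] + a)); print(test_stats(log))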
The test with 20 clusters might take several days to complete or fail...
display(html_table([header] + table))
print(UTC(), ':: 2000/1000/500/50/20 finished, elapsed', str(round((time.time()-start)/3600.0, 1)), 'hours')
table_str = list2file(table, out_dir + '/all_tests_table.txt')
print('Results saved to', out_dir + '/all_tests_table.txt')
The test with 20 clusters might take several days to complete or fail... Please find the results in the all_tests_table.txt file.
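To reload the saved table later, a minimal sketch (assuming list2file wrote one tab-separated line per table row):
with open(out_dir + '/all_tests_table.txt') as f:          # reread the saved results table
    rows = [line.rstrip('\n').split('\t') for line in f if line.strip()]
display(html_table(rows))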