2019-04-02
"Gutenberg Children Books" corpus, new "LG-E-noQuotes" dataset (GC_LGEnglish_noQuotes_fullyParsed.ull),
trash filter off: min_word_count = 31,21,11,6,2,1;
max_sentence_length off; Link Grammar 5.5.1.
Server 94, fresh clone of the singnet repository, fresh ULL
environment; check 2019-03-25 error 255 with MWC=2
This notebook is shared as static ILE-GCB-LG-E-noQuotes-LG_551-S94-2019-04-02.html.
Output data shared via ILE-GCB-LG-E-noQuotes-LG_551-S94-2019-04-02 directory.
import os, sys, time
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path: sys.path.append(module_path)
from src.grammar_learner.utl import UTC, test_stats
from src.grammar_learner.read_files import check_dir, check_corpus
from src.grammar_learner.write_files import list2file
from src.grammar_learner.widgets import html_table
from src.grammar_learner.pqa_table import table_rows, params, wide_rows
# Run bookkeeping: wall-clock start, temp directory, (train, test) run counts,
# and the corpus/dataset identifiers used by all test cells below.
start = time.time()
runs = (1, 1)
tmpath = module_path + '/tmp/'
check_dir(tmpath, True, 'none')
print(UTC(), ':: module_path:', module_path)
corpus = 'GCB'             # 'Gutenberg-Children-Books-Caps'
dataset = 'LG-E-noQuotes'  # 'LG-E-clean'
# Grammar-learner configuration shared by every test below; only
# 'min_word_count' is overridden per test cell.
kwargs = dict(
    left_wall         = '',
    period            = False,
    context           = 2,                         # 2: disjuncts
    min_word_count    = 1,                         # overridden per test below
    word_space        = 'discrete',                # 'sparse'
    clustering        = 'group',                   # ('mean_shift', 2)
    clustering_metric = ['silhouette', 'cosine'],
    cluster_range     = [0],                       # auto
    top_level         = 0.01,
    grammar_rules     = 2,
    max_disjuncts     = 1000000,                   # off
    stop_words        = [],
    tmpath            = tmpath,
    verbose           = 'log+',
    template_path     = 'poc-turtle',
    linkage_limit     = 1000,
)
# Reference (parsed) corpus; the test corpus is the same file.
rp = module_path + '/data/' + corpus + '/LG-E-noQuotes/GC_LGEnglish_noQuotes_fullyParsed.ull'
cp = rp  # corpus path equals reference path
out_dir = module_path + '/output/' + 'ILE-GCB-LG-E-noQuotes-LG_551-S94-' + str(UTC())[:10]
print(UTC(), '\n', out_dir)
min_word_count = 31, 21, 11, 6, 2, 1  # the six MWC settings exercised below
%%capture
# First batch of tests, starting with min_word_count = 31.
table = []                                    # accumulates result rows across all tests
line = [['', corpus, dataset, 0, 0, 'none']]  # row template passed to wide_rows
kwargs['min_word_count'] = 31
a, _, header, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
header[0] = ''
table.extend(a)
# NOTE(review): the next four lines are byte-identical. In the original notebook
# these were presumably separate cells for MWC = 31, 21, 11 and 6, each preceded
# by its own kwargs['min_word_count'] update that was lost in this flat export --
# TODO confirm against the shared .html ("4 tests finished" below implies four
# distinct runs, and only results for 'a' from the MWC=31 run are shown here).
display(html_table([header] + a)); print(test_stats(log))
display(html_table([header] + a)); print(test_stats(log))
display(html_table([header] + a)); print(test_stats(log))
display(html_table([header] + a)); print(test_stats(log))
display(html_table([header] + table))
print(UTC(), ':: 4 tests finished, elapsed', str(round((time.time()-start)/3600.0, 1)), 'hours')
table_str = list2file(table, out_dir + '/all_tests_table.txt')  # persist intermediate results
print('Results saved to', out_dir + '/all_tests_table.txt')
%%capture
kwargs['min_word_count'] = 2
a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
table.extend(a)
2019-03-26, 2019-04-03 Error:
ParserError: Process 'link-parser' terminated with exit code: 255 and error message:
'link-grammar: Info: Dictionary found at /home/obaskov/94/language-learning/output/ILE-GCB-LG-E-clean-2019-03-25/GCB_LG-E-noQuotes_dILEd_no-gen_mwc=2/dict_13629C_2019-03-26_0007/4.0.dict
link-grammar: Error: While parsing dictionary /home/obaskov/94/language-learning/output/ILE-GCB-LG-E-clean-2019-03-25/GCB_LG-E-noQuotes_dILEd_no-gen_mwc=2/dict_13629C_2019-03-26_0007/4.0.dict:
Connector, "(", "[", or "{" expected.
Line 11278, next tokens: ";" "d'aulnoy" ":" "(" "AAPENV-"
link-grammar: Info: Freeing dictionary /home/obaskov/94/language-learning/output/ILE-GCB-LG-E-clean-2019-03-25/GCB_LG-E-noQuotes_dILEd_no-gen_mwc=2/dict_13629C_2019-03-26_0007/4.0.dict
link-grammar: Fatal error: Unable to open dictionary.
'.
2019-04-03 dict path: /home/obaskov/94/language-learning/output/ILE-GCB-LG-E-noQuotes-LG_551-S94-2019-04-02/GCB_LG-E-noQuotes_dILEd_no-gen_mwc=2/dict_13629C_2019-04-03_0007/4.0.dict; executed in 7h 5m 38s, finished 14:09:14 2019-04-03
Run twice: 2019-03-03 cell results lost due to a broken pipe, repeated 2019-04-04
%%capture
kwargs['min_word_count'] = 1
a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
table.extend(a)
display(html_table([header] + a)); print(test_stats(log))
display(html_table([header] + table))
print(UTC(), ':: 6 tests finished, elapsed', str(round((time.time()-start)/3600.0, 1)), 'hours')
table_str = list2file(table, out_dir + '/all_tests_table.txt')
print('Results saved to', out_dir + '/all_tests_table.txt')