Gutenberg Children Books 2019-01-25 agm-opt MWC=1

ULLPP Line 51: LG-English-5.5.1 corpus, trash filter off, min_word_count = 1; 2000/1000/500/50/20 clusters

Link Grammar 5.5.1, test_grammar updated 2019-01-23.
This notebook is shared as static Gutenberg-Children-Books-agm-opt-MWC=1-2019-01-25.html.
Output data shared via Gutenberg-Children-Books-agm-opt-MWC=1-2019-01-25 directory.

Basic settings

In [1]:
import os, sys, time
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path: sys.path.append(module_path)
from src.grammar_learner.utl import UTC, test_stats
from src.grammar_learner.read_files import check_dir, check_corpus
from src.grammar_learner.write_files import list2file
from src.grammar_learner.widgets import html_table
from src.grammar_learner.pqa_table import table_rows, params, wide_rows
tmpath = module_path + '/tmp/'
check_dir(tmpath, True, 'none')
start = time.time()
runs = (1,1)
# print(UTC(), ':: module_path:', module_path)

Corpus test settings

In [2]:
corpus = 'GCB' # 'Gutenberg-Children-Books-Caps' 
dataset = 'LG-ANY-all-parses-agm-opt'
kwargs = {
    'left_wall'     :   ''          ,
    'period'        :   False       ,
    'context'       :   1           ,
    'min_word_count':   1           ,
    'word_space'    :   'sparse'    ,
    'clustering'    :   ['agglomerative', 'ward'],
    'clustering_metric' : ['silhouette', 'cosine'],
    'cluster_range' :   2000        ,   # 2000/1000/500/50/20
    'top_level'     :   0.01        ,
    'grammar_rules' :   2           ,
    'max_disjuncts' :   1000000     ,   # off
    'stop_words'    :   []          ,   # allow any token
    'tmpath'        :   tmpath      ,
    'verbose'       :   'log+'      ,
    'template_path' :   'poc-turtle',
    'linkage_limit' :   1000        }
rp = module_path + '/data/' + corpus + '/LG-E-clean/GCB-LG-English-clean.ull'
cp = rp  # corpus path = reference_path
runs = (1,1)
out_dir = module_path + '/output/Gutenberg-Children-agm-opt-MWC=1-' + str(UTC())[:10]
if check_corpus(rp, 'min'): print(UTC(), '\n', out_dir)
2019-01-25 15:01:39 UTC 
 /home/obaskov/94/language-learning/output/Gutenberg-Children-agm-opt-MWC=1-2019-01-25
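The cells below run the same test for cluster_range values 2000, 1000, 500, 50 and 20, one cell per value so that intermediate outputs are preserved. A minimal sketch of the same sweep as a single loop, using only the functions imported above and assuming wide_rows returns the same (rows, _, header, log, rules) tuple for every value, as it does in the cells below:

table = []
for cr in [2000, 1000, 500, 50, 20]:
    kwargs['cluster_range'] = cr                    # vary only the number of clusters
    line = [['ALE' + str(cr), corpus, dataset, 0, 0, 'none']]
    a, _, header, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
    header[0] = 'Cell'
    table.extend(a)                                 # accumulate rows for the summary table
    display(html_table([header] + a)); print(test_stats(log))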

Tests: min_word_count = 1; 2000/1000/500/50/20 clusters

In [3]:
%%capture
table = []
kwargs['cluster_range'] = 2000
line = [['ALE2000', corpus, dataset, 0, 0, 'none']]
a, _, header, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
header[0] = 'Cell'
table.extend(a)
In [4]:
display(html_table([header] + a)); print(test_stats(log))
Cell | Corpus | Parsing | Space | Linkage | Affinity | G12n | Threshold | Rules | MWC | NN | SI | PA | PQ | F1 | Top 5 cluster sizes
ALE2000 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 2000 | 1 | --- | 0.0 | 69% | 38% | 0.42 | [4513, 1703, 934, 922, 823]
Cleaned dictionary: 35750 words, grammar learn time: 11:07:07, grammar test time: 01:07:37
In [5]:
%%capture
kwargs['cluster_range'] = 1000
line = [['ALE1000', corpus, dataset, 0, 0, 'none']]
a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
table.extend(a)
In [6]:
display(html_table([header] + a)); print(test_stats(log))
Cell | Corpus | Parsing | Space | Linkage | Affinity | G12n | Threshold | Rules | MWC | NN | SI | PA | PQ | F1 | Top 5 cluster sizes
ALE1000 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 1000 | 1 | --- | 0.0 | 75% | 41% | 0.45 | [6406, 2115, 1617, 1415, 934]
Cleaned dictionary: 35750 words, grammar learn time: 09:58:19, grammar test time: 01:31:47
In [7]:
%%capture
kwargs['cluster_range'] = 500
line = [['ALE500', corpus, dataset, 0, 0, 'none']]
a, _, header, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
header[0] = 'Cell'
table.extend(a)
In [8]:
display(html_table([header] + a)); print(test_stats(log))
Cell | Corpus | Parsing | Space | Linkage | Affinity | G12n | Threshold | Rules | MWC | NN | SI | PA | PQ | F1 | Top 5 cluster sizes
ALE500 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 500 | 1 | --- | 0.0 | 81% | 45% | 0.48 | [7286, 5077, 3233, 1415, 1092]
Cleaned dictionary: 35750 words, grammar learn time: 09:52:07, grammar test time: 03:20:16
In [9]:
display(html_table([header] + table))
Cell | Corpus | Parsing | Space | Linkage | Affinity | G12n | Threshold | Rules | MWC | NN | SI | PA | PQ | F1 | Top 5 cluster sizes
ALE2000 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 2000 | 1 | --- | 0.0 | 69% | 38% | 0.42 | [4513, 1703, 934, 922, 823]
ALE1000 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 1000 | 1 | --- | 0.0 | 75% | 41% | 0.45 | [6406, 2115, 1617, 1415, 934]
ALE500 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 500 | 1 | --- | 0.0 | 81% | 45% | 0.48 | [7286, 5077, 3233, 1415, 1092]
In [10]:
print(UTC(), ':: 2000/1000/500 finished, elapsed', str(round((time.time()-start)/3600.0, 1)), 'hours')
table_str = list2file(table, out_dir + '/all_tests_table.txt')
print('Results saved to', out_dir + '/all_tests_table.txt')
2019-01-27 03:58:57 UTC :: 2000/1000/500 finished, elapsed 37.0 hours
Results saved to /home/obaskov/94/language-learning/output/Gutenberg-Children-agm-opt-MWC=1-2019-01-25/all_tests_table.txt
In [11]:
%%capture
kwargs['cluster_range'] = 50
line = [['ALE50', corpus, dataset, 0, 0, 'none']]
a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
table.extend(a)
---------------------------------------------------------------------------
ParserError                               Traceback (most recent call last)
<ipython-input-11-5c6f3210deef> in <module>()
      1 kwargs['cluster_range'] = 50
      2 line = [['ALE50', corpus, dataset, 0, 0, 'none']]
----> 3 a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
      4 table.extend(a)

~/94/language-learning/src/grammar_learner/pqa_table.py in wide_rows(lines, out_dir, cp, rp, runs, **kwargs)
    414                 for k in range(runs[1]):
    415                     a, f1, precision, q = pqa_meter(re['grammar_file'],
--> 416                                                     og, cp, rp, **kwargs)
    417                     pa.append(a)
    418                     pq.append(q)

~/94/language-learning/src/grammar_learner/pqa_table.py in pqa_meter(dict_path, output_path, corpus_path, reference_path, **kwargs)
     97                                              dict_path, grammar_path,
     98                                              template_path, linkage_limit,
---> 99                                              options, reference_path)
    100     pa = float(pa)
    101     recall = float(recall)

~/94/language-learning/src/grammar_tester/grammartester.py in test_grammar(corpus_path, output_path, dict_path, grammar_path, template_path, linkage_limit, options, reference_path, timeout)
    372 
    373     # pm, pq = gt.test(dict_path, corpus_path, output_path, reference_path, options, None)
--> 374     pm, pq = gt.test(dict_path, corpus_path, output_path, reference_path, options, TextProgress)
    375 
    376     return \

~/94/language-learning/src/grammar_tester/grammartester.py in test(self, dict_path, corpus_path, output_path, reference_path, options, progress)
    327                 self._options &= (~BIT_DPATH_CREATE)
    328 
--> 329             self._on_dict_file(dict_path, parse_args)
    330 
    331         if self._parser is not None:

~/94/language-learning/src/grammar_tester/grammartester.py in _on_dict_file(self, dict_file_path, args)
    224 
    225         if os.path.isfile(corp_path):
--> 226             self._on_corpus_file(corp_path, [dest_path, lang_path] + args)
    227 
    228         elif os.path.isdir(corp_path):

~/94/language-learning/src/grammar_tester/grammartester.py in _on_corpus_file(self, corpus_file_path, args)
    181 
    182         file_metrics, file_quality = self._parser.parse(dict_path, corpus_file_path, out_file,
--> 183                                                         ref_file, self._options, self._progress)
    184 
    185         if self._options & (BIT_SEP_STAT | BIT_OUTPUT) == BIT_SEP_STAT:

~/94/language-learning/src/grammar_tester/lginprocparser.py in parse(self, dict_path, corpus_path, output_path, ref_file, options, progress)
    288                     raise ParserError("Process '{0}' terminated with exit code: {1} "
    289                                        "and error message:\n'{2}'.".format(lgp_cmd[0], proc_pars.returncode,
--> 290                                                                            err.decode()))
    291 
    292                 # with open(output_path + ".raw", "w") as r:

ParserError: Process 'link-parser' terminated with exit code: -11 and error message:
'link-grammar: Info: Dictionary found at /home/obaskov/94/language-learning/output/Gutenberg-Children-agm-opt-MWC=1-2019-01-25/GCB_LG-ANY-all-parses-agm-opt_cALWEd_no-gen_50c/dict_50C_2019-01-27_0006/4.0.dict
link-grammar: Info: Dictionary version 0.0.7, locale en_US.UTF-8
link-grammar: Info: Library version link-grammar-5.5.1. Enter "!help" for help.
'.
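link-parser terminated with exit code -11 (a segmentation fault) while testing the 50-cluster grammar, so wide_rows raised ParserError and no ALE50 row was added; the cells below therefore still show the ALE500 results. A minimal sketch of guarding the remaining runs against such crashes, assuming ParserError can be imported from src.grammar_tester.lginprocparser as the traceback above suggests:

from src.grammar_tester.lginprocparser import ParserError  # import path inferred from the traceback above

for cr in [50, 20]:
    kwargs['cluster_range'] = cr
    line = [['ALE' + str(cr), corpus, dataset, 0, 0, 'none']]
    try:
        a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
        table.extend(a)
    except ParserError as e:
        print(UTC(), ':: ALE' + str(cr), 'failed, skipping:', e)  # keep the sweep going after a link-parser crash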
In [12]:
display(html_table([header] + a)); print(test_stats(log))
Cell | Corpus | Parsing | Space | Linkage | Affinity | G12n | Threshold | Rules | MWC | NN | SI | PA | PQ | F1 | Top 5 cluster sizes
ALE500 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 500 | 1 | --- | 0.0 | 81% | 45% | 0.48 | [7286, 5077, 3233, 1415, 1092]
Cleaned dictionary: 35750 words, grammar learn time: 09:52:07, grammar test time: 03:20:16
In [13]:
display(html_table([header] + table))
Cell | Corpus | Parsing | Space | Linkage | Affinity | G12n | Threshold | Rules | MWC | NN | SI | PA | PQ | F1 | Top 5 cluster sizes
ALE2000 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 2000 | 1 | --- | 0.0 | 69% | 38% | 0.42 | [4513, 1703, 934, 922, 823]
ALE1000 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 1000 | 1 | --- | 0.0 | 75% | 41% | 0.45 | [6406, 2115, 1617, 1415, 934]
ALE500 | GCB | LG-ANY-all-parses-agm-opt | cALWEd | ward | euclidean | none | --- | 500 | 1 | --- | 0.0 | 81% | 45% | 0.48 | [7286, 5077, 3233, 1415, 1092]
In [14]:
print(UTC(), ':: 2000/1000/500/50 finished, elapsed', str(round((time.time()-start)/3600.0, 1)), 'hours')
table_str = list2file(table, out_dir + '/all_tests_table.txt')
print('Results saved to', out_dir + '/all_tests_table.txt')
2019-01-27 13:37:58 UTC :: 2000/1000/500/50 finished, elapsed 46.6 hours
Results saved to /home/obaskov/94/language-learning/output/Gutenberg-Children-agm-opt-MWC=1-2019-01-25/all_tests_table.txt
In [ ]:
%%capture
kwargs['cluster_range'] = 20
line = [['ALE20', corpus, dataset, 0, 0, 'none']]
a, _, h, log, rules = wide_rows(line, out_dir, cp, rp, runs, **kwargs)
table.extend(a)
In [ ]:
display(html_table([header] + a)); print(test_stats(log))

Save results

In [ ]:
display(html_table([header] + table))
In [ ]:
print(UTC(), ':: finished, elapsed', str(round((time.time()-start)/3600.0, 1)), 'hours')
table_str = list2file(table, out_dir + '/all_tests_table.txt')
print('Results saved to', out_dir + '/all_tests_table.txt')
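To inspect the saved table in a later session, a minimal sketch of reloading it; the tab delimiter and the absence of a header row in the file are assumptions about list2file's output format, not checked here:

# Reload the saved results rows and redisplay them with the current session's header.
with open(out_dir + '/all_tests_table.txt') as f:
    rows = [line.rstrip('\n').split('\t') for line in f if line.strip()]
display(html_table([header] + rows))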