Redmine #1835: used filename map for more flexible import/export
parent a1577029b5
commit 351865c50e
@@ -22,13 +22,13 @@ def test(string):
         string_file.write(string + '\n')

     try:
-        pipeline.import_string_file(string_file_name)
+        pipeline.import_file(string_file_name, 'strings-list')
         pipeline.do_tokenise()
         pipeline.do_tweak_conllu()
         pipeline.do_parse()
         pipeline.do_translate_jos()
         pipeline.do_conllu_to_tei()
-        pipeline.export_parsed_file(parse_file_name)
+        pipeline.export_file(parse_file_name, 'tei-initial')
         tei = lxml.parse(parse_file_name).getroot()
         message = lxml.tostring(tei, encoding='UTF-8', pretty_print=True).decode()
         ok = True
@@ -17,21 +17,41 @@ STRUCTURE_SCHEMA_FILE_NAME = '../resources/structures.xsd'
 DICTIONARY_SCHEMA_FILE_NAME = '../resources/monolingual_dictionaries.xsd'

 # temporary outputs
-STRING_LIST_FILE_NAME = 'strings.txt'
-OBELIKS_RAW_FILE_NAME = 'obeliks_raw.conllu'
-OBELIKS_TWEAKED_FILE_NAME = 'obeliks_tweaked.conllu'
-CLASSLA_OUTPUT_FILE_NAME = 'classla_raw.conllu'
-CLASSLA_TRANSLATED_FILE_NAME = 'classla_translated.conllu'
-TEI_INIT_FILE_NAME = 'tei_initial.xml'
-TEI_SINGLE_FILE_NAME = 'tei_single.xml'
-TEI_SINGLE_STRUCTURE_FILE_NAME = 'tei_single_with_ids.xml'
-TEI_MULTIPLE_FILE_NAME = 'tei_multiple.xml'
-TEI_MULTIPLE_STRUCTURE_1_FILE_NAME = 'tei_multiple_with_ids1.xml'
-TEI_MULTIPLE_STRUCTURE_2_FILE_NAME = 'tei_multiple_with_ids2.xml'
-MWE_CSV_1_FILE_NAME = 'mwes1.csv'
-MWE_CSV_2_FILE_NAME = 'mwes2.csv'
-STRUCTURE_OLD_FILE_NAME = 'structures_old.xml'
-STRUCTURE_NEW_FILE_NAME = 'structures_new.xml'
-DICTIONARY_SINGLE_FILE_NAME = 'dictionary_single.xml'
-DICTIONARY_MULTIPLE_FILE_NAME = 'dictionary_multiple.xml'
-DICTIONARY_FILE_NAME = 'dictionary.xml'
+FILE_NAME_MAP = {'strings-list': 'strings.txt',
+                 'obeliks-tokenised': 'obeliks_raw.conllu',
+                 'obeliks-tweaked': 'obeliks_tweaked.conllu',
+                 'classla-parsed': 'classla_raw.conllu',
+                 'classla-translated': 'classla_translated.conllu',
+                 'tei-initial': 'tei_initial.xml',
+                 'tei-single': 'tei_single.xml',
+                 'tei-single-ids': 'tei_single_with_ids.xml',
+                 'tei-multiple': 'tei_multiple.xml',
+                 'tei-multiple-ids-1': 'tei_multiple_with_ids1.xml',
+                 'tei-multiple-ids-2': 'tei_multiple_with_ids2.xml',
+                 'mwes-1': 'mwes1.csv',
+                 'mwes-2': 'mwes2.csv',
+                 'structures-old': 'structures_old.xml',
+                 'structures-new': 'structures_new.xml',
+                 'dictionary-single': 'dictionary_single.xml',
+                 'dictionary-multiple': 'dictionary_multiple.xml',
+                 'dictionary': 'dictionary.xml'
+                 }
+
+# STRING_LIST_FILE_NAME = 'strings.txt'
+# OBELIKS_RAW_FILE_NAME = 'obeliks_raw.conllu'
+# OBELIKS_TWEAKED_FILE_NAME = 'obeliks_tweaked.conllu'
+# CLASSLA_OUTPUT_FILE_NAME = 'classla_raw.conllu'
+# CLASSLA_TRANSLATED_FILE_NAME = 'classla_translated.conllu'
+# TEI_INIT_FILE_NAME = 'tei_initial.xml'
+# TEI_SINGLE_FILE_NAME = 'tei_single.xml'
+# TEI_SINGLE_STRUCTURE_FILE_NAME = 'tei_single_with_ids.xml'
+# TEI_MULTIPLE_FILE_NAME = 'tei_multiple.xml'
+# TEI_MULTIPLE_STRUCTURE_1_FILE_NAME = 'tei_multiple_with_ids1.xml'
+# TEI_MULTIPLE_STRUCTURE_2_FILE_NAME = 'tei_multiple_with_ids2.xml'
+# MWE_CSV_1_FILE_NAME = 'mwes1.csv'
+# MWE_CSV_2_FILE_NAME = 'mwes2.csv'
+# STRUCTURE_OLD_FILE_NAME = 'structures_old.xml'
+# STRUCTURE_NEW_FILE_NAME = 'structures_new.xml'
+# DICTIONARY_SINGLE_FILE_NAME = 'dictionary_single.xml'
+# DICTIONARY_MULTIPLE_FILE_NAME = 'dictionary_multiple.xml'
+# DICTIONARY_FILE_NAME = 'dictionary.xml'
@@ -26,8 +26,8 @@ resource_directory = None
 tmp_directory = None
 nlp = None

-def __get_tmp_file_name(file_name):
-    return tmp_directory + '/' + file_name
+def __get_tmp_file_name(file_key):
+    return tmp_directory + '/' + FILE_NAME_MAP[file_key]

 def initialise(**argument_map):
     global tmp_directory, resource_directory, nlp
@@ -38,22 +38,22 @@ def initialise(**argument_map):
     NLP_CONFIG_MAP['models_dir'] = resource_directory + '/classla'
     nlp = classla.Pipeline('sl', **NLP_CONFIG_MAP)

-def import_string_file(file_name):
-    shutil.copyfile(file_name, __get_tmp_file_name(STRING_LIST_FILE_NAME))
+def import_file(file_name, file_key):
+    shutil.copyfile(file_name, __get_tmp_file_name(file_key))

 def do_tokenise():
-    input_file_name = __get_tmp_file_name(STRING_LIST_FILE_NAME)
-    output_file_name = __get_tmp_file_name(OBELIKS_RAW_FILE_NAME)
+    input_file_name = __get_tmp_file_name('strings-list')
+    output_file_name = __get_tmp_file_name('obeliks-tokenised')
     obeliks.run(in_file=input_file_name, out_file=output_file_name, conllu=True)

 def do_tweak_conllu():
-    input_file_name = __get_tmp_file_name(OBELIKS_RAW_FILE_NAME)
-    output_file_name = __get_tmp_file_name(OBELIKS_TWEAKED_FILE_NAME)
+    input_file_name = __get_tmp_file_name('obeliks-tokenised')
+    output_file_name = __get_tmp_file_name('obeliks-tweaked')
     tweak_conllu(input_file_name, output_file_name)

 def do_parse():
-    input_file_name = __get_tmp_file_name(OBELIKS_TWEAKED_FILE_NAME)
-    output_file_name = __get_tmp_file_name(CLASSLA_OUTPUT_FILE_NAME)
+    input_file_name = __get_tmp_file_name('obeliks-tweaked')
+    output_file_name = __get_tmp_file_name('classla-parsed')
     doc = Document(text=None)
     conll_file = CoNLLFile(filename=input_file_name)
     doc.conll_file = conll_file
@@ -61,15 +61,15 @@ def do_parse():
     result.conll_file.write_conll(output_file_name)

 def do_translate_jos():
-    input_file_name = __get_tmp_file_name(CLASSLA_OUTPUT_FILE_NAME)
+    input_file_name = __get_tmp_file_name('classla-parsed')
     dictionary_file_name = resource_directory + '/dict.xml'
-    output_file_name = __get_tmp_file_name(CLASSLA_TRANSLATED_FILE_NAME)
+    output_file_name = __get_tmp_file_name('classla-translated')
     translate_jos(input_file_name, dictionary_file_name, output_file_name)

 def do_conllu_to_tei():
-    input_file_name = __get_tmp_file_name(CLASSLA_TRANSLATED_FILE_NAME)
-    output_file_name = __get_tmp_file_name(TEI_INIT_FILE_NAME)
+    input_file_name = __get_tmp_file_name('classla-translated')
+    output_file_name = __get_tmp_file_name('tei-initial')
     conllu_to_tei(input_file_name, output_file_name)

-def export_parsed_file(file_name):
-    shutil.copyfile(__get_tmp_file_name(TEI_INIT_FILE_NAME), file_name)
+def export_file(file_name, file_key):
+    shutil.copyfile(__get_tmp_file_name(file_key), file_name)
@@ -15,13 +15,13 @@ output_file_name = arguments.outtei

 def run_pipeline(input_file_name, output_file_name):
     pipeline.initialise(temp_dir='/tmp/structure_assignment_pipeline1', resource_dir='../resources')
-    pipeline.import_string_file(input_file_name)
+    pipeline.import_file(input_file_name, 'strings-list')
     pipeline.do_tokenise()
     pipeline.do_tweak_conllu()
     pipeline.do_parse()
     pipeline.do_translate_jos()
     pipeline.do_conllu_to_tei()
-    pipeline.export_parsed_file(output_file_name)
+    pipeline.export_file(output_file_name, 'tei-initial')

 if (__name__ == '__main__'):
     run_pipeline(input_file_name, output_file_name)
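With every temporary file registered in FILE_NAME_MAP, import_file and export_file can stage or retrieve any pipeline artefact by its key, rather than being hard-wired to the strings list and the initial TEI. A minimal usage sketch, assuming the module above is importable as pipeline, that the temporary and resource directories exist, and that the classla models are installed; the local file names ('strings.txt', 'tokenised.conllu', 'output.tei.xml') are placeholders, not names used in the repository:

    import pipeline

    # Working directories (placeholder paths, mirroring the run_pipeline hunk above).
    pipeline.initialise(temp_dir='/tmp/structure_assignment_pipeline1', resource_dir='../resources')

    # Stage the input strings under their FILE_NAME_MAP key.
    pipeline.import_file('strings.txt', 'strings-list')

    # Run the processing steps in order.
    pipeline.do_tokenise()
    pipeline.do_tweak_conllu()
    pipeline.do_parse()
    pipeline.do_translate_jos()
    pipeline.do_conllu_to_tei()

    # Any stage can now be copied out by key, not only the final TEI.
    pipeline.export_file('tokenised.conllu', 'obeliks-tokenised')
    pipeline.export_file('output.tei.xml', 'tei-initial')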