|
|
|
@ -27,7 +27,7 @@ def create_nlp(resource_directory):
|
|
|
|
|
|
|
|
|
|
class Pipeline:
|
|
|
|
|
|
|
|
|
|
def __init__(self, nlp, resource_directory):
|
|
|
|
|
def __init__(self, resource_directory, nlp=None):
|
|
|
|
|
self.nlp = nlp
|
|
|
|
|
self.tmp_directory = tempfile.mkdtemp()
|
|
|
|
|
resource_file_names = [resource_directory + '/' + f for f in os.listdir(resource_directory)]
|
|
|
|
@ -42,16 +42,19 @@ class Pipeline:
|
|
|
|
|
shutil.copyfile(file_name, self.file_map[file_key])
|
|
|
|
|
|
|
|
|
|
def do_tokenise(self):
    """Tokenise the raw strings list with obeliks into CoNLL-U form.

    Reads ``file_map['strings-list']`` and writes
    ``file_map['obeliks-tokenised']``.
    """
    print('Tokenising with obeliks ...')
    obeliks.run(
        in_file=self.file_map['strings-list'],
        out_file=self.file_map['obeliks-tokenised'],
        conllu=True)
|
|
|
|
|
|
|
|
|
|
def do_tweak_conllu(self):
    """Post-process the obeliks CoNLL-U output (see ``tweak_conllu``).

    Reads ``file_map['obeliks-tokenised']`` and writes
    ``file_map['obeliks-tweaked']``.
    """
    print('Tweaking conllu ...')
    source = self.file_map['obeliks-tokenised']
    target = self.file_map['obeliks-tweaked']
    tweak_conllu(source, target)
|
|
|
|
|
|
|
|
|
|
def do_parse(self):
|
|
|
|
|
print('Parsing with classla ...')
|
|
|
|
|
input_file_name = self.file_map['obeliks-tweaked']
|
|
|
|
|
output_file_name = self.file_map['classla-parsed']
|
|
|
|
|
doc = Document(text=None)
|
|
|
|
@ -61,42 +64,50 @@ class Pipeline:
|
|
|
|
|
result.conll_file.write_conll(output_file_name)
|
|
|
|
|
|
|
|
|
|
def do_translate_jos(self):
    """Translate JOS tags in the parsed CoNLL-U using the dictionary file.

    Reads ``file_map['classla-parsed']`` and ``file_map['dict']``,
    writes ``file_map['classla-translated']``.
    """
    print('Translating JOS ...')
    translate_jos(
        self.file_map['classla-parsed'],
        self.file_map['dict'],
        self.file_map['classla-translated'])
|
|
|
|
|
|
|
|
|
|
def do_conllu_to_tei(self):
    """Convert the translated CoNLL-U file into the initial TEI document.

    Reads ``file_map['classla-translated']`` and writes
    ``file_map['tei-initial']``.
    """
    print('Converting to TEI ...')
    source = self.file_map['classla-translated']
    target = self.file_map['tei-initial']
    conllu_to_tei(source, target)
|
|
|
|
|
|
|
|
|
|
def do_split_tei(self):
    """Split the initial TEI into single-token and multiple-token parts.

    Reads ``file_map['tei-initial']``; writes ``file_map['tei-single']``
    and ``file_map['tei-multiple']``.
    """
    print('Splitting TEI ...')
    split_tei(
        self.file_map['tei-initial'],
        self.file_map['tei-single'],
        self.file_map['tei-multiple'])
|
|
|
|
|
|
|
|
|
|
def do_assign_single(self):
    """Assign structure ids to the single-token TEI entries.

    Reads ``file_map['tei-single']`` and ``file_map['structures-old']``,
    writes ``file_map['tei-single-ids']``.
    """
    print('Assigning single structures ...')
    assign_single(
        self.file_map['tei-single'],
        self.file_map['structures-old'],
        self.file_map['tei-single-ids'])
|
|
|
|
|
|
|
|
|
|
def do_tei_to_dictionary_single(self):
    """Convert the id-annotated single-token TEI into dictionary form.

    Reads ``file_map['tei-single-ids']`` and writes
    ``file_map['dictionary-single']``.
    """
    print('Converting single TEI to dictionary ...')
    source = self.file_map['tei-single-ids']
    target = self.file_map['dictionary-single']
    tei_to_dictionary(source, target)
|
|
|
|
|
|
|
|
|
|
def do_tei_to_dictionary_multiple(self):
    """Convert the id-annotated multiple-token TEI into dictionary form.

    Reads ``file_map['tei-multiple-ids-2']`` (the second id-assignment
    pass) and writes ``file_map['dictionary-multiple']``.
    """
    print('Converting multiple TEI to dictionary ...')
    source = self.file_map['tei-multiple-ids-2']
    target = self.file_map['dictionary-multiple']
    tei_to_dictionary(source, target)
|
|
|
|
|
|
|
|
|
|
def do_find_structure_units_first(self):
    """First unit-finding pass: match the existing (old) structures
    against the multiple-token TEI, producing the first MWE csv.
    """
    print('Finding units for existing structures ...')
    structures = self.file_map['structures-old']
    tei = self.file_map['tei-multiple']
    csv_out = self.file_map['mwes-1']
    self._do_find_structure_units(structures, tei, csv_out)
|
|
|
|
|
|
|
|
|
|
def do_find_structure_units_second(self):
    """Second unit-finding pass: match the extended (new) structures
    against the multiple-token TEI, producing the second MWE csv.
    """
    print('Finding units for extended structures ...')
    structures = self.file_map['structures-new']
    tei = self.file_map['tei-multiple']
    csv_out = self.file_map['mwes-2']
    self._do_find_structure_units(structures, tei, csv_out)
|
|
|
|
|
|
|
|
|
|
def _do_find_structure_units(self, structure_file_name, tei_file_name, csv_file_name):
|
|
|
|
@ -145,20 +156,24 @@ class Pipeline:
|
|
|
|
|
return min_id
|
|
|
|
|
|
|
|
|
|
def do_assign_multiple_first(self):
    """First id-assignment pass over the multiple-token TEI, based on
    the existing (old) structures and the first MWE csv.
    """
    print('Assigning ids based on existing structures ...')
    assign_multiple(
        self.file_map['tei-multiple'],
        self.file_map['mwes-1'],
        self.file_map['tei-multiple-ids-1'],
        self._find_min_other_id('structures-old'))
|
|
|
|
|
|
|
|
|
|
def do_assign_multiple_second(self):
    """Second id-assignment pass over the multiple-token TEI, based on
    the extended (new) structures and the second MWE csv.
    """
    print('Assigning ids based on extended structures ...')
    assign_multiple(
        self.file_map['tei-multiple'],
        self.file_map['mwes-2'],
        self.file_map['tei-multiple-ids-2'],
        self._find_min_other_id('structures-new'))
|
|
|
|
|
|
|
|
|
|
def do_create_structures(self):
    """Extend the old structure inventory with structures missing for
    the first-pass annotated TEI, writing the new inventory.

    Reads ``file_map['structures-old']`` and
    ``file_map['tei-multiple-ids-1']``, writes
    ``file_map['structures-new']``.
    """
    print('Creating missing structures ...')
    create_structures(
        self.file_map['structures-old'],
        self.file_map['tei-multiple-ids-1'],
        self.file_map['structures-new'])
|
|
|
|
|
|
|
|
|
|
def do_merge_dictionaries(self):
|
|
|
|
|
print('Merging single and multiple dictionaries ...')
|
|
|
|
|
single_file_name = self.file_map['dictionary-single']
|
|
|
|
|
multiple_file_name = self.file_map['dictionary-multiple']
|
|
|
|
|
output_file_name = self.file_map['dictionary']
|
|
|
|
@ -170,11 +185,13 @@ class Pipeline:
|
|
|
|
|
xml_schema.assertValid(xml_tree)
|
|
|
|
|
|
|
|
|
|
def do_validate_structures(self):
    """Validate the new structure inventory against the structure schema."""
    print('Validating structures ...')
    schema = self.file_map['structure-schema']
    document = self.file_map['structures-new']
    self._do_validate(schema, document)
|
|
|
|
|
|
|
|
|
|
def do_validate_dictionary(self):
    """Validate the merged dictionary against the dictionary schema."""
    print('Validating dictionary ...')
    schema = self.file_map['dictionary-schema']
    document = self.file_map['dictionary']
    self._do_validate(schema, document)
|
|
|
|
|