todo: implement parser output separately

voje 2019-03-14 08:30:33 +01:00
parent da460f74f1
commit dc20480e20
4 changed files with 99 additions and 48 deletions

View File

@@ -11,6 +11,9 @@ MAKE_ROOT = $(shell pwd)
SSJ_FILE = "$(MAKE_ROOT)/data/samples/ssj_example/ssj500k-sl.body.sample.xml"
KRES_FOLDER = "$(MAKE_ROOT)/data/samples/kres_example"
KRES_SRL_FOLDER = "$(MAKE_ROOT)/data/kres_srl"
OUTPUT = "file"
OUTDIR = "$(HOME)/workdir/outputfolder"
DBADDR = ""
export
.PHONY: dev-env preflight
@@ -30,5 +33,5 @@ data/samples:
# when debugging, run this once, then run python3 ... by hand
preflight: data/samples
pip3 install -e src/pkg/corpusparser/.
python3 src/preflight/main_parse.py --kres-folder $(KRES_FOLDER) \
--ssj-file $(SSJ_FILE) --kres-srl-folder $(KRES_SRL_FOLDER)
python3 src/pkg/corpusparser/corpusparser/main.py --kres-folder $(KRES_FOLDER) \
--ssj-file $(SSJ_FILE) --kres-srl-folder $(KRES_SRL_FOLDER) --output $(OUTPUT) --outdir $(OUTDIR) --dbaddr $(DBADDR)

View File

@@ -10,7 +10,7 @@ logging.basicConfig(level=logging.INFO)
# Create an iterator that outputs resulting sentences (python dict format).
class Parser():
def __init__(self, corpus, infiles, logger=None):
def __init__(self, corpus, infiles, output=None, outdir=None, dbaddr=None, logger=None):
if corpus == "kres":
self.kres_folder = Path(infiles[0])
@@ -20,11 +20,22 @@ class Parser():
else:
raise ValueError("Argument corpus should be 'ssj' or 'kres'.")
self.output = output # None | file | db
if self.output == "file":
self.outdir = Path(outdir)
self.outdir.mkdir(parents=True, exist_ok=True)
elif self.output == "db":
self.dbaddr = "TODO"
self.corpus = corpus
self.W_TAGS = ['w']
self.C_TAGS = ['c']
self.S_TAGS = ['S', 'pc']
self.logger = logger or logging.getLogger(__name__)
self.stats = {
"parsed_count": 0,
"missing_srl": []
}
def parse_jos_links(self, sent_el):
if self.corpus == "kres":
@@ -67,7 +78,7 @@ class Parser():
}]
return res_links
def parse_srl_links(self, sent_el, sent_srl_links):
def parse_srl_links(self, sent_el, sent_srl_links=None):
if self.corpus == "kres":
return self.parse_srl_links_kres(sent_el, sent_srl_links)
else:
@@ -84,22 +95,23 @@ class Parser():
# find the corresponding json file with srl links
return res_links
def parse(self):
def sentence_generator(self):
# Using generators so we don't copy a whole corpus around in memory.
if self.corpus == "kres":
for xml_file in self.kres_folder.iterdir():
self.parse_xml_file(xml_file)
break # TODO dev break
# self.parse_xml_file(xml_file)
yield from self.parse_xml_file(xml_file)
else:
self.parse_xml_file(self.ssj_file)
yield from self.parse_xml_file(self.ssj_file)
def parse_xml_file(self, xml_file):
srl_dict = {}
srl_from_json = {}
if self.corpus == "kres":
# in case of kres, read the SRL links from a separate json file
file_id = xml_file.name.split(".")[0]
json_file = self.kres_srl_folder / Path(file_id).with_suffix(".srl.json")
with json_file.open("r") as fp:
srl_dict = json.loads(fp.read())
srl_from_json = json.loads(fp.read())
with xml_file.open("rb") as fp:
# remove namespaces
@@ -155,7 +167,7 @@ class Parser():
sentence_text += el.text
sentence_tokens += [{
"word": False,
"tid": int(el_id),
"tid": (int(el_id) if self.corpus == "kres" else -1),
"text": el.text,
}]
elif el.tag in self.S_TAGS:
@@ -166,23 +178,26 @@ class Parser():
pass
sentence_id = "{}.{}.{}".format(f_id, p_id, s_id)
# make a generator instead of holding the whole corpus in memory
if sentence_id in res_dict:
raise KeyError("duplicated id: {}".format(sentence_id))
jos_links = self.parse_jos_links(s)
srl_links = srl_dict.get(sentence_id) if self.corpus == "kres" else None
if srl_links is None:
srl_links_parsed = None
if self.corpus == "kres":
srl_links_raw = srl_from_json.get(sentence_id)
if srl_links_raw is None:
srl_links_parsed = None
self.stats["missing_srl"] += [(sentence_id, sentence_text)]
else:
srl_links_parsed = self.parse_srl_links(s, srl_links_raw)
else:
srl_links_parsed = self.parse_srl_links(s, srl_links)
res_dict[sentence_id] = {
srl_links_parsed = self.parse_srl_links(s)
if len(srl_links_parsed) == 0:
self.stats["missing_srl"] += [(sentence_id, sentence_text)]
sentence_entry = {
"sid": sentence_id,
"text": sentence_text,
"tokens": sentence_tokens,
"jos_links": jos_links,
"srl_links": srl_links_parsed
}
if srl_links is None:
self.logger.info("srl_links missing:{}:{}".format(
sentence_id, res_dict[sentence_id]["text"]))
return res_dict
self.stats["parsed_count"] += 1
yield (xml_file, sentence_entry)
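
A minimal sketch of streaming this generator from a caller (the folder paths below are examples taken from the Makefile defaults, not part of this commit; the dict fields match sentence_entry above):

from corpusparser import Parser

# hypothetical caller; paths follow the sample layout defined in the Makefile
kres_parser = Parser(
    corpus="kres",
    infiles=["data/samples/kres_example", "data/kres_srl"],
)
for xml_file, sentence_entry in kres_parser.sentence_generator():
    # each yielded entry is a plain dict: sid, text, tokens, jos_links, srl_links
    print(xml_file.name, sentence_entry["sid"], len(sentence_entry["tokens"]))
print(kres_parser.stats["parsed_count"], "sentences parsed,",
      len(kres_parser.stats["missing_srl"]), "without SRL links")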

View File

@@ -0,0 +1,58 @@
from corpusparser import Parser
from pathlib import Path
import argparse
import logging
import json
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
## Main handles command line arguments and writing to files / DB.
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Parsing corpora kres and ssj500k.")
parser.add_argument('--kres-folder', required=True)
parser.add_argument('--kres-srl-folder', required=True)
parser.add_argument('--ssj-file', required=True)
parser.add_argument('--output', required=False, default=None)
parser.add_argument('--outdir', required=False, default=None)
parser.add_argument('--dbaddr', required=False, default=None)
args = parser.parse_args()
# parse ssj
logger.info("Parsing ssj500k: {}".format(args.ssj_file))
ssj_parser = Parser(
corpus="ssj",
infiles=[args.ssj_file],
output=args.output,
outdir=args.outdir,
)
res = [x[1]["sid"] for x in ssj_parser.sentence_generator()]
logger.info("Parsed {} sentences (ssj500k)".format(len(res)))
# parse kres
logger.info("Parsing Kres: {}".format(args.ssj_file))
kres_parser = Parser(
corpus="kres",
infiles=[args.kres_folder, args.kres_srl_folder],
output=args.output,
outdir=args.outdir,
)
res = [x[1]["sid"] for x in kres_parser.sentence_generator()]
logger.info("Parsed {} sentences (kres)".format(len(res)))
## Handling output is situational --- implement it outside of Parser.
## Parser returns tuples (orig_file, element)
# 1. parse per-file and output to file (JSON)
# 2. parse and save to DB
# TODO
def handle_output(self, sent_ent, xml_file):
if self.output is None:
return
if self.output == "file":
# append each sentence to <file_id>.json in outdir
outfile = Path(self.outdir) / Path(xml_file.name.split(".")[0]).with_suffix(".json")
with outfile.open("a") as fp:
json.dump(sent_ent, fp)
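
A sketch of how that plan could be driven from the main block, outside of Parser (the per-file JSON-lines layout is an assumption, not something this commit implements):

# hypothetical driver loop; mirrors the handle_output draft above
if args.output == "file":
    for xml_file, sent_ent in kres_parser.sentence_generator():
        outfile = Path(args.outdir) / Path(xml_file.name.split(".")[0]).with_suffix(".json")
        with outfile.open("a") as fp:
            json.dump(sent_ent, fp)
            fp.write("\n")  # one JSON object per line
elif args.output == "db":
    pass  # TODO: push each sent_ent to the DB at args.dbaddr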

View File

@@ -1,25 +0,0 @@
from corpusparser import Parser
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Parsing corpora kres and ssj500k.")
parser.add_argument('--kres-folder', required=True)
parser.add_argument('--kres-srl-folder', required=True)
parser.add_argument('--ssj-file', required=True)
args = parser.parse_args()
# parse ssj
"""
ssj_parser = Parser(
corpus="ssj",
infiles=[args.ssj_file]
)
ssj_parser.parse()
"""
# parse kres
kres_parser = Parser(
corpus="kres",
infiles=[args.kres_folder, args.kres_srl_folder]
)
kres_parser.parse()