separate parsing of ssj and kres (mem management)

voje 2019-04-21 19:18:19 +02:00
parent 00d9192993
commit bcc64c767c


@@ -9,11 +9,13 @@ import sys
 from multiprocessing import Pool
 import time
 
+CORPORA = ["kres", "ssj"]
+
 # logging.basicConfig(filename=Path("/var/tmp/corpusparser.log"), filemode='a', level=logging.INFO)
 logger = logging.getLogger(__name__)
-# lfh = logging.FileHandler("/var/tmp/fill-database.log")
-lfh = logging.StreamHandler(sys.stdout)
+lfh = logging.FileHandler("/project/logs/fill-database.log")
+# lfh = logging.StreamHandler(sys.stdout)
 formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
 lfh.setFormatter(formatter)
 logger.addHandler(lfh)
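
The handler change above switches logging from stdout to a file under /project/logs/, which the script assumes already exists. A minimal standalone sketch of the same setup, with a stderr fallback added here as an assumption (the original has no fallback):

import logging
import sys
from pathlib import Path

LOG_PATH = Path("/project/logs/fill-database.log")

logger = logging.getLogger(__name__)
# Fall back to stderr if the log directory is missing; the original
# code assumes /project/logs/ is already present (presumably a mount).
if LOG_PATH.parent.is_dir():
    handler = logging.FileHandler(str(LOG_PATH))
else:
    handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger.addHandler(handler)
logger.setLevel(logging.INFO)
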
@@ -100,31 +102,12 @@ def _get_dbclient(args):
     return dbclient
 
-# wrap it in a function for better garbage collection
-def parse_ssj(args):
-    logger.info("Parsing Ssj: {}".format(args.ssj_file))
-    ssj_parser = Parser(logger=logger, corpus="ssj")
-    ssj_data = ssj_parser.parse_xml_file(Path(args.ssj_file))
-    if args.output == "file":
-        ssj_outfile = outdir / "ssj500k.json"
-        with ssj_outfile.open("w") as fp:
-            json.dump(ssj_data, fp)
-    elif args.output == "db":
-        dbclient = _get_dbclient(args)
-        valdb = dbclient.valdb
-        ssj_col = valdb["ssj"]
-        for sentence in ssj_data:
-            sentence = _db_preprocess(sentence)
-            ssj_col.update({"sid": sentence["sid"]}, sentence, upsert=True)
-    del ssj_parser
-    del ssj_data
-
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Parsing corpora kres and ssj500k.")
-    parser.add_argument('--kres-folder', required=True)
-    parser.add_argument('--kres-srl-folder', required=True)
-    parser.add_argument('--ssj-file', required=True)
+    parser.add_argument('--corpus', required=True)
+    parser.add_argument('--kres-folder', required=False)
+    parser.add_argument('--kres-srl-folder', required=False)
+    parser.add_argument('--ssj-file', required=False)
     parser.add_argument('--output', required=False, default=None)
     parser.add_argument('--outdir', required=False, default=None)
     parser.add_argument('--dbaddr', required=False, default=None)
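
Because --corpus is now required while the per-corpus inputs are optional, one run of the script handles exactly one corpus. A sketch of how a caller might drive both passes, one child process per corpus; the script name fill_database.py and the /data/... paths are placeholders, not taken from the commit:

import subprocess
import sys

# Hypothetical driver: one child process per corpus, so each parse
# returns all of its memory to the OS when the process exits.
SCRIPT = "fill_database.py"  # placeholder name, not from the commit

for corpus, extra in [
    ("ssj", ["--ssj-file", "/data/ssj500k.xml"]),
    ("kres", ["--kres-folder", "/data/kres", "--kres-srl-folder", "/data/kres_srl"]),
]:
    cmd = [sys.executable, SCRIPT, "--corpus", corpus, "--output", "db"] + extra
    subprocess.run(cmd, check=True)  # raises CalledProcessError on failure
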
@ -133,39 +116,55 @@ if __name__ == "__main__":
parser.add_argument('--cores', required=False, default=1) parser.add_argument('--cores', required=False, default=1)
args = parser.parse_args() args = parser.parse_args()
corpus = args.corpus
assert (corpus in CORPORA), "Wrong corpus name."
outdir = None outdir = None
if args.output == "file": if args.output == "file":
outdir = Path(args.outdir) outdir = Path(args.outdir)
outdir.mkdir(parents=True, exist_ok=True) outdir.mkdir(parents=True, exist_ok=True)
elif args.output == "db": elif args.output == "db":
# Force unique sid
dbclient = _get_dbclient(args) dbclient = _get_dbclient(args)
for corpus in ["kres", "ssj"]: dbclient.valdb[corpus].ensure_index([("sid", pymongo.ASCENDING)])
dbclient.valdb[corpus].ensure_index([("sid", pymongo.ASCENDING)]) dbclient.valdb[corpus].ensure_index([("headwords", pymongo.ASCENDING)])
dbclient.valdb[corpus].ensure_index([("headwords", pymongo.ASCENDING)]) dbclient.valdb[corpus].ensure_index([("functors", pymongo.ASCENDING)])
dbclient.valdb[corpus].ensure_index([("functors", pymongo.ASCENDING)])
# SSJ if corpus == "ssj":
p = Pool(1) logger.info("Parsing Ssj: {}".format(args.ssj_file))
p.map(parse_ssj, [args]) ssj_parser = Parser(logger=logger, corpus="ssj")
ssj_data = ssj_parser.parse_xml_file(Path(args.ssj_file))
if args.output == "file":
ssj_outfile = outdir / "ssj500k.json"
with ssj_outfile.open("w") as fp:
json.dump(ssj_data, fp)
elif args.output == "db":
dbclient = _get_dbclient(args)
valdb = dbclient.valdb
ssj_col = valdb["ssj"]
for sentence in ssj_data:
sentence = _db_preprocess(sentence)
ssj_col.update({"sid": sentence["sid"]}, sentence, upsert=True)
time.sleep(30)
# Kres if corpus == "kres":
logger.info("Parsing Kres: {}".format(args.kres_folder)) # Kres
kres_parser = Parser( logger.info("Parsing Kres: {}".format(args.kres_folder))
logger=logger, kres_parser = Parser(
corpus="kres", logger=logger,
kres_srl_folder=args.kres_srl_folder corpus="kres",
) kres_srl_folder=args.kres_srl_folder
)
# [(idx, filepath)] # [(idx, filepath)]
kres_files = [x for x in Path(args.kres_folder).iterdir()] kres_files = [x for x in Path(args.kres_folder).iterdir()]
kres_files = [x for x in enumerate(kres_files)] kres_files = [x for x in enumerate(kres_files)]
n_kres_files = len(kres_files) n_kres_files = len(kres_files)
p = Pool(int(args.cores)) p = Pool(int(args.cores))
p.map(_handle_kres_file_tpl, kres_files) p.map(_handle_kres_file_tpl, kres_files)
logger.info("Finished parsing.") logger.info("Finished parsing.")