forked from kristjan/cjvt-valency
commit
4e8447d930
@ -1,3 +1,3 @@
|
|||||||
data/samples/
|
data/samples/
|
||||||
*/__pycache__/
|
|
||||||
*egg-info/
|
*egg-info/
|
||||||
|
*.pyc
|
||||||
|
@ -0,0 +1,6 @@
|
|||||||
|
F0034713.20.1": [{"dep": "7", "arg": "REC", "from": "9"}, {"dep": "10", "arg": "ACT", "from": "9"}, {"dep": "13", "arg": "MWPRED", "from": "12"}, {"dep": "18", "arg": "MANN", "from": "19"}, {"dep": "20", "arg": "LOC", "from": "19"}]
|
||||||
|
|
||||||
|
Sentence:
|
||||||
|
F0034713.20.0
|
||||||
|
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21
|
||||||
|
Sodobni ali preprosto neosveščeni potrošnik vse prerad zavrže stvar, ki se je malenkostno pokvarila in bi se jo zlahka dalo popraviti.
|
@ -1 +1 @@
|
|||||||
/home/kristjan/kres_srl/final_json/
|
/home/voje/work_data/final_json
|
@ -0,0 +1 @@
|
|||||||
|
/home/voje/work_data/final_json
|
@ -0,0 +1 @@
|
|||||||
|
/home/kristjan/kres_srl/final_json/
|
@ -0,0 +1,38 @@
|
|||||||
|
# corpusparser
|
||||||
|
A tool for parsing ssj500k and Kres into a unified .json format.
|
||||||
|
|
||||||
|
## Input:
|
||||||
|
### ssj500k
|
||||||
|
To parse ssj500k, point to the monolithic `ssj500k-sl.body.xml` file (tested on ssj500k 2.1).
|
||||||
|
|
||||||
|
### Kres
|
||||||
|
To parse Kres, point to folders:
|
||||||
|
* Kres folder, containing several (around 20K) .xml files (`F00XXXXX.xml.parsed.xml`).
|
||||||
|
* Kres SRL folder, containing SRL links for the corresponding F00...xml files (`F00XXXXX.srl.json`).
|
||||||
|
|
||||||
|
## Internal data format
|
||||||
|
This is the internal python dict data format. It can be stored to file as `.json` or stored into a database for application usage.
|
||||||
|
```python
|
||||||
|
{
|
||||||
|
'sid': 'F0034713.5.0',
|
||||||
|
'text': 'Mednarodni denarni sklad je odobril 30 milijard evrov vredno posojilo Grčiji. ',
|
||||||
|
'tokens': [
|
||||||
|
{'text': 'Mednarodni', 'lemma': 'mednaroden', 'msd': 'Ppnmeid', 'word': True, 'tid': 1},
|
||||||
|
{'text': 'denarni', 'lemma': 'denaren', 'msd': 'Ppnmeid', 'word': True, 'tid': 2},
|
||||||
|
{'text': 'sklad', 'lemma': 'sklad', 'msd': 'Somei', 'word': True, 'tid': 3},
|
||||||
|
{'text': 'je', 'lemma': 'biti', 'msd': 'Gp-ste-n', 'word': True, 'tid': 4},
|
||||||
|
{'text': 'odobril', 'lemma': 'odobriti', 'msd': 'Ggdd-em', 'word': True, 'tid': 5},
|
||||||
|
{'text': '30', 'lemma': '30', 'msd': 'Kag', 'word': True, 'tid': 6},
|
||||||
|
{'text': 'milijard', 'lemma': 'milijarda', 'msd': 'Sozmr', 'word': True, 'tid': 7}, # ...
|
||||||
|
]
|
||||||
|
'jos_links': [
|
||||||
|
{'to': 1, 'from': 3, 'afun': 'dol'},
|
||||||
|
{'to': 2, 'from': 3, 'afun': 'dol'},
|
||||||
|
{'to': 3, 'from': 5, 'afun': 'ena'}, # ...
|
||||||
|
]
|
||||||
|
'srl_links': [
|
||||||
|
{'to': 3, 'from': 5, 'afun': 'ACT'},
|
||||||
|
{'to': 7, 'from': 5, 'afun': 'PAT'}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
@ -1,3 +0,0 @@
|
|||||||
class Sentence:
    """Placeholder for a parsed-sentence representation (not implemented yet)."""

    def __init__(self):
        # BUG FIX: the original signature was `def __init__():` (no `self`),
        # so instantiating Sentence() raised TypeError.
        print("Sentence __init__(): TODO")
|
|
@ -1,2 +1 @@
|
|||||||
from corpusparser.Parser import Parser
|
from corpusparser.Parser import Parser
|
||||||
from corpusparser.Sentence import Sentence
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,102 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
from corpusparser import Parser
|
||||||
|
import argparse
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
## Main handles command line arguments and writing to files / DB.
|
||||||
|
|
||||||
|
def ssj_to_json_file(sentence_generator, outfolder):
    """Write every ssj500k sentence to a single JSON file (ssj500k.json).

    :param sentence_generator: iterable of (orig_file, sentence_dict) tuples,
        as produced by Parser.sentence_generator(); only index 1 is kept.
    :param outfolder: destination folder; created if missing.
    """
    # This function relies on the fact that files are parsed sequentially.
    outfolder = Path(outfolder)
    outfolder.mkdir(parents=True, exist_ok=True)
    outfile = outfolder / "ssj500k.json"

    # Collect all sentence dicts; the whole corpus is dumped as one JSON array.
    data_buffer = [s[1] for s in sentence_generator]

    with outfile.open("w") as fp:
        logger.info("Writing to {}".format(outfile))
        json.dump(data_buffer, fp)
|
||||||
|
|
||||||
|
def kres_to_json_files(sentence_generator, outfolder):
    """Write Kres sentences to one JSON file per source corpus file.

    The parser yields (orig_file, sentence_dict) tuples sequentially, so all
    sentences of one input file arrive together; sentences are buffered and
    the buffer is flushed whenever the source file changes.

    :param sentence_generator: iterable of (orig_file, sentence_dict) tuples;
        orig_file must expose a ``.name`` attribute (e.g. pathlib.Path).
    :param outfolder: destination root; files go into <outfolder>/kres_json.
    """
    outfolder = Path(outfolder) / "kres_json"
    outfolder.mkdir(parents=True, exist_ok=True)

    def write_buffer_to_file(outfile, outfile_buffer):
        # One output .json file per input corpus file.
        logger.info("Writing file: {}".format(outfile))
        with outfile.open("w") as fp:
            json.dump(outfile_buffer, fp)

    outfile_buffer = None
    current_outfile = None
    for s in sentence_generator:
        infile = s[0]
        # F00XXXXX.xml.parsed.xml -> F00XXXXX.json
        outfile = outfolder / Path(infile.name.split(".")[0]).with_suffix(".json")

        # Parser sequentially parses files; when we're done with a file, write it out.
        if current_outfile is None:
            current_outfile = outfile
            outfile_buffer = []
        elif outfile != current_outfile:
            write_buffer_to_file(current_outfile, outfile_buffer)
            current_outfile = outfile
            outfile_buffer = []

        # update buffer
        outfile_buffer += [s[1]]

    # Flush the last file's buffer. BUG FIX: guard against an empty
    # generator, which previously crashed on write_buffer_to_file(None, None).
    if current_outfile is not None:
        write_buffer_to_file(current_outfile, outfile_buffer)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def to_db():
    # Placeholder: persisting parsed sentences to a database is not
    # implemented yet; callers currently receive the literal string "TODO".
    return "TODO"
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # CLI entry point: parse ssj500k and Kres and write JSON output.
    # Handling output is situational --- implement it outside of Parser.
    # Parser returns tuples (orig_file, element):
    #   1. parse per-file and output to file (JSON)
    #   2. parse and save to DB (TODO, see to_db())
    parser = argparse.ArgumentParser(description="Parsing corpora kres and ssj500k.")
    parser.add_argument('--kres-folder', required=True)
    parser.add_argument('--kres-srl-folder', required=True)
    parser.add_argument('--ssj-file', required=True)
    parser.add_argument('--output', required=False, default=None)
    parser.add_argument('--outdir', required=False, default=None)
    parser.add_argument('--dbaddr', required=False, default=None)
    args = parser.parse_args()

    # parse ssj
    logger.info("Parsing ssj500k: {}".format(args.ssj_file))
    ssj_parser = Parser(
        corpus="ssj",
        infiles=[args.ssj_file],
    )

    # ssj to json
    ssj_to_json_file(ssj_parser.sentence_generator(), args.outdir)

    # parse kres
    # BUG FIX: this log line previously formatted args.ssj_file.
    logger.info("Parsing Kres: {}".format(args.kres_folder))
    kres_parser = Parser(
        corpus="kres",
        infiles=[args.kres_folder, args.kres_srl_folder],
    )

    # kres to json
    kres_to_json_files(kres_parser.sentence_generator(), args.outdir)
|
@ -1,25 +0,0 @@
|
|||||||
from corpusparser import Parser
import argparse


if __name__ == "__main__":
    # CLI entry point: parse the Kres corpus (the ssj500k pass is disabled below).
    parser = argparse.ArgumentParser(description="Parsing corpora kres and ssj500k.")
    parser.add_argument('--kres-folder', required=True)
    parser.add_argument('--kres-srl-folder', required=True)
    parser.add_argument('--ssj-file', required=True)
    args = parser.parse_args()

    # parse ssj
    # NOTE(review): the ssj pass is disabled by wrapping it in a bare string
    # literal; the block below never executes.
    """
    ssj_parser = Parser(
        corpus="ssj",
        infiles=[args.ssj_file]
    )
    ssj_parser.parse()
    """

    # parse kres
    kres_parser = Parser(
        corpus="kres",
        infiles=[args.kres_folder, args.kres_srl_folder]
    )
    kres_parser.parse()
|
|
Loading…
Reference in new issue