Compare commits

10 Commits
mt-dont-de ... master

Author | SHA1 | Date
---|---|---
 | 300f90822c |
 | 37980d56e5 |
 | 0b2bf53a6f |
 | c7a405fc46 |
 | 4812c18a1d |
 | 197cc6199c |
 | 19d3e38dcb |
 | 76c80baa09 |
 | 8802a09c9c |
 | 04c8bc1471 |

@@ -57,17 +57,12 @@ def export_entry(entry):
         lexunit.appendChild(lexeme)
     head.appendChild(lexunit)

-    # Example of keeping original xml and adding changes to it only
-    grammar_category = entry.original_xml.querySelector("head grammar category")
-    if grammar_category is None:
-        grammar = doc.createElement("grammar")
-        grammar_category = doc.createElement("category")
-        grammar.appendChild(grammar_category)
-        entry.original_xml.querySelector("head").appendChild(grammar_category)
-
+    grammar = doc.createElement("grammar")
+    grammar_category = doc.createElement("category")
     grammar_category.textContent = entry.grammar

-    head.appendChild(entry.original_xml.querySelector("head grammar"))
+    grammar.appendChild(grammar_category)
+    head.appendChild(grammar)


     if len(entry.measure) > 0:

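The export now always builds a fresh `grammar`/`category` pair instead of reusing nodes from the entry's original XML. Below is a minimal plain-Python sketch of the same element-building pattern, using `xml.dom.minidom` as a stand-in for the browser DOM that the Transcrypt code manipulates; the helper name and sample value are made up for illustration.

```python
# Sketch only: xml.dom.minidom stands in for the browser DOM used via Transcrypt.
from xml.dom.minidom import Document

def export_grammar(doc, head, grammar_text):
    # Always build a fresh <grammar><category>...</category></grammar> pair
    # instead of reusing nodes from the original XML.
    grammar = doc.createElement("grammar")
    grammar_category = doc.createElement("category")
    grammar_category.appendChild(doc.createTextNode(grammar_text))
    grammar.appendChild(grammar_category)
    head.appendChild(grammar)
    return head

doc = Document()
head = doc.createElement("head")
print(export_grammar(doc, head, "samostalnik").toxml())
# <head><grammar><category>samostalnik</category></grammar></head>
```
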
@@ -24,32 +24,33 @@ def build_structure_conversions():
         if line[1] == "struktura":
             continue

+        vto_structure = line[1].strip().split(">")[1].split("<")[0]
         vto_name = line[2].strip()
-        vto_id = line[4].strip()
+        vto_id = line[6].strip()

         if 0 in (len(vto_name), len(vto_id)):
             continue

         vfrom = "^" + line[0].replace("?", "\?").replace("%s", "([a-zA-ZčšžČŠŽ-]+)") + "$"
-        structure_conversions.append((__new__(RegExp(vfrom, 'u')), vto_name, vto_id))
+        structure_conversions.append((__new__(RegExp(vfrom, 'u')), vto_name, vto_structure, vto_id))


 def convert_structure(structure, type):
     if structure_conversions is None:
         build_structure_conversions()

-    for vfrom, vto_name, vto_id in structure_conversions:
+    for vfrom, vto_name, vto_structure, vto_id in structure_conversions:
         match = structure.match(vfrom)
-        # fix for ids 65, 66, 67 which instead matched with 64
-        if match and vto_id == '64' and '-s' in type:
+        # fix for ids 106, 107, 44 which instead matched with 30
+        if match and vto_id == '30' and '-s' in type:
             vto_name = 's0-vp-s0'
-            vto_id = '65'
-        elif match and vto_id == '64' and '-g' in type:
+            vto_id = '106'
+        elif match and vto_id == '30' and '-g' in type:
             vto_name = 'gg-vp-gg'
-            vto_id = '66'
-        elif match and vto_id == '64' and '-r' in type:
+            vto_id = '107'
+        elif match and vto_id == '30' and '-r' in type:
             vto_name = 'r-vp-r'
-            vto_id = '67'
+            vto_id = '44'

         if match:
             # we need to remove replace alias here as we want to use javascript's one

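The conversion table now carries the raw structure string alongside the name, and the id moved to a later column. The sketch below shows how one table row becomes a `(regex, name, structure, id)` tuple, with Python's `re` standing in for the JavaScript `RegExp` that Transcrypt's `__new__(RegExp(...))` creates; the sample row is made up for illustration.

```python
# Sketch of how one conversion-table row becomes a (regex, name, structure, id) tuple.
import re

structure_conversions = []

def add_conversion(line):
    if line[1] == "struktura":          # skip the header row
        return
    vto_structure = line[1].strip().split(">")[1].split("<")[0]
    vto_name = line[2].strip()
    vto_id = line[6].strip()
    if 0 in (len(vto_name), len(vto_id)):
        return
    vfrom = "^" + line[0].replace("?", r"\?").replace("%s", "([a-zA-ZčšžČŠŽ-]+)") + "$"
    structure_conversions.append((re.compile(vfrom), vto_name, vto_structure, vto_id))

# Made-up sample row: pattern, markup with the structure, name, then the id in column 6.
add_conversion(["%s koga-česa", "<i>gbz sbz2</i>", "g-s2", "", "", "", "30"])
print(structure_conversions[0][1:])   # ('g-s2', 'gbz sbz2', '30')
```
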
@@ -57,7 +58,7 @@ def convert_structure(structure, type):
             result = structure.replace(vfrom, vto_name).strip()
             __pragma__('alias', 'replace', "py_replace")

-            return result, vto_id
+            return result, vto_structure, vto_id

     window.console.log("Unknown structure: ", structure)
     return 'N/A', '/'

@@ -59,7 +59,8 @@ class SkeCollocation:
     def __init__(self, data):
         self.word = data.word
         self.frequency = data.count
-        self.structure_name, self.structure_id = convert_structure(data.gramrel, data.lempos)
+        self.gramrel = data.gramrel
+        self.structure_name, self.structure, self.structure_id = convert_structure(data.gramrel, data.lempos)

         self.other = {"score": data.score, "cm": data.cm}

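`SkeCollocation` now also keeps the raw `gramrel` and the converted structure string, which the insert code further down relies on. Here is a hypothetical, self-contained version of the constructor with `convert_structure` stubbed and the Sketch Engine row faked via `SimpleNamespace`; field names follow the diff, the sample values are invented.

```python
from types import SimpleNamespace

def convert_structure(gramrel, lempos):
    return "gbz-sbz2", "Sam gg", "30"          # stub for illustration

class SkeCollocation:
    def __init__(self, data):
        self.word = data.word
        self.frequency = data.count
        self.gramrel = data.gramrel
        self.structure_name, self.structure, self.structure_id = \
            convert_structure(data.gramrel, data.lempos)
        self.other = {"score": data.score, "cm": data.cm}

row = SimpleNamespace(word="knjiga", count=12, gramrel="%s koga-česa",
                      lempos="-g", score=7.1, cm="")
c = SkeCollocation(row)
print(c.word, c.structure_name, c.structure_id)   # knjiga gbz-sbz2 30
```
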
@@ -102,13 +103,28 @@ def match_gf2_examples(data, *args):
     xhr.send(to_send)


+def make_cql_query(ske_index, search_term, pos):
+    cql_pos = {
+        "samostalnik": ("S.*", "-s"),
+        "glagol": ("G.*", "-g"),
+        "pridevnik": ("P.*", "-p"),
+        "prislov": ("R.*", "-r"),
+        "zaimek": ("Z.*", "-z")
+    }
+
+    if ske_index == 0:
+        return "[ lemma=\"{0}\" & tag=\"{1}\" ]".format(search_term, cql_pos[pos][0])
+    else:
+        return search_term + cql_pos[pos][1]
+
+
 class SkeModal(ClickMessage):
     def on_event(self, event):
         # event could be data if this is the return from external library
         if type(event) in [list, int]:
             self.add_arg(event)
         else:
-            if len(self._args) < 4:
+            if len(self._args) < 5:
                 self.add_arg(None)
         super().on_event(event)

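`make_cql_query` produces either a bracketed CQL expression (for the first Sketch Engine index) or a lempos-style suffix. A stand-alone copy of the function from the diff, followed by the two query shapes it yields for a sample noun:

```python
def make_cql_query(ske_index, search_term, pos):
    cql_pos = {
        "samostalnik": ("S.*", "-s"),
        "glagol": ("G.*", "-g"),
        "pridevnik": ("P.*", "-p"),
        "prislov": ("R.*", "-r"),
        "zaimek": ("Z.*", "-z")
    }
    if ske_index == 0:
        return "[ lemma=\"{0}\" & tag=\"{1}\" ]".format(search_term, cql_pos[pos][0])
    else:
        return search_term + cql_pos[pos][1]

print(make_cql_query(0, "miza", "samostalnik"))   # [ lemma="miza" & tag="S.*" ]
print(make_cql_query(1, "miza", "samostalnik"))   # miza-s
```
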
@@ -116,18 +132,20 @@ class SkeModal(ClickMessage):
         page_num = self.get_arg(0, int)
         search_term = self.get_arg(1, str)
         ske_index = self.get_arg(2, int)
+        ske_pos_query = self.get_arg(3, str)
         ske_lookup = model.ske.url_for_kind_index(ske_index)

-        next_message = msg(SkeModal, page_num, search_term, ske_index)
+        next_message = msg(SkeModal, page_num, search_term, ske_index, ske_pos_query)

         # could be none if empty
-        data = self.get_arg(3)
+        data = self.get_arg(4)

         if data is None:
             params = {"additional_refs": "s.id,p.id",
                       "page_num": page_num,
                       "error_callback": next_message,
-                      "data_parser": get_parser(ske_index)}
+                      "data_parser": get_parser(ske_index),
+                      "querytype": ske_pos_query}

             gdex = get_preference("ske_gdex")
             if gdex:

@@ -136,15 +154,36 @@ class SkeModal(ClickMessage):
                 return
             params["gdex"] = gdex

+            # enable CQL query
+            if ske_pos_query is not "simple":
+                search_term_old = search_term
+                search_term = make_cql_query(ske_index, search_term, ske_pos_query)
+
             model.ske.request(search_term, next_message, ske_lookup, params)
+            search_term = search_term_old

         elif type(data) is list:
+            window.console.log(data)
             # check if gf2 examples are loaded or not
             if not data[0].gf2_check and type(data[0]) is SkeExample:
                 # we get the data, we have to match it with available data on our gf2 examples API
-                match_gf2_examples(data, page_num, search_term, ske_index)
-
+                match_gf2_examples(data, page_num, search_term, ske_index, ske_pos_query)
+            elif type(data[0]) is SkeCollocation:
+                # filtering, grouping and sorting data
+                data.sort(key=lambda x: float(x.other["score"]), reverse=True)
+                _data = []
+                while len(data) > 0:
+                    max_item = data.pop(0)  # max(data, key=lambda x: x.other["score"])
+                    _data.append(max_item)
+                    for item in data:
+                        if "N/A" in item.structure_name:
+                            data.remove(item)
+                        elif item.structure_name.strip() == max_item.structure_name.strip():
+                            _data.append(item)
+                    for delete_item in _data:
+                        if delete_item in data:
+                            data.remove(delete_item)
+                data = _data
             model.modal_set(lambda: modals.ske_list(
                 search_term, data, page_num, model.entry.senses, model.ske.request_kinds))

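The new collocation branch sorts by score and then pulls items that share a `structure_name` next to each other while discarding `N/A` structures. Below is an equivalent, self-contained sketch of that grouping step, simplified for clarity; `Item` is a stand-in for `SkeCollocation` and the sample names and scores are invented.

```python
# Equivalent sketch: drop "N/A" structures, order groups by their best score,
# and keep each group's items together.
from collections import OrderedDict
from types import SimpleNamespace as Item

def group_by_structure(items):
    items = [i for i in items if "N/A" not in i.structure_name]
    items.sort(key=lambda i: float(i.other["score"]), reverse=True)
    groups = OrderedDict()                      # insertion order = best-score order
    for item in items:
        groups.setdefault(item.structure_name.strip(), []).append(item)
    return [item for group in groups.values() for item in group]

data = [Item(structure_name="gbz-sbz4", other={"score": "3.2"}),
        Item(structure_name="sbz1-gbz", other={"score": "9.1"}),
        Item(structure_name="N/A", other={"score": "8.0"}),
        Item(structure_name="sbz1-gbz", other={"score": "1.5"})]
print([i.structure_name for i in group_by_structure(data)])
# ['sbz1-gbz', 'sbz1-gbz', 'gbz-sbz4']
```
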
@@ -155,7 +194,7 @@ class SkeModal(ClickMessage):
 class SkeModalGf2Update(SkeModal):
     def on_event(self, event):
         response_data = window.JSON.parse(event.target.response)
-        data = self.get_arg(3)
+        data = self.get_arg(4)

         data_dict = {}
         for example in data:

@@ -191,6 +230,7 @@ class SearchInSkeModal(SkeModal):
         self.add_arg(int(document.getElementById("ske-page-num").value))
         self.add_arg(document.getElementById("ske-search").value)
         self.add_arg(document.getElementById("ske-select").selectedIndex)
+        self.add_arg(document.getElementById("ske-pos-query").value)
         super().on_event(event)


@@ -238,7 +278,7 @@ class SkeInsert(DataChgClickMessage):

         lex_mid = ComponentLexeme()
         lex_mid.text = example["mid"]
-        lex_mid.role = "collocation"
+        lex_mid.role = "headword"

         lex_right = ComponentLexeme()
         lex_right.text = example["right"]

@@ -257,17 +297,45 @@ class SkeInsert(DataChgClickMessage):
         new_collocation.inner.other_attributes["frequency"] = example.frequency
         new_collocation.inner.type = "collocation"

-        lex_left = ComponentLexeme()
-        lex_left.text = ""
-        lex_left.role = None
+        headword = document.getElementById("ske-search").value
+        lexemes = []
+        structure_name = example.structure_name.split("-")
+        gramrel = example.gramrel.split("_")
+        structure = example.structure.split(" ")
+        structure.append("")  # Bad fix: we have to add something for structure l-gg-ggn

-        lex_mid = ComponentLexeme()
-        lex_mid.text = example.word
-        lex_mid.role = "collocation"
+        for i in range(len(structure_name)):
+            lex = ComponentLexeme()
+            structure[i] = structure[i].replace("Inf-", "")

-        lex_right = ComponentLexeme()
-        lex_right.text = ""
-        lex_right.role = None
+            # take care of negations "ne"
+            if "Neg-" in structure[i]:
+                structure[i] = structure[i].replace("Neg-", "")
+                negation_flag = True
+                n_lex = ComponentLexeme()
+                n_lex.text = "ne"
+                n_lex.role = "other"
+                lexemes.append(n_lex)

-        new_collocation.components.extend([lex_left, lex_mid, lex_right])
+            if structure[i] is "":
+                continue  # skipping bcs of fix
+            elif "Vez-gbz" in structure[i]:
+                lex.text = "je"
+                lex.role = "other"
+            elif structure_name[i] in ["d", "vd", "zp"]:
+                lex.text = gramrel[i]
+                lex.text = lex.text.replace("-d", "").replace("%", "")
+                lex.role = "other"
+            elif structure_name[i] is "vp":
+                lex.text = structure[i]
+                lex.role = "other"
+            elif structure[i][0] in ["S", "G", "P", "R"]:
+                lex.text = headword
+                lex.role = "headword"
+            else:
+                lex.text = example.word
+                lex.role = "collocate"
+            lexemes.append(lex)
+
+        new_collocation.components.extend(lexemes)
         return new_collocation

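The rewritten `SkeInsert` branch walks the structure tokens in parallel with the structure-name parts and the gramrel parts, assigning each generated lexeme a text and a role. Below is a simplified, self-contained sketch of that token-to-(text, role) mapping; the sample structure strings are invented, only the main branches from the diff are reproduced, and the `Neg-` handling is omitted.

```python
# Simplified sketch of the token -> (text, role) mapping used when inserting a collocation.
def lexemes_for(structure_name, structure, gramrel, headword, collocate):
    names = structure_name.split("-")
    tokens = structure.split(" ") + [""]        # same padding "bad fix" as in the diff
    grams = gramrel.split("_")
    out = []
    for i, name in enumerate(names):
        token = tokens[i].replace("Inf-", "")
        if token == "":
            continue                            # skip the padding token
        if "Vez-gbz" in token:
            out.append(("je", "other"))         # copula
        elif name in ["d", "vd", "zp"]:
            out.append((grams[i].replace("-d", "").replace("%", ""), "other"))
        elif name == "vp":
            out.append((token, "other"))
        elif token[0] in ["S", "G", "P", "R"]:
            out.append((headword, "headword"))
        else:
            out.append((collocate, "collocate"))
    return out

print(lexemes_for("gbz-sbz2", "Gbz sbz2", "%s_koga-česa", "pisati", "pismo"))
# [('pisati', 'headword'), ('pismo', 'collocate')]
```
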
@@ -64,14 +64,18 @@ def edit_example(example, sense):
         result.append(h("span.example-component-button.example-component-none",
                         {"on": {"click": role_msg(idx, "none")}}, "N"))

-        result.extend([
-            h("span.example-component-button",
-              {"on": {"click": message.msg(message.ExampleComponentAdd, example_original, idx)}}, "+"),
-            h("span.example-component-button",
-              {"on": {"click": message.msg(message.ExampleComponentRemove, example_original, idx)}}, "-")])
+        if "-" not in example.inner.other_attributes["structureName"]:
+            result.extend([
+                h("span.example-component-button",
+                  {"on": {"click": message.msg(message.ExampleComponentAdd, example_original, idx)}}, "+"),
+                h("span.example-component-button",
+                  {"on": {"click": message.msg(message.ExampleComponentRemove, example_original, idx)}}, "-")])

         return result

+    divs.append(h("div.flex.five.example-component", {}, [h("div.one-fifth", {}, "Struktura:"),
+                                                           h("div.three-fifth", {}, example.inner.other_attributes["structureName"])]))
+
     for idx, component in enumerate(example.components):
         role_txt = component.role if component.role is not None else "none"
         color_class = ".example-component-" + role_txt

@@ -177,6 +181,10 @@ def ske_list(search_term, data, page_num, senses, ske_kinds):
                          "type": "number",
                          "min": 1,
                          "step": 1}}, "")]),
+              h("label.fourth.ske-mid-input", {}, [
+                  h("select#ske-pos-query", {}, [h("option", {}, "{}".format(pos)) for pos in ["simple", "samostalnik", "glagol", "pridevnik", "prislov", "zaimek"]])
+              ]),
+
               h("span.fourth.button.ske-right-button",
                   {"on": {"click": message.msg(message.SearchInSkeModal)}}, "Isci")]),
           h("div.ske-list",

@@ -58,7 +58,7 @@ class View:
 def view_ske_button(model):
     return h(
         "span#ske-button.button.toggle",
-        { "on": {"click": msg(ShowSkeModal, 1, model.entry.headword, 0)} },
+        { "on": {"click": msg(ShowSkeModal, 1, model.entry.headword, 0, "simple")} },
         h("svg#ske-img", {
             "attrs": {
                 "xmlns": "http://www.w3.org/2000/svg",