parent
d5f5533139
commit
bb0f2f3b3e
@ -1 +1,36 @@
|
||||
# cjvt-vezljivost
|
||||
|
||||
## Components
|
||||
|
||||
|
||||
### Database (2 containers)
|
||||
Input:
|
||||
|
||||
* ssj500k.xml
|
||||
* kres.xml
|
||||
* kres_SRL.json
|
||||
|
||||
Intermediate:
|
||||
|
||||
* ssj500k.json
|
||||
* kres.json
|
||||
|
||||
Output:
|
||||
|
||||
* DB containing both corpora (1:1 map of the intermediate json formats)
|
||||
|
||||
|
||||
### Flask backend (1 container)
|
||||
Input: see Database
|
||||
|
||||
API endpoints:
|
||||
|
||||
* GET word list (pre-cached)
|
||||
* GET reduced frames (pre-cached)
|
||||
* POST senses
|
||||
* User auth logic
|
||||
|
||||
|
||||
### Vue frontend (1 container)
|
||||
|
||||
* nginx server
|
||||
|
@ -0,0 +1,230 @@
|
||||
# Izdelava vezljivostnih vzorcev za slovenske glagole #
|
||||
|
||||
Študent: Kristjan Voje
|
||||
|
||||
Mentor: prof. Marko Robnik Šikonja
|
||||
Somentorica: dr. Apolonija Gantar
|
||||
|
||||
### Requirements:
|
||||
|
||||
* Linux (built and tested on Ubuntu 16.04),
|
||||
* python3
|
||||
|
||||
## Quick workspace preparation
|
||||
```bash
|
||||
# Clone the repo.
|
||||
$ git clone https://voje@bitbucket.org/voje/diploma.git
|
||||
|
||||
# Prepare the data.
|
||||
$ cd ./data
|
||||
$ unzip data.zip
|
||||
```
|
||||
|
||||
Virtualenv recommended. `$ sudo pip3 install virtualenv`.
|
||||
```bash
|
||||
$ cd ./script
|
||||
$ virtualenv -p /usr/bin/python3 venv
|
||||
|
||||
# A folder venv with python libraries will appear.
|
||||
# To activate the virtual environment:
|
||||
$ source ./venv/bin/activate
|
||||
|
||||
# To deactivate:
|
||||
$ deactivate
|
||||
```
|
||||
|
||||
Now, to build the python packages:
|
||||
```bash
|
||||
# Polyglot dependencies
|
||||
$ sudo apt-get install libicu-dev
|
||||
# NOTE: installing this broke my Arch system.
|
||||
# If on Arch, use the AUR version (it's supposed to be safe).
|
||||
|
||||
$ cd ./script
|
||||
$ pip3 install -e .
|
||||
|
||||
# Polyglot downloads
|
||||
$ polyglot download morph2.sl
|
||||
```
|
||||
|
||||
To test the installation, fire up a python3 shell (while in virtualenv) and:
|
||||
```python
|
||||
import valency
|
||||
```
|
||||
|
||||
## Nodejs environment
|
||||
There's a compiled front end client in the git repo.
|
||||
In case you want to change anything, you'll need to set up the development environment:
|
||||
|
||||
* install nodejs (default Ubuntu versions are usually behind),
|
||||
* check `$ npm -v`, if you don't have it, install npm,
|
||||
```
|
||||
$ npm install vue-cli
|
||||
$ cd ./vue_frontend
|
||||
$ npm install
|
||||
$ npm run dev # for development
|
||||
$ npm run build # for production
|
||||
```
|
||||
You will also need to change some path variables if you'll want to access the backend api from vue development server.
|
||||
See chapter "Web app deployment".
|
||||
|
||||
## MongoDB
|
||||
### Set up the database on the system
|
||||
[install MongoDB on Linux](https://docs.mongodb.com/manual/tutorial/install-mongodb-on-ubuntu/)
|
||||
|
||||
> The MongoDB instance stores its data files in /var/lib/mongodb and its log files in /var/log/mongodb by default, and runs using the mongodb user account. You can specify alternate log and data file directories in /etc/mongod.conf. See systemLog.path and storage.dbPath for additional information.
|
||||
>
|
||||
> If you change the user that runs the MongoDB process, you must modify the access control rights to the /var/lib/mongodb and /var/log/mongodb directories to give this user access to these directories.
|
||||
|
||||
#### Check if it's working.
|
||||
```bash
|
||||
$ sudo service mongod start / stop / restart
|
||||
$ tail -n 30 /var/log/mongodb/mongod.log
|
||||
|
||||
# If you want the mongo shell on local machine:
|
||||
$ mongo --host 127.0.0.1:<27017> # Check net.port in /etc/mongod.conf
|
||||
```
|
||||
|
||||
#### Security
|
||||
Create admin and user. Localhost only.
|
||||
[security manual](https://docs.mongodb.com/v3.4/tutorial/enable-authentication/)
|
||||
Basically, create an admin user, then use that admin to create a normal user.
|
||||
Something like this:
|
||||
```
|
||||
use admin
|
||||
|
||||
db.createUser(
|
||||
{
|
||||
user: "admin_name",
|
||||
pwd: "admin_pass",
|
||||
roles: [ { role: "userAdminAnyDatabase", db: "admin" } ]
|
||||
}
|
||||
)
|
||||
|
||||
# Restart mongod.
|
||||
|
||||
use texts
|
||||
|
||||
db.createUser(
|
||||
{
|
||||
user: "user_name",
|
||||
pwd: "user_pass",
|
||||
roles: [ { role: "readWrite", db: "texts" } ]
|
||||
}
|
||||
)
|
||||
|
||||
# Restart mongod.
|
||||
|
||||
# Also useful
|
||||
db.dropUser("username")
|
||||
```
|
||||
|
||||
Then go to `/etc/mongod.conf` and add this:
|
||||
Do not use tabs! mongod won't start.
|
||||
```bash
|
||||
security:
|
||||
authorization: enabled
|
||||
```
|
||||
You need to `use <db>`, `db.auth("username", "pass")` to have access.
|
||||
|
||||
#### DB migration
|
||||
I installed mongo on the remote host with the same user accounts as on local.
|
||||
```bash
|
||||
# On local.
|
||||
$ mongodump --db texts --host mongodb1.example.net --port 3017 --username user --password "pass" --out ./file.db
|
||||
|
||||
# Rsync it over to remote.
|
||||
# On remote.
|
||||
# Need to turn off authorization for the drop part.
|
||||
|
||||
$ mongorestore --db texts --drop --port <port> ./<dbfolder>
|
||||
```
|
||||
|
||||
#### DB collections, needed for web app
|
||||
|
||||
* v2_senses,
|
||||
* v2_senses_map,
|
||||
* v2_users,
|
||||
* v2_user_tokens
|
||||
* sskj
|
||||
|
||||
|
||||
## Web app deployment
|
||||
We need to set flask to serve the vuejs frontend:
|
||||
```python
|
||||
# in ./script/flask_app/app.py
|
||||
app = Flask(
|
||||
__name__,
|
||||
static_folder="./vue/dist/static",
|
||||
template_folder="./vue/dist"
|
||||
)
|
||||
```
|
||||
Set the correct CORS options.
|
||||
|
||||
Might need to reinstall the package. Make sure you're in python virtualenv.
|
||||
`$ pip3 install -e .`
|
||||
|
||||
We also need to tell vuejs client to look for the api on server's address.
|
||||
```javascript
|
||||
// in script/vue_frontend/src/main.js
|
||||
api_addr: "http://<my_server_addr>:5004",
|
||||
```
|
||||
Compile frontend client:
|
||||
```bash
|
||||
$ cd ./script/vue_frontend
|
||||
$ npm run build
|
||||
$ rm ../flask_app/vue -rf
|
||||
$ cp ./dist ../flask_app/vue -r
|
||||
```
|
||||
|
||||
After preparing frontend, backend and database, run the command:
|
||||
```bash
|
||||
$ cd ./script
|
||||
$ ./autostart.sh
|
||||
```
|
||||
|
||||
## When fixing things in dev environment:
|
||||
```bash
|
||||
## Backend
|
||||
# git clone everything
|
||||
$ cd script
|
||||
|
||||
# unzip some zipped files
|
||||
$ ./sherpa --unpack
|
||||
|
||||
# virtualenv
|
||||
$ source venv/bin/activate
|
||||
$ pip3 install -e .
|
||||
|
||||
# start a mongod instance
|
||||
$ sudo systemctl start mongod
|
||||
|
||||
# start the backend server
|
||||
$ ./autostart.sh --debug
|
||||
|
||||
# you can watch the logs with
|
||||
$ tail -f log/main.log
|
||||
|
||||
|
||||
## Frontend
|
||||
# set the right config file (path to api)
|
||||
$ ./sherpa --frontend_config dev
|
||||
$ cd vue_frontend
|
||||
$ npm run dev
|
||||
|
||||
|
||||
## Done making changes
|
||||
# build frontend for production with the right config files
|
||||
$ ./sherpa --build_vue
|
||||
|
||||
# if you've made changes to static .pickle files
|
||||
$ ./sherpa --pack
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
@ -0,0 +1,68 @@
|
||||
#!/bin/bash
|
||||
|
||||
# prepare args for app.py
|
||||
datapath="../data"
|
||||
logpath="./log"
|
||||
|
||||
# Print the supported command-line arguments and exit hints.
# The text below is user-facing output; keep it in sync with the
# case statement in the argument parser at the bottom of this file.
function echo_help() {
echo "arguments:
--help
--rm_pickles (removes .pickle files from ./data/tmp_pickles)
--prepare_for_shipment (build frontend, zip some data)
--reload_sskj_senses (removes author: SSKJ from db.v2_senses
and inserts senses from ./data/no_del_pickles/sskj_sense.pickle)
--debug"
}
|
||||
|
||||
# Delete cached .pickle files from ${datapath}/tmp_pickles.
# Rewritten to use a glob instead of parsing `ls | grep | wc -l`:
# parsing ls breaks on unusual filenames, and the unescaped dot in
# grep ".pickle" matched "pickle" anywhere in a name.
function rm_pickles () {
    pickles_path="${datapath}/tmp_pickles"
    # nullglob makes the array empty (instead of a literal pattern)
    # when nothing matches.
    shopt -s nullglob
    local pickles=( "${pickles_path}"/*.pickle )
    shopt -u nullglob
    if [[ ${#pickles[@]} -gt 0 ]]; then
        echo "Removing:"
        printf '%s\n' "${pickles[@]}"
        rm -- "${pickles[@]}"
    else
        echo "No .pickle files to remove."
    fi
}
|
||||
|
||||
# Package data and build the production frontend before deploying.
# NOTE(review): this invokes ./sherpa.sh while the README refers to
# a plain ./sherpa helper — confirm the actual script name.
function prepare_for_shipment () {
./sherpa.sh --pack
./sherpa.sh --build_vue
}
|
||||
|
||||
# Flags forwarded to app.py accumulate in $args; standalone
# maintenance actions run immediately and exit.
args=""
# Parse arguments
while [[ "$#" -gt 0 ]]; do
key="$1"
case "$key" in
--help)
echo_help
exit 0
;;
--rm_pickles)
rm_pickles
exit 0
;;
--prepare_for_shipment)
prepare_for_shipment
exit 0
;;
--reload_sskj_senses)
args="$args --reload_sskj_senses"
shift
;;
--debug)
args="$args --debug"
shift
;;
*)
echo "Unknown argument: $key"
exit 1
;;
esac
done


args="${args} --datapath=${datapath} --logpath=${logpath}"
# All options are handed over as ONE quoted string; app.py
# compensates by re-splitting every argv token on whitespace,
# so do not "fix" the quoting here without changing app.py too.
python3 ./flask_app/app.py "$args"
|
@ -0,0 +1,473 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from flask import Flask, render_template, request, url_for, redirect
|
||||
|
||||
from valency import k_utils
|
||||
from valency.ssj_struct import *
|
||||
from valency.val_struct import *
|
||||
from valency.reduce_functions import *
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import json
|
||||
from flask_cors import CORS
|
||||
import hashlib
|
||||
import uuid
|
||||
import datetime
|
||||
import string
|
||||
import random
|
||||
import smtplib
|
||||
from email.mime.text import MIMEText
|
||||
from copy import deepcopy as DC
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
PORT = 5004
# Raw command-line tokens; populated in __main__ from sys.argv.
args = []


def get_arg(argname):
    """Look up a command-line flag in the global ``args`` list.

    Returns the value string for ``--argname=value`` flags, ``True``
    for bare ``--argname`` flags, and ``False`` when the flag is not
    present.

    Bug fixed: the original had ``return False`` in an ``else`` branch
    *inside* the loop, so it gave up after inspecting only the first
    token — any flag past position 0 was never found.
    """
    for arg in args:
        if "--{}".format(argname) in arg:
            spl = arg.split("=")
            if len(spl) == 2:
                return spl[1]
            return True
    # Only report absence after scanning every token.
    return False
|
||||
|
||||
|
||||
# Global Vallex data object; constructed and populated in __main__
# before the app starts serving requests.
vallex = None
# app = Flask(__name__)

# v2 (serving vuejs frontend)
# change api path in vue to localhost:5004
# Flask serves the compiled vue client straight from ./vue/dist.
app = Flask(
    __name__,
    static_folder="./vue/dist/static",
    template_folder="./vue/dist"
)

# when running vuejs via webpack
# CORS(app)
# Cross-origin requests are allowed on the API routes only.
CORS(app, resources={r"/api/*": {
    "origins": "*",
}})
|
||||
|
||||
|
||||
# for testing functions
|
||||
# for testing functions
@app.route("/test_dev")
def test_dev():
    """Development-only endpoint: run Vallex.test_dev() and show its result."""
    result = vallex.test_dev()
    return str(result) or "edit val_struct.py: test_dev()"
|
||||
|
||||
|
||||
@app.route("/")
def index():
    """Serve the compiled vue frontend's entry page."""
    return render_template("index.html")
|
||||
|
||||
|
||||
@app.route("/home", defaults={"pathname": ""})
@app.route("/home/<path:pathname>")
def home(pathname):
    """Redirect any /home/* URL back to the single-page-app entry point."""
    return redirect(url_for("index"), code=302)
|
||||
|
||||
|
||||
@app.route("/api/words")
def api_words():
    """Return the pre-computed headword list and the "se" lookup table."""
    payload = {
        "sorted_words": vallex.sorted_words,
        "has_se": vallex.has_se
    }
    return json.dumps(payload)
|
||||
|
||||
|
||||
@app.route("/api/functors")
def api_functors():
    """Return (functor, frame_count) pairs, ordered by functor name."""
    pairs = [
        (functor, len(vallex.functors_index[functor]))
        for functor in sorted(vallex.functors_index.keys())
    ]
    return json.dumps(pairs)
|
||||
|
||||
|
||||
@app.route("/api/register", methods=["POST"])
def api_register():
    """Create a new user account.

    Expects a JSON body with "username", "password" and "email".
    Returns "OK" on success, an "ERR..." string otherwise.
    """
    USERS_COLL = "v2_users"
    b = request.get_data()
    data = json.loads(b.decode())
    username = data["username"]
    password = data["password"]
    email = data["email"]
    # Reject blank fields.
    if (
        username == "" or
        password == "" or
        email == ""
    ):
        return "ERR"
    # Username and e-mail must both be unique.
    existing = list(vallex.db[USERS_COLL].find({
        "$or": [{"username": username}, {"email": email}]
    }))
    if len(existing) > 0:
        return "ERR: Username or email already exists."
    # NOTE(review): passwords and e-mails are stored as unsalted sha256
    # digests; consider a dedicated password hash (bcrypt/scrypt).
    # Changing this would invalidate existing accounts — needs a migration.
    entry = {
        "username": username,
        "hpass": hashlib.sha256(
            password.encode("utf-8")).hexdigest(),
        "email": hashlib.sha256(
            email.encode("utf-8")).hexdigest()
    }
    vallex.db[USERS_COLL].insert(entry)
    return "OK"
|
||||
|
||||
|
||||
@app.route("/api/login", methods=["POST"])
def api_login():
    """Check credentials; on success mint (or refresh) a session token.

    JSON body: {"username": ..., "password": ...}.
    Returns {"token": <hex string>} or {"token": null} on bad credentials.
    """
    USERS_COLL = "v2_users"
    TOKENS_COLL = "v2_user_tokens"
    b = request.get_data()
    data = json.loads(b.decode())
    username = data["username"]
    password = data["password"]
    # Compare against the stored (unsalted) sha256 digest.
    hpass = hashlib.sha256(password.encode("utf-8")).hexdigest()

    db_user = list(vallex.db[USERS_COLL].find({
        "username": username,
        "hpass": hpass
    }))
    if len(db_user) == 0:
        return json.dumps({"token": None})

    # update or create token
    token = uuid.uuid4().hex
    token_entry = {
        "username": username,
        "date": datetime.datetime.utcnow(),
        "token": token
    }
    # One token per user: upsert keyed on username, refreshing the date.
    vallex.db[TOKENS_COLL].update(
        {"username": token_entry["username"]},
        token_entry,
        upsert=True
    )
    return json.dumps({"token": token})
|
||||
|
||||
|
||||
def send_new_pass_mail(recipient, new_pass):
    """E-mail a freshly generated password to the user.

    Best-effort: failures are logged and never raised to the caller.
    """
    # dtime = str(datetime.datetime.now())
    SENDER = "valencaglagolov@gmail.com"
    msg = MIMEText(
        "Pošiljamo vam novo geslo za "
        "vstop v aplikacijo Vezljivostni vzorci slovenskih glagolov.\n"
        "Geslo: {}.".format(new_pass)
    )
    msg["Subject"] = "Pozabljeno geslo"
    msg["From"] = SENDER
    msg["To"] = recipient

    try:
        server = smtplib.SMTP("smtp.gmail.com", 587)
        server.ehlo()
        server.starttls()
        # SECURITY: the SMTP credential is hard-coded in source; move it
        # to configuration / an environment variable before publishing.
        server.login(
            SENDER,
            "rapid limb soapy fermi"
        )
        server.sendmail(SENDER, [recipient], msg.as_string())
        server.close()
        log.info("Sent new password.")
    except Exception as e:
        # Bug fixed: `except Error` referenced a name that does not exist
        # as a builtin (and is most likely unbound here), so a send
        # failure raised NameError instead of being logged.
        log.error("Sending new password failed")
        log.error(e)
|
||||
|
||||
|
||||
@app.route("/api/new_pass", methods=["POST"])
def api_new_pass():
    """Password reset: generate a new password and e-mail it to the user.

    JSON body: {"username": ..., "email": ...}.
    Returns {"confirmation": bool}.
    """
    b = request.get_data()
    data = json.loads(b.decode())
    username = data["username"]
    email = data["email"]
    # E-mails are stored hashed, so hash the input for the lookup.
    hemail = hashlib.sha256(email.encode("utf-8")).hexdigest()
    db_res = list(vallex.db.v2_users.find({
        "username": username,
        "email": hemail
    }))
    # check if user is valid
    if len(db_res) == 0:
        return json.dumps({"confirmation": False})
    # create a new password: 10 random alphanumeric characters
    # NOTE(review): random.choice is not cryptographically secure;
    # consider the `secrets` module for password generation.
    new_pass = "".join([random.choice(
        string.ascii_letters + string.digits) for i in range(10)])
    # update locally
    hpass = hashlib.sha256(new_pass.encode("utf-8")).hexdigest()
    vallex.db.v2_users.update(
        {
            "username": username,
            "email": hemail
        },
        {"$set": {
            "hpass": hpass
        }}
    )
    # send via mail
    send_new_pass_mail(email, new_pass)
    return json.dumps({"confirmation": True})
|
||||
|
||||
|
||||
def prepare_frames(ret_frames):
    """Attach example sentences to each frame and serialize to JSON.

    For every frame, token ids (tids) are collapsed to one tid per
    sentence id, each sentence is tokenized, and sentence indexes are
    grouped per headword lemma in frame.aggr_sent.

    Returns a JSON string: {"frames": [...]}.
    """
    for frame in ret_frames:
        # One representative tid per sentence id
        # (a tid is "<sentence id>.<token index>").
        unique_sids = {".".join(x.split(".")[:-1]): x for x in frame.tids}
        log.debug(str(unique_sids))
        # Fixed: frame.sentences was assigned [] twice; once is enough.
        frame.sentences = []
        frame.aggr_sent = {}  # lemma -> list of indexes into frame.sentences
        for sid, tid in unique_sids.items():
            hwl = vallex.get_token(tid)["lemma"]
            tmp_idx = len(frame.sentences)
            if hwl not in frame.aggr_sent:
                frame.aggr_sent[hwl] = []
            frame.aggr_sent[hwl].append(tmp_idx)
            frame.sentences.append(
                vallex.get_tokenized_sentence(tid)
            )
    # json frames
    json_ret = {"frames": []}
    for frame in ret_frames:
        json_ret["frames"].append(DC(frame.to_json()))
    return json.dumps(json_ret)
|
||||
|
||||
|
||||
@app.route("/api/frames")
def api_get_frames():
    """Return reduced valency frames for a headword.

    Query params: hw (required), rf (reduce function name, default
    "reduce_0"). Returns prepare_frames() JSON or an error object.
    """
    hw = request.args.get("hw")
    if hw is None:
        return json.dumps({"error": "Headword not found."})

    rf_name = request.args.get("rf", "reduce_0")  # 2nd arg is the default
    RF = reduce_functions[rf_name]["f"]
    # Fixed: an unknown headword used to raise KeyError (HTTP 500);
    # answer with the same error JSON shape as a missing parameter.
    if hw not in vallex.entries:
        return json.dumps({"error": "Headword not found."})
    entry = vallex.entries[hw]
    ret_frames = RF(entry.raw_frames, vallex)
    return prepare_frames(ret_frames)
|
||||
|
||||
|
||||
@app.route("/api/functor-frames")
def api_get_functor_frames():
    """Return reduced frames for every occurrence of a functor.

    Query params: functor (required), rf (reduce function name,
    default "reduce_0"). Returns prepare_frames() JSON or an error.
    """
    functor = request.args.get("functor")
    if functor is None:
        return json.dumps({"error": "Missing argument: functor."})
    rf_name = request.args.get("rf", "reduce_0")  # 2nd arg is the default
    RF = reduce_functions[rf_name]["f"]
    # Fixed: an unknown functor used to raise KeyError (HTTP 500).
    if functor not in vallex.functors_index:
        return json.dumps({"error": "Functor not found."})
    raw_frames = vallex.functors_index[functor]
    ret_frames = RF(raw_frames, vallex)
    return prepare_frames(ret_frames)
|
||||
|
||||
|
||||
def token_to_username(token):
    """Map a session token to its owning username.

    Returns None unless exactly one token document matches; on success
    also refreshes the token's date so its expiry window restarts.
    """
    COLLNAME = "v2_user_tokens"
    key = {
        "token": token
    }
    matches = list(vallex.db[COLLNAME].find(key))
    if len(matches) != 1:
        return None
    # update deletion interval
    vallex.db[COLLNAME].update(
        key, {"$set": {"date": datetime.datetime.utcnow()}})
    return matches[0]["username"]
|
||||
|
||||
|
||||
@app.route("/api/token", methods=["POST"])
def api_token():
    """Validate a session token; reply with the owning username (or null)."""
    payload = json.loads(request.get_data().decode())
    token = payload.get("token")
    # user = data.get("user")
    user = token_to_username(token)
    return json.dumps({
        "confirmation": user is not None,
        "username": user
    })
|
||||
|
||||
|
||||
@app.route("/api/senses/get")
def api_senses_get():
    """Return senses and the ssj_id -> sense mapping for a headword.

    Query param: hw. Senses come from db.v2_senses; mappings from
    db.v2_sense_map, reduced to the newest entry per (hw, ssj_id).
    """
    # returns senses and mapping for hw
    hw = request.args.get("hw")
    senses = list(vallex.db["v2_senses"].find({
        "hw": hw
    }))
    sense_map_query = list(vallex.db["v2_sense_map"].find({
        "hw": hw
    }))
    # aggregation by max date possible on DB side
    # but no simple way of returning full entries
    # aggregate hw and ssj_id by max date
    sense_map_aggr = {}
    for sm in sense_map_query:
        key = sm["hw"] + sm["ssj_id"]
        if key in sense_map_aggr:
            # Keep only the most recent mapping for this (hw, ssj_id).
            sense_map_aggr[key] = max(
                [sm, sense_map_aggr[key]], key=lambda x: x["date"])
        else:
            sense_map_aggr[key] = sm
    sense_map_list = [x[1] for x in sense_map_aggr.items()]
    sense_map = {}
    for el in sense_map_list:
        sense_map[el["ssj_id"]] = el
    # Strip Mongo-internal fields before serialization.
    for k, e in sense_map.items():
        del(e["_id"])
        del(e["date"])
    for e in senses:
        del(e["_id"])
        if "date" in e:
            del(e["date"])

    # sort senses: user defined first, sskj second
    # sskj senses sorted by sskj sense_id
    user_senses = [s for s in senses if s["author"] != "SSKJ"]
    sskj_senses = [s for s in senses if s["author"] == "SSKJ"]

    def sorting_helper(sense):
        # sense_id looks like "<prefix>-<a>-<b>-<c>"; zero-pad the numeric
        # parts so a lexicographic sort matches numeric order.
        arr = sense["sense_id"].split("-")
        return "{:03d}-{:03d}-{:03d}".format(
            int(arr[1]), int(arr[2]), int(arr[3]))

    sskj_senses = sorted(sskj_senses, key=sorting_helper)
    senses = user_senses + sskj_senses

    return json.dumps({
        "senses": senses,
        "sense_map": sense_map,
    })
|
||||
|
||||
|
||||
@app.route("/api/senses/update", methods=["POST"])
def api_senses_update():
    """Store user-created senses and the updated ssj_id -> sense mapping.

    JSON body: {"token", "hw", "sense_map", "new_senses"}.
    Requires a valid session token; returns "OK" or "Not a user.".
    """
    b = request.get_data()
    data = json.loads(b.decode())
    token = data.get("token")
    hw = data.get("hw")
    sense_map = data.get("sense_map")
    new_senses = data.get("new_senses")

    username = token_to_username(token)
    if username is None:
        log.debug("Not a user.")
        return "Not a user."

    # store new senses,
    # create new sense_ids
    id_map = {}  # frontend temporary id -> permanent sense_id
    for ns in new_senses:
        tmp_dt = datetime.datetime.utcnow()
        # Sense id: "<username>-<first 10 hex chars of
        # sha256(username + description + timestamp)>".
        new_sense_id = "{}-{}".format(
            username,
            hashlib.sha256("{}{}{}".format(
                username,
                ns["desc"],
                str(tmp_dt)
            ).encode("utf-8")).hexdigest()[:10]
        )
        frontend_sense_id = ns["sense_id"]
        ns["sense_id"] = new_sense_id
        ns["date"] = tmp_dt
        id_map[frontend_sense_id] = new_sense_id

        # insert into db
        vallex.db["v2_senses"].insert(ns)

    # replace tmp_id with mongo's _id
    for ssj_id, el in sense_map.items():
        sense_id = el["sense_id"]
        if sense_id in id_map.keys():
            sense_id = id_map[sense_id]
        data = {
            "user": username,
            "hw": hw,
            "ssj_id": ssj_id,
            "sense_id": sense_id,
            "date": datetime.datetime.utcnow()
        }
        # Mappings are insert-only; api_senses_get resolves the newest
        # entry per (hw, ssj_id) by date.
        # vallex.db["v2_sense_map"].update(key, data, upsert=True)
        vallex.db["v2_sense_map"].insert(data)
    return "OK"
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Files needed to run:
    # pre-generated .pickle files in /data/no_del_pickles
    # temporary .pickle files can speed up startup (/data/tmp_pickles)
    # main input file: annotated sentences (ssj.xml)
    ANNOTATED_SSJ_XML_PATH = "/ssj500k-sl.TEI/ssj500k-sl.body.xml"

    # Read arguments from autostart.sh script.
    # autostart.sh hands all options over as one quoted string,
    # so split every argv token on whitespace.
    for arg in sys.argv:
        args.extend(arg.split())

    app.debug = get_arg("debug")

    # Set up logging.
    # NOTE(review): assumes --logpath is always supplied (autostart.sh
    # does); a missing flag would raise TypeError on the next line.
    logfile = get_arg("logpath") + "/main.log"
    logging.basicConfig(filename=logfile, level=logging.DEBUG)

    datapath = get_arg("datapath")
    # Bug fixed: get_arg reports a missing flag with a falsy value
    # (False, or None on an empty arg list), so the old `is None`
    # check never fired when other flags were present.
    if not datapath:
        log.error("No path to data.")
        exit(1)

    # Prepare vallex.
    vallex = Vallex()

    vallex_pickle_path = datapath + "/tmp_pickles/vallex.pickle"
    vallex_data = k_utils.pickle_load(vallex_pickle_path)

    if vallex_data is None:
        log.info("No pickle found, creating vallex_data.")

        # get ssj data from pickle
        ssj_pickle_path = datapath + "/tmp_pickles/ssj.pickle"
        ssj = k_utils.pickle_load(ssj_pickle_path)

        if ssj is None:
            ssj_path = datapath + ANNOTATED_SSJ_XML_PATH
            log.info("No pickle found, creating ssj pickle from {}.".format(
                ssj_path))
            ssj = SsjDict()
            ssj.read_xml_v2(ssj_path)

            # create fresh pickle
            k_utils.pickle_dump(ssj, ssj_pickle_path)

        vallex.read_ssj(ssj)
        vallex_data = {
            "entries": vallex.entries,
            "tokens": vallex.tokens
        }
        k_utils.pickle_dump(vallex_data, vallex_pickle_path)

    # Deep-copy so later mutation does not corrupt the pickled data.
    vallex.entries = DC(vallex_data["entries"])
    vallex.tokens = DC(vallex_data["tokens"])

    # Generate senses and se_list after we've built the vallex object.
    seqparser_sskj_path = datapath + "/no_del_pickles/sskj_senses.pickle"
    seqparser_se_list_path = datapath + "/no_del_pickles/se_list.pickle"
    vallex.process_after_read(
        seqparser_sskj_path, seqparser_se_list_path,
        reload_sskj_senses=get_arg("reload_sskj_senses")
    )

    log.info(
        "\n[*] Starting the app." +
        "\n[*] args: {}".format(args) +
        "\n[*] | logfile: {}".format(logfile) +
        "\n[*] | debug: {}".format(str(app.debug))
    )
    # Run the app: localhost only in debug, all interfaces otherwise.
    if app.debug:
        app.run(port=PORT)
    else:
        app.run(host="0.0.0.0", port=PORT)
|
@ -0,0 +1,587 @@
|
||||
/*!
|
||||
* Bootstrap v3.3.7 (http://getbootstrap.com)
|
||||
* Copyright 2011-2016 Twitter, Inc.
|
||||
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
|
||||
*/
|
||||
.btn-default,
|
||||
.btn-primary,
|
||||
.btn-success,
|
||||
.btn-info,
|
||||
.btn-warning,
|
||||
.btn-danger {
|
||||
text-shadow: 0 -1px 0 rgba(0, 0, 0, .2);
|
||||
-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, .15), 0 1px 1px rgba(0, 0, 0, .075);
|
||||
box-shadow: inset 0 1px 0 rgba(255, 255, 255, .15), 0 1px 1px rgba(0, 0, 0, .075);
|
||||
}
|
||||
.btn-default:active,
|
||||
.btn-primary:active,
|
||||
.btn-success:active,
|
||||
.btn-info:active,
|
||||
.btn-warning:active,
|
||||
.btn-danger:active,
|
||||
.btn-default.active,
|
||||
.btn-primary.active,
|
||||
.btn-success.active,
|
||||
.btn-info.active,
|
||||
.btn-warning.active,
|
||||
.btn-danger.active {
|
||||
-webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, .125);
|
||||
box-shadow: inset 0 3px 5px rgba(0, 0, 0, .125);
|
||||
}
|
||||
.btn-default.disabled,
|
||||
.btn-primary.disabled,
|
||||
.btn-success.disabled,
|
||||
.btn-info.disabled,
|
||||
.btn-warning.disabled,
|
||||
.btn-danger.disabled,
|
||||
.btn-default[disabled],
|
||||
.btn-primary[disabled],
|
||||
.btn-success[disabled],
|
||||
.btn-info[disabled],
|
||||
.btn-warning[disabled],
|
||||
.btn-danger[disabled],
|
||||
fieldset[disabled] .btn-default,
|
||||
fieldset[disabled] .btn-primary,
|
||||
fieldset[disabled] .btn-success,
|
||||
fieldset[disabled] .btn-info,
|
||||
fieldset[disabled] .btn-warning,
|
||||
fieldset[disabled] .btn-danger {
|
||||
-webkit-box-shadow: none;
|
||||
box-shadow: none;
|
||||
}
|
||||
.btn-default .badge,
|
||||
.btn-primary .badge,
|
||||
.btn-success .badge,
|
||||
.btn-info .badge,
|
||||
.btn-warning .badge,
|
||||
.btn-danger .badge {
|
||||
text-shadow: none;
|
||||
}
|
||||
.btn:active,
|
||||
.btn.active {
|
||||
background-image: none;
|
||||
}
|
||||
.btn-default {
|
||||
text-shadow: 0 1px 0 #fff;
|
||||
background-image: -webkit-linear-gradient(top, #fff 0%, #e0e0e0 100%);
|
||||
background-image: -o-linear-gradient(top, #fff 0%, #e0e0e0 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#fff), to(#e0e0e0));
|
||||
background-image: linear-gradient(to bottom, #fff 0%, #e0e0e0 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#ffe0e0e0', GradientType=0);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #dbdbdb;
|
||||
border-color: #ccc;
|
||||
}
|
||||
.btn-default:hover,
|
||||
.btn-default:focus {
|
||||
background-color: #e0e0e0;
|
||||
background-position: 0 -15px;
|
||||
}
|
||||
.btn-default:active,
|
||||
.btn-default.active {
|
||||
background-color: #e0e0e0;
|
||||
border-color: #dbdbdb;
|
||||
}
|
||||
.btn-default.disabled,
|
||||
.btn-default[disabled],
|
||||
fieldset[disabled] .btn-default,
|
||||
.btn-default.disabled:hover,
|
||||
.btn-default[disabled]:hover,
|
||||
fieldset[disabled] .btn-default:hover,
|
||||
.btn-default.disabled:focus,
|
||||
.btn-default[disabled]:focus,
|
||||
fieldset[disabled] .btn-default:focus,
|
||||
.btn-default.disabled.focus,
|
||||
.btn-default[disabled].focus,
|
||||
fieldset[disabled] .btn-default.focus,
|
||||
.btn-default.disabled:active,
|
||||
.btn-default[disabled]:active,
|
||||
fieldset[disabled] .btn-default:active,
|
||||
.btn-default.disabled.active,
|
||||
.btn-default[disabled].active,
|
||||
fieldset[disabled] .btn-default.active {
|
||||
background-color: #e0e0e0;
|
||||
background-image: none;
|
||||
}
|
||||
.btn-primary {
|
||||
background-image: -webkit-linear-gradient(top, #337ab7 0%, #265a88 100%);
|
||||
background-image: -o-linear-gradient(top, #337ab7 0%, #265a88 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#337ab7), to(#265a88));
|
||||
background-image: linear-gradient(to bottom, #337ab7 0%, #265a88 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff265a88', GradientType=0);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #245580;
|
||||
}
|
||||
.btn-primary:hover,
|
||||
.btn-primary:focus {
|
||||
background-color: #265a88;
|
||||
background-position: 0 -15px;
|
||||
}
|
||||
.btn-primary:active,
|
||||
.btn-primary.active {
|
||||
background-color: #265a88;
|
||||
border-color: #245580;
|
||||
}
|
||||
.btn-primary.disabled,
|
||||
.btn-primary[disabled],
|
||||
fieldset[disabled] .btn-primary,
|
||||
.btn-primary.disabled:hover,
|
||||
.btn-primary[disabled]:hover,
|
||||
fieldset[disabled] .btn-primary:hover,
|
||||
.btn-primary.disabled:focus,
|
||||
.btn-primary[disabled]:focus,
|
||||
fieldset[disabled] .btn-primary:focus,
|
||||
.btn-primary.disabled.focus,
|
||||
.btn-primary[disabled].focus,
|
||||
fieldset[disabled] .btn-primary.focus,
|
||||
.btn-primary.disabled:active,
|
||||
.btn-primary[disabled]:active,
|
||||
fieldset[disabled] .btn-primary:active,
|
||||
.btn-primary.disabled.active,
|
||||
.btn-primary[disabled].active,
|
||||
fieldset[disabled] .btn-primary.active {
|
||||
background-color: #265a88;
|
||||
background-image: none;
|
||||
}
|
||||
.btn-success {
|
||||
background-image: -webkit-linear-gradient(top, #5cb85c 0%, #419641 100%);
|
||||
background-image: -o-linear-gradient(top, #5cb85c 0%, #419641 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#5cb85c), to(#419641));
|
||||
background-image: linear-gradient(to bottom, #5cb85c 0%, #419641 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff419641', GradientType=0);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #3e8f3e;
|
||||
}
|
||||
.btn-success:hover,
|
||||
.btn-success:focus {
|
||||
background-color: #419641;
|
||||
background-position: 0 -15px;
|
||||
}
|
||||
.btn-success:active,
|
||||
.btn-success.active {
|
||||
background-color: #419641;
|
||||
border-color: #3e8f3e;
|
||||
}
|
||||
.btn-success.disabled,
|
||||
.btn-success[disabled],
|
||||
fieldset[disabled] .btn-success,
|
||||
.btn-success.disabled:hover,
|
||||
.btn-success[disabled]:hover,
|
||||
fieldset[disabled] .btn-success:hover,
|
||||
.btn-success.disabled:focus,
|
||||
.btn-success[disabled]:focus,
|
||||
fieldset[disabled] .btn-success:focus,
|
||||
.btn-success.disabled.focus,
|
||||
.btn-success[disabled].focus,
|
||||
fieldset[disabled] .btn-success.focus,
|
||||
.btn-success.disabled:active,
|
||||
.btn-success[disabled]:active,
|
||||
fieldset[disabled] .btn-success:active,
|
||||
.btn-success.disabled.active,
|
||||
.btn-success[disabled].active,
|
||||
fieldset[disabled] .btn-success.active {
|
||||
background-color: #419641;
|
||||
background-image: none;
|
||||
}
|
||||
.btn-info {
|
||||
background-image: -webkit-linear-gradient(top, #5bc0de 0%, #2aabd2 100%);
|
||||
background-image: -o-linear-gradient(top, #5bc0de 0%, #2aabd2 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#5bc0de), to(#2aabd2));
|
||||
background-image: linear-gradient(to bottom, #5bc0de 0%, #2aabd2 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff2aabd2', GradientType=0);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #28a4c9;
|
||||
}
|
||||
.btn-info:hover,
|
||||
.btn-info:focus {
|
||||
background-color: #2aabd2;
|
||||
background-position: 0 -15px;
|
||||
}
|
||||
.btn-info:active,
|
||||
.btn-info.active {
|
||||
background-color: #2aabd2;
|
||||
border-color: #28a4c9;
|
||||
}
|
||||
.btn-info.disabled,
|
||||
.btn-info[disabled],
|
||||
fieldset[disabled] .btn-info,
|
||||
.btn-info.disabled:hover,
|
||||
.btn-info[disabled]:hover,
|
||||
fieldset[disabled] .btn-info:hover,
|
||||
.btn-info.disabled:focus,
|
||||
.btn-info[disabled]:focus,
|
||||
fieldset[disabled] .btn-info:focus,
|
||||
.btn-info.disabled.focus,
|
||||
.btn-info[disabled].focus,
|
||||
fieldset[disabled] .btn-info.focus,
|
||||
.btn-info.disabled:active,
|
||||
.btn-info[disabled]:active,
|
||||
fieldset[disabled] .btn-info:active,
|
||||
.btn-info.disabled.active,
|
||||
.btn-info[disabled].active,
|
||||
fieldset[disabled] .btn-info.active {
|
||||
background-color: #2aabd2;
|
||||
background-image: none;
|
||||
}
|
||||
.btn-warning {
|
||||
background-image: -webkit-linear-gradient(top, #f0ad4e 0%, #eb9316 100%);
|
||||
background-image: -o-linear-gradient(top, #f0ad4e 0%, #eb9316 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#f0ad4e), to(#eb9316));
|
||||
background-image: linear-gradient(to bottom, #f0ad4e 0%, #eb9316 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffeb9316', GradientType=0);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #e38d13;
|
||||
}
|
||||
.btn-warning:hover,
|
||||
.btn-warning:focus {
|
||||
background-color: #eb9316;
|
||||
background-position: 0 -15px;
|
||||
}
|
||||
.btn-warning:active,
|
||||
.btn-warning.active {
|
||||
background-color: #eb9316;
|
||||
border-color: #e38d13;
|
||||
}
|
||||
.btn-warning.disabled,
|
||||
.btn-warning[disabled],
|
||||
fieldset[disabled] .btn-warning,
|
||||
.btn-warning.disabled:hover,
|
||||
.btn-warning[disabled]:hover,
|
||||
fieldset[disabled] .btn-warning:hover,
|
||||
.btn-warning.disabled:focus,
|
||||
.btn-warning[disabled]:focus,
|
||||
fieldset[disabled] .btn-warning:focus,
|
||||
.btn-warning.disabled.focus,
|
||||
.btn-warning[disabled].focus,
|
||||
fieldset[disabled] .btn-warning.focus,
|
||||
.btn-warning.disabled:active,
|
||||
.btn-warning[disabled]:active,
|
||||
fieldset[disabled] .btn-warning:active,
|
||||
.btn-warning.disabled.active,
|
||||
.btn-warning[disabled].active,
|
||||
fieldset[disabled] .btn-warning.active {
|
||||
background-color: #eb9316;
|
||||
background-image: none;
|
||||
}
|
||||
.btn-danger {
|
||||
background-image: -webkit-linear-gradient(top, #d9534f 0%, #c12e2a 100%);
|
||||
background-image: -o-linear-gradient(top, #d9534f 0%, #c12e2a 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#d9534f), to(#c12e2a));
|
||||
background-image: linear-gradient(to bottom, #d9534f 0%, #c12e2a 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc12e2a', GradientType=0);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #b92c28;
|
||||
}
|
||||
.btn-danger:hover,
|
||||
.btn-danger:focus {
|
||||
background-color: #c12e2a;
|
||||
background-position: 0 -15px;
|
||||
}
|
||||
.btn-danger:active,
|
||||
.btn-danger.active {
|
||||
background-color: #c12e2a;
|
||||
border-color: #b92c28;
|
||||
}
|
||||
.btn-danger.disabled,
|
||||
.btn-danger[disabled],
|
||||
fieldset[disabled] .btn-danger,
|
||||
.btn-danger.disabled:hover,
|
||||
.btn-danger[disabled]:hover,
|
||||
fieldset[disabled] .btn-danger:hover,
|
||||
.btn-danger.disabled:focus,
|
||||
.btn-danger[disabled]:focus,
|
||||
fieldset[disabled] .btn-danger:focus,
|
||||
.btn-danger.disabled.focus,
|
||||
.btn-danger[disabled].focus,
|
||||
fieldset[disabled] .btn-danger.focus,
|
||||
.btn-danger.disabled:active,
|
||||
.btn-danger[disabled]:active,
|
||||
fieldset[disabled] .btn-danger:active,
|
||||
.btn-danger.disabled.active,
|
||||
.btn-danger[disabled].active,
|
||||
fieldset[disabled] .btn-danger.active {
|
||||
background-color: #c12e2a;
|
||||
background-image: none;
|
||||
}
|
||||
.thumbnail,
|
||||
.img-thumbnail {
|
||||
-webkit-box-shadow: 0 1px 2px rgba(0, 0, 0, .075);
|
||||
box-shadow: 0 1px 2px rgba(0, 0, 0, .075);
|
||||
}
|
||||
.dropdown-menu > li > a:hover,
|
||||
.dropdown-menu > li > a:focus {
|
||||
background-color: #e8e8e8;
|
||||
background-image: -webkit-linear-gradient(top, #f5f5f5 0%, #e8e8e8 100%);
|
||||
background-image: -o-linear-gradient(top, #f5f5f5 0%, #e8e8e8 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#f5f5f5), to(#e8e8e8));
|
||||
background-image: linear-gradient(to bottom, #f5f5f5 0%, #e8e8e8 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.dropdown-menu > .active > a,
|
||||
.dropdown-menu > .active > a:hover,
|
||||
.dropdown-menu > .active > a:focus {
|
||||
background-color: #2e6da4;
|
||||
background-image: -webkit-linear-gradient(top, #337ab7 0%, #2e6da4 100%);
|
||||
background-image: -o-linear-gradient(top, #337ab7 0%, #2e6da4 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#337ab7), to(#2e6da4));
|
||||
background-image: linear-gradient(to bottom, #337ab7 0%, #2e6da4 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff2e6da4', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.navbar-default {
|
||||
background-image: -webkit-linear-gradient(top, #fff 0%, #f8f8f8 100%);
|
||||
background-image: -o-linear-gradient(top, #fff 0%, #f8f8f8 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#fff), to(#f8f8f8));
|
||||
background-image: linear-gradient(to bottom, #fff 0%, #f8f8f8 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#fff8f8f8', GradientType=0);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
|
||||
background-repeat: repeat-x;
|
||||
border-radius: 4px;
|
||||
-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, .15), 0 1px 5px rgba(0, 0, 0, .075);
|
||||
box-shadow: inset 0 1px 0 rgba(255, 255, 255, .15), 0 1px 5px rgba(0, 0, 0, .075);
|
||||
}
|
||||
.navbar-default .navbar-nav > .open > a,
|
||||
.navbar-default .navbar-nav > .active > a {
|
||||
background-image: -webkit-linear-gradient(top, #dbdbdb 0%, #e2e2e2 100%);
|
||||
background-image: -o-linear-gradient(top, #dbdbdb 0%, #e2e2e2 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#dbdbdb), to(#e2e2e2));
|
||||
background-image: linear-gradient(to bottom, #dbdbdb 0%, #e2e2e2 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdbdbdb', endColorstr='#ffe2e2e2', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
-webkit-box-shadow: inset 0 3px 9px rgba(0, 0, 0, .075);
|
||||
box-shadow: inset 0 3px 9px rgba(0, 0, 0, .075);
|
||||
}
|
||||
.navbar-brand,
|
||||
.navbar-nav > li > a {
|
||||
text-shadow: 0 1px 0 rgba(255, 255, 255, .25);
|
||||
}
|
||||
.navbar-inverse {
|
||||
background-image: -webkit-linear-gradient(top, #3c3c3c 0%, #222 100%);
|
||||
background-image: -o-linear-gradient(top, #3c3c3c 0%, #222 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#3c3c3c), to(#222));
|
||||
background-image: linear-gradient(to bottom, #3c3c3c 0%, #222 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff3c3c3c', endColorstr='#ff222222', GradientType=0);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(enabled = false);
|
||||
background-repeat: repeat-x;
|
||||
border-radius: 4px;
|
||||
}
|
||||
.navbar-inverse .navbar-nav > .open > a,
|
||||
.navbar-inverse .navbar-nav > .active > a {
|
||||
background-image: -webkit-linear-gradient(top, #080808 0%, #0f0f0f 100%);
|
||||
background-image: -o-linear-gradient(top, #080808 0%, #0f0f0f 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#080808), to(#0f0f0f));
|
||||
background-image: linear-gradient(to bottom, #080808 0%, #0f0f0f 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff080808', endColorstr='#ff0f0f0f', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
-webkit-box-shadow: inset 0 3px 9px rgba(0, 0, 0, .25);
|
||||
box-shadow: inset 0 3px 9px rgba(0, 0, 0, .25);
|
||||
}
|
||||
.navbar-inverse .navbar-brand,
|
||||
.navbar-inverse .navbar-nav > li > a {
|
||||
text-shadow: 0 -1px 0 rgba(0, 0, 0, .25);
|
||||
}
|
||||
.navbar-static-top,
|
||||
.navbar-fixed-top,
|
||||
.navbar-fixed-bottom {
|
||||
border-radius: 0;
|
||||
}
|
||||
@media (max-width: 767px) {
|
||||
.navbar .navbar-nav .open .dropdown-menu > .active > a,
|
||||
.navbar .navbar-nav .open .dropdown-menu > .active > a:hover,
|
||||
.navbar .navbar-nav .open .dropdown-menu > .active > a:focus {
|
||||
color: #fff;
|
||||
background-image: -webkit-linear-gradient(top, #337ab7 0%, #2e6da4 100%);
|
||||
background-image: -o-linear-gradient(top, #337ab7 0%, #2e6da4 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#337ab7), to(#2e6da4));
|
||||
background-image: linear-gradient(to bottom, #337ab7 0%, #2e6da4 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff2e6da4', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
}
|
||||
.alert {
|
||||
text-shadow: 0 1px 0 rgba(255, 255, 255, .2);
|
||||
-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, .25), 0 1px 2px rgba(0, 0, 0, .05);
|
||||
box-shadow: inset 0 1px 0 rgba(255, 255, 255, .25), 0 1px 2px rgba(0, 0, 0, .05);
|
||||
}
|
||||
.alert-success {
|
||||
background-image: -webkit-linear-gradient(top, #dff0d8 0%, #c8e5bc 100%);
|
||||
background-image: -o-linear-gradient(top, #dff0d8 0%, #c8e5bc 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#dff0d8), to(#c8e5bc));
|
||||
background-image: linear-gradient(to bottom, #dff0d8 0%, #c8e5bc 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffc8e5bc', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #b2dba1;
|
||||
}
|
||||
.alert-info {
|
||||
background-image: -webkit-linear-gradient(top, #d9edf7 0%, #b9def0 100%);
|
||||
background-image: -o-linear-gradient(top, #d9edf7 0%, #b9def0 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#d9edf7), to(#b9def0));
|
||||
background-image: linear-gradient(to bottom, #d9edf7 0%, #b9def0 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffb9def0', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #9acfea;
|
||||
}
|
||||
.alert-warning {
|
||||
background-image: -webkit-linear-gradient(top, #fcf8e3 0%, #f8efc0 100%);
|
||||
background-image: -o-linear-gradient(top, #fcf8e3 0%, #f8efc0 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#fcf8e3), to(#f8efc0));
|
||||
background-image: linear-gradient(to bottom, #fcf8e3 0%, #f8efc0 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fff8efc0', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #f5e79e;
|
||||
}
|
||||
.alert-danger {
|
||||
background-image: -webkit-linear-gradient(top, #f2dede 0%, #e7c3c3 100%);
|
||||
background-image: -o-linear-gradient(top, #f2dede 0%, #e7c3c3 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#f2dede), to(#e7c3c3));
|
||||
background-image: linear-gradient(to bottom, #f2dede 0%, #e7c3c3 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffe7c3c3', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #dca7a7;
|
||||
}
|
||||
.progress {
|
||||
background-image: -webkit-linear-gradient(top, #ebebeb 0%, #f5f5f5 100%);
|
||||
background-image: -o-linear-gradient(top, #ebebeb 0%, #f5f5f5 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#ebebeb), to(#f5f5f5));
|
||||
background-image: linear-gradient(to bottom, #ebebeb 0%, #f5f5f5 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffebebeb', endColorstr='#fff5f5f5', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.progress-bar {
|
||||
background-image: -webkit-linear-gradient(top, #337ab7 0%, #286090 100%);
|
||||
background-image: -o-linear-gradient(top, #337ab7 0%, #286090 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#337ab7), to(#286090));
|
||||
background-image: linear-gradient(to bottom, #337ab7 0%, #286090 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff286090', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.progress-bar-success {
|
||||
background-image: -webkit-linear-gradient(top, #5cb85c 0%, #449d44 100%);
|
||||
background-image: -o-linear-gradient(top, #5cb85c 0%, #449d44 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#5cb85c), to(#449d44));
|
||||
background-image: linear-gradient(to bottom, #5cb85c 0%, #449d44 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff449d44', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.progress-bar-info {
|
||||
background-image: -webkit-linear-gradient(top, #5bc0de 0%, #31b0d5 100%);
|
||||
background-image: -o-linear-gradient(top, #5bc0de 0%, #31b0d5 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#5bc0de), to(#31b0d5));
|
||||
background-image: linear-gradient(to bottom, #5bc0de 0%, #31b0d5 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff31b0d5', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.progress-bar-warning {
|
||||
background-image: -webkit-linear-gradient(top, #f0ad4e 0%, #ec971f 100%);
|
||||
background-image: -o-linear-gradient(top, #f0ad4e 0%, #ec971f 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#f0ad4e), to(#ec971f));
|
||||
background-image: linear-gradient(to bottom, #f0ad4e 0%, #ec971f 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffec971f', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.progress-bar-danger {
|
||||
background-image: -webkit-linear-gradient(top, #d9534f 0%, #c9302c 100%);
|
||||
background-image: -o-linear-gradient(top, #d9534f 0%, #c9302c 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#d9534f), to(#c9302c));
|
||||
background-image: linear-gradient(to bottom, #d9534f 0%, #c9302c 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc9302c', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.progress-bar-striped {
|
||||
background-image: -webkit-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent);
|
||||
background-image: -o-linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent);
|
||||
background-image: linear-gradient(45deg, rgba(255, 255, 255, .15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, .15) 50%, rgba(255, 255, 255, .15) 75%, transparent 75%, transparent);
|
||||
}
|
||||
.list-group {
|
||||
border-radius: 4px;
|
||||
-webkit-box-shadow: 0 1px 2px rgba(0, 0, 0, .075);
|
||||
box-shadow: 0 1px 2px rgba(0, 0, 0, .075);
|
||||
}
|
||||
.list-group-item.active,
|
||||
.list-group-item.active:hover,
|
||||
.list-group-item.active:focus {
|
||||
text-shadow: 0 -1px 0 #286090;
|
||||
background-image: -webkit-linear-gradient(top, #337ab7 0%, #2b669a 100%);
|
||||
background-image: -o-linear-gradient(top, #337ab7 0%, #2b669a 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#337ab7), to(#2b669a));
|
||||
background-image: linear-gradient(to bottom, #337ab7 0%, #2b669a 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff2b669a', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #2b669a;
|
||||
}
|
||||
.list-group-item.active .badge,
|
||||
.list-group-item.active:hover .badge,
|
||||
.list-group-item.active:focus .badge {
|
||||
text-shadow: none;
|
||||
}
|
||||
.panel {
|
||||
-webkit-box-shadow: 0 1px 2px rgba(0, 0, 0, .05);
|
||||
box-shadow: 0 1px 2px rgba(0, 0, 0, .05);
|
||||
}
|
||||
.panel-default > .panel-heading {
|
||||
background-image: -webkit-linear-gradient(top, #f5f5f5 0%, #e8e8e8 100%);
|
||||
background-image: -o-linear-gradient(top, #f5f5f5 0%, #e8e8e8 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#f5f5f5), to(#e8e8e8));
|
||||
background-image: linear-gradient(to bottom, #f5f5f5 0%, #e8e8e8 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.panel-primary > .panel-heading {
|
||||
background-image: -webkit-linear-gradient(top, #337ab7 0%, #2e6da4 100%);
|
||||
background-image: -o-linear-gradient(top, #337ab7 0%, #2e6da4 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#337ab7), to(#2e6da4));
|
||||
background-image: linear-gradient(to bottom, #337ab7 0%, #2e6da4 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff337ab7', endColorstr='#ff2e6da4', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.panel-success > .panel-heading {
|
||||
background-image: -webkit-linear-gradient(top, #dff0d8 0%, #d0e9c6 100%);
|
||||
background-image: -o-linear-gradient(top, #dff0d8 0%, #d0e9c6 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#dff0d8), to(#d0e9c6));
|
||||
background-image: linear-gradient(to bottom, #dff0d8 0%, #d0e9c6 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffd0e9c6', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.panel-info > .panel-heading {
|
||||
background-image: -webkit-linear-gradient(top, #d9edf7 0%, #c4e3f3 100%);
|
||||
background-image: -o-linear-gradient(top, #d9edf7 0%, #c4e3f3 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#d9edf7), to(#c4e3f3));
|
||||
background-image: linear-gradient(to bottom, #d9edf7 0%, #c4e3f3 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffc4e3f3', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.panel-warning > .panel-heading {
|
||||
background-image: -webkit-linear-gradient(top, #fcf8e3 0%, #faf2cc 100%);
|
||||
background-image: -o-linear-gradient(top, #fcf8e3 0%, #faf2cc 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#fcf8e3), to(#faf2cc));
|
||||
background-image: linear-gradient(to bottom, #fcf8e3 0%, #faf2cc 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fffaf2cc', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.panel-danger > .panel-heading {
|
||||
background-image: -webkit-linear-gradient(top, #f2dede 0%, #ebcccc 100%);
|
||||
background-image: -o-linear-gradient(top, #f2dede 0%, #ebcccc 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#f2dede), to(#ebcccc));
|
||||
background-image: linear-gradient(to bottom, #f2dede 0%, #ebcccc 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffebcccc', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
}
|
||||
.well {
|
||||
background-image: -webkit-linear-gradient(top, #e8e8e8 0%, #f5f5f5 100%);
|
||||
background-image: -o-linear-gradient(top, #e8e8e8 0%, #f5f5f5 100%);
|
||||
background-image: -webkit-gradient(linear, left top, left bottom, from(#e8e8e8), to(#f5f5f5));
|
||||
background-image: linear-gradient(to bottom, #e8e8e8 0%, #f5f5f5 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffe8e8e8', endColorstr='#fff5f5f5', GradientType=0);
|
||||
background-repeat: repeat-x;
|
||||
border-color: #dcdcdc;
|
||||
-webkit-box-shadow: inset 0 1px 3px rgba(0, 0, 0, .05), 0 1px 0 rgba(255, 255, 255, .1);
|
||||
box-shadow: inset 0 1px 3px rgba(0, 0, 0, .05), 0 1px 0 rgba(255, 255, 255, .1);
|
||||
}
|
||||
/*# sourceMappingURL=bootstrap-theme.css.map */
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
After Width: | Height: | Size: 106 KiB |
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
@ -0,0 +1,13 @@
|
||||
// This file is autogenerated via the `commonjs` Grunt task. You can require() this file in a CommonJS environment.
|
||||
require('../../js/transition.js')
|
||||
require('../../js/alert.js')
|
||||
require('../../js/button.js')
|
||||
require('../../js/carousel.js')
|
||||
require('../../js/collapse.js')
|
||||
require('../../js/dropdown.js')
|
||||
require('../../js/modal.js')
|
||||
require('../../js/tooltip.js')
|
||||
require('../../js/popover.js')
|
||||
require('../../js/scrollspy.js')
|
||||
require('../../js/tab.js')
|
||||
require('../../js/affix.js')
|
File diff suppressed because one or more lines are too long
@ -0,0 +1,306 @@
|
||||
// jQuery element sets currently highlighted; pushed by highlight_linked and
// helper_highlight_ssj_id, drained by their matching clear functions.
var hover_emph = [];
// When true, mouseover/mouseout highlighting is frozen (toggled by clicking
// a functor link — see toggle_highlight_lock).
var highlight_lock = false;
// Name of the sense group the user last worked with; reused to prefill the
// sense-group input and to query saved sense ids.
var last_sense_group = "";
|
||||
|
||||
// Page bootstrap: load the letter index and the reduce-function radio list,
// wire the change handler, select the first reduce function, and size the
// word list to the viewport.
$(document).ready(function() {
    $.get("/letters", function(data, status) {
        $("#letters").html(data);
    });

    $.get("/reduce_functions", function(data, status) {
        $("#reduce-functions").html(data);
        // Re-fetch frames whenever the selected reduce function changes.
        $("#reduce-functions input[type=radio]").change(function() {
            get_frames(
                $("#chosen-one").text()
            );
        })
        //Click on the first radio button - for 100% up to date with the model.
        // FIX: checked state is a live property; .attr() only sets the
        // initial attribute on jQuery >= 1.6. Use .prop() instead.
        var first_radio = $("#reduce-functions input[type=radio]:first");
        first_radio.prop("checked", true)
        get_frames($("#chosen-one").text());
    });
    handle_resizing();
})
|
||||
|
||||
// Keep the word list sized to the window on every resize.
window.onresize = handle_resizing;
|
||||
|
||||
// Read the value of the currently selected reduce-function radio button
// from the view.
function view_get_rf() {
    var checked_radio = $("#reduce-functions").find("input:checked");
    return checked_radio.val();
}
|
||||
|
||||
// Fetch the word list for the given initial letter and render it into #words.
function get_words(letter) {
    var url = "/words/" + letter;
    $.get(url, function(data, status) {
        $("#words").html(data);
    });
}
|
||||
|
||||
// Fetch the valency frames for `word` and render them into #frames-area.
//
// reduce_function: server-side reduction to apply; when null, the currently
//                  selected radio button's value is used.
// modify_view:     when true, show/hide the sense-id and sense-description
//                  widgets according to the reduce function.
// callback:        optional hook run after the frames are rendered.
//
// NOTE(review): `word` is concatenated into the query string unencoded —
// presumably headwords never contain reserved URL characters; confirm, or
// wrap with encodeURIComponent.
function get_frames(word, reduce_function=null, modify_view=true, callback=null) {
    if (word == "") {
        return
    }
    if (reduce_function == null) {
        reduce_function = view_get_rf();
    }
    $("#chosen-one").text(word);
    var opt_rf = "";
    if (reduce_function != null) {
        opt_rf = "&rf=" + reduce_function;
    }
    $.get("/frames?hw=" + word + opt_rf, function(data, status) {
        $("#frames-area").html(data);
        $("#n-frames").text("število stavčnih vzorcev: " + $(".frame-div").length);

        // Add functor highlighting
        $(".functor-link")
            .mouseover(function() {
                highlight_linked($(this))
            })
            .mouseout(unhighlight_linked)
            .click(function() {
                toggle_highlight_lock($(this))
            });
        // modify sense information div
        if (modify_view) {
            $("#word-info-right").html("");
            // Hidden/disabled by default; selectively re-shown per reduce
            // function below.
            $(".frame-sense-id").hide();
            $(".frame-sense-id").find("input").prop("disabled", true);
            $(".frame-sense-desc").hide();
            //$(".frame-sense-desc").find("input").prop("disabled", true);
            switch (reduce_function) {
                case "reduce_0":
                case "reduce_1":
                    break;
                case "reduce_3":
                    //ssj
                    $(".frame-sense-id").show();
                    $(".frame-sense-desc").show();
                    break;
                case "reduce_4":
                    //kmeans
                    $(".frame-sense-id").show();
                    break;
                case "reduce_5":
                    //user
                    user_input_menu(false);
                    $(".frame-sense-id").show();
                    break;
            }
        }
        if (callback != null) {
            callback();
        }
    });
}
|
||||
|
||||
// Resize the #words list to 95% of the space between its top edge and the
// bottom of the document.
function handle_resizing() {
    var words_el = $("#words");
    var available = $(document).height() - words_el.position().top;
    words_el.height(available * 0.95);
}
|
||||
|
||||
// On hover over a functor link, highlight every element inside the same
// frame (both the frame table and the example sentences) that shares one of
// the link's CSS classes. Collected matches are remembered in hover_emph so
// unhighlight_linked can undo them.
function highlight_linked(dom_element) {
    if (highlight_lock) {
        return;
    }
    var frame_div = dom_element.parents(".frame-div");
    var table_part = frame_div.find(".frame-table");
    var sentence_part = frame_div.find(".frame-sentences");
    var class_list = dom_element.attr("class").split(" ");
    class_list.forEach(function(cls) {
        // "functor-link" is the trigger class itself, not a link target.
        if (cls == "functor-link") {
            return
        }
        cls = cls.replace(".", "\\."); //escaping dots!
        var in_table = table_part.find("." + cls);
        var in_sentences = sentence_part.find("." + cls);
        // Highlight only classes that appear on both sides of the frame.
        if (in_table.length > 0 && in_sentences.length > 0) {
            var combined = $.merge(in_table, in_sentences);
            combined.addClass("functor-highlight");
            hover_emph.push(combined);
        }
    })
}
|
||||
|
||||
// Undo the highlighting applied by highlight_linked, unless the user has
// locked the current highlight.
function unhighlight_linked() {
    if (highlight_lock) {
        return;
    }
    for (var i = 0; i < hover_emph.length; i++) {
        hover_emph[i].removeClass("functor-highlight");
    }
    hover_emph = [];
}
|
||||
|
||||
// Toggle the highlight freeze when a highlighted functor link is clicked.
// Does nothing when nothing is highlighted or when the clicked element is
// not itself highlighted.
function toggle_highlight_lock(dom_element) {
    // BUG FIX: the original tested `hover_emph.len` (always undefined, so
    // `undefined == 0` is false and the guard never fired); arrays expose
    // `.length`.
    if (
        hover_emph.length == 0 ||
        !dom_element.hasClass("functor-highlight")
    ) {
        return;
    }
    highlight_lock = !highlight_lock;
}
|
||||
|
||||
// Color in red the elements under `pdiv` matching the first ssj id (taken
// from `ssj_ids`) that yields more than one match. Matches are remembered
// in hover_emph so helper_clear_highlight_ssj_id can undo the styling.
function helper_highlight_ssj_id(pdiv, ssj_ids) {
    hover_emph = [];
    for (var i = 0; i < ssj_ids.length; i++) {
        // FIX: declared with var — these previously leaked as implicit
        // globals.
        var ssj_id = ssj_ids[i];
        ssj_id = ssj_id.replace(".", "\\."); // escape dots for the selector
        ssj_id = "." + ssj_id;
        var matches = pdiv.find(ssj_id).toArray();
        // NOTE(review): requires strictly more than one match before
        // highlighting — presumably a lone match is the frame's own header
        // element; confirm whether `> 0` was intended.
        if (matches.length > 1) {
            hover_emph = matches;
            break;
        }
    }
    hover_emph.forEach(function(element) {
        $(element).css("color", "red");
    });
}
|
||||
|
||||
// Remove the red coloring applied by helper_highlight_ssj_id and forget
// the remembered matches.
function helper_clear_highlight_ssj_id() {
    for (var i = 0; i < hover_emph.length; i++) {
        $(hover_emph[i]).css("color", "");
    }
    hover_emph = [];
}
|
||||
|
||||
// Expand or collapse the example sentences of the frame containing `el`.
// The [+]/[-] sign element reflects the state; passing sign="+" forces the
// sentences open regardless of the current state.
function toggle_frame_sentences(el, sign=null) {
    // FIX: declared with var — these previously leaked as implicit globals.
    var pdiv = el.parents(".frame-div");
    var fs = pdiv.find(".frame-sentences");
    var sign_element = pdiv.find(".sign-element");
    if ((sign == "+") || (sign_element.text() == "[+]")) {
        sign_element.text("[-]");
        fs.show();
    } else {
        sign_element.text("[+]");
        fs.hide();
    }
}
|
||||
|
||||
// Render the user sense-annotation menu in #word-info-right.
//
// new_entries=true:  build the input form (sense-group name, password,
//                    save/cancel buttons), re-fetch unreduced frames with
//                    editable sense-id inputs, and prefill them with any
//                    sense ids already saved for this headword and group.
// new_entries=false: show the list of existing user sense groups for the
//                    current headword, plus a "novi pomeni" button.
//
// NOTE(review): last_sense_group and the cookie value are interpolated into
// HTML attribute values unescaped — presumably safe because they pass
// through applyXSSprotection on save; confirm.
function user_input_menu(new_entries) {
    if (new_entries) {
        // Remember the currently selected sense group, unless it is the
        // placeholder option.
        var tmp_sense_group = $("#word-info-right").find("option:selected").text();
        if (tmp_sense_group != "-- izberi --") {
            last_sense_group = tmp_sense_group;
        }
        $("#word-info-right").html(
            "<input type=text name='sense_group' \
placeholder='ime skupine pomenov' value='" + last_sense_group + "'></input>"
        );
        $("#word-info-right").append(
            "<button onclick='user_input_finish(false)'>prekliči</button>"
        )
        $("#word-info-right").append(
            "<input name='sense_passwd' \
type=password placeholder='geslo' value='" + getCookie("sense_passwd") + "'></input>"
        )
        $("#word-info-right").append(
            "<button onclick='user_input_finish(true)'>shrani</button>"
        )
        // Reload frames without reduction so every frame gets its own
        // editable sense-id input.
        get_frames($("#chosen-one").text(), "reduce_0", false, function() {
            $(".frame-sense-id").find("input")
                .prop("disabled", false)
                .val("");
            $(".frame-sense-id").show();
            //$(".frame-sense-desc").find("input").prop("disabled", true);
            $(".frame-sense-desc").hide();
            toggle_frame_sentences($(".frame-sense"), "+");
            //fill input fields with known sense_ids
            $.get("/get_sense_ids?collname=user_senses&hw=" +
                $("#chosen-one").text() +
                "&sg=" + last_sense_group, function(data, status) {
                data = JSON.parse(data);
                $(".frame-div").each(function(idx, el) {
                    var jqel = $(el);
                    var ssj_id = jqel.find(".frame-hw-id").text();
                    if (ssj_id in data) {
                        jqel.find("input[name='sense_id']").val(data[ssj_id]);
                    }
                });
            });
        });
    } else {
        $.get("/user_sense_groups/" + $("#chosen-one").text(), function(data, status){
            $("#word-info-right").html(data);
            $("#word-info-right").append(
                "<button onclick='user_input_menu(true)'>novi pomeni</button>"
            )
        });
    }
}
|
||||
|
||||
// Close the sense-annotation form. When save=true, collect the sense-group
// name, password, and per-frame sense ids, POST them to /user_senses, and
// remember the password in a cookie; then restore the read-only menu and
// re-render the frames.
function user_input_finish(save) {
    if (save) {
        var sense_group = $("input[name='sense_group']").val().applyXSSprotection();
        var sense_passwd = $("input[name='sense_passwd']").val().applyXSSprotection();
        setCookie("sense_passwd", sense_passwd, 1);
        last_sense_group = sense_group;
        // An empty group name means nothing to save: fall through to cancel.
        if (sense_group == "") {
            user_input_finish(false);
            return;
        }
        // FIX: declared with var — previously leaked as an implicit global.
        var sense_data = {
            "headword": $("#chosen-one").text(),
            "sense_group": sense_group,
            "sense_passwd": sense_passwd,
            "entries": {},
        }
        $(".frame-div").each(function(index){
            var sense_id = $(this).find("input[name='sense_id']").val().applyXSSprotection();
            if (sense_id === "None") {
                return;
            }
            // FIX: declared with var (was an implicit global); the unused
            // `frame_data` local was removed.
            var ssj_id = $(this).find(".frame-hw-id").text();
            sense_data["entries"][ssj_id] = sense_id;
        });
        if (Object.keys(sense_data["entries"]).length > 0) {
            $.ajax({
                method: "POST",
                url: "/user_senses",
                data: JSON.stringify(sense_data),
                async: false,
                complete: function() {
                    pick_sense_group(sense_group);
                }
            });
        }
        //pick_sense_group(sense_group);
    }
    user_input_menu(false);
    get_frames($("#chosen-one").text());
}
|
||||
|
||||
// Tell the backend which sense group is active; optionally re-fetch the
// frames afterwards (gf=true).
function pick_sense_group(sense_group, gf=false) {
    last_sense_group = sense_group;
    var url = "/pick_sense_group/" + sense_group;
    $.get(url, function() {
        if (gf) {
            get_frames($("#chosen-one").text());
        }
    });
}
|
||||
|
||||
// Escape angle brackets so user-supplied text cannot inject HTML.
// BUG FIX: as written the replacements were identity no-ops ("<" -> "<");
// the only meaningful reading of the function's name and its use on values
// echoed into HTML is entity escaping.
// NOTE(review): extending String.prototype is an anti-pattern, but callers
// throughout this file depend on it, so the interface is preserved.
String.prototype.applyXSSprotection = function(){
    return this.replace(/</g, "&lt;").replace(/>/g, "&gt;");
};
|
||||
|
||||
// Store a cookie valid for `exdays` days, scoped to the whole site.
function setCookie(cname, cvalue, exdays) {
    var expiry = new Date();
    expiry.setTime(expiry.getTime() + exdays * 24 * 60 * 60 * 1000);
    var expires = "expires=" + expiry.toUTCString();
    document.cookie = cname + "=" + cvalue + ";" + expires + ";path=/";
}
|
||||
|
||||
// Return the value of the named cookie, or "" when it is not set.
function getCookie(cname) {
    var prefix = cname + "=";
    var parts = document.cookie.split(';');
    for (var i = 0; i < parts.length; i++) {
        var part = parts[i];
        // Strip the leading spaces left by the "; " separators.
        while (part.charAt(0) == ' ') {
            part = part.substring(1);
        }
        if (part.indexOf(prefix) == 0) {
            return part.substring(prefix.length, part.length);
        }
    }
    return "";
}
|
@ -0,0 +1,205 @@
|
||||
html {
|
||||
height: 95%;
|
||||
}
|
||||
|
||||
body {
|
||||
height: 95%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
/* font-family: "Arial", Helvetica, sans-serif; */
|
||||
background-color: var(--color-1);
|
||||
}
|
||||
|
||||
/* Zero-width border on every div — presumably a layout-debugging toggle
   (set the width to 1px to visualize the box structure); confirm before
   removing. */
div {
border: 0.0px solid gray;
}
|
||||
|
||||
#letters a, #words a, .frame-sentences-wrapper a {
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
#letters a:hover, #words a:hover {
|
||||
color: var(--color-1);
|
||||
}
|
||||
|
||||
.monospace {
|
||||
font-family: "Lucida Console", Monaco, monospace
|
||||
}
|
||||
|
||||
#header h2 {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
#letters {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
justify-content: space-evenly;
|
||||
padding: 5px;
|
||||
margin: 5px;
|
||||
background-color: var(--color-3);
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
#letters span {
|
||||
margin: 5px;
|
||||
}
|
||||
|
||||
#words {
|
||||
width: 200px;
|
||||
overflow-y: scroll;
|
||||
background-color: var(--color-3);
|
||||
border-radius: 5px;
|
||||
margin: 5px;
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
#main-body {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
|
||||
#main-content {
|
||||
flex-grow: 1;
|
||||
margin: 5px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
#reduce-functions {
|
||||
background-color: var(--color-3);
|
||||
padding: 5px;
|
||||
margin-bottom: 5px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
#reduce-functions * {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
#word-info {
|
||||
background-color: var(--color-4);
|
||||
border-top-left-radius: 5px;
|
||||
border-top-right-radius: 5px;
|
||||
margin-top: 5px;
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
#word-info-left {
|
||||
display: inline-block;
|
||||
float: left;
|
||||
margin-right: 50px;
|
||||
}
|
||||
|
||||
#word-info-right {
|
||||
display: inline-block;
|
||||
float: left;
|
||||
}
|
||||
|
||||
#frames-area {
|
||||
flex-grow: 1;
|
||||
background-color: var(--color-2);
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
#words {
|
||||
width: 200px;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
|
||||
.frame-sense {
|
||||
display: inline;
|
||||
float: left;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.frame-table {
|
||||
display: inline;
|
||||
float: left;
|
||||
border-collapse: collapse;
|
||||
border: 1px solid;
|
||||
margin-right: 20px;
|
||||
}
|
||||
|
||||
.frame-table td, th {
|
||||
border-left: 1px solid;
|
||||
border-right: 1px solid;
|
||||
padding: 2px;
|
||||
min-width: 50px;
|
||||
max-width: 100px;
|
||||
max-height: 200px;
|
||||
}
|
||||
|
||||
.frame-sentences-wrapper {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.frame-sentences td {
|
||||
padding-bottom: 10px;
|
||||
}
|
||||
|
||||
/* Hide some data*/
|
||||
/*
|
||||
.frame-table tr:nth-child(2) {
|
||||
display: none;
|
||||
}
|
||||
.frame-table tr:nth-child(3) {
|
||||
display: none;
|
||||
}
|
||||
*/
|
||||
|
||||
.token-div {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
.token-div td:nth-child(1) {
|
||||
min-width: 100px;
|
||||
}
|
||||
|
||||
.hw-highlight {
|
||||
color: blue;
|
||||
}
|
||||
|
||||
.functor-highlight {
|
||||
color: red;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* CSS - Cascading Style Sheet */
|
||||
/* Palette color codes */
|
||||
/* Palette URL: http://paletton.com/#uid=13D0u0k7UUa3cZA5wXlaiQ5cFL3 */
|
||||
|
||||
/* Feel free to copy&paste color codes to your application */
|
||||
|
||||
|
||||
/* As hex codes */
|
||||
|
||||
|
||||
|
||||
|
||||
/* As RGBa codes */
|
||||
|
||||
.rgba-primary-0 { color: rgba(183,206,236,1) } /* Main Primary color */
|
||||
.rgba-primary-1 { color: rgba(228,238,251,1) }
|
||||
.rgba-primary-2 { color: rgba(206,223,245,1) }
|
||||
.rgba-primary-3 { color: rgba(159,188,225,1) }
|
||||
.rgba-primary-4 { color: rgba(135,168,211,1) }
|
||||
|
||||
|
||||
|
||||
/* Generated by Paletton.com © 2002-2014 */
|
||||
/* http://paletton.com */
|
||||
|
||||
:root {
|
||||
--color-0: #B7CEEC;
|
||||
--color-1: #E4EEFB;
|
||||
--color-2: #CEDFF5;
|
||||
--color-3: #9FBCE1;
|
||||
--color-4: #87A8D3;
|
||||
}
|
@ -0,0 +1,64 @@
|
||||
{% for frame in frames %}
|
||||
<div class="frame-div">
|
||||
<div hidden class="frame-hw-id">{{ frame.tids[0] }}</div>
|
||||
<div class="frame-table-wrapper">
|
||||
<table class="frame-table">
|
||||
<tr>
|
||||
{% for slot in frame.slots %}
|
||||
<td><span class="functor-link {{ slot.tids|join(' ') }}">{{ slot.functor }}</span></td>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
<tr>
|
||||
{% for slot in frame.slots %}
|
||||
<td title="št. pojavitev/št. povedi">{{ slot.tids|length }}/{{ frame.tids|length }}</td>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<table class="frame-sense">
|
||||
<tr class="frame-sense-id">
|
||||
<td>ID pomena: </td>
|
||||
<td>
|
||||
<input type="text" name="sense_id" value="{{ frame.sense_info.get('sense_id') }}">
|
||||
</td>
|
||||
</tr>
|
||||
<tr class="frame-sense-desc">
|
||||
<td>Opis pomena: </td>
|
||||
<td>
|
||||
<!--input type="text" name="sense_desc" value="{{ frame.sense_info.get('sense_desc') }}"-->
|
||||
<ul>
|
||||
{% set desc_arr = frame.sense_info.get('sense_desc') %}
|
||||
{% if desc_arr is not none %}
|
||||
{% for desc in desc_arr %}
|
||||
<li>{{ desc }}</li>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
</ul>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="frame-sentences-wrapper">
|
||||
<p><a class="monospace sign-element" href="javascript:void(0)" onClick="toggle_frame_sentences($(this))">[+]</a> št. stavkov: {{ frame.tids|length }}</p>
|
||||
<div hidden class="frame-sentences">
|
||||
<table>
|
||||
{% for sentence in frame.sentences %}
|
||||
<tr><td>
|
||||
{% for token in sentence %}
|
||||
{% if token[1]["word"] != None %}
|
||||
{% set hwclass = "" %}
|
||||
{% if token[0] in frame.tids %}
|
||||
{% set hwclass = " hw-highlight" %}
|
||||
{% endif %}
|
||||
<span class="functor-link {{ token[0] }}{{ hwclass }}">{{ token[1]["word"] }} </span>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</td></tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
</div>
|
||||
<hr />
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
@ -0,0 +1,40 @@
|
||||
{% for frame in frames %}
|
||||
<div class="frame-div">
|
||||
<p>[{% for tid in frame.tids %}
|
||||
<a href="javascript:void(0)" onClick="get_token_info('{{ tid }}', this)">{{ tid }}</a>
|
||||
{% if not loop.last %}
|
||||
,
|
||||
{% endif %}
|
||||
{% endfor %}]</p>
|
||||
|
||||
<table class="frame-table">
|
||||
<tr>
|
||||
{% for slot in frame.slots %}
|
||||
<td class="{{ slot.tids|join(" ") }}">{{ slot.functor }}</td>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
<tr>
|
||||
{% for slot in frame.slots %}
|
||||
<td>{% for sh in slot.shallows %}
|
||||
{{ sh }}
|
||||
{% if not loop.last %}
|
||||
,
|
||||
{% endif %}
|
||||
{% endfor %}</td>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
<tr>
|
||||
{% for slot in frame.slots %}
|
||||
<td>{% for tid in slot.tids %}
|
||||
<a href="javascript:void(0)" onClick="get_token_info('{{ tid }}', this)">{{ tid }}</a>
|
||||
{% if not loop.last %}
|
||||
,
|
||||
{% endif %}
|
||||
{% endfor %}</td>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
</table>
|
||||
<div class="token-div"></div>
|
||||
<hr />
|
||||
</div>
|
||||
{% endfor %}
|
@ -0,0 +1,37 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<meta charset="utf-8">
|
||||
<link rel=stylesheet type=text/css href="{{ url_for('static', filename='./lib/bootstrap/css/bootstrap.min.css') }}">
|
||||
<link rel=stylesheet type=text/css href="{{ url_for('static', filename='style.css') }}">
|
||||
<head>
|
||||
<title>Leksikon</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id=header>
|
||||
<h2>Leksikon vezljivosti slovenskih glagolov</h2>
|
||||
<div id=letters></div>
|
||||
</div>
|
||||
<div id=main-body>
|
||||
<div id=left>
|
||||
<div id=words></div>
|
||||
</div>
|
||||
<div id=main-content>
|
||||
<div id=reduce-functions></div>
|
||||
<div id=word-info>
|
||||
<div id=word-info-left>
|
||||
<h3 id="chosen-one"></h3>
|
||||
<p><span id="n-frames"></span></p>
|
||||
</div>
|
||||
<div id="word-info-right">
|
||||
<!--button onclick="user_input_menu('new')">debug_button</button-->
|
||||
</div>
|
||||
</div>
|
||||
<div id=frames-area></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="{{ url_for('static', filename='./lib/jquery.min.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='./lib/bootstrap/js/bootstrap.min.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='script.js') }}"></script>
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,5 @@
|
||||
{% for l in letters %}
|
||||
<span>
|
||||
<a href="javascript:get_words('{{ l[0] }}')">{{ l[0]|upper + "(" + l[1]|string + ")" }}</a>
|
||||
</span>
|
||||
{% endfor %}
|
@ -0,0 +1,10 @@
|
||||
<div class="row">
|
||||
<div class="container">
|
||||
<div class="col-sm-2">Način prikaza:</div>
|
||||
<div class="col-sm-10">
|
||||
{% for k, e in reduce_functions.items() | sort(attribute="0") %}
|
||||
<label class=radio-inline title="{{ e['desc'] }}"><input type="radio" name="rf" value="{{ k }}">{{ e["simple_name"] }}</label>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
@ -0,0 +1,6 @@
|
||||
<select>
|
||||
<option disabled selected value>-- izberi --</option>
|
||||
{% for sg in sense_groups %}
|
||||
<option {% if sg == state_sense_group %}selected {% endif %}onclick="pick_sense_group('{{ sg }}', true)">{{ sg }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
@ -0,0 +1,6 @@
|
||||
|
||||
{% set alphabet = ["A", "B", "C", "Č", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "R", "S", "Š", "T", "U", "V", "Z", "Ž"] %}
|
||||
{% for i in range(0, alphabet|length - 1) %}
|
||||
<span><a href="javascript:get_words('{{ alphabet[i] }}')">{{ alphabet[i] }}</a>, </span>
|
||||
{% endfor %}
|
||||
<a href="javascript:get_words('{{ alphabet[-1] }}')">{{ alphabet[-1] }}</a>
|
@ -0,0 +1,16 @@
|
||||
<table>
|
||||
<tr>
|
||||
<td>{{ tid }}: </td>
|
||||
<td>{{ token }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Stavek: </td>
|
||||
<td class="token-sentence">{% for token in sentence %}
|
||||
<span title="{{ token[1] }}" class="{{ token[0] }}">{{ token[1]["word"] }}</span>
|
||||
{% endfor %}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>sense_ids: </td>
|
||||
<td>{{ sense_ids }}</td>
|
||||
</tr>
|
||||
</table>
|
@ -0,0 +1,12 @@
|
||||
<ul>
|
||||
{% for w in words %}
|
||||
<li>
|
||||
<a href="javascript:get_frames('{{ w[0] }}')">{{ w[0] + " (" + w[1]|string + ")" }}
|
||||
{# Star indicates sense data has been preprocessed. #}
|
||||
{% if w[2] %}
|
||||
{{ "*" }}
|
||||
{% endif %}
|
||||
</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
@ -0,0 +1 @@
|
||||
<!DOCTYPE html><html><head><meta charset=utf-8><meta name=viewport content="width=device-width,initial-scale=1"><title>vue_frontend</title><link href=/static/css/app.8cc8ad5cf4cf830949529795eff01e7f.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=/static/js/manifest.2ae2e69a05c33dfc65f8.js></script><script type=text/javascript src=/static/js/vendor.3ae5086c460a20dc799c.js></script><script type=text/javascript src=/static/js/app.abe281d88cb13e210c8b.js></script></body></html>
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,2 @@
|
||||
!function(r){var n=window.webpackJsonp;window.webpackJsonp=function(e,u,c){for(var f,i,p,a=0,l=[];a<e.length;a++)i=e[a],o[i]&&l.push(o[i][0]),o[i]=0;for(f in u)Object.prototype.hasOwnProperty.call(u,f)&&(r[f]=u[f]);for(n&&n(e,u,c);l.length;)l.shift()();if(c)for(a=0;a<c.length;a++)p=t(t.s=c[a]);return p};var e={},o={2:0};function t(n){if(e[n])return e[n].exports;var o=e[n]={i:n,l:!1,exports:{}};return r[n].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=r,t.c=e,t.d=function(r,n,e){t.o(r,n)||Object.defineProperty(r,n,{configurable:!1,enumerable:!0,get:e})},t.n=function(r){var n=r&&r.__esModule?function(){return r.default}:function(){return r};return t.d(n,"a",n),n},t.o=function(r,n){return Object.prototype.hasOwnProperty.call(r,n)},t.p="/",t.oe=function(r){throw console.error(r),r}}([]);
|
||||
//# sourceMappingURL=manifest.2ae2e69a05c33dfc65f8.js.map
|
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["webpack:///webpack/bootstrap 08aa727d7b9c76add5d3"],"names":["parentJsonpFunction","window","chunkIds","moreModules","executeModules","moduleId","chunkId","result","i","resolves","length","installedChunks","push","Object","prototype","hasOwnProperty","call","modules","shift","__webpack_require__","s","installedModules","2","exports","module","l","m","c","d","name","getter","o","defineProperty","configurable","enumerable","get","n","__esModule","object","property","p","oe","err","console","error"],"mappings":"aACA,IAAAA,EAAAC,OAAA,aACAA,OAAA,sBAAAC,EAAAC,EAAAC,GAIA,IADA,IAAAC,EAAAC,EAAAC,EAAAC,EAAA,EAAAC,KACQD,EAAAN,EAAAQ,OAAoBF,IAC5BF,EAAAJ,EAAAM,GACAG,EAAAL,IACAG,EAAAG,KAAAD,EAAAL,GAAA,IAEAK,EAAAL,GAAA,EAEA,IAAAD,KAAAF,EACAU,OAAAC,UAAAC,eAAAC,KAAAb,EAAAE,KACAY,EAAAZ,GAAAF,EAAAE,IAIA,IADAL,KAAAE,EAAAC,EAAAC,GACAK,EAAAC,QACAD,EAAAS,OAAAT,GAEA,GAAAL,EACA,IAAAI,EAAA,EAAYA,EAAAJ,EAAAM,OAA2BF,IACvCD,EAAAY,IAAAC,EAAAhB,EAAAI,IAGA,OAAAD,GAIA,IAAAc,KAGAV,GACAW,EAAA,GAIA,SAAAH,EAAAd,GAGA,GAAAgB,EAAAhB,GACA,OAAAgB,EAAAhB,GAAAkB,QAGA,IAAAC,EAAAH,EAAAhB,IACAG,EAAAH,EACAoB,GAAA,EACAF,YAUA,OANAN,EAAAZ,GAAAW,KAAAQ,EAAAD,QAAAC,IAAAD,QAAAJ,GAGAK,EAAAC,GAAA,EAGAD,EAAAD,QAKAJ,EAAAO,EAAAT,EAGAE,EAAAQ,EAAAN,EAGAF,EAAAS,EAAA,SAAAL,EAAAM,EAAAC,GACAX,EAAAY,EAAAR,EAAAM,IACAhB,OAAAmB,eAAAT,EAAAM,GACAI,cAAA,EACAC,YAAA,EACAC,IAAAL,KAMAX,EAAAiB,EAAA,SAAAZ,GACA,IAAAM,EAAAN,KAAAa,WACA,WAA2B,OAAAb,EAAA,SAC3B,WAAiC,OAAAA,GAEjC,OADAL,EAAAS,EAAAE,EAAA,IAAAA,GACAA,GAIAX,EAAAY,EAAA,SAAAO,EAAAC,GAAsD,OAAA1B,OAAAC,UAAAC,eAAAC,KAAAsB,EAAAC,IAGtDpB,EAAAqB,EAAA,IAGArB,EAAAsB,GAAA,SAAAC,GAA8D,MAApBC,QAAAC,MAAAF,GAAoBA","file":"static/js/manifest.2ae2e69a05c33dfc65f8.js","sourcesContent":[" \t// install a JSONP callback for chunk loading\n \tvar parentJsonpFunction = window[\"webpackJsonp\"];\n \twindow[\"webpackJsonp\"] = function webpackJsonpCallback(chunkIds, moreModules, executeModules) {\n \t\t// add \"moreModules\" to the modules object,\n \t\t// then flag all \"chunkIds\" as 
loaded and fire callback\n \t\tvar moduleId, chunkId, i = 0, resolves = [], result;\n \t\tfor(;i < chunkIds.length; i++) {\n \t\t\tchunkId = chunkIds[i];\n \t\t\tif(installedChunks[chunkId]) {\n \t\t\t\tresolves.push(installedChunks[chunkId][0]);\n \t\t\t}\n \t\t\tinstalledChunks[chunkId] = 0;\n \t\t}\n \t\tfor(moduleId in moreModules) {\n \t\t\tif(Object.prototype.hasOwnProperty.call(moreModules, moduleId)) {\n \t\t\t\tmodules[moduleId] = moreModules[moduleId];\n \t\t\t}\n \t\t}\n \t\tif(parentJsonpFunction) parentJsonpFunction(chunkIds, moreModules, executeModules);\n \t\twhile(resolves.length) {\n \t\t\tresolves.shift()();\n \t\t}\n \t\tif(executeModules) {\n \t\t\tfor(i=0; i < executeModules.length; i++) {\n \t\t\t\tresult = __webpack_require__(__webpack_require__.s = executeModules[i]);\n \t\t\t}\n \t\t}\n \t\treturn result;\n \t};\n\n \t// The module cache\n \tvar installedModules = {};\n\n \t// objects to store loaded and loading chunks\n \tvar installedChunks = {\n \t\t2: 0\n \t};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) 
{\n \t\t\tObject.defineProperty(exports, name, {\n \t\t\t\tconfigurable: false,\n \t\t\t\tenumerable: true,\n \t\t\t\tget: getter\n \t\t\t});\n \t\t}\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"/\";\n\n \t// on error function for async loading\n \t__webpack_require__.oe = function(err) { console.error(err); throw err; };\n\n\n\n// WEBPACK FOOTER //\n// webpack/bootstrap 08aa727d7b9c76add5d3"],"sourceRoot":""}
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,28 @@
|
||||
from setuptools import setup


# Packaging metadata for the `valency` module: tooling for building a
# valency lexicon of Slovenian verbs (corpus parsing, clustering, Flask API).
setup(
    name='Valency',
    version='0.1',
    description='Valency lexicon of slovenian verbs.',
    url='https://bitbucket.org/voje/diploma',
    author='Kristjan Voje',
    author_email='kristjan.voje@gmail.com',
    license='MIT',
    packages=['valency'],  # where to look for __init__.py
    # NOTE(review): `pyicu`/`pycld2` are polyglot dependencies and need
    # libicu-dev installed system-wide (see README).
    install_requires=[
        'bs4',
        'requests',
        'matplotlib',
        'flask',
        'nltk',
        'pymongo',
        'xmltodict',
        'scipy',
        'scikit-learn',
        'polyglot',
        'pyicu',
        'pycld2',
        'morfessor',
        'flask-cors',
    ]
)
|
@ -0,0 +1,101 @@
|
||||
#!/bin/bash
|
||||
|
||||
function echo_help () {
|
||||
echo "
|
||||
sherpa.sh
|
||||
--help
|
||||
--pack (zip watched files)
|
||||
--unpack (unzip watched files)
|
||||
--list (list watched files)
|
||||
--build_vue (requires npm, builds vue_frontend for production)
|
||||
"
|
||||
}
|
||||
|
||||
function frontend_config () {
|
||||
orig_path="$(pwd)"
|
||||
cd ./vue_frontend/config/
|
||||
if [[ $1 == "dev" ]]; then
|
||||
cp config_dev.json config.json
|
||||
elif [[ $1 == "pro" ]]; then
|
||||
cp config_pro.json config.json
|
||||
fi
|
||||
echo "Using config: "
|
||||
cat config.json
|
||||
cd "${orig_path}"
|
||||
}
|
||||
|
||||
# Build the Vue frontend for production and copy the bundle into the
# flask_app folder. Prompts the user to pick the API configuration
# (development = localhost, production = IPs from the vue config files).
function build_vue () {
    echo "
    API settings, pick a number:
    ----------------------------
    1) development (API on localhost)
    2) production (see conf. files in vue folder for IP settings)
    "
    read choice
    if [ "$choice" == 1 ]; then
        frontend_config "dev"
    # Fix: original read `[ $choice == 2]` — the missing space before `]`
    # makes the test always fail with a syntax error. Also quote $choice
    # so an empty input does not break the test.
    elif [ "$choice" == 2 ]; then
        frontend_config "pro"
    else
        exit
    fi
    orig_path="$(pwd)"
    cd ./vue_frontend
    npm run build
    cd "${orig_path}"
    # Replace any previous build output before copying the fresh dist/.
    if [ -d ./flask_app/vue/dist ]; then
        rm -r ./flask_app/vue/dist
    fi
    cp -r ./vue_frontend/dist/ ./flask_app/vue/
}
|
||||
|
||||
function unpack () {
|
||||
orig_path="$(pwd)"
|
||||
cd ../data/
|
||||
unzip ./no_del_pickles.zip
|
||||
cd "$orig_path"
|
||||
}
|
||||
|
||||
function pack () {
|
||||
orig_path="$(pwd)"
|
||||
cd ../data/
|
||||
zip -r no_del_pickles.zip ./no_del_pickles/
|
||||
rm -r ./no_del_pickles
|
||||
cd "$orig_path"
|
||||
}
|
||||
|
||||
if [[ "$#" -eq 0 ]]; then
|
||||
echo_help
|
||||
fi
|
||||
|
||||
while [[ "$#" -gt 0 ]]; do
|
||||
key="$1"
|
||||
case "$key" in
|
||||
--help)
|
||||
echo_help
|
||||
exit 0
|
||||
;;
|
||||
--pack)
|
||||
pack
|
||||
exit 0
|
||||
;;
|
||||
--unpack)
|
||||
unpack
|
||||
exit 0
|
||||
;;
|
||||
--build_vue)
|
||||
build_vue
|
||||
exit 0
|
||||
;;
|
||||
--frontend_config)
|
||||
shift
|
||||
arg1="$1"
|
||||
frontend_config "$arg1"
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
echo_help
|
||||
exit 0
|
||||
shift
|
||||
esac
|
||||
done
|
@ -0,0 +1,33 @@
|
||||
# Drevesna struktura direktorija:
|
||||
|
||||
* data -- vhodne datoteke (sskj, ssj500k, slownet) (prazno)
|
||||
* script -- izvorna koda
|
||||
* flask_app -- zaledni del, backend
|
||||
* app.py -- vhodna točka (main)
|
||||
* valency -- modul za obdelavo vhodnih korpusov (glavna logika)
|
||||
* seqparser -- orodje za razčlenjevanje vhodnega korpusa v .xml obliki
|
||||
* dictionary_interface.py -- vmesnik za delo s slovarji v MongoDB bazi
|
||||
* evaluation.py -- algoritmi za evalvacijo
|
||||
* rand_index()
|
||||
* clustering_purity()
|
||||
* ars() -- adjusted_rand_index sem uvozil iz sklearn.metrics
|
||||
* frame.py -- objekt za valenčni okvir
|
||||
* k_means.py -- algoritem k-voditeljev
|
||||
* k_means()
|
||||
* bkm() -- bisekcijski k-means
|
||||
* silhouette_wrapper -- izračun silhuetne ocene
|
||||
* kmeans_wrapper() -- kliče izbran algoritem k-means z različnimi vrednostmi K,
|
||||
s pomočjo funkcije silhouette_wrapper() izbere najbolj optimalen K
|
||||
* k_utils.py -- podporna orodja
|
||||
* slo_bucket_sort() -- sort za slovenske besede
|
||||
* stem_slo() -- približek korenjenja slovenskih besed
|
||||
* leskFour.py -- implementacija štirih verzij Leskovega algoritma
|
||||
* lesk_nltk()
|
||||
* lesk_sl()
|
||||
* lesk_al()
|
||||
* lesk_ram()
|
||||
* reduce_functions.py -- funkcije za združevanje vezljivostnih vzorcev (uporaljene v aplikaciji)
|
||||
* ssj_struct.py -- vmesni objekt za branje korpusa
|
||||
* sskj_scraper.py -- orodje za zbiranje podatkov iz spletnega SSKJ
|
||||
* val_struct.py() -- objekt, ki predstavlja prebrani korpus
|
||||
* vue_frontend -- uporabniški vmesnik
|
@ -0,0 +1 @@
|
||||
*.pickle
|
@ -0,0 +1,386 @@
|
||||
from valency import k_utils
|
||||
import logging
|
||||
from time import time
|
||||
from valency.k_utils import dict_safe_key as dsk
|
||||
from copy import deepcopy as DC
|
||||
|
||||
log = logging.getLogger(__name__)

# Upper limit for how many senses a lemma may have; lemmas with more
# entries are treated as too ambiguous and skipped.
GUL = 20
# Name of the MongoDB collection caching SloWnet sense glosses.
SLOWNET_CACHE = "slownet_glosses_cache"


class DictionaryInterface:
    """Common base for sense dictionaries stored in the vallex MongoDB."""

    def __init__(self, vallex, dictionary):
        # `vallex` owns the DB handle used by subclasses (self.vallex.db).
        self.vallex = vallex
        # Fix: previously hard-coded to "interface", silently ignoring the
        # name passed by subclasses ("sskj", "slownet"); cached_glosses()
        # filtered on the wrong dictionary as a result.
        self.dictionary = dictionary

    def find(self, lemma):
        """Return all raw DB entries for `lemma` (overridden by subclasses)."""
        return []

    def contains(self, lemma, upper_limit=GUL):
        """True iff `lemma` has 1..`upper_limit` entries in this dictionary."""
        # NOTE(review): original comment marks this "useless" — callers
        # should check whether sense_glosses() is non-empty instead.
        res = self.find(lemma)
        if upper_limit is not None and len(res) > upper_limit:
            return False
        # Fix: `len(res) is not 0` compared ints by identity (a
        # SyntaxWarning on modern Python); use a value comparison.
        return len(res) != 0

    def cached_glosses(self, lemma):
        # Preprocessed sense glosses (currently unused).
        res = list(self.vallex.db.cached_glosses.find(
            {"lemma": lemma, "dictionary": self.dictionary}))
        if len(res) == 0:
            return []
        return res[0]["glosses"]

    def sense_glosses(self, lemma):
        # Expected return shape (implemented by subclasses):
        # [{"gloss": ["<text>", ...], "def": ["<text>", ...]}, ...]
        return "dictionary_interface.py: not_yet_implemented"

    # Recursively pull strings out of a nested dict/list structure,
    # keeping only values stored under keys listed in `keys`.
    # Uses self.recursion_buffer as the accumulator.
    def pull_strings_wrapper(self, element, keys):
        if element is None:
            return []
        self.recursion_buffer = []
        self.pull_strings(element, keys)
        return self.recursion_buffer[:]

    def pull_strings(self, element, keys):
        # Recursively pull values out of a dict:
        # matching key + value as string, nested dict, or list thereof.
        for k, e in element.items():
            if k not in keys:
                continue
            if isinstance(e, dict):
                self.pull_strings(e, keys)
            elif isinstance(e, str):
                self.recursion_buffer.append(e)
            elif isinstance(e, list):
                for ee in e:
                    if isinstance(ee, dict):
                        self.pull_strings(ee, keys)
                    elif isinstance(ee, str):
                        self.recursion_buffer.append(ee)
|
||||
|
||||
|
||||
class Sskj(DictionaryInterface):
    """SSKJ dictionary entries (XML-derived documents in db.sskj)."""

    def __init__(self, vallex):
        super().__init__(vallex, "sskj")

    def find(self, lemma):
        # Match the headword (orth form) nested inside the XML structure.
        query = {"ns0:entry.ns0:form.ns0:orth": lemma}
        return list(self.vallex.db.sskj.find(query))

    def sense_glosses(self, lemma, upper_limit=GUL):
        """Return [{"gloss": [...], "def": [...]}, ...], one per sense."""
        entries = self.find(lemma)
        if not entries:
            return []
        if upper_limit is not None and len(entries) > upper_limit:
            log.info("sense_glosses({}): too many sense entries".format(lemma))
            return []
        senses = []
        for entry in entries:
            senses.extend(dsk(entry["ns0:entry"], "ns0:sense"))
        # Keys whose string values make up a gloss.
        keys = [
            "ns0:def", "ns0:cit", "ns0:quote",
            "ns0:gloss", "ns0:sense", "ns0:orth",
            "ns0:form", "#text"
        ]
        glosses = []
        for sense in senses:
            gloss = self.pull_strings_wrapper(sense, keys)
            if not gloss:
                continue
            glosses.append({
                "gloss": gloss,
                "def": self.pull_strings_wrapper(sense, ["ns0:sense", "ns0:def"]),
            })
        return glosses
|
||||
|
||||
|
||||
class SloWnet(DictionaryInterface):
    """Slovene WordNet (sloWNet) entries stored in db.slownet."""

    def __init__(self, vallex):
        super().__init__(vallex, "slownet")
        # Accumulator filled by the recursive hypernyms() walk.
        self.hypernym_buffer = []

    def slo_to_eng(self, lemma):
        """Map a Slovene lemma to the English literals of its synsets."""

        def helper_get_eng_lemmas(r):
            res = []
            for literal in dsk(r, "SYNONYM"):
                if literal["@xml:lang"] == "en":
                    for lt in dsk(literal, "LITERAL"):
                        res.append(lt["#text"])
            return res

        results = self.find(lemma)
        eng_lemmas = []
        for r in results:
            eng_lemmas.extend(helper_get_eng_lemmas(r))
        return eng_lemmas

    def helper_get_hypernyms(self, entry):
        # IDs of hypernym synsets linked from `entry` via ILR relations.
        res = []
        dd = dsk(entry, "ILR")
        for d in dd:
            if d["@type"] == "hypernym":
                res.append(d["#text"])
        return res

    def helper_get_en_literals(self, entry):
        # English literal strings of `entry`'s synonyms.
        res = []
        synonyms = dsk(entry, "SYNONYM")
        for syn in synonyms:
            if syn["@xml:lang"] == "en":
                literals = dsk(syn, "LITERAL")
                for lit in literals:
                    res.append(lit["#text"])
        return res

    def rek_root_chain(self, slownet_id):
        """Recursively collect English literals up the hypernym chain."""
        entry = self.find_by_id(slownet_id)
        if entry is None:
            return []
        res = self.helper_get_en_literals(entry)
        # Fix: hypernyms must be read from the fetched entry document;
        # the original passed the id string, but helper_get_hypernyms()
        # expects an entry dict (it looks up the "ILR" key).
        for hypernym_id in self.helper_get_hypernyms(entry):
            res.extend(self.rek_root_chain(hypernym_id))
        return res

    def root_chain(self, lemma):
        """English literals of `lemma` plus its whole hypernym chain (cached)."""
        cached = list(self.vallex.db.cached_root_chains.find({
            "lemma": lemma
        }))
        if cached:
            return cached[0]["data"]

        res = self.slo_to_eng(lemma)
        entries = self.find(lemma)
        start_hypernym_ids = []
        for ent in entries:
            start_hypernym_ids.extend(self.helper_get_hypernyms(ent))
        for shi in start_hypernym_ids:
            res.extend(self.rek_root_chain(shi))
        self.vallex.db.cached_root_chains.insert({
            "lemma": lemma,
            "data": res
        })
        return res

    def find_by_id(self, slownet_id):
        """Return the single synset entry with the given ID, or None."""
        res = list(self.vallex.db.slownet.find({"ID": slownet_id}))
        if len(res) == 0:
            log.error("ID: {} not in db.slownet.".format(slownet_id))
            return None
        return res[0]

    def find(self, lemma):
        # `slo_lemma` is a preprocessed field; the $elemMatch query below
        # (kept for reference) searched the raw SYNONYM structure instead.
        return list(self.vallex.db.slownet.find({"slo_lemma": lemma}))
    # elemMatch for array query (previous implementation):
    # res = list(self.vallex.db.slownet.find({
    #     "SYNONYM": {'$elemMatch': {
    #         "LITERAL": {'$elemMatch': {"#text": lemma}}
    #     }}
    # }))

    def hypernyms(self, slownet_id, level):
        """Walk hypernyms up to depth 3, appending to self.hypernym_buffer."""
        if level == 3:
            return
        elements = list(self.vallex.db.slownet.find({"ID": slownet_id}))
        if len(elements) == 0:
            return
        for e in elements:
            ei = self.extract_element_info(e)
            self.hypernym_buffer.append({
                "def": ei["domain"] + ei["def"],
                "gloss": ei["domain"] + ei["def"] + ei["usage"]
            })
            for ilr in ei["ilr"]:
                self.hypernyms(ilr, level + 1)

    def extract_element_info(self, e):
        """Pull domain / English definition / hypernym ids / usage from entry."""
        domain = []
        dd = dsk(e, "DOMAIN")
        for d in dd:
            domain.append(d)
        definition = []
        dd = dsk(e, "DEF")
        for d in dd:
            if d["@xml:lang"] == "en":
                definition.append(d["#text"])
        ilr = []
        dd = dsk(e, "ILR")
        for d in dd:
            if d["@type"] == "hypernym":
                ilr.append(d["#text"])
        usage = []
        dd = dsk(e, "USAGE")
        for d in dd:
            if d["@xml:lang"] == "en":
                usage.append(d["#text"])
        return {
            "domain": domain,
            "def": definition,
            "ilr": ilr,
            "usage": usage,
        }

    def sense_glosses(self, lemma, upper_limit=GUL):
        """Return [{"gloss": [...], "def": [...]}, ...] per synset (cached)."""
        # Cache lookup.
        db_key = {
            "lemma": lemma,
            "upper_limit": upper_limit
        }
        cache = list(self.vallex.db[SLOWNET_CACHE].find(db_key))
        if len(cache) > 0:
            return cache[0]["data"]

        entries = self.find(lemma)
        if upper_limit is not None and len(entries) > upper_limit:
            # log.info("sense_glosses({}): too many senses".format(lemma))
            return []
        ret_glosses = []
        for e in entries:
            defs = []
            glosses = []
            self.hypernym_buffer = []
            ei = self.extract_element_info(e)
            self.hypernym_buffer.append({
                "def": ei["domain"] + ei["def"],
                "gloss": ei["domain"] + ei["def"] + ei["usage"]
            })
            for ilr in ei["ilr"]:
                self.hypernyms(ilr, 1)

            [defs.extend(x["def"]) for x in self.hypernym_buffer]
            [glosses.extend(x["gloss"]) for x in self.hypernym_buffer]
            ret_glosses.append({
                "def": defs,
                "gloss": glosses,
            })

        # Cache store. Fix: the original wrote to db.slownet_sense_glosses
        # while the lookup above reads db["slownet_glosses_cache"], so the
        # cache never hit; write to the same collection it reads from.
        db_entry = {
            "lemma": db_key["lemma"],
            "upper_limit": db_key["upper_limit"],
            "data": ret_glosses
        }
        self.vallex.db[SLOWNET_CACHE].update(
            db_key, db_entry, upsert=True
        )
        return ret_glosses
|
||||
|
||||
|
||||
class Sskj2(DictionaryInterface):
    # SSKJ entries scraped into HTML-shaped documents (span/ol/li nesting);
    # documents carry "izt_clean" (clean headword) and "pos" fields.
    def __init__(self, vallex):
        super().__init__(vallex, "sskj")

    def find(self, lemma):
        # Verbs by default; a trailing "_" on the lemma marks an adjective.
        pos = "glagol"
        if lemma[-1] == "_":
            pos = "pridevnik"
        res = list(self.vallex.db.sskj.find({
            "izt_clean": lemma,
            "pos": pos
        }))
        return res

    def count_senses(self, lemma):
        # Number of senses of the FIRST entry: length of its <ol> list,
        # or 1 when the entry has no <ol> (single-sense entry), 0 if absent.
        entries = self.find(lemma)
        if len(entries) == 0:
            return 0
        ol = dsk(entries[0], "ol")
        if len(ol) == 0:
            return 1
        return len(ol[0]["li"])

    def sense_glosses(self, lemma, upper_limit=GUL):
        # Build [{"sskj_sense_id": "...", "gloss": [...], "def": [...]}, ...]
        # from the scraped HTML structure. Sense ids are composed as
        # "entry_id-sskj_sense_id-n_senses" (see the renumbering pass below).

        def helper_dict_safe_add(dic, key, el):
            # Append `el` to dic[key], creating the list on first use.
            if key not in dic:
                dic[key] = []
            dic[key].append(el)

        def helper_pull_rec(el_lst, res_dct):
            # Recursively collect title→text pairs from span/ol/li nesting.
            for el in el_lst:
                if isinstance(el, dict):
                    if ("@title" in el) and ("#text" in el):
                        helper_dict_safe_add(
                            res_dct, el["@title"], el["#text"])
                    if "span" in el:
                        helper_pull_rec(dsk(el, "span"), res_dct)
                    if ("ol" in el) and ("li" in el["ol"]):
                        helper_pull_rec(el["ol"]["li"], res_dct)
                    if "li" in el:
                        helper_pull_rec(el["li"], res_dct)

        entries = self.find(lemma)
        if len(entries) == 0:
            return []
        if len(entries) > 1:
            log.warning("{} entries for {} in sskj2.".format(
                len(entries), lemma))
        glosses_per_entry = []
        for idx, entry in enumerate(entries):
            res_dict = {}
            if "span" in entry:
                helper_pull_rec(dsk(entry, "span"), res_dict)
            # senses
            res_dict["senses"] = []
            if ("ol" in entry) and ("li" in entry["ol"]):
                for el in dsk(entry["ol"], "li"):
                    # First span of each <li> carries the sense id label.
                    tmp = {"sskj_sense_id": el["span"][0]}
                    helper_pull_rec(dsk(el, "span"), tmp)
                    helper_pull_rec(dsk(el, "ol"), tmp)
                    res_dict["senses"].append(DC(tmp))

            def helper_create_gloss(dct):
                # Gloss = explanation + example + side explanation + synonym
                # (Slovene field titles as scraped from the SSKJ page).
                keys = ["Razlaga", "Zgled", "Stranska razlaga", "Sopomenka"]
                ret = []
                for k in keys:
                    ret.extend(dsk(dct, k))
                return ret

            glosses = []
            n_senses = len(res_dict["senses"])
            if n_senses == 0:
                # Single-sense entry: synthesize id "1-1" from entry-level data.
                # NOTE(review): this `return` exits the whole method from
                # inside the entries loop, skipping later entries and the
                # entry-id renumbering below — confirm this is intended.
                glosses.append({
                    "sskj_sense_id": "1-1",
                    "gloss": helper_create_gloss(res_dict),
                    "def": dsk(res_dict, "Razlaga")
                })
                return glosses

            for sense in res_dict["senses"]:
                glosses.append({
                    "sskj_sense_id": "{}-{}".format(
                        sense["sskj_sense_id"], n_senses),
                    "gloss": helper_create_gloss(sense),
                    "def": dsk(sense, "Razlaga")
                })
            glosses_per_entry.append(glosses)

        # add entry_id before the_sense id
        # entry_id-sskj_sense_id-n_senses
        all_glosses = []
        for idx, glosses in enumerate(glosses_per_entry):
            entry_id = idx + 1  # start with 1
            for gloss in glosses:
                gloss["sskj_sense_id"] = "{}-{}".format(
                    entry_id, gloss["sskj_sense_id"])
                all_glosses.append(gloss)
        return all_glosses
|
@ -0,0 +1,96 @@
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Frame():
    """A valency frame: one set of headword occurrences plus its role slots."""

    def __init__(self, tids, deep_links=None, slots=None, hw=None):
        self.hw = hw
        # Token ids ("S123.t123") of occurrences sharing the same headword;
        # the sentence can be fetched with vallex.get_sentence(S123).
        self.tids = tids
        if slots is None:
            # No prebuilt slots: derive one slot per deep (SRL) link.
            self.slots = self.init_slots(deep_links)
        else:
            self.slots = slots
        self.sense_info = {}
        # Filled in later (app.py, get_frames) before rendering.
        self.sentences = None
        self.aggr_sent = None  # dictionary { hw: self.sentences idx }

    def to_json(self):
        """Serialize the frame, including its slots, to a plain dict."""
        return {
            "hw": self.hw,
            "tids": self.tids,
            "slots": [slot.to_json() for slot in self.slots],
            "sentences": self.sentences,
            "aggr_sent": self.aggr_sent,
            "sense_info": self.sense_info,
        }

    def init_slots(self, deep):
        """Create a single-tid Slot for every deep link."""
        return [
            Slot(functor=link["functor"], tids=[link["to"]])
            for link in deep
        ]

    def sort_slots(self):
        """Order slots: ACT and PAT first, then the rest alphabetically."""
        lead = sorted(
            (s for s in self.slots if s.functor in ("ACT", "PAT")),
            key=lambda s: s.functor,
        )
        rest = sorted(
            (s for s in self.slots if s.functor not in ("ACT", "PAT")),
            key=lambda s: s.functor,
        )
        self.slots = lead + rest

    def to_string(self):
        """Human-readable dump used for debugging."""
        parts = ["Frame:\n"]
        parts.append("sense_info: {}\n".format(str(self.sense_info)))
        parts.append("tids: [")
        parts.extend(str(t) + ", " for t in self.tids)
        parts.append("]\n")
        if self.slots is not None:
            parts.append("slots:\n")
            parts.extend(sl.to_string() + "\n" for sl in self.slots)
        return "".join(parts)
|
||||
|
||||
|
||||
class Slot():
    """One argument slot of a frame, identified by its functor (ACT, PAT, ...).

    A slot aggregates the token ids of every token that filled this
    functor, plus a merge counter used by the reduce functions.
    """

    def __init__(self, functor, tids=None, count=None):
        self.functor = functor
        self.tids = tids or []   # token ids filling this slot
        self.count = count or 1  # how many raw slots were merged into this one

    def to_string(self):
        """Human-readable dump used for debugging."""
        ret = "---- Slot:\n"
        ret += "functor: {}\n".format(self.functor)
        ret += "tids: ["
        for t in self.tids:
            ret += (str(t) + ", ")
        ret += "]\n"
        # Bug fix: the closing "]\n" used to be emitted twice here.
        ret += "----\n"
        return ret

    def to_json(self):
        """Serialize the slot to a plain dict."""
        ret = {
            "functor": self.functor,
            "tids": self.tids,
            "count": self.count
        }
        return ret
|
@ -0,0 +1,367 @@
|
||||
import os
|
||||
import pickle
|
||||
import nltk
|
||||
import random
|
||||
from time import time
|
||||
import string
|
||||
from polyglot.text import Word
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
sno = nltk.stem.SnowballStemmer("english")
|
||||
|
||||
|
||||
def dict_safe_key(dic, key):
    """Look up *key* in *dic*, always returning a list.

    A single (non-list) value is wrapped in a one-element list; a missing
    key — or dic being None — yields an empty list.
    """
    if dic is None or key not in dic:
        return []
    value = dic[key]
    return value if isinstance(value, list) else [value]
|
||||
|
||||
|
||||
def pickle_dump(data, path):
    """Pickle *data* to the binary file *path*; logs and returns True."""
    with open(path, "wb") as file:
        pickle.dump(data, file)
    log.info("Dumped data to {}.".format(path))
    return True
|
||||
|
||||
|
||||
def pickle_load(path):
    """Unpickle and return the object stored at *path*.

    Returns None when the file does not exist.
    """
    ret = None
    if os.path.isfile(path):
        with open(path, "rb") as file:
            ret = pickle.load(file)
        log.info("Loaded data from {}.".format(path))
    return ret  # Returns None in case of failure.
|
||||
|
||||
|
||||
# Implemented bucket sort for alphabetically sorting slovenian words.
|
||||
# Bucket sort >>>>>>>>>>>>>>>>>>>>
|
||||
def gen_sbs_alphabet():
    """Map each Slovene alphabet letter to its 1-based position."""
    letters = "abcčdefghijklmnoprsštuvzž"
    return dict(zip(letters, range(1, len(letters) + 1)))
|
||||
|
||||
|
||||
slo_bucket_sort_alphabet = gen_sbs_alphabet()
|
||||
|
||||
|
||||
def slo_bucket_sort(words, key=None):
    """LSD radix (bucket) sort of *words* by the Slovene alphabet.

    :param words: list of items to sort
    :param key: optional function extracting the sort string from an item
    :return: a new, alphabetically sorted list; characters outside the
        Slovene alphabet, and positions past a word's end, sort first
    """
    if key is None:
        def key(x):
            return x

    def alph_score(word, idx):
        # Rank (1-based) of the idx-th letter; 0 for short words or
        # characters not in the Slovene alphabet.
        kword = key(word)
        if idx >= len(kword):
            return 0
        return slo_bucket_sort_alphabet.get(kword[idx]) or 0

    def list_to_bins(words, idx):
        # Distribute words into one bin per alphabet rank (bin 0 = other).
        bins = [[] for i in range(len(slo_bucket_sort_alphabet.keys()) + 1)]
        for word in words:
            bins[alph_score(word, idx)].append(word)
        return bins

    def bins_to_list(bins):
        # Concatenate the bins back into a flat list.
        lst = []
        for b in bins:
            for el in b:
                lst.append(el)
        return lst

    # Classic LSD radix sort: bucket by the last character position first.
    maxLen = 0
    for w in words:
        if len(key(w)) > maxLen:
            maxLen = len(key(w))
    maxIdx = maxLen - 1
    for idx in range(maxIdx, -1, -1):
        bins = list_to_bins(words, idx)
        words = bins_to_list(bins)
        """
        print(idx)
        def get_letter(idx, word):
            kword = key(word)
            if idx < len(kword):
                return(kword[idx])
            return "#"
        print([(word, get_letter(idx, word)) for word in words])
        """
    return words
|
||||
# Bucket sort <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
|
||||
def stem_slo(x):
    """Crude Slovene stemmer: join all but the last polyglot morpheme.

    The original comment says "remove the last syllable" — morphemes are
    only an approximation of syllables.
    """
    w = Word(x, language="sl").morphemes
    ret = "".join(w[:-1])
    return ret
|
||||
|
||||
|
||||
def stem_eng(x):
    """Stem an English token with NLTK's Snowball stemmer."""
    return sno.stem(x)
|
||||
|
||||
|
||||
def tokenize(sentence, min_token_len=3, stem=None):
    """Split *sentence* into lowercased (optionally stemmed) tokens.

    Pure punctuation tokens are dropped, as are tokens shorter than
    min_token_len after stemming.

    :param sentence: input string; may contain several sentences
    :param min_token_len: minimum kept token length; None means no limit
        (the previous version raised TypeError on None, although
        t_tokenize calls it that way)
    :param stem: optional stemming function; identity when None
    :return: list of token strings
    """
    if stem is None:
        def stem(x):
            return x
    if min_token_len is None:
        min_token_len = 0  # keep everything; len(x) >= None would raise
    all_tokens = []
    sent_txt = nltk.sent_tokenize(sentence)
    for sent in sent_txt:
        tokens = nltk.word_tokenize(sent)
        all_tokens.extend(tokens)
    res = []
    for x in all_tokens:
        if x in string.punctuation:
            continue
        stemmed = stem(x.lower())
        if len(stemmed) >= min_token_len:
            res.append(stemmed)
    return res
|
||||
|
||||
|
||||
def tokenize_multiple(str_list, min_token_len=3, stem=None):
    """Tokenize every string in *str_list*; return one flat token list."""
    tokens = []
    for sentence in str_list:
        tokens += tokenize(sentence, min_token_len, stem)
    return tokens
|
||||
|
||||
|
||||
def t_tokenize():
    # Manual smoke test: prints the tokenizer's input and output.
    # NOTE(review): relies on tokenize() accepting min_token_len=None —
    # confirm that path is supported.
    teststring = "This is a test sentence. I hope it works. .. Asdf. asdf ,,,;"
    print(teststring)
    res = tokenize(teststring, min_token_len=None)
    print(res)
|
||||
|
||||
|
||||
def permute_paths(list2d, x=None, y=None, paths=None, current_path=None):
    """Enumerate every path through *list2d*, one element per row.

    A path is a list of (row, column) index pairs — one pair per row.

    :param list2d: list of rows, each a list of options
    :param x: current row index (internal; starts at -1)
    :param y: unused; kept for call compatibility
    :param paths: accumulator of finished paths (internal)
    :param current_path: indices chosen so far (internal)
    :return: list of all paths
    """
    # Mutable defaults are created per call, not in the signature.
    if x is None:
        x = -1
    if paths is None:
        paths = []
    if current_path is None:
        current_path = []

    if x >= len(list2d) - 1:
        paths.append(current_path)
        return paths
    for i in range(len(list2d[x + 1])):
        tmp_path = current_path + [(x + 1, i)]
        # The output grows multiplicatively with row widths; a random
        # pruning step used to live here (dead `if True:` wrapper removed):
        # if random.randint(0, 100) <= (100 - 10 * (len(list2d) - 5)):
        paths = permute_paths(
            list2d,
            x + 1,
            i,
            paths,
            tmp_path
        )
    return paths
|
||||
|
||||
|
||||
def t_permute_paths():
    # Manual test: print every sentence built from the option table.
    list2d = [
        ["Greta"],
        ["backflips"],
        ["through", "around"],
        ["North Korea", "kindergarten"],
        ["with", "without"],
        ["a"],
        ["bag of", "abundance of"],
        ["bolts", "janitors"]
    ]

    print(list2d)
    paths = permute_paths(list2d=list2d)
    for path in paths:
        print([list2d[p[0]][p[1]] for p in path])
|
||||
|
||||
|
||||
def find_overlaps(list_a, list_b):
    """Find common contiguous sublists of two lists.

    For every element value present in both lists, each pair of positions
    is extended forwards and backwards as far as the lists keep matching;
    the resulting runs are deduplicated (by string representation) and
    returned as a list of lists.
    """
    # Index every value: dicts map value -> list of positions.
    dict_a = {}
    dict_b = {}
    lists = [list_a, list_b]
    dicts = [dict_a, dict_b]
    for lidx in range(len(lists)):
        for elidx in range(len(lists[lidx])):
            el = lists[lidx][elidx]
            if el not in dicts[lidx]:
                dicts[lidx][el] = []
            dicts[lidx][el].append(elidx)

    substrings = []

    sda = sorted(dict_a.keys())
    sdb = sorted(dict_b.keys())

    # Merge-walk the two sorted key lists to find values both lists share.
    i_sda = 0
    i_sdb = 0
    while ((i_sda < len(sda) and i_sdb < len(sdb))):
        if sda[i_sda] == sdb[i_sdb]:
            lia = dict_a[sda[i_sda]]
            lib = dict_b[sdb[i_sdb]]
            # For each pair of occurrences, grow the match in both
            # directions around the anchor position.
            for llia in lia:
                for llib in lib:
                    tmp_substr = []
                    ii = 0
                    # Extend forwards (includes the anchor itself).
                    while (
                        (llia + ii < len(list_a)) and
                        (llib + ii < len(list_b)) and
                        (list_a[llia + ii] == list_b[llib + ii])
                    ):
                        tmp_substr.append(list_a[llia + ii])
                        ii += 1
                    ii = 1
                    # Extend backwards, prepending matches.
                    while (
                        (llia - ii >= 0) and
                        (llib - ii >= 0) and
                        (list_a[llia - ii] == list_b[llib - ii])
                    ):
                        tmp_substr.insert(0, list_a[llia - ii])
                        ii += 1
                    substrings.append(tmp_substr)
        # On equality this advances i_sdb (the else branch below).
        if sda[i_sda] < sdb[i_sdb]:
            i_sda += 1
        else:
            i_sdb += 1

    # Deduplicate runs by their string representation, keeping order.
    uniques = set()
    res = []
    for ss in substrings:
        if str(ss) not in uniques:
            uniques.add(str(ss))
            res.append(ss)
    return res
|
||||
|
||||
|
||||
def find_overlaps_str(tokens_a, tokens_b):
    """Find common n-grams (n = 1..4) of two token lists, strings only.

    Longer overlaps are kept first; shorter ones already contained in a
    kept phrase are dropped. Returns a list of token lists.
    """
    def ngrams(tokens, n):
        return [tuple(tokens[j:j + n]) for j in range(len(tokens))
                if j + n <= len(tokens)]

    shared = []
    for n in range(1, 5):
        shared.extend(list(set(ngrams(tokens_a, n)).intersection(
            set(ngrams(tokens_b, n)))))

    kept = []
    for ngram in sorted(shared, key=lambda x: len(x), reverse=True):
        phrase = " ".join(ngram)
        if not any(phrase in longer for longer in kept):
            kept.append(phrase)
    return [phrase.split(" ") for phrase in kept]
|
||||
|
||||
|
||||
def t_find_overlaps():
    # Rough timing benchmark of find_overlaps on random integer lists.
    res = []
    input_len = [10, 100, 1000, 10000]
    for ll in input_len:
        alen = ll + int(ll * random.uniform(0.8, 1))
        blen = ll + int(ll * random.uniform(0.8, 1))
        a = [random.randint(0, 100) for x in range(alen)]
        b = [random.randint(0, 100) for x in range(blen)]
        tstart = time()
        find_overlaps(a, b)
        res.append({
            "time": time() - tstart,
            "input_size": ll
        })
    """
    list_a = [6, 6, 4, 8, 3, 2, 2, 5, 6, 3, 4, 7, 5]
    list_b = [5, 3, 6, 8, 6, 6, 5, 3, 2, 6, 7, 8, 3, 2, 3, 2, 2, 5]
    res = find_overlaps(list_a, list_b)
    """
    for r in res:
        print(r)
|
||||
|
||||
|
||||
def t1_find_overlaps():
    # Compare find_overlaps and find_overlaps_str on two short sentences.
    t1 = "This is a test sentence. I hope it works. .. Asdf. asdf ,,,;"
    t2 = "this is a seconde sentence. I hope my stuff works."
    print(t1)
    print(t2)
    res = find_overlaps(tokenize(t1), tokenize(t2))
    for r in res:
        print(r)

    print()

    res = find_overlaps_str(tokenize(t1), tokenize(t2))
    for r in res:
        print(r)
|
||||
|
||||
|
||||
def t_find_overlaps_str():
    # NOTE(review): despite its name this calls find_overlaps, not
    # find_overlaps_str — confirm which one it was meant to exercise.
    t1 = [
        'vsa', 'moja', 'možganska', 'beda', 'se', 'združuje',
        'v', 'dejstvu', 'da', 'sem', 'si', 'čeprav', 'sem', 'pozabil',
        'ulico', 'zapomnil', 'hišno', 'številko'
    ]
    t2 = [
        'narediti', 'doseči', 'da', 'se', 'kaj', 'aktivno', 'ohrani',
        'v', 'zavesti', 'zapomniti', 'si', 'imena', 'predstavljenih',
        'gostov', 'dobro', 'natančno', 'slabo', 'si', 'kaj', 'zapomniti',
        'takega', 'sem', 'si', 'zapomnil', 'zapomnite', 'te', 'prizore'
    ]
    res = find_overlaps(t1, t2)
    print(res)
|
||||
|
||||
|
||||
def t_slo_bucket_sort():
    # Sort a fixture word list twice — plain and as (word, int) pairs with
    # a key function — and verify both orders agree.
    a1 = []
    a2 = []
    with open("./tests/m_besede2.txt") as f:
        for line in f:
            a1.append(line.split("\n")[0])
            a2.append((line.split("\n")[0], random.randint(0, 9)))

    a1 = slo_bucket_sort(a1)
    a2 = slo_bucket_sort(a2, key=lambda x: x[0])

    check = True
    for i in range(len(a1)):
        check &= (a1[i] == a2[i][0])
        print("{:<10}{:>10}".format(str(a1[i]), str(a2[i])))
    print(check)
|
||||
|
||||
|
||||
def t1_slo_bucket_sort():
    # Small inline fixture including non-alphabet characters and spaces.
    words = "_xyz zebra. .bober raca bor borovnica antilopa".split(" ")
    words.append("test space")
    words.append("test srrrr")
    words.append("test saaa")
    for w in slo_bucket_sort(words):
        print(w)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Ad-hoc test entry point; uncomment the check you want to run.
    # t_find_overlaps()
    # t1_find_overlaps()
    # t_tokenize()
    # t_find_overlaps_str()
    t1_slo_bucket_sort()
|
@ -0,0 +1,247 @@
|
||||
import pymongo
|
||||
import xmltodict
|
||||
import xml.etree.ElementTree as ET
|
||||
from time import time
|
||||
import json
|
||||
from valency.sskj_scraper import SskjScraper
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
# Get rid of accented characters.
|
||||
intab = "ÁÉÍÓÚàáäçèéêìíîñòóôöùúüčŔŕ"
|
||||
outtb = "AEIOUaaaceeeiiinoooouuučRr"
|
||||
transtab = str.maketrans(intab, outtb)
|
||||
|
||||
|
||||
def mongo_test():
    """Smoke-test the Mongo connection by printing one doc from texts.test.

    NOTE(review): host, port and credentials are hard-coded here — move
    them to configuration/environment.
    """
    client = pymongo.MongoClient(
        "mongodb://{}:{}@127.0.0.1:26633/texts".format("kristjan", "simple567")
    )

    db = client.texts

    coll = db.test

    print(coll.find_one())
|
||||
|
||||
|
||||
def basic_connection(ip_addr=None, port=None):
    """Connect to the `texts` Mongo database.

    :param ip_addr: Mongo host, defaults to 127.0.0.1
    :param port: Mongo port, defaults to 26644
    :return: tuple (db, err_msg); db is None when the server cannot be
        reached (err_msg then carries the exception), otherwise "OK"
    NOTE(review): credentials are hard-coded — move to configuration.
    """
    if ip_addr is None:
        ip_addr = "127.0.0.1"
    if port is None:
        port = 26644
    client = pymongo.MongoClient(
        "mongodb://{}:{}@{}:{}/texts".format(
            "kristjan", "simple567", ip_addr, str(port))
    )
    err_msg = "OK"
    try:
        # Force a round trip so connection errors surface here, not later.
        client.server_info()
    except pymongo.errors.ServerSelectionTimeoutError as err:
        err_msg = err
        return (None, err_msg)
    db = client.texts
    return (db, err_msg)
|
||||
|
||||
|
||||
def check_collections(db, coll_names):
    """Ensure every collection named in *coll_names* exists in *db*."""
    existing = db.collection_names()
    for name in coll_names:
        if name not in existing:
            db.create_collection(name)
|
||||
|
||||
|
||||
def prepare_user_tokens(db):
    """(Re)create the v2_user_tokens collection with a TTL index on `date`."""
    CNAME = "v2_user_tokens"
    db[CNAME].drop()
    db.create_collection(CNAME)
    # NOTE(review): 151200 s is 1.75 days, not 2 — the value and the
    # comment disagree (2 days would be 172800); confirm the intended TTL.
    EXPIRE = 151200  # 2 days
    # EXPIRE = 10  # 10 sec
    db[CNAME].ensure_index("date", expireAfterSeconds=EXPIRE)
|
||||
|
||||
# user this: utc_timestamp = datetime.datetime.utcnow()
|
||||
# user_tokens.insert({
|
||||
# '_id': 'utc_session', "date": utc_timestamp,
|
||||
# "session": "test session"})
|
||||
|
||||
|
||||
def sskj_to_mongo(sskj_path):
    """Import the TEI sskj XML into the `sskj` collection, one doc per entry.

    Deprecated — use sskj2_to_mongo.
    FIXME(review): basic_connection() returns a (db, err_msg) tuple, so
    `db.collection_names()` below would raise AttributeError on the tuple;
    unpack the result before use.
    """
    # Deprecated, use sskj2_to_mongo
    ns = {"tei": "http://www.tei-c.org/ns/1.0"}
    ts = time()
    sskj = ET.parse(sskj_path).getroot()
    db = basic_connection()
    col_names = ["sskj"]
    for cn in col_names:
        if cn in db.collection_names():
            db[cn].drop()
    text = sskj.find("tei:text", ns)
    body = text.find("tei:body", ns)
    n_ent = 0
    for entry in body.findall("tei:entry", ns):
        n_ent += 1
        tmpstr = ET.tostring(entry)
        datachunk = xmltodict.parse(tmpstr)
        # Round-trip through json to normalize OrderedDicts.
        dictchunk = json.loads(json.dumps(datachunk))
        """
        pp = pprint.PrettyPrinter()
        pp.pprint(dictchunk)
        """
        db.sskj.insert(dictchunk)
    # query example: db.sskj.find({'ns0:entry.ns0:form.ns0:orth':"kaplanček"})
    print("sskj to mongo: {} entries in {:.2f}s".format(n_ent, time() - ts))
|
||||
|
||||
|
||||
def slownet_to_mongo(slw_path):
    """Import sloWNet synsets into the `slownet` collection.

    Adds a top-level ``slo_lemma`` field (list of Slovene literals) to
    each synset document for faster querying.
    FIXME(review): basic_connection() returns a (db, err_msg) tuple, so
    `db.collection_names()` below would fail on the tuple.
    FIXME(review): slo_to_id stays empty but is inserted into
    db.slo_to_id at the end — confirm whether it was meant to be filled.
    """
    ts = time()
    slownet = ET.parse(slw_path).getroot()
    db = basic_connection()
    col_names = ["slownet_map", "slownet"]
    for cn in col_names:
        if cn in db.collection_names():
            db[cn].drop()

    slo_to_id = {}
    for synset in slownet.findall("SYNSET"):
        tmpstr = ET.tostring(synset)
        datachunk = xmltodict.parse(tmpstr)
        # Round-trip through json to normalize OrderedDicts.
        dictchunk = json.loads(json.dumps(datachunk))
        dictchunk = dictchunk["SYNSET"]
        # pp.pprint(dictchunk)

        # Collect Slovene literals into the top-level slo_lemma list.
        if "SYNONYM" in dictchunk:
            synonyms = dictchunk["SYNONYM"]
            if not isinstance(synonyms, list):
                synonyms = [synonyms]
            for syn in synonyms:
                if syn["@xml:lang"] == "sl":
                    if "LITERAL" in syn:
                        literals = syn["LITERAL"]
                        if not isinstance(literals, list):
                            literals = [literals]
                        for lit in literals:
                            slo_keyword = lit["#text"]
                            # Skip literals containing "." — presumably
                            # abbreviations; confirm the intent.
                            if "." in slo_keyword:
                                continue
                            if "slo_lemma" not in dictchunk:
                                dictchunk["slo_lemma"] = []
                            dictchunk["slo_lemma"].append(slo_keyword)
        db.slownet.insert(dictchunk)

    # pp.pprint(slo_to_id)
    db.slownet.ensure_index([("id", pymongo.ASCENDING)])
    db.slo_to_id.insert(slo_to_id)
    print("sloWNet to mongo in {:.2f}s".format(time() - ts))
|
||||
|
||||
|
||||
def scrape_sskj():
    """Walk the keys in slo_to_id and scrape sskj data for each word.

    Deprecated. Resumes after `last_word`: words before it are assumed
    already scraped; the lock flips off when the word is reached.
    NOTE(review): credentials are hard-coded; also sorted(find_one())
    iterates the document's keys — confirm that is the intended word list.
    """
    # Deprecated!
    client = pymongo.MongoClient(
        "mongodb://{}:{}@127.0.0.1:26633/texts".format("kristjan", "simple567")
    )
    db = client.texts
    words_list = sorted(db.slo_to_id.find_one())

    print(len(words_list))
    sscraper = SskjScraper()

    # Resume point: re-scrape last_word and everything after it.
    last_word = "nogometaš"
    db.scraped_sskj.remove({"word": last_word})
    lock = True
    for word in words_list:
        if word == last_word:
            lock = False

        if not lock:
            res = sscraper.scrape(word)
            if len(res) > 0:
                db.scraped_sskj.insert({"word": word, "bag": res})
|
||||
|
||||
|
||||
def sskj2_to_mongo(sskj2_path):
    """Parse an SSKJ2 HTML export and insert one document per entry into
    the ``sskj`` collection.

    Each document gets two derived fields used by later lookups:
    ``pos`` — the majority part-of-speech keyword found among span
    attributes — and ``izt_clean`` — the accent-stripped, lowercased
    headword, with a trailing " se" split off into ``has_se``.
    """
    tstart = time()

    # Bug fix: basic_connection() returns a (db, err_msg) tuple; the
    # previous version assigned the tuple itself to `db` and crashed.
    db, err_msg = basic_connection()
    if db is None:
        print("sskj2_to_mongo: connection failed: {}".format(err_msg))
        return None

    # Drop the target collection so re-imports do not duplicate entries.
    # (Inserts below go to db.sskj; the old code dropped "sskj2" instead.)
    col_names = ["sskj"]
    for cn in col_names:
        if cn in db.collection_names():
            db[cn].drop()

    with open(sskj2_path) as f:
        soup = BeautifulSoup(f.read(), "html.parser")

    divs = soup.find_all("div")
    for i, div in enumerate(divs):
        if i % 100 == 0:
            print("{}/{}".format(i, len(divs)))  # progress indicator
        datachunk = xmltodict.parse(str(div))
        datachunk = datachunk["div"]

        # pos (part of speech): count keyword hits in span attributes and
        # keep the most frequent one.
        pos_keywords = {
            "samostalnik": 0,
            "pridevnik": 0,
            "glagol": 0,
            "prislov": 0,
            "predlog": 0,
            "členek": 0,
            "veznik": 0,
            "medmet": 0,
            "povedkovnik": 0
        }
        for span in div.find_all("span"):
            attrs = [e for k, e in span.attrs.items()]
            for attr in attrs:
                for ak in attr.split(" "):
                    akl = ak.lower()
                    if akl in pos_keywords:
                        pos_keywords[akl] += 1
        pos = "unknown"  # fallback sentinel (fixed typo "unknonw")
        pos_max = 0
        for k, e in pos_keywords.items():
            if e > pos_max:
                pos = k
                pos_max = e
        datachunk["pos"] = pos

        # izt_clean: normalized headword; strip trailing " se" into has_se.
        izts = div.find_all("span", {"title": "Iztočnica"})
        if len(izts) == 0:
            print("Entry {} has no Iztočnica.".format(i))
            continue
        izt = ((izts[0].text).translate(transtab)).lower()
        ispl = izt.split(" ")
        has_se = False
        if len(ispl) and ispl[-1] == "se":
            izt = " ".join(ispl[:-1])
            has_se = True
        datachunk["izt_clean"] = izt
        datachunk["has_se"] = has_se

        # Round-trip through json to normalize OrderedDicts for Mongo.
        dictchunk = json.loads(json.dumps(datachunk))
        db.sskj.insert(dictchunk)

    db.sskj.create_index([("izt_clean", pymongo.TEXT)])
    print("sskj2 to mongo: {} entries in {:.2f}s".format(
        len(divs), time() - tstart))
    return None
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Ad-hoc import entry point; uncomment the step you need.
    # slownet_path = "../../data/slownet/slownet-2015-05-07.xml"
    # slownet_to_mongo(slownet_path)

    # scrape_sskj()

    # sskj_path = "../../data/sskj/sskj.p5.xml"
    # sskj_to_mongo(sskj_path)

    # first file for testing, the original file takes up most of RAM
    # sskj2_path = "../../data/sskj/sskj2_200.html"
    # sskj2_path = "../../data/sskj/sskj2_v1.html"
    # sskj2_to_mongo(sskj2_path)

    print("nothing here")
|
@ -0,0 +1,239 @@
|
||||
# Reduction function for frames.
|
||||
# Input: list of Frame objects, output: list of Frame objects.
|
||||
# App uses reduce_0, 1 and 5
|
||||
|
||||
from valency.frame import Frame, Slot
|
||||
from copy import deepcopy as DC
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
SENSE_UNDEFINED = "nedefinirano"
|
||||
|
||||
|
||||
def sorted_by_len_tids(frames):
    """Return *frames* ordered by number of tids, largest group first."""
    return sorted(frames, key=lambda frame: len(frame.tids), reverse=True)
|
||||
|
||||
|
||||
def reduce_0(frames, vallex=None):
    """One frame per sentence, ordered by functor signature.

    Runs reduce_1 first, then splits each merged frame back into
    single-tid frames and sorts them by the concatenation of their slot
    functors, so sentences with the same pattern end up adjacent.
    """
    r1_frames = reduce_1(frames)
    sorting_strings = []
    separated_frames = []
    for frame in r1_frames:
        for tid in frame.tids:
            tmp_frame = DC(frame)
            tmp_frame.tids = [tid]
            separated_frames.append(tmp_frame)
            sorting_strings.append("".join(
                [slot.functor for slot in tmp_frame.slots]
            ))
    # Argsort by functor string, then apply the permutation.
    permutation = [x for _, x in sorted(
        zip(sorting_strings, range(len(sorting_strings))))]
    sorted_sep_frames = [separated_frames[i] for i in permutation]
    return sorted_sep_frames
|
||||
|
||||
|
||||
def reduce_1(frames, vallex=None):
    """Combine frames with the same set of functors (order ignored)."""
    frame_sets = []  # items: [functor list, list of frames in that group]
    for frame in frames:
        functors = [slot.functor for slot in frame.slots]

        for fs in frame_sets:
            if set(functors) == set(fs[0]):
                fs[1].append(frame)
                break
        else:
            # Python for/else: runs when no existing group matched.
            frame_sets.append([functors, [frame]])

    ret_frames = []
    for fs in frame_sets:
        tids = []
        slots = {}
        # All possible slots in this merged frame.
        for functor in fs[0]:
            slots[functor] = Slot(functor=functor)
        # Reduce slots from all frames (merge ACT from all frames, ...).
        for frame in fs[1]:
            tids += frame.tids
            for sl in frame.slots:
                slots[sl.functor].tids += sl.tids
        slots_list = []
        for k, e in slots.items():
            slots_list.append(e)
        rf = Frame(tids=tids, slots=slots_list)
        rf.sort_slots()
        ret_frames.append(rf)
    return sorted_by_len_tids(ret_frames)
|
||||
|
||||
|
||||
def reduce_3(raw_frames, vallex):
    """Group frames by SSKJ senses assigned via the simple-Lesk algorithm.

    Looks up each frame's first tid in the sskj_simple_lesk collection
    and delegates the grouping to frames_from_sense_ids.
    """
    ssj_ids = [frame.tids[0] for frame in raw_frames]
    hits = list(vallex.db.sskj_simple_lesk.find(
        {"ssj_id": {"$in": ssj_ids}}))
    id_map = {
        hit["ssj_id"]: {
            "sense_id": hit.get("sense_id"),
            "sense_desc": hit.get("sense_desc"),
        }
        for hit in hits
    }
    return frames_from_sense_ids(raw_frames, id_map)
|
||||
|
||||
|
||||
def reduce_4(raw_frames, vallex):
    """Group frames by K-means-assigned sense ids (kmeans collection)."""
    ssj_ids = [frame.tids[0] for frame in raw_frames]
    hits = list(vallex.db.kmeans.find(
        {"ssj_id": {"$in": ssj_ids}}))
    id_map = {
        hit["ssj_id"]: {"sense_id": hit["sense_id"]}
        for hit in hits
    }
    return frames_from_sense_ids(raw_frames, id_map)
|
||||
|
||||
|
||||
def reduce_5(raw_frames, vallex):
    """Group frames by user-assigned senses (v2_sense_map collection).

    Frames with a user-defined sense are listed first; frames whose
    sentence has no stored sense are grouped under SENSE_UNDEFINED and
    appended last.
    """
    USER_SENSE_COLL = "v2_sense_map"
    headword = raw_frames[0].hw
    ssj_ids_full = [frame.tids[0] for frame in raw_frames]
    # v2_sense_map stores only the sentence half of ssj_id
    ssj_ids = [".".join(ssj_id.split(".")[:-1]) for ssj_id in ssj_ids_full]
    db_results = list(vallex.db[USER_SENSE_COLL].find({
        "ssj_id": {"$in": ssj_ids},
        "hw": headword,
    }))
    id_map = {}
    for entry in db_results:
        id_map[entry["ssj_id"]] = entry["sense_id"]

    ret_frames = frames_from_sense_ids(raw_frames, id_map)

    # sort: frames with senses to top
    senses_undefined = []
    senses_defined = []
    for frame in ret_frames:
        if frame.sense_info["sense_id"] == SENSE_UNDEFINED:
            senses_undefined.append(frame)
        else:
            senses_defined.append(frame)
    ret_frames = senses_defined + senses_undefined

    return ret_frames
|
||||
|
||||
|
||||
def frames_from_sense_ids(raw_frames, id_map):
    """Merge raw frames that map to the same sense id.

    :param raw_frames: list of single-occurrence Frame objects
    :param id_map: dict {ssj_id: sense entry}; keys may be either the full
        token id ("S123.t12") or just the sentence id ("S123")
    :return: one Frame per sense id, with per-functor slot counts and
        sense_info filled in
    NOTE(review): reduce_3/4 store dicts as id_map values while reduce_5
    stores plain ids, so sense_info["sense_id"]'s shape differs by caller.
    """
    # id_dict: {sense_id: [frame, ...]}
    id_dict = {}
    for frame in raw_frames:
        # long version ssj_id (S123.t12)
        frame_ssj_id = frame.tids[0]
        frame_sense_id = id_map.get(frame_ssj_id)
        if frame_sense_id is None:
            # try short version ssj_id (S123)
            frame_ssj_id = ".".join(frame_ssj_id.split(".")[:-1])
            frame_sense_id = id_map.get(frame_ssj_id)

        # set default if sense_id not found
        if frame_sense_id is None:
            frame_sense_id = SENSE_UNDEFINED
        """
        sense_id = id_map.get(frame.tids[0])
        if sense_id is not None:
            sense_id = sense_id.get("sense_id")
        else:
            sense_id = "nedefinirano"
        """
        if frame_sense_id not in id_dict:
            id_dict[frame_sense_id] = []
        id_dict[frame_sense_id].append(DC(frame))

    ret_frames = []
    for sense_id, frames in id_dict.items():
        tids = []
        reduced_slots = []
        for frame in frames:
            tids.extend(frame.tids)
            for slot in frame.slots:
                # if functor not in reduced slots,
                # add new slot; else increase count
                for rslot in reduced_slots:
                    if slot.functor == rslot.functor:
                        rslot.count += 1
                        rslot.tids.extend(slot.tids)
                        break
                else:
                    # in case for loop didn't match a slot
                    reduced_slots.append(Slot(
                        functor=slot.functor,
                        tids=slot.tids,
                        count=1
                    ))
        reduced_frame = Frame(tids, slots=reduced_slots)
        # Recover this group's sense entry by long, then short, ssj_id.
        id_map_entry = (
            id_map.get(tids[0]) or
            id_map.get(".".join(tids[0].split(".")[:-1]))
        )
        if id_map_entry is None:
            reduced_frame.sense_info = {
                "sense_id": SENSE_UNDEFINED,
            }
        else:
            reduced_frame.sense_info = {
                "sense_id": id_map_entry
            }
        reduced_frame.sort_slots()
        ret_frames.append(reduced_frame)
    return ret_frames
|
||||
|
||||
|
||||
# Registry of frame-reduction strategies exposed to the app.
# Each entry: "f" (the reducer callable), "desc" (Slovene description
# shown in the UI), "simple_name" (short Slovene UI label).
reduce_functions = {
    "reduce_0": {
        "f": reduce_0,
        "desc":
            "Vsaka pojavitev glagola dobi svoj stavčni vzorec.",
        "simple_name": "posamezni stavki"
    },
    "reduce_1": {
        "f": reduce_1,
        "desc":
            "Združevanje stavčnih vzorcev z enako skupino udeleženskih vlog.",
        "simple_name": "združeni stavki"
    },
    "reduce_3": {
        "f": reduce_3,
        "desc":
            "Združevanje stavčnih vzorcev na osnovi pomenov povedi v SSKJ. "
            "Pomeni so dodeljeni s pomočjo algoritma Simple Lesk.",
        "simple_name": "SSKJ_pomeni"
    },
    "reduce_4": {
        "f": reduce_4,
        "desc":
            "Združevanje stavčnih vzorcev na osnovi pomenov povedi "
            "s pomočjo algoritma K-Means. Število predvidenih pomenov "
            "podano na osnovi SSKJ.",
        "simple_name": "KMeans_pomeni"
    },
    "reduce_5": {
        "f": reduce_5,
        "desc":
            "Uporabniško dodeljeni pomeni povedi.",
        "simple_name": "po meri"
    }
}
|
@ -0,0 +1,284 @@
|
||||
from bs4 import BeautifulSoup as BS
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from time import time
|
||||
import pickle
|
||||
import json
|
||||
from copy import deepcopy as DC
|
||||
|
||||
# Match sese ordinals (1., 2., ...)
|
||||
rord = re.compile(r"^ *[0-9]+\. *$")
|
||||
|
||||
# Get rid of accented characters.
|
||||
intab = "ÁÉÍÓÚàáäçèéêìíîñòóôöùúüčŔŕ"
|
||||
outtb = "AEIOUaaaceeeiiinoooouuučRr"
|
||||
transtab = str.maketrans(intab, outtb)
|
||||
|
||||
|
||||
class Seqparser:
|
||||
    def __init__(self):
        # Stateless parser; nothing to initialize.
        pass
|
||||
|
||||
# main functions
|
||||
    def html_to_raw_pickle(self, sskj_html_filepath, raw_pickle_filepath):
        """Parse the sskj HTML dump and pickle the raw entries.

        The entries dict is serialized to a JSON string and that string
        is pickled — presumably to flatten defaultdicts; TODO confirm.
        """
        entries = dict(self.parse_file(sskj_html_filepath, self.parse_line))
        print("entries len: " + str(len(entries)))
        with open(raw_pickle_filepath, "wb") as f:
            tmpstr = json.dumps(dict(entries))
            pickle.dump(tmpstr, f)
        # debugging
|
||||
|
||||
    def raw_pickle_to_parsed_pickle(
        self, raw_pickle_filepath, parsed_pickle_filepath,
        se_list_filepath
    ):
        """Transform the raw pickle into a parsed pickle plus a "se" list.

        Steps: load raw data, extract the list of "se" headwords
        (presumably reflexive verbs — confirm), pickle that list, strip
        "se" entries from the data, reorganize, and pickle the result.
        """
        data = self.load_raw_pickle(raw_pickle_filepath)
        print("raw_pickle data len: " + str(len(data)))
        se_list = self.gen_se_list(data)
        print("se_list len: " + str(len(se_list)))
        with open(se_list_filepath, "wb") as f:
            pickle.dump(se_list, f)
        data1 = self.remove_se(data)
        data2 = self.reorganize(data1, se_list)
        print("data2 len: " + str(len(data2.keys())))
        with open(parsed_pickle_filepath, "wb") as f:
            pickle.dump(data2, f)
|
||||
|
||||
# helper html reading functions
|
||||
    def parse_file(self, path, f_parse_line):
        """Parse *path* line by line with *f_parse_line*.

        Returns a defaultdict mapping izt_clean -> list of parsed entries;
        lines for which f_parse_line returns None are skipped.
        """
        tstart = time()
        entries = defaultdict(list)
        with open(path, "r") as f:
            for line in f:
                data = f_parse_line(line)
                if data is not None:
                    entries[data["izt_clean"]].append(data)
        print("parse_file({}) in {:.2f}s".format(path, time() - tstart))
        return entries
|
||||
|
||||
def parse_line(self, line):
|
||||
def helper_bv_set(g_or_p):
|
||||
if g_or_p not in ["G", "P"]:
|
||||
print("Err g_or_p.")
|
||||
exit(1)
|
||||
if data.get("bv") is not None:
|
||||
if data["bv"] != g_or_p:
|
||||
print(str(line))
|
||||
# exit(1)
|
||||
data["bv"] = g_or_p
|
||||
data = {
|
||||
"izt": "",
|
||||
"izt_clean": "",
|
||||
"senses": defaultdict(list)
|
||||
}
|
||||
soup = BS(line, "html.parser")
|
||||
|
||||
current_sense_id = "0"
|
||||
for span in soup.find_all("span"):
|
||||
|
||||
# sense id
|
||||
if span.string is not None:
|
||||
rmatch = rord.match(span.string)
|
||||
if rmatch is not None:
|
||||
current_sense_id = rmatch.group().strip()
|
||||
|
||||
title = span.attrs.get("title")
|
||||
if title is not None:
|
||||
title = title.lower()
|
||||
|
||||
# only verbs and adjectives
|
||||
if "glagol" in title:
|
||||
helper_bv_set("G")
|
||||
data["bv_full"] = title
|
||||
elif "pridevn" in title:
|
||||
helper_bv_set("P")
|
||||
data["bv_full"] = title
|
||||
|
||||
# žšč
|
||||
if title == "iztočnica":
|
||||
data["izt"] = span.string
|
||||
data["izt_clean"] = span.string.translate(transtab).lower()
|
||||
|
||||
# sense description
|
||||
if title == "razlaga" and span.string is not None:
|
||||
data["senses"][current_sense_id].append(
|
||||
("razl", span.string))
|
||||
if "pridevnik od" in span.string:
|
||||
helper_bv_set("P")
|
||||
|
||||
if title == "sopomenka":
|
||||
subspan = span.find_all("a")[0]
|
||||
if subspan.string is not None:
|
||||
data["senses"][current_sense_id].append(
|
||||
("sopo", subspan.string))
|
||||
|
||||
# save verbs and adjectives
|
||||
if (
|
||||
("bv" not in data) or
|
||||
(data["bv"] != "P" and data["bv"] != "G")
|
||||
):
|
||||
return None
|
||||
|
||||
# sanity check
|
||||
if data["bv"] == "P" and " se" in data["izt_clean"]:
|
||||
print(data)
|
||||
exit(1)
|
||||
|
||||
# append _ to adjective keywords
|
||||
if data["bv"] == "P":
|
||||
data["izt_clean"] = data["izt_clean"] + "_"
|
||||
|
||||
# cleanup
|
||||
if "bv" not in data:
|
||||
print("Should not be here (no bv).")
|
||||
exit(1)
|
||||
del(data["bv"])
|
||||
if "bv_full" in data:
|
||||
del(data["bv_full"])
|
||||
|
||||
return data
|
||||
|
||||
# helper functions
|
||||
def load_raw_pickle(self, raw_pickle_filepath):
|
||||
with open(raw_pickle_filepath, "rb") as f:
|
||||
tmpstr = pickle.load(f)
|
||||
return json.loads(tmpstr)
|
||||
|
||||
def helper_loop(self, data, fnc):
|
||||
for k, lst in data.items():
|
||||
for el in lst:
|
||||
fnc(el)
|
||||
|
||||
def gen_se_list(self, data):
|
||||
|
||||
def fnc1(el):
|
||||
ic = el["izt_clean"]
|
||||
if " se" in ic:
|
||||
se_list.append(ic)
|
||||
|
||||
def fnc2(el):
|
||||
ic = el["izt_clean"]
|
||||
if ic in se_pruned:
|
||||
se_pruned.remove(ic)
|
||||
|
||||
# hw entries that only exist with " se"
|
||||
se_list = []
|
||||
self.helper_loop(data, fnc1)
|
||||
se_pruned = set([hw.split(" se")[0] for hw in se_list])
|
||||
self.helper_loop(data, fnc2)
|
||||
return sorted(list(se_pruned))
|
||||
|
||||
def remove_se(self, data):
|
||||
|
||||
def fnc1(el):
|
||||
nel = DC(el)
|
||||
ic = nel["izt_clean"]
|
||||
if " se" in ic:
|
||||
nic = ic.split(" se")[0]
|
||||
nel["izt_clean"] = nic
|
||||
data_new[nel["izt_clean"]].append(nel)
|
||||
|
||||
data_new = defaultdict(list)
|
||||
self.helper_loop(data, fnc1)
|
||||
return dict(data_new)
|
||||
|
||||
def reorganize(self, data, se_list):
|
||||
# some hw entries have several headwords,
|
||||
# some senses have subsenses
|
||||
# index everything, make 1 object per hw
|
||||
|
||||
def helper_prune(sense_str):
|
||||
# remove space padding
|
||||
sense_str = sense_str.strip()
|
||||
|
||||
if len(sense_str) == 1:
|
||||
return sense_str
|
||||
|
||||
# remove banned characters from string ending
|
||||
banned = ": ; . , - ! ?".split(" ")
|
||||
if sense_str[-1] in banned:
|
||||
return sense_str[:-1]
|
||||
|
||||
return sense_str
|
||||
|
||||
data_new = {}
|
||||
for k, lst in data.items():
|
||||
new_el = {
|
||||
"hw": k,
|
||||
"has_se": k in se_list,
|
||||
"senses": []
|
||||
}
|
||||
|
||||
# if there is a single hw entry, hw_id is 0
|
||||
if len(lst) == 1:
|
||||
homonym_id = -1
|
||||
else:
|
||||
homonym_id = 0
|
||||
|
||||
# loop homonyms
|
||||
for el in lst:
|
||||
homonym_id += 1
|
||||
# loop top lvl sense ids
|
||||
for sense_id, sens_lst in el["senses"].items():
|
||||
# loop subsenses
|
||||
for i, sens in enumerate(sens_lst):
|
||||
nsid = sense_id.split(".")[0]
|
||||
if len(sens_lst) == 1:
|
||||
nsid += "-0"
|
||||
else:
|
||||
nsid += ("-" + str(i + 1))
|
||||
new_sense = {
|
||||
"homonym_id": homonym_id,
|
||||
# sense_id: sense_id-subsense_id
|
||||
"sense_id": nsid,
|
||||
"sense_type": sens[0],
|
||||
"sense_desc": helper_prune(sens[1]),
|
||||
}
|
||||
new_el["senses"].append(new_sense)
|
||||
hw = new_el["hw"]
|
||||
if hw in data_new:
|
||||
print("Shouldn't be here.")
|
||||
print(new_el)
|
||||
exit(1)
|
||||
data_new[hw] = DC(new_el)
|
||||
# return data_new
|
||||
|
||||
# check
|
||||
for hw, el in data_new.items():
|
||||
for sens in el["senses"]:
|
||||
if sens["sense_desc"] is None:
|
||||
print(sens)
|
||||
|
||||
return data_new
|
||||
|
||||
|
||||
def plst(lst):
    """Print each element of lst on its own line (debug helper)."""
    for item in lst:
        print(item)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Driver: run the full SSKJ HTML -> pickle pipeline.
    # Paths are relative to the script's location in the repository.
    datapath = "../../../data"
    html_filepath = datapath + "/sskj/sskj2_v1.html"
    raw_pickle_filepath = datapath + "/tmp_pickles/raw_sskj.pickle"
    parsed_pickle_filepath = datapath + "/no_del_pickles/sskj_senses.pickle"
    se_list_filepath = datapath + "/no_del_pickles/se_list.pickle"

    p = Seqparser()

    # Stage 1: HTML -> raw pickle (toggle with the if-guard for reruns).
    if True:
        print("html_to_raw_pickle({}, {})".format(
            html_filepath, raw_pickle_filepath))
        print("Big file, this might take a while (2 min).")
        tstart = time()
        p.html_to_raw_pickle(html_filepath, raw_pickle_filepath)
        print("Finished in {:.2f}.".format(time() - tstart))

    # Stage 2: raw pickle -> parsed senses + se_list pickles.
    if True:
        print("raw_pickle_to_parsed_pickle({}, {}, {})".format(
            raw_pickle_filepath, parsed_pickle_filepath, se_list_filepath))
        tstart = time()
        p.raw_pickle_to_parsed_pickle(
            raw_pickle_filepath, parsed_pickle_filepath, se_list_filepath)
        print("Finished in {:.2f}.".format(time() - tstart))
    print("Done.")
|
@ -0,0 +1,218 @@
|
||||
import xml.etree.ElementTree as ET
|
||||
from copy import deepcopy as DC
|
||||
from time import time
|
||||
import re
|
||||
import logging
|
||||
import sys
|
||||
import pickle
|
||||
|
||||
log = logging.getLogger(__name__)

# Keep the "xml" prefix when serializing; XML_ID is the fully-qualified
# attribute name of xml:id as ElementTree exposes it.
ET.register_namespace("xml", "http://www.w3.org/XML/1998/namespace")
XML_ID = "{http://www.w3.org/XML/1998/namespace}id"
|
||||
|
||||
|
||||
# |$ guarantees findall() always yields at least one (possibly empty) match
re_int = re.compile(r"t\d+|$")


def re_lmbd(el):
    """Sort key for an "s" section in ssj: the numeric token index.

    Example: "S123.t34" -> 34. Strings without a t<digits> part map to 0.
    """
    match = re_int.findall(el)[0]
    return int(match[1:]) if match else 0
|
||||
|
||||
|
||||
class SsjEntry:
    """One ssj500k sentence: id, token map ("s") and deep (SRL) links."""

    def __init__(self, ssj_id, s, deep_links):
        # See ssj xml structure. Mutable inputs are deep-copied so the
        # entry owns its data independently of the caller.
        self.id = ssj_id
        self.s, self.deep_links = DC((s, deep_links))
|
||||
|
||||
|
||||
class SsjDict:
    """Container for ssj500k sentences parsed from the TEI body XML.

    entries maps sentence xml:id -> SsjEntry. Only sentences that carry
    an SRL linkGrp (deep semantic roles) are kept.
    """

    def __init__(self):
        self.entries = {}

    # NOTE: a large dead read_xml() implementation (kept here as a
    # class-level string literal) was removed; it targeted an older XML
    # layout and called SsjEntry with a 5-argument signature that no
    # longer exists. read_xml_v2() below is the live reader.

    def read_xml_v2(self, filepath):
        """Read a TEI-encoded ssj500k body file into self.entries.

        Sentences without SRL links are skipped; duplicated sentence ids
        are counted and skipped. Logs timing and counters when done.
        """
        NS_DICT = {
            "tei": "http://www.tei-c.org/ns/1.0",
            "xml": "http://www.w3.org/XML/1998/namespace",
        }

        def ns_prefix(ns):
            # "{uri}" form used by ElementTree for qualified names.
            return "{" + NS_DICT[ns] + "}"

        def helper_get_sentence(tree_s):
            # all w (word) and pc (punctuation) elements of a sentence
            ret = []
            for el in tree_s.iter():
                if (
                    el.tag == ns_prefix("tei") + "w" or
                    el.tag == ns_prefix("tei") + "pc"
                ):
                    ret.append(el)
            return ret

        def helper_get_functor_links(tree_s):
            # links of the SRL linkGrp, or [] when the sentence has none
            lg = None
            for linkGrp in tree_s.findall("tei:linkGrp", NS_DICT):
                if linkGrp.attrib["type"] == "SRL":
                    lg = linkGrp
                    break
            else:
                return []
            ret = []
            for link in lg:
                ret.append(link)
            return ret

        def helper_gen_deep_links(link_list):
            # "target" is "#<from_id> #<to_id>"; strip the leading '#'.
            # "ana" is "srl:<FUNCTOR>".
            deep_links = []
            for link in link_list:
                deep_links.append({
                    "from": link.attrib["target"].split(" ")[0][1:],
                    "to": link.attrib["target"].split(" ")[1][1:],
                    "functor": link.attrib["ana"].split(":")[1]
                })
            return deep_links

        log.info("SsjDict.read_xml({})".format(filepath))
        t_start = time()
        stats = {
            "total_count": 0,
            "deep_roles_count": 0,
            "duplicated_sid": 0,
        }
        tree = ET.parse(filepath)
        root = tree.getroot()

        for s in root.findall(".//tei:s", NS_DICT):
            stats["total_count"] += 1
            s_id = s.attrib[ns_prefix("xml") + "id"]

            # get_functors (deep semantic roles); skip sentences without SRL
            functor_links = helper_get_functor_links(s)
            if len(functor_links) == 0:
                continue
            stats["deep_roles_count"] += 1

            # get_sentence: build tid -> token dict
            tokens = {}
            for token in helper_get_sentence(s):
                tid = token.attrib[ns_prefix("xml") + "id"]
                if token.tag == ns_prefix("tei") + "w":
                    # "ana" is "mte:<MSD>"; keep only the MSD part
                    tokens[tid] = {
                        "msd": token.attrib["ana"].split(":")[1],
                        "lemma": token.attrib["lemma"],
                        "word": token.text
                    }
                elif token.tag == ns_prefix("tei") + "pc":
                    tokens[tid] = {
                        "word": token.text
                    }
                else:
                    log.warning("Unrecognized sentence element: " + token.tag)
                    exit(1)

            if s_id in self.entries:
                log.warning("duplicated sentence: " + s_id)
                stats["duplicated_sid"] += 1
                continue

            self.entries[s_id] = SsjEntry(
                s_id,
                tokens,
                helper_gen_deep_links(functor_links)
            )

        t_end = time()
        log.info("Time: {}s.".format(t_end - t_start))
        log.info(str(stats))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # testing: parse the full ssj500k body and pickle the result
    log.setLevel(logging.DEBUG)

    ch = logging.StreamHandler(sys.stdout)
    log.addHandler(ch)

    # Load the TEI body XML (path relative to the script's location).
    fpath = "../../data/ssj500k-sl.TEI/ssj500k-sl.body.xml"
    ssj = SsjDict()
    ssj.read_xml_v2(fpath)
    with open("ssj_test.pickle", "wb") as file:
        pickle.dump(ssj, file)
|
@ -0,0 +1,47 @@
|
||||
# Deprecated!
|
||||
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from time import time
|
||||
from valency import k_utils
|
||||
|
||||
# Query URL prefix of the online SSKJ test interface; the search term
# is appended directly to this string.
SSKJ_BASE = "http://bos.zrc-sazu.si/cgi/a03.exe?name=sskj_testa&expression="


class SskjScraper:
    """Deprecated scraper for the online SSKJ dictionary interface."""

    def __init__(self):
        self.base_url = SSKJ_BASE

    def scrape(self, word):
        """Fetch the SSKJ page for `word` and return its tokenized entry text.

        Returns [] when the word has no hits or the page has no entry list.
        Performs a live HTTP GET on every call.
        """
        # returns unique set of words
        soup = BeautifulSoup(
            requests.get(self.base_url + word).content,
            "html.parser"
        )
        # Check for failure: a second <h2> signals "no results".
        h2 = soup.find_all("h2")
        if len(h2) >= 2:
            # <h2>Zadetkov ni bilo: ...</h2>
            return []
        li_elements = soup.find_all('li', class_="nounderline")
        if len(li_elements) == 0:
            return []
        li = li_elements[0]
        # It was horrible...
        # The page nests entries as <li> ... <li> ... <li> ...</li></li></li>;
        # collect descendant text until the first nested <li> is reached.
        txts = []
        for el in li.find_all():
            if el.name == "li":
                break
            txts.append(el.get_text())
        print("sskj scraped {}.".format(word))
        return k_utils.tokenize(txts)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test for the (deprecated) scraper.
    sskjScr = SskjScraper()

    # NOTE(review): `word` is assigned but unused — scrape() is called
    # with the literal "čaj" instead; confirm which was intended.
    word = "tek"
    tp = sskjScr.scrape("čaj")
    print(tp)
|
@ -0,0 +1,40 @@
|
||||
# Manual test script: build (or load) the Vallex structure and run Lesk
# sense disambiguation on a handful of frames.
from valency.val_struct import *
from valency.ssj_struct import *
from valency import k_utils
from valency.lesk import Lesk

# Load a cached Vallex pickle if one exists; otherwise rebuild it from
# the ssj XML (also cached as a pickle) and store it.
vallex_path = "../../data/vallex.xml"
vallex = k_utils.pickle_load(vallex_path)
if vallex is None:
    ssj_path = "../../data/anno_final.conll.xml"
    # ssj_path = "../../data/ssj500kv1_1-SRL_500_stavkov_2017-04-11.xml"
    ssj = k_utils.pickle_load(ssj_path)
    if ssj is None:
        ssj = SsjDict()
        # NOTE(review): SsjDict's read_xml() is commented out upstream;
        # read_xml_v2() is the live reader — confirm this call still works.
        ssj.read_xml(ssj_path)
        k_utils.pickle_dump(ssj, ssj_path)

    vallex = Vallex()
    vallex.read_ssj(ssj)
    k_utils.pickle_dump(vallex, vallex_path)

# NOTE(review): process_after_read() takes three arguments
# (sskj_senses_pickle_path, se_list_pickle_path, reload_sskj_senses);
# this 2-argument call looks stale — confirm against val_struct.py.
vallex.process_after_read(False, False)

# Try Lesk on the first frame of each entry until 10 senses resolve.
random_frame = None
lesk = Lesk()
successes = 0
for k, e in vallex.entries.items():
    for rf in e.raw_frames:
        random_frame = rf
        break
    print(rf.to_string())
    print(vallex.get_token(random_frame.tids[0]))
    print(vallex.get_sentence(random_frame.tids[0]))
    tid = random_frame.tids[0]
    token = vallex.get_token(tid)
    context = vallex.get_context(tid)
    sense = lesk.lesk(token, context)
    if sense is not None:
        successes += 1
    if successes >= 10:
        break
|
@ -0,0 +1,706 @@
|
||||
from time import time
|
||||
from copy import deepcopy as DC
|
||||
from valency.frame import Frame
|
||||
from valency.reduce_functions import *
|
||||
from valency.lesk import *
|
||||
from valency import mongo_tools
|
||||
import random
|
||||
import logging
|
||||
from valency.evaluation import Evaluation
|
||||
from valency.dictionary_interface import SloWnet, Sskj2
|
||||
from valency.leskFour import LeskFour
|
||||
from valency.k_kmeans import KmeansClass
|
||||
from valency.ssj_struct import SsjDict, SsjEntry
|
||||
from valency.seqparser.seqparser import Seqparser
|
||||
import pickle
|
||||
import sys
|
||||
import hashlib
|
||||
|
||||
log = logging.getLogger(__name__)


def split_id(myid):
    """Split a full token id like "S123.t1" into ("S123", "t1").

    Everything before the last dot is the sentence id; the trailing
    component is the token id. An id without a dot yields ("", myid).
    """
    sid, _, tid = myid.rpartition(".")
    return (sid, tid)
|
||||
|
||||
|
||||
class ValEntry():
    """Valency entry for a single headword; accumulates its raw frames."""

    def __init__(self, hw, frame):
        # Senses are attached later in the pipeline, so the flag starts
        # False; the frame list is seeded with the first observed frame.
        self.hw = hw
        self.has_senses = False
        self.raw_frames = [frame]
|
||||
|
||||
|
||||
class Vallex():
    # Main class: holds all tokens, valency entries and web-app indexes;
    # connects to MongoDB on construction.
    def __init__(self):
        """Connect to the database and initialize empty indexes.

        Side effects: opens a MongoDB connection (127.0.0.1:26633), checks
        required collections and prepares user tokens; exits the process
        if the database is unreachable.
        """
        # database
        self.db, err_msg = mongo_tools.basic_connection("127.0.0.1", 26633)
        if self.db is None:
            log.error((
                "Database not connected:"
                "{}".format(err_msg)
            ))
            exit(1)
        mongo_tools.check_collections(self.db, [
            "v2_users", "v2_senses", "v2_sense_map", "v2_user_tokens"
        ])
        mongo_tools.prepare_user_tokens(self.db)

        # these 3 might be obsolete for the web app (used for ML)
        self.db_senses_map = self.db.senses_map3
        self.slownet_interface = SloWnet(self)
        self.sskj_interface = Sskj2(self)

        # self.tokens["s0][t0"] = {word, lemma, msd, ...}
        self.tokens = {}

        # key = verb / adjective headword
        self.entries = {}

        # For alphabetical indexing in web app.
        self.sorted_words = {}
        # words = { first_letter: [hw1, hw2, ... sorted] }
        self.functors_index = {}
        self.has_se = []  # list of verbs with "se" ("bati se")

        # Used for ML (deprecated).
        # NOTE(review): gen_sskj_sl_one() and t_simple_lesk_sskj() use
        # self.lesk, which is never set here — confirm (AttributeError risk).
        self.leskFour = LeskFour(self)
        self.kmeans = KmeansClass(self)
        self.evaluation = Evaluation(self)
        self.test_samples = []

        # run self.process_after_read() after initiating Vallex
|
||||
|
||||
def read_ssj(self, ssj):
|
||||
# ssj: object generated with ssj_strict.py.
|
||||
BANNED_HW = ["biti"]
|
||||
stats = {
|
||||
"P_count": 0,
|
||||
"skipped": 0,
|
||||
}
|
||||
log.info("Vallex.read_ssj({}).".format(
|
||||
ssj
|
||||
))
|
||||
t_start = time()
|
||||
for ssj_id, entry in ssj.entries.items():
|
||||
# Read tokens
|
||||
skip_entry = False
|
||||
tmp_tokens = {}
|
||||
for ssj_tid, token in entry.s.items():
|
||||
sid, tid = split_id(ssj_tid)
|
||||
|
||||
# safety checks
|
||||
if tid != "t" and not tid[1:].isdigit():
|
||||
log.warning("dropping SID={} - corrupted keys".format(k))
|
||||
skip_entry = True
|
||||
break
|
||||
if tid in tmp_tokens:
|
||||
log.error(
|
||||
"Vallex.read_ssj(): Duplicated ssj_tid:" + ssj_tid)
|
||||
exit(1)
|
||||
|
||||
tmp_tokens[tid] = DC(token)
|
||||
if skip_entry:
|
||||
continue # skip corrupted keys
|
||||
if sid in self.tokens:
|
||||
log.error("sid duplicate: " + sid)
|
||||
exit(1)
|
||||
self.tokens[sid] = DC(tmp_tokens)
|
||||
|
||||
# Read frame data (each deep link gets its own raw frame).
|
||||
link_map = {}
|
||||
# hw_id: { hw_lemma: lemma, deep: [{functor: fnct, to: to}]}
|
||||
for deep_link in entry.deep_links:
|
||||
hw_id = deep_link["from"]
|
||||
hw_token = self.get_token(hw_id)
|
||||
hw_lemma = hw_token["lemma"]
|
||||
hw_bv = hw_token["msd"][0]
|
||||
if (hw_bv != "G" and hw_bv != "P"):
|
||||
stats["skipped"] += 1
|
||||
log.info("Skipping {}: not a verb or adjective.".format(
|
||||
hw_lemma))
|
||||
continue
|
||||
if hw_bv == "P":
|
||||
hw_lemma = hw_lemma + "_"
|
||||
stats["P_count"] += 1
|
||||
if hw_id in link_map:
|
||||
link_map[hw_id]["deep"].append(deep_link)
|
||||
else:
|
||||
link_map[hw_id] = {
|
||||
"hw_lemma": hw_lemma,
|
||||
"deep": [deep_link]
|
||||
}
|
||||
for hw_id, data in link_map.items():
|
||||
hw_lemma = data["hw_lemma"]
|
||||
raw_frame = Frame(
|
||||
hw=hw_lemma,
|
||||
tids=[hw_id],
|
||||
deep_links=data["deep"],
|
||||
slots=None,
|
||||
)
|
||||
if hw_lemma not in self.entries:
|
||||
self.entries[hw_lemma] = ValEntry(hw_lemma, raw_frame)
|
||||
else:
|
||||
self.entries[hw_lemma].raw_frames.append(raw_frame)
|
||||
|
||||
# cleanup banned
|
||||
for hw in BANNED_HW:
|
||||
if hw in self.entries:
|
||||
del(self.entries[hw])
|
||||
|
||||
t_end = time()
|
||||
log.info("Finished build_from_ssj() in {:.2}s.".format(
|
||||
t_end - t_start
|
||||
))
|
||||
log.info("Vallex has a total of {} key entries.".format(
|
||||
len(self.entries.keys())
|
||||
))
|
||||
log.info("Number of adjectives: {}".format(stats["P_count"]))
|
||||
log.info("Number of skipped (not a verb or adjective): {}".format(
|
||||
stats["skipped"]))
|
||||
# Frames per hw
|
||||
"""
|
||||
for k, e in self.entries.items():
|
||||
print(k + "," + str(len(e.raw_frames)))
|
||||
"""
|
||||
|
||||
def get_token(self, myid):
|
||||
# id = S123.t1
|
||||
sid, tid = split_id(myid)
|
||||
return self.tokens[sid][tid]
|
||||
|
||||
def get_sentence(self, myid):
|
||||
sid, tid = split_id(myid)
|
||||
tmp = []
|
||||
sentence = ""
|
||||
for k, token in self.tokens[sid].items():
|
||||
if (k != "t") and (token["word"] is not None):
|
||||
tmp.append((k, token))
|
||||
for token in sorted(tmp, key=lambda x: int(x[0][1:])):
|
||||
sentence += (token[1]["word"] + " ")
|
||||
return sentence
|
||||
|
||||
def get_tokenized_sentence(self, myid):
|
||||
sid, tid = split_id(myid)
|
||||
tmp = []
|
||||
sentence = []
|
||||
for k, token in self.tokens[sid].items():
|
||||
if k != "t":
|
||||
tmp.append((k, token))
|
||||
for token in sorted(tmp, key=lambda x: int(x[0][1:])):
|
||||
sentence.append((".".join([sid, token[0]]), token[1]))
|
||||
# return [(ssj_id, {word: _, lemma: _, msd: _}), ...]
|
||||
return sentence
|
||||
|
||||
    def process_after_read(
        self, sskj_senses_pickle_path, se_list_pickle_path,
        reload_sskj_senses
    ):
        """Build web-app indexes and load seqparser pickles.

        Must be called once after read_ssj(). Rebuilds the alphabetical
        headword index, the functor index, and fills db.v2_senses / the
        has_se list from the seqparser pickle files.
        """
        tstart = time()

        # web app: index by hw
        self.sorted_words = {}
        self.gen_sorted_words()

        # web app: index by functor
        self.functors_index = {}
        self.gen_functors_index()

        # fill db.v2_senses and self.has_se
        self.has_se = []
        self.read_seqparser_pickles(
            sskj_senses_pickle_path, se_list_pickle_path, reload_sskj_senses)

        log.debug(
            "vallex.process_after_read(): {:.2f}s".format(time() - tstart))
|
||||
|
||||
    def gen_sorted_words(self):
        """Build self.sorted_words: first letter -> [(hw, n_frames), ...].

        Lists are ordered with Slovene-aware bucket sort on the headword.
        """
        res = {}
        for hw, e in self.entries.items():
            letter = hw[0].lower()
            n_sent = len(e.raw_frames)
            if letter not in res:
                res[letter] = []
            res[letter].append((hw, n_sent))
        # sort and add to vallex object
        # NOTE(review): k_utils is not imported explicitly in this module;
        # presumably it arrives via a star import — confirm.
        self.sorted_words = {}
        for letter, lst in res.items():
            self.sorted_words[letter] = k_utils.slo_bucket_sort(
                lst, key=lambda x: x[0])
|
||||
|
||||
def gen_functors_index(self):
|
||||
for hw, e in self.entries.items():
|
||||
for frame in e.raw_frames:
|
||||
for slot in frame.slots:
|
||||
if slot.functor not in self.functors_index:
|
||||
self.functors_index[slot.functor] = []
|
||||
self.functors_index[slot.functor].append(frame)
|
||||
|
||||
    def read_seqparser_pickles(
        self, sskj_senses_pickle_path, se_list_pickle_path,
        reload_sskj_senses
    ):
        """Load seqparser output: has_se verb list and SSKJ senses.

        Fills self.has_se from se_list_pickle_path and inserts SSKJ senses
        into db.v2_senses (skipped if already present, unless
        reload_sskj_senses is true, which clears them first).
        Exits the process when a pickle is missing/empty.
        """
        log.info("read_seqparser_pickles()")
        log.info((
            "Reading list of has_se verbs from {}."
            "Sskj senses into db.v2_senses from {}."
        ).format(se_list_pickle_path, sskj_senses_pickle_path))
        AUTHOR_SSKJ = "SSKJ"
        ERR_MSG = (
            "Need to generate .pickle files first."
            "Use: "
            "$ python3 /script/valency/seqparser/seqparser.py"
            "Input is /data/sskj_v2.html."
        )

        # has_se: verbs that only occur with the reflexive " se"
        with open(se_list_pickle_path, "rb") as f:
            self.has_se = pickle.load(f)
        if self.has_se is None:
            log.error(ERR_MSG)
            exit(1)
        self.has_se = sorted(self.has_se)
        log.info("Loaded self.has_se (len: {}) from {}.".format(
            len(self.has_se), se_list_pickle_path))

        # sskj senses
        if reload_sskj_senses:
            log.info("Reloading sskj_senses.")
            reply = self.db.v2_senses.remove({"author": AUTHOR_SSKJ})
            log.info(reply)

        # idempotence: don't re-insert if senses are already stored
        query = list(self.db.v2_senses.find({"author": AUTHOR_SSKJ}))
        if len(query) > 0:
            log.info("Sskj senses already in database.")
            return
        tstart = time()
        data = None
        with open(sskj_senses_pickle_path, "rb") as f:
            data = pickle.load(f)
        if data is None:
            log.error(ERR_MSG)
            exit(1)
        for k, e in data.items():
            for sense in e["senses"]:
                db_entry = {
                    "hw": k,
                    "author": AUTHOR_SSKJ,
                    "desc": sense["sense_desc"],
                    # unique id for each sense: author-homonym-sense-type-hash
                    "sense_id": "{}-{}-{}-{}-{}".format(
                        AUTHOR_SSKJ,
                        sense["homonym_id"],
                        sense["sense_id"],
                        sense["sense_type"],
                        hashlib.sha256(
                            sense["sense_desc"].encode("utf-8")
                        ).hexdigest()[:5]
                    )
                }
                self.db.v2_senses.insert(db_entry)
                # print(db_entry)
        log.info("db.v2_senses prepared in {:.2f}s".format(time() - tstart))
|
||||
|
||||
    # Functions below can be used for interactively with flask_api.
    def test_dev(self):
        """Dev hook: return the SSKJ sense glosses for a fixed headword."""
        # self.prepare_sskj_senses()
        hw = "dajati"
        senses = self.sskj_interface.sense_glosses(hw)
        return str(senses)
|
||||
|
||||
    def calc_senses(self):
        """Dev hook: run the currently selected sense-calculation methods.

        Edit this method to switch between the (commented) Lesk variants
        and the k-means variants below.
        """
        # self.calc_all_senses(self.leskFour.lesk_nltk)
        # self.calc_all_senses(self.leskFour.lesk_sl)
        # self.calc_all_senses(self.leskFour.lesk_al)  # cca 8h!
        # self.calc_all_senses(self.leskFour.lesk_ram)
        self.calc_all_senses_kmeans(self.kmeans.bisection_kmeans)
        self.calc_all_senses_kmeans(self.kmeans.normal_kmeans)
        return "edit val_struct.py: calc_senses()"
|
||||
|
||||
    # deprecated functions (used for machine learning experiments)

    def prepare_sskj_senses(self):
        """Insert SSKJ2-scraped sense descriptions into db.v2_senses.

        Obsolete — superseded by read_seqparser_pickles(). Skips work when
        SSKJ2 senses are already stored.
        """
        # obsolete, using read_seqparser_pickles()
        log.info("prepare_sskj_senses() (db.v2_senses)")
        query = list(self.db.v2_senses.find({"author": "SSKJ2"}))
        if len(query) > 0:
            log.info("Sskj senses already in database.")
            return
        tstart = time()
        log.info("Iterating over {} hw entries:".format(
            len(self.entries.keys())))
        for hw, e in self.entries.items():
            senses = self.sskj_interface.sense_glosses(hw)
            if len(senses) == 0:
                continue
            for sense in senses:
                # create sense from each description
                for i, de in enumerate(sense["def"]):
                    sense_def = sense["def"][i]
                    # capitalize and normalize the trailing punctuation
                    sense_def = sense_def[0].upper() + sense_def[1:]
                    if sense_def[-1] == ":" or sense_def[-1] == ";":
                        sense_def = sense_def[:-1] + "."
                    data = {
                        "hw": hw,
                        "author": "SSKJ2",
                        "desc": sense_def,
                        "sskj_id": sense["sskj_sense_id"],
                        "sskj_desc_id": i
                    }
                    self.db.v2_senses.insert(data)
        log.info("sskj_senses prepared in {:.2f}s".format(time() - tstart))
|
||||
|
||||
    def gen_sskj_sl(self):
        """Run Simple Lesk (sskj variant) over every entry. Takes ~1 hour."""
        # Takes about an hour.
        tstart = time()
        log.info("Generating new sskj_simple_lesk with Simple Lesk.")
        for k, e in self.entries.items():
            self.gen_sskj_sl_one(e.hw)
        log.debug("gen_sskj_sl in {:.2f}s".format(time() - tstart))
|
||||
|
||||
    def gen_sskj_sl_one(self, hw, update_db=True):
        """Run Simple Lesk (sskj) for one headword; upsert results to db.

        hw: headword lemma; silently returns when unknown.
        update_db: when False, compute but do not write to the database.
        NOTE(review): self.lesk is never assigned in __init__ — this call
        looks like it would raise AttributeError; confirm.
        """
        entry = None
        ttstart = time()
        e = self.entries.get(hw)
        if e is None:
            return
        for frame in e.raw_frames:
            tid = frame.tids[0]
            sentence = self.get_sentence(tid)
            res = self.lesk.simple_lesk_sskj(sentence, hw)
            if res is None:
                log.debug("headword {} not in sskj".format(hw))
                continue
            key = {"ssj_id": tid}
            entry = {
                "headword": hw,
                "ssj_id": tid,  # unique identifier
                "sense_id": res[1],
                # "sense_desc": k_utils.dict_safe_key(res[2], "ns0:def"),
                "sense_desc": res[2]["def"]
            }
            # log.debug(str(res[2]))
            # log.debug(entry["sense_id"])
            # log.debug(entry["sense_desc"])
            if update_db:
                self.db.sskj_simple_lesk.update(key, entry, upsert=True)
        log.debug("[*] sskj_ids for {} in {:.2f}s".format(
            hw, time() - ttstart))
|
||||
|
||||
def get_context(self, myid, radius=None, min_lemma_size=None):
|
||||
radius = radius or 5
|
||||
min_lemma_size = min_lemma_size or 4
|
||||
# gives you the token and 10 of its neighbors
|
||||
sentence = self.get_sentence(myid)
|
||||
sentlen = len(sentence.split(" "))
|
||||
sid, tid = split_id(myid)
|
||||
idx = int(tid[1:])
|
||||
tokens_after = []
|
||||
i = idx
|
||||
while i < sentlen - 1 and len(tokens_after) < radius:
|
||||
i += 1
|
||||
token = self.get_token(sid + ".t" + str(i))
|
||||
if (
|
||||
token is not None and "lemma" in token and
|
||||
len(token["lemma"]) >= min_lemma_size and
|
||||
token["lemma"] != "biti"
|
||||
):
|
||||
tokens_after.append(token)
|
||||
tokens_before = []
|
||||
i = idx
|
||||
while i > 1 and len(tokens_before) < radius:
|
||||
i -= 1
|
||||
token = self.get_token(sid + ".t" + str(i))
|
||||
if (
|
||||
token is not None and "lemma" in token and
|
||||
len(token["lemma"]) >= min_lemma_size and
|
||||
token["lemma"] != "biti"
|
||||
):
|
||||
tokens_before.append(token)
|
||||
tokens = tokens_before + [self.get_token(myid)] + tokens_after
|
||||
# find position of original token:
|
||||
mid_idx = len(tokens_before)
|
||||
return (mid_idx, tokens)
|
||||
|
||||
def get_sense_ids(self, collname, hw, sense_group=None):
|
||||
query = {"headword": hw}
|
||||
if sense_group is not None:
|
||||
query["sense_group"] = sense_group
|
||||
result = list(self.db[collname].find(query))
|
||||
sense_ids = {}
|
||||
for r in result:
|
||||
sense_ids[r["ssj_id"]] = r["sense_id"]
|
||||
return sense_ids
|
||||
|
||||
    def t_get_context(self):
        """Manual test: print get_context() output for ~10 random frames."""
        ii = 10
        for k, e in self.entries.items():
            for frame in e.raw_frames:
                # randomly sample roughly 20% of frames
                if random.randint(0, 100) > 20:
                    continue
                ii -= 1
                if ii <= 0:
                    return

                mytid = frame.tids[0]
                print()
                print(mytid)
                print(self.get_token(mytid))
                sent = self.get_context(mytid, radius=3, min_lemma_size=4)
                print("mid: {}".format(sent[0]))
                # NOTE(review): this inner loop reuses `ii`, clobbering the
                # outer sample counter — likely a shadowing bug; confirm.
                for ii in range(len(sent[1])):
                    print("{} -> {}".format(
                        ii, sent[1][ii]))
|
||||
|
||||
    def t_simple_lesk_sskj(self):
        """Manual test: run simple_lesk_sskj on ~10 randomly sampled frames.

        NOTE(review): relies on self.lesk, which __init__ never sets —
        confirm (AttributeError risk).
        """
        ii = 10
        for k, e in self.entries.items():
            # randomly sample roughly 20% of entries...
            if random.randint(0, 100) > 20:
                continue
            for frame in e.raw_frames:
                # ...and 20% of their frames
                if random.randint(0, 100) > 20:
                    continue
                if ii == 0:
                    return
                ii -= 1

                print("\nTest frame: {}.".format(frame.tids))
                hw_token = self.get_token(frame.tids[0])
                print(hw_token)
                context_sentence = self.get_sentence(frame.tids[0])
                print(context_sentence)
                self.lesk.simple_lesk_sskj(
                    context_sentence=context_sentence,
                    word_lemma=hw_token["lemma"]
                )
|
||||
|
||||
    def process_kmeans(self):
        """Cluster each headword's sentences with k-means; store sense ids.

        For every entry: lemmatize its sentences, guess the cluster count
        from the headword's SSKJ sense count, run k-means and upsert one
        sense id per frame into db.kmeans.
        NOTE(review): `k_kmeans` is referenced as a module below, but this
        file imports only KmeansClass from valency.k_kmeans — confirm the
        name is in scope (NameError risk).
        """
        # Convert words to lemmas, cluster using k-means.
        # Number of clusters from sskj.
        tstart = time()
        log.info("Processing senses using kmeans.")
        for k, e in self.entries.items():
            # Frame start
            ttstart = time()
            lemma = e.hw
            tokenized_sentences = []
            for frame in e.raw_frames:
                tid = frame.tids[0]
                tokenized_sentences.append(self.get_tokenized_sentence(tid))
            # join each sentence's lemmas into a space-separated string
            lemmatized_sentences = []
            for sent in tokenized_sentences:
                lemmatized = ""
                for token in sent:
                    if "lemma" in token[1]:
                        lemmatized += (token[1]["lemma"] + " ")
                lemmatized_sentences.append(lemmatized)
            lls = len(lemmatized_sentences)
            # We got the sentences
            sskj_entry = self.db.sskj.find_one(
                {"ns0:entry.ns0:form.ns0:orth": lemma})
            if sskj_entry is None:
                log.debug("headword {} has no <sense> in sskj".format(lemma))
                continue
            n_clusters = 1
            if "ns0:sense" in sskj_entry["ns0:entry"]:
                # Guess number of senses based on sskj senses.
                n_clusters = len(sskj_entry["ns0:entry"]["ns0:sense"])
            if lls >= n_clusters and n_clusters > 1:
                labels = k_kmeans.k_means(
                    sentences=lemmatized_sentences,
                    n_clusters=n_clusters
                )
                kmeans_ids = [str(x) + "-" + str(lls) for x in labels]
            elif n_clusters == 1:
                kmeans_ids = ["1-1" for x in lemmatized_sentences]
            elif lls < n_clusters:
                # Each sentence gets its own sense.
                kmeans_ids = []
                for i in range(lls):
                    kmeans_ids.append(str(i + 1) + "lt" + str(n_clusters))
            else:
                log.error("Shouldn't be here (val_struct: process_kmeans()")
                exit(1)

            # Feed sense ids of the whole frame to the database.
            for i in range(len(e.raw_frames)):
                tid = e.raw_frames[i].tids[0]
                key = {"ssj_id": tid}
                entry = {
                    "headword": lemma,
                    "ssj_id": tid,  # unique identifier
                    "sense_id": kmeans_ids[i],
                }
                self.db.kmeans.update(key, entry, upsert=True)

            log.debug("[*] kemans_ids for {} in {:.2f}s".format(
                lemma, time() - ttstart))
            # Frame end
        log.debug("process_kmeans in {:.2f}s".format(time() - tstart))
|
||||
|
||||
def get_context1(
|
||||
self, mytid, collname, radius=None, min_token_len=3, get_glosses=None
|
||||
):
|
||||
# returns {
|
||||
# "hw": headword lemma and its glosses
|
||||
# "context": a list of lemmas and their glosses around the hw that
|
||||
# have entries in collname dictionary (if get_glosses=True)
|
||||
# }
|
||||
# tstart = time()
|
||||
if get_glosses is None:
|
||||
get_glosses = False
|
||||
if radius is None:
|
||||
radius = 10000
|
||||
if collname == "slownet":
|
||||
dictionary_interface = self.slownet_interface
|
||||
elif collname == "sskj":
|
||||
dictionary_interface = self.sskj_interface
|
||||
else:
|
||||
log.error("argument error: get_context1(collname=<slownet/sskj>)")
|
||||
return []
|
||||
|
||||
sentence = self.get_tokenized_sentence(mytid)
|
||||
# return [(ssj_id, {word: _, lemma: _, msd: _}), ...]
|
||||
hw_idx = -1
|
||||
for i, e in enumerate(sentence):
|
||||
if e[0] == mytid:
|
||||
hw_idx = i
|
||||
hw_lemma = e[1]["lemma"]
|
||||
break
|
||||
|
||||
hw_glosses = dictionary_interface.sense_glosses(hw_lemma)
|
||||
if len(hw_glosses) == 0:
|
||||
log.info("hw: {} has 0 glosses".format(hw_lemma))
|
||||
return {
|
||||
"hw": None,
|
||||
"err": "headword {} has no glosses in {}".format(
|
||||
hw_lemma, collname)
|
||||
}
|
||||
|
||||
tokens_before = []
|
||||
ii = hw_idx - 1
|
||||
while(ii >= 0 and len(tokens_before) < radius):
|
||||
lemma = sentence[ii][1].get("lemma")
|
||||
if (
|
||||
lemma is not None and
|
||||
len(lemma) >= min_token_len
|
||||
):
|
||||
if get_glosses:
|
||||
glosses = dictionary_interface.sense_glosses(lemma)
|
||||
else:
|
||||
glosses = [{"def": "--none--", "gloss": "--none--"}]
|
||||
if len(glosses) > 0:
|
||||
tokens_before.insert(0, {
|
||||
"lemma": lemma,
|
||||
"glosses": glosses
|
||||
})
|
||||
ii -= 1
|
||||
|
||||
tokens_after = []
|
||||
ii = hw_idx + 1
|
||||
while(ii < len(sentence) and len(tokens_after) < radius):
|
||||
lemma = sentence[ii][1].get("lemma")
|
||||
if (
|
||||
lemma is not None and
|
||||
len(lemma) >= min_token_len
|
||||
):
|
||||
if get_glosses:
|
||||
glosses = dictionary_interface.sense_glosses(lemma)
|
||||
else:
|
||||
glosses = [{"def": "--none--", "gloss": "--none--"}]
|
||||
if len(glosses) > 0:
|
||||
tokens_after.append({
|
||||
"lemma": lemma,
|
||||
"glosses": glosses
|
||||
})
|
||||
ii += 1
|
||||
|
||||
# log.debug("context1({}): {:.2f}".format(mytid, time() - tstart))
|
||||
return {
|
||||
"hw": {"lemma": hw_lemma, "glosses": hw_glosses},
|
||||
"context": tokens_before + tokens_after
|
||||
}
|
||||
|
||||
def test_context1(self, mytid, hw=""):
|
||||
res = ""
|
||||
context = self.get_context1(
|
||||
mytid, collname="slownet", radius=2, get_glosses=True)
|
||||
if context["hw"] is None:
|
||||
return context["err"] + "<br><br>"
|
||||
res = "hw: {}<br>sentence: {}<br>".format(
|
||||
hw, self.get_sentence(mytid))
|
||||
tfigf_input = []
|
||||
glosses = [context["hw"]] + context["context"]
|
||||
for e in glosses:
|
||||
res += "--->lemma: {} ({} senses)<br>".format(
|
||||
e["lemma"], len(e["glosses"]))
|
||||
for g in e["glosses"]:
|
||||
res += "{}<br>".format(str(g))
|
||||
tfigf_input.append(" ".join(k_utils.tokenize_multiple(
|
||||
g["gloss"],
|
||||
min_token_len=3,
|
||||
stem=k_utils.stem_eng
|
||||
)))
|
||||
res += "<br><br>"
|
||||
return res
|
||||
|
||||
def calc_all_senses(self, lesk_algorithm):
|
||||
allcount = 0
|
||||
count = 0
|
||||
for k, e in self.entries.items():
|
||||
allcount += len(e.raw_frames)
|
||||
for k, e in self.entries.items():
|
||||
if k == "biti": # skip this huge bag of words
|
||||
continue
|
||||
for frame in e.raw_frames:
|
||||
count += 1
|
||||
if count % 10 == 0:
|
||||
log.info("calc_all_senses: ({}/{})".format(
|
||||
count, allcount))
|
||||
lesk_algorithm(frame.tids[0])
|
||||
return None
|
||||
|
||||
def calc_all_senses_kmeans(self, kmeans_algorithm):
|
||||
tstart = time()
|
||||
allcount = len(self.entries)
|
||||
count = 0
|
||||
avg_times = []
|
||||
for key in self.entries:
|
||||
count += 1
|
||||
if key == "biti":
|
||||
continue
|
||||
# cluster frames of each entry
|
||||
log.info("calc_all_senses_kmeans: ({}/{}) [{}]".format(
|
||||
count, allcount, key))
|
||||
kmeans_algorithm(key)
|
||||
"""
|
||||
try:
|
||||
kmeans_algorithm(key)
|
||||
except ValueError:
|
||||
continue
|
||||
"""
|
||||
avg_times.append(1.0 * (time() - tstart) / count)
|
||||
log.info("avg_time: {:.2f}s".format(avg_times[-1]))
|
||||
log.info("calc_all_senses_kmeans in {:.2f}s.".format(time() - tstart))
|
||||
return None
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
log.setLevel(logging.DEBUG)
|
||||
ch = logging.StreamHandler(sys.stdout)
|
||||
log.addHandler(ch)
|
||||
# run ssj_struct to create a ssj_test.pickle file
|
||||
with open("ssj_test.pickle", "rb") as file:
|
||||
ssj = pickle.load(file)
|
||||
|
||||
vallex = Vallex()
|
||||
vallex.read_ssj(ssj)
|
||||
|
||||
vallex.sorted_words = {}
|
||||
vallex.gen_sorted_words()
|
||||
|
||||
vallex.functors_index = {}
|
||||
vallex.gen_functors_index()
|
@ -0,0 +1,12 @@
|
||||
{
|
||||
"presets": [
|
||||
["env", {
|
||||
"modules": false,
|
||||
"targets": {
|
||||
"browsers": ["> 1%", "last 2 versions", "not ie <= 8"]
|
||||
}
|
||||
}],
|
||||
"stage-2"
|
||||
],
|
||||
"plugins": ["transform-vue-jsx", "transform-runtime"]
|
||||
}
|
@ -0,0 +1,9 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
@ -0,0 +1,14 @@
|
||||
.DS_Store
|
||||
node_modules/
|
||||
/dist/
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# Editor directories and files
|
||||
.idea
|
||||
.vscode
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
@ -0,0 +1,10 @@
|
||||
// https://github.com/michael-ciniawsky/postcss-load-config
|
||||
|
||||
module.exports = {
|
||||
"plugins": {
|
||||
"postcss-import": {},
|
||||
"postcss-url": {},
|
||||
// to edit target browsers: use "browserslist" field in package.json
|
||||
"autoprefixer": {}
|
||||
}
|
||||
}
|
@ -0,0 +1,21 @@
|
||||
# vue_frontend
|
||||
|
||||
> Frontend for Valency App.
|
||||
|
||||
## Build Setup
|
||||
|
||||
``` bash
|
||||
# install dependencies
|
||||
npm install
|
||||
|
||||
# serve with hot reload at localhost:8080
|
||||
npm run dev
|
||||
|
||||
# build for production with minification
|
||||
npm run build
|
||||
|
||||
# build for production and view the bundle analyzer report
|
||||
npm run build --report
|
||||
```
|
||||
|
||||
For a detailed explanation on how things work, check out the [guide](http://vuejs-templates.github.io/webpack/) and [docs for vue-loader](http://vuejs.github.io/vue-loader).
|
@ -0,0 +1,41 @@
|
||||
'use strict'
|
||||
require('./check-versions')()
|
||||
|
||||
process.env.NODE_ENV = 'production'
|
||||
|
||||
const ora = require('ora')
|
||||
const rm = require('rimraf')
|
||||
const path = require('path')
|
||||
const chalk = require('chalk')
|
||||
const webpack = require('webpack')
|
||||
const config = require('../config')
|
||||
const webpackConfig = require('./webpack.prod.conf')
|
||||
|
||||
const spinner = ora('building for production...')
|
||||
spinner.start()
|
||||
|
||||
rm(path.join(config.build.assetsRoot, config.build.assetsSubDirectory), err => {
|
||||
if (err) throw err
|
||||
webpack(webpackConfig, (err, stats) => {
|
||||
spinner.stop()
|
||||
if (err) throw err
|
||||
process.stdout.write(stats.toString({
|
||||
colors: true,
|
||||
modules: false,
|
||||
children: false, // If you are using ts-loader, setting this to true will make TypeScript errors show up during build.
|
||||
chunks: false,
|
||||
chunkModules: false
|
||||
}) + '\n\n')
|
||||
|
||||
if (stats.hasErrors()) {
|
||||
console.log(chalk.red(' Build failed with errors.\n'))
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
console.log(chalk.cyan(' Build complete.\n'))
|
||||
console.log(chalk.yellow(
|
||||
' Tip: built files are meant to be served over an HTTP server.\n' +
|
||||
' Opening index.html over file:// won\'t work.\n'
|
||||
))
|
||||
})
|
||||
})
|
@ -0,0 +1,54 @@
|
||||
'use strict'
|
||||
const chalk = require('chalk')
|
||||
const semver = require('semver')
|
||||
const packageConfig = require('../package.json')
|
||||
const shell = require('shelljs')
|
||||
|
||||
function exec (cmd) {
|
||||
return require('child_process').execSync(cmd).toString().trim()
|
||||
}
|
||||
|
||||
const versionRequirements = [
|
||||
{
|
||||
name: 'node',
|
||||
currentVersion: semver.clean(process.version),
|
||||
versionRequirement: packageConfig.engines.node
|
||||
}
|
||||
]
|
||||
|
||||
if (shell.which('npm')) {
|
||||
versionRequirements.push({
|
||||
name: 'npm',
|
||||
currentVersion: exec('npm --version'),
|
||||
versionRequirement: packageConfig.engines.npm
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = function () {
|
||||
const warnings = []
|
||||
|
||||
for (let i = 0; i < versionRequirements.length; i++) {
|
||||
const mod = versionRequirements[i]
|
||||
|
||||
if (!semver.satisfies(mod.currentVersion, mod.versionRequirement)) {
|
||||
warnings.push(mod.name + ': ' +
|
||||
chalk.red(mod.currentVersion) + ' should be ' +
|
||||
chalk.green(mod.versionRequirement)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (warnings.length) {
|
||||
console.log('')
|
||||
console.log(chalk.yellow('To use this template, you must update following to modules:'))
|
||||
console.log()
|
||||
|
||||
for (let i = 0; i < warnings.length; i++) {
|
||||
const warning = warnings[i]
|
||||
console.log(' ' + warning)
|
||||
}
|
||||
|
||||
console.log()
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
After Width: | Height: | Size: 6.7 KiB |
@ -0,0 +1,101 @@
|
||||
'use strict'
|
||||
const path = require('path')
|
||||
const config = require('../config')
|
||||
const ExtractTextPlugin = require('extract-text-webpack-plugin')
|
||||
const packageConfig = require('../package.json')
|
||||
|
||||
exports.assetsPath = function (_path) {
|
||||
const assetsSubDirectory = process.env.NODE_ENV === 'production'
|
||||
? config.build.assetsSubDirectory
|
||||
: config.dev.assetsSubDirectory
|
||||
|
||||
return path.posix.join(assetsSubDirectory, _path)
|
||||
}
|
||||
|
||||
exports.cssLoaders = function (options) {
|
||||
options = options || {}
|
||||
|
||||
const cssLoader = {
|
||||
loader: 'css-loader',
|
||||
options: {
|
||||
sourceMap: options.sourceMap
|
||||
}
|
||||
}
|
||||
|
||||
const postcssLoader = {
|
||||
loader: 'postcss-loader',
|
||||
options: {
|
||||
sourceMap: options.sourceMap
|
||||
}
|
||||
}
|
||||
|
||||
// generate loader string to be used with extract text plugin
|
||||
function generateLoaders (loader, loaderOptions) {
|
||||
const loaders = options.usePostCSS ? [cssLoader, postcssLoader] : [cssLoader]
|
||||
|
||||
if (loader) {
|
||||
loaders.push({
|
||||
loader: loader + '-loader',
|
||||
options: Object.assign({}, loaderOptions, {
|
||||
sourceMap: options.sourceMap
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// Extract CSS when that option is specified
|
||||
// (which is the case during production build)
|
||||
if (options.extract) {
|
||||
return ExtractTextPlugin.extract({
|
||||
use: loaders,
|
||||
fallback: 'vue-style-loader'
|
||||
})
|
||||
} else {
|
||||
return ['vue-style-loader'].concat(loaders)
|
||||
}
|
||||
}
|
||||
|
||||
// https://vue-loader.vuejs.org/en/configurations/extract-css.html
|
||||
return {
|
||||
css: generateLoaders(),
|
||||
postcss: generateLoaders(),
|
||||
less: generateLoaders('less'),
|
||||
sass: generateLoaders('sass', { indentedSyntax: true }),
|
||||
scss: generateLoaders('sass'),
|
||||
stylus: generateLoaders('stylus'),
|
||||
styl: generateLoaders('stylus')
|
||||
}
|
||||
}
|
||||
|
||||
// Generate loaders for standalone style files (outside of .vue)
|
||||
exports.styleLoaders = function (options) {
|
||||
const output = []
|
||||
const loaders = exports.cssLoaders(options)
|
||||
|
||||
for (const extension in loaders) {
|
||||
const loader = loaders[extension]
|
||||
output.push({
|
||||
test: new RegExp('\\.' + extension + '$'),
|
||||
use: loader
|
||||
})
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
||||
|
||||
exports.createNotifierCallback = () => {
|
||||
const notifier = require('node-notifier')
|
||||
|
||||
return (severity, errors) => {
|
||||
if (severity !== 'error') return
|
||||
|
||||
const error = errors[0]
|
||||
const filename = error.file && error.file.split('!').pop()
|
||||
|
||||
notifier.notify({
|
||||
title: packageConfig.name,
|
||||
message: severity + ': ' + error.name,
|
||||
subtitle: filename || '',
|
||||
icon: path.join(__dirname, 'logo.png')
|
||||
})
|
||||
}
|
||||
}
|
@ -0,0 +1,22 @@
|
||||
'use strict'
|
||||
const utils = require('./utils')
|
||||
const config = require('../config')
|
||||
const isProduction = process.env.NODE_ENV === 'production'
|
||||
const sourceMapEnabled = isProduction
|
||||
? config.build.productionSourceMap
|
||||
: config.dev.cssSourceMap
|
||||
|
||||
module.exports = {
|
||||
loaders: utils.cssLoaders({
|
||||
sourceMap: sourceMapEnabled,
|
||||
extract: isProduction
|
||||
}),
|
||||
cssSourceMap: sourceMapEnabled,
|
||||
cacheBusting: config.dev.cacheBusting,
|
||||
transformToRequire: {
|
||||
video: ['src', 'poster'],
|
||||
source: 'src',
|
||||
img: 'src',
|
||||
image: 'xlink:href'
|
||||
}
|
||||
}
|
@ -0,0 +1,82 @@
|
||||
'use strict'
|
||||
const path = require('path')
|
||||
const utils = require('./utils')
|
||||
const config = require('../config')
|
||||
const vueLoaderConfig = require('./vue-loader.conf')
|
||||
|
||||
function resolve (dir) {
|
||||
return path.join(__dirname, '..', dir)
|
||||
}
|
||||
|
||||
|
||||
|
||||
module.exports = {
|
||||
context: path.resolve(__dirname, '../'),
|
||||
entry: {
|
||||
app: './src/main.js'
|
||||
},
|
||||
output: {
|
||||
path: config.build.assetsRoot,
|
||||
filename: '[name].js',
|
||||
publicPath: process.env.NODE_ENV === 'production'
|
||||
? config.build.assetsPublicPath
|
||||
: config.dev.assetsPublicPath
|
||||
},
|
||||
resolve: {
|
||||
extensions: ['.js', '.vue', '.json'],
|
||||
alias: {
|
||||
'vue$': 'vue/dist/vue.esm.js',
|
||||
'@': resolve('src'),
|
||||
}
|
||||
},
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.vue$/,
|
||||
loader: 'vue-loader',
|
||||
options: vueLoaderConfig
|
||||
},
|
||||
{
|
||||
test: /\.js$/,
|
||||
loader: 'babel-loader',
|
||||
include: [resolve('src'), resolve('test'), resolve('node_modules/webpack-dev-server/client')]
|
||||
},
|
||||
{
|
||||
test: /\.(png|jpe?g|gif|svg)(\?.*)?$/,
|
||||
loader: 'url-loader',
|
||||
options: {
|
||||
limit: 10000,
|
||||
name: utils.assetsPath('img/[name].[hash:7].[ext]')
|
||||
}
|
||||
},
|
||||
{
|
||||
test: /\.(mp4|webm|ogg|mp3|wav|flac|aac)(\?.*)?$/,
|
||||
loader: 'url-loader',
|
||||
options: {
|
||||
limit: 10000,
|
||||
name: utils.assetsPath('media/[name].[hash:7].[ext]')
|
||||
}
|
||||
},
|
||||
{
|
||||
test: /\.(woff2?|eot|ttf|otf)(\?.*)?$/,
|
||||
loader: 'url-loader',
|
||||
options: {
|
||||
limit: 10000,
|
||||
name: utils.assetsPath('fonts/[name].[hash:7].[ext]')
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
node: {
|
||||
// prevent webpack from injecting useless setImmediate polyfill because Vue
|
||||
// source contains it (although only uses it if it's native).
|
||||
setImmediate: false,
|
||||
// prevent webpack from injecting mocks to Node native modules
|
||||
// that does not make sense for the client
|
||||
dgram: 'empty',
|
||||
fs: 'empty',
|
||||
net: 'empty',
|
||||
tls: 'empty',
|
||||
child_process: 'empty'
|
||||
}
|
||||
}
|
@ -0,0 +1,95 @@
|
||||
'use strict'
|
||||
const utils = require('./utils')
|
||||
const webpack = require('webpack')
|
||||
const config = require('../config')
|
||||
const merge = require('webpack-merge')
|
||||
const path = require('path')
|
||||
const baseWebpackConfig = require('./webpack.base.conf')
|
||||
const CopyWebpackPlugin = require('copy-webpack-plugin')
|
||||
const HtmlWebpackPlugin = require('html-webpack-plugin')
|
||||
const FriendlyErrorsPlugin = require('friendly-errors-webpack-plugin')
|
||||
const portfinder = require('portfinder')
|
||||
|
||||
const HOST = process.env.HOST
|
||||
const PORT = process.env.PORT && Number(process.env.PORT)
|
||||
|
||||
const devWebpackConfig = merge(baseWebpackConfig, {
|
||||
module: {
|
||||
rules: utils.styleLoaders({ sourceMap: config.dev.cssSourceMap, usePostCSS: true })
|
||||
},
|
||||
// cheap-module-eval-source-map is faster for development
|
||||
devtool: config.dev.devtool,
|
||||
|
||||
// these devServer options should be customized in /config/index.js
|
||||
devServer: {
|
||||
clientLogLevel: 'warning',
|
||||
historyApiFallback: {
|
||||
rewrites: [
|
||||
{ from: /.*/, to: path.posix.join(config.dev.assetsPublicPath, 'index.html') },
|
||||
],
|
||||
},
|
||||
hot: true,
|
||||
contentBase: false, // since we use CopyWebpackPlugin.
|
||||
compress: true,
|
||||
host: HOST || config.dev.host,
|
||||
port: PORT || config.dev.port,
|
||||
open: config.dev.autoOpenBrowser,
|
||||
overlay: config.dev.errorOverlay
|
||||
? { warnings: false, errors: true }
|
||||
: false,
|
||||
publicPath: config.dev.assetsPublicPath,
|
||||
proxy: config.dev.proxyTable,
|
||||
quiet: true, // necessary for FriendlyErrorsPlugin
|
||||
watchOptions: {
|
||||
poll: config.dev.poll,
|
||||
}
|
||||
},
|
||||
plugins: [
|
||||
new webpack.DefinePlugin({
|
||||
'process.env': require('../config/dev.env')
|
||||
}),
|
||||
new webpack.HotModuleReplacementPlugin(),
|
||||
new webpack.NamedModulesPlugin(), // HMR shows correct file names in console on update.
|
||||
new webpack.NoEmitOnErrorsPlugin(),
|
||||
// https://github.com/ampedandwired/html-webpack-plugin
|
||||
new HtmlWebpackPlugin({
|
||||
filename: 'index.html',
|
||||
template: 'index.html',
|
||||
inject: true
|
||||
}),
|
||||
// copy custom static assets
|
||||
new CopyWebpackPlugin([
|
||||
{
|
||||
from: path.resolve(__dirname, '../static'),
|
||||
to: config.dev.assetsSubDirectory,
|
||||
ignore: ['.*']
|
||||
}
|
||||
])
|
||||
]
|
||||
})
|
||||
|
||||
module.exports = new Promise((resolve, reject) => {
|
||||
portfinder.basePort = process.env.PORT || config.dev.port
|
||||
portfinder.getPort((err, port) => {
|
||||
if (err) {
|
||||
reject(err)
|
||||
} else {
|
||||
// publish the new Port, necessary for e2e tests
|
||||
process.env.PORT = port
|
||||
// add port to devServer config
|
||||
devWebpackConfig.devServer.port = port
|
||||
|
||||
// Add FriendlyErrorsPlugin
|
||||
devWebpackConfig.plugins.push(new FriendlyErrorsPlugin({
|
||||
compilationSuccessInfo: {
|
||||
messages: [`Your application is running here: http://${devWebpackConfig.devServer.host}:${port}`],
|
||||
},
|
||||
onErrors: config.dev.notifyOnErrors
|
||||
? utils.createNotifierCallback()
|
||||
: undefined
|
||||
}))
|
||||
|
||||
resolve(devWebpackConfig)
|
||||
}
|
||||
})
|
||||
})
|
@ -0,0 +1,145 @@
|
||||
'use strict'
|
||||
const path = require('path')
|
||||
const utils = require('./utils')
|
||||
const webpack = require('webpack')
|
||||
const config = require('../config')
|
||||
const merge = require('webpack-merge')
|
||||
const baseWebpackConfig = require('./webpack.base.conf')
|
||||
const CopyWebpackPlugin = require('copy-webpack-plugin')
|
||||
const HtmlWebpackPlugin = require('html-webpack-plugin')
|
||||
const ExtractTextPlugin = require('extract-text-webpack-plugin')
|
||||
const OptimizeCSSPlugin = require('optimize-css-assets-webpack-plugin')
|
||||
const UglifyJsPlugin = require('uglifyjs-webpack-plugin')
|
||||
|
||||
const env = require('../config/prod.env')
|
||||
|
||||
const webpackConfig = merge(baseWebpackConfig, {
|
||||
module: {
|
||||
rules: utils.styleLoaders({
|
||||
sourceMap: config.build.productionSourceMap,
|
||||
extract: true,
|
||||
usePostCSS: true
|
||||
})
|
||||
},
|
||||
devtool: config.build.productionSourceMap ? config.build.devtool : false,
|
||||
output: {
|
||||
path: config.build.assetsRoot,
|
||||
filename: utils.assetsPath('js/[name].[chunkhash].js'),
|
||||
chunkFilename: utils.assetsPath('js/[id].[chunkhash].js')
|
||||
},
|
||||
plugins: [
|
||||
// http://vuejs.github.io/vue-loader/en/workflow/production.html
|
||||
new webpack.DefinePlugin({
|
||||
'process.env': env
|
||||
}),
|
||||
new UglifyJsPlugin({
|
||||
uglifyOptions: {
|
||||
compress: {
|
||||
warnings: false
|
||||
}
|
||||
},
|
||||
sourceMap: config.build.productionSourceMap,
|
||||
parallel: true
|
||||
}),
|
||||
// extract css into its own file
|
||||
new ExtractTextPlugin({
|
||||
filename: utils.assetsPath('css/[name].[contenthash].css'),
|
||||
// Setting the following option to `false` will not extract CSS from codesplit chunks.
|
||||
// Their CSS will instead be inserted dynamically with style-loader when the codesplit chunk has been loaded by webpack.
|
||||
// It's currently set to `true` because we are seeing that sourcemaps are included in the codesplit bundle as well when it's `false`,
|
||||
// increasing file size: https://github.com/vuejs-templates/webpack/issues/1110
|
||||
allChunks: true,
|
||||
}),
|
||||
// Compress extracted CSS. We are using this plugin so that possible
|
||||
// duplicated CSS from different components can be deduped.
|
||||
new OptimizeCSSPlugin({
|
||||
cssProcessorOptions: config.build.productionSourceMap
|
||||
? { safe: true, map: { inline: false } }
|
||||
: { safe: true }
|
||||
}),
|
||||
// generate dist index.html with correct asset hash for caching.
|
||||
// you can customize output by editing /index.html
|
||||
// see https://github.com/ampedandwired/html-webpack-plugin
|
||||
new HtmlWebpackPlugin({
|
||||
filename: config.build.index,
|
||||
template: 'index.html',
|
||||
inject: true,
|
||||
minify: {
|
||||
removeComments: true,
|
||||
collapseWhitespace: true,
|
||||
removeAttributeQuotes: true
|
||||
// more options:
|
||||
// https://github.com/kangax/html-minifier#options-quick-reference
|
||||
},
|
||||
// necessary to consistently work with multiple chunks via CommonsChunkPlugin
|
||||
chunksSortMode: 'dependency'
|
||||
}),
|
||||
// keep module.id stable when vendor modules does not change
|
||||
new webpack.HashedModuleIdsPlugin(),
|
||||
// enable scope hoisting
|
||||
new webpack.optimize.ModuleConcatenationPlugin(),
|
||||
// split vendor js into its own file
|
||||
new webpack.optimize.CommonsChunkPlugin({
|
||||
name: 'vendor',
|
||||
minChunks (module) {
|
||||
// any required modules inside node_modules are extracted to vendor
|
||||
return (
|
||||
module.resource &&
|
||||
/\.js$/.test(module.resource) &&
|
||||
module.resource.indexOf(
|
||||
path.join(__dirname, '../node_modules')
|
||||
) === 0
|
||||
)
|
||||
}
|
||||
}),
|
||||
// extract webpack runtime and module manifest to its own file in order to
|
||||
// prevent vendor hash from being updated whenever app bundle is updated
|
||||
new webpack.optimize.CommonsChunkPlugin({
|
||||
name: 'manifest',
|
||||
minChunks: Infinity
|
||||
}),
|
||||
// This instance extracts shared chunks from code splitted chunks and bundles them
|
||||
// in a separate chunk, similar to the vendor chunk
|
||||
// see: https://webpack.js.org/plugins/commons-chunk-plugin/#extra-async-commons-chunk
|
||||
new webpack.optimize.CommonsChunkPlugin({
|
||||
name: 'app',
|
||||
async: 'vendor-async',
|
||||
children: true,
|
||||
minChunks: 3
|
||||
}),
|
||||
|
||||
// copy custom static assets
|
||||
new CopyWebpackPlugin([
|
||||
{
|
||||
from: path.resolve(__dirname, '../static'),
|
||||
to: config.build.assetsSubDirectory,
|
||||
ignore: ['.*']
|
||||
}
|
||||
])
|
||||
]
|
||||
})
|
||||
|
||||
if (config.build.productionGzip) {
|
||||
const CompressionWebpackPlugin = require('compression-webpack-plugin')
|
||||
|
||||
webpackConfig.plugins.push(
|
||||
new CompressionWebpackPlugin({
|
||||
asset: '[path].gz[query]',
|
||||
algorithm: 'gzip',
|
||||
test: new RegExp(
|
||||
'\\.(' +
|
||||
config.build.productionGzipExtensions.join('|') +
|
||||
')$'
|
||||
),
|
||||
threshold: 10240,
|
||||
minRatio: 0.8
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
if (config.build.bundleAnalyzerReport) {
|
||||
const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin
|
||||
webpackConfig.plugins.push(new BundleAnalyzerPlugin())
|
||||
}
|
||||
|
||||
module.exports = webpackConfig
|
@ -0,0 +1,3 @@
|
||||
{
|
||||
"api_addr": "http://localhost:5004"
|
||||
}
|
@ -0,0 +1,3 @@
|
||||
{
|
||||
"api_addr": "http://localhost:5004"
|
||||
}
|
@ -0,0 +1,3 @@
|
||||
{
|
||||
"api_addr": "http://193.2.76.103:5004"
|
||||
}
|
@ -0,0 +1,7 @@
|
||||
'use strict'
|
||||
const merge = require('webpack-merge')
|
||||
const prodEnv = require('./prod.env')
|
||||
|
||||
module.exports = merge(prodEnv, {
|
||||
NODE_ENV: '"development"'
|
||||
})
|
@ -0,0 +1,69 @@
|
||||
'use strict'
|
||||
// Template version: 1.3.1
|
||||
// see http://vuejs-templates.github.io/webpack for documentation.
|
||||
|
||||
const path = require('path')
|
||||
|
||||
module.exports = {
|
||||
dev: {
|
||||
|
||||
// Paths
|
||||
assetsSubDirectory: 'static',
|
||||
assetsPublicPath: '/',
|
||||
proxyTable: {},
|
||||
|
||||
// Various Dev Server settings
|
||||
host: 'localhost', // can be overwritten by process.env.HOST
|
||||
port: 8080, // can be overwritten by process.env.PORT, if port is in use, a free one will be determined
|
||||
autoOpenBrowser: false,
|
||||
errorOverlay: true,
|
||||
notifyOnErrors: true,
|
||||
poll: false, // https://webpack.js.org/configuration/dev-server/#devserver-watchoptions-
|
||||
|
||||
|
||||
/**
|
||||
* Source Maps
|
||||
*/
|
||||
|
||||
// https://webpack.js.org/configuration/devtool/#development
|
||||
devtool: 'cheap-module-eval-source-map',
|
||||
|
||||
// If you have problems debugging vue-files in devtools,
|
||||
// set this to false - it *may* help
|
||||
// https://vue-loader.vuejs.org/en/options.html#cachebusting
|
||||
cacheBusting: true,
|
||||
|
||||
cssSourceMap: true
|
||||
},
|
||||
|
||||
build: {
|
||||
// Template for index.html
|
||||
index: path.resolve(__dirname, '../dist/index.html'),
|
||||
|
||||
// Paths
|
||||
assetsRoot: path.resolve(__dirname, '../dist'),
|
||||
assetsSubDirectory: 'static',
|
||||
assetsPublicPath: '/',
|
||||
|
||||
/**
|
||||
* Source Maps
|
||||
*/
|
||||
|
||||
productionSourceMap: true,
|
||||
// https://webpack.js.org/configuration/devtool/#production
|
||||
devtool: '#source-map',
|
||||
|
||||
// Gzip off by default as many popular static hosts such as
|
||||
// Surge or Netlify already gzip all static assets for you.
|
||||
// Before setting to `true`, make sure to:
|
||||
// npm install --save-dev compression-webpack-plugin
|
||||
productionGzip: false,
|
||||
productionGzipExtensions: ['js', 'css'],
|
||||
|
||||
// Run the build command with an extra argument to
|
||||
// View the bundle analyzer report after build finishes:
|
||||
// `npm run build --report`
|
||||
// Set to `true` or `false` to always turn it on or off
|
||||
bundleAnalyzerReport: process.env.npm_config_report
|
||||
}
|
||||
}
|
@ -0,0 +1,4 @@
|
||||
'use strict'
|
||||
module.exports = {
|
||||
NODE_ENV: '"production"'
|
||||
}
|
@ -0,0 +1,12 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1.0">
|
||||
<title>vue_frontend</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<!-- built files will be auto injected -->
|
||||
</body>
|
||||
</html>
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,67 @@
|
||||
{
|
||||
"name": "vue_frontend",
|
||||
"version": "1.0.0",
|
||||
"description": "Frontend for Valency App.",
|
||||
"author": "voje <kristjan.voje@gmail.com>",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "webpack-dev-server --inline --progress --config build/webpack.dev.conf.js",
|
||||
"start": "npm run dev",
|
||||
"build": "node build/build.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"axios": "^0.18.0",
|
||||
"bootstrap-vue": "^2.0.0-rc.11",
|
||||
"sha256": "^0.2.0",
|
||||
"vue": "^2.5.2",
|
||||
"vue-cookies": "^1.5.6",
|
||||
"vue-router": "^3.0.1",
|
||||
"vue-spinner": "^1.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"autoprefixer": "^7.1.2",
|
||||
"babel-core": "^6.22.1",
|
||||
"babel-helper-vue-jsx-merge-props": "^2.0.3",
|
||||
"babel-loader": "^7.1.1",
|
||||
"babel-plugin-syntax-jsx": "^6.18.0",
|
||||
"babel-plugin-transform-runtime": "^6.22.0",
|
||||
"babel-plugin-transform-vue-jsx": "^3.5.0",
|
||||
"babel-preset-env": "^1.3.2",
|
||||
"babel-preset-stage-2": "^6.22.0",
|
||||
"chalk": "^2.0.1",
|
||||
"copy-webpack-plugin": "^4.0.1",
|
||||
"css-loader": "^0.28.0",
|
||||
"extract-text-webpack-plugin": "^3.0.0",
|
||||
"file-loader": "^1.1.4",
|
||||
"friendly-errors-webpack-plugin": "^1.6.1",
|
||||
"html-webpack-plugin": "^2.30.1",
|
||||
"node-notifier": "^5.1.2",
|
||||
"optimize-css-assets-webpack-plugin": "^3.2.0",
|
||||
"ora": "^1.2.0",
|
||||
"portfinder": "^1.0.13",
|
||||
"postcss-import": "^11.0.0",
|
||||
"postcss-loader": "^2.0.8",
|
||||
"postcss-url": "^7.2.1",
|
||||
"rimraf": "^2.6.0",
|
||||
"semver": "^5.3.0",
|
||||
"shelljs": "^0.7.6",
|
||||
"uglifyjs-webpack-plugin": "^1.1.1",
|
||||
"url-loader": "^0.5.8",
|
||||
"vue-loader": "^13.3.0",
|
||||
"vue-style-loader": "^3.0.1",
|
||||
"vue-template-compiler": "^2.5.2",
|
||||
"webpack": "^3.6.0",
|
||||
"webpack-bundle-analyzer": "^2.9.0",
|
||||
"webpack-dev-server": "^2.9.1",
|
||||
"webpack-merge": "^4.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6.0.0",
|
||||
"npm": ">= 3.0.0"
|
||||
},
|
||||
"browserslist": [
|
||||
"> 1%",
|
||||
"last 2 versions",
|
||||
"not ie <= 8"
|
||||
]
|
||||
}
|
@ -0,0 +1,9 @@
|
||||
<template>
|
||||
<router-view/>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
|
||||
name: 'App',
|
||||
}
|
||||
</script>
|
@ -0,0 +1,206 @@
|
||||
<template>
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<div class="col-sm-12">
|
||||
<p class="pb-0 mb-0">Urejanje pomenov za besedo: <b>{{ hw }}</b>.</p>
|
||||
<p><small>
|
||||
Z miško kliknite na poved, nato kliknite na pomen, ki ga želite dodeliti povedi. Par poved‒pomen bo obarvan z modro. Pare lahko shranite s klikom na gumb "Shrani". Možno je dodajanje poljubnih pomenov.
|
||||
</small></p>
|
||||
<button v-on:click="cancel_all">Prekliči</button>
|
||||
<button v-on:click="save_all">Shrani</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
|
||||
<!-- left column: sentences -->
|
||||
<div class="my-sent col-sm-6">
|
||||
<div
|
||||
v-for="(sentence, ssj_id) in sentences"
|
||||
v-on:click="pick_ssj_id(ssj_id)"
|
||||
class="border rounded my-sentences my-pointer"
|
||||
v-bind:class="{
|
||||
'border-primary': ssj_id === picked_ssj_id
|
||||
}"
|
||||
>
|
||||
<div>
|
||||
<span
|
||||
v-for="(word, index) in sentence.words"
|
||||
v-bind:class="{
|
||||
'text-primary': index === parseInt(sentence.hw_idx)
|
||||
}"
|
||||
>
|
||||
<span v-if="$root.mkspace(index, word)"> </span>{{ word }}
|
||||
</span>
|
||||
</div>
|
||||
<hr>
|
||||
<div class="col-sm-12"><small>
|
||||
<div v-if="ssj_id in local_sense_map">
|
||||
<Sense v-bind:sense="local_sense_map[ssj_id].sense"></Sense>
|
||||
</div>
|
||||
<div v-else>
|
||||
<Sense v-bind:sense="undefined"></Sense>
|
||||
</div>
|
||||
</small></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- right column: senses -->
|
||||
<div class="col-sm-6 border rounded my-div-scroll sticky-top">
|
||||
<div
|
||||
v-for="sense in local_senses"
|
||||
class="my-pointer"
|
||||
v-on:click="picked_sense_id = sense.sense_id"
|
||||
v-bind:class="{
|
||||
'text-primary': sense.sense_id === picked_sense_id
|
||||
}"
|
||||
>
|
||||
<Sense v-bind:sense="sense"></Sense>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-sm-12">
|
||||
<textarea class="my-textarea" v-model="new_sense_desc"></textarea>
|
||||
</div>
|
||||
<div class="col-sm-12">
|
||||
<button v-on:click="new_sense">Dodaj pomen</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import Sense from "./Sense"
|
||||
export default {
  name: "EditSenses",
  // hw: current headword; sentences: {ssj_id: {hw_idx, words}};
  // sens: {senses: [...], sense_map: {ssj_id: {sense_id, ...}}} from the parent.
  props: ["hw", "sentences", "sens"],
  components: {
    Sense: Sense
  },
  data () { return {
    picked_ssj_id: null,    // currently selected sentence
    picked_sense_id: null,  // currently selected sense
    local_senses: [],       // make changes on a local copy
    local_sense_map: {},    // make changes on a local copy
    new_sense_desc: "",     // textarea model for a new sense description
    new_senses: [],         // only send changes to server
    delta_sense_map: {},    // only send changes to server
  }},
  created: function() {
    // Work on a deep copy of sense_map so "Prekliči" can discard edits.
    this.local_senses = this.sens.senses
    var json = JSON.stringify(this.sens.sense_map)
    this.local_sense_map = JSON.parse(json)
    // Re-attach full sense objects (the JSON round-trip kept only sense_id).
    for (var ssj_id in this.local_sense_map) {
      this.local_sense_map[ssj_id].sense = this.sense_id_to_sense(
        this.local_sense_map[ssj_id].sense_id)
    }
  },
  watch: {
    // Whenever either half of the pair changes, try to link them.
    picked_ssj_id: function() {
      this.new_link()
    },
    picked_sense_id: function() {
      this.new_link()
    }
  },
  methods: {
    // Select a sentence; restore its current sense selection if one is mapped.
    pick_ssj_id: function(ssj_id) {
      this.picked_ssj_id = ssj_id
      if (ssj_id in this.local_sense_map) {
        this.picked_sense_id = this.local_sense_map[ssj_id].sense_id
      }
    },
    // Create/update the sentence->sense pair once both sides are picked.
    // NOTE(review): assigning a brand-new key on local_sense_map is not
    // reactive in Vue 2 (this.$set would be) — confirm the UI refreshes.
    new_link: function() {
      if (this.picked_ssj_id === null ||
        this.picked_sense_id === null) { return }
      this.local_sense_map[this.picked_ssj_id] = {
        sense_id: this.picked_sense_id,
        sense: this.sense_id_to_sense(this.picked_sense_id)
      }
      this.delta_sense_map[this.picked_ssj_id] = { sense_id: this.picked_sense_id }
    },
    // Add a user-defined sense from the textarea (no-op on empty input).
    // (The original declared an unused `sense_id` parameter; dropped.)
    new_sense: function() {
      if (this.new_sense_desc === "") {
        return
      }
      var new_sense = {
        hw: this.hw,
        author: this.$root.store.username,
        desc: this.new_sense_desc,
        // temporary client-side id; the server assigns the real one
        sense_id: "tmp_sense_id" + (new Date().getTime()),
      }
      this.local_senses.push(new_sense)
      this.new_senses.push(new_sense)
      this.new_sense_desc = ""
    },
    // Resolve a sense_id to the full sense object (undefined when unknown).
    sense_id_to_sense: function(sense_id) {
      for (var i=0; i<this.local_senses.length; i++) {
        if (this.local_senses[i].sense_id === sense_id) {
          return this.local_senses[i]
        }
      }
      return undefined
    },
    // Discard all local edits and return to normal display mode.
    cancel_all: function() {
      this.$parent.state = "normal"
    },
    // Send only the accumulated changes to the server, then exit edit mode.
    save_all: function() {
      const data = {
        token: this.$root.store.token,
        hw: this.hw,
        sense_map: this.delta_sense_map,
        new_senses: this.new_senses,
      }
      var component = this
      function exit_edit(component) {
        component.$parent.state = "normal"
        component.$parent.request_reload = true
      }

      // don't update if there are no changes
      if (
        Object.keys(data.sense_map).length === 0 &&
        data.new_senses.length === 0
      ) { exit_edit(component); return }

      // exit after update
      this.$http.post(
        this.$root.store.api_addr + "/api/senses/update",
        data,
        { headers: {
          'Content-type': 'application/json',
        }}
      ).then(function () {
        exit_edit(component)
      })
    },
  }
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.my-div-scroll {
|
||||
margin-top: 5px;
|
||||
height: 90vh;
|
||||
overflow-y: auto;
|
||||
padding-top: 5px;
|
||||
}
|
||||
.my-pointer {
|
||||
cursor: pointer;
|
||||
}
|
||||
.my-textarea {
|
||||
width: 100%;
|
||||
}
|
||||
.my-sentences {
|
||||
margin: 5px 0px 20px 0px;
|
||||
padding: 5px;
|
||||
}
|
||||
.my-sent {
|
||||
word-wrap: break-word;
|
||||
}
|
||||
.my-sent span {
|
||||
display: inline-block;
|
||||
}
|
||||
</style>
|
@ -0,0 +1,208 @@
|
||||
<template>
|
||||
<!-- clicking on empty space clears highlights -->
|
||||
<div v-on:click="clearOnClick" class="container-fluid">
|
||||
<hr>
|
||||
<div class="row">
|
||||
<div class="col-sm-7">
|
||||
<div class="row">
|
||||
<div class="col-sm-12">
|
||||
št. povedi: {{ frameData.sentences.length }}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!--frame slots-->
|
||||
<div class="row my-frames">
|
||||
<div class="col-sm-12">
|
||||
<span v-for="(slot, key) in frameData.slots">
|
||||
<span
|
||||
v-bind:class="{
|
||||
'my-pointer text-danger': hasHoverTid(idx=key),
|
||||
'my-underline text-danger': hasSelTid(idx=key)
|
||||
}"
|
||||
v-on:mouseover="setHid(idx=key)"
|
||||
v-on:mouseleave="setHid()"
|
||||
v-on:click="setSid(idx=key)"
|
||||
>{{ slot.functor }}</span>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!--sense information-->
|
||||
<div v-if="$root.store.radio === 'three'" class="col-sm-5">
|
||||
<Sense v-bind:sense="getSense()"></Sense>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!--sentences-->
|
||||
<div class="row">
|
||||
<!-- fmode: prikaz->udelezenske vloge (drugacno razvrscanje povedi) -->
|
||||
<div v-if="fmode" class="col-sm-12">
|
||||
<div v-for="hw in getAggrHws()">
|
||||
<blockquote v-for="sentence in getAggrSent(hw)">
|
||||
<span class="text-secondary"> {{ hw }}</span><br>
|
||||
<span
|
||||
v-for="(token, index) in sentence"
|
||||
v-bind:class="{
|
||||
'my-pointer text-danger': hasHoverTid(idx=null, tid=token[0]), 'my-underline text-danger': hasSelTid(idx=null, tid=token[0]),
|
||||
'text-primary': isHw(token[0]),
|
||||
}"
|
||||
v-on:mouseover="setHid(idx=null, tid=token[0])"
|
||||
v-on:mouseleave="setHid()"
|
||||
v-on:click="setSid(idx=null, tid=token[0])"
|
||||
v-bind:title="token[1].msd"
|
||||
><span v-if="$root.mkspace(index, token[1].word)"> </span>{{ token[1].word }}</span>
|
||||
</blockquote>
|
||||
</div>
|
||||
</div>
|
||||
<div v-else class="col-sm-12">
|
||||
<blockquote v-for="sentence in frameData.sentences">
|
||||
<span
|
||||
v-for="(token, index) in sentence"
|
||||
v-bind:class="{
|
||||
'my-pointer text-danger': hasHoverTid(idx=null, tid=token[0]), 'my-underline text-danger': hasSelTid(idx=null, tid=token[0]),
|
||||
'text-primary': isHw(token[0]),
|
||||
}"
|
||||
v-on:mouseover="setHid(idx=null, tid=token[0])"
|
||||
v-on:mouseleave="setHid()"
|
||||
v-on:click="setSid(idx=null, tid=token[0])"
|
||||
v-bind:title="token[1].msd"
|
||||
><span v-if="$root.mkspace(index, token[1].word)"> </span>{{ token[1].word }}</span>
|
||||
</blockquote>
|
||||
</div>
|
||||
</div>
|
||||
<br>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import Sense from "./Sense"
|
||||
export default {
  name: "Frame",
  props: {
    frameData: {},   // frame: {slots, sentences, tids, sense_info, aggr_sent}
    sensData: {},    // senses for the displayed headword
    fmode: {         // functor mode: sentences grouped by headword
      default: false,
      type: Boolean,
    },
  },
  data() { return {
    hid: null, // hover functor index
    sid: null, // select functor index (click)
  }},
  components: {
    Sense: Sense
  },
  watch: {
    // Clear highlight/selection state when a different frame is displayed.
    // (The original joined the two assignments with a comma operator.)
    frameData: function () {
      this.hid = null
      this.sid = null
    }
  },
  methods: {
    // Index of the slot whose tids contain the given token id, or -1.
    // Shared by setHid/setSid, which previously duplicated this loop.
    slot_idx_of_tid: function (tid) {
      for (var i=0; i<this.frameData.slots.length; i++) {
        if (this.frameData.slots[i].tids.includes(tid)) {
          return i
        }
      }
      return -1
    },
    // Set the hover index, either directly (idx) or via a token id (tid).
    // Calling this function without parameters resets hid.
    setHid: function (idx=null, tid=null) {
      if (tid === null) {
        this.hid = idx
        return
      }
      var i = this.slot_idx_of_tid(tid)
      if (i !== -1) {
        // as in the original, hid is left unchanged when tid matches no slot
        this.hid = i
      }
    },
    // Clicking anywhere outside a token <span> clears the selection.
    clearOnClick: function (event) {
      if (event.target.tagName !== "SPAN") {
        this.sid = null
      }
    },
    // Set the selection index, either directly (idx) or via a token id (tid).
    setSid: function (idx=null, tid=null) {
      this.sid = null
      if (tid === null) {
        this.sid = idx
        return
      }
      var i = this.slot_idx_of_tid(tid)
      if (i !== -1) {
        this.sid = i
      }
    },
    // Is the slot (by idx) or the token (by tid) under the hovered slot?
    hasHoverTid: function(idx=null, tid=null) {
      if (this.hid === null) {
        return false
      }
      if (tid === null) {
        if (idx == this.hid) {
          return true
        }
        return false
      }
      return this.frameData.slots[this.hid].tids.includes(tid)
    },
    // Is the slot (by idx) or the token (by tid) under the selected slot?
    hasSelTid: function (idx=null, tid=null) {
      if (this.sid === null) {
        return false
      }
      if (tid === null) {
        if (idx == this.sid) {
          return true
        }
        return false
      }
      return this.frameData.slots[this.sid].tids.includes(tid)
    },
    // True when the token id belongs to the headword tokens of this frame.
    isHw: function (tid) {
      return this.frameData.tids.includes(tid)
    },
    // Full sense object attached to this frame (undefined if none matches).
    getSense: function () {
      for (var i in this.sensData.senses) {
        if (this.sensData.senses[i].sense_id === this.frameData.sense_info.sense_id) {
          return this.sensData.senses[i]
        }
      }
      return undefined
    },
    // Sorted headwords used for grouping sentences in functor mode.
    getAggrHws: function() {
      return (Object.keys(this.frameData.aggr_sent)).sort()
    },
    // Sentences belonging to the given headword (functor mode).
    getAggrSent: function(hw) {
      var sentences = []
      for (var i=0; i<this.frameData.aggr_sent[hw].length; i++) {
        sentences.push(
          this.frameData.sentences[this.frameData.aggr_sent[hw][i]]
        )
      }
      return sentences
    },
  }
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.my-pointer {
|
||||
cursor: pointer;
|
||||
}
|
||||
.my-underline {
|
||||
text-decoration: underline;
|
||||
}
|
||||
.my-frames {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 2px;
|
||||
}
|
||||
blockquote {
|
||||
background: #ffffff;
|
||||
border-left: 4px solid #ccc;
|
||||
margin: 10px 0px 10px 10px;
|
||||
padding: 0px 0px 0px 5px;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
blockquote span {
|
||||
display: inline-block;
|
||||
}
|
||||
</style>
|
@ -0,0 +1,50 @@
|
||||
<template>
|
||||
<div>
|
||||
<p
|
||||
v-if="this.$root.store.api_error !== null"
|
||||
class="text-warning"
|
||||
>
|
||||
api_error: {{ this.$root.store.api_error }}
|
||||
</p>
|
||||
<Nav></Nav>
|
||||
<div class="my-home container-fluid">
|
||||
<div class="row">
|
||||
<div id="serach" class="col-sm-2 border-right fill">
|
||||
<LWords v-if="navSS()"></LWords>
|
||||
<LFunctors v-else></LFunctors>
|
||||
</div>
|
||||
<div class="col-sm-10">
|
||||
<router-view></router-view>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import Nav from "./Nav"
|
||||
import LWords from "./LWords"
|
||||
import LFunctors from "./LFunctors"
|
||||
import MainDispl from "./MainDispl"
|
||||
|
||||
export default {
  name: 'Home',
  components: {
    // registered with shorthand; MainDispl is kept registered as before
    Nav,
    LWords,
    LFunctors,
    MainDispl,
  },
  methods: {
    // True when the sidebar should show the word list rather than functors.
    navSS: function () {
      const mode = this.$root.storeGet("navSS")
      return mode === "words"
    }
  }
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.my-home {
|
||||
padding-top: 10px;
|
||||
}
|
||||
</style>
|
@ -0,0 +1,51 @@
|
||||
<template>
|
||||
<div>
|
||||
<table>
|
||||
<tr v-for="functor in functors">
|
||||
<td><a href="#" v-on:click="selectFunctor(functor)">{{ functor[0] }}</a></td>
|
||||
<td>({{ functor[1] }})</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
const LFunctors = {
  // This component lists functors. The original `name: "LWords"` was a
  // copy-paste leftover that collided with the real LWords component.
  name: "LFunctors",
  props: ["appState"],
  data() {return {
    functors: []   // [[functor, count], ...] returned by the API
  }},
  methods: {
    // Fetch the functor list; record any API error on the root store.
    apiGetFunctors: function () {
      var component = this
      this.$http.get(this.$root.store.api_addr + "/api/functors")
        .then(function(response) {
          component.$root.store.api_error = null
          component.functors = response.data
        })
        .catch(function(error) {
          component.$root.store.api_error = error
        })
    },
    // Navigate to the main display in functor mode.
    selectFunctor: function (functor) {
      this.$router.push({
        name: "MainDispl",
        params: {
          hw: functor[0],
          fmode: true
        }
      })
    }
  },
  mounted: function() {
    this.apiGetFunctors()
  }
}
export default LFunctors
|
||||
</script>
|
||||
|
||||
<style>
|
||||
table {
|
||||
width: 100%;
|
||||
}
|
||||
</style>
|
@ -0,0 +1,80 @@
|
||||
<template>
|
||||
<div>
|
||||
<select v-model="selectedLetter">
|
||||
<option v-for="letter in alphabet" :value="letter">
|
||||
{{ letter.toUpperCase() }} ({{ getNumWords(letter) }})
|
||||
</option>
|
||||
</select>
|
||||
<table>
|
||||
<tr v-for="word in getWords()">
|
||||
<td><a href="#" v-on:click="selectHw(word)">{{ word[0] }}
|
||||
<span v-if="$root.store.has_se.includes(word[0])">se</span>
|
||||
</a></td>
|
||||
<td>({{ word[1] }})</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
  name: "LWords",
  data() {return {
    // Slovene alphabet driving the letter selector.
    alphabet: "abcčdefghijklmnoprsštuvzž",
    letters: {},          // letter -> [[word, count], ...]
    selectedLetter: "a"   // currently chosen initial letter
  }},
  methods: {
    // Fetch the word list grouped by initial letter; record API errors.
    apiGetWords: function() {
      var self = this
      this.$http.get(this.$root.storeGet("api_addr") + "/api/words")
        .then(function(response) {
          self.$root.store.api_error = null
          self.$root.store.has_se = response.data["has_se"]
          self.letters = response.data["sorted_words"]
        })
        .catch(function(error) {
          self.$root.store.api_error = error
        })
    },
    // Number of words under the given letter (0 while not loaded).
    getNumWords: function(letter) {
      var bucket = this.letters[letter]
      if (!bucket) {
        return 0
      }
      return bucket.length
    },
    // Words under the currently selected letter ([] while not loaded).
    getWords: function() {
      var bucket = this.letters[this.selectedLetter]
      if (!bucket) {
        return []
      }
      return bucket
    },
    // Navigate to the main display for the chosen headword.
    selectHw: function(word) {
      this.$router.push({
        name: "MainDispl",
        params: {
          hw: word[0],
          fmode: false
        }
      })
    }
  },
  mounted: function() {
    this.apiGetWords()
  }
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
table {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
select {
|
||||
width: 100%;
|
||||
}
|
||||
</style>
|
@ -0,0 +1,120 @@
|
||||
<template>
|
||||
<div>
|
||||
<div class="col-sm-2">
|
||||
<a href="#" v-on:click="this.$root.routeBack">Nazaj</a>
|
||||
</div>
|
||||
<div class="ev-login col-sm-4 offset-sm-4">
|
||||
<div class="alert alert-danger" v-if="error">
|
||||
<p>{{ error }}</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input
|
||||
type="text"
|
||||
data-id="login.username"
|
||||
class="form-control js-login__username"
|
||||
placeholder="Uporabnik"
|
||||
v-model="credentials.username"
|
||||
>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input
|
||||
type="password"
|
||||
class="form-control js-login__password "
|
||||
placeholder="Geslo"
|
||||
v-model="credentials.password"
|
||||
>
|
||||
</div>
|
||||
<button
|
||||
data-id="login.submit"
|
||||
class="btn btn-primary solid blank js-login__submit"
|
||||
@click="submit()"
|
||||
>
|
||||
Prijava<i class="fa fa-arrow-circle-o-right"></i>
|
||||
</button>
|
||||
<br>
|
||||
<br>
|
||||
<br>
|
||||
<router-link to="/new_pass">Ste pozabili geslo?</router-link>
|
||||
<br>
|
||||
<br>
|
||||
Nov uporabnik?
|
||||
<br>
|
||||
<router-link to="/register">Ustvarite nov račun.</router-link>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
const Login = {
  name: 'Login',
  data () {
    return {
      credentials: {
        username: '',
        password: ''
      },
      loggingIn: false,
      error: ''   // validation/auth message shown in the alert box
    }
  },
  methods: {
    // Validate the form, POST the credentials and, on success, store the
    // username/token, set the auth cookie and navigate back.
    // Returns the request promise (undefined when validation fails) so
    // callers and tests can await completion; original callers that ignore
    // the return value are unaffected.
    submit () {
      this.error = ""

      if ( this.credentials.username === "" ||
        this.credentials.password === ""
      ) {
        this.error = "Izpolnite vsa polja."
        return
      }

      var data = {
        username: this.credentials.username,
        password: this.credentials.password
      }

      var component = this
      return this.$http.post(this.$root.storeGet("api_addr") + "/api/login",
        data, // the data to post
        { headers: {
          'Content-type': 'application/x-www-form-urlencoded',
        }
      })
      .then(function (response) {
        component.$root.store.api_error = null
        var token = response.data.token
        // `== null` also rejects an undefined token from the server
        // (the original `=== null` would treat undefined as success)
        if (token == null) {
          component.error = "Napačno uporabniško ime ali geslo."
        } else {
          // set cookies (if the page reloads)
          component.$root.store.username = component.credentials.username
          component.$root.store.token = token
          component.$router.go(-1)
          component.$cookies.set("valency_token", token, 60*60*48)
        }
      })
      .catch(function (err) {
        component.$root.store.api_error = err
      })
    },
  },
}
export default Login
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.ev-login {
|
||||
margin-top: 100px;
|
||||
}
|
||||
</style>
|
@ -0,0 +1,256 @@
|
||||
<template>
|
||||
<!--load mode-->
|
||||
<div v-if="show_loader">
|
||||
<pulse-loader :color="loader_color"></pulse-loader>
|
||||
</div>
|
||||
|
||||
<!--edit mode (button: razvrsti po pomenih)-->
|
||||
<div v-else-if="state === 'editing'" class="container-fluid">
|
||||
<EditSenses
|
||||
v-bind:hw="hw"
|
||||
v-bind:sentences="sentences"
|
||||
v-bind:sens="sens"
|
||||
></EditSenses>
|
||||
</div>
|
||||
|
||||
<!--normal mode-->
|
||||
<div v-else class="container-fluid" id="head">
|
||||
|
||||
<!--header (verb/adjective, radio buttons)-->
|
||||
<div class="row">
|
||||
<div class="col-sm-4">
|
||||
<table>
|
||||
<tr><h4 id="main-displ-hw">{{ hw }}
|
||||
<span v-if="$root.store.has_se.includes(hw)">se</span>
|
||||
</h4></tr>
|
||||
<tr>{{ calcPos() }}</tr>
|
||||
</table>
|
||||
</div>
|
||||
<div class="col-sm-8">
|
||||
<table>
|
||||
<tr>Združevanje vezljivostnih vzorcev:</tr>
|
||||
<tr>
|
||||
<label class="radio-inline"><input value="one" v-model="$root.store.radio" v-on:change="reload()" checked="" type="radio" name="optradio">posamezne povedi</label>
|
||||
<label class="radio-inline"><input value="two" v-model="$root.store.radio" v-on:change="reload()" type="radio" name="optradio">skupne udeleženske vloge</label>
|
||||
<label v-if="this.$root.store.navSS === 'words'" class="radio-inline"><input value="three" v-model="$root.store.radio" v-on:change="reload()" type="radio" name="optradio">po meri</label>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!--frames-->
|
||||
<div v-if="$root.store.radio === 'three'" class="row">
|
||||
<div class="col-sm-4">
|
||||
<button v-on:click="userEdit">razvrsti po pomenih</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row" v-for="frame in frames">
|
||||
<Frame
|
||||
v-bind:frameData="frame"
|
||||
v-bind:sensData="sens"
|
||||
v-bind:fmode="fmode">
|
||||
</Frame>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import Frame from "./Frame"
|
||||
import EditSenses from "./EditSenses"
|
||||
import PulseLoader from 'vue-spinner/src/PulseLoader.vue'
|
||||
export default {
  name: "MainDispl",
  components: {
    Frame: Frame,
    EditSenses: EditSenses,
    PulseLoader: PulseLoader,
  },
  // hw: headword (or functor name when fmode); fmode: functor-mode flag
  props: ["hw", "fmode"],
  data () { return {
    frames: [],       // frames fetched from the API
    sentences: {},    // ssj_id -> {hw_idx, words}, derived from frames
    sens: {           // senses + sentence->sense map (radio "three" view)
      senses: [],
      sense_map: {},
    },
    state: "loading", // editing, normal
    request_reload: false,   // set by EditSenses to force a refresh
    loader_color: "#007bff",
  }},
  created: function () {
    this.reload()
  },
  computed: {
    show_loader: function () {
      // NOTE(review): this shows the spinner only while an API error is
      // present; `api_error === null` looks like the intended condition —
      // confirm before changing. Behavior kept as found.
      return this.state === "loading" && this.$root.store.api_error !== null
    }
  },
  watch: {
    hw: function () {
      this.reload()
    },
    frames: function () {
      this.buildSentences()
    },
    request_reload: function () {
      if (this.request_reload) {
        this.request_reload = false
        this.reload()
      }
    },
  },
  methods: {
    // Map the current radio selection to a server-side reduce function;
    // `fallback` is used for radio values missing from `mapping`.
    // Shared by getFFrames/getFrames, which previously duplicated a switch.
    pickReduceFun: function(mapping, fallback) {
      var rf = mapping[this.$root.store.radio]
      return (rf === undefined) ? fallback : rf
    },
    // get frames in functor mode
    getFFrames: function(functor, reduce_fun=null) {
      if (functor === null || functor === undefined) return
      if (reduce_fun === null) {
        reduce_fun = this.pickReduceFun(
          { one: "reduce_0", two: "reduce_1" }, "reduce_0")
      }
      var component = this
      this.$http.get(
        this.$root.storeGet("api_addr") + "/api/functor-frames" +
        "?functor=" + functor + "&rf=" + reduce_fun)
        .then(function (response) {
          component.$root.store.api_error = null
          component.frames = response.data.frames
          component.state = "normal"
        })
        .catch(function(error) {
          component.$root.store.api_error = error
        })
    },
    // get frames for a headword (radio "three" groups by sense)
    getFrames: function (hw, reduce_fun=null) {
      if (hw === null || hw === undefined) return
      if (reduce_fun === null) {
        // null fallback: an unknown radio value keeps reduce_fun null,
        // matching the original switch that had no default branch
        reduce_fun = this.pickReduceFun(
          { one: "reduce_0", two: "reduce_1", three: "reduce_5" }, null)
      }
      var component = this
      this.$http.get(
        this.$root.storeGet("api_addr") + "/api/frames" +
        "?hw=" + hw + "&rf=" + reduce_fun)
        .then(function (response) {
          component.$root.store.api_error = null
          component.frames = response.data.frames
          component.state = "normal"
        })
        .catch(function(error) {
          component.$root.store.api_error = error
        })
    },
    // Rebuild the ssj_id -> sentence index used by the edit view.
    buildSentences: function () {
      if (this.frames.length == 0) {
        return
      }
      this.sentences = {}
      for (var fi in this.frames) {
        for (var si in this.frames[fi].sentences) {
          var sentence = this.frames[fi].sentences[si]
          // get ssj_id without .t123
          var ssj_id = sentence[0][0].split(".")
          ssj_id.splice(-1, 1) // removes last element
          ssj_id = ssj_id.join(".")
          var words = []
          var hw_idx = -1
          // adjectives carry a trailing "_"; strip it for lemma matching
          var tmp_hw = this.hw
          if (tmp_hw[tmp_hw.length - 1] === "_") {
            tmp_hw = tmp_hw.substr(0, tmp_hw.length - 1)
          }
          for (var i in sentence) {
            words.push(sentence[i][1].word)
            if (sentence[i][1].lemma === tmp_hw && hw_idx == -1) {
              hw_idx = i
            }
          }
          this.sentences[ssj_id] = {
            hw_idx: hw_idx,
            words: words
          }
        }
      }
    },
    // Fetch senses for hw, then invoke callback (used to re-sort frames).
    getSenses: function (hw, callback) {
      if (hw === null || hw === undefined) {
        return
      }
      var component = this
      this.$http.get(
        this.$root.store.api_addr + "/api/senses/get" + "?hw=" + hw)
        .then(function(response) {
          component.sens.senses = response.data.senses
          component.sens.sense_map = response.data.sense_map
          callback()
        })
    },
    // Re-fetch everything for the current hw and display mode.
    reload: function () {
      this.state = "loading"
      this.sentences = {}
      if (this.$root.store.navSS === "functors") this.getFFrames(this.hw)
      else {
        this.getFrames(this.hw)
        if (this.$root.store.radio === "three") {
          this.getSenses(this.hw, this.sortBySense)
        }
      }
      this.calcPos()
    },
    // Switch to edit mode if the user holds a valid token.
    userEdit: function () {
      // authenticate the user for this
      var tthis = this
      this.$root.checkToken()
        .then(function (response) {tthis.state = "editing"})
        .catch(function (err) {alert("Za urejanje je potrebna prijava.")}
      )
    },
    // Part-of-speech label for the header. fmode may arrive as the string
    // "true"/"false" from route params, so coerce it first.
    calcPos: function() {
      var bfmode = this.fmode
      if (typeof(bfmode) === "string") {
        bfmode = (bfmode === "true")
      }
      if (bfmode) return "udeleženska vloga"
      else if (this.hw.substr(this.hw.length-1) === "_") return "pridevnik"
      return "glagol"
    },
    // frames with defined senses on top
    sortBySense: function() {
      var undefFrames = []
      var defFrames = []
      for (var i=0; i<this.frames.length; i++) {
        var sense_id = this.frames[i].sense_info.sense_id
        if (sense_id === "nedefinirano") undefFrames.push(this.frames[i])
        else defFrames.push(this.frames[i])
      }
      this.frames = defFrames.concat(undefFrames)
    }
  }
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
#main-displ-hw {
|
||||
margin: 0px;
|
||||
}
|
||||
|
||||
</style>
|
@ -0,0 +1,74 @@
|
||||
<template>
|
||||
<nav>
|
||||
<b-navbar toggleable="md" type="light" variant="light">
|
||||
<b-navbar-toggle target="nav_collapse"></b-navbar-toggle>
|
||||
<b-navbar-brand>Vezljivostni vzorci slovenskih glagolov</b-navbar-brand>
|
||||
<b-collapse is-nav id="nav_collapse">
|
||||
|
||||
<b-navbar-nav>
|
||||
<b-nav-item-dropdown text="Prikaz" right>
|
||||
<b-dropdown-item v-for="option in search_options"
|
||||
:value="option.val"
|
||||
:key="option.val"
|
||||
v-on:click="setNavSS(option.val)">
|
||||
{{ option.key }}
|
||||
</b-dropdown-item>
|
||||
</b-nav-item-dropdown>
|
||||
</b-navbar-nav>
|
||||
|
||||
<!-- Right aligned nav items -->
|
||||
<b-navbar-nav class="ml-auto" right v-if="this.loggedIn()">
|
||||
<b-nav-item>
|
||||
Uporabnik: {{ this.$root.store.username }}
|
||||
<a href="#" v-on:click="logOut()">(odjava)</a>
|
||||
</b-nav-item>
|
||||
</b-navbar-nav>
|
||||
<b-navbar-nav class="ml-auto" right v-else>
|
||||
<b-nav-item>
|
||||
<router-link to="/register">
|
||||
Registracija
|
||||
</router-link>
|
||||
</b-nav-item>
|
||||
<b-nav-item>
|
||||
<router-link to="/login">
|
||||
Prijava
|
||||
</router-link>
|
||||
</b-nav-item>
|
||||
</b-navbar-nav>
|
||||
|
||||
</b-collapse>
|
||||
</b-navbar>
|
||||
</nav>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
export default {
  name: "Nav",
  props: ["appState"],
  data() {return {
    // Entries for the "Prikaz" dropdown.
    search_options: [
      {key: "besede", val: "words"},
      {key: "udeleženske vloge", val: "functors"},
    ],
  }},
  methods: {
    // Switch the sidebar mode, reset the display radio, go home.
    setNavSS(val) {
      const store = this.$root.store
      store.radio = "one"
      store.navSS = val
      this.$router.push({ name: "Home" })
    },
    // A user counts as logged in while a token is stored.
    loggedIn() {
      return this.$root.store.token !== null
    },
    // Drop the stored credentials and return to the home view.
    logOut() {
      const store = this.$root.store
      store.token = null
      store.username = null
      this.$router.push({ name: "Home" })
    }
  }
}
|
||||
</script>
|
@ -0,0 +1,106 @@
|
||||
<template>
|
||||
<div>
|
||||
<div class="col-sm-2">
|
||||
<a href="#" v-on:click="this.$root.routeBack">Nazaj</a>
|
||||
</div>
|
||||
<div class="ev-login col-sm-4 offset-sm-4">
|
||||
<div class="alert alert-danger" v-if="error">
|
||||
<p>{{ error }}</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input
|
||||
type="text"
|
||||
data-id="login.username"
|
||||
class="form-control js-login__username"
|
||||
placeholder="Uporabnik"
|
||||
v-model="credentials.username"
|
||||
>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input
|
||||
type="email"
|
||||
class="form-control"
|
||||
placeholder="e-pošta"
|
||||
v-model="credentials.email"
|
||||
>
|
||||
</div>
|
||||
<div>
|
||||
<p>Novo geslo bo poslano na vaš e-poštni naslov.</p>
|
||||
</div>
|
||||
<button
|
||||
data-id="new_pass.submit"
|
||||
class="btn btn-primary solid blank js-login__submit"
|
||||
@click="submit()"
|
||||
>
|
||||
Novo geslo<i class="fa fa-arrow-circle-o-right"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
const NewPass = {
  name: 'NewPass',
  data () {
    return {
      credentials: {
        username: '',
        email: ''
      },
      error: ''   // validation message shown in the alert box
    }
  },
  methods: {
    // Validate the form and request a password reset for the given
    // username/e-mail pair. Returns the request promise (undefined when
    // validation fails) so callers and tests can await completion.
    submit () {
      this.error = ""

      if ( this.credentials.username === "" ||
        this.credentials.email === ""
      ) {
        this.error = "Izpolnite vsa polja."
        return
      }

      var data = {
        username: this.credentials.username,
        email: this.credentials.email
      }

      var component = this
      return this.$http.post(this.$root.storeGet("api_addr") + "/api/new_pass",
        data, // the data to post
        { headers: {
          'Content-type': 'application/x-www-form-urlencoded',
        }
      })
      .then(function (response) {
        component.$root.store.api_error = null
        // the server's confirmation payload is not used client-side
        // (the original bound it to an unused local variable)
        component.$router.push({
          name: "Home"
        })
      })
      .catch(function (err) {
        component.$root.store.api_error = err
      })
    },
  },
}
export default NewPass
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.ev-login {
|
||||
margin-top: 100px;
|
||||
}
|
||||
</style>
|
@ -0,0 +1,138 @@
|
||||
<template>
|
||||
<div>
|
||||
<div class="col-sm-2">
|
||||
<a href="#" v-on:click="this.$root.routeBack">Nazaj</a>
|
||||
</div>
|
||||
<div class="ev-login col-sm-4 offset-sm-4">
|
||||
<div class="alert alert-danger" v-if="error">
|
||||
<p>{{ error }}</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input
|
||||
type="text"
|
||||
class="form-control js-login__username"
|
||||
placeholder="Uporabnik"
|
||||
v-model="credentials.username"
|
||||
autocomplete="off"
|
||||
>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input
|
||||
type="email"
|
||||
class="form-control"
|
||||
placeholder="e-pošta"
|
||||
v-model="credentials.email"
|
||||
autocomplete="off"
|
||||
>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input
|
||||
type="password"
|
||||
class="form-control js-login__password "
|
||||
placeholder="Geslo"
|
||||
v-model="credentials.password"
|
||||
autocomplete="off"
|
||||
>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input
|
||||
type="password"
|
||||
class="form-control js-login__password "
|
||||
placeholder="Ponovite geslo."
|
||||
v-model="credentials.snd_password"
|
||||
autocomplete="off"
|
||||
>
|
||||
</div>
|
||||
<button
|
||||
class="btn btn-primary solid blank js-login__submit"
|
||||
@click="submit()"
|
||||
>
|
||||
Registracija<i class="fa fa-arrow-circle-o-right"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
const Register = {
  name: 'Register',
  data () { return {
    credentials: {
      username: "",
      password: "",
      snd_password: "",  // password repeated for confirmation
      email: ""
    },
    error: ""   // validation message shown in the alert box
  }},
  methods: {
    // Reset every form field.
    clearFields () {
      for (var key in this.credentials) {
        this.credentials[key] = ""
      }
    },
    // E-mail validation placeholder — currently accepts anything.
    checkEmail () {
      // check? ... todo
      return true
    },
    // Validate the form and POST the registration. Returns the request
    // promise (undefined when validation fails) so callers and tests can
    // await completion.
    submit () {
      // check if fields are full
      // BUG FIX: the original checked `credentials[key]`, a local object
      // holding only username/password, so an empty email or snd_password
      // slipped through (`undefined !== ""`); check the form model itself.
      for (var key in this.credentials) {
        if (this.credentials[key] === "") {
          this.error = "Izpolnite vsa polja."
          return
        }
      }

      // check e-mail
      if (!this.checkEmail(this.credentials.email)) {
        this.error = "Preverite e-poštni naslov."
        return
      }

      // check passwords
      if (this.credentials.password !== this.credentials.snd_password) {
        this.error = "Gesli se ne ujemata."
        this.credentials.password = ""
        this.credentials.snd_password = ""
        return
      }

      var component = this
      const post_data = {
        username: this.credentials.username,
        password: this.credentials.password,
        email: this.credentials.email,
      }
      return this.$http.post(this.$root.storeGet("api_addr") + "/api/register",
        post_data, // the data to post
        { headers: {
          'Content-type': 'application/json',
        }
      })
      .then(function (response) {
        component.$router.push({
          name: "Home"
        })
      })
      .catch(function (err) {
        component.$root.store.api_error = err
        component.error = "Registracija ni uspela."
      })
    }
  }
}
export default Register
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
/* Offset the login/register card from the top of the viewport. */
.ev-login {
    margin-top: 100px;
}
|
||||
</style>
|
@ -0,0 +1,48 @@
|
||||
<template>
  <div class="pb-3">
    <!-- Shown when the parent passed no sense object. -->
    <div v-if="sense === undefined">
      pomen ni definiran
    </div>
    <div v-else>
      <!-- Sense description, followed by its author and a
           human-readable id label built by gen_id(). -->
      <span>{{ sense.desc }}</span>
      <br>

      <small class="text-secondary">
        - {{ sense.author }}
        {{ gen_id() }}
      </small>
    </div>
  </div>
</template>
|
||||
|
||||
<script>
|
||||
// Renders a single dictionary sense; gen_id() expands the structured
// sense_id into a human-readable label for SSKJ senses.
const Sense = {
  name: "Sense",
  // sense: { sense_id, desc, author } — supplied by the parent component.
  props: ["sense"],
  methods: {
    // Build a display label from the "-"-separated sense_id.
    // Segments 1-4 are interpreted below; only SSKJ ids are expanded,
    // all other authors yield an empty label.
    // NOTE(review): exact field meanings of the id segments are assumed
    // from this formatting code — confirm against the backend schema.
    gen_id: function() {
      var id_arr = this.sense.sense_id.split("-")
      var ret = ""
      if (this.sense.author === "SSKJ") {
        if (id_arr[1] !== "0") {
          // BUGFIX: was `+ ["] "]` — an accidental array literal that only
          // produced the right text via string coercion; use the string.
          ret += ("[" + id_arr[1] + "] ")
        }
        if (id_arr[2] !== "0") {
          ret += ("pomen " + id_arr[2])
          if (id_arr[3] !== "0") {
            ret += ("." + id_arr[3])
          }
        }
        if (id_arr[4] === "sopo") {
          ret += " (sopomenka)"
        }
      }
      // Prefix a separator only when there is something to show.
      if (ret.length > 0) {
        ret = ": " + ret
      }
      return ret
    }
  }
}
export default Sense
|
||||
</script>
|
@ -0,0 +1,140 @@
|
||||
// The Vue build version to load with the `import` command
|
||||
// (runtime-only or standalone) has been set in webpack.base.conf with an alias.
|
||||
import Vue from 'vue'
|
||||
import App from './App'
|
||||
import router from './router'
|
||||
import VueCookies from "vue-cookies"
|
||||
|
||||
// bootstrap
|
||||
import BootstrapVue from "bootstrap-vue"
|
||||
import 'bootstrap/dist/css/bootstrap.css'
|
||||
import 'bootstrap-vue/dist/bootstrap-vue.css'
|
||||
|
||||
// ajax
|
||||
import axios from "axios"
|
||||
|
||||
// config
|
||||
import config_data from "../config/config.json"
|
||||
// console.log(config_data)
|
||||
|
||||
// Silence the "running in development mode" console hint.
Vue.config.productionTip = false

// cookies (used below to persist the auth token across sessions)
Vue.use(VueCookies)

// bootstrap
Vue.use(BootstrapVue)

// CORS
// Vue.$http.headers.common['Access-Control-Allow-Origin'] = true

// Expose axios on every component instance as this.$http.
Vue.prototype.$http = axios
|
||||
|
||||
// hand-made global storage
|
||||
// Hand-made global storage shared by all components; it is attached to
// the root Vue instance's data below, so components reach it via $root.
const store = {
  api_error: null,                 // last API/transport error, if any
  api_addr: config_data.api_addr,  // backend base URL (from config/config.json)
  // api_addr: "http://localhost:5004", // development (webpack)
  // api_addr: "http://193.2.76.103:5004", // production
  token: null,     // auth token, mirrored in the "valency_token" cookie
  username: null,  // logged-in user; null when anonymous
  navSS: "words",  // NOTE(review): presumably the active nav subsection — confirm
  radio: "one",    // NOTE(review): presumably a radio-group default — confirm
  has_se: [], // used for appending (se) to certain verbs
}
|
||||
|
||||
// Thin accessors over the global `store`, merged into the root Vue
// instance's methods so components can call $root.storeGet / $root.storeSet.
const store_methods = {
  storeSet (key, val) {
    store[key] = val
  },
  storeGet (key) {
    // Yields undefined for keys that were never set — callers should
    // check the result before use.
    return store[key]
  }
}
|
||||
|
||||
// Auth helpers merged into the root Vue instance's methods.
const login_methods = {
  // Validate the stored token against the backend.
  // Resolves with true when the backend confirms the token; rejects with
  // false (no/invalid token) or with the transport error.
  checkToken: function () {
    var tthis = this
    return new Promise(function (resolve, reject) {
      if (tthis.store.token === null) {
        // No token at all: make sure no stale username lingers.
        tthis.store.username = null
        reject(false)
        // BUGFIX: without this return the POST below was still issued
        // even though the promise had already been rejected.
        return
      }
      var data = {
        token: tthis.store.token,
        user: tthis.store.username
      }
      tthis.$http.post(tthis.store.api_addr + "/api/token", data,
        { headers: {
          'Content-type': 'application/x-www-form-urlencoded',
        }}
      )
        .then(function (response) {
          tthis.store.api_error = null
          if (response.data.confirmation) {
            resolve(true)
          } else {
            // Backend rejected the token: drop the stale credentials.
            tthis.store.username = null
            tthis.store.token = null
            reject(false)
          }
        })
        .catch(function (err) {
          tthis.store.api_error = err
          reject(err)
        })
    })
  }
}
|
||||
|
||||
// Miscellaneous helpers merged into the root Vue instance's methods.
const other_methods = {
  // Navigate one step back in the browser history.
  routeBack: function() {
    this.$router.go(-1)
  },
  // Decide whether a space should be rendered before `word`:
  // false for punctuation tokens, true otherwise (`idx` is unused).
  mkspace: function (idx, word) {
    const punctuation = [".", ",", ":", ";"]
    return !punctuation.includes(word)
  }
}
|
||||
|
||||
/* eslint-disable no-new */
// Root Vue instance. On startup it restores a persisted login: if a
// "valency_token" cookie exists, the token is validated against the
// backend and the session (store.username / store.token) is restored;
// an invalid cookie is removed.
new Vue({
  el: '#app',
  router,
  components: { App },
  template: '<App/>',
  data() { return {
    store: store,
  }},
  // BUGFIX: merge the mixins into a fresh object — Object.assign writes
  // into its first argument, so the original mutated store_methods.
  methods: Object.assign({}, store_methods, login_methods, other_methods),
  beforeCreate: function() {
    document.title = "Vezljivostni vzorci"
    if (this.$cookies.isKey("valency_token")) {
      var cookie_token = this.$cookies.get("valency_token")
      var data = {
        token: cookie_token,
      }
      // Capture the component: plain-function callbacks below do not
      // share its `this`.
      var component = this
      this.$http.post(store.api_addr + "/api/token",
        data, // the data to post
        { headers: {
          'Content-type': 'application/x-www-form-urlencoded',
        }
        })
        .then(function (response) {
          if (response.data.confirmation) {
            store.username = response.data.username
            store.token = cookie_token
          } else {
            // BUGFIX: was `this.$cookies` — `this` in a plain callback is
            // not the component, so the stale cookie was never removed.
            component.$cookies.remove("valency_token")
          }
        })
        .catch(function (err) {
          store.api_error = err
        })
    }
  }
})
|
@ -0,0 +1,48 @@
|
||||
import Vue from 'vue'
|
||||
import Router from 'vue-router'
|
||||
import Home from "@/components/Home"
|
||||
import Login from "@/components/Login"
|
||||
import Register from "@/components/Register"
|
||||
import NewPass from "@/components/NewPass"
|
||||
import MainDispl from "@/components/MainDispl"
|
||||
import EditSenses from "@/components/EditSenses"
|
||||
|
||||
// Register the router plugin before constructing the router instance.
Vue.use(Router)

// Application routes. history mode gives clean URLs (the web server must
// rewrite unknown paths to index.html for deep links to work).
// NOTE(review): EditSenses is imported above but not routed — confirm
// whether it is mounted elsewhere or the route is missing.
export default new Router({
  mode: "history",
  routes: [
    {
      // Landing redirect.
      path: '/',
      redirect: "/home"
    },
    {
      path: "/home",
      name: "Home",
      component: Home,
      children: [
        {
          // Word detail view nested under Home; :hw is passed to the
          // component as a prop (props: true).
          path: "words/:hw",
          name: "MainDispl",
          component: MainDispl,
          props: true,
        },
      ]
    },
    {
      path: '/login',
      name: 'Login',
      component: Login
    },
    {
      path: '/register',
      name: 'Register',
      component: Register
    },
    {
      path: '/new_pass',
      name: 'NewPass',
      component: NewPass
    }
  ]
})
|
@ -0,0 +1,3 @@
|
||||
proxy: {
|
||||
'*': 'http://localhost:5004'
|
||||
}
|
Loading…
Reference in new issue