Added heuristics (time predictions for calculations)

This commit is contained in:
Luka 2019-01-07 12:55:11 +01:00
parent bb9f3f0fb9
commit 5af79e9670
19 changed files with 2232 additions and 1247 deletions
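The "heuristics" this commit adds are time predictions for long calculations: each corpus reader first counts its work items, then publishes an integer percentage through a new progress property as it re-reads the file, and records startTime so elapsed time can be extrapolated into a remaining-time estimate. A minimal sketch of that extrapolation, assuming a helper of this shape (the method name and placement are illustrative, not part of the commit):

// Illustrative only: remaining ~ elapsed * (100 - percent) / percent,
// using the startTime and the 0-100 progress scale introduced below.
public static long estimateRemainingMillis(Date startTime, double percent) {
    if (percent <= 0.0) {
        return -1L; // nothing measured yet, no estimate possible
    }
    long elapsedMillis = new Date().getTime() - startTime.getTime();
    return (long) (elapsedMillis * (100.0 - percent) / percent);
}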

View File

@@ -14,6 +14,10 @@ import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.*;
import gui.I18N;
import javafx.beans.property.ReadOnlyDoubleProperty;
import javafx.beans.property.ReadOnlyDoubleWrapper;
import javafx.concurrent.Task;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.LineIterator;
import org.apache.logging.log4j.LogManager;
@@ -24,6 +28,21 @@ import gui.ValidationUtil;
public class XML_processing {
public final static org.apache.logging.log4j.Logger logger = LogManager.getLogger(XML_processing.class);
// progress tracking functionality
private static final ReadOnlyDoubleWrapper progress = new ReadOnlyDoubleWrapper();
public static boolean isCancelled = false;
public static Date startTime = new Date();
public static boolean isCollocability = false;
public double getProgress() {
return progressProperty().get();
}
public ReadOnlyDoubleProperty progressProperty() {
return progress ;
}
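Since the tracker is exposed as a JavaFX ReadOnlyDoubleProperty, UI code can observe it without polling. A minimal usage sketch, assuming a ProgressBar on the caller's side (not part of this commit); note the property is set on a 0-100 scale while ProgressBar expects 0-1:

// Assumed caller-side binding; divide(100.0) rescales percentages to [0, 1].
XML_processing processing = new XML_processing();
ProgressBar bar = new ProgressBar();
bar.progressProperty().bind(processing.progressProperty().divide(100.0));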
// public static void processCorpus(Statistics stats) {
// // we can preset the list's size, so there won't be a need to resize it
// List<Sentence> corpus = new ArrayList<>(Settings.CORPUS_SENTENCE_LIMIT);
@@ -45,17 +64,22 @@ public class XML_processing {
// }
// }
- public static void readXML(String path, StatisticsNew stats) {
+ public static boolean readXML(String path, StatisticsNew stats) {
if (stats.getCorpus().getCorpusType() == CorpusType.GIGAFIDA
|| stats.getCorpus().getCorpusType() == CorpusType.CCKRES) {
- readXMLGigafida(path, stats);
+ return readXMLGigafida(path, stats);
} else if (stats.getCorpus().getCorpusType() == CorpusType.GOS) {
- readXMLGos(path, stats);
+ return readXMLGos(path, stats);
} else if (stats.getCorpus().getCorpusType() == CorpusType.SOLAR) {
- readXMLSolar(path, stats);
+ return readXMLSolar(path, stats);
- } else if (stats.getCorpus().getCorpusType() == CorpusType.SSJ500K) {
- readXMLSSJ500K(path, stats);
+ } else if (stats.getCorpus().getCorpusType() == CorpusType.SSJ500K ||
+ stats.getCorpus().getCorpusType() == CorpusType.GIGAFIDA2) {
+ return readXMLSSJ500K(path, stats);
+ } else if (stats.getCorpus().getCorpusType() == CorpusType.VERT) {
+ return readVERT(path, stats);
}
// task.updateProgress(fileNum, size);
return false;
}
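readXML now reports success as a boolean: false means the read was aborted via the static isCancelled flag (or hit an unknown corpus type), true means the file was processed. A sketch of how a javafx.concurrent.Task, imported above, might drive it (the path and stats variables are assumed, not from this commit):

// Hypothetical background task wrapping one corpus read.
Task<Boolean> readTask = new Task<Boolean>() {
    @Override
    protected Boolean call() {
        XML_processing.isCancelled = false;
        XML_processing.startTime = new Date(); // reset the ETA baseline
        return XML_processing.readXML(path, stats);
    }
};
readTask.setOnCancelled(e -> XML_processing.isCancelled = true);
new Thread(readTask).start();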
/**
@@ -270,7 +294,7 @@
// }
@SuppressWarnings("unused")
- public static void readXMLSolar(String path, StatisticsNew stats) {
+ public static boolean readXMLSolar(String path, StatisticsNew stats) {
boolean in_word = false;
boolean inPunctuation = false;
String lemma = "";
@@ -284,11 +308,42 @@
Map<String, String> headBlock = null;
boolean includeThisBlock = false;
int numLines = 0;
int lineNum = 0;
progress.set(0.0);
if(!isCollocability) {
startTime = new Date();
}
// get number of lines
try {
XMLInputFactory factory = XMLInputFactory.newInstance();
XMLEventReader eventReader = factory.createXMLEventReader(new FileInputStream(path));
while (eventReader.hasNext())
{
eventReader.next();
numLines ++;
// count the XML events up front so progress can be reported as a percentage
}
} catch (IOException e) {
e.printStackTrace();
} catch (XMLStreamException e) {
e.printStackTrace();
}
try {
XMLInputFactory factory = XMLInputFactory.newInstance();
XMLEventReader eventReader = factory.createXMLEventReader(new FileInputStream(path));
while (eventReader.hasNext()) {
int percentage = (int) (lineNum * 100.0 / numLines);
if(progress.get() < percentage) {
progress.set(percentage);
}
if(isCancelled) {
return false;
}
lineNum ++;
XMLEvent event = eventReader.nextEvent();
switch (event.getEventType()) {
@@ -386,6 +441,7 @@
} catch (FileNotFoundException | XMLStreamException e) {
e.printStackTrace();
}
return true;
}
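The percentage check inside readXMLSolar above is a throttled update, repeated in each reader: the property is only written when the integer percentage actually grows, so JavaFX listeners fire at most ~100 times per file instead of once per XML event. The same guard, extracted as a sketch (the helper name is illustrative):

// Illustrative extraction of the progress guard used in each reader loop.
private static void updateProgress(int itemNum, int numItems) {
    int percentage = (int) (itemNum * 100.0 / numItems);
    if (progress.get() < percentage) {
        progress.set(percentage); // at most one listener notification per percent
    }
}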
/**
@@ -446,10 +502,6 @@
* @param corpusType
*/
public static HashSet<String> readVertHeaderTaxonomyAndFilters(String filepath, boolean corpusIsSplit, CorpusType corpusType) {
- // boolean parseTaxonomy = Tax.getCorpusTypesWithTaxonomy().contains(corpusType);
- // solar
- Set<String> headTags = null;
- HashMap<String, HashSet<String>> resultFilters = new HashMap<>();
// taxonomy corpora
HashSet<String> resultTaxonomy = new HashSet<>();
@@ -468,19 +520,38 @@
// String mediumId = "";
// String typeId = "";
// String proofreadId = "";
boolean idsPresent = false;
for (String el : split) {
String[] attribute = el.split("=\"");
if (attribute[0].equals("medium_id")) {
// mediumId = attribute[1];
idsPresent = true;
resultTaxonomy.add(attribute[1]);
} else if (attribute[0].equals("type_id")) {
// typeId = attribute[1];
idsPresent = true;
resultTaxonomy.add(attribute[1]);
} else if (attribute[0].equals("proofread_id")) {
// proofreadId = attribute[1];
idsPresent = true;
resultTaxonomy.add(attribute[1]);
}
}
if (!idsPresent){
for (String el : split) {
String[] attribute = el.split("=\"");
if (attribute[0].equals("medium")) {
// mediumId = attribute[1];
resultTaxonomy.add(attribute[1]);
} else if (attribute[0].equals("type")) {
// typeId = attribute[1];
resultTaxonomy.add(attribute[1]);
} else if (attribute[0].equals("proofread")) {
// proofreadId = attribute[1];
resultTaxonomy.add(attribute[1]);
}
}
}
}
}
} finally {
@@ -552,6 +623,9 @@
.getValue()
.replace("#", "");
if (tax.indexOf(':') >= 0) {
tax = tax.split(":")[1];
}
resultTaxonomy.add(tax);
} else if (parseTaxonomy && elementName.equalsIgnoreCase("term")) {
String tax = startElement.getAttributeByName(QName.valueOf("ref"))
@@ -646,7 +720,7 @@
if (tax != null) {
// keep only taxonomy properties
- Taxonomy currentFiletaxonomyElement = Taxonomy.factory(String.valueOf(tax.getValue()).replace("#", ""));
+ Taxonomy currentFiletaxonomyElement = Taxonomy.factory(String.valueOf(tax.getValue()).replace("#", ""), stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
Tax taxonomy = new Tax();
// currentFiletaxonomyLong.add(taxonomy.getLongTaxonomyName(currentFiletaxonomyElement));
@@ -799,12 +873,43 @@
List<Sentence> corpus = new ArrayList<>(Settings.CORPUS_SENTENCE_LIMIT); // preset the list's size, so there won't be a need to resize it
String sentenceDelimiter = "s";
int numLines = 0;
int lineNum = 0;
progress.set(0.0);
if(!isCollocability) {
startTime = new Date();
}
// get number of lines
try {
XMLInputFactory factory = XMLInputFactory.newInstance();
XMLEventReader eventReader = factory.createXMLEventReader(new FileInputStream(path));
while (eventReader.hasNext())
{
eventReader.next();
numLines ++;
// count the XML events up front so progress can be reported as a percentage
}
} catch (IOException e) {
e.printStackTrace();
} catch (XMLStreamException e) {
e.printStackTrace();
}
XMLEventReader eventReader = null;
try {
XMLInputFactory factory = XMLInputFactory.newInstance();
eventReader = factory.createXMLEventReader(new FileInputStream(path));
while (eventReader.hasNext()) {
int percentage = (int) (lineNum * 100.0 / numLines);
if(progress.get() < percentage) {
progress.set(percentage);
}
if(isCancelled) {
return false;
}
lineNum ++;
XMLEvent event = eventReader.nextEvent();
switch (event.getEventType()) {
@@ -815,7 +920,8 @@
// "word" node
if (qName.equals("w")) {
inWord = true;
- if (!String.valueOf(startElement.getAttributeByName(QName.valueOf("ana")).getValue()).substring(0, 4).equals("msd:")){
+ if (!(String.valueOf(startElement.getAttributeByName(QName.valueOf("ana")).getValue()).substring(0, 4).equals("msd:") ||
+ String.valueOf(startElement.getAttributeByName(QName.valueOf("ana")).getValue()).substring(0, 4).equals("mte:"))){
System.out.println("MSD written incorrectly");
}
msd = String.valueOf(startElement.getAttributeByName(QName.valueOf("ana")).getValue()).substring(4);
@@ -834,28 +940,78 @@
if (tax != null) {
// keep only taxonomy properties
- Taxonomy currentFiletaxonomyElement = Taxonomy.factory(String.valueOf(tax.getValue()).replace("#", ""));
+ Taxonomy currentFiletaxonomyElement = Taxonomy.factory(String.valueOf(tax.getValue()).replace("#", ""), stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
// Tax taxonomy = new Tax();
// currentFiletaxonomyLong.add(taxonomy.getLongTaxonomyName(currentFiletaxonomyElement));
}
} else if (qName.equalsIgnoreCase("catRef")) {
// get value from attribute target
Attribute tax = startElement.getAttributeByName(QName.valueOf("target"));
if (tax != null && !tax.getValue().equals("dedup:nodup")) {
// keep only taxonomy properties
Taxonomy currentFiletaxonomyElement = Taxonomy.factory(String.valueOf(tax.getValue()).split(":")[1], stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
// Tax taxonomy = new Tax();
// currentFiletaxonomyLong.add(taxonomy.getLongTaxonomyName(currentFiletaxonomyElement));
}
// if (parseTaxonomy && elementName.equalsIgnoreCase("catRef")) {
// HashMap<String, String> atts = extractAttributes(startElement);
// String debug = "";
//
// String tax = startElement.getAttributeByName(QName.valueOf("target"))
// .getValue()
// .replace("#", "");
//
// if (tax.indexOf(':') >= 0) {
// tax = tax.split(":")[1];
// }
// resultTaxonomy.add(tax);
// } else if (parseTaxonomy && elementName.equalsIgnoreCase("term")) {
// String tax = startElement.getAttributeByName(QName.valueOf("ref"))
// .getValue()
// .replace("#", "");
//
// resultTaxonomy.add(tax);
// } else if (!parseTaxonomy && headTags.contains(elementName)) {
// String tagContent = xmlEventReader.nextEvent().asCharacters().getData();
// resultFilters.get(elementName).add(tagContent);
// }
} else if (qName.equals("bibl")) { } else if (qName.equals("bibl")) {
// before proceeding to read this file, make sure that taxonomy filters are a match // before proceeding to read this file, make sure that taxonomy filters are a match
taxonomyMatch = true; taxonomyMatch = true;
} else if (qName.equals("text")){
taxonomyMatch = true;
}
break;
case XMLStreamConstants.CHARACTERS:
Characters characters = event.asCharacters();
// "word" node value
if (inWord) {
String word = characters.getData();
// if (word.equals("Banovec")){
// System.out.println("Test");
// }
sentence.add(createWord(word, lemma, msd, word, stats.getFilter()));
inWord = false;
}
- if (stats.getFilter().getNgramValue() > 1 && stats.getFilter().getNotePunctuations() && inPunctuation && sentence.size() > 0) {
+ if (stats.getFilter().getNgramValue() > 1 && stats.getFilter().getNotePunctuations() && inPunctuation) {
// if (stats.getFilter().getNgramValue() > 1 && stats.getFilter().getNotePunctuations() && inPunctuation && sentence.size() > 0) {
String punctuation = characters.getData();
sentence.add(createWord(punctuation, punctuation, "/", punctuation, stats.getFilter()));
inPunctuation = false;
@@ -906,7 +1062,8 @@
}
}
// fallback
- else if (endElement.getName().getLocalPart().equalsIgnoreCase("div")) {
+ else if (endElement.getName().getLocalPart().equalsIgnoreCase("div") &&
+ stats.getCorpus().getCorpusType() == CorpusType.SSJ500K) {
// join corpus and stats
fj(corpus, stats);
corpus.clear();
@@ -926,11 +1083,22 @@
// System.out.println("TEST");
}
}
} else if (endElement.getName().getLocalPart().equals("text")){
taxonomyMatch = false;
}
break;
}
}
if (corpus.size() > 0) {
fj(corpus, stats);
// empty the current corpus, since we don't need the data anymore
corpus.clear();
// TODO: if (stats.isUseDB()) {
// stats.storeTmpResultsToDB();
// }
}
} catch (FileNotFoundException | XMLStreamException e) {
e.printStackTrace();
} finally {
@@ -967,10 +1135,35 @@
String gosType = stats.getFilter().hasMsd() ? "norm" : "orth"; // orth & norm
int numLines = 0;
int lineNum = 0;
progress.set(0.0);
if(!isCollocability) {
startTime = new Date();
}
// get number of lines
try {
XMLInputFactory factory = XMLInputFactory.newInstance();
XMLEventReader eventReader = factory.createXMLEventReader(new FileInputStream(path));
while (eventReader.hasNext())
{
eventReader.next();
numLines ++;
// count the XML events up front so progress can be reported as a percentage
}
} catch (IOException e) {
e.printStackTrace();
} catch (XMLStreamException e) {
e.printStackTrace();
}
XMLEventReader eventReader = null;
boolean includeFile = true;
try {
XMLInputFactory factory = XMLInputFactory.newInstance();
eventReader = factory.createXMLEventReader(new FileInputStream(path));
@@ -978,6 +1171,14 @@
// created hashmap to combine words with normalized words
while (eventReader.hasNext()) {
int percentage = (int) (lineNum * 100.0 / numLines);
if(progress.get() < percentage) {
progress.set(percentage);
}
if(isCancelled) {
return false;
}
lineNum ++;
XMLEvent event = eventReader.nextEvent();
// System.out.print(String.format("%s", event.toString().replaceAll("\\['http://www.tei-c.org/ns/1.0'\\]::", "")));
@@ -1028,7 +1229,7 @@
if (tax != null) {
// keep only taxonomy properties
- Taxonomy currentFiletaxonomyElement = Taxonomy.factory(String.valueOf(tax.getValue()));
+ Taxonomy currentFiletaxonomyElement = Taxonomy.factory(String.valueOf(tax.getValue()), stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
// Tax taxonomy = new Tax();
// currentFiletaxonomyLong.add(taxonomy.getLongTaxonomyName(currentFiletaxonomyElement));
@@ -1200,6 +1401,239 @@
return true;
}
@SuppressWarnings("Duplicates")
public static boolean readVERT(String path, StatisticsNew stats) {
// taxonomy corpora
// HashSet<String> resultTaxonomy = new HashSet<>();
// regi path
String regiPath = path.substring(0, path.length()-4) + "regi";
LineIterator regiIt;
int wordIndex = -1;
int lemmaIndex = -1;
int msdIndex = -1;
boolean slovene = false;
try {
// read regi file
regiIt = FileUtils.lineIterator(new File(regiPath), "UTF-8");
try {
boolean insideHeader = false;
int attributeIndex = 0;
while (regiIt.hasNext()) {
String line = regiIt.nextLine();
if (line.length() >= 9 && line.substring(0, 9).equals("ATTRIBUTE")) {
// split over "\" "
String[] split = line.split(" ");
if (split[1].equals("word") && wordIndex == -1){
wordIndex = attributeIndex;
} else if (split[1].equals("lempos") && lemmaIndex == -1){
lemmaIndex = attributeIndex;
} else if (split[1].equals("tag") && msdIndex == -1){
msdIndex = attributeIndex;
}
attributeIndex ++;
if (wordIndex >= 0 && lemmaIndex >= 0 && msdIndex >= 0){
break;
}
} else if (line.length() >= 8 && line.substring(0, 8).equals("LANGUAGE")) {
String[] split = line.split(" ");
if (split[1].equals("\"Slovenian\"")){
slovene = true;
}
}
}
} finally {
LineIterator.closeQuietly(regiIt);
}
} catch (IOException e) {
e.printStackTrace();
}
int numLines = 0;
// get number of lines
try (FileReader input = new FileReader(path);
LineNumberReader count = new LineNumberReader(input)
)
{
while (count.skip(Long.MAX_VALUE) > 0)
{
// Loop just in case the file is > Long.MAX_VALUE or skip() decides to not read the entire file
}
numLines = count.getLineNumber() + 1; // +1 because line index starts at 0
} catch (IOException e) {
e.printStackTrace();
}
LineIterator it;
ArrayList<Taxonomy> currentFiletaxonomy = new ArrayList<>();
boolean inParagraph = false;
boolean inSentence = false;
boolean taxonomyMatch = true;
int lineNum = 0;
int numSentences = 0;
int numSentencesLimit = 1000;
List<Word> sentence = new ArrayList<>();
List<Sentence> corpus = new ArrayList<>(Settings.CORPUS_SENTENCE_LIMIT);
progress.set(0.0);
if(!isCollocability) {
startTime = new Date();
}
try {
it = FileUtils.lineIterator(new File(path), "UTF-8");
try {
boolean insideHeader = false;
while (it.hasNext()) {
int percentage = (int) (lineNum * 100.0 / numLines);
if(progress.get() < percentage) {
progress.set(percentage);
}
if(isCancelled) {
return false;
}
lineNum ++;
String line = it.nextLine();
// beginning tags
// taxonomy
if (line.length() > 4 && line.substring(1, 5).equals("text")) {
String[] split = line.split("\" ");
currentFiletaxonomy = new ArrayList<>();
boolean medium = false;
boolean type = false;
boolean proofread = false;
for (String el : split) {
String[] attribute = el.split("=\"");
boolean idsPresent = false;
if (attribute[0].equals("medium_id") && !attribute[1].equals("-")) {
Taxonomy currentFiletaxonomyElement = Taxonomy.factory(attribute[1], stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
medium = true;
} else if (attribute[0].equals("type_id") && !attribute[1].equals("-")) {
Taxonomy currentFiletaxonomyElement = Taxonomy.factory(attribute[1], stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
type = true;
} else if (attribute[0].equals("proofread_id") && !attribute[1].equals("-")) {
Taxonomy currentFiletaxonomyElement = Taxonomy.factory(attribute[1], stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
proofread = true;
}
if (attribute[0].equals("medium") && !attribute[1].equals("-") && !medium) {
Taxonomy currentFiletaxonomyElement = Taxonomy.factory(attribute[1], stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
} else if (attribute[0].equals("type") && !attribute[1].equals("-") && !type) {
Taxonomy currentFiletaxonomyElement = Taxonomy.factory(attribute[1], stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
} else if (attribute[0].equals("proofread") && !attribute[1].equals("-") && !attribute[1].equals("-\">") && !proofread) {
Taxonomy currentFiletaxonomyElement = Taxonomy.factory(attribute[1], stats.getCorpus());
currentFiletaxonomy.add(currentFiletaxonomyElement);
}
}
taxonomyMatch = true;
if (!ValidationUtil.isEmpty(stats.getFilter().getTaxonomy())) {
currentFiletaxonomy.retainAll(stats.getFilter().getTaxonomy()); // intersection
if (currentFiletaxonomy.isEmpty()) {
// taxonomies don't match so don't save
taxonomyMatch = false;
}
}
}
// else if((line.length() >= 3 && line.substring(0, 2).equals("<p") && line.substring(line.length() - 1, line.length()).equals(">")) ||
// (line.length() >= 3 && line.substring(0, 3).equals("<ab") && line.substring(line.length() - 1, line.length()).equals(">"))){
// inParagraph = true;
// } else if((line.length() == 4 && line.equals("</p>")) || (line.length() == 5 && line.equals("</ab>"))){
// inParagraph = false;
// }
else if(line.length() >= 3 && line.substring(0, 2).equals("<s") && line.substring(line.length() - 1, line.length()).equals(">")){
inSentence = true;
} else if(line.length() == 4 && line.equals("</s>")){
inSentence = false;
if (stats.getFilter().getNgramValue() == 0){
int numSentenceParts = 0;
for(Word w : sentence){
int v = w.getW1().length() - (stats.getFilter().getStringLength() - 1);
numSentenceParts = (v >= 0) ? (numSentenceParts + v) : numSentenceParts;
}
stats.updateUniGramOccurrences(numSentenceParts, currentFiletaxonomy);
} else if(stats.getFilter().getNgramValue() >= 1) {
stats.updateUniGramOccurrences(sentence.size(), currentFiletaxonomy);
}
sentence = runFilters(sentence, stats.getFilter());
if (!ValidationUtil.isEmpty(sentence) && taxonomyMatch) {
corpus.add(new Sentence(sentence, currentFiletaxonomy));
}
if (numSentences == numSentencesLimit) {
fj(corpus, stats);
corpus.clear();
numSentences = 0;
} else {
numSentences ++;
}
// and start a new one
sentence = new ArrayList<>();
// corpus.add(new Sentence(sentence, currentFiletaxonomy));
} else if(!(line.charAt(0) == '<' && line.charAt(line.length() - 1) == '>') && inSentence){
// } else if(!(line.charAt(0) == '<' && line.charAt(line.length() - 1) == '>') && inSentence && inParagraph){
String[] split = line.split("\t");
if(slovene) {
if (split[lemmaIndex].length() > 2 && split[lemmaIndex].charAt(split[lemmaIndex].length() - 2) == '-' && Character.isAlphabetic(split[lemmaIndex].charAt(split[lemmaIndex].length() - 1)) &&
!split[lemmaIndex].substring(split[lemmaIndex].length() - 2, split[lemmaIndex].length()).equals("-u")) {
Word word = createWord(split[wordIndex], split[lemmaIndex].substring(0, split[lemmaIndex].length() - 2), split[msdIndex], split[wordIndex], stats.getFilter());
sentence.add(word);
} else if (stats.getFilter().getNotePunctuations() && (split[lemmaIndex].length() <= 2 || (split[lemmaIndex].charAt(split[lemmaIndex].length() - 2) != '-' && !Character.isAlphabetic(split[lemmaIndex].charAt(split[lemmaIndex].length() - 1))))) {
Word word = createWord(split[wordIndex], split[lemmaIndex], split[msdIndex], split[wordIndex], stats.getFilter());
sentence.add(word);
} else if (split[lemmaIndex].length() > 2 && !split[lemmaIndex].substring(split[lemmaIndex].length() - 2, split[lemmaIndex].length()).equals("-u") ||
stats.getFilter().getNotePunctuations()) {
Word word = createWord(split[wordIndex], split[lemmaIndex].substring(0, split[lemmaIndex].length() - 2), split[msdIndex], split[wordIndex], stats.getFilter());
sentence.add(word);
}
} else {
if (split[lemmaIndex].length() > 2 && split[lemmaIndex].charAt(split[lemmaIndex].length() - 2) == '-' && Character.isAlphabetic(split[lemmaIndex].charAt(split[lemmaIndex].length() - 1)) &&
!split[lemmaIndex].substring(split[lemmaIndex].length() - 2, split[lemmaIndex].length()).equals("-z")) {
Word word = createWord(split[wordIndex], split[lemmaIndex].substring(0, split[lemmaIndex].length() - 2), split[msdIndex], split[wordIndex], stats.getFilter());
sentence.add(word);
} else if (stats.getFilter().getNotePunctuations() && (split[lemmaIndex].length() <= 2 || (split[lemmaIndex].charAt(split[lemmaIndex].length() - 2) != '-' && !Character.isAlphabetic(split[lemmaIndex].charAt(split[lemmaIndex].length() - 1))))) {
Word word = createWord(split[wordIndex], split[lemmaIndex], split[msdIndex], split[wordIndex], stats.getFilter());
sentence.add(word);
} else if (split[lemmaIndex].length() > 2 && !split[lemmaIndex].substring(split[lemmaIndex].length() - 2, split[lemmaIndex].length()).equals("-z") ||
stats.getFilter().getNotePunctuations()) {
Word word = createWord(split[wordIndex], split[lemmaIndex].substring(0, split[lemmaIndex].length() - 2), split[msdIndex], split[wordIndex], stats.getFilter());
sentence.add(word);
}
}
}
}
if (corpus.size() > 0) {
fj(corpus, stats);
corpus.clear();
}
} finally {
LineIterator.closeQuietly(it);
}
} catch (IOException e) {
e.printStackTrace();
}
// resultTaxonomy.remove("-");
return true;
}
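The dense lemma conditionals above handle what looks like a Sketch Engine style "lempos" value (the attribute name read from the .regi file), where the lemma carries a trailing "-" plus a one-letter part-of-speech code, with the "-u" (Slovene) and "-z" (non-Slovene) suffixes given special treatment as shown. A compact sketch of the suffix stripping those branches implement (the helper is illustrative, assuming that format):

// Illustrative helper: strip a "lempos" PoS suffix, e.g. "pes-m" -> "pes".
private static String stripLemposSuffix(String lempos) {
    int n = lempos.length();
    // a lempos ends in '-' followed by a single alphabetic PoS code
    if (n > 2 && lempos.charAt(n - 2) == '-' && Character.isAlphabetic(lempos.charAt(n - 1))) {
        return lempos.substring(0, n - 2); // drop "-" + PoS code
    }
    return lempos; // punctuation and bare lemmas pass through unchanged
}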
/**
* Runs the sentence through some filters, so we don't do calculations when unnecessary.
* Filters:

View File

@@ -74,7 +74,7 @@
// // public static void calculateForAll(List<Sentence> corpus, Statistics stats, String taxonomy) {
// // for (Sentence s : corpus) {
// // // disregard if wrong taxonomy
- // // if (!(s.getTaxonomy().startsWith(taxonomy))) {
+ // // if (!(s.getObservableListTaxonomy().startsWith(taxonomy))) {
// // continue;
// // }
// //
@@ -122,7 +122,7 @@
// static void calculateForAll(List<Sentence> corpus, Statistics stats, String taxonomy) {
// for (Sentence s : corpus) {
// // disregard if wrong taxonomy
- //// if (taxonomy != null && !(s.getTaxonomy().startsWith(taxonomy))) {
+ //// if (taxonomy != null && !(s.getObservableListTaxonomy().startsWith(taxonomy))) {
//// continue;
//// }
//

View File

@@ -432,7 +432,7 @@ public class Ngrams {
// String key = wordToString(skipgramCandidate, stats.getFilter().getCalculateFor());
// key = (key.charAt(key.length()-1) == ',') ? key.substring(0, key.length() - 1) : key;
// stats.updateTaxonomyResults(new MultipleHMKeys1(key),
- // stats.getCorpus().getTaxonomy());
+ // stats.getCorpus().getObservableListTaxonomy());
ArrayList<CalculateFor> otherKeys = stats.getFilter().getMultipleKeys();

View File

@@ -91,7 +91,7 @@ import data.Word;
// private static void calculateForTaxonomyAndJosType(List<Sentence> corpus, Statistics stats) {
// for (Sentence s : corpus) {
- // if (s.getTaxonomy().equalsIgnoreCase(stats.getDistributionTaxonomy())) {
+ // if (s.getObservableListTaxonomy().equalsIgnoreCase(stats.getDistributionTaxonomy())) {
// List<String> sentence = new ArrayList<>(s.getWords().size());
// List<Word> filteredWords = new ArrayList<>();
//
@@ -122,7 +122,7 @@ import data.Word;
// private static void calculateForTaxonomy(List<Sentence> corpus, Statistics stats) {
// for (Sentence s : corpus) {
- // if (s.getTaxonomy().equalsIgnoreCase(stats.getDistributionTaxonomy())) {
+ // if (s.getObservableListTaxonomy().equalsIgnoreCase(stats.getDistributionTaxonomy())) {
// List<String> sentence = new ArrayList<>(s.getWords().size());
//
// if (stats.getCf() == CalculateFor.LEMMA) {

View File

@@ -27,7 +27,8 @@ public class Corpus {
private File chosenCorpusLocation;
private Collection<File> detectedCorpusFiles;
boolean headerRead;
- private ObservableList<String> taxonomy; // if gigafida or gos
+ private ArrayList<Taxonomy> taxonomy; // if gigafida or gos
private Taxonomy taxonomyTotal;
private HashMap<String, ObservableList<String>> solarFilters; // if solar
private HashMap<String, HashSet<String>> solarFiltersForXML; // if solar - used while parsing xml
private boolean gosOrthMode;
@@ -36,6 +37,7 @@ public class Corpus {
public Corpus() {
validationErrors = new ArrayList<>();
setTotal();
}
public CorpusType getCorpusType() {
@@ -82,9 +84,25 @@ public class Corpus {
this.headerRead = headerRead;
}
- public ObservableList<String> getTaxonomy() {
+ public Taxonomy getTotal() {
+ return taxonomyTotal;
+ }
+ public void setTotal() {
+ taxonomyTotal = new Taxonomy("Total", false);
+ }
+ public ArrayList<Taxonomy> getTaxonomy() {
return taxonomy;
}
public ObservableList<String> getObservableListTaxonomy() {
ArrayList<String> al = new ArrayList<>();
for (Taxonomy t : this.taxonomy){
al.add(t.toLongNameString());
}
return FXCollections.observableArrayList(al);
}
//
// public ObservableList<String> getFormattedTaxonomy() {
// ArrayList<String> al = Tax.getTaxonomyFormatted(new ArrayList<>(taxonomy), corpusType);
@@ -92,7 +110,10 @@ public class Corpus {
// }
public void setTaxonomy(ObservableList<String> taxonomy) {
- this.taxonomy = taxonomy;
+ this.taxonomy = new ArrayList<>();
+ for(String t : taxonomy){
+ this.taxonomy.add(new Taxonomy(t, true));
+ }
logger.info("Corpus.set: ", taxonomy);
}
@@ -151,7 +172,8 @@ public class Corpus {
if (!headerRead && corpusType != null) {
// if user didn't opt into reading the headers, set default taxonomy or solar filters
if (Tax.getCorpusTypesWithTaxonomy().contains(corpusType)) {
- taxonomy = Tax.getTaxonomyForComboBox(corpusType);
+ Tax.getTaxonomyForComboBox(corpusType);
+ setTaxonomy(Tax.getTaxonomyForComboBox(corpusType));
} else if (corpusType == CorpusType.SOLAR && solarFilters == null) {
setSolarFilters(SolarFilters.getFiltersForComboBoxes());
}

View File

@@ -2,6 +2,7 @@ package data;
public enum CorpusType {
GIGAFIDA("Gigafida", "gigafida"),
GIGAFIDA2("Gigafida2.0", "gigafida2.0"),
CCKRES("ccKres ", "cckres"),
SOLAR("Šolar", "šolar"),
GOS("GOS", "gos"),

View File

@@ -10,7 +10,6 @@ import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
- import java.util.stream.Collectors;
import gui.I18N;
import org.apache.commons.lang3.StringUtils;
@@ -51,17 +50,17 @@ public class StatisticsNew {
this.corpus = corpus;
this.filter = filter;
this.taxonomyResult = new ConcurrentHashMap<>();
- this.taxonomyResult.put(Taxonomy.TOTAL, new ConcurrentHashMap<>());
+ this.taxonomyResult.put(corpus.getTotal(), new ConcurrentHashMap<>());
this.collocability = new ConcurrentHashMap<>();
this.uniGramTaxonomyOccurrences = new ConcurrentHashMap<>();
- this.uniGramTaxonomyOccurrences.put(Taxonomy.TOTAL, new AtomicLong(0L));
+ this.uniGramTaxonomyOccurrences.put(corpus.getTotal(), new AtomicLong(0L));
// create table for counting word occurrences per taxonomies
- if (this.corpus.getTaxonomy() != null && filter.getDisplayTaxonomy()) {
+ if (this.corpus.getObservableListTaxonomy() != null && filter.getDisplayTaxonomy()) {
if (this.filter.getTaxonomy().isEmpty()) {
- for (int i = 0; i < this.corpus.getTaxonomy().size(); i++) {
- this.taxonomyResult.put(Taxonomy.factoryLongName(this.corpus.getTaxonomy().get(i)), new ConcurrentHashMap<>());
+ for (int i = 0; i < this.corpus.getObservableListTaxonomy().size(); i++) {
+ this.taxonomyResult.put(Taxonomy.factoryLongName(this.corpus.getObservableListTaxonomy().get(i), corpus), new ConcurrentHashMap<>());
}
} else {
for (int i = 0; i < this.filter.getTaxonomy().size(); i++) {
@@ -234,14 +233,14 @@ public class StatisticsNew {
removeMinimalTaxonomy(taxonomyResult, filter.getMinimalTaxonomy());
// if no results and nothing to save, return false
- if (!(taxonomyResult.get(Taxonomy.TOTAL).size() > 0)) {
+ if (!(taxonomyResult.get(corpus.getTotal()).size() > 0)) {
analysisProducedResults = false;
return false;
} else {
analysisProducedResults = true;
}
- stats.add(ImmutablePair.of(resultTitle, getSortedResult(taxonomyResult.get(Taxonomy.TOTAL), Util.getValidInt(limit))));
+ stats.add(ImmutablePair.of(resultTitle, getSortedResult(taxonomyResult.get(corpus.getTotal()), Util.getValidInt(limit))));
Export.SetToCSV(stats, corpus.getChosenResultsLocation(), headerInfoBlock(), this, filter);
return true;
}
@@ -253,14 +252,14 @@ public class StatisticsNew {
if (minimalTaxonomy == 1)
return;
int occurances;
- for (MultipleHMKeys key : taxonomyResult.get(Taxonomy.TOTAL).keySet()){
+ for (MultipleHMKeys key : taxonomyResult.get(corpus.getTotal()).keySet()){
occurances = 0;
for (Taxonomy columnNameKey : taxonomyResult.keySet()){
- if(!columnNameKey.equals(Taxonomy.TOTAL) && taxonomyResult.get(columnNameKey).get(key).intValue() >= 1)
+ if(!columnNameKey.equals(corpus.getTotal()) && taxonomyResult.get(columnNameKey).get(key).intValue() >= 1)
occurances++;
}
if(occurances < minimalTaxonomy){
- taxonomyResult.get(Taxonomy.TOTAL).remove(key);
+ taxonomyResult.get(corpus.getTotal()).remove(key);
}
}
}
@@ -271,8 +270,8 @@ public class StatisticsNew {
private void removeMinimalOccurrences(Integer minimalOccurrences) {
if (minimalOccurrences == 0)
return;
- for (MultipleHMKeys key : taxonomyResult.get(Taxonomy.TOTAL).keySet()){
- if(taxonomyResult.get(Taxonomy.TOTAL).get(key).intValue() < minimalOccurrences){
+ for (MultipleHMKeys key : taxonomyResult.get(corpus.getTotal()).keySet()){
+ if(taxonomyResult.get(corpus.getTotal()).get(key).intValue() < minimalOccurrences){
for (Taxonomy t : taxonomyResult.keySet()){
taxonomyResult.get(t).remove(key);
}
@@ -349,7 +348,7 @@ public class StatisticsNew {
}
public void updateUniGramOccurrences(int amount, ArrayList<Taxonomy> taxonomy){
- uniGramTaxonomyOccurrences.get(Taxonomy.TOTAL).set(uniGramTaxonomyOccurrences.get(Taxonomy.TOTAL).longValue() + amount);
+ uniGramTaxonomyOccurrences.get(corpus.getTotal()).set(uniGramTaxonomyOccurrences.get(corpus.getTotal()).longValue() + amount);
for (Taxonomy t : taxonomy){
if (uniGramTaxonomyOccurrences.get(t) != null){
uniGramTaxonomyOccurrences.get(t).set(uniGramTaxonomyOccurrences.get(t).longValue() + amount);
@@ -360,15 +359,15 @@ public class StatisticsNew {
}
public Map<Taxonomy, AtomicLong> getUniGramOccurrences(){
- // return uniGramTaxonomyOccurrences.get(Taxonomy.TOTAL).longValue();
+ // return uniGramTaxonomyOccurrences.get(corpus.getTotal()).longValue();
return uniGramTaxonomyOccurrences;
}
public void updateTaxonomyResults(MultipleHMKeys o, List<Taxonomy> taxonomy) {
for (Taxonomy key : taxonomyResult.keySet()) {
// first word should have the same taxonomy as others
- if (key.equals(Taxonomy.TOTAL) || taxonomy.contains(key)) {
- // if (key.equals(Taxonomy.TOTAL) || taxonomy != null && taxonomy.contains(key)) {
+ if (key.equals(corpus.getTotal()) || taxonomy.contains(key)) {
+ // if (key.equals(corpus.getTotal()) || taxonomy != null && taxonomy.contains(key)) {
// if taxonomy not in map and in this word
AtomicLong r = taxonomyResult.get(key).putIfAbsent(o, new AtomicLong(1));
@@ -607,7 +606,7 @@ public class StatisticsNew {
// sortedTaxonomyString.add(t);
// }
// getTaxonomyForTaxonomyResult
- tax = Tax.getTaxonomyForTaxonomyResult(corpus.getCorpusType(), taxonomyResult.keySet());
+ tax = Tax.getTaxonomyForTaxonomyResult(corpus, taxonomyResult.keySet());
}
// String sep = "";
@@ -618,11 +617,11 @@ public class StatisticsNew {
}
// info.put(sep = sep + " ", s);
- if (uniGramTaxonomyOccurrences.get(Taxonomy.factoryLongName(s)) == null) {
+ if (uniGramTaxonomyOccurrences.get(Taxonomy.factoryLongName(s, corpus)) == null) {
info.put(s, "");
continue;
}
- int n = uniGramTaxonomyOccurrences.get(Taxonomy.factoryLongName(s)).intValue();
+ int n = uniGramTaxonomyOccurrences.get(Taxonomy.factoryLongName(s, corpus)).intValue();
if (n == 0) {
info.put(s, "");
} else {
@@ -662,11 +661,11 @@ public class StatisticsNew {
// count number of all words
long N = 0;
- for(AtomicLong a : oneWordTaxonomyResult.get(Taxonomy.TOTAL).values()){
+ for(AtomicLong a : oneWordTaxonomyResult.get(corpus.getTotal()).values()){
N += a.longValue();
}
- for(MultipleHMKeys hmKey : taxonomyResult.get(Taxonomy.TOTAL).keySet()) {
+ for(MultipleHMKeys hmKey : taxonomyResult.get(corpus.getTotal()).keySet()) {
// String[] splitedString = hmKey.getK1().split("\\s+");
long sum_fwi =0L;
@@ -674,15 +673,15 @@ public class StatisticsNew {
for(MultipleHMKeys smallHmKey : hmKey.getSplittedMultipleHMKeys()){
// System.out.println(smallHmKey.getK1());
- sum_fwi += oneWordTaxonomyResult.get(Taxonomy.TOTAL).get(smallHmKey).longValue();
- mul_fwi *= oneWordTaxonomyResult.get(Taxonomy.TOTAL).get(smallHmKey).longValue();
+ sum_fwi += oneWordTaxonomyResult.get(corpus.getTotal()).get(smallHmKey).longValue();
+ mul_fwi *= oneWordTaxonomyResult.get(corpus.getTotal()).get(smallHmKey).longValue();
}
// String t = hmKey.getK1();
// if(hmKey.getK1().equals("v Slovenija")){
// System.out.println("TEST");
//
// }
- double O = (double)taxonomyResult.get(Taxonomy.TOTAL).get(hmKey).longValue();
+ double O = (double)taxonomyResult.get(corpus.getTotal()).get(hmKey).longValue();
double n = (double)filter.getNgramValue();
double E = (double)mul_fwi / Math.pow(N, n - 1);
if (collocabilityMap.keySet().contains(Collocability.DICE)){

View File

@@ -10,7 +10,7 @@ import javafx.collections.ObservableList;
public class Tax {
private static LinkedHashMap<String, String> GIGAFIDA_TAXONOMY;
private static LinkedHashMap<String, String> GOS_TAXONOMY;
- private static final HashSet<CorpusType> corpusTypesWithTaxonomy = new HashSet<>(Arrays.asList(CorpusType.GIGAFIDA, CorpusType.GOS, CorpusType.CCKRES, CorpusType.SSJ500K, CorpusType.VERT));
+ private static final HashSet<CorpusType> corpusTypesWithTaxonomy = new HashSet<>(Arrays.asList(CorpusType.GIGAFIDA, CorpusType.GOS, CorpusType.CCKRES, CorpusType.SSJ500K, CorpusType.GIGAFIDA2, CorpusType.VERT));
static {
// GIGAFIDA ----------------------------
@@ -104,7 +104,7 @@ public class Tax {
public static ObservableList<String> getTaxonomyForComboBox(CorpusType corpusType, HashSet<String> foundTax) {
LinkedHashMap<String, String> tax = new LinkedHashMap<>();
- if (corpusType == CorpusType.GIGAFIDA || corpusType == CorpusType.CCKRES || corpusType == CorpusType.SSJ500K) {
+ if (corpusType == CorpusType.GIGAFIDA || corpusType == CorpusType.CCKRES || corpusType == CorpusType.SSJ500K || corpusType == CorpusType.GIGAFIDA2) {
tax = GIGAFIDA_TAXONOMY;
} else if (corpusType == CorpusType.GOS) {
tax = GOS_TAXONOMY;
@@ -143,13 +143,13 @@ public class Tax {
/**
* Returns taxonomy names only for items found in headers
*/
- public static ArrayList<String> getTaxonomyForTaxonomyResult(CorpusType corpusType, Set<Taxonomy> foundTax) {
+ public static ArrayList<String> getTaxonomyForTaxonomyResult(Corpus corpus, Set<Taxonomy> foundTax) {
LinkedHashMap<String, String> tax = new LinkedHashMap<>();
Set<Taxonomy> foundTaxHS= new HashSet<>(foundTax);
- if (corpusType == CorpusType.GIGAFIDA || corpusType == CorpusType.CCKRES || corpusType == CorpusType.SSJ500K) {
+ if (corpus.getCorpusType() == CorpusType.GIGAFIDA || corpus.getCorpusType() == CorpusType.CCKRES || corpus.getCorpusType() == CorpusType.SSJ500K || corpus.getCorpusType() == CorpusType.GIGAFIDA2) {
tax = GIGAFIDA_TAXONOMY;
- } else if (corpusType == CorpusType.GOS) {
+ } else if (corpus.getCorpusType() == CorpusType.GOS) {
tax = GOS_TAXONOMY;
}
@@ -161,7 +161,7 @@ public class Tax {
for(Taxonomy e : foundTaxHS){
String[] elList = e.toString().split("\\.");
for(int i = 1; i < elList.length - 1; i++){
- Taxonomy candidate = Taxonomy.factory(String.join(".", Arrays.copyOfRange(elList, 0, elList.length - i)));
+ Taxonomy candidate = Taxonomy.factory(String.join(".", Arrays.copyOfRange(elList, 0, elList.length - i)), corpus);
genFoundTax.add(candidate);
}
}
@@ -186,7 +186,7 @@ public class Tax {
// assures same relative order
for (String t : tax.keySet()) {
- if (foundTaxHS.contains(Taxonomy.factory(t))) {
+ if (foundTaxHS.contains(Taxonomy.factory(t, corpus))) {
taxForCombo.add(tax.get(t));
}
}
@@ -263,13 +263,19 @@ public class Tax {
public static ArrayList<String> getTaxonomyForInfo(CorpusType corpusType, ArrayList<Taxonomy> taxonomy) {
LinkedHashMap<String, String> tax = new LinkedHashMap<>();
- if (corpusType == CorpusType.GIGAFIDA || corpusType == CorpusType.CCKRES || corpusType == CorpusType.SSJ500K) {
+ ArrayList<String> result = new ArrayList<>();
+ if (corpusType == CorpusType.GIGAFIDA || corpusType == CorpusType.CCKRES || corpusType == CorpusType.SSJ500K || corpusType == CorpusType.GIGAFIDA2) {
tax = GIGAFIDA_TAXONOMY;
} else if (corpusType == CorpusType.GOS) {
tax = GOS_TAXONOMY;
} else if (corpusType == CorpusType.VERT) {
for (Taxonomy t : taxonomy) {
result.add(t.toLongNameString());
}
return result;
}
- ArrayList<String> result = new ArrayList<>();
for (Taxonomy t : taxonomy) {
result.add(tax.get(t.toString()));

View File

@@ -5,7 +5,7 @@ import java.util.concurrent.ConcurrentHashMap;
import javafx.collections.ObservableList;
- public enum Taxonomy {
+ enum TaxonomyEnum {
TOTAL("Total", "Total"),
// GOS
@@ -85,7 +85,7 @@ public enum Taxonomy {
private final String name;
private final String longName;
- Taxonomy(String name, String longName) {
+ TaxonomyEnum(String name, String longName) {
this.name = name;
this.longName = longName;
}
@@ -98,7 +98,7 @@ public enum Taxonomy {
return this.longName;
}
- public static Taxonomy factory(String tax) {
+ public static TaxonomyEnum factory(String tax) {
if (tax != null) {
// GOS
if (DISKURZ.toString().equals(tax)) {
@@ -289,7 +289,7 @@ public enum Taxonomy {
return null;
}
- public static Taxonomy factoryLongName(String tax) {
+ public static TaxonomyEnum factoryLongName(String tax) {
if (tax != null) {
// GOS
if (DISKURZ.toLongNameString().equals(tax)) {
@@ -477,11 +477,15 @@ public enum Taxonomy {
}
}
// return new Taxonomy(tax, tax);
System.out.println("2.");
System.out.println(tax);
return null;
}
- public static ArrayList<Taxonomy> taxonomySelected(Taxonomy disjointTaxonomy) {
- ArrayList<Taxonomy> r = new ArrayList<>();
+ public static ArrayList<TaxonomyEnum> taxonomySelected(TaxonomyEnum disjointTaxonomy) {
+ ArrayList<TaxonomyEnum> r = new ArrayList<>();
System.out.println(disjointTaxonomy);
if(disjointTaxonomy.equals(DISKURZ)){
@@ -628,9 +632,9 @@ public enum Taxonomy {
return r;
}
- public static ArrayList<Taxonomy> taxonomyDeselected(Taxonomy disjointTaxonomy){
- ArrayList<Taxonomy> r = new ArrayList<>();
- Map<Taxonomy, Taxonomy> connections = new ConcurrentHashMap<>();
+ public static ArrayList<TaxonomyEnum> taxonomyDeselected(TaxonomyEnum disjointTaxonomy){
+ ArrayList<TaxonomyEnum> r = new ArrayList<>();
+ Map<TaxonomyEnum, TaxonomyEnum> connections = new ConcurrentHashMap<>();
connections.put(DISKURZ_JAVNI, DISKURZ);
connections.put(DISKURZ_INFORMATIVNO_IZOBRAZEVALNI, DISKURZ_JAVNI);
connections.put(DISKURZ_RAZVEDRILNI, DISKURZ_JAVNI);
@@ -685,7 +689,7 @@ public enum Taxonomy {
connections.put(FT_DA, FT_LEKTORIRANO);
connections.put(FT_NE, FT_LEKTORIRANO);
- Taxonomy currentTaxonomy = disjointTaxonomy;
+ TaxonomyEnum currentTaxonomy = disjointTaxonomy;
r.add(currentTaxonomy);
while(connections.containsKey(currentTaxonomy)){
currentTaxonomy = connections.get(currentTaxonomy);
@@ -695,29 +699,36 @@ public enum Taxonomy {
return r;
}
- public static ArrayList<Taxonomy> convertStringListToTaxonomyList(ObservableList<String> stringList){
+ public static ArrayList<TaxonomyEnum> convertStringListToTaxonomyList(ObservableList<String> stringList, Corpus corpus){
System.out.println("1.");
System.out.println(stringList);
- ArrayList<Taxonomy> taxonomyList = new ArrayList<>();
+ ArrayList<TaxonomyEnum> taxonomyList = new ArrayList<>();
// System.out.println("INTERESTING STUFF");
// System.out.println(stringList);
for (String e : stringList) {
- taxonomyList.add(factoryLongName(e));
+ for (Taxonomy t : corpus.getTaxonomy()){
+ if (t.toLongNameString().equals(e)) {
+ taxonomyList.add(t.getTaxonomyEnum());
+ }
+ }
}
// System.out.println(taxonomyList);
// System.out.println("-----------------");
return taxonomyList;
}
- public static void modifyingTaxonomy(ArrayList<Taxonomy> taxonomy, ArrayList<Taxonomy> checkedItemsTaxonomy, Corpus corpus){
+ public static void modifyingTaxonomy(ArrayList<TaxonomyEnum> taxonomy, ArrayList<TaxonomyEnum> checkedItemsTaxonomy, Corpus corpus){
// get taxonomies that were selected/deselected by user
- // System.out.println(taxonomy);
- // System.out.println(checkedItemsTaxonomy);
+ System.out.println("Print here:");
+ System.out.println(taxonomy);
+ System.out.println(checkedItemsTaxonomy);
+ System.out.println("-------------");
- Set<Taxonomy> disjointTaxonomies = new HashSet<>(checkedItemsTaxonomy);
+ Set<TaxonomyEnum> disjointTaxonomies = new HashSet<>(checkedItemsTaxonomy);
if (taxonomy != null) {
disjointTaxonomies.addAll(taxonomy);
- for (Taxonomy s : checkedItemsTaxonomy) {
+ for (TaxonomyEnum s : checkedItemsTaxonomy) {
if (taxonomy.contains(s)) {
disjointTaxonomies.remove(s);
}
@@ -725,11 +736,11 @@ public enum Taxonomy {
}
// remove previously selected items plus remove taxonomies that are not presented in current setup
- ArrayList<Taxonomy> disArr = new ArrayList<>(disjointTaxonomies);
+ ArrayList<TaxonomyEnum> disArr = new ArrayList<>(disjointTaxonomies);
int i = 0;
while(i < disArr.size()){
- Taxonomy s = disArr.get(i);
- if(!Taxonomy.convertStringListToTaxonomyList(corpus.getTaxonomy()).contains(s)){
+ TaxonomyEnum s = disArr.get(i);
+ if(!TaxonomyEnum.convertStringListToTaxonomyList(corpus.getObservableListTaxonomy(), corpus).contains(s)){
disjointTaxonomies.remove(s);
disArr.remove(s);
// taxonomy.remove(s);
@@ -740,14 +751,14 @@ public enum Taxonomy {
if (disjointTaxonomies.size() > 0) {
- Taxonomy disjointTaxonomy = disjointTaxonomies.iterator().next();
+ TaxonomyEnum disjointTaxonomy = disjointTaxonomies.iterator().next();
// taxonomy was selected
if (checkedItemsTaxonomy.contains(disjointTaxonomy)) {
- ArrayList<Taxonomy> addTaxonomies = Taxonomy.taxonomySelected(disjointTaxonomy);
+ ArrayList<TaxonomyEnum> addTaxonomies = TaxonomyEnum.taxonomySelected(disjointTaxonomy);
checkedItemsTaxonomy.addAll(addTaxonomies);
} else if (taxonomy.contains(disjointTaxonomy)) {
- ArrayList<Taxonomy> removeTaxonomies = Taxonomy.taxonomyDeselected(disjointTaxonomy);
+ ArrayList<TaxonomyEnum> removeTaxonomies = TaxonomyEnum.taxonomyDeselected(disjointTaxonomy);
checkedItemsTaxonomy.removeAll(removeTaxonomies);
}
}
@@ -755,3 +766,203 @@
}
public class Taxonomy {
private String name;
private String longName;
private TaxonomyEnum taxonomyEnum;
public Taxonomy(String tax, boolean longName) {
if (!longName) {
this.taxonomyEnum = TaxonomyEnum.factory(tax);
} else {
this.taxonomyEnum = TaxonomyEnum.factoryLongName(tax);
}
if (taxonomyEnum != null){
this.name = this.taxonomyEnum.toString();
this.longName = this.taxonomyEnum.toLongNameString();
} else {
this.name = tax;
this.longName = tax;
}
}
public Taxonomy(TaxonomyEnum taxonomyEnum) {
this.taxonomyEnum = taxonomyEnum;
this.name = this.taxonomyEnum.toString();
this.longName = this.taxonomyEnum.toLongNameString();
}
// public Taxonomy(String name, String longName) {
// this.name = name;
// this.longName = longName;
// }
public String toString() {
return this.name;
}
public String toLongNameString() {
return this.longName;
}
public TaxonomyEnum getTaxonomyEnum() {
return this.taxonomyEnum;
}
public static Taxonomy factory(String tax, Corpus corpus) {
for (Taxonomy t : corpus.getTaxonomy()){
if(tax.equals(t.toString()))
return t;
}
return null;
// return new Taxonomy(tax, false);
}
public static Taxonomy factoryLongName(String tax, Corpus corpus) {
for (Taxonomy t : corpus.getTaxonomy()){
if(tax.equals(t.toLongNameString()))
return t;
}
return null;
// return new Taxonomy(tax, true);
}
// public static ArrayList<Taxonomy> taxonomySelected(Taxonomy disjointTaxonomy) {
// ArrayList<TaxonomyEnum> rTaxonomyEnum = TaxonomyEnum.taxonomySelected(disjointTaxonomy.getTaxonomyEnum());
//
// ArrayList<Taxonomy> r = new ArrayList<>();
//
// for(TaxonomyEnum t : rTaxonomyEnum){
// r.add(new Taxonomy(t.toString(), false));
// }
//
// return r;
// }
public static ArrayList<Taxonomy> taxonomyDeselected(Taxonomy disjointTaxonomy){
// ArrayList<TaxonomyEnum> r = new ArrayList<>();
// Map<TaxonomyEnum, TaxonomyEnum> connections = new ConcurrentHashMap<>();
// connections.put(DISKURZ_JAVNI, DISKURZ);
// connections.put(DISKURZ_INFORMATIVNO_IZOBRAZEVALNI, DISKURZ_JAVNI);
// connections.put(DISKURZ_RAZVEDRILNI, DISKURZ_JAVNI);
// connections.put(DISKURZ_NEJAVNI, DISKURZ);
// connections.put(DISKURZ_NEZASEBNI, DISKURZ_NEJAVNI);
// connections.put(DISKURZ_ZASEBNI, DISKURZ_NEJAVNI);
// connections.put(SITUACIJA_RADIO, SITUACIJA);
// connections.put(SITUACIJA_TELEVIZIJA, SITUACIJA);
// connections.put(KANAL_OSEBNI_STIK, KANAL);
// connections.put(KANAL_TELEFON, KANAL);
// connections.put(KANAL_RADIO, KANAL);
// connections.put(KANAL_TELEVIZIJA, KANAL);
//
// connections.put(SSJ_KNJIZNO, SSJ_TISK);
// connections.put(SSJ_LEPOSLOVNO, SSJ_KNJIZNO);
// connections.put(SSJ_STROKOVNO, SSJ_KNJIZNO);
// connections.put(SSJ_PERIODICNO, SSJ_TISK);
// connections.put(SSJ_CASOPIS, SSJ_PERIODICNO);
// connections.put(SSJ_REVIJA, SSJ_PERIODICNO);
// connections.put(SSJ_DRUGO, SSJ_TISK);
//
// connections.put(FT_P_GOVORNI, FT_P_PRENOSNIK);
// connections.put(FT_P_ELEKTRONSKI, FT_P_PRENOSNIK);
// connections.put(FT_P_PISNI, FT_P_PRENOSNIK);
// connections.put(FT_P_OBJAVLJENO, FT_P_PISNI);
// connections.put(FT_P_KNJIZNO, FT_P_OBJAVLJENO);
// connections.put(FT_P_PERIODICNO, FT_P_OBJAVLJENO);
// connections.put(FT_P_CASOPISNO, FT_P_OBJAVLJENO);
// connections.put(FT_P_DNEVNO, FT_P_CASOPISNO);
// connections.put(FT_P_VECKRAT_TEDENSKO, FT_P_CASOPISNO);
// connections.put(FT_P_CASOPISNO_TEDENSKO, FT_P_CASOPISNO);
// connections.put(FT_P_REVIALNO, FT_P_PERIODICNO);
// connections.put(FT_P_TEDENSKO, FT_P_REVIALNO);
// connections.put(FT_P_STIRINAJSTDNEVNO, FT_P_REVIALNO);
// connections.put(FT_P_MESECNO, FT_P_REVIALNO);
// connections.put(FT_P_REDKEJE_KOT_MESECNO, FT_P_REVIALNO);
// connections.put(FT_P_OBCASNO, FT_P_REVIALNO);
// connections.put(FT_P_NEOBJAVLJENO, FT_P_PISNI);
// connections.put(FT_P_JAVNO, FT_P_NEOBJAVLJENO);
// connections.put(FT_P_INTERNO, FT_P_NEOBJAVLJENO);
// connections.put(FT_P_ZASEBNO, FT_P_NEOBJAVLJENO);
// connections.put(FT_UMETNOSTNA, FT_ZVRST);
// connections.put(FT_PESNISKA, FT_UMETNOSTNA);
// connections.put(FT_PROZNA, FT_UMETNOSTNA);
// connections.put(FT_DRAMSKA, FT_UMETNOSTNA);
// connections.put(FT_NEUMETNOSTNA, FT_ZVRST);
// connections.put(FT_STROKOVNA, FT_NEUMETNOSTNA);
// connections.put(FT_HID, FT_STROKOVNA);
// connections.put(FT_NIT, FT_STROKOVNA);
// connections.put(FT_NESTROKOVNA, FT_NEUMETNOSTNA);
// connections.put(FT_PRAVNA, FT_NEUMETNOSTNA);
// connections.put(FT_DA, FT_LEKTORIRANO);
// connections.put(FT_NE, FT_LEKTORIRANO);
//
// TaxonomyEnum currentTaxonomy = disjointTaxonomy;
// r.add(currentTaxonomy);
// while(connections.containsKey(currentTaxonomy)){
// currentTaxonomy = connections.get(currentTaxonomy);
// r.add(currentTaxonomy);
// }
// Collections.reverse(r);
// return r;
return null;
}
public static ArrayList<Taxonomy> convertStringListToTaxonomyList(ObservableList<String> stringList, Corpus corpus){
ArrayList<Taxonomy> taxonomyList = new ArrayList<>();
for (String e : stringList) {
for (Taxonomy t : corpus.getTaxonomy()){
if (t.toLongNameString().equals(e)) {
taxonomyList.add(t);
}
}
}
return taxonomyList;
}
public static ArrayList<TaxonomyEnum> taxonomyToTaxonomyEnum(ArrayList<Taxonomy> taxonomy){
System.out.println(taxonomy);
if (taxonomy == null) {
return null;
}
ArrayList<TaxonomyEnum> r = new ArrayList<>();
for (Taxonomy t : taxonomy){
if (t.taxonomyEnum == null){
return null;
}
r.add(t.taxonomyEnum);
}
return r;
}
public static ArrayList<Taxonomy> taxonomyEnumToTaxonomy(ArrayList<TaxonomyEnum> taxonomy, Corpus corpus){
// ArrayList<Taxonomy> r = new ArrayList<>();
// for (TaxonomyEnum t : taxonomy){
// r.add(new Taxonomy(t));
// }
// return r;
ArrayList<Taxonomy> r = new ArrayList<>();
for (TaxonomyEnum te : taxonomy){
for (Taxonomy t : corpus.getTaxonomy()){
if (t.taxonomyEnum.equals(te)) {
r.add(t);
break;
}
}
}
return r;
}
public static ArrayList<Taxonomy> modifyingTaxonomy(ArrayList<Taxonomy> taxonomy, ObservableList<String> checkedItems, Corpus corpus){
ArrayList<TaxonomyEnum> checkedItemsTaxonomy = TaxonomyEnum.convertStringListToTaxonomyList(checkedItems, corpus);
if (checkedItemsTaxonomy != null && corpus.getCorpusType() != CorpusType.VERT) {
TaxonomyEnum.modifyingTaxonomy(Taxonomy.taxonomyToTaxonomyEnum(taxonomy), checkedItemsTaxonomy, corpus);
return taxonomyEnumToTaxonomy(checkedItemsTaxonomy, corpus);
} else {
return convertStringListToTaxonomyList(checkedItems, corpus);
}
}
}
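The new Taxonomy wrapper resolves names against the taxonomy list registered on the Corpus and returns null for anything unknown, so call sites must be null-tolerant. A reduced, runnable model of that lookup pattern (Strings in place of Taxonomy objects; names invented for illustration):

    import java.util.*;

    public class LookupSketch {
        // mirrors Taxonomy.factory: resolve against a registered list, null when unknown
        static String factory(String name, List<String> registered) {
            for (String t : registered) {
                if (name.equals(t)) return t;
            }
            return null; // caller must handle unknown names
        }

        public static void main(String[] args) {
            List<String> corpusTaxonomy = Arrays.asList("tisk", "internet");
            System.out.println(factory("tisk", corpusTaxonomy));  // tisk
            System.out.println(factory("radio", corpusTaxonomy)); // null
        }
    }

The real factory() and factoryLongName() follow the same shape, differing only in which display string they compare against.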
View File
@ -1,8 +1,11 @@
package gui; package gui;
import alg.XML_processing;
import data.*; import data.*;
import javafx.application.HostServices; import javafx.application.HostServices;
import javafx.beans.binding.Bindings; import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyDoubleWrapper;
import javafx.beans.value.ChangeListener; import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue; import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections; import javafx.collections.FXCollections;
@ -25,7 +28,6 @@ import java.util.regex.Pattern;
import static alg.XML_processing.readXML; import static alg.XML_processing.readXML;
import static gui.GUIController.showAlert; import static gui.GUIController.showAlert;
import static gui.Messages.*;
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
public class CharacterAnalysisTab { public class CharacterAnalysisTab {
@ -160,6 +162,7 @@ public class CharacterAnalysisTab {
private boolean useDb; private boolean useDb;
private HostServices hostService; private HostServices hostService;
private ListChangeListener<String> taxonomyListener; private ListChangeListener<String> taxonomyListener;
private InvalidationListener progressBarListener;
private static final String [] N_GRAM_COMPUTE_FOR_LETTERS_ARRAY = {"calculateFor.WORD", "calculateFor.LEMMA"}; private static final String [] N_GRAM_COMPUTE_FOR_LETTERS_ARRAY = {"calculateFor.WORD", "calculateFor.LEMMA"};
private static final ArrayList<String> N_GRAM_COMPUTE_FOR_LETTERS = new ArrayList<>(Arrays.asList(N_GRAM_COMPUTE_FOR_LETTERS_ARRAY)); private static final ArrayList<String> N_GRAM_COMPUTE_FOR_LETTERS = new ArrayList<>(Arrays.asList(N_GRAM_COMPUTE_FOR_LETTERS_ARRAY));
@ -241,7 +244,12 @@ public class CharacterAnalysisTab {
msd = new ArrayList<>(); msd = new ArrayList<>();
// taxonomy // taxonomy
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getTaxonomy().size() > 0) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getObservableListTaxonomy().size() > 0) {
taxonomyCCB.setDisable(false);
} else {
taxonomyCCB.setDisable(true);
}
if (taxonomyListener != null){ if (taxonomyListener != null){
taxonomyCCB.getCheckModel().getCheckedItems().removeListener(taxonomyListener); taxonomyCCB.getCheckModel().getCheckedItems().removeListener(taxonomyListener);
} }
@ -253,15 +261,15 @@ public class CharacterAnalysisTab {
public void onChanged(ListChangeListener.Change<? extends String> c){ public void onChanged(ListChangeListener.Change<? extends String> c){
if(changing) { if(changing) {
ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems(); ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems();
ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems); // ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems);
Taxonomy.modifyingTaxonomy(taxonomy, checkedItemsTaxonomy, corpus); ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.modifyingTaxonomy(taxonomy, checkedItems, corpus);
taxonomy = new ArrayList<>(); taxonomy = new ArrayList<>();
taxonomy.addAll(checkedItemsTaxonomy); taxonomy.addAll(checkedItemsTaxonomy);
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
// taxonomyCCB.getCheckModel().clearChecks(); // taxonomyCCB.getCheckModel().clearChecks();
changing = false; changing = false;
@ -275,19 +283,17 @@ public class CharacterAnalysisTab {
} }
}; };
taxonomyCCB.getCheckModel().clearChecks(); taxonomyCCB.getCheckModel().clearChecks();
taxonomyCCB.setDisable(false);
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
taxonomyCCB.getCheckModel().getCheckedItems().addListener(taxonomyListener); taxonomyCCB.getCheckModel().getCheckedItems().addListener(taxonomyListener);
} else {
taxonomyCCB.setDisable(true);
}
displayTaxonomy = false; displayTaxonomy = false;
displayTaxonomyChB.setSelected(false); displayTaxonomyChB.setSelected(false);
// set // set
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getTaxonomy().size() > 0) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getObservableListTaxonomy().size() > 0) {
displayTaxonomyChB.setDisable(false); displayTaxonomyChB.setDisable(false);
displayTaxonomyChB.selectedProperty().addListener((observable, oldValue, newValue) -> { displayTaxonomyChB.selectedProperty().addListener((observable, oldValue, newValue) -> {
displayTaxonomy = newValue; displayTaxonomy = newValue;
@ -475,7 +481,7 @@ public class CharacterAnalysisTab {
// if ((currentCorpusType != null && currentCorpusType != corpus.getCorpusType())) { // if ((currentCorpusType != null && currentCorpusType != corpus.getCorpusType())) {
// // user changed corpus (by type) or by selection & triggered a rescan of headers // // user changed corpus (by type) or by selection & triggered a rescan of headers
// // see if we read taxonomy from headers, otherwise use default values for given corpus // // see if we read taxonomy from headers, otherwise use default values for given corpus
// ObservableList<String> tax = corpus.getTaxonomy(); // ObservableList<String> tax = corpus.getObservableListTaxonomy();
// taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType()); // taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType());
// //
// currentCorpusType = corpus.getCorpusType(); // currentCorpusType = corpus.getCorpusType();
@ -485,7 +491,7 @@ public class CharacterAnalysisTab {
// } // }
// //
// // see if we read taxonomy from headers, otherwise use default values for given corpus // // see if we read taxonomy from headers, otherwise use default values for given corpus
// ObservableList<String> tax = corpus.getTaxonomy(); // ObservableList<String> tax = corpus.getObservableListTaxonomy();
// taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType()); // taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType());
// taxonomyCCB.getItems().addAll(taxonomyCCBValues); // taxonomyCCB.getItems().addAll(taxonomyCCBValues);
// //
@ -548,7 +554,7 @@ public class CharacterAnalysisTab {
// if calculateFor was selected for something other than a word or a lemma -> reset // if calculateFor was selected for something other than a word or a lemma -> reset
if (!(calculateFor == CalculateFor.WORD || calculateFor == CalculateFor.LEMMA)) { if (!(calculateFor == CalculateFor.WORD || calculateFor == CalculateFor.LEMMA)) {
// if the user selected something else before selecting ngram for letters, reset that choice // if the user selected something else before selecting ngram for letters, reset that choice
calculateFor = CalculateFor.LEMMA; calculateFor = CalculateFor.WORD;
calculateForCB.getSelectionModel().select(0); calculateForCB.getSelectionModel().select(0);
} }
@ -637,16 +643,66 @@ public class CharacterAnalysisTab {
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
@Override @Override
protected Void call() throws Exception { protected Void call() throws Exception {
long i = 0; if(corpusFiles.size() > 1){
cancel.setVisible(true);
}
int i = 0;
// DateFormat df = new SimpleDateFormat("hh:mm:ss");
Date startTime = new Date();
Date previousTime = new Date();
int remainingSeconds = -1;
for (File f : corpusFiles) { for (File f : corpusFiles) {
readXML(f.toString(), statistic); final int iFinal = i;
XML_processing xml_processing = new XML_processing();
i++; i++;
if (isCancelled()) { if (isCancelled()) {
updateMessage(I18N.get("message.CANCELING_NOTIFICATION")); updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
break; break;
} }
if (corpusFiles.size() > 1) {
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - startTime.getTime()) * (1.0/i) * (corpusFiles.size() - i) / 1000);
previousTime = new Date();
}
this.updateProgress(i, corpusFiles.size()); this.updateProgress(i, corpusFiles.size());
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName())); this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName(), remainingSeconds));
} else {
if(progressBarListener != null) {
xml_processing.progressProperty().removeListener(progressBarListener);
}
progressBarListener = new InvalidationListener() {
int remainingSeconds = -1;
Date previousTime = new Date();
@Override
public void invalidated(Observable observable) {
cancel.setVisible(true);
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - xml_processing.startTime.getTime()) *
(1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)) *
((corpusFiles.size() - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()) / 1000);
previousTime = new Date();
}
xml_processing.isCancelled = isCancelled();
updateProgress((iFinal * 100) + ((ReadOnlyDoubleWrapper) observable).get() + 1, corpusFiles.size() * 100);
updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), iFinal + 1, corpusFiles.size(), f.getName(), remainingSeconds));
// updateProgress((iFinal * 100) + (double) observable, corpusFiles.size() * 100);
}
};
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName(), remainingSeconds));
xml_processing.progressProperty().addListener(progressBarListener);
// xml_processing.progressProperty().addListener((obs, oldProgress, newProgress) ->
// updateProgress((iFinal * 100) + newProgress.doubleValue(), corpusFiles.size() * 100));
}
xml_processing.readXML(f.toString(), statistic);
if (isCancelled()) {
updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
break;
}
// readXML(f.toString(), statistic, this, corpusFiles.size(), startTime, previousTime, i);
} }
return null; return null;
@ -703,8 +759,6 @@ public class CharacterAnalysisTab {
logger.info("cancel button"); logger.info("cancel button");
}); });
cancel.setVisible(true);
final Thread thread = new Thread(task, "task"); final Thread thread = new Thread(task, "task");
thread.setDaemon(true); thread.setDaemon(true);
thread.start(); thread.start();
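The time prediction added to this task is straight linear extrapolation: average time per processed file multiplied by the number of files left, with the 500 ms check merely throttling how often the estimate is recomputed. A standalone sketch of the same arithmetic (variable names ours, not the project's):

    import java.util.Date;

    public class EtaSketch {
        public static void main(String[] args) throws InterruptedException {
            Date startTime = new Date();
            int total = 4;
            for (int i = 1; i <= total; i++) {
                Thread.sleep(200); // stand-in for processing one corpus file
                long elapsedMs = new Date().getTime() - startTime.getTime();
                // average time per file so far, scaled by the files still to go
                int remainingSeconds = (int) (elapsedMs * (1.0 / i) * (total - i) / 1000);
                System.out.printf("file %d/%d, ~%d s left%n", i, total, remainingSeconds);
            }
        }
    }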
View File
@ -6,11 +6,13 @@ import static gui.Messages.*;
import static util.Util.*; import static util.Util.*;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.util.*; import java.util.*;
import javafx.scene.layout.AnchorPane; import javafx.scene.layout.AnchorPane;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOCase; import org.apache.commons.io.IOCase;
import org.apache.commons.io.LineIterator;
import org.apache.commons.io.filefilter.FileFilterUtils; import org.apache.commons.io.filefilter.FileFilterUtils;
import org.apache.commons.io.filefilter.TrueFileFilter; import org.apache.commons.io.filefilter.TrueFileFilter;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
@ -205,9 +207,6 @@ public class CorpusTab {
// scan for xml files // scan for xml files
Collection<File> corpusFiles = FileUtils.listFiles(selectedDirectory, FileFilterUtils.suffixFileFilter("xml", IOCase.INSENSITIVE), TrueFileFilter.INSTANCE); Collection<File> corpusFiles = FileUtils.listFiles(selectedDirectory, FileFilterUtils.suffixFileFilter("xml", IOCase.INSENSITIVE), TrueFileFilter.INSTANCE);
corpusLocation = selectedDirectory.getAbsolutePath();
corpusFilesSize = String.valueOf(corpusFiles.size());
Messages.setChooseCorpusProperties(corpusLocation, corpusFilesSize, corpusType != null ? corpusType.toString() : null);
// make sure there are corpus files in selected directory or notify the user about it // make sure there are corpus files in selected directory or notify the user about it
if (corpusFiles.size() == 0) { if (corpusFiles.size() == 0) {
@ -215,10 +214,20 @@ public class CorpusTab {
corpusFiles = FileUtils.listFiles(selectedDirectory, FileFilterUtils.suffixFileFilter("vert", IOCase.INSENSITIVE), TrueFileFilter.INSTANCE); corpusFiles = FileUtils.listFiles(selectedDirectory, FileFilterUtils.suffixFileFilter("vert", IOCase.INSENSITIVE), TrueFileFilter.INSTANCE);
Collection<File> corpusFilesRegi = FileUtils.listFiles(selectedDirectory, FileFilterUtils.suffixFileFilter("regi", IOCase.INSENSITIVE), TrueFileFilter.INSTANCE); Collection<File> corpusFilesRegi = FileUtils.listFiles(selectedDirectory, FileFilterUtils.suffixFileFilter("regi", IOCase.INSENSITIVE), TrueFileFilter.INSTANCE);
// if (!checkRegiFile(corpusFilesRegi)){
// return;
// }
if (corpusFiles.size() == 0){ if (corpusFiles.size() == 0){
logger.info("alert: ", I18N.get("message.WARNING_CORPUS_NOT_FOUND")); logger.info("alert: ", I18N.get("message.WARNING_CORPUS_NOT_FOUND"));
showAlert(Alert.AlertType.ERROR, I18N.get("message.WARNING_CORPUS_NOT_FOUND"), null); showAlert(Alert.AlertType.ERROR, I18N.get("message.WARNING_CORPUS_NOT_FOUND"), null);
} else if (corpusFilesRegi.size() == 0){
GUIController.showAlert(Alert.AlertType.ERROR, String.format(I18N.get("message.ERROR_NO_REGI_FILE_FOUND"), selectedDirectory.getAbsolutePath()));
} else { } else {
corpusLocation = selectedDirectory.getAbsolutePath();
corpusFilesSize = String.valueOf(corpusFiles.size());
Messages.setChooseCorpusProperties(corpusLocation, corpusFilesSize, corpusType != null ? corpusType.toString() : null);
corpusType = VERT; corpusType = VERT;
corpus.setCorpusType(corpusType); corpus.setCorpusType(corpusType);
@ -255,12 +264,17 @@ public class CorpusTab {
} }
} else { } else {
corpusLocation = selectedDirectory.getAbsolutePath();
corpusFilesSize = String.valueOf(corpusFiles.size());
Messages.setChooseCorpusProperties(corpusLocation, corpusFilesSize, corpusType != null ? corpusType.toString() : null);
String chooseCorpusLabelContentTmp = detectCorpusType(corpusFiles); String chooseCorpusLabelContentTmp = detectCorpusType(corpusFiles);
if (chooseCorpusLabelContentTmp == null) { if (chooseCorpusLabelContentTmp == null) {
logger.info("alert: ", I18N.get("message.WARNING_CORPUS_NOT_FOUND")); logger.info("alert: ", I18N.get("message.WARNING_CORPUS_NOT_FOUND"));
showAlert(Alert.AlertType.ERROR, I18N.get("message.WARNING_CORPUS_NOT_FOUND"), null); showAlert(Alert.AlertType.ERROR, I18N.get("message.WARNING_CORPUS_NOT_FOUND"), null);
} else { } else {
initNewCorpus(selectedDirectory, corpusFiles); initNewCorpus(selectedDirectory, corpusFiles);
Messages.setChooseCorpusProperties(corpusLocation, corpusFilesSize, corpusType.toString()); Messages.setChooseCorpusProperties(corpusLocation, corpusFilesSize, corpusType.toString());
@ -330,6 +344,28 @@ public class CorpusTab {
Messages.setChooseCorpusL(chooseCorpusL, chooseCorpusLabelContent); Messages.setChooseCorpusL(chooseCorpusL, chooseCorpusLabelContent);
} }
private boolean checkRegiFile(Collection<File> corpusFiles) {
// CorpusType corpusType = corpus.getCorpusType();
// Collection<File> corpusFiles = corpus.getDetectedCorpusFiles();
for (File file : corpusFiles) {
// try to open .regi file
String regiPath = file.getAbsolutePath().substring(0, file.getAbsolutePath().length() - 4) + "regi";
LineIterator regiIt;
try {
// read regi file
regiIt = FileUtils.lineIterator(new File(regiPath), "UTF-8");
LineIterator.closeQuietly(regiIt);
} catch (IOException e) {
GUIController.showAlert(Alert.AlertType.ERROR, String.format(I18N.get("message.ERROR_NO_REGI_FILE_FOUND"), regiPath));
return false;
}
}
return true;
}
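checkRegiFile pairs each .vert file with its sibling .regi file by rewriting the extension; the substring arithmetic relies on the extension being exactly four characters long. In isolation:

    public class RegiPathSketch {
        public static void main(String[] args) {
            String vertPath = "/corpora/sample.vert";
            // drop the trailing "vert" (the dot stays), then append "regi"
            String regiPath = vertPath.substring(0, vertPath.length() - 4) + "regi";
            System.out.println(regiPath); // /corpora/sample.regi
        }
    }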
private void readHeaderInfo() { private void readHeaderInfo() {
CorpusType corpusType = corpus.getCorpusType(); CorpusType corpusType = corpus.getCorpusType();
Collection<File> corpusFiles = corpus.getDetectedCorpusFiles(); Collection<File> corpusFiles = corpus.getDetectedCorpusFiles();
@ -339,7 +375,7 @@ public class CorpusTab {
logger.info("reading header data for ", corpusType.toString()); logger.info("reading header data for ", corpusType.toString());
if (corpusType == CorpusType.GIGAFIDA || corpusType == CorpusType.GOS || corpusType == CorpusType.CCKRES || corpusType == CorpusType.SSJ500K) { if (corpusType == CorpusType.GIGAFIDA || corpusType == CorpusType.GOS || corpusType == CorpusType.CCKRES || corpusType == CorpusType.SSJ500K || corpusType == CorpusType.GIGAFIDA2) {
boolean corpusIsSplit = corpusFiles.size() > 1; boolean corpusIsSplit = corpusFiles.size() > 1;
final Task<HashSet<String>> task = new Task<HashSet<String>>() { final Task<HashSet<String>> task = new Task<HashSet<String>>() {
@ -505,18 +541,18 @@ public class CorpusTab {
task.setOnSucceeded(e -> { task.setOnSucceeded(e -> {
ObservableList<String> readTaxonomy = Tax.getTaxonomyForComboBox(corpusType, task.getValue()); ObservableList<String> readTaxonomy = Tax.getTaxonomyForComboBox(corpusType, task.getValue());
if (ValidationUtil.isEmpty(readTaxonomy)) { // if (ValidationUtil.isEmpty(readTaxonomy)) {
// if no taxonomy found alert the user and keep other tabs disabled // // if no taxonomy found alert the user and keep other tabs disabled
logger.info("No vert filters found in headers."); // logger.info("No vert filters found in headers.");
GUIController.showAlert(Alert.AlertType.ERROR, I18N.get("message.WARNING_NO_SOLAR_FILTERS_FOUND")); // GUIController.showAlert(Alert.AlertType.ERROR, I18N.get("message.WARNING_NO_SOLAR_FILTERS_FOUND"));
} else { // } else {
// set taxonomy, update label // set taxonomy, update label
corpus.setTaxonomy(readTaxonomy); corpus.setTaxonomy(readTaxonomy);
corpus.setHeaderRead(true); corpus.setHeaderRead(true);
Messages.setChooseCorpusL(chooseCorpusL, chooseCorpusLabelContent); Messages.setChooseCorpusL(chooseCorpusL, chooseCorpusLabelContent);
setResults(); setResults();
setCorpusForAnalysis(); setCorpusForAnalysis();
} // }
togglePiAndSetCorpusWrapper(false); togglePiAndSetCorpusWrapper(false);
@ -525,6 +561,7 @@ public class CorpusTab {
task.setOnCancelled(e -> togglePiAndSetCorpusWrapper(false)); task.setOnCancelled(e -> togglePiAndSetCorpusWrapper(false));
task.setOnFailed(e -> togglePiAndSetCorpusWrapper(false)); task.setOnFailed(e -> togglePiAndSetCorpusWrapper(false));
final Thread thread = new Thread(task, "task"); final Thread thread = new Thread(task, "task");
thread.setDaemon(true); thread.setDaemon(true);
thread.start(); thread.start();
@ -599,7 +636,12 @@ public class CorpusTab {
if (title.contains(SOLAR.getNameLowerCase())) { if (title.contains(SOLAR.getNameLowerCase())) {
corpusType = SOLAR; corpusType = SOLAR;
} else if (title.contains(GIGAFIDA.getNameLowerCase())) { } else if (title.contains(GIGAFIDA.getNameLowerCase())) {
String edition = XML_processing.readXMLHeaderTag(f.getAbsolutePath(), "edition").toLowerCase();
if (Double.valueOf(edition) < 2.0) {
corpusType = GIGAFIDA; corpusType = GIGAFIDA;
} else {
corpusType = GIGAFIDA2;
}
} else if (title.contains(CCKRES.getNameLowerCase())) { } else if (title.contains(CCKRES.getNameLowerCase())) {
corpusType = CCKRES; corpusType = CCKRES;
} else if (title.contains(GOS.getNameLowerCase())) { } else if (title.contains(GOS.getNameLowerCase())) {
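Corpus detection now also reads the edition header tag to separate Gigafida 1.x from 2.0. A condensed, runnable rendering of the branch (the literal stands in for the value readXMLHeaderTag returns; note a missing or non-numeric tag would surface as a NumberFormatException from Double.valueOf):

    public class EditionSketch {
        enum CorpusType { GIGAFIDA, GIGAFIDA2 } // local stand-in for data.CorpusType

        public static void main(String[] args) {
            String edition = "2.0"; // from readXMLHeaderTag(..., "edition") in the real code
            CorpusType corpusType = Double.valueOf(edition) < 2.0
                    ? CorpusType.GIGAFIDA
                    : CorpusType.GIGAFIDA2;
            System.out.println(corpusType); // GIGAFIDA2
        }
    }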
View File
@ -114,8 +114,10 @@ public class Messages {
.append(String.format(I18N.get("message.NOTIFICATION_CORPUS"), chooseCorpusLabelProperties[2])); .append(String.format(I18N.get("message.NOTIFICATION_CORPUS"), chooseCorpusLabelProperties[2]));
chooseCorpusLabelContent = sb.toString(); chooseCorpusLabelContent = sb.toString();
if (chooseCorpusL != null) {
chooseCorpusL.textProperty().unbind(); chooseCorpusL.textProperty().unbind();
chooseCorpusL.setText(chooseCorpusLabelContent); chooseCorpusL.setText(chooseCorpusLabelContent);
} }
} }
} }
}
View File
@ -1,22 +1,23 @@
package gui; package gui;
import alg.XML_processing;
import data.*; import data.*;
import javafx.application.HostServices; import javafx.application.HostServices;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyDoubleWrapper;
import javafx.beans.value.ChangeListener; import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue; import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener; import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList; import javafx.collections.ObservableList;
import javafx.concurrent.Task; import javafx.concurrent.Task;
import javafx.fxml.FXML; import javafx.fxml.FXML;
import javafx.scene.Scene;
import javafx.scene.control.*; import javafx.scene.control.*;
import javafx.scene.layout.AnchorPane; import javafx.scene.layout.AnchorPane;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.controlsfx.control.CheckComboBox; import org.controlsfx.control.CheckComboBox;
import org.controlsfx.control.IndexedCheckModel;
import java.io.File; import java.io.File;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
@ -26,7 +27,6 @@ import java.util.regex.Pattern;
import static alg.XML_processing.readXML; import static alg.XML_processing.readXML;
import static gui.GUIController.showAlert; import static gui.GUIController.showAlert;
import static gui.Messages.*;
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
public class OneWordAnalysisTab { public class OneWordAnalysisTab {
@ -158,6 +158,7 @@ public class OneWordAnalysisTab {
private ListChangeListener<String> taxonomyListener; private ListChangeListener<String> taxonomyListener;
private ListChangeListener<String> alsoVisualizeListener; private ListChangeListener<String> alsoVisualizeListener;
private ChangeListener<String> calculateForListener; private ChangeListener<String> calculateForListener;
private InvalidationListener progressBarListener;
// private static final ObservableList<String> N_GRAM_COMPUTE_FOR_WORDS = FXCollections.observableArrayList("lema", "različnica", "oblikoskladenjska oznaka"); // private static final ObservableList<String> N_GRAM_COMPUTE_FOR_WORDS = FXCollections.observableArrayList("lema", "različnica", "oblikoskladenjska oznaka");
// private static final ObservableList<String> N_GRAM_COMPUTE_FOR_LETTERS = FXCollections.observableArrayList("lema", "različnica"); // private static final ObservableList<String> N_GRAM_COMPUTE_FOR_LETTERS = FXCollections.observableArrayList("lema", "različnica");
@ -383,7 +384,12 @@ public class OneWordAnalysisTab {
alsoVisualizeCCB.getCheckModel().getCheckedItems().addListener(alsoVisualizeListener); alsoVisualizeCCB.getCheckModel().getCheckedItems().addListener(alsoVisualizeListener);
// taxonomy // taxonomy
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getTaxonomy().size() > 0) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getObservableListTaxonomy().size() > 0) {
taxonomyCCB.setDisable(false);
} else {
taxonomyCCB.setDisable(true);
}
if (taxonomyListener != null){ if (taxonomyListener != null){
taxonomyCCB.getCheckModel().getCheckedItems().removeListener(taxonomyListener); taxonomyCCB.getCheckModel().getCheckedItems().removeListener(taxonomyListener);
} }
@ -395,15 +401,16 @@ public class OneWordAnalysisTab {
public void onChanged(Change<? extends String> c) { public void onChanged(Change<? extends String> c) {
if (changing) { if (changing) {
ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems(); ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems();
ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems); // ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems);
ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.modifyingTaxonomy(taxonomy, checkedItems, corpus);
Taxonomy.modifyingTaxonomy(taxonomy, checkedItemsTaxonomy, corpus); // Taxonomy.modifyingTaxonomy(taxonomy, checkedItemsTaxonomy, corpus);
taxonomy = new ArrayList<>(); taxonomy = new ArrayList<>();
taxonomy.addAll(checkedItemsTaxonomy); taxonomy.addAll(checkedItemsTaxonomy);
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
// taxonomyCCB.getCheckModel().clearChecks(); // taxonomyCCB.getCheckModel().clearChecks();
changing = false; changing = false;
@ -418,19 +425,16 @@ public class OneWordAnalysisTab {
}; };
taxonomyCCB.getCheckModel().clearChecks(); taxonomyCCB.getCheckModel().clearChecks();
taxonomyCCB.setDisable(false);
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
taxonomyCCB.getCheckModel().getCheckedItems().addListener(taxonomyListener); taxonomyCCB.getCheckModel().getCheckedItems().addListener(taxonomyListener);
} else {
taxonomyCCB.setDisable(true);
}
displayTaxonomy = false; displayTaxonomy = false;
displayTaxonomyChB.setSelected(false); displayTaxonomyChB.setSelected(false);
// set // set
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getTaxonomy().size() > 0) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getObservableListTaxonomy().size() > 0) {
displayTaxonomyChB.setDisable(false); displayTaxonomyChB.setDisable(false);
displayTaxonomyChB.selectedProperty().addListener((observable, oldValue, newValue) -> { displayTaxonomyChB.selectedProperty().addListener((observable, oldValue, newValue) -> {
displayTaxonomy = newValue; displayTaxonomy = newValue;
@ -586,7 +590,7 @@ public class OneWordAnalysisTab {
// if ((currentCorpusType != null && currentCorpusType != corpus.getCorpusType())) { // if ((currentCorpusType != null && currentCorpusType != corpus.getCorpusType())) {
// // user changed corpus (by type) or by selection & triggered a rescan of headers // // user changed corpus (by type) or by selection & triggered a rescan of headers
// // see if we read taxonomy from headers, otherwise use default values for given corpus // // see if we read taxonomy from headers, otherwise use default values for given corpus
// ObservableList<String> tax = corpus.getTaxonomy(); // ObservableList<String> tax = corpus.getObservableListTaxonomy();
// taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType()); // taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType());
// //
// currentCorpusType = corpus.getCorpusType(); // currentCorpusType = corpus.getCorpusType();
@ -596,7 +600,7 @@ public class OneWordAnalysisTab {
// } // }
// //
// // see if we read taxonomy from headers, otherwise use default values for given corpus // // see if we read taxonomy from headers, otherwise use default values for given corpus
// ObservableList<String> tax = corpus.getTaxonomy(); // ObservableList<String> tax = corpus.getObservableListTaxonomy();
// taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType()); // taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType());
// taxonomyCCB.getItems().addAll(taxonomyCCBValues); // taxonomyCCB.getItems().addAll(taxonomyCCBValues);
// //
@ -733,22 +737,63 @@ public class OneWordAnalysisTab {
logger.info("Started execution: ", statistic.getFilter()); logger.info("Started execution: ", statistic.getFilter());
Collection<File> corpusFiles = statistic.getCorpus().getDetectedCorpusFiles(); Collection<File> corpusFiles = statistic.getCorpus().getDetectedCorpusFiles();
boolean corpusIsSplit = corpusFiles.size() > 1;
final Task<Void> task = new Task<Void>() { final Task<Void> task = new Task<Void>() {
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
@Override @Override
protected Void call() throws Exception { protected Void call() throws Exception {
long i = 0; if(corpusFiles.size() > 1){
cancel.setVisible(true);
}
int i = 0;
Date startTime = new Date();
Date previousTime = new Date();
int remainingSeconds = -1;
for (File f : corpusFiles) { for (File f : corpusFiles) {
readXML(f.toString(), statistic); final int iFinal = i;
XML_processing xml_processing = new XML_processing();
i++; i++;
if (corpusFiles.size() > 1) {
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - startTime.getTime()) * (1.0/i) * (corpusFiles.size() - i) / 1000);
previousTime = new Date();
}
this.updateProgress(i, corpusFiles.size());
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName(), remainingSeconds));
// if (isCancelled()) {
// updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
// break;
// }
} else {
if(progressBarListener != null) {
xml_processing.progressProperty().removeListener(progressBarListener);
}
progressBarListener = new InvalidationListener() {
int remainingSeconds = -1;
Date previousTime = new Date();
@Override
public void invalidated(Observable observable) {
cancel.setVisible(true);
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - xml_processing.startTime.getTime()) *
(1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)) *
((corpusFiles.size() - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()) / 1000);
previousTime = new Date();
}
xml_processing.isCancelled = isCancelled();
updateProgress((iFinal * 100) + ((ReadOnlyDoubleWrapper) observable).get() + 1, corpusFiles.size() * 100);
updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), iFinal + 1, corpusFiles.size(), f.getName(), remainingSeconds));
}
};
xml_processing.progressProperty().addListener(progressBarListener);
}
xml_processing.readXML(f.toString(), statistic);
if (isCancelled()) { if (isCancelled()) {
updateMessage(I18N.get("message.CANCELING_NOTIFICATION")); updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
break; break;
} }
this.updateProgress(i, corpusFiles.size());
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName()));
} }
return null; return null;
@ -805,7 +850,6 @@ public class OneWordAnalysisTab {
logger.info("cancel button"); logger.info("cancel button");
}); });
cancel.setVisible(true);
final Thread thread = new Thread(task, "task"); final Thread thread = new Thread(task, "task");
thread.setDaemon(true); thread.setDaemon(true);
thread.start(); thread.start();
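The single-file branch folds per-file progress (0-100, published by XML_processing) into the task-level bar by giving each file a 100-unit slice; the same linear ETA extrapolation is then applied at percent rather than file granularity. A worked example, assuming four files with the third one half done:

    public class ProgressMappingSketch {
        public static void main(String[] args) {
            int files = 4;        // corpusFiles.size()
            int iFinal = 2;       // zero-based index of the file currently being read
            double inner = 50.0;  // that file's own progress, 0..100
            // each file owns a 100-unit slice of the overall bar
            double overall = (iFinal * 100 + inner + 1) / (files * 100.0);
            System.out.println(overall); // 0.6275 -> rendered as ~63 %
        }
    }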
View File
@ -2,21 +2,20 @@ package gui;
import static alg.XML_processing.*; import static alg.XML_processing.*;
import static gui.GUIController.*; import static gui.GUIController.*;
import static gui.Messages.*;
import java.io.File; import java.io.File;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
import java.util.*; import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import alg.XML_processing;
import javafx.application.HostServices; import javafx.application.HostServices;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyDoubleWrapper;
import javafx.beans.value.ChangeListener; import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue; import javafx.beans.value.ObservableValue;
import javafx.scene.layout.AnchorPane; import javafx.scene.layout.AnchorPane;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
@ -208,6 +207,7 @@ public class StringAnalysisTabNew2 {
private ListChangeListener<String> alsoVisualizeListener; private ListChangeListener<String> alsoVisualizeListener;
private ListChangeListener<String> collocabilityListener; private ListChangeListener<String> collocabilityListener;
private ChangeListener<String> calculateForListener; private ChangeListener<String> calculateForListener;
private InvalidationListener progressBarListener;
// private static final ObservableList<String> N_GRAM_COMPUTE_FOR_WORDS = FXCollections.observableArrayList("lema", "različnica", "oblikoskladenjska oznaka"); // private static final ObservableList<String> N_GRAM_COMPUTE_FOR_WORDS = FXCollections.observableArrayList("lema", "različnica", "oblikoskladenjska oznaka");
// private static final ObservableList<String> N_GRAM_COMPUTE_FOR_LETTERS = FXCollections.observableArrayList("lema", "različnica"); // private static final ObservableList<String> N_GRAM_COMPUTE_FOR_LETTERS = FXCollections.observableArrayList("lema", "različnica");
@ -306,13 +306,14 @@ public class StringAnalysisTabNew2 {
notePunctuations = newValue; notePunctuations = newValue;
logger.info("note punctuations: ", notePunctuations); logger.info("note punctuations: ", notePunctuations);
}); });
notePunctuationsChB.setSelected(false);
notePunctuationsChB.setTooltip(new Tooltip(I18N.get("message.TOOLTIP_readNotePunctuationsChB"))); notePunctuationsChB.setTooltip(new Tooltip(I18N.get("message.TOOLTIP_readNotePunctuationsChB")));
displayTaxonomy = false; displayTaxonomy = false;
displayTaxonomyChB.setSelected(false); displayTaxonomyChB.setSelected(false);
// set // set
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getTaxonomy().size() > 0) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getObservableListTaxonomy().size() > 0) {
displayTaxonomyChB.setDisable(false); displayTaxonomyChB.setDisable(false);
displayTaxonomyChB.selectedProperty().addListener((observable, oldValue, newValue) -> { displayTaxonomyChB.selectedProperty().addListener((observable, oldValue, newValue) -> {
displayTaxonomy = newValue; displayTaxonomy = newValue;
@ -515,7 +516,12 @@ public class StringAnalysisTabNew2 {
alsoVisualizeCCB.getCheckModel().getCheckedItems().addListener(alsoVisualizeListener); alsoVisualizeCCB.getCheckModel().getCheckedItems().addListener(alsoVisualizeListener);
// taxonomy // taxonomy
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getTaxonomy().size() > 0) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getObservableListTaxonomy().size() > 0) {
taxonomyCCB.setDisable(false);
} else {
taxonomyCCB.setDisable(true);
}
if (taxonomyListener != null){ if (taxonomyListener != null){
taxonomyCCB.getCheckModel().getCheckedItems().removeListener(taxonomyListener); taxonomyCCB.getCheckModel().getCheckedItems().removeListener(taxonomyListener);
} }
@ -527,15 +533,16 @@ public class StringAnalysisTabNew2 {
public void onChanged(ListChangeListener.Change<? extends String> c){ public void onChanged(ListChangeListener.Change<? extends String> c){
if(changing) { if(changing) {
ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems(); ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems();
ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems); // ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems);
//
Taxonomy.modifyingTaxonomy(taxonomy, checkedItemsTaxonomy, corpus); // Taxonomy.modifyingTaxonomy(taxonomy, checkedItemsTaxonomy, corpus);
ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.modifyingTaxonomy(taxonomy, checkedItems, corpus);
taxonomy = new ArrayList<>(); taxonomy = new ArrayList<>();
taxonomy.addAll(checkedItemsTaxonomy); taxonomy.addAll(checkedItemsTaxonomy);
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
// taxonomyCCB.getCheckModel().clearChecks(); // taxonomyCCB.getCheckModel().clearChecks();
changing = false; changing = false;
@ -549,14 +556,11 @@ public class StringAnalysisTabNew2 {
} }
}; };
taxonomyCCB.getCheckModel().clearChecks(); taxonomyCCB.getCheckModel().clearChecks();
taxonomyCCB.setDisable(false);
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
taxonomyCCB.getCheckModel().getCheckedItems().addListener(taxonomyListener); taxonomyCCB.getCheckModel().getCheckedItems().addListener(taxonomyListener);
} else {
taxonomyCCB.setDisable(true);
}
// skip // skip
skipValueCB.valueProperty().addListener((observable, oldValue, newValue) -> { skipValueCB.valueProperty().addListener((observable, oldValue, newValue) -> {
@ -738,7 +742,7 @@ public class StringAnalysisTabNew2 {
// if ((currentCorpusType != null && currentCorpusType != corpus.getCorpusType())) { // if ((currentCorpusType != null && currentCorpusType != corpus.getCorpusType())) {
// // user changed corpus (by type) or by selection & triggered a rescan of headers // // user changed corpus (by type) or by selection & triggered a rescan of headers
// // see if we read taxonomy from headers, otherwise use default values for given corpus // // see if we read taxonomy from headers, otherwise use default values for given corpus
// ObservableList<String> tax = corpus.getTaxonomy(); // ObservableList<String> tax = corpus.getObservableListTaxonomy();
// taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType()); // taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType());
// //
// currentCorpusType = corpus.getCorpusType(); // currentCorpusType = corpus.getCorpusType();
@ -748,7 +752,7 @@ public class StringAnalysisTabNew2 {
// } // }
// //
// // see if we read taxonomy from headers, otherwise use default values for given corpus // // see if we read taxonomy from headers, otherwise use default values for given corpus
// ObservableList<String> tax = corpus.getTaxonomy(); // ObservableList<String> tax = corpus.getObservableListTaxonomy();
// taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType()); // taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType());
// taxonomyCCB.getItems().addAll(taxonomyCCBValues); // taxonomyCCB.getItems().addAll(taxonomyCCBValues);
// //
@ -913,16 +917,78 @@ public class StringAnalysisTabNew2 {
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
@Override @Override
protected Void call() throws Exception { protected Void call() throws Exception {
long i = corpusFiles.size(); if(corpusFiles.size() > 1){
for (File f : corpusFiles) { cancel.setVisible(true);
readXML(f.toString(), statisticsOneGrams);
i++;
this.updateProgress(i, corpusFiles.size() * 2);
if (statistic.getFilter().getCollocability().size() > 0) {
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size() * 2, f.getName()));
} else {
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName()));
} }
int i = corpusFiles.size();
Date startTime = new Date();
Date previousTime = new Date();
int remainingSeconds = -1;
int corpusSize;
if (statistic.getFilter().getCollocability().size() > 0) {
corpusSize = corpusFiles.size() * 2;
} else {
corpusSize = corpusFiles.size();
}
for (File f : corpusFiles) {
final int iFinal = i;
XML_processing xml_processing = new XML_processing();
i++;
if (corpusFiles.size() > 1) {
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - startTime.getTime()) * (1.0/i) * (corpusSize - i) / 1000);
previousTime = new Date();
}
this.updateProgress(i, corpusSize);
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusSize, f.getName(), remainingSeconds));
// if (isCancelled()) {
// updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
// break;
// }
} else {
if(progressBarListener != null) {
xml_processing.progressProperty().removeListener(progressBarListener);
}
progressBarListener = new InvalidationListener() {
int remainingSeconds = -1;
Date previousTime = new Date();
@Override
public void invalidated(Observable observable) {
cancel.setVisible(true);
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - xml_processing.startTime.getTime()) *
(1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)) *
((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()) / 1000);
// System.out.println(((new Date()).getTime() - xml_processing.startTime.getTime()));
// System.out.println((1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)));
// System.out.println(((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()));
// System.out.println(remainingSeconds);
previousTime = new Date();
}
xml_processing.isCancelled = isCancelled();
updateProgress((iFinal * 100) + ((ReadOnlyDoubleWrapper) observable).get() + 1, corpusSize * 100);
updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), iFinal + 1, corpusSize, f.getName(), remainingSeconds));
}
};
xml_processing.progressProperty().addListener(progressBarListener);
}
xml_processing.isCollocability = true;
xml_processing.readXML(f.toString(), statisticsOneGrams);
xml_processing.isCollocability = false;
if (isCancelled()) {
updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
break;
}
// readXML(f.toString(), statisticsOneGrams);
// i++;
// this.updateProgress(i, corpusFiles.size() * 2);
// if (statistic.getFilter().getCollocability().size() > 0) {
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size() * 2, f.getName()));
// } else {
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName()));
// }
} }
return null; return null;
@ -998,8 +1064,6 @@ public class StringAnalysisTabNew2 {
task.cancel(); task.cancel();
// logger.info("cancel button"); // logger.info("cancel button");
}); });
// cancel.setVisible(true);
return task; return task;
} }
@ -1009,28 +1073,90 @@ public class StringAnalysisTabNew2 {
// Task<Void> task_collocability = null; // Task<Void> task_collocability = null;
Collection<File> corpusFiles = statistic.getCorpus().getDetectedCorpusFiles(); Collection<File> corpusFiles = statistic.getCorpus().getDetectedCorpusFiles();
boolean corpusIsSplit = corpusFiles.size() > 1;
final Task<Void> task = new Task<Void>() { final Task<Void> task = new Task<Void>() {
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
@Override @Override
protected Void call() throws Exception { protected Void call() throws Exception {
long i = 0; if(corpusFiles.size() > 1){
cancel.setVisible(true);
}
int i = 0;
Date startTime = new Date();
Date previousTime = new Date();
int remainingSeconds = -1;
int corpusSize;
if (statistic.getFilter().getCollocability().size() > 0) {
corpusSize = corpusFiles.size() * 2;
} else {
corpusSize = corpusFiles.size();
}
for (File f : corpusFiles) { for (File f : corpusFiles) {
readXML(f.toString(), statistic); final int iFinal = i;
XML_processing xml_processing = new XML_processing();
i++; i++;
if (corpusFiles.size() > 1) {
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - startTime.getTime()) * (1.0/i) * (corpusSize - i) / 1000);
previousTime = new Date();
}
this.updateProgress(i, corpusSize);
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusSize, f.getName(), remainingSeconds));
// if (isCancelled()) {
// updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
// break;
// }
} else {
if(progressBarListener != null) {
xml_processing.progressProperty().removeListener(progressBarListener);
}
progressBarListener = new InvalidationListener() {
int remainingSeconds = -1;
Date previousTime = new Date();
@Override
public void invalidated(Observable observable) {
cancel.setVisible(true);
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - xml_processing.startTime.getTime()) *
(1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)) *
((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()) / 1000);
// System.out.println(((new Date()).getTime() - xml_processing.startTime.getTime()));
// System.out.println((1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get())) + 1);
// System.out.println(((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()));
// System.out.println(remainingSeconds);
previousTime = new Date();
}
xml_processing.isCancelled = isCancelled();
updateProgress((iFinal * 100) + ((ReadOnlyDoubleWrapper) observable).get() + 1, corpusSize * 100);
updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), iFinal + 1, corpusSize, f.getName(), remainingSeconds));
}
};
xml_processing.progressProperty().addListener(progressBarListener);
}
xml_processing.readXML(f.toString(), statistic);
if (isCancelled()) { if (isCancelled()) {
updateMessage(I18N.get("message.CANCELING_NOTIFICATION")); updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
break; break;
} }
if (statistic.getFilter().getCollocability().size() > 0) { if(!(corpusFiles.size() > 1)){
this.updateProgress(i, corpusFiles.size() * 2); cancel.setVisible(false);
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size() * 2, f.getName()));
} else {
this.updateProgress(i, corpusFiles.size());
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName()));
} }
// this.updateMessage(String.format(ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y, i, corpusFiles.size() * 2, f.getName())); // readXML(f.toString(), statistic);
// i++;
// if (isCancelled()) {
// updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
// break;
// }
// if (statistic.getFilter().getCollocability().size() > 0) {
// this.updateProgress(i, corpusFiles.size() * 2);
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size() * 2, f.getName()));
// } else {
// this.updateProgress(i, corpusFiles.size());
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName()));
// }
//// this.updateMessage(String.format(ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y, i, corpusFiles.size() * 2, f.getName()));
} }
return null; return null;
@ -1106,8 +1232,6 @@ public class StringAnalysisTabNew2 {
logger.info("cancel button"); logger.info("cancel button");
}); });
cancel.setVisible(true);
final Thread thread = new Thread(task, "task"); final Thread thread = new Thread(task, "task");
thread.setDaemon(true); thread.setDaemon(true);
thread.start(); thread.start();
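When collocability measures are requested the corpus is read twice (a one-gram pass followed by the main pass), so the progress denominator is doubled up front and the second pass starts counting from corpusFiles.size() to fill the upper half of the bar. In sketch form (field accesses replaced by local stand-ins):

    public class ProgressDenominatorSketch {
        public static void main(String[] args) {
            int files = 10;
            boolean collocability = true; // filter.getCollocability().size() > 0 in the real code
            int corpusSize = collocability ? files * 2 : files;
            // the one-gram pass covers 0..files, the main pass files..2*files
            System.out.println(corpusSize); // 20
        }
    }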
View File
@ -2,14 +2,10 @@ package gui;
import static alg.XML_processing.*; import static alg.XML_processing.*;
import static gui.GUIController.*; import static gui.GUIController.*;
import static gui.Messages.*;
import java.io.File; import java.io.File;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
import java.util.ArrayList; import java.util.*;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import javafx.application.HostServices; import javafx.application.HostServices;
import javafx.scene.control.*; import javafx.scene.control.*;
@ -73,11 +69,11 @@ public class WordFormationTab {
// taxonomy // taxonomy
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType())) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType())) {
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
taxonomyCCB.getCheckModel().getCheckedItems().addListener((ListChangeListener<String>) c -> { taxonomyCCB.getCheckModel().getCheckedItems().addListener((ListChangeListener<String>) c -> {
taxonomy = new ArrayList<>(); taxonomy = new ArrayList<>();
ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems(); ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems();
ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems); ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems, corpus);
taxonomy.addAll(checkedItemsTaxonomy); taxonomy.addAll(checkedItemsTaxonomy);
logger.info(String.format("Selected taxonomy: %s", StringUtils.join(checkedItems, ","))); logger.info(String.format("Selected taxonomy: %s", StringUtils.join(checkedItems, ",")));
}); });
@ -175,7 +171,9 @@ public class WordFormationTab {
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
@Override @Override
protected Void call() throws Exception { protected Void call() throws Exception {
long i = 0; int i = 0;
Date startTime = new Date();
Date previousTime = new Date();
for (File f : corpusFiles) { for (File f : corpusFiles) {
readXML(f.toString(), statistic); readXML(f.toString(), statistic);
i++; i++;
View File
@ -1,10 +1,13 @@
package gui; package gui;
import alg.XML_processing;
import data.*; import data.*;
import javafx.application.HostServices; import javafx.application.HostServices;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyDoubleWrapper;
import javafx.beans.value.ChangeListener; import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue; import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener; import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList; import javafx.collections.ObservableList;
import javafx.concurrent.Task; import javafx.concurrent.Task;
@ -24,7 +27,6 @@ import java.util.regex.Pattern;
import static alg.XML_processing.readXML; import static alg.XML_processing.readXML;
import static gui.GUIController.showAlert; import static gui.GUIController.showAlert;
import static gui.Messages.*;
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
public class WordLevelTab { public class WordLevelTab {
@ -196,6 +198,7 @@ public class WordLevelTab {
private ListChangeListener<String> taxonomyListener; private ListChangeListener<String> taxonomyListener;
private ListChangeListener<String> alsoVisualizeListener; private ListChangeListener<String> alsoVisualizeListener;
private ChangeListener<String> calculateForListener; private ChangeListener<String> calculateForListener;
private InvalidationListener progressBarListener;
// private static final ObservableList<String> N_GRAM_COMPUTE_FOR_WORDS = FXCollections.observableArrayList("lema", "različnica"); // private static final ObservableList<String> N_GRAM_COMPUTE_FOR_WORDS = FXCollections.observableArrayList("lema", "različnica");
// private static final ObservableList<String> N_GRAM_COMPUTE_FOR_LETTERS = FXCollections.observableArrayList("lema", "različnica"); // private static final ObservableList<String> N_GRAM_COMPUTE_FOR_LETTERS = FXCollections.observableArrayList("lema", "različnica");
@ -509,7 +512,12 @@ public class WordLevelTab {
alsoVisualizeCCB.getCheckModel().getCheckedItems().addListener(alsoVisualizeListener); alsoVisualizeCCB.getCheckModel().getCheckedItems().addListener(alsoVisualizeListener);
// taxonomy // taxonomy
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getTaxonomy().size() > 0) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getObservableListTaxonomy().size() > 0) {
taxonomyCCB.setDisable(false);
} else {
taxonomyCCB.setDisable(true);
}
if (taxonomyListener != null){ if (taxonomyListener != null){
taxonomyCCB.getCheckModel().getCheckedItems().removeListener(taxonomyListener); taxonomyCCB.getCheckModel().getCheckedItems().removeListener(taxonomyListener);
} }
@ -521,15 +529,16 @@ public class WordLevelTab {
public void onChanged(ListChangeListener.Change<? extends String> c){ public void onChanged(ListChangeListener.Change<? extends String> c){
if(changing) { if(changing) {
ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems(); ObservableList<String> checkedItems = taxonomyCCB.getCheckModel().getCheckedItems();
ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems); // ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.convertStringListToTaxonomyList(checkedItems);
Taxonomy.modifyingTaxonomy(taxonomy, checkedItemsTaxonomy, corpus); ArrayList<Taxonomy> checkedItemsTaxonomy = Taxonomy.modifyingTaxonomy(taxonomy, checkedItems, corpus);
// Taxonomy.modifyingTaxonomy(taxonomy, checkedItemsTaxonomy, corpus);
taxonomy = new ArrayList<>(); taxonomy = new ArrayList<>();
taxonomy.addAll(checkedItemsTaxonomy); taxonomy.addAll(checkedItemsTaxonomy);
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
// taxonomyCCB.getCheckModel().clearChecks(); // taxonomyCCB.getCheckModel().clearChecks();
changing = false; changing = false;
@ -544,19 +553,16 @@ public class WordLevelTab {
}; };
taxonomyCCB.getCheckModel().clearChecks(); taxonomyCCB.getCheckModel().clearChecks();
taxonomyCCB.setDisable(false);
taxonomyCCB.getItems().removeAll(); taxonomyCCB.getItems().removeAll();
taxonomyCCB.getItems().setAll(corpus.getTaxonomy()); taxonomyCCB.getItems().setAll(corpus.getObservableListTaxonomy());
taxonomyCCB.getCheckModel().getCheckedItems().addListener(taxonomyListener); taxonomyCCB.getCheckModel().getCheckedItems().addListener(taxonomyListener);
} else {
taxonomyCCB.setDisable(true);
}
displayTaxonomy = false; displayTaxonomy = false;
displayTaxonomyChB.setSelected(false); displayTaxonomyChB.setSelected(false);
// set // set
if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getTaxonomy().size() > 0) { if (Tax.getCorpusTypesWithTaxonomy().contains(corpus.getCorpusType()) && corpus.getObservableListTaxonomy().size() > 0) {
displayTaxonomyChB.setDisable(false); displayTaxonomyChB.setDisable(false);
displayTaxonomyChB.selectedProperty().addListener((observable, oldValue, newValue) -> { displayTaxonomyChB.selectedProperty().addListener((observable, oldValue, newValue) -> {
displayTaxonomy = newValue; displayTaxonomy = newValue;
@ -714,7 +720,7 @@ public class WordLevelTab {
// if ((currentCorpusType != null && currentCorpusType != corpus.getCorpusType())) { // if ((currentCorpusType != null && currentCorpusType != corpus.getCorpusType())) {
// // user changed corpus (by type) or by selection & triggered a rescan of headers // // user changed corpus (by type) or by selection & triggered a rescan of headers
// // see if we read taxonomy from headers, otherwise use default values for given corpus // // see if we read taxonomy from headers, otherwise use default values for given corpus
// ObservableList<String> tax = corpus.getTaxonomy(); // ObservableList<String> tax = corpus.getObservableListTaxonomy();
// taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType()); // taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType());
// //
// currentCorpusType = corpus.getCorpusType(); // currentCorpusType = corpus.getCorpusType();
@ -724,7 +730,7 @@ public class WordLevelTab {
// } // }
// //
// // see if we read taxonomy from headers, otherwise use default values for given corpus // // see if we read taxonomy from headers, otherwise use default values for given corpus
// ObservableList<String> tax = corpus.getTaxonomy(); // ObservableList<String> tax = corpus.getObservableListTaxonomy();
// taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType()); // taxonomyCCBValues = tax != null ? tax : Taxonomy.getDefaultForComboBox(corpus.getCorpusType());
// taxonomyCCB.getItems().addAll(taxonomyCCBValues); // taxonomyCCB.getItems().addAll(taxonomyCCBValues);
// //
@ -879,22 +885,63 @@ public class WordLevelTab {
logger.info("Started execution: ", statistic.getFilter()); logger.info("Started execution: ", statistic.getFilter());
Collection<File> corpusFiles = statistic.getCorpus().getDetectedCorpusFiles(); Collection<File> corpusFiles = statistic.getCorpus().getDetectedCorpusFiles();
boolean corpusIsSplit = corpusFiles.size() > 1;
final Task<Void> task = new Task<Void>() { final Task<Void> task = new Task<Void>() {
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
@Override @Override
protected Void call() throws Exception { protected Void call() throws Exception {
long i = 0; if(corpusFiles.size() > 1){
cancel.setVisible(true);
}
int i = 0;
Date startTime = new Date();
Date previousTime = new Date();
int remainingSeconds = -1;
for (File f : corpusFiles) { for (File f : corpusFiles) {
readXML(f.toString(), statistic); final int iFinal = i;
XML_processing xml_processing = new XML_processing();
i++; i++;
if (isCancelled()) { if (isCancelled()) {
updateMessage(I18N.get("message.CANCELING_NOTIFICATION")); updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
break; break;
} }
if (corpusFiles.size() > 1) {
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - startTime.getTime()) * (1.0/i) * (corpusFiles.size() - i) / 1000);
previousTime = new Date();
}
this.updateProgress(i, corpusFiles.size()); this.updateProgress(i, corpusFiles.size());
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName())); this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName(), remainingSeconds));
} else {
if(progressBarListener != null) {
xml_processing.progressProperty().removeListener(progressBarListener);
}
progressBarListener = new InvalidationListener() {
int remainingSeconds = -1;
Date previousTime = new Date();
@Override
public void invalidated(Observable observable) {
cancel.setVisible(true);
if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
remainingSeconds = (int) (((new Date()).getTime() - xml_processing.startTime.getTime()) *
(1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)) *
((corpusFiles.size() - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()) / 1000);
previousTime = new Date();
}
xml_processing.isCancelled = isCancelled();
updateProgress((iFinal * 100) + ((ReadOnlyDoubleWrapper) observable).get() + 1, corpusFiles.size() * 100);
updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), iFinal + 1, corpusFiles.size(), f.getName(), remainingSeconds));
}
};
xml_processing.progressProperty().addListener(progressBarListener);
}
xml_processing.readXML(f.toString(), statistic);
if (isCancelled()) {
updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
break;
}
} }
return null; return null;
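This hunk splits progress reporting into two branches: for split corpora the task reports whole files, while for a single file it listens to XML_processing's own progressProperty and rescales, with each file owning 100 "ticks" of the overall bar. A sketch of that mapping and the matching time estimate (class and method names are illustrative):

    // Sketch of the single-file branch above: each file contributes 100 ticks,
    // so overall progress and the estimate are computed on a tick scale.
    public class TickProgressSketch {
        // Fraction of the whole run, given percent-within-current-file.
        static double overallProgress(int fileIndex, double percent, int totalFiles) {
            return (fileIndex * 100 + percent + 1) / (totalFiles * 100.0);
        }

        // Remaining seconds: elapsed time per finished tick, times ticks left.
        static int remainingSeconds(long elapsedMillis, int fileIndex, double percent, int totalFiles) {
            double ticksDone = fileIndex * 100 + percent + 1;
            double ticksLeft = (totalFiles - fileIndex - 1) * 100 + 100 - percent;
            return (int) (elapsedMillis * (1.0 / ticksDone) * ticksLeft / 1000);
        }

        public static void main(String[] args) {
            // Worked example: halfway through file 1 of 2 after 10 s.
            System.out.println(overallProgress(0, 50, 2));          // prints 0.255
            System.out.println(remainingSeconds(10_000, 0, 50, 2)); // prints 29
        }
    }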
@ -951,7 +998,6 @@ public class WordLevelTab {
logger.info("cancel button"); logger.info("cancel button");
}); });
cancel.setVisible(true);
final Thread thread = new Thread(task, "task"); final Thread thread = new Thread(task, "task");
thread.setDaemon(true); thread.setDaemon(true);
thread.start(); thread.start();

View File

@ -111,8 +111,8 @@ public class Export {
} }
} }
headerInfoBlock.put(filter.getCalculateFor().totalSumString(filter.getNgramValue()), String.valueOf(num_taxonomy_frequencies.get(Taxonomy.TOTAL).longValue())); headerInfoBlock.put(filter.getCalculateFor().totalSumString(filter.getNgramValue()), String.valueOf(num_taxonomy_frequencies.get(statistics.getCorpus().getTotal()).longValue()));
headerInfoBlock.put(filter.getCalculateFor().foundSumString(filter.getNgramValue()), String.valueOf(num_selected_taxonomy_frequencies.get(Taxonomy.TOTAL).longValue())); headerInfoBlock.put(filter.getCalculateFor().foundSumString(filter.getNgramValue()), String.valueOf(num_selected_taxonomy_frequencies.get(statistics.getCorpus().getTotal()).longValue()));
// headerInfoBlock.put(filter.getCalculateFor().toMetadataString(), String.valueOf(num_frequencies)); // headerInfoBlock.put(filter.getCalculateFor().toMetadataString(), String.valueOf(num_frequencies));
for (CalculateFor otherKey : filter.getMultipleKeys()) { for (CalculateFor otherKey : filter.getMultipleKeys()) {
@ -134,7 +134,7 @@ public class Export {
} }
for (Taxonomy key : taxonomyResults.keySet()) { for (Taxonomy key : taxonomyResults.keySet()) {
if(!key.equals(Taxonomy.TOTAL) && num_taxonomy_frequencies.containsKey(key) && num_taxonomy_frequencies.get(key).longValue() > 0) { if(!key.equals(statistics.getCorpus().getTotal()) && num_taxonomy_frequencies.containsKey(key) && num_taxonomy_frequencies.get(key).longValue() > 0) {
FILE_HEADER_AL.add(I18N.get("exportTable.absoluteFrequency") + " [" + key.toString() + "]"); FILE_HEADER_AL.add(I18N.get("exportTable.absoluteFrequency") + " [" + key.toString() + "]");
FILE_HEADER_AL.add(I18N.get("exportTable.percentage") + " [" + key.toString() + "]"); FILE_HEADER_AL.add(I18N.get("exportTable.percentage") + " [" + key.toString() + "]");
FILE_HEADER_AL.add(I18N.get("exportTable.relativeFrequency") + " [" + key.toString() + "]"); FILE_HEADER_AL.add(I18N.get("exportTable.relativeFrequency") + " [" + key.toString() + "]");
@ -280,10 +280,10 @@ public class Export {
dataEntry.add(e.getValue().toString()); dataEntry.add(e.getValue().toString());
dataEntry.add(formatNumberAsPercent((double) e.getValue() / num_selected_taxonomy_frequencies.get(Taxonomy.TOTAL))); dataEntry.add(formatNumberAsPercent((double) e.getValue() / num_selected_taxonomy_frequencies.get(statistics.getCorpus().getTotal())));
dataEntry.add(String.format("%.2f", ((double) e.getValue() * 1000000)/num_taxonomy_frequencies.get(Taxonomy.TOTAL).longValue())); dataEntry.add(String.format("%.2f", ((double) e.getValue() * 1000000)/num_taxonomy_frequencies.get(statistics.getCorpus().getTotal()).longValue()));
for (Taxonomy key : taxonomyResults.keySet()){ for (Taxonomy key : taxonomyResults.keySet()){
if(!key.equals(Taxonomy.TOTAL) && num_taxonomy_frequencies.containsKey(key) && num_taxonomy_frequencies.get(key).longValue() > 0) { if(!key.equals(statistics.getCorpus().getTotal()) && num_taxonomy_frequencies.containsKey(key) && num_taxonomy_frequencies.get(key).longValue() > 0) {
AtomicLong frequency = taxonomyResults.get(key).get(e.getKey()); AtomicLong frequency = taxonomyResults.get(key).get(e.getKey());
dataEntry.add(frequency.toString()); dataEntry.add(frequency.toString());
dataEntry.add(formatNumberAsPercent((double) frequency.get() / num_selected_taxonomy_frequencies.get(key))); dataEntry.add(formatNumberAsPercent((double) frequency.get() / num_selected_taxonomy_frequencies.get(key)));
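The export columns above combine three figures per taxonomy branch: the absolute frequency, its share of the selected total, and the relative frequency normalized per million tokens. A sketch of that arithmetic with illustrative values (the counts and the class name are hypothetical):

    // Sketch of the export-side arithmetic shown above: absolute frequency,
    // share of the taxonomy total, and relative frequency per million tokens.
    public class FrequencyExportSketch {
        public static void main(String[] args) {
            long frequency = 1_234;          // hypothetical n-gram count
            long taxonomyTotal = 5_000_000;  // hypothetical token total for the branch
            double share = (double) frequency / taxonomyTotal;                  // -> percentage column
            double perMillion = (double) frequency * 1_000_000 / taxonomyTotal; // -> relative frequency column
            System.out.printf("%d\t%.2f%%\t%.2f%n", frequency, share * 100, perMillion);
            // prints: 1234    0.02%   246.80
        }
    }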

View File

@ -118,6 +118,7 @@ message.WARNING_NO_SOLAR_FILTERS_FOUND=We weren't able to read filters from corp
message.ERROR_WHILE_EXECUTING=Error in program execution. message.ERROR_WHILE_EXECUTING=Error in program execution.
message.ERROR_WHILE_SAVING_RESULTS_TO_CSV=Error while saving results. message.ERROR_WHILE_SAVING_RESULTS_TO_CSV=Error while saving results.
message.ERROR_NOT_ENOUGH_MEMORY=You do not have sufficient RAM to analyze this amount of data. You can try changing filters. message.ERROR_NOT_ENOUGH_MEMORY=You do not have sufficient RAM to analyze this amount of data. You can try changing filters.
message.ERROR_NO_REGI_FILE_FOUND=Missing file \"%s\".
message.MISSING_NGRAM_LEVEL=N-gram level message.MISSING_NGRAM_LEVEL=N-gram level
message.MISSING_CALCULATE_FOR=Calculate for message.MISSING_CALCULATE_FOR=Calculate for
@ -132,7 +133,7 @@ message.NOTIFICATION_ANALYSIS_COMPLETED_NO_RESULTS=Analysis completed, however n
message.RESULTS_PATH_SET_TO_DEFAULT=Save location is set to the corpus location. message.RESULTS_PATH_SET_TO_DEFAULT=Save location is set to the corpus location.
message.NOTIFICATION_ANALYSIS_CANCELED=Analysis was canceled. message.NOTIFICATION_ANALYSIS_CANCELED=Analysis was canceled.
message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y=Analyzing file %d of %d (%s) message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y=Analyzing file %d of %d (%s) - Estimated time remaining %d s
message.CANCELING_NOTIFICATION=Canceled message.CANCELING_NOTIFICATION=Canceled
message.LABEL_CORPUS_LOCATION_NOT_SET=Corpus location is not set message.LABEL_CORPUS_LOCATION_NOT_SET=Corpus location is not set
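The analyzing-file message now takes four arguments; the call sites in the tab classes above pass the estimated remaining seconds as the final %d. A sketch of the formatting with illustrative values (the file name is hypothetical):

    // Illustrative use of the updated four-argument message pattern.
    public class MessageFormatSketch {
        public static void main(String[] args) {
            String pattern = "Analyzing file %d of %d (%s) - Estimated time remaining %d s";
            System.out.println(String.format(pattern, 3, 20, "file.xml", 48));
            // -> Analyzing file 3 of 20 (file.xml) - Estimated time remaining 48 s
        }
    }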

View File

@ -118,6 +118,7 @@ message.WARNING_NO_SOLAR_FILTERS_FOUND=Iz korpusnih datotek ni bilo moč razbrat
message.ERROR_WHILE_EXECUTING=Prišlo je do napake med izvajanjem. message.ERROR_WHILE_EXECUTING=Prišlo je do napake med izvajanjem.
message.ERROR_WHILE_SAVING_RESULTS_TO_CSV=Prišlo je do napake med shranjevanjem rezultatov. message.ERROR_WHILE_SAVING_RESULTS_TO_CSV=Prišlo je do napake med shranjevanjem rezultatov.
message.ERROR_NOT_ENOUGH_MEMORY=Na voljo imate premalo pomnilnika (RAM-a) za analizo takšne količine podatkov. message.ERROR_NOT_ENOUGH_MEMORY=Na voljo imate premalo pomnilnika (RAM-a) za analizo takšne količine podatkov.
message.ERROR_NO_REGI_FILE_FOUND=Manjka datoteka \"%s\".
message.MISSING_NGRAM_LEVEL=N-gram nivo message.MISSING_NGRAM_LEVEL=N-gram nivo
message.MISSING_CALCULATE_FOR=Izračunaj za message.MISSING_CALCULATE_FOR=Izračunaj za
@ -132,7 +133,7 @@ message.NOTIFICATION_ANALYSIS_COMPLETED_NO_RESULTS=Analiza je zaključena, venda
message.RESULTS_PATH_SET_TO_DEFAULT=Lokacija za shranjevanje rezultatov je nastavljena na lokacijo korpusa. message.RESULTS_PATH_SET_TO_DEFAULT=Lokacija za shranjevanje rezultatov je nastavljena na lokacijo korpusa.
message.NOTIFICATION_ANALYSIS_CANCELED=Analiziranje je bilo prekinjeno. message.NOTIFICATION_ANALYSIS_CANCELED=Analiziranje je bilo prekinjeno.
message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y=Analiziram datoteko %d od %d (%s) message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y=Analiziram datoteko %d od %d (%s) - Preostali čas %d s
message.CANCELING_NOTIFICATION=Prekinjeno message.CANCELING_NOTIFICATION=Prekinjeno
message.LABEL_CORPUS_LOCATION_NOT_SET=Lokacija korpusa ni nastavljena message.LABEL_CORPUS_LOCATION_NOT_SET=Lokacija korpusa ni nastavljena