BIG REFACTOR - erasing unused code

This commit is contained in:
2019-03-19 15:09:46 +01:00
parent 10666b4453
commit 2c028cd334
50 changed files with 13 additions and 4317 deletions

View File

@@ -1,46 +0,0 @@
package util;
import java.util.Arrays;
import java.util.HashSet;
import java.util.stream.IntStream;
public class Combinations {

    // Accumulates every combination produced by combinationUtil; reset at the
    // start of each generateIndices call. NOTE(review): static mutable state —
    // not thread-safe if two callers run generateIndices concurrently.
    private static HashSet<HashSet<Integer>> result = new HashSet<>();

    /**
     * Recursively fills {@code data} with combinations of length
     * {@code combinationLength} drawn from {@code arr[start..end]} and stores
     * each completed combination in {@link #result}.
     *
     * @param arr               source values to combine
     * @param data              scratch buffer holding the combination under construction
     * @param start             first index of arr still available for selection
     * @param end               last valid index of arr
     * @param index             next free slot in data
     * @param combinationLength target size of each combination
     */
    static void combinationUtil(int arr[], Integer data[], int start, int end, int index, int combinationLength) {
        // Buffer full: snapshot the current combination into the result set.
        if (index == combinationLength) {
            result.add(new HashSet<>(Arrays.asList(data)));
            return;
        }
        // Try every remaining candidate for slot `index`. The guard
        // "end - candidate + 1 >= combinationLength - index" prunes starting
        // points that cannot leave enough elements to finish the combination.
        int candidate = start;
        while (candidate <= end && end - candidate + 1 >= combinationLength - index) {
            data[index] = arr[candidate];
            combinationUtil(arr, data, candidate + 1, end, index + 1, combinationLength);
            candidate++;
        }
    }

    /**
     * Builds all index combinations of sizes 1 .. maxNOfIndices - 2 over the
     * values 1 .. maxNOfIndices - 1, and adds one empty set.
     *
     * @param maxNOfIndices exclusive upper bound for the index values
     * @return the set of all generated index sets (always includes the empty set)
     */
    public static HashSet<HashSet<Integer>> generateIndices(int maxNOfIndices) {
        result = new HashSet<>(); // discard results from any previous run
        int[] values = IntStream.range(1, maxNOfIndices).toArray();
        for (int size = 1; size < maxNOfIndices - 1; size++) {
            // Scratch buffer sized for the current combination length.
            combinationUtil(values, new Integer[size], 0, values.length - 1, 0, size);
        }
        // also add an empty one for X.... (all of this type)
        result.add(new HashSet<>());
        return result;
    }
}

View File

@@ -6,7 +6,6 @@ import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import data.*;
@@ -16,49 +15,11 @@ import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.QuoteMode;
import org.apache.commons.lang3.tuple.Pair;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import data.Enums.WordLevelType;
@SuppressWarnings("unchecked")
public class Export {
// public static void SetToJSON(Set<Pair<String, Map<MultipleHMKeys, Long>>> set) {
// JSONArray wrapper = new JSONArray();
//
// for (Pair<String, Map<MultipleHMKeys, Long>> p : set) {
// JSONArray data_wrapper = new JSONArray();
// JSONObject metric = new JSONObject();
//
// String title = p.getLeft();
// Map<MultipleHMKeys, Long> map = p.getRight();
//
// if (map.isEmpty())
// continue;
//
// long total = Util.mapSumFrequencies(map);
//
// for (Map.Entry<MultipleHMKeys, Long> e : map.entrySet()) {
// JSONObject data_entry = new JSONObject();
// data_entry.put("word", e.getKey());
// data_entry.put("frequency", e.getValue());
// data_entry.put("percent", formatNumberAsPercent((double) e.getValue() / total));
//
// data_wrapper.add(data_entry);
// }
//
// metric.put("Title", title);
// metric.put("data", data_wrapper);
// wrapper.add(metric);
// }
//
// try (FileWriter file = new FileWriter("statistics.json")) {
// file.write(wrapper.toJSONString());
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
public static String SetToCSV(Set<Pair<String, Map<MultipleHMKeys, Long>>> set, File resultsPath, LinkedHashMap<String, String> headerInfoBlock,
StatisticsNew statistics, Filter filter) {
Map<Taxonomy, Map<MultipleHMKeys, AtomicLong>> taxonomyResults = statistics.getTaxonomyResult();
@@ -68,15 +29,6 @@ public class Export {
List<Object> FILE_HEADER_AL = new ArrayList<>();
Object[] FILE_HEADER;
//Count frequencies
// long num_frequencies = 0;
// for (Pair<String, Map<MultipleHMKeys, Long>> p : set) {
// Map<MultipleHMKeys, Long> map = p.getRight();
// if (map.isEmpty())
// continue;
// num_frequencies = Util.mapSumFrequencies(map);
// }
Map<Taxonomy, Long> num_selected_taxonomy_frequencies = new ConcurrentHashMap<>();
for (Taxonomy taxonomyKey : taxonomyResults.keySet()) {
num_selected_taxonomy_frequencies.put(taxonomyKey, (long) 0);
@@ -113,7 +65,6 @@ public class Export {
headerInfoBlock.put(filter.getCalculateFor().totalSumString(filter.getNgramValue()), String.valueOf(num_taxonomy_frequencies.get(statistics.getCorpus().getTotal()).longValue()));
headerInfoBlock.put(filter.getCalculateFor().foundSumString(filter.getNgramValue()), String.valueOf(num_selected_taxonomy_frequencies.get(statistics.getCorpus().getTotal()).longValue()));
// headerInfoBlock.put(filter.getCalculateFor().toMetadataString(), String.valueOf(num_frequencies));
for (CalculateFor otherKey : filter.getMultipleKeys()) {
FILE_HEADER_AL.add(otherKey.toHeaderString(filter.getNgramValue()));
@@ -163,10 +114,7 @@ public class Export {
for (Pair<String, Map<MultipleHMKeys, Long>> p : set) {
String title = p.getLeft();
// statistics.setTimeEnding();
title = statistics.generateResultTitle();
// statistics.
fileName = title.replace(": ", "-");
fileName = fileName.replace(" ", "_").concat(".csv");
@@ -178,8 +126,6 @@ public class Export {
if (map.isEmpty())
continue;
// long total = Util.mapSumFrequencies(map);
OutputStreamWriter fileWriter = null;
CSVPrinter csvFilePrinter = null;
@@ -289,10 +235,7 @@ public class Export {
dataEntry.add(frequency.toString());
dataEntry.add(formatNumberAsPercent((double) frequency.get() / num_selected_taxonomy_frequencies.get(key), statistics.getCorpus().getPunctuation()));
dataEntry.add(formatNumberForExport(((double) frequency.get() * 1000000) / num_taxonomy_frequencies.get(key).longValue(), statistics.getCorpus().getPunctuation()));
// dataEntry.add(formatNumberAsPercent((double) frequency.get() / statistics.getUniGramOccurrences()));
// dataEntry.add(String.format("%.2f", ((double) frequency.get() * 1000000) / statistics.getUniGramOccurrences()));
}
}
if (filter.getCollocability().size() > 0){
@@ -303,39 +246,6 @@ public class Export {
// Write msd separated per letters at the end of each line in csv
if (filter.getWriteMsdAtTheEnd()) {
// String msd = "";
//
// if (filter.getCalculateFor().equals(CalculateFor.MORPHOSYNTACTIC_SPECS)){
// msd = e.getKey().getK1();
// } else if (filter.getMultipleKeys().contains(CalculateFor.MORPHOSYNTACTIC_SPECS)) {
// i = 0;
// for (CalculateFor otherKey : filter.getMultipleKeys()){
// switch(i){
// case 0:
// if (otherKey.equals(CalculateFor.MORPHOSYNTACTIC_SPECS)){
// msd = e.getKey().getK2();
// }
// break;
// case 1:
// if (otherKey.equals(CalculateFor.MORPHOSYNTACTIC_SPECS)){
// msd = e.getKey().getK3();
// }
// break;
// case 2:
// if (otherKey.equals(CalculateFor.MORPHOSYNTACTIC_SPECS)){
// msd = e.getKey().getK4();
// }
// break;
// case 3:
// if (otherKey.equals(CalculateFor.MORPHOSYNTACTIC_SPECS)){
// msd = e.getKey().getK5();
// }
// break;
// }
//
// i++;
// }
// }
String msd = e.getKey().getMsd(filter);
String [] charArray = msd.split("(?!^)");
dataEntry.addAll(Arrays.asList(charArray));
@@ -372,67 +282,6 @@ public class Export {
return s;
}
// public static String SetToCSV(String title, Object[][] result, File resultsPath, LinkedHashMap<String, String> headerInfoBlock) {
// //Delimiter used in CSV file
// String NEW_LINE_SEPARATOR = "\n";
//
// //CSV file header
// Object[] FILE_HEADER = {"word", "frequency", "percent"};
//
// String fileName = "";
//
// fileName = title.replace(": ", "-");
// fileName = fileName.replace(" ", "_").concat(".csv");
//
// fileName = resultsPath.toString().concat(File.separator).concat(fileName);
//
// OutputStreamWriter fileWriter = null;
// CSVPrinter csvFilePrinter = null;
//
// //Create the CSVFormat object with "\n" as a record delimiter
// CSVFormat csvFileFormat = CSVFormat.DEFAULT.withRecordSeparator(NEW_LINE_SEPARATOR).withDelimiter(';');
//
// try {
// //initialize FileWriter object
// fileWriter = new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8);
//
// //initialize CSVPrinter object
// csvFilePrinter = new CSVPrinter(fileWriter, csvFileFormat);
//
// // write info block
// printHeaderInfo(csvFilePrinter, headerInfoBlock);
//
// //Create CSV file header
// csvFilePrinter.printRecord(FILE_HEADER);
//
// for (Object[] resultEntry : result) {
// List dataEntry = new ArrayList<>();
// dataEntry.add(resultEntry[0]);
// dataEntry.add(resultEntry[1]);
// dataEntry.add(formatNumberAsPercent(resultEntry[2]), statistics.getCorpus().getPunctuation());
// csvFilePrinter.printRecord(dataEntry);
// }
// } catch (Exception e) {
// System.out.println("Error in CsvFileWriter!");
// e.printStackTrace();
// } finally {
// try {
// if (fileWriter != null) {
// fileWriter.flush();
// fileWriter.close();
// }
// if (csvFilePrinter != null) {
// csvFilePrinter.close();
// }
// } catch (IOException e) {
// System.out.println("Error while flushing/closing fileWriter/csvPrinter!");
// e.printStackTrace();
// }
// }
//
// return fileName;
// }
public static String nestedMapToCSV(String title, Map<WordLevelType, Map<String, Map<String, Long>>> result, File resultsPath, LinkedHashMap<String, String> headerInfoBlock) {
//Delimiter used in CSV file
String NEW_LINE_SEPARATOR = "\n";

View File

@@ -1,31 +0,0 @@
package util;
/**
 * Empty marker type left over from an earlier design in which a Key wrapped a
 * String value and implemented {@code Comparable<Key>}. That implementation
 * survived only as commented-out dead code and has been removed; recover it
 * from version control if it is ever needed again.
 */
public class Key {
}

View File

@@ -57,9 +57,6 @@ public class Tasks {
f2.setIsMinimalRelFreScraper(true);
StatisticsNew statisticsMinRelFre = new StatisticsNew(corpus, f2, useDb);
// StatisticsNew statisticsMinRelFre = new StatisticsNew(corpus, f, useDb);
Collection<File> corpusFiles = statisticsMinRelFre.getCorpus().getDetectedCorpusFiles();
final javafx.concurrent.Task<Void> task = new javafx.concurrent.Task<Void>() {
@@ -97,10 +94,6 @@ public class Tasks {
}
this.updateProgress(i, corpusSize);
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusSize, f.getName(), remainingSeconds));
// if (isCancelled()) {
// updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
// break;
// }
} else {
xml_processing.progressBarListener = new InvalidationListener() {
int remainingSeconds = -1;
@@ -112,10 +105,6 @@ public class Tasks {
remainingSeconds = (int) (((new Date()).getTime() - xml_processing.startTime.getTime()) *
(1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)) *
((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()) / 1000);
// System.out.println(((new Date()).getTime() - xml_processing.startTime.getTime()));
// System.out.println((1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get())) + 1);
// System.out.println(((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()));
// System.out.println(remainingSeconds);
previousTime = new Date();
}
xml_processing.isCancelled = isCancelled();
@@ -138,7 +127,6 @@ public class Tasks {
// add remaining minRelFre results
if(statisticsMinRelFre.getFilter().getIsMinimalRelFreScraper()) {
// long countFor1MWords = stats.getCountWordsForMinimalRelFreNgrams() +
long countFor1MWords = statisticsMinRelFre.getUniGramOccurrences().get(statisticsMinRelFre.getCorpus().getTotal()).longValue();
double absToRelFactor = (statisticsMinRelFre.getFilter().getMinimalRelFre() / 1000000.0) * countFor1MWords;
@@ -151,8 +139,6 @@ public class Tasks {
for(Taxonomy taxonomy : statisticsMinRelFre.getUniGramOccurrences().keySet()){
statisticsMinRelFre.getUniGramOccurrences().put(taxonomy, new AtomicLong(0));
}
// System.out.println("asd");
}
return null;
@@ -174,7 +160,6 @@ public class Tasks {
logger.error("Error while executing", e);
ngramProgressBar.progressProperty().unbind();
ngramProgressBar.setProgress(0.0);
// ngramProgressBar.setStyle(Settings.FX_ACCENT_NOK);
progressLabel.textProperty().unbind();
progressLabel.setText("");
cancel.setVisible(false);
@@ -184,7 +169,6 @@ public class Tasks {
showAlert(Alert.AlertType.INFORMATION, I18N.get("message.NOTIFICATION_ANALYSIS_CANCELED"));
ngramProgressBar.progressProperty().unbind();
ngramProgressBar.setProgress(0.0);
// ngramProgressBar.setStyle(Settings.FX_ACCENT_OK);
progressLabel.textProperty().unbind();
progressLabel.setText("");
cancel.setVisible(false);
@@ -215,19 +199,6 @@ public class Tasks {
if(multipleFiles){
cancel.setVisible(true);
}
// int i = corpusFiles.size();
// Date startTime = new Date();
// Date previousTime = new Date();
// int remainingSeconds = -1;
// int corpusSize;
// if (statistic.getFilter().getCollocability().size() > 0) {
// corpusSize = corpusFiles.size() * 2;
// } else {
// corpusSize = corpusFiles.size();
// }
Date startTime = new Date();
Date previousTime = new Date();
int remainingSeconds = -1;
@@ -264,13 +235,6 @@ public class Tasks {
this.updateProgress(i, corpusSize);
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusSize, f.getName(), remainingSeconds));
// if ((new Date()).getTime() - previousTime.getTime() > 500 || remainingSeconds == -1){
// remainingSeconds = (int) (((new Date()).getTime() - startTime.getTime()) * (1.0/i) * (corpusSize - i) / 1000);
// previousTime = new Date();
// }
// this.updateProgress(i, corpusSize);
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusSize, f.getName(), remainingSeconds));
} else {
xml_processing.progressBarListener = new InvalidationListener() {
int remainingSeconds = -1;
@@ -282,10 +246,6 @@ public class Tasks {
remainingSeconds = (int) (((new Date()).getTime() - xml_processing.startTime.getTime()) *
(1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)) *
((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()) / 1000);
// System.out.println(((new Date()).getTime() - xml_processing.startTime.getTime()));
// System.out.println((1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get())) + 1);
// System.out.println(((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()));
// System.out.println(remainingSeconds);
previousTime = new Date();
}
xml_processing.isCancelled = isCancelled();
@@ -304,24 +264,9 @@ public class Tasks {
if(!(multipleFiles)){
cancel.setVisible(false);
}
// readXML(f.toString(), statistic);
// i++;
// if (isCancelled()) {
// updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
// break;
// }
// if (statistic.getFilter().getCollocability().size() > 0) {
// this.updateProgress(i, corpusFiles.size() * 2);
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size() * 2, f.getName()));
// } else {
// this.updateProgress(i, corpusFiles.size());
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName()));
// }
//// this.updateMessage(String.format(ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y, i, corpusFiles.size() * 2, f.getName()));
}
// if getMinimalRelFre > 1 erase all words that have lower occurrences at the end of processing
if (statistic.getFilter().getMinimalRelFre() > 1){
// long countFor1MWords = stats.getCountWordsForMinimalRelFreNgrams() +
long countFor1MWords = statistic.getUniGramOccurrences().get(statistic.getCorpus().getTotal()).longValue();
double absToRelFactor = (statistic.getFilter().getMinimalRelFre() / 1000000.0) * countFor1MWords;
@@ -356,7 +301,6 @@ public class Tasks {
} else {
try {
// System.out.print(statistics);
boolean successullySaved = statistic.saveResultToDisk();
if (successullySaved) {
showAlert(Alert.AlertType.INFORMATION, I18N.get("message.NOTIFICATION_ANALYSIS_COMPLETED"));
@@ -371,7 +315,6 @@ public class Tasks {
logger.error("Out of memory error", e1);
}
ngramProgressBar.progressProperty().unbind();
// ngramProgressBar.setStyle(Settings.FX_ACCENT_OK);
progressLabel.textProperty().unbind();
progressLabel.setText("");
cancel.setVisible(false);
@@ -385,7 +328,6 @@ public class Tasks {
logger.error("Error while executing", e);
ngramProgressBar.progressProperty().unbind();
ngramProgressBar.setProgress(0.0);
// ngramProgressBar.setStyle(Settings.FX_ACCENT_NOK);
progressLabel.textProperty().unbind();
progressLabel.setText("");
cancel.setVisible(false);
@@ -395,7 +337,6 @@ public class Tasks {
showAlert(Alert.AlertType.INFORMATION, I18N.get("message.NOTIFICATION_ANALYSIS_CANCELED"));
ngramProgressBar.progressProperty().unbind();
ngramProgressBar.setProgress(0.0);
// ngramProgressBar.setStyle(Settings.FX_ACCENT_OK);
progressLabel.textProperty().unbind();
progressLabel.setText("");
cancel.setVisible(false);
@@ -421,17 +362,9 @@ public class Tasks {
if(multipleFiles){
cancel.setVisible(true);
}
// int i = corpusFiles.size();
Date startTime = new Date();
Date previousTime = new Date();
int remainingSeconds = -1;
// int corpusSize;
// if (statistic.getFilter().getCollocability().size() > 0) {
// corpusSize = corpusFiles.size() * 2;
// } else {
// corpusSize = corpusFiles.size();
// }
int corpusSize;
int i;
@@ -461,10 +394,6 @@ public class Tasks {
}
this.updateProgress(i, corpusSize);
this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusSize, f.getName(), remainingSeconds));
// if (isCancelled()) {
// updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
// break;
// }
} else {
xml_processing.progressBarListener = new InvalidationListener() {
int remainingSeconds = -1;
@@ -476,10 +405,6 @@ public class Tasks {
remainingSeconds = (int) (((new Date()).getTime() - xml_processing.startTime.getTime()) *
(1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)) *
((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()) / 1000);
// System.out.println(((new Date()).getTime() - xml_processing.startTime.getTime()));
// System.out.println((1.0/(iFinal * 100 + ((ReadOnlyDoubleWrapper) observable).get() + 1)));
// System.out.println(((corpusSize - iFinal - 1) * 100 + 100 - ((ReadOnlyDoubleWrapper) observable).get()));
// System.out.println(remainingSeconds);
previousTime = new Date();
}
xml_processing.isCancelled = isCancelled();
@@ -497,14 +422,6 @@ public class Tasks {
updateMessage(I18N.get("message.CANCELING_NOTIFICATION"));
break;
}
// readXML(f.toString(), statisticsOneGrams);
// i++;
// this.updateProgress(i, corpusFiles.size() * 2);
// if (statistic.getFilter().getCollocability().size() > 0) {
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size() * 2, f.getName()));
// } else {
// this.updateMessage(String.format(I18N.get("message.ONGOING_NOTIFICATION_ANALYZING_FILE_X_OF_Y"), i, corpusFiles.size(), f.getName()));
// }
}
return null;
@@ -517,7 +434,6 @@ public class Tasks {
task.setOnSucceeded(e -> {
try {
System.out.print(statistic);
// calculate_collocabilities(statistic, statisticsOneGrams);
statistic.updateCalculateCollocabilities(statisticsOneGrams);
boolean successullySaved = statistic.saveResultToDisk();
if (successullySaved) {
@@ -532,21 +448,6 @@ public class Tasks {
showAlert(Alert.AlertType.ERROR, I18N.get("message.ERROR_NOT_ENOUGH_MEMORY"));
logger.error("Out of memory error", e1);
}
// try {
// boolean successullySaved = statistic.saveResultToDisk();
// if (successullySaved) {
// showAlert(Alert.AlertType.INFORMATION, Messages.NOTIFICATION_ANALYSIS_COMPLETED);
// } else {
// showAlert(Alert.AlertType.INFORMATION, Messages.NOTIFICATION_ANALYSIS_COMPLETED_NO_RESULTS);
// }
// } catch (UnsupportedEncodingException e1) {
// showAlert(Alert.AlertType.ERROR, ERROR_WHILE_SAVING_RESULTS_TO_CSV);
// logger.error("Error while saving", e1);
// } catch (OutOfMemoryError e1){
// showAlert(Alert.AlertType.ERROR, ERROR_NOT_ENOUGH_MEMORY);
// logger.error("Out of memory error", e1);
// }
//
ngramProgressBar.progressProperty().unbind();
// ngramProgressBar.setStyle(Settings.FX_ACCENT_OK);
progressLabel.textProperty().unbind();
@@ -559,7 +460,6 @@ public class Tasks {
logger.error("Error while executing", e);
ngramProgressBar.progressProperty().unbind();
ngramProgressBar.setProgress(0.0);
// ngramProgressBar.setStyle(Settings.FX_ACCENT_NOK);
progressLabel.textProperty().unbind();
progressLabel.setText("");
cancel.setVisible(false);
@@ -569,7 +469,6 @@ public class Tasks {
showAlert(Alert.AlertType.INFORMATION, I18N.get("message.NOTIFICATION_ANALYSIS_CANCELED"));
ngramProgressBar.progressProperty().unbind();
ngramProgressBar.setProgress(0.0);
// ngramProgressBar.setStyle(Settings.FX_ACCENT_OK);
progressLabel.textProperty().unbind();
progressLabel.setText("");
cancel.setVisible(false);
@@ -578,7 +477,6 @@ public class Tasks {
// When cancel button is pressed cancel analysis
cancel.setOnAction(e -> {
task.cancel();
// logger.info("cancel button");
});
return task;
}

View File

@@ -49,15 +49,4 @@ public class TimeWatch {
return "Elapsed Time in nano seconds: ";
}
/**
 * Demonstrates the intended call pattern for this class; never invoked by
 * production code.
 */
private void exampleUsage() {
    final TimeWatch stopwatch = TimeWatch.start();
    // do something...
    final String custom = "Elapsed Time custom format: " + stopwatch.toMinuteSeconds();
    final String seconds = "Elapsed Time in seconds: " + stopwatch.time(TimeUnit.SECONDS);
    final String nanos = "Elapsed Time in nano seconds: " + stopwatch.time();
    System.out.println(custom);
    System.out.println(seconds);
    System.out.println(nanos);
}
}

View File

@@ -20,22 +20,6 @@ import gui.ValidationUtil;
public class Util {
public final static Logger logger = LogManager.getLogger(Util.class);
/**
 * Renders a nanosecond duration as its "h, min, s, ms, µs, ns" components.
 *
 * @param time duration in nanoseconds
 * @return human-readable breakdown, e.g. "1 h, 2 min, 3 s, 4 ms, 5 µs, 6 ns"
 */
public static String toReadableTime(long time) {
    // Convert the total once per unit, then subtract everything already
    // accounted for by the coarser units to get each component.
    final long hrs = time(TimeUnit.HOURS, time);
    final long mins = time(TimeUnit.MINUTES, time)
            - TimeUnit.HOURS.toMinutes(hrs);
    final long secs = time(TimeUnit.SECONDS, time)
            - TimeUnit.HOURS.toSeconds(hrs)
            - TimeUnit.MINUTES.toSeconds(mins);
    final long millis = time(TimeUnit.MILLISECONDS, time)
            - TimeUnit.HOURS.toMillis(hrs)
            - TimeUnit.MINUTES.toMillis(mins)
            - TimeUnit.SECONDS.toMillis(secs);
    final long micros = time(TimeUnit.MICROSECONDS, time)
            - TimeUnit.HOURS.toMicros(hrs)
            - TimeUnit.MINUTES.toMicros(mins)
            - TimeUnit.SECONDS.toMicros(secs)
            - TimeUnit.MILLISECONDS.toMicros(millis);
    final long nanos = time(TimeUnit.NANOSECONDS, time)
            - TimeUnit.HOURS.toNanos(hrs)
            - TimeUnit.MINUTES.toNanos(mins)
            - TimeUnit.SECONDS.toNanos(secs)
            - TimeUnit.MILLISECONDS.toNanos(millis)
            - TimeUnit.MICROSECONDS.toNanos(micros);
    return String.format("%d h, %d min, %d s, %d ms, %d µs, %d ns", hrs, mins, secs, millis, micros, nanos);
}
/**
 * Interprets {@code t} as a count of nanoseconds and converts it to the
 * requested unit (truncating, per {@link TimeUnit#convert}).
 */
private static long time(TimeUnit unit, long t) {
    return unit.convert(t, TimeUnit.NANOSECONDS);
}
/**
* Converts a number to a more readable format.
* 12345 -> 12.345
@@ -97,12 +81,6 @@ public class Util {
return types.contains(o.getClass());
}
/**
 * Debug helper: dumps a map to stdout, one "key: value" pair per line.
 * NOTE(review): the "%,8d" conversion assumes values are integral numbers
 * (e.g. Long/Integer); a non-numeric V throws IllegalFormatConversionException.
 */
public static <K, V> void printMap(Map<K, V> map) {
    System.out.println("\nkey: value");
    for (Map.Entry<K, V> entry : map.entrySet()) {
        System.out.print(String.format("%s:\t %,8d%n", entry.getKey(), entry.getValue()));
    }
    System.out.println();
}
/**
* Generic map converter -> since AtomicLongs aren't as comparable.
* Converts ConcurrentHashMap<K, AtomicLong> to HashMap<K, Long>
@@ -117,23 +95,6 @@ public class Util {
return m;
}
/**
 * Orders map entries by value first; entries with equal values are ordered
 * by key. Both orderings use the types' natural {@link Comparable} order.
 */
public class ValueThenKeyComparator<K extends Comparable<? super K>,
        V extends Comparable<? super V>>
        implements Comparator<Map.Entry<K, V>> {

    @Override
    public int compare(Map.Entry<K, V> a, Map.Entry<K, V> b) {
        final int byValue = a.getValue().compareTo(b.getValue());
        // Fall back to the key ordering only on a value tie.
        return byValue != 0 ? byValue : a.getKey().compareTo(b.getKey());
    }
}
/**
* Sorts a map in a descending order by value.
@@ -183,25 +144,6 @@ public class Util {
return result;
}
/**
 * Debug helper: prints each key with its count (formatted via
 * {@code Util.formatNumberReadable}) and its share of {@code number_of_words}
 * as a percentage, under the given title.
 * NOTE(review): the V type parameter is unused; kept only to preserve the
 * existing generic signature.
 */
public static <K, V> void printMap(Map<K, Integer> map, String title, int number_of_words) {
    System.out.println(String.format("\n%s\n------------\nkey: value\tpercent", title));
    for (Map.Entry<K, Integer> entry : map.entrySet()) {
        final K key = entry.getKey();
        final Integer count = entry.getValue();
        final double percent = (double) count / number_of_words * 100;
        System.out.println(String.format("%s:\t %s\t %s%%",
                key,
                Util.formatNumberReadable(count),
                Util.formatNumberReadable(percent)));
    }
    System.out.println();
}
/**
 * Sums all frequency counts in the map.
 *
 * @param map key-to-frequency mapping
 * @return the total of all values
 */
static long mapSumFrequencies(Map<MultipleHMKeys, Long> map) {
    return map.values().stream().mapToLong(Long::longValue).sum();
}
/**
* Used for passing optional integer values for sorting.

View File

@@ -84,16 +84,6 @@ public class RDB {
}
}
// public byte[] atomicIntToByteArray(final AtomicLong i) {
// BigInteger bigInt = BigInteger.valueOf(i.intValue());
//
// return bigInt.toByteArray();
// }
// Exposes the underlying RocksDB handle for callers needing direct access.
public RocksDB getDb() {
    return db;
}
public Map<String, AtomicLong> getDump() throws UnsupportedEncodingException {
Map<String, AtomicLong> dump = new HashMap<>();
RocksDB.loadLibrary();