Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project by stanfordnlp.
Example from the class Dictionaries, method loadSemantics:
/**
 * Loads the semantic resources (word embeddings) requested by the given
 * coreference properties. Tries the serialized vector map first and falls
 * back to the raw word2vec file; fails if neither source is readable.
 */
public void loadSemantics(Properties props) throws ClassNotFoundException, IOException {
log.info("LOADING SEMANTICS");
if (HybridCorefProperties.loadWordEmbedding(props)) {
log.info("LOAD: WordVectors");
String serializedPath = HybridCorefProperties.getPathSerializedWordVectors(props);
String rawWord2vecPath = HybridCorefProperties.getPathWord2Vec(props);
try {
// Preferred source: a previously serialized vector map.
vectors = VectorMap.deserialize(serializedPath);
} catch (IOException e) {
// No raw word2vec file to fall back on either: give up and rethrow.
if (!new File(rawWord2vecPath).exists()) {
throw new RuntimeIOException(e);
}
// Fallback: parse the raw word2vec file.
vectors = VectorMap.readWord2Vec(rawWord2vecPath);
// Cache the parsed vectors for next time, unless the target path looks
// like a classpath resource (starts with "edu") rather than a writable file.
boolean cacheable = serializedPath != null && !serializedPath.startsWith("edu");
if (cacheable) {
vectors.serialize(serializedPath);
}
}
// Embedding dimensionality, taken from an arbitrary entry of the map.
dimVector = vectors.entrySet().iterator().next().getValue().length;
}
}
Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project by stanfordnlp.
Example from the class MultinomialLogisticClassifier, method load:
/**
 * Deserializes a classifier from the given path.
 *
 * @param path Location of the serialized classifier (file or classpath resource)
 * @return The reconstructed classifier
 * @throws RuntimeIOException If the stream cannot be read or a class is missing
 */
private static <LL, FF> MultinomialLogisticClassifier<LL, FF> load(String path) {
Timing timer = new Timing();
try (ObjectInputStream stream = IOUtils.readStreamFromString(path)) {
// The classifier was written as weights, then feature index, then label
// index; read the three objects back in exactly that order.
double[][] weights = ErasureUtils.uncheckedCast(stream.readObject());
Index<FF> featureIndex = ErasureUtils.uncheckedCast(stream.readObject());
Index<LL> labelIndex = ErasureUtils.uncheckedCast(stream.readObject());
timer.done(logger, "Loading classifier from " + path);
return new MultinomialLogisticClassifier<>(weights, featureIndex, labelIndex);
} catch (IOException | ClassNotFoundException e) {
throw new RuntimeIOException("Error loading classifier from " + path, e);
}
}
Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project by stanfordnlp.
Example from the class CMMClassifier, method getThresholds:
/**
 * Reads a threshold file mapping regular expressions to integer thresholds.
 * Each line has the form {@code <regex> <int>}: the threshold is the token
 * after the last space, so the regex itself may contain spaces.
 *
 * @param filename File or resource to read, one pattern/threshold pair per line
 * @return List of (compiled pattern, threshold) pairs in file order
 * @throws RuntimeIOException If the file cannot be opened or read
 */
private static List<Pair<Pattern, Integer>> getThresholds(String filename) {
  // try-with-resources closes the reader on all paths; this replaces the old
  // null-initialized reader with a manual close() inside the try plus a
  // redundant closeIgnoringExceptions() in a finally block.
  try (BufferedReader in = IOUtils.readerFromString(filename)) {
    List<Pair<Pattern, Integer>> thresholds = new ArrayList<>();
    for (String line; (line = in.readLine()) != null; ) {
      // Split on the LAST space: everything before it is the pattern,
      // everything after it is the integer threshold.
      int i = line.lastIndexOf(' ');
      Pattern p = Pattern.compile(line.substring(0, i));
      // log.info(":"+line.substring(0,i)+":");
      Integer t = Integer.valueOf(line.substring(i + 1));
      thresholds.add(new Pair<>(p, t));
    }
    return thresholds;
  } catch (IOException e) {
    throw new RuntimeIOException("Error reading threshold file", e);
  }
}
Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project by stanfordnlp.
Example from the class StanfordCoreNLPServer, method launchServer:
/**
 * The main method.
 * Read the command line arguments, configure a server instance, and run it.
 *
 * @param args The command line arguments
 *
 * @return The running server instance
 *
 * @throws IOException Thrown if we could not start / run the server.
 */
public static StanfordCoreNLPServer launchServer(String[] args) throws IOException {
// Add a bit of logging
log("--- " + StanfordCoreNLPServer.class.getSimpleName() + "#main() called ---");
String build = System.getenv("BUILD");
if (build != null) {
log(" Build: " + build);
}
// Log a shutdown message when the JVM exits, however that happens.
Runtime.getRuntime().addShutdownHook(new Thread(() -> log("CoreNLP Server is shutting down.")));
// Fill arguments
ArgumentParser.fillOptions(StanfordCoreNLPServer.class, args);
// get server properties from command line
Properties serverProperties = StringUtils.argsToProperties(args);
// must come after filling global options
StanfordCoreNLPServer server = new StanfordCoreNLPServer(serverProperties);
// Fill the instance-level (non-static) options on the server object itself.
ArgumentParser.fillOptions(server, args);
// server port is not the default 9000: mirror the custom port onto the
// status port when no explicit status_port was given
if (!serverProperties.containsKey("status_port") && serverProperties.containsKey("port")) {
server.statusPort = Integer.parseInt(serverProperties.getProperty("port"));
}
log("Threads: " + ArgumentParser.threads);
// Start the liveness server; `live` is flipped to true once the main
// server is ready, switching the health endpoint from alive to ready.
AtomicBoolean live = new AtomicBoolean(false);
server.livenessServer(live);
// Create the homepage handler from the bundled brat demo page.
FileHandler homepage;
try {
homepage = new FileHandler("edu/stanford/nlp/pipeline/demo/corenlp-brat.html");
} catch (IOException e) {
throw new RuntimeIOException(e);
}
// Pre-load the models
if (StanfordCoreNLPServer.preloadedAnnotators != null) {
Properties props = new Properties();
// Copy the server's default properties into a fresh Properties object.
server.defaultProps.forEach((key1, value) -> props.setProperty(key1.toString(), value.toString()));
// -preload flag alone means to load all default annotators
// -preload flag with a list of annotators means to preload just that list (e.g. tokenize,ssplit,pos)
String annotatorsToLoad = (StanfordCoreNLPServer.preloadedAnnotators.trim().equals("true")) ? server.defaultProps.getProperty("annotators") : StanfordCoreNLPServer.preloadedAnnotators;
if (annotatorsToLoad != null)
props.setProperty("annotators", annotatorsToLoad);
try {
// Constructing the pipeline forces all model loading; the instance is
// discarded, only the warmed caches matter. Pre-load failure is logged
// but deliberately non-fatal: the server can still start.
new StanfordCoreNLP(props);
} catch (Throwable throwable) {
err("Could not pre-load annotators in server; encountered exception:");
err(throwable);
}
}
// Credentials: only set when both username and password were supplied.
Optional<Pair<String, String>> credentials = Optional.empty();
if (server.username != null && server.password != null) {
credentials = Optional.of(Pair.makePair(server.username, server.password));
}
// Run the server (accepts every request; no per-response callback).
log("Starting server...");
server.run(credentials, req -> true, res -> {
}, homepage, server.ssl, live);
return server;
}
Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project by stanfordnlp.
Example from the class TokensRegexNERAnnotator, method readEntries:
// end static class Entry
/**
* Creates a combined list of Entries using the provided mapping files.
*
* @param mappings List of mapping files
* @return list of Entries
*/
/**
 * Creates a combined list of Entries using the provided mapping files.
 *
 * <p>Entries are deliberately NOT sorted here (unlike RegexNERClassifier):
 * TokensRegex NER resolves priorities and matches after all matching is done,
 * because for some TokensRegex expressions the number of matched tokens is
 * unknown until matching completes.
 *
 * @param mappings List of mapping files
 * @return list of Entries
 */
private static List<Entry> readEntries(String annotatorName, Set<String> noDefaultOverwriteLabels, List<Boolean> ignoreCaseList, List<String[]> headerList, Map<Entry, Integer> entryToMappingFileNumber, boolean verbose, String[] annotationFieldnames, String... mappings) {
List<Entry> entries = new ArrayList<>();
TrieMap<String, Entry> seenRegexes = new TrieMap<>();
// Walk the mapping files in order, tracking the file index by hand since
// the per-file header/case settings are parallel lists keyed by index.
int fileIndex = 0;
for (String mappingFile : mappings) {
try (BufferedReader reader = IOUtils.readerFromString(mappingFile)) {
readEntries(annotatorName, headerList.get(fileIndex), annotationFieldnames, entries, seenRegexes, mappingFile, reader, noDefaultOverwriteLabels, ignoreCaseList.get(fileIndex), fileIndex, entryToMappingFileNumber, verbose);
} catch (IOException e) {
throw new RuntimeIOException("Couldn't read TokensRegexNER from " + mappingFile, e);
}
fileIndex++;
}
// With multiple input files, summarize the total number of unique entries.
if (mappings.length != 1) {
logger.log(annotatorName + ": Read " + entries.size() + " unique entries from " + mappings.length + " files");
}
return entries;
}
Aggregations