Use of java.io.BufferedReader in project CoreNLP by stanfordnlp.
Class CharniakScoredParsesReaderWriter, method stringToParses.
/**
 * Convert a string representing scored parses (in the Charniak parser
 * output format) to a list of scored parse trees.
 *
 * @param parseStr string containing the scored parses
 * @return list of scored parse trees
 */
public List<ScoredObject<Tree>> stringToParses(String parseStr) {
  try {
    BufferedReader br = new BufferedReader(new StringReader(parseStr));
    Iterable<List<ScoredObject<Tree>>> trees = readScoredTrees("", br);
    List<ScoredObject<Tree>> res = null;
    if (trees != null) {
      Iterator<List<ScoredObject<Tree>>> iter = trees.iterator();
      if (iter != null && iter.hasNext()) {
        res = iter.next();
      }
    }
    br.close();
    return res;
  } catch (IOException ex) {
    throw new RuntimeException(ex);
  }
}
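A minimal usage sketch for this method. The no-arg constructor and the n-best input layout shown here (a header line, then alternating score and parse lines) are assumptions about the Charniak output format, not taken from the source; the sentence and scores are invented.

CharniakScoredParsesReaderWriter rw = new CharniakScoredParsesReaderWriter();
// Assumed n-best layout: "<numParses> <sentenceId>", then score/tree pairs.
String parseStr =
    "2 sent.1\n" +
    "-42.0\n" +
    "(S1 (S (NP (DT The) (NN cat)) (VP (VBD sat))))\n" +
    "-45.5\n" +
    "(S1 (S (NP (DT The) (NN cat)) (VP (VBD slept))))\n";
List<ScoredObject<Tree>> parses = rw.stringToParses(parseStr);
if (parses != null) {
  for (ScoredObject<Tree> scored : parses) {
    System.out.println(scored.score() + "\t" + scored.object());
  }
}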
Use of java.io.BufferedReader in project CoreNLP by stanfordnlp.
Class AbstractBatchOptimizer, method optimize.
public <T> ConcatVector optimize(T[] dataset, AbstractDifferentiableFunction<T> fn, ConcatVector initialWeights, double l2regularization, double convergenceDerivativeNorm, boolean quiet) {
  if (!quiet)
    log.info("\n**************\nBeginning training\n");
  else
    log.info("[Beginning quiet training]");
  TrainingWorker<T> mainWorker = new TrainingWorker<>(dataset, fn, initialWeights, l2regularization, convergenceDerivativeNorm, quiet);
  new Thread(mainWorker).start();
  BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
  if (!quiet) {
    log.info("NOTE: you can press any key (and maybe ENTER afterwards to jog stdin) to terminate learning early.");
    log.info("The convergence criteria are quite aggressive if left uninterrupted, and will run for a while");
    log.info("if left to their own devices.\n");
    while (true) {
      if (mainWorker.isFinished) {
        log.info("training completed without interruption");
        return mainWorker.weights;
      }
      try {
        if (br.ready()) {
          log.info("received quit command: quitting");
          log.info("training completed by interruption");
          mainWorker.isFinished = true;
          return mainWorker.weights;
        }
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  } else {
    while (!mainWorker.isFinished) {
      synchronized (mainWorker.naturalTerminationBarrier) {
        try {
          mainWorker.naturalTerminationBarrier.wait();
        } catch (InterruptedException e) {
          throw new RuntimeInterruptedException(e);
        }
      }
    }
    log.info("[Quiet training complete]");
    return mainWorker.weights;
  }
}
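The non-quiet branch above uses br.ready() to poll System.in without blocking, so a keypress ends training early while the loop otherwise spins until the worker converges. A self-contained sketch of that polling pattern, with an invented counter standing in for the optimization step:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class InterruptibleLoop {
  public static void main(String[] args) throws IOException {
    BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
    long steps = 0;
    while (true) {
      steps++;  // stand-in for one optimization step
      // ready() is non-blocking: it only reports whether buffered input
      // exists, so the loop keeps running until the user presses ENTER.
      if (stdin.ready()) {
        System.out.println("interrupted after " + steps + " steps");
        return;
      }
    }
  }
}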
Use of java.io.BufferedReader in project CoreNLP by stanfordnlp.
Class ParseAndPrintMatrices, method main.
public static void main(String[] args) throws IOException {
  String modelPath = null;
  String outputPath = null;
  String inputPath = null;
  String testTreebankPath = null;
  FileFilter testTreebankFilter = null;
  List<String> unusedArgs = Generics.newArrayList();
  for (int argIndex = 0; argIndex < args.length; ) {
    if (args[argIndex].equalsIgnoreCase("-model")) {
      modelPath = args[argIndex + 1];
      argIndex += 2;
    } else if (args[argIndex].equalsIgnoreCase("-output")) {
      outputPath = args[argIndex + 1];
      argIndex += 2;
    } else if (args[argIndex].equalsIgnoreCase("-input")) {
      inputPath = args[argIndex + 1];
      argIndex += 2;
    } else if (args[argIndex].equalsIgnoreCase("-testTreebank")) {
      Pair<String, FileFilter> treebankDescription = ArgUtils.getTreebankDescription(args, argIndex, "-testTreebank");
      argIndex = argIndex + ArgUtils.numSubArgs(args, argIndex) + 1;
      testTreebankPath = treebankDescription.first();
      testTreebankFilter = treebankDescription.second();
    } else {
      unusedArgs.add(args[argIndex++]);
    }
  }
  String[] newArgs = unusedArgs.toArray(new String[unusedArgs.size()]);
  LexicalizedParser parser = LexicalizedParser.loadModel(modelPath, newArgs);
  DVModel model = DVParser.getModelFromLexicalizedParser(parser);
  File outputFile = new File(outputPath);
  FileSystem.checkNotExistsOrFail(outputFile);
  FileSystem.mkdirOrFail(outputFile);
  int count = 0;
  if (inputPath != null) {
    Reader input = new BufferedReader(new FileReader(inputPath));
    DocumentPreprocessor processor = new DocumentPreprocessor(input);
    for (List<HasWord> sentence : processor) {
      // index from 1
      count++;
      ParserQuery pq = parser.parserQuery();
      if (!(pq instanceof RerankingParserQuery)) {
        throw new IllegalArgumentException("Expected a RerankingParserQuery");
      }
      RerankingParserQuery rpq = (RerankingParserQuery) pq;
      if (!rpq.parse(sentence)) {
        throw new RuntimeException("Unparsable sentence: " + sentence);
      }
      RerankerQuery reranker = rpq.rerankerQuery();
      if (!(reranker instanceof DVModelReranker.Query)) {
        throw new IllegalArgumentException("Expected a DVModelReranker");
      }
      DeepTree deepTree = ((DVModelReranker.Query) reranker).getDeepTrees().get(0);
      IdentityHashMap<Tree, SimpleMatrix> vectors = deepTree.getVectors();
      for (Map.Entry<Tree, SimpleMatrix> entry : vectors.entrySet()) {
        log.info(entry.getKey() + " " + entry.getValue());
      }
      FileWriter fout = new FileWriter(outputPath + File.separator + "sentence" + count + ".txt");
      BufferedWriter bout = new BufferedWriter(fout);
      bout.write(SentenceUtils.listToString(sentence));
      bout.newLine();
      bout.write(deepTree.getTree().toString());
      bout.newLine();
      for (HasWord word : sentence) {
        outputMatrix(bout, model.getWordVector(word.word()));
      }
      Tree rootTree = findRootTree(vectors);
      outputTreeMatrices(bout, rootTree, vectors);
      bout.flush();
      fout.close();
    }
  }
}
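A hypothetical invocation of this entry point; every path below is a placeholder, and the model must be one trained with a DVModel reranker or the RerankingParserQuery checks above will fail.

// Hypothetical invocation; all paths are placeholders.
String[] cmdArgs = {
    "-model", "/path/to/dvmodel.ser.gz",
    "-input", "sentences.txt",
    "-output", "matrices-out"
};
ParseAndPrintMatrices.main(cmdArgs);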
Use of java.io.BufferedReader in project CoreNLP by stanfordnlp.
Class ChineseSimWordAvgDepGrammar, method getMap.
public Map<Pair<Integer, String>, List<Triple<Integer, String, Double>>> getMap(String filename) {
  Map<Pair<Integer, String>, List<Triple<Integer, String, Double>>> hashMap = Generics.newHashMap();
  try {
    BufferedReader wordMapBReader = new BufferedReader(new InputStreamReader(new FileInputStream(filename), "UTF-8"));
    String wordMapLine;
    Pattern linePattern = Pattern.compile("sim\\((.+)/(.+):(.+)/(.+)\\)=(.+)");
    while ((wordMapLine = wordMapBReader.readLine()) != null) {
      Matcher m = linePattern.matcher(wordMapLine);
      if (!m.matches()) {
        log.info("Ill-formed line in similar word map file: " + wordMapLine);
        continue;
      }
      Pair<Integer, String> iTW = new Pair<>(wordIndex.addToIndex(m.group(1)), m.group(2));
      double score = Double.parseDouble(m.group(5));
      List<Triple<Integer, String, Double>> tripleList = hashMap.get(iTW);
      if (tripleList == null) {
        tripleList = new ArrayList<>();
        hashMap.put(iTW, tripleList);
      }
      tripleList.add(new Triple<>(wordIndex.addToIndex(m.group(3)), m.group(4), score));
    }
  } catch (IOException e) {
    // Preserve the underlying cause rather than swallowing it.
    throw new RuntimeException("Problem reading similar words file!", e);
  }
  return hashMap;
}
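The accepted line format follows directly from the regex: sim(word1/tag1:word2/tag2)=score. A small self-contained demonstration of the same pattern on an invented line (the words, tags, and score are placeholders):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SimLineDemo {
  public static void main(String[] args) {
    Pattern linePattern = Pattern.compile("sim\\((.+)/(.+):(.+)/(.+)\\)=(.+)");
    Matcher m = linePattern.matcher("sim(猫/NN:狗/NN)=0.73");
    if (m.matches()) {
      String word1 = m.group(1);  // first word
      String tag1 = m.group(2);   // its POS tag
      String word2 = m.group(3);  // similar word
      String tag2 = m.group(4);   // its POS tag
      double score = Double.parseDouble(m.group(5));  // similarity score
      System.out.println(word1 + "/" + tag1 + " ~ " + word2 + "/" + tag2 + " = " + score);
    }
  }
}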
Use of java.io.BufferedReader in project platform_frameworks_base by android.
Class ZygoteInit, method preloadClasses.
/**
* Performs Zygote process initialization. Loads and initializes
* commonly used classes.
*
* Most classes only cause a few hundred bytes to be allocated, but
* a few will allocate a dozen Kbytes (in one case, 500+K).
*/
private static void preloadClasses() {
  final VMRuntime runtime = VMRuntime.getRuntime();
  InputStream is;
  try {
    is = new FileInputStream(PRELOADED_CLASSES);
  } catch (FileNotFoundException e) {
    Log.e(TAG, "Couldn't find " + PRELOADED_CLASSES + ".");
    return;
  }
  Log.i(TAG, "Preloading classes...");
  long startTime = SystemClock.uptimeMillis();
  // Drop root perms while running static initializers.
  final int reuid = Os.getuid();
  final int regid = Os.getgid();
  // We need to drop root perms only if we're already root. In the case of "wrapped"
  // processes (see WrapperInit), this function is called from an unprivileged uid
  // and gid.
  boolean droppedPrivileges = false;
  if (reuid == ROOT_UID && regid == ROOT_GID) {
    try {
      Os.setregid(ROOT_GID, UNPRIVILEGED_GID);
      Os.setreuid(ROOT_UID, UNPRIVILEGED_UID);
    } catch (ErrnoException ex) {
      throw new RuntimeException("Failed to drop root", ex);
    }
    droppedPrivileges = true;
  }
  // Alter the target heap utilization. With explicit GCs this
  // is not likely to have any effect.
  float defaultUtilization = runtime.getTargetHeapUtilization();
  runtime.setTargetHeapUtilization(0.8f);
  try {
    BufferedReader br = new BufferedReader(new InputStreamReader(is), 256);
    int count = 0;
    String line;
    while ((line = br.readLine()) != null) {
      // Skip comments and blank lines.
      line = line.trim();
      if (line.startsWith("#") || line.equals("")) {
        continue;
      }
      Trace.traceBegin(Trace.TRACE_TAG_DALVIK, line);
      try {
        if (false) {
          Log.v(TAG, "Preloading " + line + "...");
        }
        // Load and explicitly initialize the given class. Use
        // Class.forName(String, boolean, ClassLoader) to avoid repeated stack lookups
        // (to derive the caller's class-loader). Use true to force initialization, and
        // null for the boot classpath class-loader (could as well cache the
        // class-loader of this class in a variable).
        Class.forName(line, true, null);
        count++;
      } catch (ClassNotFoundException e) {
        Log.w(TAG, "Class not found for preloading: " + line);
      } catch (UnsatisfiedLinkError e) {
        Log.w(TAG, "Problem preloading " + line + ": " + e);
      } catch (Throwable t) {
        Log.e(TAG, "Error preloading " + line + ".", t);
        if (t instanceof Error) {
          throw (Error) t;
        }
        if (t instanceof RuntimeException) {
          throw (RuntimeException) t;
        }
        throw new RuntimeException(t);
      }
      Trace.traceEnd(Trace.TRACE_TAG_DALVIK);
    }
    Log.i(TAG, "...preloaded " + count + " classes in " + (SystemClock.uptimeMillis() - startTime) + "ms.");
  } catch (IOException e) {
    Log.e(TAG, "Error reading " + PRELOADED_CLASSES + ".", e);
  } finally {
    IoUtils.closeQuietly(is);
    // Restore default.
    runtime.setTargetHeapUtilization(defaultUtilization);
    // Fill in dex caches with classes, fields, and methods brought in by preloading.
    Trace.traceBegin(Trace.TRACE_TAG_DALVIK, "PreloadDexCaches");
    runtime.preloadDexCaches();
    Trace.traceEnd(Trace.TRACE_TAG_DALVIK);
    // Bring back root. We'll need it later if we're in the zygote.
    if (droppedPrivileges) {
      try {
        Os.setreuid(ROOT_UID, ROOT_UID);
        Os.setregid(ROOT_GID, ROOT_GID);
      } catch (ErrnoException ex) {
        throw new RuntimeException("Failed to restore root", ex);
      }
    }
  }
}
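The preloaded-classes file read here is a plain list of fully qualified class names, one per line, with # comments and blank lines skipped. A stripped-down sketch of the same parse-and-preload loop without the Android-specific privilege and tracing machinery (the file name is a placeholder):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class SimplePreloader {
  public static void main(String[] args) throws IOException {
    int count = 0;
    try (BufferedReader br = new BufferedReader(new FileReader("preloaded-classes"), 256)) {
      String line;
      while ((line = br.readLine()) != null) {
        line = line.trim();
        if (line.startsWith("#") || line.isEmpty()) {
          continue;  // skip comments and blank lines
        }
        try {
          // In Zygote, a null loader selects the boot class path; on a
          // plain JVM use the system class loader so application classes resolve.
          Class.forName(line, true, ClassLoader.getSystemClassLoader());
          count++;
        } catch (ClassNotFoundException e) {
          System.err.println("Class not found for preloading: " + line);
        }
      }
    }
    System.out.println("preloaded " + count + " classes");
  }
}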