Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project (stanfordnlp).
Class: ClassifierCombiner — method: serializeClassifier.
// Write this ClassifierCombiner to disk; modeled on the CRFClassifier serialization code.
@Override
public void serializeClassifier(String serializePath) {
  log.info("Serializing classifier to " + serializePath + "...");
  ObjectOutputStream out = null;
  try {
    out = IOUtils.writeStreamFromString(serializePath);
    // Delegate to the stream-based overload for the actual serialization work.
    serializeClassifier(out);
    log.info("done.");
  } catch (Exception e) {
    // Wrap anything that goes wrong (I/O or otherwise) in an unchecked I/O exception.
    throw new RuntimeIOException("Failed to save classifier", e);
  } finally {
    // Best-effort close: a failure to close must not mask the real error above.
    IOUtils.closeIgnoringExceptions(out);
  }
}
Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project (stanfordnlp).
Class: CRFClassifierEvaluator — method: outputToCmd.
// Classify the evaluation data and write the answers to the given stream.
@Override
public void outputToCmd(OutputStream outputStream) {
  try {
    // Auto-flushing writer over the target stream (null charset = default encoding).
    PrintWriter writer = IOUtils.encodedOutputStreamPrintWriter(outputStream, null, true);
    classifier.classifyAndWriteAnswers(data, featurizedData, writer, classifier.makeReaderAndWriter());
  } catch (IOException ioe) {
    // Convert the checked exception into the project's unchecked I/O wrapper.
    throw new RuntimeIOException(ioe);
  }
}
Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project (stanfordnlp).
Class: ResettableReaderIteratorFactory — method: iterator.
/**
 * Returns an Iterator over the input sources in the underlying Collection.
 *
 * Any {@code Reader} elements are first spilled to temp files that replace
 * them in the collection, so the sources can be re-read on later iterations
 * (a Reader can only be consumed once).
 *
 * @return an Iterator over the input sources in the underlying Collection.
 */
@Override
public Iterator<Reader> iterator() {
  Collection<Object> newCollection = new ArrayList<>();
  for (Object o : c) {
    if (o instanceof Reader) {
      String name = o.toString() + ".tmp";
      File tmpFile;
      try {
        tmpFile = File.createTempFile(name, "");
      } catch (IOException e) {
        // Narrowed from catch (Exception): createTempFile declares only IOException,
        // and unchecked failures should propagate unwrapped.
        throw new RuntimeIOException(e);
      }
      tmpFile.deleteOnExit();
      // Drain the Reader into the temp file using the configured encoding.
      StringUtils.printToFile(tmpFile, IOUtils.slurpReader((Reader) o), false, false, enc);
      newCollection.add(tmpFile);
    } else {
      newCollection.add(o);
    }
  }
  c = newCollection;
  return new ReaderIterator();
}
Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project (stanfordnlp).
Class: ParseAndSetLabels — method: writeTrees.
/**
 * Writes each tree's {@code toString()} form to {@code outputFile},
 * one tree followed by a newline.
 *
 * @param trees the trees to write
 * @param outputFile path of the file to (over)write
 * @throws RuntimeIOException if any I/O error occurs
 */
public static void writeTrees(List<Tree> trees, String outputFile) {
  logger.info("Writing new trees to " + outputFile);
  // try-with-resources: the original only closed the writer on the happy path,
  // leaking the file handle if an IOException was thrown mid-write.
  try (BufferedWriter out = new BufferedWriter(new FileWriter(outputFile))) {
    for (Tree tree : trees) {
      out.write(tree.toString());
      out.write("\n");
    }
  } catch (IOException e) {
    throw new RuntimeIOException(e);
  }
}
Use of edu.stanford.nlp.io.RuntimeIOException in the CoreNLP project (stanfordnlp).
Class: CoNLLReadingITest — method: loadConllFileOriginal.
/**
 * Loads a CoNLL-format dependency file into parallel output lists.
 *
 * Sentences are separated by lines with fewer than 10 tab-separated columns
 * (typically blank lines). For each sentence, one CoreMap of tokens is added
 * to {@code sents} and one DependencyTree to {@code trees}.
 *
 * @param inFile    path to the CoNLL file
 * @param sents     output list; one CoreMap (with a TokensAnnotation) per sentence
 * @param trees     output list; one DependencyTree per sentence
 * @param unlabeled if true, record Config.UNKNOWN instead of the dependency label
 * @param cPOS      if true, use column 3 (coarse POS) instead of column 4
 * @throws RuntimeIOException if the file cannot be read
 */
public static void loadConllFileOriginal(String inFile, List<CoreMap> sents, List<DependencyTree> trees, boolean unlabeled, boolean cPOS) {
  CoreLabelTokenFactory tf = new CoreLabelTokenFactory(false);
  try (BufferedReader reader = IOUtils.readerFromString(inFile)) {
    List<CoreLabel> sentenceTokens = new ArrayList<>();
    DependencyTree tree = new DependencyTree();
    for (String line : IOUtils.getLineIterable(reader, false)) {
      String[] splits = line.split("\t");
      if (splits.length < 10) {
        // Sentence boundary: flush the accumulated tokens/tree, if any.
        if (sentenceTokens.size() > 0) {
          trees.add(tree);
          CoreMap sentence = new CoreLabel();
          sentence.set(CoreAnnotations.TokensAnnotation.class, sentenceTokens);
          sents.add(sentence);
          tree = new DependencyTree();
          sentenceTokens = new ArrayList<>();
        }
      } else {
        // CoNLL columns used here: 1=word form, 3/4=POS, 6=head index, 7=dep label.
        String word = splits[1], pos = cPOS ? splits[3] : splits[4], depType = splits[7];
        int head = -1;
        try {
          head = Integer.parseInt(splits[6]);
        } catch (NumberFormatException e) {
          continue;  // skip tokens whose head field is not a number
        }
        CoreLabel token = tf.makeToken(word, 0, 0);
        token.setTag(pos);
        token.set(CoreAnnotations.CoNLLDepParentIndexAnnotation.class, head);
        token.set(CoreAnnotations.CoNLLDepTypeAnnotation.class, depType);
        sentenceTokens.add(token);
        if (!unlabeled)
          tree.add(head, depType);
        else
          tree.add(head, Config.UNKNOWN);
      }
    }
    // Robustness fix: flush the final sentence when the file does not end with
    // a blank line — the original silently dropped it.
    if (sentenceTokens.size() > 0) {
      trees.add(tree);
      CoreMap sentence = new CoreLabel();
      sentence.set(CoreAnnotations.TokensAnnotation.class, sentenceTokens);
      sents.add(sentence);
    }
  } catch (IOException e) {
    throw new RuntimeIOException(e);
  }
}
Aggregations