Use of java.io.FileInputStream in project lucida by claritylab: class DeserializationFilter, method apply.
/**
 * Filters an array of <code>Result</code> objects by appending any results
 * deserialized from the configured input files.
 *
 * @param results results to filter
 * @return filtered results (the original results plus any deserialized ones)
 */
public Result[] apply(Result[] results) {
// any input file set?
if (serialFiles == null || serialFiles.length == 0)
    return results;
// keep old results
ArrayList<Result> resultsL = new ArrayList<Result>();
for (Result result : results) resultsL.add(result);
// deserialize and add results
for (File serialFile : serialFiles) {
    // input file exists?
    if (!serialFile.exists())
        continue;
    try {
        ObjectInputStream ois = new ObjectInputStream(new FileInputStream(serialFile));
        try {
            try {
                // read objects until EOFException signals the end of the file
                while (true) {
                    Object o = ois.readObject();
                    if (o instanceof Result) {
                        Result result = (Result) o;
                        resultsL.add(result);
                    }
                }
            } catch (EOFException e) {
                /* end of file reached */
            }
        } finally {
            // close in finally so the stream is released even if
            // deserialization fails (the original leaked it on error)
            ois.close();
        }
    } catch (Exception e) {
        MsgPrinter.printErrorMsg("Could not read serialized results:");
        MsgPrinter.printErrorMsg(e.toString());
        System.exit(1);
    }
}
return resultsL.toArray(new Result[resultsL.size()]);
}
Use of java.io.FileInputStream in project crate by crate: class PutHeadChunkRunnable, method run.
@Override
public void run() {
    // Streams the head chunk of a blob (bytesToSend bytes) to the recipient
    // node in fixed-size pieces. If the file on disk is still being written,
    // waits (a bounded number of times) for it to grow before giving up.
    FileInputStream fileInputStream = null;
    try {
        int bufSize = 4096;
        int bytesRead;
        int size;
        // give up after this many grow-waits without the file catching up
        int maxFileGrowthWait = 5;
        int fileGrowthWaited = 0;
        byte[] buffer = new byte[bufSize];
        long remainingBytes = bytesToSend;
        File pendingFile;
        try {
            pendingFile = digestBlob.file();
            if (pendingFile == null) {
                pendingFile = digestBlob.getContainerFile();
            }
            fileInputStream = new FileInputStream(pendingFile);
        } catch (FileNotFoundException e) {
            // this happens if the file has already been moved from tmpDirectory to containerDirectory
            pendingFile = digestBlob.getContainerFile();
            fileInputStream = new FileInputStream(pendingFile);
        }
        while (remainingBytes > 0) {
            size = (int) Math.min(bufSize, remainingBytes);
            bytesRead = fileInputStream.read(buffer, 0, size);
            if (bytesRead < size) {
                // short read: the writer has not produced enough bytes yet;
                // block until the file grows, then count the wait.
                // NOTE(review): fileGrowthWaited is never reset after a
                // successful full read — the limit applies cumulatively.
                waitUntilFileHasGrown(pendingFile);
                fileGrowthWaited++;
                if (fileGrowthWaited == maxFileGrowthWait) {
                    throw new HeadChunkFileTooSmallException(pendingFile.getAbsolutePath());
                }
                if (bytesRead < 1) {
                    // nothing read at all (EOF returns -1): retry the read
                    // without sending or adjusting remainingBytes
                    continue;
                }
            }
            // send whatever was read (possibly a partial chunk) and block
            // until the recipient acknowledges (txGet)
            remainingBytes -= bytesRead;
            transportService.submitRequest(recipientNode, BlobHeadRequestHandler.Actions.PUT_BLOB_HEAD_CHUNK, new PutBlobHeadChunkRequest(transferId, new BytesArray(buffer, 0, bytesRead)), TransportRequestOptions.EMPTY, EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
        }
    } catch (IOException ex) {
        logger.error("IOException in PutHeadChunkRunnable", ex);
    } finally {
        // always notify the transfer target, even on failure
        blobTransferTarget.putHeadChunkTransferFinished(transferId);
        if (watcher != null) {
            try {
                watcher.close();
            } catch (IOException e) {
                // NOTE(review): throwable-before-args ordering matches the
                // project's ESLogger API — confirm if migrating loggers
                logger.error("Error closing WatchService in {}", e, getClass().getSimpleName());
            }
        }
        if (fileInputStream != null) {
            try {
                fileInputStream.close();
            } catch (IOException e) {
                logger.error("Error closing HeadChunk", e);
            }
        }
    }
}
Use of java.io.FileInputStream in project lucida by claritylab: class Experimenter, method initialize.
/**
* The input properties that must be defined are:
* <ul>
* <li> <code>learningCombos</code> : A "|"-separated list of comma-separated
* lists of learning algorithms. The outer, "|"-separated list specifies the different hierarchical
* classifiers to experiment with. The inner, comma-separated list of algorithms
* specifies the structure of the hierarchical classifier to use for one
* experiment.
* The set of valid algorithm names is:
* <ul>
* <li> <code>KNN</code>
* <li> <code>KWAY_MIX</code>
* <li> <code>MAX_ENT</code>
* <li> <code>BWINNOW_OVA</code>
* <li> <code>MPERCEPTRON_OVA </code>
* <li> <code>NBAYES_OVA</code>
* <li> <code>VPERCEPTRON_OVA</code>
* <li> <code>ADABOOST_OVA</code>
* <li> <code>ADABOOST_CB</code>
* <li> <code>ADABOOST_MFF</code>
* <li> <code>ADABOOSTL_OVA</code>
* <li> <code>ADABOOSTL_CB</code>
* <li> <code>ADABOOSTL_MFF</code>
* <li> <code>DTREE_OVA</code>
* <li> <code>DTREE_CB</code>
* <li> <code>DTREE_MFF</code>
* <li> <code>NEGBI_OVA</code>
* <li> <code>NEGBI_CB</code>
* <li> <code>NEGBI_MFF</code>
* <li> <code>SVM_OVA</code>
* <li> <code>SVM_CB</code>
* <li> <code>SVM_MFF</code>
* </ul>
* <li> <code>featureTypeCombos</code> : A "|"-separated list of comma-separated
* lists of feature types.
* </ul>
*/
/**
 * Loads the experiment configuration for the current language pair and
 * initializes the learning-algorithm and feature-type combinations.
 *
 * @throws Exception if <code>languagePair</code> is unset or the properties
 *                   file cannot be read
 */
public void initialize() throws Exception {
    if (isInitialized())
        return;
    if (languagePair == null)
        throw new Exception("languagePair must be set before calling initialize");
    String propertiesFileName = System.getProperty("ephyra.home", ".") + "/conf/" + Experimenter.class.getName() + ".properties";
    // close the stream in finally — the original leaked the FileInputStream
    FileInputStream in = new FileInputStream(propertiesFileName);
    try {
        properties.load(in);
    } finally {
        in.close();
    }
    // narrow to the section for this language pair, e.g. "en_de"
    properties = properties.mapProperties().get(languagePair.getFirst() + "_" + languagePair.getSecond());
    learningCombos = properties.getProperty("learningCombos").split("\\|");
    featureTypeCombos = properties.getProperty("featureTypeCombos").split("\\|");
    setInitialized(true);
}
Use of java.io.FileInputStream in project lucida by claritylab: class ASSERT, method checkLogFile.
/**
 * Checks the log file for ASSERT failures. Returns <code>Integer.MAX_VALUE</code> if ASSERT successfully parsed the
 * sentences or the index of the last sentence that was parsed if ASSERT failed. -1 indicates that no sentence could
 * be parsed.
 *
 * @param logf log file
 * @return <code>Integer.MAX_VALUE</code> or index of last sentence that was parsed
 */
private static int checkLogFile(File logf) {
    BufferedReader br = null;
    try {
        br = new BufferedReader(new InputStreamReader(new FileInputStream(logf), "ISO-8859-1"));
        int lastIndex = -1;
        // a line starting with "<n>: " records sentence index n
        Pattern p = Pattern.compile("^(\\d++): ");
        String line;
        // readLine() == null is the reliable EOF test; the original used
        // ready(), which only reports buffered availability and can stop
        // the scan before the end of the stream
        while ((line = br.readLine()) != null) {
            Matcher m = p.matcher(line);
            if (m.find())
                lastIndex = Integer.parseInt(m.group(1));
            if (line.contains(" DOMAIN/FRAME/")) {
                // ASSERT crashed
                return lastIndex;
            }
        }
        // log file looks ok
        return Integer.MAX_VALUE;
    } catch (IOException e) {
        // log file cannot be read
        return -1;
    } finally {
        // always release the reader (the original leaked it on IOException)
        if (br != null) {
            try {
                br.close();
            } catch (IOException e) {
                /* nothing to do on close failure */
            }
        }
    }
}
Use of java.io.FileInputStream in project lucida by claritylab: class WordNet, method initialize.
/**
 * Initializes the wrapper for the WordNet dictionary.
 *
 * @param properties path to the JWNL property file
 * @return <code>true</code> iff initialization succeeded
 */
public static boolean initialize(String properties) {
    try {
        File file = new File(properties);
        // close the stream once JWNL has consumed it — the original leaked
        // it (JWNL.initialize reads the property file eagerly)
        FileInputStream in = new FileInputStream(file);
        try {
            JWNL.initialize(in);
        } finally {
            in.close();
        }
        dict = net.didion.jwnl.dictionary.Dictionary.getInstance();
    } catch (Exception e) {
        // contract: report failure via return value, never throw
        return false;
    }
    return true;
}
Aggregations