use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho.
the class SortRows method getBuffer.
// Gets sorted rows from the available temp files in an iterative manner:
// each call to this method returns the next row in sorted order,
// until all temp files have been read to the end.
/**
 * Returns the next row in overall sorted order, or {@code null} when no rows remain.
 *
 * Two modes, selected by whether rows were spilled to temp files:
 * 1) No temp files: rows are served straight from the in-memory buffer,
 *    advancing {@code data.getBufferIndex}.
 * 2) Temp files exist: a k-way merge. On the first call every temp file is
 *    opened and one row read from each into {@code data.tempRows}; each
 *    subsequent call returns the smallest buffered row and refills the buffer
 *    from the file that row came from.
 *
 * @return the next sorted row, or {@code null} when exhausted (or on a close
 *         failure, in which case errors are set and the transformation is stopped)
 * @throws KettleValueException on a read timeout (should never happen on local files)
 */
Object[] getBuffer() throws KettleValueException {
Object[] retval;
// Open all files at once and read one row from each file...
// Empty data.dis/data.fis means the temp files have not been opened yet,
// so this branch runs only on the first call of the merge phase.
if (data.files.size() > 0 && (data.dis.size() == 0 || data.fis.size() == 0)) {
if (log.isBasic()) {
logBasic(BaseMessages.getString(PKG, "SortRows.Basic.OpeningTempFiles", data.files.size()));
}
try {
for (int f = 0; f < data.files.size() && !isStopped(); f++) {
FileObject fileObject = data.files.get(f);
String filename = KettleVFS.getFilename(fileObject);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "SortRows.Detailed.OpeningTempFile", filename));
}
InputStream fi = KettleVFS.getInputStream(fileObject);
DataInputStream di;
data.fis.add(fi);
// Temp files may have been written gzip-compressed; mirror that when reading back.
if (data.compressFiles) {
di = getDataInputStream(new GZIPInputStream(new BufferedInputStream(fi)));
} else {
di = new DataInputStream(new BufferedInputStream(fi, 50000));
}
data.dis.add(di);
// How long is the buffer? (the number of rows written into temp file f)
int buffersize = data.bufferSizes.get(f);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "SortRows.Detailed.FromFileExpectingRows", filename, buffersize));
}
// Prime the merge: read the first row of each non-empty file.
// RowTempFile remembers which file the row came from so it can be refilled.
if (buffersize > 0) {
Object[] row = data.outputRowMeta.readData(di);
// new row from input stream
data.rowbuffer.add(row);
data.tempRows.add(new RowTempFile(row, f));
}
}
// Sort the data row buffer
Collections.sort(data.tempRows, data.comparator);
} catch (Exception e) {
// NOTE(review): the error is logged but not propagated and the merge
// continues with whatever files were opened — confirm this is intended
// rather than setErrors(1)/stopAll() as done for close failures below.
logError(BaseMessages.getString(PKG, "SortRows.Error.ErrorReadingBackTempFiles"), e);
}
}
// Mode 1: everything fit in memory — serve rows sequentially from the buffer.
if (data.files.size() == 0) {
if (data.getBufferIndex < data.buffer.size()) {
retval = data.buffer.get(data.getBufferIndex);
data.getBufferIndex++;
} else {
retval = null;
}
} else {
// Mode 2: merge phase. An empty row buffer means all files are drained.
if (data.rowbuffer.size() == 0) {
retval = null;
} else {
//
if (log.isRowLevel()) {
for (int i = 0; i < data.rowbuffer.size() && !isStopped(); i++) {
Object[] b = data.rowbuffer.get(i);
logRowlevel(BaseMessages.getString(PKG, "SortRows.RowLevel.PrintRow", i, data.outputRowMeta.getString(b)));
}
}
// tempRows is kept sorted, so the head is the smallest remaining row.
RowTempFile rowTempFile = data.tempRows.remove(0);
retval = rowTempFile.row;
int smallest = rowTempFile.fileNumber;
// now get another Row for position smallest
FileObject file = data.files.get(smallest);
DataInputStream di = data.dis.get(smallest);
InputStream fi = data.fis.get(smallest);
try {
Object[] row2 = data.outputRowMeta.readData(di);
RowTempFile extra = new RowTempFile(row2, smallest);
// Insert the replacement row at its sorted position; a negative
// binarySearch result encodes the insertion point as -(index) - 1.
int index = Collections.binarySearch(data.tempRows, extra, data.comparator);
if (index < 0) {
data.tempRows.add(index * (-1) - 1, extra);
} else {
data.tempRows.add(index, extra);
}
} catch (KettleFileException fe) {
// empty file or EOF mostly
// This file is exhausted: close it, delete it, and drop it from the
// parallel bookkeeping lists.
GZIPInputStream gzfi = (data.compressFiles) ? data.gzis.get(smallest) : null;
try {
di.close();
fi.close();
if (gzfi != null) {
gzfi.close();
}
file.delete();
} catch (IOException e) {
logError(BaseMessages.getString(PKG, "SortRows.Error.UnableToCloseFile", smallest, file.toString()));
setErrors(1);
stopAll();
return null;
}
data.files.remove(smallest);
data.dis.remove(smallest);
data.fis.remove(smallest);
if (gzfi != null) {
data.gzis.remove(smallest);
}
// Removing entry 'smallest' shifted every later list index down by one,
// so re-point the buffered rows at their new file indices.
for (RowTempFile rtf : data.tempRows) {
if (rtf.fileNumber > smallest) {
rtf.fileNumber--;
}
}
} catch (SocketTimeoutException e) {
// should never happen on local files
throw new KettleValueException(e);
}
}
}
return retval;
}
use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho.
the class ScriptAddedFunctions method checkAndLoadJSFile.
/**
 * Loads the script file referenced by {@code fileName} and evaluates it with
 * the given engine against the given bindings.
 *
 * Failures (file access or script evaluation) are reported as an unchecked
 * {@link RuntimeException} carrying the original exception as its cause.
 *
 * @param actualContext the JSR-223 script engine used to evaluate the file
 * @param eval_scope the bindings the script is evaluated against
 * @param fileName the (VFS) name of the script file to load
 */
private static void checkAndLoadJSFile(ScriptEngine actualContext, Bindings eval_scope, String fileName) {
  // TODO AKRETION: refine the catch types once a JSR-223-compatible exception
  // mapping is decided (FileNotFound/Wrapped/Evaluator/JavaScript/IOException).
  Reader inStream = null;
  try {
    inStream = new InputStreamReader(KettleVFS.getInputStream(fileName));
    actualContext.eval(inStream, eval_scope);
  } catch (KettleFileException Signal) {
    // BUG FIX: the RuntimeException was previously constructed but never
    // thrown, silently swallowing file-access errors. Throw it and keep the
    // original exception as the cause.
    throw new RuntimeException("Error while reading file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")", Signal);
  } catch (ScriptException Signal) {
    // BUG FIX: same as above — evaluation errors were silently dropped.
    throw new RuntimeException("Error while reading file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")", Signal);
  } finally {
    try {
      if (inStream != null) {
        inStream.close();
      }
    } catch (Exception ignored) {
      // Best-effort close; nothing useful to do on failure.
    }
  }
}
use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho.
the class ScriptValuesAddedFunctions method checkAndLoadJSFile.
// Evaluates the given ScriptFile
private static void checkAndLoadJSFile(Context actualContext, Scriptable eval_scope, String fileName) {
Reader inStream = null;
try {
inStream = new InputStreamReader(KettleVFS.getInputStream(fileName));
actualContext.evaluateReader(eval_scope, inStream, fileName, 1, null);
} catch (FileNotFoundException Signal) {
Context.reportError("Unable to open file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")");
} catch (WrappedException Signal) {
Context.reportError("WrappedException while evaluating file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")");
} catch (EvaluatorException Signal) {
Context.reportError("EvaluatorException while evaluating file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")");
} catch (JavaScriptException Signal) {
Context.reportError("JavaScriptException while evaluating file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")");
} catch (IOException Signal) {
Context.reportError("Error while reading file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")");
} catch (KettleFileException Signal) {
Context.reportError("Error while reading file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")");
} finally {
try {
if (inStream != null) {
inStream.close();
}
} catch (Exception Signal) {
// Ignore
}
}
}
use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho.
the class AbstractFileErrorHandler method getWriter.
/**
 * Returns the cached {@link Writer} for the given source, creating and
 * registering a new one on first use.
 *
 * @param source key identifying which error-handling output is wanted
 * @return the writer associated with {@code source}, never {@code null}
 * @throws KettleException if the replay file name cannot be resolved or the
 *         output stream cannot be opened
 */
Writer getWriter(Object source) throws KettleException {
  try {
    Writer cached = writers.get(source);
    if (cached != null) {
      return cached;
    }
    // First request for this source: resolve the replay file and register it
    // as a result file of the owning step.
    FileObject replayFile = getReplayFilename(destinationDirectory, processingFilename, dateString, fileExtension, source);
    baseStep.addResultFile(new ResultFile(ResultFile.FILE_TYPE_GENERAL, replayFile, baseStep.getTransMeta().getName(), baseStep.getStepname()));
    Writer created;
    try {
      created = (encoding == null)
          ? new OutputStreamWriter(KettleVFS.getOutputStream(replayFile, false))
          : new OutputStreamWriter(KettleVFS.getOutputStream(replayFile, false), encoding);
    } catch (Exception e) {
      throw new KettleException(BaseMessages.getString(PKG, "AbstractFileErrorHandler.Exception.CouldNotCreateFileErrorHandlerForFile") + replayFile.getName().getURI(), e);
    }
    writers.put(source, created);
    return created;
  } catch (KettleFileException e) {
    throw new KettleException(BaseMessages.getString(PKG, "AbstractFileErrorHandler.Exception.CouldNotCreateFileErrorHandlerForFile"), e);
  }
}
use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho.
the class CsvInput method readFieldNamesFromFile.
/**
 * Reads the header line of the given CSV file and splits it into field names.
 *
 * Delimiter, enclosure and encoding from the step metadata are
 * environment-substituted before use. A BOM (UTF-8/UTF-16LE/UTF-16BE) is
 * stripped transparently. Enclosure characters are removed from the resulting
 * names and the names are trimmed.
 *
 * @param fileName the (VFS) name of the CSV file to inspect
 * @param csvInputMeta step metadata supplying delimiter, enclosure, encoding
 *        and escape character
 * @return the field names found on the first line
 * @throws KettleException if the file cannot be opened or read
 */
String[] readFieldNamesFromFile(String fileName, CsvInputMeta csvInputMeta) throws KettleException {
  String delimiter = environmentSubstitute(csvInputMeta.getDelimiter());
  String enclosure = environmentSubstitute(csvInputMeta.getEnclosure());
  String realEncoding = environmentSubstitute(csvInputMeta.getEncoding());
  try (FileObject fileObject = KettleVFS.getFileObject(fileName, getTransMeta());
      BOMInputStream inputStream = new BOMInputStream(KettleVFS.getInputStream(fileObject), ByteOrderMark.UTF_8, ByteOrderMark.UTF_16LE, ByteOrderMark.UTF_16BE);
      // Reader participates in try-with-resources so it is always closed too.
      InputStreamReader reader = Utils.isEmpty(realEncoding)
          ? new InputStreamReader(inputStream)
          : new InputStreamReader(inputStream, realEncoding)) {
    EncodingType encodingType = EncodingType.guessEncodingType(reader.getEncoding());
    String line = TextFileInput.getLine(log, reader, encodingType, TextFileInputMeta.FILE_FORMAT_UNIX, new StringBuilder(1000));
    String[] fieldNames = CsvInput.guessStringsFromLine(log, line, delimiter, enclosure, csvInputMeta.getEscapeCharacter());
    // BUG FIX: use the environment-substituted enclosure, consistent with the
    // guessStringsFromLine call above; previously the raw, unsubstituted meta
    // value was used here, so a variable-defined enclosure was never removed.
    if (!Utils.isEmpty(enclosure)) {
      removeEnclosure(fieldNames, enclosure);
    }
    trimFieldNames(fieldNames);
    return fieldNames;
  } catch (IOException e) {
    throw new KettleFileException(BaseMessages.getString(PKG, "CsvInput.Exception.CreateFieldMappingError"), e);
  }
}
Aggregations