Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.
The class XBaseInput, method openNextFile:
private void openNextFile() throws KettleException {
  // Close the last file before opening the next...
  if (data.xbi != null) {
    logBasic(BaseMessages.getString(PKG, "XBaseInput.Log.FinishedReadingRecords"));
    data.xbi.close();
  }

  // Replace possible environment variables...
  data.file_dbf = data.files.getFile(data.fileNr);
  data.fileNr++;

  try {
    data.xbi = new XBase(log, KettleVFS.getInputStream(data.file_dbf));
    data.xbi.setDbfFile(data.file_dbf.getName().getURI());
    data.xbi.open();
    if (!Utils.isEmpty(meta.getCharactersetName())) {
      data.xbi.getReader().setCharactersetName(meta.getCharactersetName());
    }

    logBasic(BaseMessages.getString(PKG, "XBaseInput.Log.OpenedXBaseFile") + " : [" + data.xbi + "]");
    data.fields = data.xbi.getFields();

    // Add this to the result file names...
    ResultFile resultFile =
      new ResultFile(ResultFile.FILE_TYPE_GENERAL, data.file_dbf, getTransMeta().getName(), getStepname());
    resultFile.setComment(BaseMessages.getString(PKG, "XBaseInput.ResultFile.Comment"));
    addResultFile(resultFile);
  } catch (Exception e) {
    logError(BaseMessages.getString(PKG, "XBaseInput.Log.Error.CouldNotOpenXBaseFile1") + data.file_dbf
      + BaseMessages.getString(PKG, "XBaseInput.Log.Error.CouldNotOpenXBaseFile2") + e.getMessage());
    throw new KettleException(e);
  }
}
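The pattern is the same in every step on this page: wrap the VFS FileObject in a ResultFile, give it a comment, and pass it to addResultFile() so it lands in the step's Result. A minimal sketch of that pattern as a hypothetical helper inside a custom step extending BaseStep (the helper name, the file parameter, and the comment text are illustrative, not part of the Kettle API):

  // Hypothetical helper inside a step that extends BaseStep.
  // 'file' is assumed to be an org.apache.commons.vfs2.FileObject the step has already resolved.
  private void registerResultFile(FileObject file) {
    ResultFile resultFile =
      new ResultFile(ResultFile.FILE_TYPE_GENERAL, file, getTransMeta().getName(), getStepname());
    resultFile.setComment("File processed by " + getStepname()); // free-form comment shown with the result files
    addResultFile(resultFile); // attaches the file to this step's Result so downstream consumers can see it
  }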
Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.
The class PropertyInput, method openNextFile:
private boolean openNextFile() {
  InputStream fis = null;
  try {
    if (!meta.isFileField()) {
      if (data.filenr >= data.files.nrOfFiles()) {
        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "PropertyInput.Log.FinishedProcessing"));
        }
        return false;
      }

      // Is this the last file?
      data.last_file = (data.filenr == data.files.nrOfFiles() - 1);
      data.file = data.files.getFile(data.filenr);

      // Move file pointer ahead!
      data.filenr++;
    } else {
      // Get row from input rowset & set row busy!
      data.readrow = getRow();
      if (data.readrow == null) {
        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "PropertyInput.Log.FinishedProcessing"));
        }
        return false;
      }

      if (first) {
        first = false;

        data.inputRowMeta = getInputRowMeta();
        data.outputRowMeta = data.inputRowMeta.clone();
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);

        // Get total previous fields
        data.totalpreviousfields = data.inputRowMeta.size();

        // Create convert meta-data objects that will contain Date & Number formatters
        data.convertRowMeta = data.outputRowMeta.cloneToType(ValueMetaInterface.TYPE_STRING);

        // Check that the filename field is provided
        if (Utils.isEmpty(meta.getDynamicFilenameField())) {
          logError(BaseMessages.getString(PKG, "PropertyInput.Log.NoField"));
          throw new KettleException(BaseMessages.getString(PKG, "PropertyInput.Log.NoField"));
        }

        // Cache the position of the field
        if (data.indexOfFilenameField < 0) {
          data.indexOfFilenameField = getInputRowMeta().indexOfValue(meta.getDynamicFilenameField());
          if (data.indexOfFilenameField < 0) {
            // The field is unreachable!
            logError(BaseMessages.getString(PKG, "PropertyInput.Log.ErrorFindingField") + "[" + meta.getDynamicFilenameField() + "]");
            throw new KettleException(BaseMessages.getString(PKG, "PropertyInput.Exception.CouldnotFindField", meta.getDynamicFilenameField()));
          }
        }
      } // End if first

      String filename = getInputRowMeta().getString(data.readrow, data.indexOfFilenameField);
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "PropertyInput.Log.FilenameInStream", meta.getDynamicFilenameField(), filename));
      }
      data.file = KettleVFS.getFileObject(filename, getTransMeta());
      // Check if file exists!
    }

    // Check if file is empty
    // long fileSize = data.file.getContent().getSize();
    data.filename = KettleVFS.getFilename(data.file);

    // Add additional fields?
    if (meta.getShortFileNameField() != null && meta.getShortFileNameField().length() > 0) {
      data.shortFilename = data.file.getName().getBaseName();
    }
    if (meta.getPathField() != null && meta.getPathField().length() > 0) {
      data.path = KettleVFS.getFilename(data.file.getParent());
    }
    if (meta.isHiddenField() != null && meta.isHiddenField().length() > 0) {
      data.hidden = data.file.isHidden();
    }
    if (meta.getExtensionField() != null && meta.getExtensionField().length() > 0) {
      data.extension = data.file.getName().getExtension();
    }
    if (meta.getLastModificationDateField() != null && meta.getLastModificationDateField().length() > 0) {
      data.lastModificationDateTime = new Date(data.file.getContent().getLastModifiedTime());
    }
    if (meta.getUriField() != null && meta.getUriField().length() > 0) {
      data.uriName = data.file.getName().getURI();
    }
    if (meta.getRootUriField() != null && meta.getRootUriField().length() > 0) {
      data.rootUriName = data.file.getName().getRootURI();
    }
    if (meta.getSizeField() != null && meta.getSizeField().length() > 0) {
      data.size = new Long(data.file.getContent().getSize());
    }

    if (meta.resetRowNumber()) {
      data.rownr = 0;
    }

    if (log.isDetailed()) {
      logDetailed(BaseMessages.getString(PKG, "PropertyInput.Log.OpeningFile", data.file.toString()));
    }

    if (meta.isAddResultFile()) {
      // Add this to the result file names...
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, data.file, getTransMeta().getName(), getStepname());
      resultFile.setComment(BaseMessages.getString(PKG, "PropertyInput.Log.FileAddedResult"));
      addResultFile(resultFile);
    }

    fis = data.file.getContent().getInputStream();
    if (data.propfiles) {
      // Load the properties file
      data.pro = new Properties();
      data.pro.load(fis);
      data.it = data.pro.keySet().iterator();
    } else {
      // Create a Wini object
      data.wini = new Wini();
      if (!Utils.isEmpty(data.realEncoding)) {
        data.wini.getConfig().setFileEncoding(Charset.forName(data.realEncoding));
      }

      // Load the INI file
      data.wini.load(fis);

      if (data.realSection != null) {
        // Just one section
        data.iniSection = data.wini.get(data.realSection);
        if (data.iniSection == null) {
          throw new KettleException(BaseMessages.getString(PKG, "PropertyInput.Error.CanNotFindSection", data.realSection, "" + data.file.getName()));
        }
      } else {
        // We need to fetch all sections
        data.itSection = data.wini.keySet().iterator();
        data.iniSection = data.wini.get(data.itSection.next().toString());
      }
      data.iniIt = data.iniSection.keySet().iterator();
    }

    if (log.isDetailed()) {
      logDetailed(BaseMessages.getString(PKG, "PropertyInput.Log.FileOpened", data.file.toString()));
      logDetailed(BaseMessages.getString(PKG, "PropertyInput.log.TotalKey", "" + (data.propfiles ? data.pro.size() : data.iniSection.size()), KettleVFS.getFilename(data.file)));
    }
  } catch (Exception e) {
    logError(BaseMessages.getString(PKG, "PropertyInput.Log.UnableToOpenFile", "" + data.filenr, data.file.toString(), e.toString()));
    stopAll();
    setErrors(1);
    return false;
  } finally {
    BaseStep.closeQuietly(fis);
  }
  return true;
}
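Registering files is only half of the picture; something has to read them back out of the Result. A minimal sketch of the consumer side, assuming a Trans instance named trans that has finished executing (the variable name and the println logging are illustrative):

  // Collect the files that steps such as PropertyInput registered via addResultFile(...)
  Result result = trans.getResult();
  for (ResultFile resultFile : result.getResultFilesList()) {
    FileObject file = resultFile.getFile();
    System.out.println("Result file: " + file.getName().getURI()
        + " (comment: " + resultFile.getComment() + ")");
  }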
Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.
The class PropertyOutput, method closeFile:
private boolean closeFile() {
  if (data.file == null) {
    return true;
  }
  boolean retval = false;
  try (OutputStream propsFile = KettleVFS.getOutputStream(data.file, false)) {
    data.pro.store(propsFile, environmentSubstitute(meta.getComment()));

    if (meta.isAddToResult()) {
      // Add this to the result file names...
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, data.file, getTransMeta().getName(), getStepname());
      resultFile.setComment(BaseMessages.getString(PKG, "PropertyOutput.Log.FileAddedResult"));
      addResultFile(resultFile);
    }

    data.KeySet.clear();
    retval = true;
  } catch (Exception e) {
    logError("Exception trying to close file [" + data.file.getName() + "]! :" + e.toString());
    setErrors(1);
  } finally {
    if (data.file != null) {
      try {
        data.file.close();
        data.file = null;
      } catch (Exception e) {
        /* Ignore */
        logDetailed("Exception trying to close file [" + data.file.getName() + "]! :", e);
      }
    }
    if (data.pro != null) {
      data.pro = null;
    }
  }
  return retval;
}
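Because every step above registers its files as ResultFile.FILE_TYPE_GENERAL, a downstream consumer can filter on that type. A short sketch under the assumption that the consumer is handed the previous Result (here called previousResult) and that ResultFile exposes its type via getType():

  // Keep only the general-purpose files; FILE_TYPE_GENERAL is the type used by the steps above
  List<ResultFile> generalFiles = new ArrayList<ResultFile>();
  for (ResultFile resultFile : previousResult.getResultFilesList()) {
    if (resultFile.getType() == ResultFile.FILE_TYPE_GENERAL) {
      generalFiles.add(resultFile);
    }
  }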
Use of org.pentaho.di.core.ResultFile in project pentaho-metaverse by pentaho.
The class JobEntryExternalResourceListener, method afterExecution:
@Override
public void afterExecution(Job job, JobEntryCopy jobEntryCopy, JobEntryInterface jobEntryInterface, Result result) {
  IExecutionProfile executionProfile = JobLineageHolderMap.getInstance().getLineageHolder(job).getExecutionProfile();
  IExecutionData executionData = executionProfile.getExecutionData();

  // Get input files (aka resource dependencies)
  JobMeta jobMeta = job.getJobMeta();
  if (jobMeta != null) {
    List<ResourceReference> dependencies = jobEntryInterface.getResourceDependencies(jobMeta);
    if (dependencies != null) {
      for (ResourceReference ref : dependencies) {
        List<ResourceEntry> resourceEntries = ref.getEntries();
        if (resourceEntries != null) {
          for (ResourceEntry entry : resourceEntries) {
            executionData.addExternalResource(jobEntryInterface.getName(), ExternalResourceInfoFactory.createResource(entry, true));
          }
        }
      }
    }
  }

  // Get output files (aka result files)
  if (result != null) {
    List<ResultFile> resultFiles = result.getResultFilesList();
    if (resultFiles != null) {
      for (ResultFile resultFile : resultFiles) {
        executionData.addExternalResource(jobEntryInterface.getName(), ExternalResourceInfoFactory.createFileResource(resultFile.getFile(), false));
      }
    }
  }
}
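The listener above implements Kettle's JobEntryListener contract, so it only does anything once it is registered on a running Job. A wiring sketch, assuming an already constructed Job and IJobEntryExternalResourceConsumer; the variable names and the registration call via Job.addJobEntryListener are assumptions, not taken from this page:

  // Attach the metaverse listener so each entry's dependencies and result files are recorded
  JobEntryExternalResourceListener listener = new JobEntryExternalResourceListener(consumer);
  job.addJobEntryListener(listener);
  job.start();
  job.waitUntilFinished();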
Use of org.pentaho.di.core.ResultFile in project pentaho-metaverse by pentaho.
The class JobEntryExternalResourceListenerTest, method testBeforeAfterExecution:
@Test
public void testBeforeAfterExecution() throws Exception {
  IJobEntryExternalResourceConsumer consumer = mock(IJobEntryExternalResourceConsumer.class);

  JobMeta mockJobMeta = mock(JobMeta.class);

  Job job = mock(Job.class);
  when(job.getJobMeta()).thenReturn(mockJobMeta);

  JobEntryInterface jobEntryInterface = mock(JobEntryInterface.class);
  when(jobEntryInterface.getParentJob()).thenReturn(job);
  when(jobEntryInterface.getResourceDependencies(mockJobMeta)).thenReturn(
    Collections.singletonList(new ResourceReference(null,
      Collections.singletonList(new ResourceEntry("myFile", ResourceEntry.ResourceType.FILE)))));

  JobEntryCopy jobEntryCopy = mock(JobEntryCopy.class);

  IExecutionProfile executionProfile = mock(IExecutionProfile.class);
  IExecutionData executionData = mock(IExecutionData.class);
  when(executionProfile.getExecutionData()).thenReturn(executionData);
  JobLineageHolderMap.getInstance().getLineageHolder(job).setExecutionProfile(executionProfile);

  JobEntryExternalResourceListener listener = new JobEntryExternalResourceListener(consumer);

  FileObject mockFile = mock(FileObject.class);
  FileName mockFilename = mock(FileName.class);
  when(mockFilename.getPath()).thenReturn("/path/to/file");
  when(mockFile.getName()).thenReturn(mockFilename);

  ResultFile resultFile = mock(ResultFile.class);
  when(resultFile.getFile()).thenReturn(mockFile);
  List<ResultFile> resultFiles = Collections.singletonList(resultFile);

  Result result = mock(Result.class);
  when(result.getResultFilesList()).thenReturn(resultFiles);

  // Call beforeExecution for coverage
  listener.beforeExecution(null, null, null);

  listener.afterExecution(job, jobEntryCopy, jobEntryInterface, result);
}
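The test exercises both callbacks but does not assert on what was recorded. A possible follow-up verification, assuming the usual Mockito static imports (verify, times, any) and that afterExecution records one external resource for the declared dependency and one for the result file:

  // Two external resources are expected: the "myFile" dependency and the mocked result file
  verify(executionData, times(2)).addExternalResource(any(), any());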