Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.
In the class JobExecutor, the method executeJob:
private void executeJob() throws KettleException {
  //
  if (data.groupBuffer.isEmpty()) {
    return;
  }
  data.groupTimeStart = System.currentTimeMillis();
  if (first) {
    discardLogLines(data);
  }
  data.executorJob = createJob(meta.getRepository(), data.executorJobMeta, this);
  data.executorJob.shareVariablesWith(data.executorJobMeta);
  data.executorJob.setParentTrans(getTrans());
  data.executorJob.setLogLevel(getLogLevel());
  data.executorJob.setInternalKettleVariables(this);
  data.executorJob.copyParametersFrom(data.executorJobMeta);
  data.executorJob.setArguments(getTrans().getArguments());
  // data.executorJob.setInteractive(); TODO: pass interactivity through the transformation too for drill-down.
  // TODO
  /*
   * if (data.executorJob.isInteractive()) {
   *   data.executorJob.getJobEntryListeners().addAll(parentJob.getJobEntryListeners());
   * }
   */
  // Pass the accumulated rows
  //
  data.executorJob.setSourceRows(data.groupBuffer);
  // Pass parameter values
  //
  passParametersToJob();
  // keep track for drill down in Spoon...
  //
  getTrans().getActiveSubjobs().put(getStepname(), data.executorJob);
  ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobStart.id, data.executorJob);
  data.executorJob.beginProcessing();
  Result result = new Result();
  //
  for (DelegationListener delegationListener : getTrans().getDelegationListeners()) {
    // TODO: copy some settings in the job execution configuration, not strictly needed
    // but the execution configuration information is useful in case of a job re-start on Carte
    //
    delegationListener.jobDelegationStarted(data.executorJob, new JobExecutionConfiguration());
  }
  //
  try {
    result = data.executorJob.execute(0, result);
  } catch (KettleException e) {
    log.logError("An error occurred executing the job: ", e);
    result.setResult(false);
    result.setNrErrors(1);
  } finally {
    try {
      ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobFinish.id, data.executorJob);
      data.executorJob.fireJobFinishListeners();
    } catch (KettleException e) {
      result.setNrErrors(1);
      result.setResult(false);
      log.logError(BaseMessages.getString(PKG, "JobExecutor.Log.ErrorExecJob", e.getMessage()), e);
    }
  }
  //
  if (meta.getExecutionResultTargetStepMeta() != null) {
    Object[] outputRow = RowDataUtil.allocateRowData(data.executionResultsOutputRowMeta.size());
    int idx = 0;
    if (!Utils.isEmpty(meta.getExecutionTimeField())) {
      outputRow[idx++] = Long.valueOf(System.currentTimeMillis() - data.groupTimeStart);
    }
    if (!Utils.isEmpty(meta.getExecutionResultField())) {
      outputRow[idx++] = Boolean.valueOf(result.getResult());
    }
    if (!Utils.isEmpty(meta.getExecutionNrErrorsField())) {
      outputRow[idx++] = Long.valueOf(result.getNrErrors());
    }
    if (!Utils.isEmpty(meta.getExecutionLinesReadField())) {
      outputRow[idx++] = Long.valueOf(result.getNrLinesRead());
    }
    if (!Utils.isEmpty(meta.getExecutionLinesWrittenField())) {
      outputRow[idx++] = Long.valueOf(result.getNrLinesWritten());
    }
    if (!Utils.isEmpty(meta.getExecutionLinesInputField())) {
      outputRow[idx++] = Long.valueOf(result.getNrLinesInput());
    }
    if (!Utils.isEmpty(meta.getExecutionLinesOutputField())) {
      outputRow[idx++] = Long.valueOf(result.getNrLinesOutput());
    }
    if (!Utils.isEmpty(meta.getExecutionLinesRejectedField())) {
      outputRow[idx++] = Long.valueOf(result.getNrLinesRejected());
    }
    if (!Utils.isEmpty(meta.getExecutionLinesUpdatedField())) {
      outputRow[idx++] = Long.valueOf(result.getNrLinesUpdated());
    }
    if (!Utils.isEmpty(meta.getExecutionLinesDeletedField())) {
      outputRow[idx++] = Long.valueOf(result.getNrLinesDeleted());
    }
    if (!Utils.isEmpty(meta.getExecutionFilesRetrievedField())) {
      outputRow[idx++] = Long.valueOf(result.getNrFilesRetrieved());
    }
    if (!Utils.isEmpty(meta.getExecutionExitStatusField())) {
      outputRow[idx++] = Long.valueOf(result.getExitStatus());
    }
    if (!Utils.isEmpty(meta.getExecutionLogTextField())) {
      String channelId = data.executorJob.getLogChannelId();
      String logText = KettleLogStore.getAppender().getBuffer(channelId, false).toString();
      outputRow[idx++] = logText;
    }
    if (!Utils.isEmpty(meta.getExecutionLogChannelIdField())) {
      outputRow[idx++] = data.executorJob.getLogChannelId();
    }
    putRowTo(data.executionResultsOutputRowMeta, outputRow, data.executionResultRowSet);
  }
  //
  if (meta.getResultRowsTargetStepMeta() != null && result.getRows() != null) {
    for (RowMetaAndData row : result.getRows()) {
      Object[] targetRow = RowDataUtil.allocateRowData(data.resultRowsOutputRowMeta.size());
      for (int i = 0; i < meta.getResultRowsField().length; i++) {
        ValueMetaInterface valueMeta = row.getRowMeta().getValueMeta(i);
        if (valueMeta.getType() != meta.getResultRowsType()[i]) {
          throw new KettleException(BaseMessages.getString(PKG, "JobExecutor.IncorrectDataTypePassed",
            valueMeta.getTypeDesc(), ValueMetaFactory.getValueMetaName(meta.getResultRowsType()[i])));
        }
        targetRow[i] = row.getData()[i];
      }
      putRowTo(data.resultRowsOutputRowMeta, targetRow, data.resultRowsRowSet);
    }
  }
  if (meta.getResultFilesTargetStepMeta() != null && result.getResultFilesList() != null) {
    for (ResultFile resultFile : result.getResultFilesList()) {
      Object[] targetRow = RowDataUtil.allocateRowData(data.resultFilesOutputRowMeta.size());
      int idx = 0;
      targetRow[idx++] = resultFile.getFile().getName().toString();
      // TODO: time, origin, ...
      putRowTo(data.resultFilesOutputRowMeta, targetRow, data.resultFilesRowSet);
    }
  }
  data.groupBuffer.clear();
}
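The execution-results block above relies on a cumulative index: each field the user configured claims the next slot in the pre-allocated output row, so the row layout mirrors exactly the subset of enabled fields. A minimal, self-contained sketch of that pattern in plain Java, with no Kettle dependencies; the field names and values here are illustrative, not from the source:

import java.util.ArrayList;
import java.util.List;

public class SparseRowSketch {
  public static void main(String[] args) {
    // Pretend the user enabled two of three possible result fields.
    String timeField = "ExecutionTime";   // enabled
    String resultField = "";              // disabled (empty means "not configured")
    String errorsField = "NrErrors";      // enabled

    List<String> layout = new ArrayList<>();
    Object[] row = new Object[3];         // allocate for the maximum number of fields
    int idx = 0;
    if (!timeField.isEmpty()) {
      layout.add(timeField);
      row[idx++] = Long.valueOf(1234L);   // elapsed milliseconds
    }
    if (!resultField.isEmpty()) {
      layout.add(resultField);
      row[idx++] = Boolean.TRUE;
    }
    if (!errorsField.isEmpty()) {
      layout.add(errorsField);
      row[idx++] = Long.valueOf(0L);
    }
    // Only the first `idx` slots are meaningful; the row meta must be built with
    // the same conditionals so names and positions line up.
    for (int i = 0; i < idx; i++) {
      System.out.println(layout.get(i) + " = " + row[i]);
    }
  }
}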
Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.
In the class CubeInput, the method init:
public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
  meta = (CubeInputMeta) smi;
  data = (CubeInputData) sdi;
  if (super.init(smi, sdi)) {
    try {
      String filename = environmentSubstitute(meta.getFilename());
      // Add filename to result filenames ?
      if (meta.isAddResultFile()) {
        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL,
          KettleVFS.getFileObject(filename, getTransMeta()), getTransMeta().getName(), toString());
        resultFile.setComment("File was read by a Cube Input step");
        addResultFile(resultFile);
      }
      data.fis = KettleVFS.getInputStream(filename, this);
      data.zip = new GZIPInputStream(data.fis);
      data.dis = new DataInputStream(data.zip);
      try {
        data.meta = new RowMeta(data.dis);
        return true;
      } catch (KettleFileException kfe) {
        logError(BaseMessages.getString(PKG, "CubeInput.Log.UnableToReadMetadata"), kfe);
        return false;
      }
    } catch (Exception e) {
      logError(BaseMessages.getString(PKG, "CubeInput.Log.ErrorReadingFromDataCube"), e);
    }
  }
  return false;
}
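The init method layers a GZIPInputStream and a DataInputStream over the raw VFS stream, then reads the row metadata header before any data rows. A rough, Kettle-free sketch of the same layering; the file name and the header layout (a field count followed by field names) are invented for illustration, since the real header is whatever RowMeta's serialization defines:

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;

public class CubeReadSketch {
  public static void main(String[] args) throws IOException {
    // Same stream chain as CubeInput: raw file -> gunzip -> typed reads.
    try (DataInputStream dis = new DataInputStream(
        new GZIPInputStream(new FileInputStream("rows.cube.gz")))) {
      // Hypothetical header: a field count, then one name per field.
      int nrFields = dis.readInt();
      for (int i = 0; i < nrFields; i++) {
        System.out.println("field: " + dis.readUTF());
      }
    }
  }
}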
Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.
In the class CubeOutput, the method prepareFile:
private void prepareFile() throws KettleFileException {
  try {
    String filename = environmentSubstitute(meta.getFilename());
    if (meta.isAddToResultFiles()) {
      // Add this to the result file names...
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL,
        KettleVFS.getFileObject(filename, getTransMeta()), getTransMeta().getName(), getStepname());
      resultFile.setComment("This file was created with a cube file output step");
      addResultFile(resultFile);
    }
    data.fos = KettleVFS.getOutputStream(filename, getTransMeta(), false);
    data.zip = new GZIPOutputStream(data.fos);
    data.dos = new DataOutputStream(data.zip);
  } catch (Exception e) {
    throw new KettleFileException(e);
  }
}
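prepareFile builds the mirror-image chain for writing: raw output stream, gzip compression, then typed writes. One detail worth calling out is shutdown order: closing the outermost DataOutputStream cascades down and finishes the gzip stream, which writes the trailer the reader needs. A minimal sketch under the same invented header layout as the read example above:

import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;

public class CubeWriteSketch {
  public static void main(String[] args) throws IOException {
    // Closing dos also closes the GZIP stream, which emits the gzip trailer;
    // skipping that close would leave a truncated, unreadable archive.
    try (DataOutputStream dos = new DataOutputStream(
        new GZIPOutputStream(new FileOutputStream("rows.cube.gz")))) {
      dos.writeInt(2);        // hypothetical header: field count
      dos.writeUTF("id");
      dos.writeUTF("name");
    }
  }
}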
Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.
In the class JobEntryWaitForFile, the method execute:
public Result execute(Result previousResult, int nr) {
  Result result = previousResult;
  result.setResult(false);
  // starttime (in seconds)
  long timeStart = System.currentTimeMillis() / 1000;
  if (filename != null) {
    FileObject fileObject = null;
    String realFilename = getRealFilename();
    // Set Embedded NamedCluster MetaStore Provider Key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
      parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    try {
      fileObject = KettleVFS.getFileObject(realFilename, this);
      long iMaximumTimeout = Const.toInt(getRealMaximumTimeout(), Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0));
      long iCycleTime = Const.toInt(getRealCheckCycleTime(), Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 0));
      //
      if (iMaximumTimeout < 0) {
        iMaximumTimeout = Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0);
        if (log.isBasic()) {
          logBasic("Maximum timeout invalid, reset to " + iMaximumTimeout);
        }
      }
      if (iCycleTime < 1) {
        // If lower than 1 set to the default
        iCycleTime = Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 1);
        if (log.isBasic()) {
          logBasic("Check cycle time invalid, reset to " + iCycleTime);
        }
      }
      if (iMaximumTimeout == 0) {
        if (log.isBasic()) {
          logBasic("Waiting indefinitely for file [" + realFilename + "]");
        }
      } else {
        if (log.isBasic()) {
          logBasic("Waiting " + iMaximumTimeout + " seconds for file [" + realFilename + "]");
        }
      }
      boolean continueLoop = true;
      while (continueLoop && !parentJob.isStopped()) {
        fileObject = KettleVFS.getFileObject(realFilename, this);
        if (fileObject.exists()) {
          // file exists, we're happy to exit
          if (log.isBasic()) {
            logBasic("Detected file [" + realFilename + "] within timeout");
          }
          result.setResult(true);
          continueLoop = false;
          // add filename to result filenames
          if (addFilenameToResult && fileObject.getType() == FileType.FILE) {
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject, parentJob.getJobname(), toString());
            resultFile.setComment(BaseMessages.getString(PKG, "JobWaitForFile.FilenameAdded"));
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
          }
        } else {
          long now = System.currentTimeMillis() / 1000;
          if ((iMaximumTimeout > 0) && (now > (timeStart + iMaximumTimeout))) {
            continueLoop = false;
            // file doesn't exist after timeout, either true or false
            if (isSuccessOnTimeout()) {
              if (log.isBasic()) {
                logBasic("Didn't detect file [" + realFilename + "] before timeout, success");
              }
              result.setResult(true);
            } else {
              if (log.isBasic()) {
                logBasic("Didn't detect file [" + realFilename + "] before timeout, failure");
              }
              result.setResult(false);
            }
          }
          // sleep algorithm
          long sleepTime = 0;
          if (iMaximumTimeout == 0) {
            sleepTime = iCycleTime;
          } else {
            if ((now + iCycleTime) < (timeStart + iMaximumTimeout)) {
              sleepTime = iCycleTime;
            } else {
              sleepTime = iCycleTime - ((now + iCycleTime) - (timeStart + iMaximumTimeout));
            }
          }
          try {
            if (sleepTime > 0) {
              if (log.isDetailed()) {
                logDetailed("Sleeping " + sleepTime + " seconds before next check for file [" + realFilename + "]");
              }
              Thread.sleep(sleepTime * 1000);
            }
          } catch (InterruptedException e) {
            // something strange happened
            result.setResult(false);
            continueLoop = false;
          }
        }
      }
      if (!parentJob.isStopped() && fileObject.exists() && isFileSizeCheck()) {
        long oldSize = -1;
        long newSize = fileObject.getContent().getSize();
        if (log.isDetailed()) {
          logDetailed("File [" + realFilename + "] is " + newSize + " bytes long");
        }
        if (log.isBasic()) {
          logBasic("Waiting until file [" + realFilename + "] stops growing for " + iCycleTime + " seconds");
        }
        while (oldSize != newSize && !parentJob.isStopped()) {
          try {
            if (log.isDetailed()) {
              logDetailed("Sleeping " + iCycleTime + " seconds, waiting for file [" + realFilename + "] to stop growing");
            }
            Thread.sleep(iCycleTime * 1000);
          } catch (InterruptedException e) {
            // something strange happened
            result.setResult(false);
            continueLoop = false;
          }
          oldSize = newSize;
          newSize = fileObject.getContent().getSize();
          if (log.isDetailed()) {
            logDetailed("File [" + realFilename + "] is " + newSize + " bytes long");
          }
        }
        if (log.isBasic()) {
          logBasic("Stopped waiting for file [" + realFilename + "] to stop growing");
        }
      }
      if (parentJob.isStopped()) {
        result.setResult(false);
      }
    } catch (Exception e) {
      logBasic("Exception while waiting for file [" + realFilename + "] to stop growing", e);
    } finally {
      if (fileObject != null) {
        try {
          fileObject.close();
        } catch (Exception e) {
          // Ignore errors
        }
      }
    }
  } else {
    logError("No filename is defined.");
  }
  return result;
}
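The polling loop's sleep computation trims the final sleep so the entry wakes up at the timeout boundary instead of overshooting by up to one full cycle. Here is that arithmetic extracted into a standalone helper (the method and class names are mine, not Kettle's; the logic mirrors the source):

public class WaitSleepSketch {
  /**
   * Seconds to sleep before the next existence check.
   * A maximumTimeout of 0 means "wait forever", so always sleep a full cycle.
   */
  static long nextSleepSeconds(long now, long timeStart, long cycleTime, long maximumTimeout) {
    if (maximumTimeout == 0) {
      return cycleTime;
    }
    long deadline = timeStart + maximumTimeout;
    if (now + cycleTime < deadline) {
      return cycleTime;          // a full cycle still fits before the deadline
    }
    // Shrink the last sleep to land on the deadline; may be <= 0 if already past it,
    // which the caller guards against with a "sleepTime > 0" check.
    return cycleTime - ((now + cycleTime) - deadline);
  }

  public static void main(String[] args) {
    // Started at t=0s, checking every 10s with a 25s timeout:
    System.out.println(nextSleepSeconds(0, 0, 10, 25));   // 10 (full cycle)
    System.out.println(nextSleepSeconds(10, 0, 10, 25));  // 10 (full cycle)
    System.out.println(nextSleepSeconds(20, 0, 10, 25));  // 5  (trimmed to the deadline)
  }
}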
Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.
In the class JobEntryHTTP, the method execute:
/**
 * This method is synchronized because it is otherwise not thread safe: when several HTTP job entries run at
 * the same time (on an application server, for example), their System.setProperty() calls conflict, since
 * system properties are JVM-wide.
 */
@Override
public synchronized Result execute(Result previousResult, int nr) {
  Result result = previousResult;
  result.setResult(false);
  logBasic(BaseMessages.getString(PKG, "JobHTTP.StartJobEntry"));
  // Get previous result rows...
  List<RowMetaAndData> resultRows;
  String urlFieldnameToUse, uploadFieldnameToUse, destinationFieldnameToUse;
  if (Utils.isEmpty(urlFieldname)) {
    urlFieldnameToUse = URL_FIELDNAME;
  } else {
    urlFieldnameToUse = urlFieldname;
  }
  if (Utils.isEmpty(uploadFieldname)) {
    uploadFieldnameToUse = UPLOADFILE_FIELDNAME;
  } else {
    uploadFieldnameToUse = uploadFieldname;
  }
  if (Utils.isEmpty(destinationFieldname)) {
    destinationFieldnameToUse = TARGETFILE_FIELDNAME;
  } else {
    destinationFieldnameToUse = destinationFieldname;
  }
  if (runForEveryRow) {
    resultRows = previousResult.getRows();
    if (resultRows == null) {
      result.setNrErrors(1);
      logError(BaseMessages.getString(PKG, "JobHTTP.Error.UnableGetResultPrevious"));
      return result;
    }
  } else {
    resultRows = new ArrayList<RowMetaAndData>();
    RowMetaAndData row = new RowMetaAndData();
    row.addValue(new ValueMetaString(urlFieldnameToUse), environmentSubstitute(url));
    row.addValue(new ValueMetaString(uploadFieldnameToUse), environmentSubstitute(uploadFilename));
    row.addValue(new ValueMetaString(destinationFieldnameToUse), environmentSubstitute(targetFilename));
    resultRows.add(row);
  }
  URL server = null;
  String beforeProxyHost = System.getProperty("http.proxyHost");
  String beforeProxyPort = System.getProperty("http.proxyPort");
  String beforeNonProxyHosts = System.getProperty("http.nonProxyHosts");
  for (int i = 0; i < resultRows.size() && result.getNrErrors() == 0; i++) {
    RowMetaAndData row = resultRows.get(i);
    OutputStream outputFile = null;
    OutputStream uploadStream = null;
    BufferedInputStream fileStream = null;
    InputStream input = null;
    try {
      String urlToUse = environmentSubstitute(row.getString(urlFieldnameToUse, ""));
      String realUploadFile = environmentSubstitute(row.getString(uploadFieldnameToUse, ""));
      String realTargetFile = environmentSubstitute(row.getString(destinationFieldnameToUse, ""));
      logBasic(BaseMessages.getString(PKG, "JobHTTP.Log.ConnectingURL", urlToUse));
      if (!Utils.isEmpty(proxyHostname)) {
        System.setProperty("http.proxyHost", environmentSubstitute(proxyHostname));
        System.setProperty("http.proxyPort", environmentSubstitute(proxyPort));
        if (nonProxyHosts != null) {
          System.setProperty("http.nonProxyHosts", environmentSubstitute(nonProxyHosts));
        }
      }
      if (!Utils.isEmpty(username)) {
        Authenticator.setDefault(new Authenticator() {
          @Override
          protected PasswordAuthentication getPasswordAuthentication() {
            String realPassword = Encr.decryptPasswordOptionallyEncrypted(environmentSubstitute(password));
            return new PasswordAuthentication(environmentSubstitute(username),
              realPassword != null ? realPassword.toCharArray() : new char[] {});
          }
        });
      }
      if (dateTimeAdded) {
        SimpleDateFormat daf = new SimpleDateFormat();
        Date now = new Date();
        daf.applyPattern("yyyMMdd");
        realTargetFile += "_" + daf.format(now);
        daf.applyPattern("HHmmss");
        realTargetFile += "_" + daf.format(now);
        if (!Utils.isEmpty(targetFilenameExtension)) {
          realTargetFile += "." + environmentSubstitute(targetFilenameExtension);
        }
      }
      // Create the output File...
      outputFile = KettleVFS.getOutputStream(realTargetFile, this, fileAppended);
      // Get a stream for the specified URL
      server = new URL(urlToUse);
      URLConnection connection = server.openConnection();
      // if we have HTTP headers, add them
      if (!Utils.isEmpty(headerName)) {
        if (log.isDebug()) {
          log.logDebug(BaseMessages.getString(PKG, "JobHTTP.Log.HeadersProvided"));
        }
        for (int j = 0; j < headerName.length; j++) {
          if (!Utils.isEmpty(headerValue[j])) {
            connection.setRequestProperty(environmentSubstitute(headerName[j]), environmentSubstitute(headerValue[j]));
            if (log.isDebug()) {
              log.logDebug(BaseMessages.getString(PKG, "JobHTTP.Log.HeaderSet",
                environmentSubstitute(headerName[j]), environmentSubstitute(headerValue[j])));
            }
          }
        }
      }
      connection.setDoOutput(true);
      // See if we need to send a file over?
      if (!Utils.isEmpty(realUploadFile)) {
        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "JobHTTP.Log.SendingFile", realUploadFile));
        }
        // Grab an output stream to upload data to web server
        uploadStream = connection.getOutputStream();
        fileStream = new BufferedInputStream(new FileInputStream(new File(realUploadFile)));
        try {
          int c;
          while ((c = fileStream.read()) >= 0) {
            uploadStream.write(c);
          }
        } finally {
          // Close upload and file
          if (uploadStream != null) {
            uploadStream.close();
            uploadStream = null;
          }
          if (fileStream != null) {
            fileStream.close();
            fileStream = null;
          }
        }
        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "JobHTTP.Log.FinishedSendingFile"));
        }
      }
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobHTTP.Log.StartReadingReply"));
      }
      // Read the result from the server...
      input = connection.getInputStream();
      Date date = new Date(connection.getLastModified());
      logBasic(BaseMessages.getString(PKG, "JobHTTP.Log.ReplayInfo", connection.getContentType(), date));
      int oneChar;
      long bytesRead = 0L;
      while ((oneChar = input.read()) != -1) {
        outputFile.write(oneChar);
        bytesRead++;
      }
      logBasic(BaseMessages.getString(PKG, "JobHTTP.Log.FinisedWritingReply", bytesRead, realTargetFile));
      if (addfilenameresult) {
        // Add to the result files...
        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL,
          KettleVFS.getFileObject(realTargetFile, this), parentJob.getJobname(), toString());
        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
      }
      result.setResult(true);
    } catch (MalformedURLException e) {
      result.setNrErrors(1);
      logError(BaseMessages.getString(PKG, "JobHTTP.Error.NotValidURL", url, e.getMessage()));
      logError(Const.getStackTracker(e));
    } catch (IOException e) {
      result.setNrErrors(1);
      logError(BaseMessages.getString(PKG, "JobHTTP.Error.CanNotSaveHTTPResult", e.getMessage()));
      logError(Const.getStackTracker(e));
    } catch (Exception e) {
      result.setNrErrors(1);
      logError(BaseMessages.getString(PKG, "JobHTTP.Error.ErrorGettingFromHTTP", e.getMessage()));
      logError(Const.getStackTracker(e));
    } finally {
      // Close it all
      try {
        if (uploadStream != null) {
          // just to make sure
          uploadStream.close();
        }
        if (fileStream != null) {
          // just to make sure
          fileStream.close();
        }
        if (input != null) {
          input.close();
        }
        if (outputFile != null) {
          outputFile.close();
        }
      } catch (Exception e) {
        logError(BaseMessages.getString(PKG, "JobHTTP.Error.CanNotCloseStream", e.getMessage()));
        result.setNrErrors(1);
      }
      // Set the proxy settings back as they were on the system!
      System.setProperty("http.proxyHost", Const.NVL(beforeProxyHost, ""));
      System.setProperty("http.proxyPort", Const.NVL(beforeProxyPort, ""));
      System.setProperty("http.nonProxyHosts", Const.NVL(beforeNonProxyHosts, ""));
    }
  }
  return result;
}
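Because http.proxyHost and its siblings are JVM-wide, the entry snapshots them before the loop and restores them in the finally block; that is also why the whole method is synchronized. A minimal sketch of that save/mutate/restore pattern in isolation (the proxy values are invented placeholders):

public class ProxyScopeSketch {
  public static void main(String[] args) {
    // Snapshot the JVM-wide settings before touching them.
    String beforeHost = System.getProperty("http.proxyHost");
    String beforePort = System.getProperty("http.proxyPort");
    try {
      System.setProperty("http.proxyHost", "proxy.example.com"); // illustrative value
      System.setProperty("http.proxyPort", "8080");
      // ... perform the HTTP call here ...
    } finally {
      // Restore exactly what was there; null means "was unset", and
      // System.setProperty rejects null, so fall back to "" as the job entry does.
      System.setProperty("http.proxyHost", beforeHost != null ? beforeHost : "");
      System.setProperty("http.proxyPort", beforePort != null ? beforePort : "");
    }
  }
}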