Usage of org.pentaho.di.core.Result in the pentaho-kettle project (by Pentaho).
Class JobEntryTruncateTables, method execute:
/**
 * Truncates the configured tables, either taking table/schema names from the rows of the
 * previous job entry's result (when {@code argFromPrevious} is set) or from the statically
 * configured {@code arguments}/{@code schemaname} arrays.
 *
 * @param previousResult the result of the previous job entry; reused and updated in place
 * @param nr             the job entry number (unused here)
 * @return the updated {@link Result}: success when no truncation errors occurred;
 *         {@code nrErrors} and {@code nrLinesDeleted} reflect the per-table outcomes
 */
public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    List<RowMetaAndData> rows = result.getRows();
    RowMetaAndData resultRow = null;
    result.setResult(true);
    // Reset per-run counters/state (instance fields).
    nrErrors = 0;
    continueProcess = true;
    nrSuccess = 0;
    if (argFromPrevious) {
        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobEntryTruncateTables.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0))));
        }
        // FIX: also guard against a null row list — previously rows.size() threw an NPE
        // when the previous entry produced no rows at all.
        if (rows == null || rows.isEmpty()) {
            return result;
        }
    }
    if (connection != null) {
        Database db = new Database(this, connection);
        db.shareVariablesWith(this);
        try {
            db.connect(parentJob.getTransactionId(), null);
            if (argFromPrevious && rows != null) {
                // Iterate over the previous result rows until stopped or a failure breaks processing.
                for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped() && continueProcess; iteration++) {
                    resultRow = rows.get(iteration);
                    // Get values from previous result: field 0 = table name, field 1 = schema name.
                    String tablename_previous = resultRow.getString(0, null);
                    String schemaname_previous = resultRow.getString(1, null);
                    if (!Utils.isEmpty(tablename_previous)) {
                        if (log.isDetailed()) {
                            logDetailed(BaseMessages.getString(PKG, "JobEntryTruncateTables.ProcessingRow", tablename_previous, schemaname_previous));
                        }
                        // let's truncate table
                        if (truncateTables(tablename_previous, schemaname_previous, db)) {
                            updateSuccess();
                        } else {
                            updateErrors();
                        }
                    } else {
                        logError(BaseMessages.getString(PKG, "JobEntryTruncateTables.RowEmpty"));
                    }
                }
            } else if (arguments != null) {
                // Statically configured table/schema pairs; names may contain variables.
                for (int i = 0; i < arguments.length && !parentJob.isStopped() && continueProcess; i++) {
                    String realTablename = environmentSubstitute(arguments[i]);
                    String realSchemaname = environmentSubstitute(schemaname[i]);
                    if (!Utils.isEmpty(realTablename)) {
                        if (log.isDetailed()) {
                            logDetailed(BaseMessages.getString(PKG, "JobEntryTruncateTables.ProcessingArg", arguments[i], schemaname[i]));
                        }
                        // let's truncate table
                        if (truncateTables(realTablename, realSchemaname, db)) {
                            updateSuccess();
                        } else {
                            updateErrors();
                        }
                    } else {
                        logError(BaseMessages.getString(PKG, "JobEntryTruncateTables.ArgEmpty", arguments[i], schemaname[i]));
                    }
                }
            }
        } catch (Exception dbe) {
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "JobEntryTruncateTables.Error.RunningEntry", dbe.getMessage()));
        } finally {
            // Always release the database connection.
            if (db != null) {
                db.disconnect();
            }
        }
    } else {
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobEntryTruncateTables.NoDbConnection"));
    }
    result.setNrErrors(nrErrors);
    result.setNrLinesDeleted(nrSuccess);
    result.setResult(nrErrors == 0);
    return result;
}
Usage of org.pentaho.di.core.Result in the pentaho-kettle project (by Pentaho).
Class JobEntryUnZip, method execute:
/**
 * Unzips one or more archives. Source file names and source wildcards come either from the
 * rows of the previous entry's result (when {@code isfromprevious} is set: field 0 = file
 * name, field 1 = wildcard) or from the statically configured {@code zipFilename}.
 * Validates the target folder (optionally creating it) and, when the after-unzip action
 * is "move" ({@code afterunzip == 2}), the move-to folder as well.
 *
 * @param previousResult the result of the previous job entry; reused and updated in place
 * @param nr             the job entry number (unused here)
 * @return the updated {@link Result}; starts as failure with one error and is flipped to
 *         success only when {@code getSuccessStatus()} holds at the end
 */
public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    // Pessimistic defaults: cleared at the end only if the success condition holds.
    result.setResult(false);
    result.setNrErrors(1);
    List<RowMetaAndData> rows = result.getRows();
    RowMetaAndData resultRow = null;
    String realFilenameSource = environmentSubstitute(zipFilename);
    String realWildcardSource = environmentSubstitute(wildcardSource);
    String realWildcard = environmentSubstitute(wildcard);
    String realWildcardExclude = environmentSubstitute(wildcardexclude);
    String realTargetdirectory = environmentSubstitute(sourcedirectory);
    String realMovetodirectory = environmentSubstitute(movetodirectory);
    // Set Embedded NamedCluter MetatStore Provider Key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
        parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    limitFiles = Const.toInt(environmentSubstitute(getLimit()), 10);
    // Reset per-run counters/state (instance fields).
    NrErrors = 0;
    NrSuccess = 0;
    successConditionBroken = false;
    successConditionBrokenExit = false;
    if (isfromprevious) {
        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobUnZip.Log.ArgFromPrevious.Found", (rows != null ? rows.size() : 0) + ""));
        }
        // FIX: also guard against a null row list — previously rows.size() threw an NPE
        // when the previous entry produced no rows at all.
        if (rows == null || rows.isEmpty()) {
            return result;
        }
    } else {
        if (Utils.isEmpty(zipFilename)) {
            // Zip file/folder is missing
            logError(BaseMessages.getString(PKG, "JobUnZip.No_ZipFile_Defined.Label"));
            return result;
        }
    }
    FileObject fileObject = null;
    FileObject targetdir = null;
    FileObject movetodir = null;
    try {
        if (Utils.isEmpty(realTargetdirectory)) {
            logError(BaseMessages.getString(PKG, "JobUnZip.Error.TargetFolderMissing"));
            return result;
        }
        boolean exitjobentry = false;
        // Target folder: must exist (or be creatable) and be a folder.
        targetdir = KettleVFS.getFileObject(realTargetdirectory, this);
        if (!targetdir.exists()) {
            if (createfolder) {
                targetdir.createFolder();
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobUnZip.Log.TargetFolderCreated", realTargetdirectory));
                }
            } else {
                log.logError(BaseMessages.getString(PKG, "JobUnZip.TargetFolderNotFound.Label"));
                exitjobentry = true;
            }
        } else {
            if (!(targetdir.getType() == FileType.FOLDER)) {
                log.logError(BaseMessages.getString(PKG, "JobUnZip.TargetFolderNotFolder.Label", realTargetdirectory));
                exitjobentry = true;
            } else {
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobUnZip.TargetFolderExists.Label", realTargetdirectory));
                }
            }
        }
        // movetodirectory must be provided when the after-unzip action is "move" (2).
        if (afterunzip == 2) {
            if (Utils.isEmpty(movetodirectory)) {
                log.logError(BaseMessages.getString(PKG, "JobUnZip.MoveToDirectoryEmpty.Label"));
                exitjobentry = true;
            } else {
                movetodir = KettleVFS.getFileObject(realMovetodirectory, this);
                if (!(movetodir.exists()) || !(movetodir.getType() == FileType.FOLDER)) {
                    if (createMoveToDirectory) {
                        movetodir.createFolder();
                        if (log.isDetailed()) {
                            logDetailed(BaseMessages.getString(PKG, "JobUnZip.Log.MoveToFolderCreated", realMovetodirectory));
                        }
                    } else {
                        log.logError(BaseMessages.getString(PKG, "JobUnZip.MoveToDirectoryNotExists.Label"));
                        exitjobentry = true;
                    }
                }
            }
        }
        // We found errors...now exit
        if (exitjobentry) {
            return result;
        }
        if (isfromprevious) {
            if (rows != null) {
                // Copy the input row to the (command line) arguments
                for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
                    if (successConditionBroken) {
                        // Log the broken-condition error only once, then bail out.
                        if (!successConditionBrokenExit) {
                            logError(BaseMessages.getString(PKG, "JobUnZip.Error.SuccessConditionbroken", "" + NrErrors));
                            successConditionBrokenExit = true;
                        }
                        result.setNrErrors(NrErrors);
                        return result;
                    }
                    resultRow = rows.get(iteration);
                    // Get sourcefile/folder and wildcard
                    realFilenameSource = resultRow.getString(0, null);
                    realWildcardSource = resultRow.getString(1, null);
                    fileObject = KettleVFS.getFileObject(realFilenameSource, this);
                    if (fileObject.exists()) {
                        processOneFile(result, parentJob, fileObject, realTargetdirectory, realWildcard, realWildcardExclude, movetodir, realMovetodirectory, realWildcardSource);
                    } else {
                        updateErrors();
                        logError(BaseMessages.getString(PKG, "JobUnZip.Error.CanNotFindFile", realFilenameSource));
                    }
                }
            }
        } else {
            fileObject = KettleVFS.getFileObject(realFilenameSource, this);
            if (!fileObject.exists()) {
                log.logError(BaseMessages.getString(PKG, "JobUnZip.ZipFile.NotExists.Label", realFilenameSource));
                return result;
            }
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobUnZip.Zip_FileExists.Label", realFilenameSource));
            }
            if (Utils.isEmpty(sourcedirectory)) {
                log.logError(BaseMessages.getString(PKG, "JobUnZip.SourceFolderNotFound.Label"));
                return result;
            }
            processOneFile(result, parentJob, fileObject, realTargetdirectory, realWildcard, realWildcardExclude, movetodir, realMovetodirectory, realWildcardSource);
        }
    } catch (Exception e) {
        log.logError(BaseMessages.getString(PKG, "JobUnZip.ErrorUnzip.Label", realFilenameSource, e.getMessage()));
        updateErrors();
    } finally {
        // Best-effort close of all VFS handles; close failures are deliberately ignored.
        if (fileObject != null) {
            try {
                fileObject.close();
            } catch (IOException ex) {
                /* Ignore */
            }
        }
        if (targetdir != null) {
            try {
                targetdir.close();
            } catch (IOException ex) {
                /* Ignore */
            }
        }
        if (movetodir != null) {
            try {
                movetodir.close();
            } catch (IOException ex) {
                /* Ignore */
            }
        }
    }
    result.setNrErrors(NrErrors);
    result.setNrLinesWritten(NrSuccess);
    if (getSuccessStatus()) {
        result.setResult(true);
    }
    displayResults();
    return result;
}
Usage of org.pentaho.di.core.Result in the pentaho-kettle project (by Pentaho).
Class JobEntryWaitForFile, method execute:
/**
 * Polls for the existence of a file, sleeping between checks, until the file appears,
 * the maximum timeout elapses, or the parent job is stopped. Optionally adds the found
 * file to the result files and optionally waits further until the file stops growing.
 *
 * @param previousResult the result of the previous job entry; reused and updated in place
 * @param nr             the job entry number (unused here)
 * @return the updated {@link Result}: success when the file was detected in time, or —
 *         on timeout — whatever {@code isSuccessOnTimeout()} dictates
 */
public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    result.setResult(false);
    // starttime (in seconds)
    long timeStart = System.currentTimeMillis() / 1000;
    if (filename != null) {
        FileObject fileObject = null;
        String realFilename = getRealFilename();
        // Set Embedded NamedCluter MetatStore Provider Key so that it can be passed to VFS
        if (parentJobMeta.getNamedClusterEmbedManager() != null) {
            parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
        }
        try {
            fileObject = KettleVFS.getFileObject(realFilename, this);
            long iMaximumTimeout = Const.toInt(getRealMaximumTimeout(), Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0));
            long iCycleTime = Const.toInt(getRealCheckCycleTime(), Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 0));
            // Sanitize the configured timeout/cycle values.
            if (iMaximumTimeout < 0) {
                iMaximumTimeout = Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0);
                if (log.isBasic()) {
                    logBasic("Maximum timeout invalid, reset to " + iMaximumTimeout);
                }
            }
            if (iCycleTime < 1) {
                // If lower than 1 set to the default
                iCycleTime = Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 1);
                if (log.isBasic()) {
                    logBasic("Check cycle time invalid, reset to " + iCycleTime);
                }
            }
            // A maximum timeout of 0 means "wait forever".
            if (iMaximumTimeout == 0) {
                if (log.isBasic()) {
                    logBasic("Waiting indefinitely for file [" + realFilename + "]");
                }
            } else {
                if (log.isBasic()) {
                    logBasic("Waiting " + iMaximumTimeout + " seconds for file [" + realFilename + "]");
                }
            }
            boolean continueLoop = true;
            while (continueLoop && !parentJob.isStopped()) {
                // Re-resolve each iteration so a freshly-created file is seen.
                fileObject = KettleVFS.getFileObject(realFilename, this);
                if (fileObject.exists()) {
                    // file exists, we're happy to exit
                    if (log.isBasic()) {
                        logBasic("Detected file [" + realFilename + "] within timeout");
                    }
                    result.setResult(true);
                    continueLoop = false;
                    // add filename to result filenames
                    if (addFilenameToResult && fileObject.getType() == FileType.FILE) {
                        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject, parentJob.getJobname(), toString());
                        resultFile.setComment(BaseMessages.getString(PKG, "JobWaitForFile.FilenameAdded"));
                        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                    }
                } else {
                    long now = System.currentTimeMillis() / 1000;
                    if ((iMaximumTimeout > 0) && (now > (timeStart + iMaximumTimeout))) {
                        continueLoop = false;
                        // file doesn't exist after timeout, either true or false
                        if (isSuccessOnTimeout()) {
                            if (log.isBasic()) {
                                logBasic("Didn't detect file [" + realFilename + "] before timeout, success");
                            }
                            result.setResult(true);
                        } else {
                            if (log.isBasic()) {
                                logBasic("Didn't detect file [" + realFilename + "] before timeout, failure");
                            }
                            result.setResult(false);
                        }
                    }
                    // sleep algorithm: sleep a full cycle, or only up to the deadline.
                    long sleepTime = 0;
                    if (iMaximumTimeout == 0) {
                        sleepTime = iCycleTime;
                    } else {
                        if ((now + iCycleTime) < (timeStart + iMaximumTimeout)) {
                            sleepTime = iCycleTime;
                        } else {
                            sleepTime = iCycleTime - ((now + iCycleTime) - (timeStart + iMaximumTimeout));
                        }
                    }
                    try {
                        if (sleepTime > 0) {
                            if (log.isDetailed()) {
                                logDetailed("Sleeping " + sleepTime + " seconds before next check for file [" + realFilename + "]");
                            }
                            Thread.sleep(sleepTime * 1000);
                        }
                    } catch (InterruptedException e) {
                        // FIX: restore the interrupted status instead of silently swallowing it,
                        // so callers up the stack can still observe the interruption.
                        Thread.currentThread().interrupt();
                        result.setResult(false);
                        continueLoop = false;
                    }
                }
            }
            // Optionally wait until the file size is stable between two consecutive checks.
            if (!parentJob.isStopped() && fileObject.exists() && isFileSizeCheck()) {
                long oldSize = -1;
                long newSize = fileObject.getContent().getSize();
                if (log.isDetailed()) {
                    logDetailed("File [" + realFilename + "] is " + newSize + " bytes long");
                }
                if (log.isBasic()) {
                    logBasic("Waiting until file [" + realFilename + "] stops growing for " + iCycleTime + " seconds");
                }
                while (oldSize != newSize && !parentJob.isStopped()) {
                    try {
                        if (log.isDetailed()) {
                            logDetailed("Sleeping " + iCycleTime + " seconds, waiting for file [" + realFilename + "] to stop growing");
                        }
                        Thread.sleep(iCycleTime * 1000);
                    } catch (InterruptedException e) {
                        // FIX: restore the interrupted status instead of silently swallowing it.
                        Thread.currentThread().interrupt();
                        result.setResult(false);
                        continueLoop = false;
                    }
                    oldSize = newSize;
                    newSize = fileObject.getContent().getSize();
                    if (log.isDetailed()) {
                        logDetailed("File [" + realFilename + "] is " + newSize + " bytes long");
                    }
                }
                if (log.isBasic()) {
                    logBasic("Stopped waiting for file [" + realFilename + "] to stop growing");
                }
            }
            if (parentJob.isStopped()) {
                result.setResult(false);
            }
        } catch (Exception e) {
            logBasic("Exception while waiting for file [" + realFilename + "] to stop growing", e);
        } finally {
            if (fileObject != null) {
                try {
                    fileObject.close();
                } catch (Exception e) {
                    // Ignore errors
                }
            }
        }
    } else {
        logError("No filename is defined.");
    }
    return result;
}
Usage of org.pentaho.di.core.Result in the pentaho-kettle project (by Pentaho).
Class JobEntryWaitForSQL, method execute:
/**
 * Polls a database (via either a custom SQL statement or a table row count) until the
 * expected data condition is met, the maximum timeout elapses, or the parent job is
 * stopped. Sleeps between checks.
 *
 * @param previousResult the result of the previous job entry; reused and updated in place
 * @param nr             the job entry number (unused here)
 * @return the updated {@link Result}: success when the SQL condition was met in time, or —
 *         on timeout — whatever {@code isSuccessOnTimeout()} dictates
 */
@Override
public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    // Pessimistic defaults; the error count is cleared at the end on success (PDI-15437).
    result.setResult(false);
    result.setNrErrors(1);
    String realCustomSQL = null;
    String realTablename = environmentSubstitute(tablename);
    String realSchemaname = environmentSubstitute(schemaname);
    if (connection == null) {
        logError(BaseMessages.getString(PKG, "JobEntryWaitForSQL.NoDbConnection"));
        return result;
    }
    if (iscustomSQL) {
        // clear result list rows
        if (isClearResultList) {
            result.getRows().clear();
        }
        realCustomSQL = customSQL;
        if (isUseVars) {
            realCustomSQL = environmentSubstitute(realCustomSQL);
        }
        if (log.isDebug()) {
            logDebug(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.EnteredCustomSQL", realCustomSQL));
        }
        if (Utils.isEmpty(realCustomSQL)) {
            logError(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Error.NoCustomSQL"));
            return result;
        }
    } else {
        if (Utils.isEmpty(realTablename)) {
            logError(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Error.NoTableName"));
            return result;
        }
    }
    try {
        // check connection
        // connect and disconnect
        checkConnection();
        // starttime (in seconds)
        long timeStart = System.currentTimeMillis() / 1000;
        int nrRowsLimit = Const.toInt(environmentSubstitute(rowsCountValue), 0);
        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.nrRowsLimit", "" + nrRowsLimit));
        }
        long iMaximumTimeout = Const.toInt(environmentSubstitute(maximumTimeout), Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0));
        long iCycleTime = Const.toInt(environmentSubstitute(checkCycleTime), Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 0));
        // Sanitize the configured timeout/cycle values.
        if (iMaximumTimeout < 0) {
            iMaximumTimeout = Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0);
            logBasic("Maximum timeout invalid, reset to " + iMaximumTimeout);
        }
        if (iCycleTime < 1) {
            // If lower than 1 set to the default
            iCycleTime = Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 1);
            logBasic("Check cycle time invalid, reset to " + iCycleTime);
        }
        // A maximum timeout of 0 means "wait forever".
        if (iMaximumTimeout == 0) {
            logBasic("Waiting indefinitely for SQL data");
        } else {
            logBasic("Waiting " + iMaximumTimeout + " seconds for SQL data");
        }
        boolean continueLoop = true;
        while (continueLoop && !parentJob.isStopped()) {
            if (SQLDataOK(result, nrRowsLimit, realSchemaname, realTablename, realCustomSQL)) {
                // SQL data exists, we're happy to exit
                logBasic("Detected SQL data within timeout");
                result.setResult(true);
                continueLoop = false;
            } else {
                long now = System.currentTimeMillis() / 1000;
                if ((iMaximumTimeout > 0) && (now > (timeStart + iMaximumTimeout))) {
                    continueLoop = false;
                    // SQL data doesn't exist after timeout, either true or false
                    if (isSuccessOnTimeout()) {
                        logBasic("Didn't detect SQL data before timeout, success");
                        result.setResult(true);
                    } else {
                        logBasic("Didn't detect SQL data before timeout, failure");
                        result.setResult(false);
                    }
                }
                // sleep algorithm: sleep a full cycle, or only up to the deadline.
                long sleepTime = 0;
                if (iMaximumTimeout == 0) {
                    sleepTime = iCycleTime;
                } else {
                    if ((now + iCycleTime) < (timeStart + iMaximumTimeout)) {
                        sleepTime = iCycleTime;
                    } else {
                        sleepTime = iCycleTime - ((now + iCycleTime) - (timeStart + iMaximumTimeout));
                    }
                }
                try {
                    if (sleepTime > 0) {
                        if (log.isDetailed()) {
                            logDetailed("Sleeping " + sleepTime + " seconds before next check for SQL data");
                        }
                        Thread.sleep(sleepTime * 1000);
                    }
                } catch (InterruptedException e) {
                    // FIX: restore the interrupted status instead of silently swallowing it,
                    // so callers up the stack can still observe the interruption.
                    Thread.currentThread().interrupt();
                    result.setResult(false);
                    continueLoop = false;
                }
            }
        }
    } catch (Exception e) {
        logBasic("Exception while waiting for SQL data: " + e.getMessage());
    }
    if (result.getResult()) {
        // Remove error count set at the beginning of the method
        // PDI-15437
        result.setNrErrors(0);
    }
    return result;
}
Usage of org.pentaho.di.core.Result in the pentaho-kettle project (by Pentaho).
Class JobEntryWebServiceAvailable, method execute:
/**
 * Checks whether the configured web service URL is reachable by opening a connection
 * and requesting its input stream, honoring the configured connect/read timeouts.
 *
 * @param previousResult the result of the previous job entry; reused and updated in place
 * @param nr             the job entry number (unused here)
 * @return the updated {@link Result}: success when the URL yielded an input stream,
 *         failure (with one error and a log text) otherwise
 */
public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    result.setResult(false);
    String resolvedUrl = environmentSubstitute(getURL());
    // Guard clause: no URL configured means immediate failure.
    if (Utils.isEmpty(resolvedUrl)) {
        result.setNrErrors(1);
        String message = BaseMessages.getString(PKG, "JobEntryWebServiceAvailable.ERROR_0005_No_URL_Defined");
        logError(message);
        result.setLogText(message);
        return result;
    }
    int connectTimeoutMs = Const.toInt(environmentSubstitute(getConnectTimeOut()), 0);
    int readTimeoutMs = Const.toInt(environmentSubstitute(getReadTimeOut()), 0);
    InputStream responseStream = null;
    try {
        URLConnection connection = new URL(resolvedUrl).openConnection();
        connection.setConnectTimeout(connectTimeoutMs);
        connection.setReadTimeout(readTimeoutMs);
        responseStream = connection.getInputStream();
        // Web service is available
        result.setResult(true);
    } catch (Exception e) {
        result.setNrErrors(1);
        String message = BaseMessages.getString(PKG, "JobEntryWebServiceAvailable.ERROR_0004_Exception", resolvedUrl, e.toString());
        logError(message);
        result.setLogText(message);
    } finally {
        // Best-effort close; failures here are deliberately ignored.
        if (responseStream != null) {
            try {
                responseStream.close();
            } catch (Exception ignored) {
                /* Ignore */
            }
        }
    }
    return result;
}
Aggregations