Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class Job, method beginProcessing:
/**
 * Handle logging at the start of the job.
 *
 * @return true if it went OK.
 *
 * @throws KettleException
 */
public boolean beginProcessing() throws KettleException {
  currentDate = new Date();
  logDate = new Date();
  startDate = Const.MIN_DATE;
  endDate = currentDate;
  resetErrors();

  final JobLogTable jobLogTable = jobMeta.getJobLogTable();
  int intervalInSeconds = Const.toInt(environmentSubstitute(jobLogTable.getLogInterval()), -1);

  if (jobLogTable.isDefined()) {
    DatabaseMeta logcon = jobMeta.getJobLogTable().getDatabaseMeta();
    String schemaName = environmentSubstitute(jobMeta.getJobLogTable().getActualSchemaName());
    String tableName = environmentSubstitute(jobMeta.getJobLogTable().getActualTableName());
    String schemaAndTable =
      jobMeta.getJobLogTable().getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName);
    Database ldb = new Database(this, logcon);
    ldb.shareVariablesWith(this);
    ldb.connect();
    ldb.setCommit(logCommitSize);
    try {
      // See if we have to add a batch id...
      Long id_batch = 1L;
      if (jobMeta.getJobLogTable().isBatchIdUsed()) {
        id_batch = logcon.getNextBatchId(ldb, schemaName, tableName, jobLogTable.getKeyField().getFieldName());
        setBatchId(id_batch.longValue());
        if (getPassedBatchId() <= 0) {
          setPassedBatchId(id_batch.longValue());
        }
      }

      // Use the end date of the last logged run as the new start date...
      Object[] lastr = ldb.getLastLogDate(schemaAndTable, jobMeta.getName(), true, LogStatus.END);
      if (!Utils.isEmpty(lastr)) {
        Date last;
        try {
          last = ldb.getReturnRowMeta().getDate(lastr, 0);
        } catch (KettleValueException e) {
          throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.ConversionError", "" + tableName), e);
        }
        if (last != null) {
          startDate = last;
        }
      }

      depDate = currentDate;

      ldb.writeLogRecord(jobMeta.getJobLogTable(), LogStatus.START, this, null);
      if (!ldb.isAutoCommit()) {
        ldb.commitLog(true, jobMeta.getJobLogTable());
      }
      ldb.disconnect();

      // If an interval is specified, write a log record on a timer until the job finishes...
      if (intervalInSeconds > 0) {
        final Timer timer = new Timer(getName() + " - interval logging timer");
        TimerTask timerTask = new TimerTask() {
          @Override
          public void run() {
            try {
              endProcessing();
            } catch (Exception e) {
              log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformIntervalLogging"), e);
              // Also stop the show...
              errors.incrementAndGet();
              stopAll();
            }
          }
        };
        timer.schedule(timerTask, intervalInSeconds * 1000, intervalInSeconds * 1000);

        addJobListener(new JobAdapter() {
          @Override
          public void jobFinished(Job job) {
            timer.cancel();
          }
        });
      }

      // Add a listener at the end of the job to take care of writing the final
      // job log record...
      addJobListener(new JobAdapter() {
        @Override
        public void jobFinished(Job job) throws KettleException {
          try {
            endProcessing();
          } catch (KettleJobException e) {
            log.logError(
              BaseMessages.getString(PKG, "Job.Exception.UnableToWriteToLoggingTable", jobLogTable.toString()), e);
            // The job fails if the final log record cannot be written!
            throw new KettleException(e);
          }
        }
      });
    } catch (KettleDatabaseException dbe) {
      // This is even before actual execution
      addErrors(1);
      throw new KettleJobException(
        BaseMessages.getString(PKG, "Job.Log.UnableToProcessLoggingStart", "" + tableName), dbe);
    } finally {
      ldb.disconnect();
    }
  }

  // If we need to write out the job entry logging information, do so at the end of the job:
  JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
  if (jobEntryLogTable.isDefined()) {
    addJobListener(new JobAdapter() {
      @Override
      public void jobFinished(Job job) throws KettleException {
        try {
          writeJobEntryLogInformation();
        } catch (KettleException e) {
          throw new KettleException(
            BaseMessages.getString(PKG, "Job.Exception.UnableToPerformJobEntryLoggingAtJobEnd"), e);
        }
      }
    });
  }

  // If we need to write the log channel hierarchy and lineage information,
  // add a listener for that too...
  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
  if (channelLogTable.isDefined()) {
    addJobListener(new JobAdapter() {
      @Override
      public void jobFinished(Job job) throws KettleException {
        try {
          writeLogChannelInformation();
        } catch (KettleException e) {
          throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformLoggingAtTransEnd"), e);
        }
      }
    });
  }

  JobExecutionExtension extension = new JobExecutionExtension(this, result, null, false);
  ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeginProcessing.id, extension);

  return true;
}
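Taken together, beginProcessing shows the Database lifecycle that recurs through all of these examples: construct the object with a logging owner and a DatabaseMeta, share variables with the owner, connect, set the commit size, do the work, and disconnect in a finally block. A minimal sketch of that pattern, using only the calls seen above (logTable and logCommitSize are stand-ins for whatever the surrounding class provides):

// Sketch: the connect/work/disconnect pattern from beginProcessing.
// Assumes this is the owning Job (a variable space and logging parent)
// and that logTable and logCommitSize exist in the enclosing scope.
Database ldb = new Database(this, logTable.getDatabaseMeta());
ldb.shareVariablesWith(this); // resolve ${variables} the same way the job does
ldb.connect();
ldb.setCommit(logCommitSize); // batch commits for log writes
try {
  ldb.writeLogRecord(logTable, LogStatus.START, this, null);
  if (!ldb.isAutoCommit()) {
    ldb.commitLog(true, logTable);
  }
} finally {
  ldb.disconnect(); // always release the connection, even on failure
}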
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class Job, method writeLogChannelInformation:
/**
 * Write the log channel information to the channel log table.
 *
 * @throws KettleException
 *           the kettle exception
 */
protected void writeLogChannelInformation() throws KettleException {
  Database db = null;
  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();

  // PDI-7070: if the parent job has the same channel logging info, don't duplicate the log entries
  Job j = getParentJob();
  if (j != null) {
    if (channelLogTable.equals(j.getJobMeta().getChannelLogTable())) {
      return;
    }
  }

  try {
    db = new Database(this, channelLogTable.getDatabaseMeta());
    db.shareVariablesWith(this);
    db.connect();
    db.setCommit(logCommitSize);

    List<LoggingHierarchy> loggingHierarchyList = getLoggingHierarchy();
    for (LoggingHierarchy loggingHierarchy : loggingHierarchyList) {
      db.writeLogRecord(channelLogTable, LogStatus.START, loggingHierarchy, null);
    }

    // Also time-out the log records in here...
    db.cleanupLogRecords(channelLogTable, getJobname());
  } catch (Exception e) {
    throw new KettleException(
      BaseMessages.getString(PKG, "Trans.Exception.UnableToWriteLogChannelInformationToLogTable"), e);
  } finally {
    if (db != null) { // guard against a failed constructor, which would leave db null
      if (!db.isAutoCommit()) {
        db.commit(true);
      }
      db.disconnect();
    }
  }
}
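The early return implements the PDI-7070 fix: when a subjob inherits its parent's channel log table, the parent's own write already covers this job's logging hierarchy, so writing again would duplicate rows. The guard could be read as a standalone predicate (a sketch; shouldWriteChannelLog is a hypothetical helper name, not part of the project):

// Sketch: the PDI-7070 duplicate-entry guard factored into a predicate.
private boolean shouldWriteChannelLog(ChannelLogTable table) {
  Job parent = getParentJob();
  // Write only when there is no parent, or the parent logs to a different table.
  return parent == null || !table.equals(parent.getJobMeta().getChannelLogTable());
}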
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class InsertUpdate, method init:
public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
  meta = (InsertUpdateMeta) smi;
  data = (InsertUpdateData) sdi;

  if (super.init(smi, sdi)) {
    try {
      if (meta.getDatabaseMeta() == null) {
        logError(BaseMessages.getString(PKG, "InsertUpdate.Init.ConnectionMissing", getStepname()));
        return false;
      }
      data.db = new Database(this, meta.getDatabaseMeta());
      data.db.shareVariablesWith(this);
      if (getTransMeta().isUsingUniqueConnections()) {
        synchronized (getTrans()) {
          data.db.connect(getTrans().getTransactionId(), getPartitionID());
        }
      } else {
        data.db.connect(getPartitionID());
      }
      data.db.setCommit(meta.getCommitSize(this));
      return true;
    } catch (KettleException ke) {
      logError(BaseMessages.getString(PKG, "InsertUpdate.Log.ErrorOccurredDuringStepInitialize") + ke.getMessage());
    }
  }
  return false;
}
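The branch on isUsingUniqueConnections is the interesting part of init(): with unique connections enabled, every step in the transformation shares one transactional connection identified by the transformation's transaction ID, so connection setup is serialized on the Trans object to keep parallel step initializations from racing. A condensed, commented sketch of the two paths (same calls as in the snippet above):

// Sketch: the two connect paths from InsertUpdate.init().
if (getTransMeta().isUsingUniqueConnections()) {
  // One shared transactional connection per transformation: synchronize on
  // the Trans so concurrent init() calls don't race on the same transaction id.
  synchronized (getTrans()) {
    data.db.connect(getTrans().getTransactionId(), getPartitionID());
  }
} else {
  // Each step copy opens its own connection, keyed only by the partition ID.
  data.db.connect(getPartitionID());
}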
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class InsertUpdateMeta, method getSQLStatements:
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    Repository repository, IMetaStore metaStore) throws KettleStepException {
  // default: nothing to do!
  SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null);

  if (databaseMeta != null) {
    if (prev != null && prev.size() > 0) {
      // Copy the row
      RowMetaInterface tableFields =
        RowMetaUtils.getRowMetaForUpdate(prev, keyLookup, keyStream, updateLookup, updateStream);
      if (!Utils.isEmpty(tableName)) {
        Database db = new Database(loggingObject, databaseMeta);
        db.shareVariablesWith(transMeta);
        try {
          db.connect();

          String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
          String cr_table = db.getDDL(schemaTable, tableFields, null, false, null, true);

          String cr_index = "";
          String[] idx_fields = null;
          if (keyLookup != null && keyLookup.length > 0) {
            idx_fields = new String[keyLookup.length];
            for (int i = 0; i < keyLookup.length; i++) {
              idx_fields[i] = keyLookup[i];
            }
          } else {
            retval.setError(BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.MissingKeyFields"));
          }

          // Key lookup dimensions...
          if (idx_fields != null && idx_fields.length > 0 && !db.checkIndexExists(schemaName, tableName, idx_fields)) {
            String indexname = "idx_" + tableName + "_lookup";
            cr_index = db.getCreateIndexStatement(schemaTable, indexname, idx_fields, false, false, false, true);
          }

          String sql = cr_table + cr_index;
          if (sql.length() == 0) {
            retval.setSQL(null);
          } else {
            retval.setSQL(sql);
          }
        } catch (KettleException e) {
          retval.setError(BaseMessages.getString(PKG, "InsertUpdateMeta.ReturnValue.ErrorOccurred") + e.getMessage());
        } finally {
          // release the connection in all cases
          db.disconnect();
        }
      } else {
        retval.setError(BaseMessages.getString(PKG, "InsertUpdateMeta.ReturnValue.NoTableDefinedOnConnection"));
      }
    } else {
      retval.setError(BaseMessages.getString(PKG, "InsertUpdateMeta.ReturnValue.NotReceivingAnyFields"));
    }
  } else {
    retval.setError(BaseMessages.getString(PKG, "InsertUpdateMeta.ReturnValue.NoConnectionDefined"));
  }

  return retval;
}
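A caller is expected to check the returned SQLStatement for an error before running the DDL; a minimal consumption sketch (assuming SQLStatement's hasError/getError/getSQL accessors, which is how the class is used elsewhere in Kettle; prevFields and the other arguments stand in for whatever the caller has at hand):

SQLStatement stmt = meta.getSQLStatements(transMeta, stepMeta, prevFields, repository, metaStore);
if (stmt.hasError()) {
  log.logError(stmt.getError()); // e.g. missing key fields or no target table
} else if (stmt.getSQL() != null) {
  log.logBasic(stmt.getSQL()); // CREATE TABLE and/or CREATE INDEX generated above
}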
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class InsertUpdateMeta, method check:
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
    IMetaStore metaStore) {
  CheckResult cr;
  String error_message = "";

  if (databaseMeta != null) {
    Database db = new Database(loggingObject, databaseMeta);
    db.shareVariablesWith(transMeta);
    try {
      db.connect();

      if (!Utils.isEmpty(tableName)) {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
          BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.TableNameOK"), stepMeta);
        remarks.add(cr);

        boolean first = true;
        boolean error_found = false;
        error_message = "";

        // Check fields in table
        RowMetaInterface r = db.getTableFieldsMeta(schemaName, tableName);
        if (r != null) {
          cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
            BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.TableExists"), stepMeta);
          remarks.add(cr);

          for (int i = 0; i < keyLookup.length; i++) {
            String lufield = keyLookup[i];
            ValueMetaInterface v = r.searchValueMeta(lufield);
            if (v == null) {
              if (first) {
                first = false;
                error_message +=
                  BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.MissingCompareFieldsInTargetTable")
                    + Const.CR;
              }
              error_found = true;
              error_message += "\t\t" + lufield + Const.CR;
            }
          }
          if (error_found) {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
          } else {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
              BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.AllLookupFieldsFound"), stepMeta);
          }
          remarks.add(cr);

          // How about the fields to insert/update in the table?
          first = true;
          error_found = false;
          error_message = "";

          for (int i = 0; i < updateLookup.length; i++) {
            String lufield = updateLookup[i];
            ValueMetaInterface v = r.searchValueMeta(lufield);
            if (v == null) {
              if (first) {
                first = false;
                error_message +=
                  BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.MissingFieldsToUpdateInTargetTable")
                    + Const.CR;
              }
              error_found = true;
              error_message += "\t\t" + lufield + Const.CR;
            }
          }
          if (error_found) {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
          } else {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
              BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.AllFieldsToUpdateFoundInTargetTable"),
              stepMeta);
          }
          remarks.add(cr);
        } else {
          error_message = BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.CouldNotReadTableInfo");
          cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
          remarks.add(cr);
        }
      }

      // Look up fields in the input stream <prev>
      if (prev != null && prev.size() > 0) {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
          BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.StepReceivingDatas", prev.size() + ""), stepMeta);
        remarks.add(cr);

        boolean first = true;
        error_message = "";
        boolean error_found = false;

        for (int i = 0; i < keyStream.length; i++) {
          ValueMetaInterface v = prev.searchValueMeta(keyStream[i]);
          if (v == null) {
            if (first) {
              first = false;
              error_message +=
                BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.MissingFieldsInInput") + Const.CR;
            }
            error_found = true;
            error_message += "\t\t" + keyStream[i] + Const.CR;
          }
        }
        for (int i = 0; i < keyStream2.length; i++) {
          if (keyStream2[i] != null && keyStream2[i].length() > 0) {
            ValueMetaInterface v = prev.searchValueMeta(keyStream2[i]);
            if (v == null) {
              if (first) {
                first = false;
                error_message +=
                  BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.MissingFieldsInInput") + Const.CR;
              }
              error_found = true;
              error_message += "\t\t" + keyStream2[i] + Const.CR;
            }
          }
        }
        if (error_found) {
          cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
        } else {
          cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
            BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.AllFieldsFoundInInput"), stepMeta);
        }
        remarks.add(cr);

        // How about the fields to insert/update the table with?
        first = true;
        error_found = false;
        error_message = "";

        for (int i = 0; i < updateStream.length; i++) {
          String lufield = updateStream[i];
          ValueMetaInterface v = prev.searchValueMeta(lufield);
          if (v == null) {
            if (first) {
              first = false;
              error_message +=
                BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.MissingInputStreamFields") + Const.CR;
            }
            error_found = true;
            error_message += "\t\t" + lufield + Const.CR;
          }
        }
        if (error_found) {
          cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
        } else {
          cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
            BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.AllFieldsFoundInInput2"), stepMeta);
        }
        remarks.add(cr);
      } else {
        error_message = BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.MissingFieldsInInput3") + Const.CR;
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
        remarks.add(cr);
      }
    } catch (KettleException e) {
      error_message = BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.DatabaseErrorOccurred") + e.getMessage();
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
      remarks.add(cr);
    } finally {
      db.disconnect();
    }
  } else {
    error_message = BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.InvalidConnection");
    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
    remarks.add(cr);
  }

  // See if we have input streams leading to this step!
  if (input.length > 0) {
    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
      BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.StepReceivingInfoFromOtherSteps"), stepMeta);
    remarks.add(cr);
  } else {
    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
      BaseMessages.getString(PKG, "InsertUpdateMeta.CheckResult.NoInputError"), stepMeta);
    remarks.add(cr);
  }
}
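Each verification appends one CheckResult to the remarks list, so a UI or test harness can filter by result type afterwards. A minimal consumption sketch (getType and getText are the CheckResultInterface accessors; the surrounding variables are assumed to be set up as in the signature above):

List<CheckResultInterface> remarks = new ArrayList<>();
meta.check(remarks, transMeta, stepMeta, prev, input, output, info, space, repository, metaStore);
for (CheckResultInterface remark : remarks) {
  if (remark.getType() == CheckResultInterface.TYPE_RESULT_ERROR) {
    System.out.println("Error: " + remark.getText());
  }
}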