Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class KettleDatabaseRepositoryJobEntryDelegate, method loadJobEntryCopy.
/**
 * Load the job entry copy (the graphical "Chef" entry) from the repository.
 * The type, name & description are loaded even if no matching entry can be found.
 *
 * @param jobId
 *          the job ID
 * @param jobEntryCopyId
 *          the job entry copy ID
 * @param jobentries
 *          a list with all job entries
 * @param databases
 *          a list with all defined databases
 * @param slaveServers
 *          a list with all defined slave servers
 * @param jobname
 *          the name of the job, used for a MissingEntry placeholder when the plugin cannot be found
 * @return the loaded job entry copy
 * @throws KettleException
 *           when the copy or its job entry type cannot be found in the repository
 */
public JobEntryCopy loadJobEntryCopy( ObjectId jobId, ObjectId jobEntryCopyId,
    List<JobEntryInterface> jobentries, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers, String jobname ) throws KettleException {
  JobEntryCopy jobEntryCopy = new JobEntryCopy();
  try {
    jobEntryCopy.setObjectId( jobEntryCopyId );
    // Handle GUI information: nr, location, ...
    RowMetaAndData r = getJobEntryCopy( jobEntryCopyId );
    if ( r != null ) {
      // These are the jobentry_copy fields...
      //
      ObjectId jobEntryId =
          new LongObjectId( r.getInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, 0 ) );
      ObjectId jobEntryTypeId =
          new LongObjectId( r.getInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, 0 ) );
      jobEntryCopy.setNr( (int) r.getInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 0 ) );
      int locx = (int) r.getInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 0 );
      int locy = (int) r.getInteger( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 0 );
      boolean isdrawn = r.getBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, false );
      boolean isparallel = r.getBoolean( KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, false );

      // Do we have the jobentry already?
      //
      jobEntryCopy.setEntry( JobMeta.findJobEntry( jobentries, jobEntryId ) );
      if ( jobEntryCopy.getEntry() == null ) {
        // What type of jobentry do we load now?
        // Get the jobentry type code
        //
        RowMetaAndData rt = getJobEntryType( new LongObjectId( jobEntryTypeId ) );
        if ( rt != null ) {
          String jet_code = rt.getString( KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, null );
          JobEntryInterface jobEntry = null;
          PluginRegistry registry = PluginRegistry.getInstance();
          PluginInterface jobPlugin = registry.findPluginWithId( JobEntryPluginType.class, jet_code );
          if ( jobPlugin == null ) {
            jobEntry = new MissingEntry( jobname, jet_code );
          } else {
            jobEntry = (JobEntryInterface) registry.loadClass( jobPlugin );
          }
          if ( jobEntry != null ) {
            jobEntryCopy.setEntry( jobEntry );
            if ( jobEntry instanceof JobEntryBase ) {
              loadJobEntryBase( (JobEntryBase) jobEntry, jobEntryId, databases, slaveServers );
              ( (JobEntryBase) jobEntry ).setAttributesMap( loadJobEntryAttributesMap( jobId, jobEntryId ) );
            }
            compatibleJobEntryLoadRep( jobEntry, repository, jobEntryTypeId, databases, slaveServers );
            jobEntry.loadRep( repository, repository.metaStore, jobEntryId, databases, slaveServers );
            jobEntryCopy.getEntry().setObjectId( jobEntryId );
            jobentries.add( jobEntryCopy.getEntry() );
          } else {
            throw new KettleException( "JobEntryLoader was unable to find Job Entry Plugin with description ["
                + jet_code + "]." );
          }
        } else {
          throw new KettleException( "Unable to find Job Entry Type with id=" + jobEntryTypeId
              + " in the repository" );
        }
      }
      jobEntryCopy.setLocation( locx, locy );
      jobEntryCopy.setDrawn( isdrawn );
      jobEntryCopy.setLaunchingInParallel( isparallel );
      return jobEntryCopy;
    } else {
      throw new KettleException( "Unable to find job entry copy in repository with id_jobentry_copy="
          + jobEntryCopyId );
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry copy from repository with id_jobentry_copy="
        + jobEntryCopyId, dbe );
  }
}
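The detail worth keeping from this snippet is the error-handling shape: the low-level KettleDatabaseException raised by the repository table reads is caught once, at the edge of the method, and rewrapped in a KettleException that carries the offending object id. A minimal sketch of that pattern follows; fetchRow is a hypothetical stand-in for the delegate's real database access, not Kettle API:

import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;

public class RepositoryReadSketch {

  /** Wrap-and-rethrow: callers see one exception type plus the failing id. */
  public Object readWithContext( long objectId ) throws KettleException {
    try {
      return fetchRow( objectId );
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( "Unable to load object with id=" + objectId, dbe );
    }
  }

  // Hypothetical low-level read standing in for getJobEntryCopy() and friends.
  private Object fetchRow( long objectId ) throws KettleDatabaseException {
    throw new KettleDatabaseException( "simulated lost connection" );
  }
}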
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class KettleDatabaseRepository, method create.
@Override
public void create() {
  if ( repositoryMeta.getConnection() != null ) {
    if ( repositoryMeta.getConnection().getAccessType() == DatabaseMeta.TYPE_ACCESS_ODBC ) {
      // This will change in a future story
      log.logDebug( "ODBC type is not advised for repository use" );
    }
    try {
      if ( !getDatabaseMeta().getDatabaseInterface().supportsRepository() ) {
        // Show an error about this database type not being a valid repository target
        log.logError( "This database type does not support being a repository" );
      }
      connectionDelegate.connect( true, true );
      boolean upgrade = false;
      try {
        String userTableName = getDatabaseMeta().quoteField( KettleDatabaseRepository.TABLE_R_USER );
        upgrade = getDatabase().checkTableExists( userTableName );
        if ( upgrade ) {
          // This will change in a future story
          log.logDebug( "Database upgrade will now take place" );
        }
      } catch ( KettleDatabaseException dbe ) {
        // Roll back the connection: this is required for certain databases like PostgreSQL.
        // Otherwise we can't execute any other DDL statement.
        //
        rollback();
        // Don't show an error, just go ahead and propose to create the repository!
      }
      if ( upgrade ) {
        // Authenticate as admin before the upgrade
        reconnectAsAdminForUpgrade();
      }
      createRepositorySchema( null, upgrade, new ArrayList<String>(), false );
      disconnect();
    } catch ( KettleException ke ) {
      log.logError( "An error has occurred creating a repository", ke );
    }
  }
}
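Note how the probe for the R_USER table deliberately swallows the KettleDatabaseException and rolls back first: on databases such as PostgreSQL a failed statement poisons the transaction, and without the rollback the subsequent schema DDL would fail too. A minimal sketch isolating just that probe, assuming only the Database API used above (isUpgrade itself is illustrative, not a Kettle method):

import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.exception.KettleDatabaseException;

public class UpgradeProbeSketch {

  /** Returns true when the (quoted) user table already exists, i.e. this is an upgrade. */
  public static boolean isUpgrade( Database db, String quotedUserTable ) {
    try {
      return db.checkTableExists( quotedUserTable );
    } catch ( KettleDatabaseException dbe ) {
      try {
        // Required on PostgreSQL and similar: clear the aborted transaction
        // so that later DDL statements can still run.
        db.rollback();
      } catch ( KettleDatabaseException ignored ) {
        // Best effort: treat the failure as "no table", i.e. a fresh install.
      }
      return false;
    }
  }
}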
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class XulDatabaseExplorerController, method getDDLForOther.
public void getDDLForOther() {
  if ( databases != null ) {
    try {
      // Now select the other connection...
      // Only take non-SAP ERP connections...
      List<DatabaseMeta> dbs = new ArrayList<DatabaseMeta>();
      for ( int i = 0; i < databases.size(); i++ ) {
        if ( databases.get( i ).getDatabaseInterface().isExplorable() ) {
          dbs.add( databases.get( i ) );
        }
      }
      String[] conn = new String[dbs.size()];
      for ( int i = 0; i < conn.length; i++ ) {
        conn[i] = dbs.get( i ).getName();
      }
      EnterSelectionDialog esd =
          new EnterSelectionDialog( this.dbExplorerDialog.getShell(), conn,
              BaseMessages.getString( PKG, "DatabaseExplorerDialog.TargetDatabase.Title" ),
              BaseMessages.getString( PKG, "DatabaseExplorerDialog.TargetDatabase.Message" ) );
      String target = esd.open();
      if ( target != null ) {
        DatabaseMeta targetdbi = DatabaseMeta.findDatabase( dbs, target );
        Database targetdb = new Database( null, targetdbi );
        try {
          targetdb.connect();
          String tableName = getSchemaAndTable( model );
          RowMetaInterface r = targetdb.getTableFields( tableName );
          String sql = targetdb.getCreateTableStatement( tableName, r, null, false, null, true );
          SQLEditor se =
              new SQLEditor( this.getDatabaseMeta(), this.dbExplorerDialog.getShell(), SWT.NONE,
                  this.model.getDatabaseMeta(), this.dbcache, sql );
          se.open();
        } finally {
          targetdb.disconnect();
        }
      }
    } catch ( KettleDatabaseException dbe ) {
      new ErrorDialog( this.dbExplorerDialog.getShell(),
          BaseMessages.getString( PKG, "Dialog.Error.Header" ),
          BaseMessages.getString( PKG, "DatabaseExplorerDialog.Error.GenDDL" ), dbe );
    }
  } else {
    MessageBox mb = new MessageBox( this.dbExplorerDialog.getShell(), SWT.NONE | SWT.ICON_INFORMATION );
    mb.setMessage( BaseMessages.getString( PKG, "DatabaseExplorerDialog.NoConnectionsKnown.Message" ) );
    mb.setText( BaseMessages.getString( PKG, "DatabaseExplorerDialog.NoConnectionsKnown.Title" ) );
    mb.open();
  }
}
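Stripped of the SWT dialogs, the DDL generation itself is a three-step dance on the target connection: connect, read the field layout with getTableFields(), and hand it to getCreateTableStatement(). A minimal sketch under that assumption, using the same parameter values as the dialog code above (no technical key, no auto-increment, trailing semicolon):

import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.row.RowMetaInterface;

public class DdlSketch {

  /** Builds a CREATE TABLE statement for tableName as the target database would phrase it. */
  public static String generateDdl( DatabaseMeta targetMeta, String tableName )
    throws KettleDatabaseException {
    Database db = new Database( null, targetMeta );
    try {
      db.connect();
      RowMetaInterface fields = db.getTableFields( tableName );
      // tk=null, useAutoinc=false, pk=null, semicolon=true -- as in getDDLForOther()
      return db.getCreateTableStatement( tableName, fields, null, false, null, true );
    } finally {
      db.disconnect();
    }
  }
}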
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class SQLStatementsDialog, method exec.
private void exec() {
  int[] idx = wFields.table.getSelectionIndices();
  // None selected: don't waste the user's time, select them all!
  if ( idx.length == 0 ) {
    idx = new int[stats.size()];
    for ( int i = 0; i < stats.size(); i++ ) {
      idx[i] = i;
    }
  }
  int errors = 0;
  for ( int i = 0; i < idx.length; i++ ) {
    SQLStatement stat = stats.get( idx[i] );
    if ( stat.hasError() ) {
      errors++;
    }
  }
  if ( errors == 0 ) {
    for ( int i = 0; i < idx.length; i++ ) {
      SQLStatement stat = stats.get( idx[i] );
      DatabaseMeta di = stat.getDatabase();
      if ( di != null && !stat.hasError() ) {
        Database db = new Database( loggingObject, di );
        try {
          db.connect();
          try {
            db.execStatements( stat.getSQL() );
          } catch ( KettleDatabaseException dbe ) {
            errors++;
            new ErrorDialog( shell, BaseMessages.getString( PKG, "SQLStatementDialog.Error.Title" ),
                BaseMessages.getString( PKG, "SQLStatementDialog.Error.CouldNotExec", stat.getSQL() ), dbe );
          }
        } catch ( KettleDatabaseException dbe ) {
          new ErrorDialog( shell, BaseMessages.getString( PKG, "SQLStatementDialog.Error.Title" ),
              BaseMessages.getString( PKG, "SQLStatementDialog.Error.CouldNotConnect",
                  ( di == null ? "" : di.getName() ) ), dbe );
        } finally {
          db.disconnect();
        }
      }
    }
    if ( errors == 0 ) {
      MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
      mb.setMessage( BaseMessages.getString( PKG, "SQLStatementDialog.Success.Message",
          Integer.toString( idx.length ) ) );
      mb.setText( BaseMessages.getString( PKG, "SQLStatementDialog.Success.Title" ) );
      mb.open();
    }
  } else {
    MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
    mb.setMessage( BaseMessages.getString( PKG, "SQLStatementDialog.Error.Message", Integer.toString( errors ) ) );
    mb.setText( BaseMessages.getString( PKG, "SQLStatementDialog.Error.Title" ) );
    mb.open();
  }
}
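The dialog's core loop, with the UI stripped away, is: one connection per statement, two KettleDatabaseException catch sites distinguishing "could not connect" from "could not execute", and disconnect() in a finally. A minimal non-UI sketch of that loop (executeAll is illustrative, not part of the dialog):

import java.util.List;

import org.pentaho.di.core.SQLStatement;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.logging.LoggingObjectInterface;

public class ExecStatementsSketch {

  /** Executes each statement on its own connection and returns the error count. */
  public static int executeAll( LoggingObjectInterface parent, List<SQLStatement> stats ) {
    int errors = 0;
    for ( SQLStatement stat : stats ) {
      if ( stat.getDatabase() == null || stat.hasError() ) {
        errors++; // pre-flight error, nothing to run
        continue;
      }
      Database db = new Database( parent, stat.getDatabase() );
      try {
        db.connect(); // the "could not connect" case in the dialog
        db.execStatements( stat.getSQL() ); // the "could not exec" case
      } catch ( KettleDatabaseException dbe ) {
        errors++; // the dialog raises an ErrorDialog here instead
      } finally {
        db.disconnect();
      }
    }
    return errors;
  }
}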
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class Job, method beginProcessing.
/**
 * Handles logging at the start of the job: writes the START record to the job
 * log table (when one is defined) and registers the listeners and timers that
 * take care of interval, job entry and channel logging.
 *
 * @return true if it went OK.
 *
 * @throws KettleException
 *           when the logging start record cannot be written
 */
public boolean beginProcessing() throws KettleException {
  currentDate = new Date();
  logDate = new Date();
  startDate = Const.MIN_DATE;
  endDate = currentDate;
  resetErrors();
  final JobLogTable jobLogTable = jobMeta.getJobLogTable();
  int intervalInSeconds = Const.toInt( environmentSubstitute( jobLogTable.getLogInterval() ), -1 );
  if ( jobLogTable.isDefined() ) {
    DatabaseMeta logcon = jobMeta.getJobLogTable().getDatabaseMeta();
    String schemaName = environmentSubstitute( jobMeta.getJobLogTable().getActualSchemaName() );
    String tableName = environmentSubstitute( jobMeta.getJobLogTable().getActualTableName() );
    String schemaAndTable =
        jobMeta.getJobLogTable().getDatabaseMeta().getQuotedSchemaTableCombination( schemaName, tableName );
    Database ldb = new Database( this, logcon );
    ldb.shareVariablesWith( this );
    ldb.connect();
    ldb.setCommit( logCommitSize );
    try {
      // See if we have to add a batch id...
      Long id_batch = new Long( 1 );
      if ( jobMeta.getJobLogTable().isBatchIdUsed() ) {
        id_batch = logcon.getNextBatchId( ldb, schemaName, tableName, jobLogTable.getKeyField().getFieldName() );
        setBatchId( id_batch.longValue() );
        if ( getPassedBatchId() <= 0 ) {
          setPassedBatchId( id_batch.longValue() );
        }
      }
      Object[] lastr = ldb.getLastLogDate( schemaAndTable, jobMeta.getName(), true, LogStatus.END );
      if ( !Utils.isEmpty( lastr ) ) {
        Date last;
        try {
          last = ldb.getReturnRowMeta().getDate( lastr, 0 );
        } catch ( KettleValueException e ) {
          throw new KettleJobException( BaseMessages.getString( PKG, "Job.Log.ConversionError", "" + tableName ), e );
        }
        if ( last != null ) {
          startDate = last;
        }
      }
      depDate = currentDate;
      ldb.writeLogRecord( jobMeta.getJobLogTable(), LogStatus.START, this, null );
      if ( !ldb.isAutoCommit() ) {
        ldb.commitLog( true, jobMeta.getJobLogTable() );
      }
      ldb.disconnect();

      // If we need to do interval logging, install a timer for it...
      //
      if ( intervalInSeconds > 0 ) {
        final Timer timer = new Timer( getName() + " - interval logging timer" );
        TimerTask timerTask = new TimerTask() {
          public void run() {
            try {
              endProcessing();
            } catch ( Exception e ) {
              log.logError( BaseMessages.getString( PKG, "Job.Exception.UnableToPerformIntervalLogging" ), e );
              // Also stop the show...
              //
              errors.incrementAndGet();
              stopAll();
            }
          }
        };
        timer.schedule( timerTask, intervalInSeconds * 1000, intervalInSeconds * 1000 );
        addJobListener( new JobAdapter() {
          public void jobFinished( Job job ) {
            timer.cancel();
          }
        } );
      }

      // Add a listener at the end of the job to take care of writing the final
      // job log record...
      //
      addJobListener( new JobAdapter() {
        public void jobFinished( Job job ) throws KettleException {
          try {
            endProcessing();
          } catch ( KettleJobException e ) {
            log.logError( BaseMessages.getString( PKG, "Job.Exception.UnableToWriteToLoggingTable",
                jobLogTable.toString() ), e );
            // The job fails when the log database record cannot be written!
            throw new KettleException( e );
          }
        }
      } );
    } catch ( KettleDatabaseException dbe ) {
      // This happens before the actual execution even starts
      addErrors( 1 );
      throw new KettleJobException( BaseMessages.getString( PKG, "Job.Log.UnableToProcessLoggingStart",
          "" + tableName ), dbe );
    } finally {
      ldb.disconnect();
    }
  }

  // If we need to write out the job entry logging information, do so at the end of the job:
  //
  JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
  if ( jobEntryLogTable.isDefined() ) {
    addJobListener( new JobAdapter() {
      public void jobFinished( Job job ) throws KettleException {
        try {
          writeJobEntryLogInformation();
        } catch ( KettleException e ) {
          throw new KettleException(
              BaseMessages.getString( PKG, "Job.Exception.UnableToPerformJobEntryLoggingAtJobEnd" ), e );
        }
      }
    } );
  }

  // If we need to write the log channel hierarchy and lineage information,
  // add a listener for that too...
  //
  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
  if ( channelLogTable.isDefined() ) {
    addJobListener( new JobAdapter() {
      public void jobFinished( Job job ) throws KettleException {
        try {
          writeLogChannelInformation();
        } catch ( KettleException e ) {
          throw new KettleException( BaseMessages.getString( PKG, "Job.Exception.UnableToPerformLoggingAtTransEnd" ), e );
        }
      }
    } );
  }
  JobExecutionExtension extension = new JobExecutionExtension( this, result, null, false );
  ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobBeginProcessing.id, extension );
  return true;
}
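Two details in beginProcessing() are easy to miss: the interval logger is a plain java.util.Timer whose cancellation is tied to a job-finished listener, and a failure to write the final log record deliberately fails the whole job. A minimal sketch of the timer half, with flush standing in for the endProcessing() call (the names here are illustrative, not Job API):

import java.util.Timer;
import java.util.TimerTask;

public class IntervalLoggingSketch {

  /**
   * Schedules a periodic log flush and returns the Timer so the caller can
   * cancel it from a job-finished listener, mirroring the JobAdapter above.
   */
  public static Timer start( String jobName, int intervalInSeconds, Runnable flush ) {
    Timer timer = new Timer( jobName + " - interval logging timer" );
    TimerTask task = new TimerTask() {
      @Override
      public void run() {
        flush.run(); // e.g. endProcessing(), wrapped with error handling
      }
    };
    timer.schedule( task, intervalInSeconds * 1000L, intervalInSeconds * 1000L );
    return timer;
  }
}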