Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class TableInputMeta, method check.
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
    IMetaStore metaStore) {
  CheckResult cr;
  if (databaseMeta != null) {
    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, "Connection exists", stepMeta);
    remarks.add(cr);
    Database db = new Database(loggingObject, databaseMeta);
    db.shareVariablesWith(transMeta);
    // keep track of it for canceling purposes...
    super.databases = new Database[] { db };
    try {
      db.connect();
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, "Connection to database OK", stepMeta);
      remarks.add(cr);
      if (sql != null && sql.length() != 0) {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, "SQL statement is entered", stepMeta);
        remarks.add(cr);
      } else {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, "SQL statement is missing.", stepMeta);
        remarks.add(cr);
      }
    } catch (KettleException e) {
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, "An error occurred: " + e.getMessage(), stepMeta);
      remarks.add(cr);
    } finally {
      db.disconnect();
    }
  } else {
    cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, "Please select or create a connection to use", stepMeta);
    remarks.add(cr);
  }

  // See if we have an informative step...
  StreamInterface infoStream = getStepIOMeta().getInfoStreams().get(0);
  if (!Utils.isEmpty(infoStream.getStepname())) {
    boolean found = false;
    for (int i = 0; i < input.length; i++) {
      if (infoStream.getStepname().equalsIgnoreCase(input[i])) {
        found = true;
      }
    }
    if (found) {
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
          "Previous step to read info from [" + infoStream.getStepname() + "] is found.", stepMeta);
      remarks.add(cr);
    } else {
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
          "Previous step to read info from [" + infoStream.getStepname() + "] is not found.", stepMeta);
      remarks.add(cr);
    }

    // Count the number of ? in the SQL string:
    int count = 0;
    for (int i = 0; i < sql.length(); i++) {
      char c = sql.charAt(i);
      if (c == '\'') {
        // skip to next quote!
        do {
          i++;
          c = sql.charAt(i);
        } while (c != '\'');
      }
      if (c == '?') {
        count++;
      }
    }

    // Verify with the number of informative fields...
    if (info != null) {
      if (count == info.size()) {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
            "This step is expecting and receiving " + info.size() + " fields of input from the previous step.", stepMeta);
        remarks.add(cr);
      } else {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
            "This step is receiving " + info.size() + " but not the expected " + count + " fields of input from the previous step.", stepMeta);
        remarks.add(cr);
      }
    } else {
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, "Input step name is not recognized!", stepMeta);
      remarks.add(cr);
    }
  } else {
    if (input.length > 0) {
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, "Step is not expecting info from input steps.", stepMeta);
      remarks.add(cr);
    } else {
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, "No input expected, no input provided.", stepMeta);
      remarks.add(cr);
    }
  }
}
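
The '?'-counting loop at the end of check() supplies the expected-parameter count that is compared against the info-stream field count. Below is a minimal, self-contained sketch of that counting logic; the class and method names are hypothetical, and the bounds check inside the quote-skipping loop is an added safeguard (the original do/while assumes every quoted literal is terminated).

// Hypothetical helper mirroring the '?'-counting loop in TableInputMeta.check(),
// with an extra guard against an unterminated quoted literal (not in the original).
public class SqlPlaceholderCounter {

  public static int countPlaceholders(String sql) {
    int count = 0;
    for (int i = 0; i < sql.length(); i++) {
      char c = sql.charAt(i);
      if (c == '\'') {
        // Skip over the quoted literal; stop at the closing quote or at end of string.
        i++;
        while (i < sql.length() && sql.charAt(i) != '\'') {
          i++;
        }
        continue;
      }
      if (c == '?') {
        count++;
      }
    }
    return count;
  }

  public static void main(String[] args) {
    // Prints 2: the '?' inside the quoted literal is not counted.
    System.out.println(countPlaceholders("SELECT * FROM t WHERE a = ? AND b = 'x?y' AND c = ?"));
  }
}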
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class TableOutputMeta, method check.
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
    IMetaStore metaStore) {
  if (databaseMeta != null) {
    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
        BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.ConnectionExists"), stepMeta);
    remarks.add(cr);
    Database db = new Database(loggingObject, databaseMeta);
    db.shareVariablesWith(transMeta);
    try {
      db.connect();
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
          BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.ConnectionOk"), stepMeta);
      remarks.add(cr);
      if (!Utils.isEmpty(tableName)) {
        String realSchemaName = db.environmentSubstitute(schemaName);
        String realTableName = db.environmentSubstitute(tableName);
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(realSchemaName, realTableName);
        // Check if this table exists...
        if (db.checkTableExists(realSchemaName, realTableName)) {
          cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
              BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.TableAccessible", schemaTable), stepMeta);
          remarks.add(cr);
          RowMetaInterface r = db.getTableFieldsMeta(realSchemaName, realTableName);
          if (r != null) {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
                BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.TableOk", schemaTable), stepMeta);
            remarks.add(cr);
            String error_message = "";
            boolean error_found = false;
            // Now see what we can find as previous step...
            if (prev != null && prev.size() > 0) {
              cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
                  BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.FieldsReceived", "" + prev.size()), stepMeta);
              remarks.add(cr);
              if (!specifyFields()) {
                // Starting from prev...
                for (int i = 0; i < prev.size(); i++) {
                  ValueMetaInterface pv = prev.getValueMeta(i);
                  int idx = r.indexOfValue(pv.getName());
                  if (idx < 0) {
                    error_message += "\t\t" + pv.getName() + " (" + pv.getTypeDesc() + ")" + Const.CR;
                    error_found = true;
                  }
                }
                if (error_found) {
                  error_message = BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.FieldsNotFoundInOutput", error_message);
                  cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                  remarks.add(cr);
                } else {
                  cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
                      BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.AllFieldsFoundInOutput"), stepMeta);
                  remarks.add(cr);
                }
              } else {
                // Specifying the column names explicitly
                for (int i = 0; i < getFieldDatabase().length; i++) {
                  int idx = r.indexOfValue(getFieldDatabase()[i]);
                  if (idx < 0) {
                    error_message += "\t\t" + getFieldDatabase()[i] + Const.CR;
                    error_found = true;
                  }
                }
                if (error_found) {
                  error_message = BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.FieldsSpecifiedNotInTable", error_message);
                  cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                  remarks.add(cr);
                } else {
                  cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
                      BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.AllFieldsFoundInOutput"), stepMeta);
                  remarks.add(cr);
                }
              }

              error_message = "";
              if (!specifyFields()) {
                // Starting from table fields in r...
                for (int i = 0; i < r.size(); i++) {
                  ValueMetaInterface rv = r.getValueMeta(i);
                  int idx = prev.indexOfValue(rv.getName());
                  if (idx < 0) {
                    error_message += "\t\t" + rv.getName() + " (" + rv.getTypeDesc() + ")" + Const.CR;
                    error_found = true;
                  }
                }
                if (error_found) {
                  error_message = BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.FieldsNotFound", error_message);
                  cr = new CheckResult(CheckResultInterface.TYPE_RESULT_WARNING, error_message, stepMeta);
                  remarks.add(cr);
                } else {
                  cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
                      BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.AllFieldsFound"), stepMeta);
                  remarks.add(cr);
                }
              } else {
                // Specifying the column names explicitly
                for (int i = 0; i < getFieldStream().length; i++) {
                  int idx = prev.indexOfValue(getFieldStream()[i]);
                  if (idx < 0) {
                    error_message += "\t\t" + getFieldStream()[i] + Const.CR;
                    error_found = true;
                  }
                }
                if (error_found) {
                  error_message = BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.FieldsSpecifiedNotFound", error_message);
                  cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta);
                  remarks.add(cr);
                } else {
                  cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
                      BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.AllFieldsFound"), stepMeta);
                  remarks.add(cr);
                }
              }
            } else {
              cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
                  BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.NoFields"), stepMeta);
              remarks.add(cr);
            }
          } else {
            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
                BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.TableNotAccessible"), stepMeta);
            remarks.add(cr);
          }
        } else {
          cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
              BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.TableError", schemaTable), stepMeta);
          remarks.add(cr);
        }
      } else {
        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
            BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.NoTableName"), stepMeta);
        remarks.add(cr);
      }
    } catch (KettleException e) {
      cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
          BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.UndefinedError", e.getMessage()), stepMeta);
      remarks.add(cr);
    } finally {
      db.disconnect();
    }
  } else {
    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
        BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.NoConnection"), stepMeta);
    remarks.add(cr);
  }

  // See if we have input streams leading to this step!
  if (input.length > 0) {
    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK,
        BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.ExpectedInputOk"), stepMeta);
    remarks.add(cr);
  } else {
    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR,
        BaseMessages.getString(PKG, "TableOutputMeta.CheckResult.ExpectedInputError"), stepMeta);
    remarks.add(cr);
  }
}
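
The core of this check() is a two-way comparison: every incoming stream field must map to a column returned by getTableFieldsMeta(), and, when fields are not specified explicitly, every table column should in turn be fed by a stream field. A hedged sketch of one direction of that comparison is shown below; FieldMappingCheck is a hypothetical helper, not part of the project, and it assumes the Kettle core classes are on the classpath.

// Hypothetical helper illustrating the field comparison performed in TableOutputMeta.check().
import java.util.ArrayList;
import java.util.List;

import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;

public class FieldMappingCheck {

  /** Returns the names (with type descriptions) of stream fields that have no matching table column. */
  public static List<String> missingInTable(RowMetaInterface streamFields, RowMetaInterface tableFields) {
    List<String> missing = new ArrayList<String>();
    for (int i = 0; i < streamFields.size(); i++) {
      ValueMetaInterface field = streamFields.getValueMeta(i);
      if (tableFields.indexOfValue(field.getName()) < 0) {
        missing.add(field.getName() + " (" + field.getTypeDesc() + ")");
      }
    }
    return missing;
  }
}

Running the same helper with the arguments swapped gives the opposite direction of the check, which the original code reports as a warning rather than an error.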
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class TableOutputMeta, method getRequiredFields.
public RowMetaInterface getRequiredFields(VariableSpace space) throws KettleException {
  String realTableName = space.environmentSubstitute(tableName);
  String realSchemaName = space.environmentSubstitute(schemaName);
  if (databaseMeta != null) {
    Database db = new Database(loggingObject, databaseMeta);
    try {
      db.connect();
      if (!Utils.isEmpty(realTableName)) {
        // Check if this table exists...
        if (db.checkTableExists(realSchemaName, realTableName)) {
          return db.getTableFieldsMeta(realSchemaName, realTableName);
        } else {
          throw new KettleException(BaseMessages.getString(PKG, "TableOutputMeta.Exception.TableNotFound"));
        }
      } else {
        throw new KettleException(BaseMessages.getString(PKG, "TableOutputMeta.Exception.TableNotSpecified"));
      }
    } catch (Exception e) {
      throw new KettleException(BaseMessages.getString(PKG, "TableOutputMeta.Exception.ErrorGettingFields"), e);
    } finally {
      db.disconnect();
    }
  } else {
    throw new KettleException(BaseMessages.getString(PKG, "TableOutputMeta.Exception.ConnectionNotDefined"));
  }
}
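
getRequiredFields() repeats the same connect / work / disconnect-in-finally discipline used in both check() methods above. A sketch of how that pattern could be factored into a reusable wrapper is shown below; DatabaseTemplate, DatabaseWork, and withDatabase are hypothetical names, not existing Kettle API.

// Hypothetical utility condensing the connect / work / disconnect-in-finally pattern.
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LoggingObjectInterface;

public class DatabaseTemplate {

  /** Work to perform while the connection is open. */
  public interface DatabaseWork<T> {
    T run(Database db) throws KettleException;
  }

  public static <T> T withDatabase(LoggingObjectInterface parent, DatabaseMeta databaseMeta,
      DatabaseWork<T> work) throws KettleException {
    if (databaseMeta == null) {
      throw new KettleException("No database connection defined");
    }
    Database db = new Database(parent, databaseMeta);
    try {
      db.connect();
      return work.run(db);
    } finally {
      // Mirror the finally blocks above: the connection is always released.
      db.disconnect();
    }
  }
}

With such a wrapper, the body of getRequiredFields() reduces to a single withDatabase(...) call that returns db.getTableFieldsMeta(realSchemaName, realTableName) or throws when the table name is missing or the table does not exist.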
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class AddSequenceDialog, method getSequences.
private void getSequences() {
  DatabaseMeta databaseMeta = transMeta.findDatabase(wConnection.getText());
  if (databaseMeta != null) {
    Database database = new Database(loggingObject, databaseMeta);
    try {
      database.connect();
      String[] sequences = database.getSequences();
      if (null != sequences && sequences.length > 0) {
        sequences = Const.sortStrings(sequences);
        EnterSelectionDialog dialog = new EnterSelectionDialog(shell, sequences,
            BaseMessages.getString(PKG, "AddSequenceDialog.SelectSequence.Title", wConnection.getText()),
            BaseMessages.getString(PKG, "AddSequenceDialog.SelectSequence.Message"));
        String d = dialog.open();
        if (d != null) {
          wSeqname.setText(Const.NVL(d.toString(), ""));
        }
      } else {
        MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
        mb.setMessage(BaseMessages.getString(PKG, "AddSequenceDialog.NoSequence.Message"));
        mb.setText(BaseMessages.getString(PKG, "AddSequenceDialog.NoSequence.Title"));
        mb.open();
      }
    } catch (Exception e) {
      new ErrorDialog(shell, BaseMessages.getString(PKG, "System.Dialog.Error.Title"),
          BaseMessages.getString(PKG, "AddSequenceDialog.ErrorGettingSequences"), e);
    } finally {
      if (database != null) {
        database.disconnect();
        database = null;
      }
    }
  }
}
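
The database work in getSequences() can be separated from the SWT dialog handling so it can be exercised without a UI. The sketch below is a hypothetical helper (SequenceLister is not part of the project); it assumes a resolved DatabaseMeta and a parent LoggingObjectInterface, and returns an empty array when the driver reports no sequences.

// Hypothetical helper mirroring the database part of AddSequenceDialog.getSequences().
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LoggingObjectInterface;

public class SequenceLister {

  /** Connects, reads the sequence names, and returns them sorted; empty array if none are reported. */
  public static String[] listSortedSequences(LoggingObjectInterface parent, DatabaseMeta databaseMeta)
      throws KettleException {
    Database database = new Database(parent, databaseMeta);
    try {
      database.connect();
      String[] sequences = database.getSequences();
      return sequences == null ? new String[0] : Const.sortStrings(sequences);
    } finally {
      database.disconnect();
    }
  }
}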
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class JobMeta, method getSQLStatements.
/**
 * Builds a list of all the SQL statements that this job needs in order to work properly.
 *
 * @return An ArrayList of SQLStatement objects.
 */
public List<SQLStatement> getSQLStatements(Repository repository, IMetaStore metaStore,
    ProgressMonitorListener monitor) throws KettleException {
  if (monitor != null) {
    monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.GettingSQLNeededForThisJob"), nrJobEntries() + 1);
  }
  List<SQLStatement> stats = new ArrayList<SQLStatement>();
  for (int i = 0; i < nrJobEntries(); i++) {
    JobEntryCopy copy = getJobEntry(i);
    if (monitor != null) {
      monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.GettingSQLForJobEntryCopy") + copy + "]");
    }
    stats.addAll(copy.getEntry().getSQLStatements(repository, metaStore, this));
    stats.addAll(compatibleGetEntrySQLStatements(copy.getEntry(), repository));
    stats.addAll(compatibleGetEntrySQLStatements(copy.getEntry(), repository, this));
    if (monitor != null) {
      monitor.worked(1);
    }
  }

  // Also check the sql for the logtable...
  if (monitor != null) {
    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.GettingSQLStatementsForJobLogTables"));
  }
  if (jobLogTable.getDatabaseMeta() != null && !Utils.isEmpty(jobLogTable.getTableName())) {
    Database db = new Database(this, jobLogTable.getDatabaseMeta());
    try {
      db.connect();
      RowMetaInterface fields = jobLogTable.getLogRecord(LogStatus.START, null, null).getRowMeta();
      String sql = db.getDDL(jobLogTable.getTableName(), fields);
      if (sql != null && sql.length() > 0) {
        SQLStatement stat = new SQLStatement(BaseMessages.getString(PKG, "JobMeta.SQLFeedback.ThisJob"),
            jobLogTable.getDatabaseMeta(), sql);
        stats.add(stat);
      }
    } catch (KettleDatabaseException dbe) {
      SQLStatement stat = new SQLStatement(BaseMessages.getString(PKG, "JobMeta.SQLFeedback.ThisJob"),
          jobLogTable.getDatabaseMeta(), null);
      stat.setError(BaseMessages.getString(PKG, "JobMeta.SQLFeedback.ErrorObtainingJobLogTableInfo") + dbe.getMessage());
      stats.add(stat);
    } finally {
      db.disconnect();
    }
  }
  if (monitor != null) {
    monitor.worked(1);
  }
  if (monitor != null) {
    monitor.done();
  }
  return stats;
}
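
Callers of getSQLStatements() typically walk the returned list and treat entries that carry an error differently from entries that carry executable DDL. The sketch below is a hypothetical consumer; it assumes the SQLStatement accessors used here (hasError(), getError(), hasSQL(), getSQL(), getStepname()) behave as they do elsewhere in Kettle.

// Hypothetical consumer of JobMeta.getSQLStatements(): report errors and print pending DDL.
import java.util.List;

import org.pentaho.di.core.SQLStatement;

public class SqlStatementReport {

  public static void print(List<SQLStatement> statements) {
    for (SQLStatement statement : statements) {
      if (statement.hasError()) {
        System.out.println("Error for [" + statement.getStepname() + "]: " + statement.getError());
      } else if (statement.hasSQL()) {
        System.out.println("SQL for [" + statement.getStepname() + "]:");
        System.out.println(statement.getSQL());
      }
    }
  }
}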