Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class SQLFileOutputMeta, method getSQLStatements.
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore) {
  // default: nothing to do!
  SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null);
  if (databaseMeta != null) {
    if (prev != null && prev.size() > 0) {
      if (!Utils.isEmpty(tablename)) {
        Database db = new Database(loggingObject, databaseMeta);
        db.shareVariablesWith(transMeta);
        try {
          db.connect();
          String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tablename);
          String cr_table = db.getDDL(schemaTable, prev);
          // Empty string means: nothing to do: set it to null...
          if (cr_table == null || cr_table.length() == 0) {
            cr_table = null;
          }
          retval.setSQL(cr_table);
        } catch (KettleDatabaseException dbe) {
          retval.setError(BaseMessages.getString(PKG, "SQLFileOutputMeta.Error.ErrorConnecting", dbe.getMessage()));
        } finally {
          db.disconnect();
        }
      } else {
        retval.setError(BaseMessages.getString(PKG, "SQLFileOutputMeta.Exception.TableNotSpecified"));
      }
    } else {
      retval.setError(BaseMessages.getString(PKG, "SQLFileOutputMeta.Error.NoInput"));
    }
  } else {
    retval.setError(BaseMessages.getString(PKG, "SQLFileOutputMeta.Error.NoConnection"));
  }
  return retval;
}
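For context, a caller typically inspects the returned SQLStatement before acting on it. The fragment below is not part of the original class; it is a minimal sketch that assumes the standard Kettle SQLStatement accessors (hasError, hasSQL, getSQL, getStepname) and a LogChannelInterface supplied by the caller, and the helper name is made up.
import org.pentaho.di.core.Const;
import org.pentaho.di.core.SQLStatement;
import org.pentaho.di.core.logging.LogChannelInterface;

// Hypothetical helper: report the DDL produced by getSQLStatements().
void reportGeneratedSql(SQLStatement statement, LogChannelInterface log) {
  if (statement.hasError()) {
    // The step could not generate SQL (missing connection, table name or input fields).
    log.logError(statement.getError());
  } else if (statement.hasSQL()) {
    // Non-null SQL means the target table has to be created or altered.
    log.logBasic("DDL for step " + statement.getStepname() + ":" + Const.CR + statement.getSQL());
  } else {
    // A null SQL string means the table already matches the incoming row layout.
    log.logBasic("Nothing to do for step " + statement.getStepname());
  }
}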
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class SQLFileOutputMeta, method check.
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore) {
  if (databaseMeta != null) {
    CheckResult cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.ConnectionExists"), stepMeta);
    remarks.add(cr);
    Database db = new Database(loggingObject, databaseMeta);
    try {
      db.connect();
      cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.ConnectionOk"), stepMeta);
      remarks.add(cr);
      if (!Utils.isEmpty(tablename)) {
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tablename);
        // Check if this table exists...
        if (db.checkTableExists(schemaName, tablename)) {
          cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.TableAccessible", schemaTable), stepMeta);
          remarks.add(cr);
          RowMetaInterface r = db.getTableFieldsMeta(schemaName, tablename);
          if (r != null) {
            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.TableOk", schemaTable), stepMeta);
            remarks.add(cr);
            String error_message = "";
            boolean error_found = false;
            // Now see what we can find as previous step...
            if (prev != null && prev.size() > 0) {
              cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.FieldsReceived", "" + prev.size()), stepMeta);
              remarks.add(cr);
              // Starting from prev...
              for (int i = 0; i < prev.size(); i++) {
                ValueMetaInterface pv = prev.getValueMeta(i);
                int idx = r.indexOfValue(pv.getName());
                if (idx < 0) {
                  error_message += "\t\t" + pv.getName() + " (" + pv.getTypeDesc() + ")" + Const.CR;
                  error_found = true;
                }
              }
              if (error_found) {
                error_message = BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.FieldsNotFoundInOutput", error_message);
                cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
                remarks.add(cr);
              } else {
                cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.AllFieldsFoundInOutput"), stepMeta);
                remarks.add(cr);
              }
              // Starting from table fields in r...
              for (int i = 0; i < r.size(); i++) {
                ValueMetaInterface rv = r.getValueMeta(i);
                int idx = prev.indexOfValue(rv.getName());
                if (idx < 0) {
                  error_message += "\t\t" + rv.getName() + " (" + rv.getTypeDesc() + ")" + Const.CR;
                  error_found = true;
                }
              }
              if (error_found) {
                error_message = BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.FieldsNotFound", error_message);
                cr = new CheckResult(CheckResult.TYPE_RESULT_WARNING, error_message, stepMeta);
                remarks.add(cr);
              } else {
                cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.AllFieldsFound"), stepMeta);
                remarks.add(cr);
              }
            } else {
              cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.NoFields"), stepMeta);
              remarks.add(cr);
            }
          } else {
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.TableNotAccessible"), stepMeta);
            remarks.add(cr);
          }
        } else {
          cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.TableError", schemaTable), stepMeta);
          remarks.add(cr);
        }
      } else {
        cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.NoTableName"), stepMeta);
        remarks.add(cr);
      }
    } catch (KettleException e) {
      cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.UndefinedError", e.getMessage()), stepMeta);
      remarks.add(cr);
    } finally {
      db.disconnect();
    }
  } else {
    CheckResult cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.NoConnection"), stepMeta);
    remarks.add(cr);
  }
  // See if we have input streams leading to this step!
  if (input.length > 0) {
    CheckResult cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.ExpectedInputOk"), stepMeta);
    remarks.add(cr);
  } else {
    CheckResult cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "SQLFileOutputMeta.CheckResult.ExpectedInputError"), stepMeta);
    remarks.add(cr);
  }
}
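To show how these remarks are usually consumed, here is a small sketch that filters the collected list for error-level results after check() has run. It is illustrative only, uses only the CheckResult types already seen above, and the helper name is made up.
import java.util.List;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;

// Hypothetical helper: print error-level remarks produced by check() and report whether any were found.
boolean hasErrors(List<CheckResultInterface> remarks) {
  boolean errorFound = false;
  for (CheckResultInterface remark : remarks) {
    if (remark.getType() == CheckResult.TYPE_RESULT_ERROR) {
      System.err.println(remark.getText());
      errorFound = true;
    }
  }
  return errorFound;
}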
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class ExecSQL, method init.
@Override
public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
  meta = (ExecSQLMeta) smi;
  data = (ExecSQLData) sdi;
  if (super.init(smi, sdi)) {
    if (meta.getDatabaseMeta() == null) {
      logError(BaseMessages.getString(PKG, "ExecSQL.Init.ConnectionMissing", getStepname()));
      return false;
    }
    data.db = new Database(this, meta.getDatabaseMeta());
    data.db.shareVariablesWith(this);
    // Connect to the database
    try {
      if (getTransMeta().isUsingUniqueConnections()) {
        synchronized (getTrans()) {
          data.db.connect(getTrans().getTransactionId(), getPartitionID());
        }
      } else {
        data.db.connect(getPartitionID());
      }
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "ExecSQL.Log.ConnectedToDB"));
      }
      if (meta.isReplaceVariables()) {
        data.sql = environmentSubstitute(meta.getSql());
      } else {
        data.sql = meta.getSql();
      }
      // If the SQL is not executed for each input row, run it a single time here.
      if (!meta.isExecutedEachInputRow()) {
        if (meta.isSingleStatement()) {
          data.result = data.db.execStatement(data.sql);
        } else {
          data.result = data.db.execStatements(data.sql);
        }
        if (!data.db.isAutoCommit()) {
          data.db.commit();
        }
      }
      return true;
    } catch (KettleException e) {
      logError(BaseMessages.getString(PKG, "ExecSQL.Log.ErrorOccurred") + e.getMessage());
      setErrors(1);
      stopAll();
    }
  }
  return false;
}
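The same connect / execute / commit / disconnect pattern can be used outside a step. The helper below is a standalone sketch rather than code taken from ExecSQL; the parent logging object and DatabaseMeta are assumed to be supplied by the caller, and the method name is made up.
import org.pentaho.di.core.Result;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LoggingObjectInterface;

// Hypothetical helper: run a script of SQL statements once and commit if the connection is not auto-commit.
void runScriptOnce(LoggingObjectInterface parent, DatabaseMeta databaseMeta, String script) throws KettleException {
  Database db = new Database(parent, databaseMeta);
  try {
    db.connect();
    // The returned Result carries row counts where the driver reports them.
    Result result = db.execStatements(script);
    if (!db.isAutoCommit()) {
      db.commit();
    }
  } finally {
    db.disconnect();
  }
}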
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class BaseStepMetaCloningTest, method testClone.
@Test
public void testClone() throws Exception {
  final Database db1 = mock(Database.class);
  final Database db2 = mock(Database.class);
  final Repository repository = mock(Repository.class);
  final StepMeta stepMeta = mock(StepMeta.class);
  BaseStepMeta meta = new BaseStepMeta();
  meta.setChanged(true);
  meta.databases = new Database[] { db1, db2 };
  StepIOMetaInterface ioMeta = new StepIOMeta(true, false, false, false, false, false);
  meta.setStepIOMeta(ioMeta);
  meta.repository = repository;
  meta.parentStepMeta = stepMeta;
  BaseStepMeta clone = (BaseStepMeta) meta.clone();
  assertTrue(clone.hasChanged());
  // is it OK ?
  assertTrue(clone.databases == meta.databases);
  assertArrayEquals(meta.databases, clone.databases);
  assertEquals(meta.repository, clone.repository);
  assertEquals(meta.parentStepMeta, clone.parentStepMeta);
  StepIOMetaInterface cloneIOMeta = clone.getStepIOMeta();
  assertNotNull(cloneIOMeta);
  assertEquals(ioMeta.isInputAcceptor(), cloneIOMeta.isInputAcceptor());
  assertEquals(ioMeta.isInputDynamic(), cloneIOMeta.isInputDynamic());
  assertEquals(ioMeta.isInputOptional(), cloneIOMeta.isInputOptional());
  assertEquals(ioMeta.isOutputDynamic(), cloneIOMeta.isOutputDynamic());
  assertEquals(ioMeta.isOutputProducer(), cloneIOMeta.isOutputProducer());
  assertEquals(ioMeta.isSortedDataRequired(), cloneIOMeta.isSortedDataRequired());
  assertNotNull(cloneIOMeta.getInfoStreams());
  assertEquals(0, cloneIOMeta.getInfoStreams().size());
}
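The pair of assertions on clone.databases pins down a shallow copy: the clone shares the same array object rather than receiving a new array with equal contents. The snippet below is a plain-Java illustration of that distinction, unrelated to the Kettle classes themselves.
import java.util.Arrays;

// Illustration only: reference sharing (shallow) vs. element copying (deep) for an array field.
public class ShallowVsDeep {
  public static void main(String[] args) {
    String[] shared = { "db1", "db2" };
    String[] shallow = shared;          // same array object
    String[] deep = shared.clone();     // new array object, same elements
    System.out.println(shallow == shared);           // true  -> what assertTrue(clone.databases == meta.databases) checks
    System.out.println(deep == shared);              // false
    System.out.println(Arrays.equals(deep, shared)); // true  -> what assertArrayEquals checks
  }
}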
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class BaseStepMetaCloningTest, method testCloneWithInfoSteps.
@Test
public void testCloneWithInfoSteps() throws Exception {
  final Database db1 = mock(Database.class);
  final Database db2 = mock(Database.class);
  final Repository repository = mock(Repository.class);
  final StepMeta stepMeta = mock(StepMeta.class);
  BaseStepMeta meta = new BaseStepMeta();
  meta.setChanged(true);
  meta.databases = new Database[] { db1, db2 };
  StepIOMetaInterface ioMeta = new StepIOMeta(true, false, false, false, false, false);
  meta.setStepIOMeta(ioMeta);
  final String refStepName = "referenced step";
  final StepMeta refStepMeta = mock(StepMeta.class);
  doReturn(refStepName).when(refStepMeta).getName();
  StreamInterface stream = new Stream(StreamInterface.StreamType.INFO, refStepMeta, null, null, refStepName);
  ioMeta.addStream(stream);
  meta.repository = repository;
  meta.parentStepMeta = stepMeta;
  BaseStepMeta clone = (BaseStepMeta) meta.clone();
  assertTrue(clone.hasChanged());
  // is it OK ?
  assertTrue(clone.databases == meta.databases);
  assertArrayEquals(meta.databases, clone.databases);
  assertEquals(meta.repository, clone.repository);
  assertEquals(meta.parentStepMeta, clone.parentStepMeta);
  StepIOMetaInterface cloneIOMeta = clone.getStepIOMeta();
  assertNotNull(cloneIOMeta);
  assertEquals(ioMeta.isInputAcceptor(), cloneIOMeta.isInputAcceptor());
  assertEquals(ioMeta.isInputDynamic(), cloneIOMeta.isInputDynamic());
  assertEquals(ioMeta.isInputOptional(), cloneIOMeta.isInputOptional());
  assertEquals(ioMeta.isOutputDynamic(), cloneIOMeta.isOutputDynamic());
  assertEquals(ioMeta.isOutputProducer(), cloneIOMeta.isOutputProducer());
  assertEquals(ioMeta.isSortedDataRequired(), cloneIOMeta.isSortedDataRequired());
  final List<StreamInterface> clonedInfoStreams = cloneIOMeta.getInfoStreams();
  assertNotNull(clonedInfoStreams);
  assertEquals(1, clonedInfoStreams.size());
  final StreamInterface clonedStream = clonedInfoStreams.get(0);
  assertNotSame(stream, clonedStream);
  assertEquals(stream.getStreamType(), clonedStream.getStreamType());
  assertEquals(refStepName, clonedStream.getStepname());
  // PDI-15799
  assertSame(refStepMeta, clonedStream.getStepMeta());
}
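For reference, consumers of info streams usually resolve the referenced step through the stream itself, which is exactly what the PDI-15799 assertion above protects. Below is a minimal sketch that uses only the accessors exercised by the test (getInfoStreams, getStepname, getStepMeta); the helper name is made up.
import java.util.List;
import org.pentaho.di.trans.step.StepIOMetaInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.errorhandling.StreamInterface;

// Hypothetical lookup: find the StepMeta feeding the info stream with the given step name.
StepMeta findInfoStep(StepIOMetaInterface ioMeta, String stepName) {
  List<StreamInterface> infoStreams = ioMeta.getInfoStreams();
  for (StreamInterface stream : infoStreams) {
    if (stepName.equals(stream.getStepname())) {
      return stream.getStepMeta();
    }
  }
  return null;
}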