Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
Class TableOutputIT, method testTableOutputNormal:
/**
 * Test case for the normal table output case.
 */
@SuppressWarnings("deprecation")
public void testTableOutputNormal() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName("table output normal test");

  // Add the database connections
  for (int i = 0; i < databasesXML.length; i++) {
    DatabaseMeta databaseMeta = new DatabaseMeta(databasesXML[i]);
    transMeta.addDatabase(databaseMeta);
  }
  DatabaseMeta dbInfo = transMeta.findDatabase("db");

  // Execute our setup SQL in the database.
  Database database = new Database(transMeta, dbInfo);
  database.connect();
  createTable(database, target_table, createSourceRowMetaInterface1());

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // Create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();

  // Set the information of the injector.
  String injectorPid = registry.getPluginId(StepPluginType.class, im);
  StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, im);
  transMeta.addStep(injectorStep);

  //
  // Create the table output step...
  //
  String outputname = "output to [" + target_table + "]";
  TableOutputMeta tom = new TableOutputMeta();
  tom.setDatabaseMeta(transMeta.findDatabase("db"));
  tom.setTablename(target_table);

  String fromid = registry.getPluginId(StepPluginType.class, tom);
  StepMeta fromstep = new StepMeta(fromid, outputname, tom);
  fromstep.setDescription("write data to table [" + target_table + "] on database [" + dbInfo + "]");
  transMeta.addStep(fromstep);

  TransHopMeta hi = new TransHopMeta(injectorStep, fromstep);
  transMeta.addTransHop(hi);

  // Now execute the transformation...
  Trans trans = new Trans(transMeta);
  trans.prepareExecution(null);

  StepInterface si = trans.getStepInterface(outputname, 0);
  RowStepCollector rc = new RowStepCollector();
  si.addRowListener(rc);

  RowProducer rp = trans.addRowProducer(injectorStepname, 0);
  trans.startThreads();

  // Inject the test rows.
  List<RowMetaAndData> inputList = createNormalDataRows();
  for (RowMetaAndData rm : inputList) {
    rp.putRow(rm.getRowMeta(), rm.getData());
  }
  rp.finished();

  trans.waitUntilFinished();

  // Compare the rows the step wrote against the expected ("gold") rows.
  List<RowMetaAndData> resultRows = rc.getRowsWritten();
  List<RowMetaAndData> goldRows = createNormalDataRows();
  checkRows(goldRows, resultRows);
  checkResultsNormal(database);
}
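The fixture helpers called above (createNormalDataRows, createTable, checkRows, checkResultsNormal) are defined elsewhere in TableOutputIT. As a rough illustration only, a row-building helper along the following lines would satisfy the calls above; the field name, type, and values here are assumptions, not the actual fixture:

// Hypothetical sketch; the real TableOutputIT helper may use different
// field names, types, and values.
// Assumes: org.pentaho.di.core.RowMetaAndData, org.pentaho.di.core.row.RowMeta,
// org.pentaho.di.core.row.RowMetaInterface, org.pentaho.di.core.row.value.ValueMetaInteger
private List<RowMetaAndData> createNormalDataRows() {
  RowMetaInterface rowMeta = new RowMeta();
  rowMeta.addValueMeta(new ValueMetaInteger("id")); // illustrative single column

  List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
  for (long i = 100L; i < 107L; i++) { // seven rows, matching the JIRA2733 test's comment
    rows.add(new RowMetaAndData(rowMeta, new Object[] { Long.valueOf(i) }));
  }
  return rows;
}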
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
Class TableOutputIT, method testTableOutputJIRA2733:
/**
 * Test case for commitSize; see PDI-2733 in JIRA.
 */
@SuppressWarnings("deprecation")
public void testTableOutputJIRA2733() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName("table output JIRA2733 test");

  // Add the database connections
  for (int i = 0; i < databasesXML.length; i++) {
    DatabaseMeta databaseMeta = new DatabaseMeta(databasesXML[i]);
    transMeta.addDatabase(databaseMeta);
  }
  DatabaseMeta dbInfo = transMeta.findDatabase("db");

  // Execute our setup SQL in the database.
  Database database = new Database(transMeta, dbInfo);
  database.connect();
  createTable(database, target_table3, createSourceRowMetaInterface1());

  // Add a "ts" timestamp field to the target table with a default value of NOW()
  database.execStatement("ALTER TABLE " + target_table3 + " ADD COLUMN ts TIMESTAMP DEFAULT NOW() ");

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // Create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();

  // Set the information of the injector.
  String injectorPid = registry.getPluginId(StepPluginType.class, im);
  StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, im);
  transMeta.addStep(injectorStep);

  //
  // Create the table output step...
  //
  String outputname = "output to [" + target_table3 + "]";
  TableOutputMeta tom = new TableOutputMeta();
  tom.setDatabaseMeta(transMeta.findDatabase("db"));
  tom.setTablename(target_table3);
  tom.setTruncateTable(true);
  tom.setUseBatchUpdate(true);

  String fromid = registry.getPluginId(StepPluginType.class, tom);
  StepMeta fromstep = new StepMeta(fromid, outputname, tom);
  fromstep.setDescription("write data to table [" + target_table3 + "] on database [" + dbInfo + "]");
  transMeta.addStep(fromstep);

  TransHopMeta hi = new TransHopMeta(injectorStep, fromstep);
  transMeta.addTransHop(hi);

  // With seven rows, these are the numbers of commits that need to be made
  // for commitSize values ranging from 0 to 8 (0 = auto-commit = no commits).
  int[] goldRowCounts = { 1, 8, 4, 3, 2, 2, 2, 2, 1 };

  for (int commitSize = 0; commitSize <= 8; commitSize++) {
    tom.setCommitSize(commitSize);

    // Now execute the transformation...
    Trans trans = new Trans(transMeta);
    trans.prepareExecution(null);

    StepInterface si = trans.getStepInterface(outputname, 0);
    RowStepCollector rc = new RowStepCollector();
    si.addRowListener(rc);

    RowProducer rp = trans.addRowProducer(injectorStepname, 0);
    trans.startThreads();

    // Inject the test rows.
    List<RowMetaAndData> inputList = createNormalDataRows();
    for (RowMetaAndData rm : inputList) {
      rp.putRow(rm.getRowMeta(), rm.getData());
    }
    rp.finished();

    trans.waitUntilFinished();

    // Get the number of commits from the DB connection in the table output step...
    TableOutputData data = (TableOutputData) trans.findDataInterface(outputname);
    int exp = goldRowCounts[commitSize];

    // Subtract 1: the dispose() method performs one extra commit.
    int act = data.db.getNrExecutedCommits() - 1;
    assertEquals("Incorrect number of commits with commitSize=" + commitSize + Const.CR, exp, act);
  }

  dropTable(database, target_table3);
}
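createTable and dropTable are fixture helpers as well. A minimal sketch of what they could look like, assuming Database.getDDL is used to generate the CREATE TABLE statement (the actual helpers may build the DDL differently):

// Hypothetical sketches; the actual fixture helpers may differ.
private void createTable(Database database, String tableName, RowMetaInterface rm) throws KettleException {
  // Ask the Database object to generate dialect-appropriate CREATE TABLE DDL
  // for the given row layout, then execute it.
  String ddl = database.getDDL(tableName, rm);
  database.execStatement(ddl);
}

private void dropTable(Database database, String tableName) throws KettleException {
  database.execStatement("DROP TABLE " + tableName);
}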
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
Class TableCompareIT, method setUp:
@Before
public void setUp() throws Exception {
  InputStream input = TableCompareIT.class.getClassLoader().getResourceAsStream(PKG + "PDI-7255.sql");
  String sql = TestUtilities.getStringFromInput(input);

  Database db = new Database(log, databaseMeta);
  db.connect();
  db.execStatements(sql);
  db.commit(true);
  db.disconnect();
}
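Note that disconnect() is never reached here if execStatements throws. A slightly more defensive variant of the same setup, using only the calls already shown above:

Database db = new Database(log, databaseMeta);
db.connect();
try {
  db.execStatements(sql);
  db.commit(true);
} finally {
  // Release the connection even if the SQL script fails.
  db.disconnect();
}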
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
Class TableCompareIT, method executeSqlPrecondition:
private void executeSqlPrecondition(String sqlFile) throws IOException, KettleDatabaseException {
  String path = PKG + sqlFile;
  InputStream input = TableCompareIT.class.getClassLoader().getResourceAsStream(path);
  if (input == null) {
    throw new IOException("Resource not found in classpath: " + path);
  }
  String sql = TestUtilities.getStringFromInput(input);

  Database db = new Database(log, databaseMeta);
  db.connect();
  db.execStatements(sql);
  db.commit(true);
  db.disconnect();
}
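This method duplicates the body of setUp above; setUp could simply delegate to it:

@Before
public void setUp() throws Exception {
  // Equivalent to the hand-rolled setUp body, including the PKG prefix.
  executeSqlPrecondition("PDI-7255.sql");
}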
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
Class JobEntryWaitForSQL, method SQLDataOK:
protected boolean SQLDataOK(Result result, long nrRowsLimit, String realSchemaName, String realTableName, String customSQL) throws KettleException {
  String countStatement = null;
  long rowsCount = 0;
  boolean successOK = false;
  List<Object[]> ar = null;
  RowMetaInterface rowMeta = null;

  Database db = new Database(this, connection);
  db.shareVariablesWith(this);
  try {
    db.connect(parentJob.getTransactionId(), null);

    // Build the statement to run: either the user-supplied SQL or a row count
    // on the (optionally schema-qualified) table.
    if (iscustomSQL) {
      countStatement = customSQL;
    } else {
      if (!Utils.isEmpty(realSchemaName)) {
        countStatement = selectCount + db.getDatabaseMeta().getQuotedSchemaTableCombination(realSchemaName, realTableName);
      } else {
        countStatement = selectCount + db.getDatabaseMeta().quoteField(realTableName);
      }
    }

    if (countStatement != null) {
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.RunSQLStatement", countStatement));
      }

      if (iscustomSQL) {
        ar = db.getRows(countStatement, 0);
        if (ar != null) {
          rowsCount = ar.size();
        } else {
          if (log.isDebug()) {
            logDebug(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.customSQLreturnedNothing", countStatement));
          }
        }
      } else {
        RowMetaAndData row = db.getOneRow(countStatement);
        if (row != null) {
          rowsCount = row.getInteger(0);
        }
      }

      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.NrRowsReturned", "" + rowsCount));
      }

      // Evaluate the configured success condition against the row count.
      switch (successCondition) {
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_EQUAL:
          successOK = (rowsCount == nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_DIFFERENT:
          successOK = (rowsCount != nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_SMALLER:
          successOK = (rowsCount < nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_SMALLER_EQUAL:
          successOK = (rowsCount <= nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_GREATER:
          successOK = (rowsCount > nrRowsLimit);
          break;
        case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_GREATER_EQUAL:
          successOK = (rowsCount >= nrRowsLimit);
          break;
        default:
          break;
      }
    } // end if countStatement != null
  } catch (KettleDatabaseException dbe) {
    logError(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Error.RunningEntry", dbe.getMessage()));
  } finally {
    if (db != null) {
      if (isAddRowsResult && iscustomSQL && ar != null) {
        rowMeta = db.getQueryFields(countStatement, false);
      }
      db.disconnect();
    }
  }

  if (successOK) {
    // Add the fetched rows to the result if requested.
    if (isAddRowsResult && iscustomSQL && ar != null) {
      List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
      for (int i = 0; i < ar.size(); i++) {
        rows.add(new RowMetaAndData(rowMeta, ar.get(i)));
      }
      result.getRows().addAll(rows);
    }
  }
  return successOK;
}
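SQLDataOK is the per-check predicate; the job entry's execute() method is what loops until it returns true or the wait is abandoned. A simplified, hypothetical sketch of such a polling loop (the real JobEntryWaitForSQL.execute() also handles maximum timeouts, success-on-timeout, and logging; the interval below is illustrative):

// Hypothetical polling loop around the predicate shown above.
long checkIntervalMillis = 60 * 1000L; // illustrative polling interval
boolean conditionMet = false;
while (!conditionMet && !parentJob.isStopped()) {
  conditionMet = SQLDataOK(result, nrRowsLimit, realSchemaName, realTableName, customSQL);
  if (!conditionMet) {
    try {
      Thread.sleep(checkIntervalMillis);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      break;
    }
  }
}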