Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
From class MySQLBulkLoader, method execute:
/**
 * Prepares the bulk load: creates the FIFO file on the local filesystem (via "mkfifo" and
 * "chmod 666" so the MySQL server process can read from it), opens a database connection and
 * starts the load command.
 *
 * @param meta the step metadata (FIFO file name, target database connection, ...)
 * @return true when the load command was started successfully, false when no database
 *         connection is defined on the step
 * @throws KettleException when FIFO creation, connecting, or starting the load command fails
 */
public boolean execute(MySQLBulkLoaderMeta meta) throws KettleException {
  Runtime rt = Runtime.getRuntime();
  try {
    // Fail fast when no connection is defined. This guard must run before anything else
    // that dereferences meta.getDatabaseMeta(): previously the DB cache was cleared first,
    // which threw a NullPointerException instead of logging this friendly error.
    if (meta.getDatabaseMeta() == null) {
      logError(BaseMessages.getString(PKG, "MySQLBulkLoader.Init.ConnectionMissing", getStepname()));
      return false;
    }
    // 1) Create the FIFO file using the "mkfifo" command...
    // Make sure to log all the possible output, also from STDERR
    //
    data.fifoFilename = environmentSubstitute(meta.getFifoFileName());
    File fifoFile = new File(data.fifoFilename);
    if (!fifoFile.exists()) {
      // MKFIFO!
      // NOTE(review): building the command line by string concatenation breaks for file
      // names containing spaces or shell metacharacters; ProcessBuilder with an argument
      // list would be safer. Kept as-is to preserve existing behavior.
      String mkFifoCmd = "mkfifo " + data.fifoFilename;
      logBasic(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.CREATINGFIFO", data.dbDescription, mkFifoCmd));
      Process mkFifoProcess = rt.exec(mkFifoCmd);
      // Drain stdout/stderr on background threads so the child process cannot block
      // on a full pipe before waitFor() returns.
      StreamLogger errorLogger = new StreamLogger(log, mkFifoProcess.getErrorStream(), "mkFifoError");
      StreamLogger outputLogger = new StreamLogger(log, mkFifoProcess.getInputStream(), "mkFifoOutput");
      new Thread(errorLogger).start();
      new Thread(outputLogger).start();
      int result = mkFifoProcess.waitFor();
      if (result != 0) {
        throw new Exception(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.ERRORFIFORC", result, mkFifoCmd));
      }
      // Open up the FIFO's permissions so the (possibly differently-owned) MySQL
      // server process is allowed to read from it.
      String chmodCmd = "chmod 666 " + data.fifoFilename;
      logBasic(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.SETTINGPERMISSIONSFIFO", data.dbDescription, chmodCmd));
      Process chmodProcess = rt.exec(chmodCmd);
      errorLogger = new StreamLogger(log, chmodProcess.getErrorStream(), "chmodError");
      outputLogger = new StreamLogger(log, chmodProcess.getInputStream(), "chmodOutput");
      new Thread(errorLogger).start();
      new Thread(outputLogger).start();
      result = chmodProcess.waitFor();
      if (result != 0) {
        throw new Exception(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.ERRORFIFORC", result, chmodCmd));
      }
    }
    // 2) Make a connection to MySQL for sending SQL commands
    // (Also, we need a clear cache for getting up-to-date target metadata)
    DBCache.getInstance().clear(meta.getDatabaseMeta().getName());
    data.db = new Database(this, meta.getDatabaseMeta());
    data.db.shareVariablesWith(this);
    PluginInterface dbPlugin = PluginRegistry.getInstance().getPlugin(DatabasePluginType.class, meta.getDatabaseMeta().getDatabaseInterface());
    data.dbDescription = (dbPlugin != null) ? dbPlugin.getDescription() : BaseMessages.getString(PKG, "MySQLBulkLoader.UnknownDB");
    // Connect to the database. Unique connections are shared across the whole
    // transformation, so serialize the connect on the transformation object.
    if (getTransMeta().isUsingUniqueConnections()) {
      synchronized (getTrans()) {
        data.db.connect(getTrans().getTransactionId(), getPartitionID());
      }
    } else {
      data.db.connect(getPartitionID());
    }
    logBasic(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.CONNECTED", data.dbDescription));
    // 3) Now we are ready to run the load command...
    //
    executeLoadCommand();
  } catch (Exception ex) {
    throw new KettleException(ex);
  }
  return true;
}
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
From class MySQLBulkLoaderMeta, method getRequiredFields:
/**
 * Determines the row layout of the target table, which the bulk loader must match.
 * Opens a temporary connection, verifies the table exists, and reads its field metadata.
 *
 * @param space variable space used to resolve the configured schema/table names
 * @return the target table's field layout
 * @throws KettleException when no connection is defined, no table is specified,
 *         the table does not exist, or the metadata lookup fails
 */
public RowMetaInterface getRequiredFields(VariableSpace space) throws KettleException {
  String resolvedTable = space.environmentSubstitute(tableName);
  String resolvedSchema = space.environmentSubstitute(schemaName);
  // Guard: without a connection there is nothing to look up.
  if (databaseMeta == null) {
    throw new KettleException(BaseMessages.getString(PKG, "MySQLBulkLoaderMeta.Exception.ConnectionNotDefined"));
  }
  Database lookupDb = new Database(loggingObject, databaseMeta);
  try {
    lookupDb.connect();
    // Note: these KettleExceptions are deliberately raised inside the try so that,
    // exactly like before, they get rewrapped as "ErrorGettingFields" below.
    if (Utils.isEmpty(resolvedTable)) {
      throw new KettleException(BaseMessages.getString(PKG, "MySQLBulkLoaderMeta.Exception.TableNotSpecified"));
    }
    String schemaTable = databaseMeta.getQuotedSchemaTableCombination(resolvedSchema, resolvedTable);
    // Check if this table exists...
    if (!lookupDb.checkTableExists(schemaTable)) {
      throw new KettleException(BaseMessages.getString(PKG, "MySQLBulkLoaderMeta.Exception.TableNotFound"));
    }
    return lookupDb.getTableFields(schemaTable);
  } catch (Exception e) {
    throw new KettleException(BaseMessages.getString(PKG, "MySQLBulkLoaderMeta.Exception.ErrorGettingFields"), e);
  } finally {
    // Always release the temporary connection.
    lookupDb.disconnect();
  }
}
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
From class UpdateIT, method setUp:
/**
 * Builds the test fixture: an injector step wired to an Update step over the target table,
 * with the table created and pre-populated in the test database.
 * The fields db, upd, trans, rc and rp are initialized here for use by the test methods.
 */
@Override
@Before
public void setUp() throws Exception {
  KettleEnvironment.init();
  // --- Transformation skeleton --------------------------------------------
  TransMeta transMeta = new TransMeta();
  transMeta.setName("update test");
  // Register every configured test database connection.
  for (String databaseXML : databasesXML) {
    transMeta.addDatabase(new DatabaseMeta(databaseXML));
  }
  DatabaseMeta dbInfo = transMeta.findDatabase("db");
  // --- Target table: create and populate ----------------------------------
  db = new Database(transMeta, dbInfo);
  db.connect();
  String createTableSql = db.getCreateTableStatement(TARGET_TABLE, getTargetTableRowMeta(), null, false, null, true);
  db.execStatement(createTableSql);
  for (String insertSql : insertStatement) {
    db.execStatement(insertSql);
  }
  // --- Steps: injector -> update ------------------------------------------
  PluginRegistry registry = PluginRegistry.getInstance();
  String injectorStepName = "injector step";
  InjectorMeta injectorMeta = new InjectorMeta();
  String injectorPluginId = registry.getPluginId(StepPluginType.class, injectorMeta);
  StepMeta injectorStep = new StepMeta(injectorPluginId, injectorStepName, injectorMeta);
  transMeta.addStep(injectorStep);
  String updateStepName = "update [" + TARGET_TABLE + "]";
  upd = new UpdateMeta();
  upd.setDatabaseMeta(transMeta.findDatabase("db"));
  upd.setTableName(TARGET_TABLE);
  upd.setUpdateLookup(new String[] { "VALUE" });
  upd.setUpdateStream(new String[] { "VALUE" });
  upd.setErrorIgnored(true);
  String updatePluginId = registry.getPluginId(StepPluginType.class, upd);
  StepMeta updateStep = new StepMeta(updatePluginId, updateStepName, upd);
  updateStep.setDescription("update data in table [" + TARGET_TABLE + "] on database [" + dbInfo + "]");
  transMeta.addStep(updateStep);
  transMeta.addTransHop(new TransHopMeta(injectorStep, updateStep));
  // --- Prepare transformation execution -----------------------------------
  trans = new Trans(transMeta);
  trans.prepareExecution(null);
  StepInterface updateStepInterface = trans.getStepInterface(updateStepName, 0);
  rc = new RowStepCollector();
  updateStepInterface.addRowListener(rc);
  rp = trans.addRowProducer(injectorStepName, 0);
}
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
From class TableInputIT, method testTableInputWithParam:
/**
 * Test case for table input which is taking its input from a hop. This is a regression test case for JIRA PDI-588.
 *
 * The query in the table input step has one '?' and this parameter is filled by values read from an input hop.
 */
public void testTableInputWithParam() throws Exception {
  KettleEnvironment.init();
  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName("transname");
  // Add the database connections
  for (int i = 0; i < databasesXML.length; i++) {
    DatabaseMeta databaseMeta = new DatabaseMeta(databasesXML[i]);
    transMeta.addDatabase(databaseMeta);
  }
  DatabaseMeta dbInfo = transMeta.findDatabase("db");
  // Execute our setup SQLs in the database.
  Database database = new Database(transMeta, dbInfo);
  database.connect();
  // Fix: the original never disconnected this connection; release it in a finally
  // block so the test cannot leak a database connection on failure.
  try {
    createTables(database);
    createData(database);
    PluginRegistry registry = PluginRegistry.getInstance();
    //
    // create an injector step...
    //
    String injectorStepname = "injector step";
    InjectorMeta im = new InjectorMeta();
    // Set the information of the injector.
    String injectorPid = registry.getPluginId(StepPluginType.class, im);
    StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, im);
    transMeta.addStep(injectorStep);
    //
    // create the source step: a parameterized table input fed by the injector hop.
    //
    String fromstepname = "read from [" + source_table + "]";
    TableInputMeta tii = new TableInputMeta();
    tii.setDatabaseMeta(transMeta.findDatabase("db"));
    tii.setLookupFromStep(injectorStep);
    tii.setExecuteEachInputRow(true);
    String selectSQL = "SELECT " + Const.CR;
    selectSQL += "ID, CODE ";
    selectSQL += "FROM " + source_table + " WHERE CODE = ? ORDER BY ID, CODE;";
    tii.setSQL(selectSQL);
    String fromstepid = registry.getPluginId(StepPluginType.class, tii);
    StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
    fromstep.setDescription("Reads information from table [" + source_table + "] on database [" + dbInfo + "]");
    transMeta.addStep(fromstep);
    TransHopMeta hi = new TransHopMeta(injectorStep, fromstep);
    transMeta.addTransHop(hi);
    // Now execute the transformation...
    Trans trans = new Trans(transMeta);
    trans.prepareExecution(null);
    StepInterface si = trans.getStepInterface(fromstepname, 0);
    RowStepCollector rc = new RowStepCollector();
    si.addRowListener(rc);
    RowProducer rp = trans.addRowProducer(injectorStepname, 0);
    trans.startThreads();
    // add rows: each row fills the '?' parameter once.
    List<RowMetaAndData> inputList = createDataRows();
    for (RowMetaAndData rm : inputList) {
      rp.putRow(rm.getRowMeta(), rm.getData());
    }
    rp.finished();
    trans.waitUntilFinished();
    // Verify the rows written by the table input step against the expected result.
    List<RowMetaAndData> resultRows = rc.getRowsWritten();
    List<RowMetaAndData> goldRows = createResultDataRows();
    checkRows(goldRows, resultRows);
  } finally {
    database.disconnect();
  }
}
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
From class TableOutputIT, method testTableOutputJIRA897:
/**
 * Test case for normal table output where the table is included in the instream, but the tablename is not stored in
 * the table.
 */
public void testTableOutputJIRA897() throws Exception {
  KettleEnvironment.init();
  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName("table output JIRA897 test");
  // Add the database connections
  for (int i = 0; i < databasesXML.length; i++) {
    DatabaseMeta databaseMeta = new DatabaseMeta(databasesXML[i]);
    transMeta.addDatabase(databaseMeta);
  }
  DatabaseMeta dbInfo = transMeta.findDatabase("db");
  // Execute our setup SQLs in the database.
  Database database = new Database(transMeta, dbInfo);
  database.connect();
  // Fix: the original never disconnected this connection; release it in a finally
  // block (after checkResultsJIRA897, which still needs it) so the test cannot
  // leak a database connection on failure.
  try {
    createTable(database, target_table1, createSourceRowMetaInterface1());
    createTable(database, target_table2, createSourceRowMetaInterface1());
    PluginRegistry registry = PluginRegistry.getInstance();
    //
    // create an injector step...
    //
    String injectorStepname = "injector step";
    InjectorMeta im = new InjectorMeta();
    // Set the information of the injector.
    String injectorPid = registry.getPluginId(StepPluginType.class, im);
    StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, im);
    transMeta.addStep(injectorStep);
    //
    // create the output step: the target table name comes from the "TABLE" field
    // of each row and must NOT be written into the table itself.
    //
    String outputname = "output to [" + target_table1 + "] and [" + target_table2 + "]";
    TableOutputMeta tom = new TableOutputMeta();
    tom.setDatabaseMeta(transMeta.findDatabase("db"));
    tom.setTableNameInField(true);
    tom.setTableNameField("TABLE");
    tom.setTableNameInTable(false);
    String fromid = registry.getPluginId(StepPluginType.class, tom);
    StepMeta fromstep = new StepMeta(fromid, outputname, tom);
    fromstep.setDescription("write data to tables on database [" + dbInfo + "]");
    transMeta.addStep(fromstep);
    TransHopMeta hi = new TransHopMeta(injectorStep, fromstep);
    transMeta.addTransHop(hi);
    // Now execute the transformation...
    Trans trans = new Trans(transMeta);
    trans.prepareExecution(null);
    StepInterface si = trans.getStepInterface(outputname, 0);
    RowStepCollector rc = new RowStepCollector();
    si.addRowListener(rc);
    RowProducer rp = trans.addRowProducer(injectorStepname, 0);
    trans.startThreads();
    // add rows
    List<RowMetaAndData> inputList = createJIRA897DataRows();
    for (RowMetaAndData rm : inputList) {
      rp.putRow(rm.getRowMeta(), rm.getData());
    }
    rp.finished();
    trans.waitUntilFinished();
    List<RowMetaAndData> resultRows = rc.getRowsWritten();
    // The name of the table should still be in here.
    List<RowMetaAndData> goldRows = createJIRA897DataRows();
    checkRows(goldRows, resultRows);
    checkResultsJIRA897(database);
  } finally {
    database.disconnect();
  }
}
Aggregations