Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class JobEntryColumnsExistTest, method setUp:
@Before
public void setUp() {
  // Spy on the job entry and attach it to a parent job so execute() can resolve its context.
  Job parentJob = new Job(null, new JobMeta());
  jobEntry = spy(new JobEntryColumnsExist(""));
  parentJob.getJobMeta().addJobEntry(new JobEntryCopy(jobEntry));
  parentJob.setStopped(false);
  jobEntry.setParentJob(parentJob);
  parentJob.setLogLevel(LogLevel.NOTHING);

  // Use a mocked DatabaseMeta and a spied Database so no real connection is opened.
  DatabaseMeta dbMeta = mock(DatabaseMeta.class);
  jobEntry.setDatabase(dbMeta);
  db = spy(new Database(jobEntry, dbMeta));

  jobEntry.setTablename(TABLENAME);
  jobEntry.setArguments(COLUMNS);
  jobEntry.setSchemaname(SCHEMANAME);
}
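The constants and fields referenced in setUp() are declared elsewhere in the test class; a plausible shape of those declarations, with the concrete values being assumptions rather than the project's actual ones:
// Hypothetical field declarations backing the setUp() above (values are assumptions).
private static final String TABLENAME = "TABLE1";
private static final String SCHEMANAME = "SCHEMA1";
private static final String[] COLUMNS = new String[] { "COLUMN1", "COLUMN2" };

private JobEntryColumnsExist jobEntry;
private Database db;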
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class PDI_11152_Test, method testInputLazyConversion:
@Test
public void testInputLazyConversion() throws KettleException {
  // Mock the database lookup: it returns a Timestamp and reports a Date return-row meta.
  Database db = mock(Database.class);
  RowMeta returnRowMeta = new RowMeta();
  doReturn(new Object[] { new Timestamp(System.currentTimeMillis()) }).when(db).getLookup(any(PreparedStatement.class));
  returnRowMeta.addValueMeta(new ValueMetaDate("TimeStamp"));
  doReturn(returnRowMeta).when(db).getReturnRowMeta();

  // Incoming field is a lazily converted Date stored as a binary string with a yyyy-MM-dd mask.
  ValueMetaString storageMetadata = new ValueMetaString("Date");
  storageMetadata.setConversionMask("yyyy-MM-dd");
  ValueMetaDate valueMeta = new ValueMetaDate("Date");
  valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
  valueMeta.setStorageMetadata(storageMetadata);
  RowMeta inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta(valueMeta);

  // Wire the mocked database and row metadata into the Update step's data object.
  UpdateMeta stepMeta = smh.processRowsStepMetaInterface;
  UpdateData stepData = smh.processRowsStepDataInterface;
  stepData.lookupParameterRowMeta = inputRowMeta;
  stepData.db = db;
  stepData.keynrs = stepData.valuenrs = new int[] { 0 };
  stepData.keynrs2 = new int[] { -1 };
  stepData.updateParameterRowMeta = when(mock(RowMeta.class).size()).thenReturn(2).getMock();

  Update step = new Update(smh.stepMeta, smh.stepDataInterface, 0, smh.transMeta, smh.trans);
  step.setInputRowMeta(inputRowMeta);
  step.addRowSetToInputRowSets(smh.getMockInputRowSet(new Object[] { "2013-12-20".getBytes() }));
  step.init(smh.initStepMetaInterface, smh.initStepDataInterface);
  step.first = false;

  Assert.assertTrue("Failure during row processing", step.processRow(stepMeta, stepData));
}
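The smh field used above is a StepMockHelper<UpdateMeta, UpdateData> created elsewhere in the test class; a minimal sketch of that setup, where the logging stubs are an assumption about what the helper needs rather than a copy of the project's code:
// Hypothetical mock-helper setup for the test above (exact wiring is an assumption).
private StepMockHelper<UpdateMeta, UpdateData> smh;

@Before
public void setUp() {
  smh = new StepMockHelper<>("Update", UpdateMeta.class, UpdateData.class);
  when(smh.logChannelInterfaceFactory.create(any(), any(LoggingObjectInterface.class)))
    .thenReturn(smh.logChannelInterface);
  when(smh.trans.isRunning()).thenReturn(true);
}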
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class TableInputMetaTest, method testGetFields:
@Test
public void testGetFields() throws Exception {
  // The handler exposes a mocked Database so getFields() never opens a real connection.
  TableInputMetaHandler meta = new TableInputMetaHandler();
  meta.setLazyConversionActive(true);
  DatabaseMeta dbMeta = mock(DatabaseMeta.class);
  meta.setDatabaseMeta(dbMeta);
  Database mockDB = meta.getDatabase();
  when(mockDB.getQueryFields(anyString(), anyBoolean())).thenReturn(createMockFields());

  // With lazy conversion active, the string field is expected back as binary-string storage.
  RowMetaInterface expectedRowMeta = new RowMeta();
  ValueMetaInterface valueMeta = new ValueMetaString("field1");
  valueMeta.setStorageMetadata(new ValueMetaString("field1"));
  valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
  expectedRowMeta.addValueMeta(valueMeta);

  VariableSpace space = mock(VariableSpace.class);
  RowMetaInterface rowMetaInterface = new RowMeta();
  meta.getFields(rowMetaInterface, "TABLE_INPUT_META", null, null, space, null, null);

  assertEquals(expectedRowMeta.toString(), rowMetaInterface.toString());
}
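TableInputMetaHandler and createMockFields() are not shown in this excerpt; a minimal sketch of what they could look like, assuming the handler does nothing more than return a Mockito mock from getDatabase() and the helper returns a single "field1" string column (assumptions, not copies of the project's helpers):
// Hypothetical test double: hand out a mocked Database instead of opening a connection.
public static class TableInputMetaHandler extends TableInputMeta {
  Database database = mock(Database.class);

  @Override
  protected Database getDatabase() {
    return database;
  }
}

// Hypothetical helper matching the expectation built in the test.
private RowMetaInterface createMockFields() {
  RowMetaInterface rowMeta = new RowMeta();
  rowMeta.addValueMeta(new ValueMetaString("field1"));
  return rowMeta;
}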
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class BaseLogTable, method generateTableSQL:
public StringBuilder generateTableSQL(LogTableInterface logTable, AbstractMeta meta) throws KettleException {
  StringBuilder ddl = new StringBuilder();
  if (logTable.getDatabaseMeta() != null && !Utils.isEmpty(logTable.getTableName())) {
    // OK, we have something to work with!
    try (Database db = new Database(meta, logTable.getDatabaseMeta())) {
      db.shareVariablesWith(meta);
      db.connect();
      RowMetaInterface columns = logTable.getLogRecord(LogStatus.START, null, null).getRowMeta();
      String logTableName = db.environmentSubstitute(logTable.getTableName());
      String schemaTable = logTable.getDatabaseMeta().getQuotedSchemaTableCombination(db.environmentSubstitute(logTable.getSchemaName()), logTableName);
      String createTable = db.getDDL(schemaTable, columns);
      if (!Utils.isEmpty(createTable)) {
        ddl.append("-- ").append(logTable.getLogTableType()).append(Const.CR);
        ddl.append("--").append(Const.CR).append(Const.CR);
        ddl.append(createTable).append(Const.CR);
      }
      ddl.append(addIndicesToTable(logTable, schemaTable, db));
    }
  }
  return ddl;
}
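A minimal caller sketch, assuming a JobMeta named jobMeta is in scope and that its job log table inherits this method from BaseLogTable (assumptions, not from the source); it feeds the generated DDL back into Database.execStatements(String):
// Hypothetical caller: build the log-table DDL and run it (names are assumptions).
// This fragment sits inside a method that declares throws KettleException.
JobLogTable logTable = jobMeta.getJobLogTable();
StringBuilder ddl = logTable.generateTableSQL(logTable, jobMeta);
if (ddl.length() > 0) {
  try (Database db = new Database(jobMeta, logTable.getDatabaseMeta())) {
    db.connect();
    db.execStatements(ddl.toString()); // executes the CREATE TABLE and index statements
  }
}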
Use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.
The class AsyncDatabaseAction, method internalExec:
private static Void internalExec(DatabaseMeta databaseMeta, Consumer<Database> dbAction, LoggingObjectInterface log) {
  if (databaseMeta != null) {
    try (Database db = new Database(log, databaseMeta)) {
      db.connect();
      dbAction.accept(db);
    } catch (KettleDatabaseException e) {
      // On connection failure, log the error and hand the consumer a null so callers can react.
      logError(databaseMeta, e);
      dbAction.accept(null);
    }
  }
  return null;
}
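A minimal sketch of a Consumer<Database> that such a helper could receive; the null check matters because internalExec hands the consumer a null when the connection fails (the table-listing body is an illustrative assumption):
// Hypothetical action passed to internalExec (illustrative only).
Consumer<Database> listTables = db -> {
  if (db == null) {
    return; // connection failed; internalExec already logged the error
  }
  try {
    String[] tables = db.getTablenames(); // names of tables visible on this connection
    // ... consume the table names ...
  } catch (KettleDatabaseException e) {
    // surface or log the lookup failure as needed
  }
};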