Use of org.pentaho.di.core.database.Database in project pentaho-platform by pentaho.
The class Custom1, method execSQL.
@SuppressWarnings("unused")
private void execSQL(TransMeta transMeta, String targetDatabaseName) throws KettleStepException, KettleDatabaseException {
  // OK, What's the SQL we need to execute to generate the target table?
  String sql = transMeta.getSQLStatementsString();
  // Execute the SQL on the target table:
  Database targetDatabase = new Database(new LoggingObject("Custom1"), transMeta.findDatabase(targetDatabaseName));
  targetDatabase.connect();
  targetDatabase.execStatements(sql);
}
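Note the snippet never releases the connection. A minimal sketch of the same connect-and-execute pattern with an explicit disconnect, using only calls shown in this section; the connection name "target_db" and the log subject "Example" are illustrative, not taken from the project:

// Sketch: run the generated DDL and always release the connection.
// "target_db" and "Example" are placeholder names (assumptions).
Database db = new Database(new LoggingObject("Example"), transMeta.findDatabase("target_db"));
try {
  db.connect();
  db.execStatements(transMeta.getSQLStatementsString());
} finally {
  db.disconnect();
}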
Use of org.pentaho.di.core.database.Database in project pdi-dataservice-server-plugin by pentaho.
The class TableInputParameterGenerationTest, method setUp.
@Before
public void setUp() throws Exception {
  // Setup Mock Step and Data
  data = new TableInputData();
  when(stepInterface.getLogLevel()).thenReturn(LogLevel.NOTHING);
  data.db = new Database(stepInterface, databaseMeta);
  // Add mock connection to connection map, prevent an actual connection attempt
  data.db.setConnection(mock(Connection.class));
  data.db.setConnectionGroup(MOCK_CONNECTION_GROUP);
  data.db.setPartitionId(MOCK_PARTITION_ID);
  when(stepInterface.getStepDataInterface()).thenReturn(data);
  service.dbMeta = databaseMeta;
  when(databaseMeta.quoteField(anyString())).thenAnswer(new Answer<String>() {
    @Override
    public String answer(InvocationOnMock invocation) throws Throwable {
      return (String) invocation.getArguments()[0];
    }
  });
  DatabaseConnectionMap connectionMap = DatabaseConnectionMap.getInstance();
  connectionMap.getMap().clear();
  connectionMap.storeDatabase(MOCK_CONNECTION_GROUP, MOCK_PARTITION_ID, data.db);
  setupValueMetaResolverMock();
}
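Stripped of the test fixture, the core of this mocking pattern is small. A sketch, assuming Mockito plus the Kettle classes used above; the group and partition ids are made up, and the getDatabase lookup is assumed to be the retrieval counterpart of storeDatabase:

// Sketch: back a Database with a mocked JDBC Connection and register it,
// so code resolving connections via DatabaseConnectionMap never opens a
// real connection. "group-1" and "part-1" are placeholder ids.
Database db = new Database(stepInterface, databaseMeta);
db.setConnection(mock(Connection.class));
db.setConnectionGroup("group-1");
db.setPartitionId("part-1");
DatabaseConnectionMap map = DatabaseConnectionMap.getInstance();
map.storeDatabase("group-1", "part-1", db);
// Collaborators can then look the shared instance up by group and partition:
Database shared = map.getDatabase("group-1", "part-1");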
Use of org.pentaho.di.core.database.Database in project data-access by pentaho.
The class MultitableDatasourceService, method retrieveSchemas.
public List<String> retrieveSchemas(IDatabaseConnection connection) throws DatasourceServiceException {
  List<String> schemas = new ArrayList<String>();
  try {
    DatabaseMeta databaseMeta = this.getDatabaseMeta(connection);
    Database database = new Database(null, databaseMeta);
    database.connect();
    Map<String, Collection<String>> tableMap = database.getTableMap(null, this.isDataServicesConnection(connection) ? new HashMap<String, String>() {
      {
        put("STREAMING", "N");
      }
    } : null);
    // database.getSchemas()
    Set<String> schemaNames = tableMap.keySet();
    schemas.addAll(schemaNames);
    database.disconnect();
  } catch (KettleDatabaseException e) {
    logger.error("Error creating database object", e);
    throw new DatasourceServiceException(e);
  } catch (ConnectionServiceException e) {
    logger.error("Error getting database meta", e);
    throw new DatasourceServiceException(e);
  }
  return schemas;
}
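Note that disconnect() above is skipped if getTableMap throws. A hedged variant that always releases the connection, built only from the calls shown in this snippet:

// Sketch: list schema names, disconnecting even when the lookup fails.
// databaseMeta is assumed to come from getDatabaseMeta(connection) as above.
Database database = new Database(null, databaseMeta);
database.connect();
try {
  Map<String, Collection<String>> tableMap = database.getTableMap(null, null);
  schemas.addAll(tableMap.keySet());
} finally {
  database.disconnect();
}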
Use of org.pentaho.di.core.database.Database in project data-access by pentaho.
The class CsvTransformGeneratorIT, method testLoadTable1.
public void testLoadTable1() throws Exception {
  IPentahoSession session = new StandaloneSession("test");
  KettleSystemListener.environmentInit(session);
  ModelInfo info = createModel();
  CsvTransformGenerator gen = new CsvTransformGenerator(info, getDatabaseMeta());
  // create the model
  String tableName = info.getStageTableName();
  try {
    gen.execSqlStatement(getDropTableStatement(tableName), getDatabaseMeta(), null);
  } catch (CsvTransformGeneratorException e) {
    // table might not be there yet, it is OK
  }
  // generate the database table
  gen.createOrModifyTable(session);
  // load the table
  loadTable(gen, info, true, session);
  // check the results
  long rowCount = this.getRowCount(tableName);
  assertEquals((long) 235, rowCount);
  DatabaseMeta databaseMeta = getDatabaseMeta();
  assertNotNull(databaseMeta);
  Database database = new Database(databaseMeta);
  assertNotNull(database);
  database.connect();
  Connection connection = null;
  Statement stmt = null;
  ResultSet sqlResult = null;
  try {
    connection = database.getConnection();
    assertNotNull(connection);
    stmt = database.getConnection().createStatement();
    // check the first row
    Date testDate = new Date();
    testDate.setDate(1);
    testDate.setHours(0);
    testDate.setMinutes(0);
    testDate.setMonth(0);
    testDate.setSeconds(0);
    testDate.setYear(110);
    boolean ok = stmt.execute("select * from " + tableName);
    assertTrue(ok);
    sqlResult = stmt.getResultSet();
    assertNotNull(sqlResult);
    ok = sqlResult.next();
    assertTrue(ok);
    // test the values
    assertEquals((long) 3, sqlResult.getLong(1));
    assertEquals(25677.96525, sqlResult.getDouble(2));
    assertEquals((long) 1231, sqlResult.getLong(3));
    assertEquals(testDate.getYear(), sqlResult.getDate(4).getYear());
    assertEquals(testDate.getMonth(), sqlResult.getDate(4).getMonth());
    assertEquals(testDate.getDate(), sqlResult.getDate(4).getDate());
    assertEquals(testDate.getHours(), sqlResult.getTime(4).getHours());
    // assertEquals( testDate.getMinutes(), ((Date) cells[3]).getMinutes() ); this fails, a bug in the PDI date parsing?
    assertEquals(testDate.getSeconds(), sqlResult.getTime(4).getSeconds());
    // assertEquals( testDate, cells[3] );
    assertEquals("Afghanistan", sqlResult.getString(5));
    assertEquals((long) 11, sqlResult.getLong(6));
    assertEquals(111.9090909, sqlResult.getDouble(7));
    assertEquals(false, sqlResult.getBoolean(8));
  } finally {
    // null-guard the close calls so an early assertion failure
    // does not trigger a NullPointerException in cleanup
    if (sqlResult != null) {
      sqlResult.close();
    }
    if (stmt != null) {
      stmt.close();
    }
    if (connection != null) {
      connection.close();
    }
  }
}
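On a Java 7+ codebase the JDBC bookkeeping in tests like this compresses with try-with-resources; a sketch, with "my_table" standing in for the staging table name and database assumed to be a connected Kettle Database as above:

// Sketch: query through the Database's JDBC connection, letting
// try-with-resources close the statement and result set.
database.connect();
try (Statement stmt = database.getConnection().createStatement();
     ResultSet rs = stmt.executeQuery("select * from my_table")) {
  assertTrue(rs.next());
  assertEquals("Afghanistan", rs.getString(5));
} finally {
  database.disconnect();
}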
Use of org.pentaho.di.core.database.Database in project data-access by pentaho.
The class CsvTransformGeneratorIT, method getRowCount.
private long getRowCount(String tableName) throws Exception {
  DatabaseMeta databaseMeta = getDatabaseMeta();
  assertNotNull(databaseMeta);
  Database database = new Database(databaseMeta);
  assertNotNull(database);
  database.connect();
  Connection connection = null;
  Statement stmt = null;
  ResultSet sqlResult = null;
  try {
    connection = database.getConnection();
    assertNotNull(connection);
    stmt = database.getConnection().createStatement();
    boolean ok = stmt.execute("select count(*) from " + tableName);
    assertTrue(ok);
    sqlResult = stmt.getResultSet();
    assertNotNull(sqlResult);
    ok = sqlResult.next();
    assertTrue(ok);
    return sqlResult.getLong(1);
  } finally {
    if (sqlResult != null) {
      sqlResult.close();
    }
    if (stmt != null) {
      stmt.close();
    }
    if (connection != null) {
      connection.close();
    }
  }
}
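The same count could also go through Database's own query helpers rather than raw JDBC; a sketch, assuming openQuery, getRow, and closeQuery behave as they do elsewhere in Kettle step code, again with an illustrative table name:

// Sketch: count rows via Kettle's query helpers. getRow is assumed to
// return the next row as an Object[] (null when the result set is done).
database.connect();
try {
  ResultSet rs = database.openQuery("select count(*) from my_table");
  Object[] row = database.getRow(rs);
  database.closeQuery(rs);
  long rowCount = row == null ? 0L : ((Number) row[0]).longValue();
} finally {
  database.disconnect();
}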