Usage of org.pentaho.di.core.SQLStatement in project pentaho-kettle (by Pentaho): class SQLFileOutputMeta, method getSQLStatements.
/**
 * Builds the DDL needed to create or alter the target table so it can hold the
 * incoming row layout, or records an error message on the returned statement
 * when a connection, input fields, or a table name are missing.
 *
 * @param transMeta  transformation metadata (shared variable space)
 * @param stepMeta   this step's metadata; its name labels the returned statement
 * @param prev       the fields arriving from the previous step
 * @param repository unused here
 * @param metaStore  unused here
 * @return a SQLStatement carrying either the generated SQL (null when nothing
 *         to do) or an error message — never null itself
 */
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore) {
    SQLStatement result = new SQLStatement(stepMeta.getName(), databaseMeta, null);
    // Guard clauses: report the first missing prerequisite and bail out.
    if (databaseMeta == null) {
        result.setError(BaseMessages.getString(PKG, "SQLFileOutputMeta.Error.NoConnection"));
        return result;
    }
    if (prev == null || prev.size() == 0) {
        result.setError(BaseMessages.getString(PKG, "SQLFileOutputMeta.Error.NoInput"));
        return result;
    }
    if (Utils.isEmpty(tablename)) {
        result.setError(BaseMessages.getString(PKG, "SQLFileOutputMeta.Exception.TableNotSpecified"));
        return result;
    }
    Database db = new Database(loggingObject, databaseMeta);
    db.shareVariablesWith(transMeta);
    try {
        db.connect();
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tablename);
        String ddl = db.getDDL(schemaTable, prev);
        // An empty DDL string means the table already matches: report null SQL.
        if (ddl == null || ddl.length() == 0) {
            result.setSQL(null);
        } else {
            result.setSQL(ddl);
        }
    } catch (KettleDatabaseException dbe) {
        result.setError(BaseMessages.getString(PKG, "SQLFileOutputMeta.Error.ErrorConnecting", dbe.getMessage()));
    } finally {
        // Always release the connection, whether DDL generation succeeded or not.
        db.disconnect();
    }
    return result;
}
Usage of org.pentaho.di.core.SQLStatement in project pentaho-kettle (by Pentaho): class SynchronizeAfterMergeMeta, method getSQLStatements.
/**
 * Generates the DDL (CREATE/ALTER TABLE plus an optional lookup index) needed
 * for the target table to accept the key and update fields, or records an
 * error on the returned statement when prerequisites are missing.
 *
 * <p>Fix: the database connection is now released in a {@code finally} block;
 * previously {@code db.connect()} was never paired with a disconnect, leaking
 * the connection on both the success and the exception path (the
 * SQLFileOutputMeta variant of this method already disconnects in finally).</p>
 *
 * @param transMeta  transformation metadata
 * @param stepMeta   this step's metadata; its name labels the returned statement
 * @param prev       the fields arriving from the previous step
 * @param repository unused here
 * @param metaStore  unused here
 * @return a SQLStatement carrying either the generated SQL (null when nothing
 *         to do) or an error message
 * @throws KettleStepException when a key or update stream field is absent from
 *         the input rows
 */
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore) throws KettleStepException {
    // default: nothing to do!
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null);
    if (databaseMeta != null) {
        if (prev != null && prev.size() > 0) {
            // Build the table layout from the incoming row: key fields first...
            RowMetaInterface tableFields = new RowMeta();
            if (keyLookup != null) {
                for (int i = 0; i < keyLookup.length; i++) {
                    ValueMetaInterface v = prev.searchValueMeta(keyStream[i]);
                    if (v != null) {
                        ValueMetaInterface tableField = v.clone();
                        tableField.setName(keyLookup[i]);
                        tableFields.addValueMeta(tableField);
                    } else {
                        throw new KettleStepException("Unable to find field [" + keyStream[i] + "] in the input rows");
                    }
                }
            }
            // ...then the update/lookup fields.
            for (int i = 0; i < updateLookup.length; i++) {
                ValueMetaInterface v = prev.searchValueMeta(updateStream[i]);
                if (v != null) {
                    ValueMetaInterface vk = tableFields.searchValueMeta(updateStream[i]);
                    if (vk == null) {
                        // do not add again when already added as key fields
                        ValueMetaInterface tableField = v.clone();
                        tableField.setName(updateLookup[i]);
                        tableFields.addValueMeta(tableField);
                    }
                } else {
                    throw new KettleStepException("Unable to find field [" + updateStream[i] + "] in the input rows");
                }
            }
            if (!Utils.isEmpty(tableName)) {
                Database db = new Database(loggingObject, databaseMeta);
                try {
                    db.connect();
                    String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
                    String cr_table = db.getDDL(schemaTable, tableFields, null, false, null, true);
                    String cr_index = "";
                    String[] idx_fields = null;
                    if (keyLookup != null && keyLookup.length > 0) {
                        idx_fields = new String[keyLookup.length];
                        for (int i = 0; i < keyLookup.length; i++) {
                            idx_fields[i] = keyLookup[i];
                        }
                    } else {
                        retval.setError(BaseMessages.getString(PKG, "SynchronizeAfterMergeMeta.CheckResult.MissingKeyFields"));
                    }
                    // Key lookup dimensions: only create the index if it is missing.
                    if (idx_fields != null && idx_fields.length > 0 && !db.checkIndexExists(schemaName, tableName, idx_fields)) {
                        String indexname = "idx_" + tableName + "_lookup";
                        cr_index = db.getCreateIndexStatement(schemaTable, indexname, idx_fields, false, false, false, true);
                    }
                    String sql = cr_table + cr_index;
                    // Empty SQL means nothing to do: report it as null.
                    if (sql.length() == 0) {
                        retval.setSQL(null);
                    } else {
                        retval.setSQL(sql);
                    }
                } catch (KettleException e) {
                    retval.setError(BaseMessages.getString(PKG, "SynchronizeAfterMergeMeta.ReturnValue.ErrorOccurred") + e.getMessage());
                } finally {
                    // Release the connection on every path (was previously leaked).
                    db.disconnect();
                }
            } else {
                retval.setError(BaseMessages.getString(PKG, "SynchronizeAfterMergeMeta.ReturnValue.NoTableDefinedOnConnection"));
            }
        } else {
            retval.setError(BaseMessages.getString(PKG, "SynchronizeAfterMergeMeta.ReturnValue.NotReceivingAnyFields"));
        }
    } else {
        retval.setError(BaseMessages.getString(PKG, "SynchronizeAfterMergeMeta.ReturnValue.NoConnectionDefined"));
    }
    return retval;
}
Usage of org.pentaho.di.core.SQLStatement in project pentaho-kettle (by Pentaho): class TableAgileMart, method adjustSchema.
/**
 * Attempts to automatically adjust the target table structure to match the
 * incoming row layout by generating DDL from the step metadata and executing
 * it on the step's database connection.
 *
 * <p>Fix: the original {@code else if (statement.getError() == null)} branch
 * dereferenced {@code statement} even though the preceding {@code if} had
 * established it could be null, causing a NullPointerException when
 * {@code getSQLStatements} returned null. A null statement is now treated the
 * same as "no changes needed".</p>
 *
 * @return true when the table needed no changes or was adjusted successfully;
 *         false when SQL execution failed or the statement carried an error
 */
@Override
public boolean adjustSchema() {
    TableOutputMeta meta = getMeta();
    TableOutputData data = getData();
    TransMeta transMeta = getTransMeta();
    StepMeta stepMeta = meta.getParentStepMeta();
    DBCache dbcache = transMeta.getDbCache();
    StringBuilder messageBuffer = new StringBuilder();
    try {
        RowMetaInterface prev = transMeta.getPrevStepFields(stepMeta.getName());
        if (log.isDetailed()) {
            logDetailed("Attempting to auto adjust table structure");
        }
        if (log.isDetailed()) {
            logDetailed("getTransMeta: " + getTransMeta());
        }
        if (log.isDetailed()) {
            logDetailed("getStepname: " + getStepname());
        }
        SQLStatement statement = meta.getSQLStatements(transMeta, stepMeta, prev, repository, metaStore);
        if (log.isDetailed()) {
            logDetailed("Statement: " + statement);
        }
        if (log.isDetailed() && statement != null) {
            logDetailed("Statement has SQL: " + statement.hasSQL());
        }
        if (statement != null && statement.hasSQL()) {
            String sql = statement.getSQL();
            if (log.isDetailed()) {
                logDetailed("Trying: " + sql);
            }
            try {
                log.logDetailed("Executing SQL: " + Const.CR + sql);
                data.db.execStatement(sql);
                // Clear the database cache, in case we're using one...
                if (dbcache != null) {
                    dbcache.clear(data.databaseMeta.getName());
                }
            } catch (Exception dbe) {
                String error = BaseMessages.getString(PKG, "SQLEditor.Log.SQLExecError", sql, dbe.toString());
                messageBuffer.append(error).append(Const.CR);
                return false;
            }
            if (log.isDetailed()) {
                logDetailed("Successfull: " + sql);
            }
        } else if (statement == null || statement.getError() == null) {
            // No statement or no error: there were no changes to be made.
            return true;
        } else {
            this.message = statement.getError();
            logError(statement.getError());
            return false;
        }
    } catch (Exception e) {
        // NOTE(review): errors here are logged but the method still returns
        // true — confirm callers intend a failed lookup to count as success.
        logError("An error ocurred trying to adjust the table schema", e);
    }
    return true;
}
Usage of org.pentaho.di.core.SQLStatement in project data-access (by Pentaho): class StagingTransformGenerator, method executeSql.
/**
 * Generates the table-creation SQL for the staging table output step and
 * executes it against the step's database connection.
 *
 * @param meta      the table output step metadata used to generate the SQL
 * @param stepMeta  the step whose preceding fields define the table layout
 * @param transMeta the transformation containing the step
 * @throws CsvTransformGeneratorException when SQL generation reports an error,
 *         no SQL is produced, or a Kettle error occurs along the way
 */
protected void executeSql(TableOutputMeta meta, StepMeta stepMeta, TransMeta transMeta) throws CsvTransformGeneratorException {
    try {
        RowMetaInterface prev = transMeta.getPrevStepFields(TABLE_OUTPUT);
        SQLStatement sqlStatement = meta.getSQLStatements(transMeta, stepMeta, prev, null, false, null);
        // A statement-level error takes precedence over everything else.
        if (sqlStatement.hasError()) {
            error(sqlStatement.getError());
            throw new CsvTransformGeneratorException(sqlStatement.getError());
        }
        if (!sqlStatement.hasSQL()) {
            // No SQL was generated
            // $NON-NLS-1$
            error("No SQL generated");
            // $NON-NLS-1$
            throw new CsvTransformGeneratorException("No SQL generated");
        }
        // now we can execute the SQL
        execSqlStatement(sqlStatement.getSQL(), meta.getDatabaseMeta(), null);
    } catch (KettleException ke) {
        // $NON-NLS-1$
        error("Exception encountered", ke);
        throw new CsvTransformGeneratorException("Exception encountered", ke, // $NON-NLS-1$
            getStackTraceAsString(ke));
    }
}
Usage of org.pentaho.di.core.SQLStatement in project pentaho-kettle (by Pentaho): class UpdateMeta, method getSQLStatements.
/**
 * Generates the DDL (CREATE/ALTER TABLE plus an optional lookup index) needed
 * for the update target table, or records an error on the returned statement
 * when prerequisites are missing.
 *
 * <p>Fix: the database connection is now released in a {@code finally} block;
 * previously {@code db.connect()} was never paired with a disconnect, leaking
 * the connection on both the success and the exception path (the
 * SQLFileOutputMeta variant of this method already disconnects in finally).</p>
 *
 * @param transMeta  transformation metadata (shared variable space)
 * @param stepMeta   this step's metadata; its name labels the returned statement
 * @param prev       the fields arriving from the previous step
 * @param repository unused here
 * @param metaStore  unused here
 * @return a SQLStatement carrying either the generated SQL (null when nothing
 *         to do) or an error message
 * @throws KettleStepException when the key/update row layout cannot be built
 */
@Override
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore) throws KettleStepException {
    // default: nothing to do!
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null);
    if (databaseMeta != null) {
        if (prev != null && prev.size() > 0) {
            // Copy the row: key fields plus update fields, named as in the table.
            RowMetaInterface tableFields = RowMetaUtils.getRowMetaForUpdate(prev, keyLookup, keyStream, updateLookup, updateStream);
            if (!Utils.isEmpty(tableName)) {
                String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
                Database db = new Database(loggingObject, databaseMeta);
                db.shareVariablesWith(transMeta);
                try {
                    db.connect();
                    // NOTE(review): the ignore-flag boolean is added to prev AFTER
                    // tableFields was built, so it never reaches the DDL — confirm
                    // this is intentional.
                    if (getIgnoreFlagField() != null && getIgnoreFlagField().length() > 0) {
                        prev.addValueMeta(new ValueMetaBoolean(getIgnoreFlagField()));
                    }
                    String cr_table = db.getDDL(schemaTable, tableFields, null, false, null, true);
                    String cr_index = "";
                    String[] idx_fields = null;
                    if (keyLookup != null && keyLookup.length > 0) {
                        idx_fields = new String[keyLookup.length];
                        for (int i = 0; i < keyLookup.length; i++) {
                            idx_fields[i] = keyLookup[i];
                        }
                    } else {
                        retval.setError(BaseMessages.getString(PKG, "UpdateMeta.CheckResult.MissingKeyFields"));
                    }
                    // Key lookup dimensions: only create the index if it is missing.
                    if (idx_fields != null && idx_fields.length > 0 && !db.checkIndexExists(schemaTable, idx_fields)) {
                        String indexname = "idx_" + tableName + "_lookup";
                        cr_index = db.getCreateIndexStatement(schemaTable, indexname, idx_fields, false, false, false, true);
                    }
                    String sql = cr_table + cr_index;
                    // Empty SQL means nothing to do: report it as null.
                    if (sql.length() == 0) {
                        retval.setSQL(null);
                    } else {
                        retval.setSQL(sql);
                    }
                } catch (KettleException e) {
                    retval.setError(BaseMessages.getString(PKG, "UpdateMeta.ReturnValue.ErrorOccurred") + e.getMessage());
                } finally {
                    // Release the connection on every path (was previously leaked).
                    db.disconnect();
                }
            } else {
                retval.setError(BaseMessages.getString(PKG, "UpdateMeta.ReturnValue.NoTableDefinedOnConnection"));
            }
        } else {
            retval.setError(BaseMessages.getString(PKG, "UpdateMeta.ReturnValue.NotReceivingAnyFields"));
        }
    } else {
        retval.setError(BaseMessages.getString(PKG, "UpdateMeta.ReturnValue.NoConnectionDefined"));
    }
    return retval;
}
Aggregations