
Example 76 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class DimensionLookup method dimInsert.

/**
 * Inserts a new record into the dimension. Optionally, if the entry already exists, updates the date range of the
 * previous version of the entry.
 */
public Long dimInsert(RowMetaInterface inputRowMeta, Object[] row, Long technicalKey, boolean newEntry, Long versionNr, Date dateFrom, Date dateTo) throws KettleException {
    DatabaseMeta databaseMeta = meta.getDatabaseMeta();
    if (data.prepStatementInsert == null && data.prepStatementUpdate == null) {
        // first time: construct prepared statement
        RowMetaInterface insertRowMeta = new RowMeta();
        /*
       * Construct the SQL statement...
       *
       * INSERT INTO d_customer(keyfield, versionfield, datefrom, dateto, key[], fieldlookup[], last_updated,
       * last_inserted, last_version) VALUES (val_key ,val_version , val_datfrom, val_datto, keynrs[], fieldnrs[],
       * last_updated, last_inserted, last_version) ;
       */
        String sql = "INSERT INTO " + data.schemaTable + "( ";
        if (!isAutoIncrement()) {
            // NO AUTOINCREMENT
            sql += databaseMeta.quoteField(meta.getKeyField()) + ", ";
            // the first return value, placed after the input fields
            insertRowMeta.addValueMeta(data.outputRowMeta.getValueMeta(inputRowMeta.size()));
        } else {
            if (databaseMeta.needsPlaceHolder()) {
                // placeholder on informix!
                sql += "0, ";
            }
        }
        sql += databaseMeta.quoteField(meta.getVersionField()) + ", " + databaseMeta.quoteField(meta.getDateFrom()) + ", " + databaseMeta.quoteField(meta.getDateTo());
        insertRowMeta.addValueMeta(new ValueMetaInteger(meta.getVersionField()));
        insertRowMeta.addValueMeta(new ValueMetaDate(meta.getDateFrom()));
        insertRowMeta.addValueMeta(new ValueMetaDate(meta.getDateTo()));
        for (int i = 0; i < meta.getKeyLookup().length; i++) {
            sql += ", " + databaseMeta.quoteField(meta.getKeyLookup()[i]);
            insertRowMeta.addValueMeta(inputRowMeta.getValueMeta(data.keynrs[i]));
        }
        for (int i = 0; i < meta.getFieldLookup().length; i++) {
            // Skip update types that do not take a value from the input stream.
            if (!DimensionLookupMeta.isUpdateTypeWithoutArgument(meta.isUpdate(), meta.getFieldUpdate()[i])) {
                sql += ", " + databaseMeta.quoteField(meta.getFieldLookup()[i]);
                insertRowMeta.addValueMeta(inputRowMeta.getValueMeta(data.fieldnrs[i]));
            }
        }
        // Add the special update columns (date inserted/updated, last-version flag) to the column list.
        for (int i = 0; i < meta.getFieldUpdate().length; i++) {
            ValueMetaInterface valueMeta = null;
            switch(meta.getFieldUpdate()[i]) {
                case DimensionLookupMeta.TYPE_UPDATE_DATE_INSUP:
                case DimensionLookupMeta.TYPE_UPDATE_DATE_INSERTED:
                    valueMeta = new ValueMetaDate(meta.getFieldLookup()[i]);
                    break;
                case DimensionLookupMeta.TYPE_UPDATE_LAST_VERSION:
                    valueMeta = new ValueMetaBoolean(meta.getFieldLookup()[i]);
                    break;
                default:
                    break;
            }
            if (valueMeta != null) {
                sql += ", " + databaseMeta.quoteField(valueMeta.getName());
                insertRowMeta.addValueMeta(valueMeta);
            }
        }
        sql += ") VALUES (";
        if (!isAutoIncrement()) {
            sql += "?, ";
        }
        sql += "?, ?, ?";
        for (int i = 0; i < data.keynrs.length; i++) {
            sql += ", ?";
        }
        for (int i = 0; i < meta.getFieldLookup().length; i++) {
            // Skip update types that do not take a value from the input stream.
            if (!DimensionLookupMeta.isUpdateTypeWithoutArgument(meta.isUpdate(), meta.getFieldUpdate()[i])) {
                sql += ", ?";
            }
        }
        // Placeholders for the special update columns.
        for (int i = 0; i < meta.getFieldUpdate().length; i++) {
            switch(meta.getFieldUpdate()[i]) {
                case DimensionLookupMeta.TYPE_UPDATE_DATE_INSUP:
                case DimensionLookupMeta.TYPE_UPDATE_DATE_INSERTED:
                case DimensionLookupMeta.TYPE_UPDATE_LAST_VERSION:
                    sql += ", ?";
                    break;
                default:
                    break;
            }
        }
        sql += " )";
        try {
            if (technicalKey == null && databaseMeta.supportsAutoGeneratedKeys()) {
                logDetailed("SQL w/ return keys=[" + sql + "]");
                data.prepStatementInsert = data.db.getConnection().prepareStatement(databaseMeta.stripCR(sql), Statement.RETURN_GENERATED_KEYS);
            } else {
                logDetailed("SQL=[" + sql + "]");
                data.prepStatementInsert = data.db.getConnection().prepareStatement(databaseMeta.stripCR(sql));
            }
        // pstmt=con.prepareStatement(sql, new String[] { "klant_tk" } );
        } catch (SQLException ex) {
            throw new KettleDatabaseException("Unable to prepare dimension insert :" + Const.CR + sql, ex);
        }
        /*
       * UPDATE d_customer SET dateto = val_datnow, last_updated = <now> last_version = false WHERE keylookup[] =
       * keynrs[] AND versionfield = val_version - 1 ;
       */
        RowMetaInterface updateRowMeta = new RowMeta();
        String sql_upd = "UPDATE " + data.schemaTable + Const.CR;
        // The end of the date range of the previous version.
        sql_upd += "SET " + databaseMeta.quoteField(meta.getDateTo()) + " = ?" + Const.CR;
        updateRowMeta.addValueMeta(new ValueMetaDate(meta.getDateTo()));
        // Also set the special update columns (update date, last-version flag) when closing the previous version.
        for (int i = 0; i < meta.getFieldUpdate().length; i++) {
            ValueMetaInterface valueMeta = null;
            switch(meta.getFieldUpdate()[i]) {
                case DimensionLookupMeta.TYPE_UPDATE_DATE_INSUP:
                case DimensionLookupMeta.TYPE_UPDATE_DATE_UPDATED:
                    valueMeta = new ValueMetaDate(meta.getFieldLookup()[i]);
                    break;
                case DimensionLookupMeta.TYPE_UPDATE_LAST_VERSION:
                    valueMeta = new ValueMetaBoolean(meta.getFieldLookup()[i]);
                    break;
                default:
                    break;
            }
            if (valueMeta != null) {
                sql_upd += ", " + databaseMeta.quoteField(valueMeta.getName()) + " = ?" + Const.CR;
                updateRowMeta.addValueMeta(valueMeta);
            }
        }
        sql_upd += "WHERE ";
        for (int i = 0; i < meta.getKeyLookup().length; i++) {
            if (i > 0) {
                sql_upd += "AND   ";
            }
            sql_upd += databaseMeta.quoteField(meta.getKeyLookup()[i]) + " = ?" + Const.CR;
            updateRowMeta.addValueMeta(inputRowMeta.getValueMeta(data.keynrs[i]));
        }
        sql_upd += "AND   " + databaseMeta.quoteField(meta.getVersionField()) + " = ? ";
        updateRowMeta.addValueMeta(new ValueMetaInteger(meta.getVersionField()));
        try {
            logDetailed("Preparing update: " + Const.CR + sql_upd + Const.CR);
            data.prepStatementUpdate = data.db.getConnection().prepareStatement(databaseMeta.stripCR(sql_upd));
        } catch (SQLException ex) {
            throw new KettleDatabaseException("Unable to prepare dimension update :" + Const.CR + sql_upd, ex);
        }
        data.insertRowMeta = insertRowMeta;
        data.updateRowMeta = updateRowMeta;
    }
    Object[] insertRow = new Object[data.insertRowMeta.size()];
    int insertIndex = 0;
    if (!isAutoIncrement()) {
        insertRow[insertIndex++] = technicalKey;
    }
    // The caller is responsible for setting the proper version number,
    // depending on whether newEntry == true.
    insertRow[insertIndex++] = versionNr;
    switch(data.startDateChoice) {
        case DimensionLookupMeta.START_DATE_ALTERNATIVE_NONE:
            insertRow[insertIndex++] = dateFrom;
            break;
        case DimensionLookupMeta.START_DATE_ALTERNATIVE_SYSDATE:
            // Use the time the step execution begins as the "date from" (passed in as dateFrom).
            // Previously the current system time was used, which excluded the row in the lookup
            // portion of the step (it uses this 'valueDate', not the current time); the result
            // was multiple inserts for what should have been a single row [PDI-4317].
            insertRow[insertIndex++] = dateFrom;
            break;
        case DimensionLookupMeta.START_DATE_ALTERNATIVE_START_OF_TRANS:
            insertRow[insertIndex++] = getTrans().getStartDate();
            break;
        case DimensionLookupMeta.START_DATE_ALTERNATIVE_NULL:
            insertRow[insertIndex++] = null;
            break;
        case DimensionLookupMeta.START_DATE_ALTERNATIVE_COLUMN_VALUE:
            insertRow[insertIndex++] = inputRowMeta.getDate(row, data.startDateFieldIndex);
            break;
        default:
            throw new KettleStepException(BaseMessages.getString(PKG, "DimensionLookup.Exception.IllegalStartDateSelection", Integer.toString(data.startDateChoice)));
    }
    insertRow[insertIndex++] = dateTo;
    for (int i = 0; i < data.keynrs.length; i++) {
        insertRow[insertIndex++] = row[data.keynrs[i]];
    }
    for (int i = 0; i < data.fieldnrs.length; i++) {
        if (data.fieldnrs[i] >= 0) {
            // Ignore last_version, last_updated, etc.; these are handled below.
            insertRow[insertIndex++] = row[data.fieldnrs[i]];
        }
    }
    // Fill in the special update columns: insert/update dates and the last-version flag.
    for (int i = 0; i < meta.getFieldUpdate().length; i++) {
        switch(meta.getFieldUpdate()[i]) {
            case DimensionLookupMeta.TYPE_UPDATE_DATE_INSUP:
            case DimensionLookupMeta.TYPE_UPDATE_DATE_INSERTED:
                insertRow[insertIndex++] = new Date();
                break;
            case DimensionLookupMeta.TYPE_UPDATE_LAST_VERSION:
                insertRow[insertIndex++] = Boolean.TRUE;
                // Always the last version on insert.
                break;
            default:
                break;
        }
    }
    if (isDebug()) {
        logDebug("rins, size=" + data.insertRowMeta.size() + ", values=" + data.insertRowMeta.getString(insertRow));
    }
    // INSERT NEW VALUE!
    data.db.setValues(data.insertRowMeta, insertRow, data.prepStatementInsert);
    data.db.insertRow(data.prepStatementInsert);
    if (isDebug()) {
        logDebug("Row inserted!");
    }
    if (technicalKey == null && databaseMeta.supportsAutoGeneratedKeys()) {
        try {
            RowMetaAndData keys = data.db.getGeneratedKeys(data.prepStatementInsert);
            if (keys.getRowMeta().size() > 0) {
                technicalKey = keys.getRowMeta().getInteger(keys.getData(), 0);
            } else {
                throw new KettleDatabaseException("Unable to retrieve value of auto-generated technical key : no value found!");
            }
        } catch (Exception e) {
            throw new KettleDatabaseException("Unable to retrieve value of auto-generated technical key : unexpected error: ", e);
        }
    }
    if (!newEntry) {
        // we have to update the previous version in the dimension!
        /*
       * UPDATE d_customer SET dateto = val_datfrom , last_updated = <now> , last_version = false WHERE keylookup[] =
       * keynrs[] AND versionfield = val_version - 1 ;
       */
        Object[] updateRow = new Object[data.updateRowMeta.size()];
        int updateIndex = 0;
        switch(data.startDateChoice) {
            case DimensionLookupMeta.START_DATE_ALTERNATIVE_NONE:
                updateRow[updateIndex++] = dateFrom;
                break;
            case DimensionLookupMeta.START_DATE_ALTERNATIVE_SYSDATE:
                updateRow[updateIndex++] = new Date();
                break;
            case DimensionLookupMeta.START_DATE_ALTERNATIVE_START_OF_TRANS:
                updateRow[updateIndex++] = getTrans().getCurrentDate();
                break;
            case DimensionLookupMeta.START_DATE_ALTERNATIVE_NULL:
                updateRow[updateIndex++] = null;
                break;
            case DimensionLookupMeta.START_DATE_ALTERNATIVE_COLUMN_VALUE:
                updateRow[updateIndex++] = inputRowMeta.getDate(row, data.startDateFieldIndex);
                break;
            default:
                throw new KettleStepException(BaseMessages.getString(PKG, "DimensionLookup.Exception.IllegalStartDateSelection", Integer.toString(data.startDateChoice)));
        }
        // Fill in the special update columns for the previous version (update date, last-version flag).
        for (int i = 0; i < meta.getFieldUpdate().length; i++) {
            switch(meta.getFieldUpdate()[i]) {
                case DimensionLookupMeta.TYPE_UPDATE_DATE_INSUP:
                    updateRow[updateIndex++] = new Date();
                    break;
                case DimensionLookupMeta.TYPE_UPDATE_LAST_VERSION:
                    updateRow[updateIndex++] = Boolean.FALSE;
                    // Never the last version on this update
                    break;
                case DimensionLookupMeta.TYPE_UPDATE_DATE_UPDATED:
                    updateRow[updateIndex++] = new Date();
                    break;
                default:
                    break;
            }
        }
        for (int i = 0; i < data.keynrs.length; i++) {
            updateRow[updateIndex++] = row[data.keynrs[i]];
        }
        updateRow[updateIndex++] = versionNr - 1;
        if (isRowLevel()) {
            logRowlevel("UPDATE using rupd=" + data.updateRowMeta.getString(updateRow));
        }
        // UPDATE VALUES: set the values for the update statement
        data.db.setValues(data.updateRowMeta, updateRow, data.prepStatementUpdate);
        if (isDebug()) {
            logDebug("Values set for update (" + data.updateRowMeta.size() + ")");
        }
        // do the actual update
        data.db.insertRow(data.prepStatementUpdate);
        if (isDebug()) {
            logDebug("Row updated!");
        }
    }
    return technicalKey;
}
Also used : KettleStepException(org.pentaho.di.core.exception.KettleStepException) RowMeta(org.pentaho.di.core.row.RowMeta) SQLException(java.sql.SQLException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) ValueMetaBoolean(org.pentaho.di.core.row.value.ValueMetaBoolean) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) SQLException(java.sql.SQLException) KettleValueException(org.pentaho.di.core.exception.KettleValueException) KettleStepException(org.pentaho.di.core.exception.KettleStepException) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) ValueMetaInteger(org.pentaho.di.core.row.value.ValueMetaInteger) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate)
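
The DatabaseMeta usage in this example reduces to a small, reusable pattern: quote identifiers for the target dialect, strip carriage returns before preparing, and request generated keys only when the dialect supports them. Below is a minimal stand-alone sketch of that pattern; the class name, parameter names, and column list are illustrative only and not part of the Kettle source.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

import org.pentaho.di.core.database.DatabaseMeta;

public class DimensionInsertSqlSketch {

    /** Builds and prepares a dialect-aware INSERT for a dimension table. */
    public static PreparedStatement prepareInsert(DatabaseMeta databaseMeta, Connection connection,
            String schemaTable, String keyField, String versionField,
            String dateFromField, String dateToField) throws SQLException {

        // Quote every identifier through DatabaseMeta so the SQL is valid for the target dialect.
        String sql = "INSERT INTO " + schemaTable + "( "
            + databaseMeta.quoteField(keyField) + ", "
            + databaseMeta.quoteField(versionField) + ", "
            + databaseMeta.quoteField(dateFromField) + ", "
            + databaseMeta.quoteField(dateToField)
            + " ) VALUES ( ?, ?, ?, ? )";

        // stripCR() normalizes line endings before the statement is handed to the driver.
        if (databaseMeta.supportsAutoGeneratedKeys()) {
            // Ask the driver to return the generated technical key, as dimInsert() does above.
            return connection.prepareStatement(databaseMeta.stripCR(sql), Statement.RETURN_GENERATED_KEYS);
        }
        return connection.prepareStatement(databaseMeta.stripCR(sql));
    }
}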

Example 77 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class DimensionLookup method dimPunchThrough.

// This updates all versions of a dimension entry (punch-through update).
public void dimPunchThrough(RowMetaInterface rowMeta, Object[] row) throws KettleDatabaseException {
    if (data.prepStatementPunchThrough == null) {
        // first time: construct prepared statement
        DatabaseMeta databaseMeta = meta.getDatabaseMeta();
        data.punchThroughRowMeta = new RowMeta();
        /*
       * UPDATE table SET punchv1 = fieldx, ... , last_updated = <now> WHERE keylookup[] = keynrs[] ;
       */
        String sql_upd = "UPDATE " + data.schemaTable + Const.CR;
        sql_upd += "SET ";
        boolean first = true;
        for (int i = 0; i < meta.getFieldLookup().length; i++) {
            if (meta.getFieldUpdate()[i] == DimensionLookupMeta.TYPE_UPDATE_DIM_PUNCHTHROUGH) {
                if (!first) {
                    sql_upd += ", ";
                } else {
                    sql_upd += "  ";
                }
                first = false;
                sql_upd += databaseMeta.quoteField(meta.getFieldLookup()[i]) + " = ?" + Const.CR;
                data.punchThroughRowMeta.addValueMeta(rowMeta.getValueMeta(data.fieldnrs[i]));
            }
        }
        // Also bump the date-updated columns as part of the punch-through.
        for (int i = 0; i < meta.getFieldUpdate().length; i++) {
            ValueMetaInterface valueMeta = null;
            switch(meta.getFieldUpdate()[i]) {
                case DimensionLookupMeta.TYPE_UPDATE_DATE_INSUP:
                case DimensionLookupMeta.TYPE_UPDATE_DATE_UPDATED:
                    valueMeta = new ValueMetaDate(meta.getFieldLookup()[i]);
                    break;
                default:
                    break;
            }
            if (valueMeta != null) {
                sql_upd += ", " + databaseMeta.quoteField(valueMeta.getName()) + " = ?" + Const.CR;
                data.punchThroughRowMeta.addValueMeta(valueMeta);
            }
        }
        sql_upd += "WHERE ";
        for (int i = 0; i < meta.getKeyLookup().length; i++) {
            if (i > 0) {
                sql_upd += "AND   ";
            }
            sql_upd += databaseMeta.quoteField(meta.getKeyLookup()[i]) + " = ?" + Const.CR;
            data.punchThroughRowMeta.addValueMeta(rowMeta.getValueMeta(data.keynrs[i]));
        }
        try {
            data.prepStatementPunchThrough = data.db.getConnection().prepareStatement(meta.getDatabaseMeta().stripCR(sql_upd));
        } catch (SQLException ex) {
            throw new KettleDatabaseException("Unable to prepare dimension punchThrough update statement : " + Const.CR + sql_upd, ex);
        }
    }
    Object[] punchThroughRow = new Object[data.punchThroughRowMeta.size()];
    int punchIndex = 0;
    for (int i = 0; i < meta.getFieldLookup().length; i++) {
        if (meta.getFieldUpdate()[i] == DimensionLookupMeta.TYPE_UPDATE_DIM_PUNCHTHROUGH) {
            punchThroughRow[punchIndex++] = row[data.fieldnrs[i]];
        }
    }
    for (int i = 0; i < meta.getFieldUpdate().length; i++) {
        switch(meta.getFieldUpdate()[i]) {
            case DimensionLookupMeta.TYPE_UPDATE_DATE_INSUP:
            case DimensionLookupMeta.TYPE_UPDATE_DATE_UPDATED:
                punchThroughRow[punchIndex++] = new Date();
                break;
            default:
                break;
        }
    }
    for (int i = 0; i < data.keynrs.length; i++) {
        punchThroughRow[punchIndex++] = row[data.keynrs[i]];
    }
    // UPDATE VALUES: set values for the punch-through update
    data.db.setValues(data.punchThroughRowMeta, punchThroughRow, data.prepStatementPunchThrough);
    // do the actual punch-through update
    data.db.insertRow(data.prepStatementPunchThrough);
}
Also used : RowMeta(org.pentaho.di.core.row.RowMeta) SQLException(java.sql.SQLException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate) Date(java.util.Date) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface)
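
Note how the prepared UPDATE is executed: Database.setValues() binds the parameters in the order of the accompanying RowMetaInterface, and Database.insertRow() simply executes the prepared statement, whether it is an INSERT or an UPDATE. A minimal sketch of that execution step, assuming the statement and row metadata have already been built as above (the helper class is hypothetical):

import java.sql.PreparedStatement;

import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.row.RowMetaInterface;

public class PreparedUpdateSketch {

    /** Binds the values in row order and executes the already prepared statement. */
    public static void execute(Database db, RowMetaInterface rowMeta, Object[] values,
            PreparedStatement statement) throws KettleDatabaseException {
        db.setValues(rowMeta, values, statement); // bind parameters in the order of rowMeta
        db.insertRow(statement);                  // executes the statement; works for UPDATE as well
    }
}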

Example 78 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class UpdateIT method setUp.

@Override
@Before
public void setUp() throws Exception {
    KettleEnvironment.init();
    /* SET UP TRANSFORMATION */
    // Create a new transformation...
    TransMeta transMeta = new TransMeta();
    transMeta.setName("update test");
    // Add the database connections
    for (int i = 0; i < databasesXML.length; i++) {
        DatabaseMeta databaseMeta = new DatabaseMeta(databasesXML[i]);
        transMeta.addDatabase(databaseMeta);
    }
    DatabaseMeta dbInfo = transMeta.findDatabase("db");
    /* SET UP DATABASE */
    // Create target table
    db = new Database(transMeta, dbInfo);
    db.connect();
    String source = db.getCreateTableStatement(TARGET_TABLE, getTargetTableRowMeta(), null, false, null, true);
    db.execStatement(source);
    // populate target table
    for (String sql : insertStatement) {
        db.execStatement(sql);
    }
    /* SET UP TRANSFORMATION STEPS */
    PluginRegistry registry = PluginRegistry.getInstance();
    // create an injector step...
    String injectorStepName = "injector step";
    InjectorMeta im = new InjectorMeta();
    // Set the information of the injector.
    String injectorPid = registry.getPluginId(StepPluginType.class, im);
    StepMeta injectorStep = new StepMeta(injectorPid, injectorStepName, im);
    transMeta.addStep(injectorStep);
    // create the update step...
    String updateStepName = "update [" + TARGET_TABLE + "]";
    upd = new UpdateMeta();
    upd.setDatabaseMeta(transMeta.findDatabase("db"));
    upd.setTableName(TARGET_TABLE);
    upd.setUpdateLookup(new String[] { "VALUE" });
    upd.setUpdateStream(new String[] { "VALUE" });
    upd.setErrorIgnored(true);
    String fromid = registry.getPluginId(StepPluginType.class, upd);
    StepMeta updateStep = new StepMeta(fromid, updateStepName, upd);
    updateStep.setDescription("update data in table [" + TARGET_TABLE + "] on database [" + dbInfo + "]");
    transMeta.addStep(updateStep);
    TransHopMeta hi = new TransHopMeta(injectorStep, updateStep);
    transMeta.addTransHop(hi);
    /* PREPARE TRANSFORMATION EXECUTION */
    trans = new Trans(transMeta);
    trans.prepareExecution(null);
    StepInterface si = trans.getStepInterface(updateStepName, 0);
    rc = new RowStepCollector();
    si.addRowListener(rc);
    rp = trans.addRowProducer(injectorStepName, 0);
}
Also used : RowStepCollector(org.pentaho.di.trans.RowStepCollector) TransMeta(org.pentaho.di.trans.TransMeta) InjectorMeta(org.pentaho.di.trans.steps.injector.InjectorMeta) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) StepInterface(org.pentaho.di.trans.step.StepInterface) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) Database(org.pentaho.di.core.database.Database) TransHopMeta(org.pentaho.di.trans.TransHopMeta) Trans(org.pentaho.di.trans.Trans) Before(org.junit.Before)
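
setUp() stops after prepareExecution(); a test method would then typically start the threads, push rows through the injector via the RowProducer, and inspect what the RowStepCollector captured, following the same pattern as the TableInputIT and TableOutputIT examples below. A sketch of such a test body, which would live in UpdateIT next to setUp(); createUpdateRows() is a hypothetical helper building RowMetaAndData rows with a "VALUE" field.

@Test
public void testUpdateRows() throws Exception {
    // Start the transformation prepared in setUp() and inject the test rows.
    trans.startThreads();
    for (RowMetaAndData rowAndData : createUpdateRows()) { // hypothetical helper
        rp.putRow(rowAndData.getRowMeta(), rowAndData.getData());
    }
    rp.finished();
    trans.waitUntilFinished();

    // Rows seen by the update step; assertions on these (or on the target table) go here.
    List<RowMetaAndData> written = rc.getRowsWritten();
}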

Example 79 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class TableInputIT method testTableInputWithParam.

/**
 * Test case for table input which is taking its input from a hop. This is a regression test case for JIRA PDI-588.
 *
 * The query in the table input step has one '?' and this parameter is filled by values read from an input hop.
 */
public void testTableInputWithParam() throws Exception {
    KettleEnvironment.init();
    // 
    // Create a new transformation...
    // 
    TransMeta transMeta = new TransMeta();
    transMeta.setName("transname");
    // Add the database connections
    for (int i = 0; i < databasesXML.length; i++) {
        DatabaseMeta databaseMeta = new DatabaseMeta(databasesXML[i]);
        transMeta.addDatabase(databaseMeta);
    }
    DatabaseMeta dbInfo = transMeta.findDatabase("db");
    // Execute our setup SQLs in the database.
    Database database = new Database(transMeta, dbInfo);
    database.connect();
    createTables(database);
    createData(database);
    PluginRegistry registry = PluginRegistry.getInstance();
    // 
    // create an injector step...
    // 
    String injectorStepname = "injector step";
    InjectorMeta im = new InjectorMeta();
    // Set the information of the injector.
    String injectorPid = registry.getPluginId(StepPluginType.class, im);
    StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, im);
    transMeta.addStep(injectorStep);
    // 
    // create the source step...
    // 
    String fromstepname = "read from [" + source_table + "]";
    TableInputMeta tii = new TableInputMeta();
    tii.setDatabaseMeta(transMeta.findDatabase("db"));
    tii.setLookupFromStep(injectorStep);
    tii.setExecuteEachInputRow(true);
    String selectSQL = "SELECT " + Const.CR;
    selectSQL += "ID, CODE ";
    selectSQL += "FROM " + source_table + " WHERE CODE = ? ORDER BY ID, CODE;";
    tii.setSQL(selectSQL);
    String fromstepid = registry.getPluginId(StepPluginType.class, tii);
    StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
    fromstep.setDescription("Reads information from table [" + source_table + "] on database [" + dbInfo + "]");
    transMeta.addStep(fromstep);
    TransHopMeta hi = new TransHopMeta(injectorStep, fromstep);
    transMeta.addTransHop(hi);
    // Now execute the transformation...
    Trans trans = new Trans(transMeta);
    trans.prepareExecution(null);
    StepInterface si = trans.getStepInterface(fromstepname, 0);
    RowStepCollector rc = new RowStepCollector();
    si.addRowListener(rc);
    RowProducer rp = trans.addRowProducer(injectorStepname, 0);
    trans.startThreads();
    // add rows
    List<RowMetaAndData> inputList = createDataRows();
    for (RowMetaAndData rm : inputList) {
        rp.putRow(rm.getRowMeta(), rm.getData());
    }
    rp.finished();
    trans.waitUntilFinished();
    List<RowMetaAndData> resultRows = rc.getRowsWritten();
    List<RowMetaAndData> goldRows = createResultDataRows();
    checkRows(goldRows, resultRows);
}
Also used : RowProducer(org.pentaho.di.trans.RowProducer) RowStepCollector(org.pentaho.di.trans.RowStepCollector) TransMeta(org.pentaho.di.trans.TransMeta) InjectorMeta(org.pentaho.di.trans.steps.injector.InjectorMeta) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) StepInterface(org.pentaho.di.trans.step.StepInterface) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) Database(org.pentaho.di.core.database.Database) TransHopMeta(org.pentaho.di.trans.TransHopMeta) Trans(org.pentaho.di.trans.Trans)
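
Each injected row supplies the value bound to the single '?' in the query (the CODE column). The test's createDataRows() is not shown in this excerpt; the following is a hypothetical reconstruction that illustrates the required shape of those rows, not the actual helper.

// Hypothetical reconstruction: the real createDataRows() in TableInputIT may use
// different values; what matters is that each row carries one value to bind to the
// '?' parameter of the SELECT.
private List<RowMetaAndData> createDataRows() {
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta(new ValueMetaString("CODE"));

    List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
    rows.add(new RowMetaAndData(rowMeta, new Object[] { "A" }));
    rows.add(new RowMetaAndData(rowMeta, new Object[] { "B" }));
    return rows;
}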

Example 80 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class TableOutputIT method testTableOutputJIRA897.

/**
 * Test case for normal table output where the table name is included in the input stream, but is not stored in
 * the table itself.
 */
public void testTableOutputJIRA897() throws Exception {
    KettleEnvironment.init();
    // 
    // Create a new transformation...
    // 
    TransMeta transMeta = new TransMeta();
    transMeta.setName("table output JIRA897 test");
    // Add the database connections
    for (int i = 0; i < databasesXML.length; i++) {
        DatabaseMeta databaseMeta = new DatabaseMeta(databasesXML[i]);
        transMeta.addDatabase(databaseMeta);
    }
    DatabaseMeta dbInfo = transMeta.findDatabase("db");
    // Execute our setup SQLs in the database.
    Database database = new Database(transMeta, dbInfo);
    database.connect();
    createTable(database, target_table1, createSourceRowMetaInterface1());
    createTable(database, target_table2, createSourceRowMetaInterface1());
    PluginRegistry registry = PluginRegistry.getInstance();
    // 
    // create an injector step...
    // 
    String injectorStepname = "injector step";
    InjectorMeta im = new InjectorMeta();
    // Set the information of the injector.
    String injectorPid = registry.getPluginId(StepPluginType.class, im);
    StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, im);
    transMeta.addStep(injectorStep);
    // 
    // create the source step...
    // 
    String outputname = "output to [" + target_table1 + "] and [" + target_table2 + "]";
    TableOutputMeta tom = new TableOutputMeta();
    tom.setDatabaseMeta(transMeta.findDatabase("db"));
    tom.setTableNameInField(true);
    tom.setTableNameField("TABLE");
    tom.setTableNameInTable(false);
    String fromid = registry.getPluginId(StepPluginType.class, tom);
    StepMeta fromstep = new StepMeta(fromid, outputname, tom);
    fromstep.setDescription("write data to tables on database [" + dbInfo + "]");
    transMeta.addStep(fromstep);
    TransHopMeta hi = new TransHopMeta(injectorStep, fromstep);
    transMeta.addTransHop(hi);
    // Now execute the transformation...
    Trans trans = new Trans(transMeta);
    trans.prepareExecution(null);
    StepInterface si = trans.getStepInterface(outputname, 0);
    RowStepCollector rc = new RowStepCollector();
    si.addRowListener(rc);
    RowProducer rp = trans.addRowProducer(injectorStepname, 0);
    trans.startThreads();
    // add rows
    List<RowMetaAndData> inputList = createJIRA897DataRows();
    for (RowMetaAndData rm : inputList) {
        rp.putRow(rm.getRowMeta(), rm.getData());
    }
    rp.finished();
    trans.waitUntilFinished();
    List<RowMetaAndData> resultRows = rc.getRowsWritten();
    // The name of the table should still be in here.
    List<RowMetaAndData> goldRows = createJIRA897DataRows();
    checkRows(goldRows, resultRows);
    checkResultsJIRA897(database);
}
Also used : RowProducer(org.pentaho.di.trans.RowProducer) RowStepCollector(org.pentaho.di.trans.RowStepCollector) TransMeta(org.pentaho.di.trans.TransMeta) InjectorMeta(org.pentaho.di.trans.steps.injector.InjectorMeta) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) StepInterface(org.pentaho.di.trans.step.StepInterface) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) Database(org.pentaho.di.core.database.Database) TransHopMeta(org.pentaho.di.trans.TransHopMeta) Trans(org.pentaho.di.trans.Trans)
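
Because setTableNameInField(true) and setTableNameField("TABLE") are used, every row pushed into the step must carry a "TABLE" field naming its target table, and setTableNameInTable(false) keeps that routing field out of the inserted data. A hypothetical sketch of rows shaped that way; the actual createJIRA897DataRows() may differ.

// Hypothetical sketch: rows that route themselves to target_table1 / target_table2
// via the "TABLE" field; the remaining field is an ordinary output column.
private List<RowMetaAndData> createRoutedRowsSketch() {
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta(new ValueMetaString("TABLE"));
    rowMeta.addValueMeta(new ValueMetaString("CODE")); // illustrative data column

    List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
    rows.add(new RowMetaAndData(rowMeta, new Object[] { target_table1, "row for table 1" }));
    rows.add(new RowMetaAndData(rowMeta, new Object[] { target_table2, "row for table 2" }));
    return rows;
}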

Aggregations

DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta)522 Test (org.junit.Test)133 KettleException (org.pentaho.di.core.exception.KettleException)131 Database (org.pentaho.di.core.database.Database)88 MessageBox (org.eclipse.swt.widgets.MessageBox)66 ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog)63 TransMeta (org.pentaho.di.trans.TransMeta)57 StepMeta (org.pentaho.di.trans.step.StepMeta)54 ArrayList (java.util.ArrayList)53 KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)48 RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface)44 ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString)42 SlaveServer (org.pentaho.di.cluster.SlaveServer)33 IMetaStore (org.pentaho.metastore.api.IMetaStore)30 ObjectId (org.pentaho.di.repository.ObjectId)29 DatabaseExplorerDialog (org.pentaho.di.ui.core.database.dialog.DatabaseExplorerDialog)29 JobMeta (org.pentaho.di.job.JobMeta)26 TransHopMeta (org.pentaho.di.trans.TransHopMeta)26 RowMetaAndData (org.pentaho.di.core.RowMetaAndData)24 PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry)24