
Example 1 with TableInputMeta

use of org.pentaho.di.trans.steps.tableinput.TableInputMeta in project pentaho-kettle by pentaho.

the class SpoonDBDelegate method copyTable.

public boolean copyTable(DatabaseMeta sourceDBInfo, DatabaseMeta targetDBInfo, String tablename) {
    try {
        // 
        // Create a new transformation...
        // 
        TransMeta meta = new TransMeta();
        meta.addDatabase(sourceDBInfo);
        meta.addDatabase(targetDBInfo);
        // 
        // Add a note
        // 
        // "Reads information from table ["+tablename+"] on database ["+sourceDBInfo+"]"
        String note = BaseMessages.getString(PKG, "Spoon.Message.Note.ReadInformationFromTableOnDB", tablename, sourceDBInfo.getDatabaseName()) + Const.CR;
        // "Writes the information to table ["+tablename+"] on database ["+targetDBInfo+"]"
        note += BaseMessages.getString(PKG, "Spoon.Message.Note.WriteInformationToTableOnDB", tablename, targetDBInfo.getDatabaseName());
        NotePadMeta ni = new NotePadMeta(note, 150, 10, -1, -1);
        meta.addNote(ni);
        // 
        // create the source step...
        // 
        // "read
        String fromstepname = BaseMessages.getString(PKG, "Spoon.Message.Note.ReadFromTable", tablename);
        // from
        // ["+tablename+"]";
        TableInputMeta tii = new TableInputMeta();
        tii.setDatabaseMeta(sourceDBInfo);
        tii.setSQL("SELECT * FROM " + tablename);
        PluginRegistry registry = PluginRegistry.getInstance();
        String fromstepid = registry.getPluginId(StepPluginType.class, tii);
        StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
        fromstep.setLocation(150, 100);
        fromstep.setDraw(true);
        fromstep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.ReadInformationFromTableOnDB", tablename, sourceDBInfo.getDatabaseName()));
        meta.addStep(fromstep);
        // 
        // add logic to rename fields in case any of the field names contain
        // reserved words...
        // Use metadata logic in SelectValues, use SelectValueInfo...
        // 
        Database sourceDB = new Database(loggingObject, sourceDBInfo);
        sourceDB.shareVariablesWith(meta);
        sourceDB.connect();
        try {
            // Get the fields for the input table...
            RowMetaInterface fields = sourceDB.getTableFields(tablename);
            // See if we need to deal with reserved words...
            int nrReserved = targetDBInfo.getNrReservedWords(fields);
            if (nrReserved > 0) {
                SelectValuesMeta svi = new SelectValuesMeta();
                svi.allocate(0, 0, nrReserved);
                int nr = 0;
                // CHECKSTYLE:Indentation:OFF
                for (int i = 0; i < fields.size(); i++) {
                    ValueMetaInterface v = fields.getValueMeta(i);
                    if (targetDBInfo.isReservedWord(v.getName())) {
                        if (svi.getMeta()[nr] == null) {
                            svi.getMeta()[nr] = new SelectMetadataChange(svi);
                        }
                        svi.getMeta()[nr].setName(v.getName());
                        svi.getMeta()[nr].setRename(targetDBInfo.quoteField(v.getName()));
                        nr++;
                    }
                }
                String selstepname = BaseMessages.getString(PKG, "Spoon.Message.Note.HandleReservedWords");
                String selstepid = registry.getPluginId(StepPluginType.class, svi);
                StepMeta selstep = new StepMeta(selstepid, selstepname, svi);
                selstep.setLocation(350, 100);
                selstep.setDraw(true);
                selstep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.RenamesReservedWords", targetDBInfo.getPluginId()));
                meta.addStep(selstep);
                TransHopMeta shi = new TransHopMeta(fromstep, selstep);
                meta.addTransHop(shi);
                fromstep = selstep;
            }
            // 
            // Create the target step...
            // 
            // 
            // Add the TableOutputMeta step...
            // 
            String tostepname = BaseMessages.getString(PKG, "Spoon.Message.Note.WriteToTable", tablename);
            TableOutputMeta toi = new TableOutputMeta();
            toi.setDatabaseMeta(targetDBInfo);
            toi.setTableName(tablename);
            toi.setCommitSize(200);
            toi.setTruncateTable(true);
            String tostepid = registry.getPluginId(StepPluginType.class, toi);
            StepMeta tostep = new StepMeta(tostepid, tostepname, toi);
            tostep.setLocation(550, 100);
            tostep.setDraw(true);
            tostep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.WriteInformationToTableOnDB2", tablename, targetDBInfo.getDatabaseName()));
            meta.addStep(tostep);
            // 
            // Add a hop between the two steps...
            // 
            TransHopMeta hi = new TransHopMeta(fromstep, tostep);
            meta.addTransHop(hi);
            // OK, if we're still here: overwrite the current transformation...
            // Set a name on this generated transformation
            // 
            String name = "Copy table from [" + sourceDBInfo.getName() + "] to [" + targetDBInfo.getName() + "]";
            String transName = name;
            int nr = 1;
            if (spoon.delegates.trans.getTransformation(transName) != null) {
                nr++;
                transName = name + " " + nr;
            }
            meta.setName(transName);
            spoon.delegates.trans.addTransGraph(meta);
            spoon.refreshGraph();
            spoon.refreshTree();
        } finally {
            sourceDB.disconnect();
        }
    } catch (Exception e) {
        new ErrorDialog(spoon.getShell(), BaseMessages.getString(PKG, "Spoon.Dialog.UnexpectedError.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.UnexpectedError.Message"), new KettleException(e.getMessage(), e));
        return false;
    }
    return true;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) TransMeta(org.pentaho.di.trans.TransMeta) ErrorDialog(org.pentaho.di.ui.core.dialog.ErrorDialog) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) TableOutputMeta(org.pentaho.di.trans.steps.tableoutput.TableOutputMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) TableInputMeta(org.pentaho.di.trans.steps.tableinput.TableInputMeta) KettleException(org.pentaho.di.core.exception.KettleException) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface) SelectValuesMeta(org.pentaho.di.trans.steps.selectvalues.SelectValuesMeta) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) Database(org.pentaho.di.core.database.Database) SelectMetadataChange(org.pentaho.di.trans.steps.selectvalues.SelectMetadataChange) NotePadMeta(org.pentaho.di.core.NotePadMeta) TransHopMeta(org.pentaho.di.trans.TransHopMeta)
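
The method above only assembles the transformation and opens it in Spoon; it does not run it. As a minimal sketch (not part of SpoonDBDelegate), the generated TransMeta could be executed programmatically with the standard Kettle engine classes; the variable meta refers to the TransMeta built in copyTable:

// Hedged sketch: execute the generated transformation outside of Spoon.
// Uses org.pentaho.di.core.KettleEnvironment and org.pentaho.di.trans.Trans;
// "meta" is assumed to be the TransMeta assembled by copyTable above.
KettleEnvironment.init();
Trans trans = new Trans(meta);
trans.execute(null); // no command-line arguments
trans.waitUntilFinished();
if (trans.getErrors() > 0) {
    throw new KettleException("The table copy transformation finished with errors");
}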

Example 2 with TableInputMeta

use of org.pentaho.di.trans.steps.tableinput.TableInputMeta in project pentaho-kettle by pentaho.

the class SpoonJobDelegate method ripDB.

public JobMeta ripDB(final List<DatabaseMeta> databases, final String jobname, final RepositoryDirectoryInterface repdir, final String directory, final DatabaseMeta sourceDbInfo, final DatabaseMeta targetDbInfo, final String[] tables) {
    // 
    // Create a new job...
    // 
    final JobMeta jobMeta = new JobMeta();
    jobMeta.setDatabases(databases);
    jobMeta.setFilename(null);
    jobMeta.setName(jobname);
    if (spoon.getRepository() != null) {
        jobMeta.setRepositoryDirectory(repdir);
    } else {
        jobMeta.setFilename(Const.createFilename(directory, jobname, "." + Const.STRING_JOB_DEFAULT_EXT));
    }
    spoon.refreshTree();
    spoon.refreshGraph();
    final Point location = new Point(50, 50);
    // The start entry...
    final JobEntryCopy start = JobMeta.createStartEntry();
    start.setLocation(new Point(location.x, location.y));
    start.setDrawn();
    jobMeta.addJobEntry(start);
    // final Thread parentThread = Thread.currentThread();
    // Create a dialog with a progress indicator!
    IRunnableWithProgress op = monitor -> {
        try {
            // This is running in a new process: copy some KettleVariables
            // info
            // LocalVariables.getInstance().createKettleVariables(Thread.currentThread().getName(),
            // parentThread.getName(), true);
            monitor.beginTask(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.BuildingNewJob"), tables.length);
            monitor.worked(0);
            JobEntryCopy previous = start;
            // Loop over the table-names...
            for (int i = 0; i < tables.length && !monitor.isCanceled(); i++) {
                monitor.setTaskName(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ProcessingTable") + tables[i] + "]...");
                // 
                // Create the new transformation...
                // 
                String transname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Transname1") + sourceDbInfo + "].[" + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Transname2") + targetDbInfo + "]";
                TransMeta transMeta = new TransMeta();
                if (repdir != null) {
                    transMeta.setRepositoryDirectory(repdir);
                } else {
                    transMeta.setFilename(Const.createFilename(directory, transname, "." + Const.STRING_TRANS_DEFAULT_EXT));
                }
                // Add the source & target db
                transMeta.addDatabase(sourceDbInfo);
                transMeta.addDatabase(targetDbInfo);
                // 
                // Add a note
                // 
                String note = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note1") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note2") + sourceDbInfo + "]" + Const.CR;
                note += BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note3") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note4") + targetDbInfo + "]";
                NotePadMeta ni = new NotePadMeta(note, 150, 10, -1, -1);
                transMeta.addNote(ni);
                // 
                // Add the TableInputMeta step...
                // 
                String fromstepname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Name") + tables[i] + "]";
                TableInputMeta tii = new TableInputMeta();
                tii.setDefault();
                tii.setDatabaseMeta(sourceDbInfo);
                // It's already quoted!
                tii.setSQL("SELECT * FROM " + tables[i]);
                String fromstepid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, tii);
                StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
                fromstep.setLocation(150, 100);
                fromstep.setDraw(true);
                fromstep.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Description") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Description2") + sourceDbInfo + "]");
                transMeta.addStep(fromstep);
                // 
                // Add the TableOutputMeta step...
                // 
                String tostepname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Name") + tables[i] + "]";
                TableOutputMeta toi = new TableOutputMeta();
                toi.setDatabaseMeta(targetDbInfo);
                toi.setTableName(tables[i]);
                toi.setCommitSize(100);
                toi.setTruncateTable(true);
                String tostepid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, toi);
                StepMeta tostep = new StepMeta(tostepid, tostepname, toi);
                tostep.setLocation(500, 100);
                tostep.setDraw(true);
                tostep.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Description1") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Description2") + targetDbInfo + "]");
                transMeta.addStep(tostep);
                // 
                // Add a hop between the two steps...
                // 
                TransHopMeta hi = new TransHopMeta(fromstep, tostep);
                transMeta.addTransHop(hi);
                // 
                // Now we generate the SQL needed to run for this
                // transformation.
                // 
                // First set the limit to 1 to speed things up!
                String tmpSql = tii.getSQL();
                tii.setSQL(tii.getSQL() + sourceDbInfo.getLimitClause(1));
                String sql;
                try {
                    sql = transMeta.getSQLStatementsString();
                } catch (KettleStepException kse) {
                    throw new InvocationTargetException(kse, BaseMessages.getString(PKG, "Spoon.RipDB.Exception.ErrorGettingSQLFromTransformation") + transMeta + "] : " + kse.getMessage());
                }
                // remove the limit
                tii.setSQL(tmpSql);
                // 
                // Now, save the transformation...
                // 
                boolean ok;
                if (spoon.getRepository() != null) {
                    ok = spoon.saveToRepository(transMeta, false);
                } else {
                    ok = spoon.saveToFile(transMeta);
                }
                if (!ok) {
                    throw new InvocationTargetException(new Exception(BaseMessages.getString(PKG, "Spoon.RipDB.Exception.UnableToSaveTransformationToRepository")), BaseMessages.getString(PKG, "Spoon.RipDB.Exception.UnableToSaveTransformationToRepository"));
                }
                // We can now continue with the population of the job...
                // //////////////////////////////////////////////////////////////////////
                location.x = 250;
                if (i > 0) {
                    location.y += 100;
                }
                // 
                if (!Utils.isEmpty(sql)) {
                    String jesqlname = BaseMessages.getString(PKG, "Spoon.RipDB.JobEntrySQL.Name") + tables[i] + "]";
                    JobEntrySQL jesql = new JobEntrySQL(jesqlname);
                    jesql.setDatabase(targetDbInfo);
                    jesql.setSQL(sql);
                    jesql.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.JobEntrySQL.Description") + targetDbInfo + "].[" + tables[i] + "]");
                    JobEntryCopy jecsql = new JobEntryCopy();
                    jecsql.setEntry(jesql);
                    jecsql.setLocation(new Point(location.x, location.y));
                    jecsql.setDrawn();
                    jobMeta.addJobEntry(jecsql);
                    // Add the hop too...
                    JobHopMeta jhi = new JobHopMeta(previous, jecsql);
                    jobMeta.addJobHop(jhi);
                    previous = jecsql;
                }
                // 
                // Add the jobentry for the transformation too...
                // 
                String jetransname = BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Name") + tables[i] + "]";
                JobEntryTrans jetrans = new JobEntryTrans(jetransname);
                jetrans.setTransname(transMeta.getName());
                if (spoon.getRepository() != null) {
                    jetrans.setSpecificationMethod(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
                    jetrans.setDirectory(transMeta.getRepositoryDirectory().getPath());
                } else {
                    jetrans.setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
                    jetrans.setFileName(Const.createFilename("${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}", transMeta.getName(), "." + Const.STRING_TRANS_DEFAULT_EXT));
                }
                JobEntryCopy jectrans = new JobEntryCopy(jetrans);
                jectrans.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description1") + Const.CR + BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description2") + sourceDbInfo + "].[" + tables[i] + "]" + Const.CR + BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description3") + targetDbInfo + "].[" + tables[i] + "]");
                jectrans.setDrawn();
                location.x += 400;
                jectrans.setLocation(new Point(location.x, location.y));
                jobMeta.addJobEntry(jectrans);
                // Add a hop between the last 2 job entries.
                JobHopMeta jhi2 = new JobHopMeta(previous, jectrans);
                jobMeta.addJobHop(jhi2);
                previous = jectrans;
                monitor.worked(1);
            }
            monitor.worked(100);
            monitor.done();
        } catch (Exception e) {
            new ErrorDialog(spoon.getShell(), "Error", "An unexpected error occurred!", e);
        }
    };
    try {
        ProgressMonitorDialog pmd = new ProgressMonitorDialog(spoon.getShell());
        pmd.run(false, true, op);
    } catch (InvocationTargetException | InterruptedException e) {
        new ErrorDialog(spoon.getShell(), BaseMessages.getString(PKG, "Spoon.ErrorDialog.RipDB.ErrorRippingTheDatabase.Title"), BaseMessages.getString(PKG, "Spoon.ErrorDialog.RipDB.ErrorRippingTheDatabase.Message"), e);
        return null;
    } finally {
        spoon.refreshGraph();
        spoon.refreshTree();
    }
    return jobMeta;
}
Also used : DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) ObjectType(org.pentaho.di.ui.spoon.TabMapEntry.ObjectType) JobGraph(org.pentaho.di.ui.spoon.job.JobGraph) StepPluginType(org.pentaho.di.core.plugins.StepPluginType) TableOutputMeta(org.pentaho.di.trans.steps.tableoutput.TableOutputMeta) Point(org.pentaho.di.core.gui.Point) Date(java.util.Date) JobEntrySQL(org.pentaho.di.job.entries.sql.JobEntrySQL) GUIResource(org.pentaho.di.ui.core.gui.GUIResource) TransMeta(org.pentaho.di.trans.TransMeta) TabMapEntry(org.pentaho.di.ui.spoon.TabMapEntry) Document(org.w3c.dom.Document) Job(org.pentaho.di.job.Job) Map(java.util.Map) NotePadMeta(org.pentaho.di.core.NotePadMeta) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) BaseMessages(org.pentaho.di.i18n.BaseMessages) StepMeta(org.pentaho.di.trans.step.StepMeta) ProgressMonitorDialog(org.eclipse.jface.dialogs.ProgressMonitorDialog) PropsUI(org.pentaho.di.ui.core.PropsUI) Wizard(org.eclipse.jface.wizard.Wizard) TabItem(org.pentaho.xul.swt.tab.TabItem) Utils(org.pentaho.di.core.util.Utils) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) InvocationTargetException(java.lang.reflect.InvocationTargetException) List(java.util.List) JobEntrySpecial(org.pentaho.di.job.entries.special.JobEntrySpecial) JobEntryDialogInterface(org.pentaho.di.job.entry.JobEntryDialogInterface) WizardDialog(org.eclipse.jface.wizard.WizardDialog) SWT(org.eclipse.swt.SWT) KettleStepException(org.pentaho.di.core.exception.KettleStepException) TransAction(org.pentaho.di.core.undo.TransAction) ErrorDialog(org.pentaho.di.ui.core.dialog.ErrorDialog) ObjectLocationSpecificationMethod(org.pentaho.di.core.ObjectLocationSpecificationMethod) JobHopMeta(org.pentaho.di.job.JobHopMeta) KettleException(org.pentaho.di.core.exception.KettleException) HashMap(java.util.HashMap) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) JobEntryPluginType(org.pentaho.di.core.plugins.JobEntryPluginType) Constructor(java.lang.reflect.Constructor) Spoon(org.pentaho.di.ui.spoon.Spoon) ArrayList(java.util.ArrayList) PluginInterface(org.pentaho.di.core.plugins.PluginInterface) Const(org.pentaho.di.core.Const) XMLHandler(org.pentaho.di.core.xml.XMLHandler) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) Node(org.w3c.dom.Node) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) RipDatabaseWizardPage2(org.pentaho.di.ui.spoon.wizards.RipDatabaseWizardPage2) RipDatabaseWizardPage1(org.pentaho.di.ui.spoon.wizards.RipDatabaseWizardPage1) RipDatabaseWizardPage3(org.pentaho.di.ui.spoon.wizards.RipDatabaseWizardPage3) RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) Shell(org.eclipse.swt.widgets.Shell) Repository(org.pentaho.di.repository.Repository) JobMeta(org.pentaho.di.job.JobMeta) DefaultLogLevel(org.pentaho.di.core.logging.DefaultLogLevel) TransHopMeta(org.pentaho.di.trans.TransHopMeta) IRunnableWithProgress(org.eclipse.jface.operation.IRunnableWithProgress) MessageBox(org.eclipse.swt.widgets.MessageBox) VisibleForTesting(com.google.common.annotations.VisibleForTesting) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) TableInputMeta(org.pentaho.di.trans.steps.tableinput.TableInputMeta) JobExecutionConfigurationDialog(org.pentaho.di.ui.job.dialog.JobExecutionConfigurationDialog) ExtensionPointHandler(org.pentaho.di.core.extension.ExtensionPointHandler) JobMeta(org.pentaho.di.job.JobMeta) JobHopMeta(org.pentaho.di.job.JobHopMeta) 
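
ripDB likewise only builds and saves the artifacts; the returned JobMeta is left open in Spoon rather than executed. A minimal sketch (illustrative, not part of SpoonJobDelegate) of running the generated job with the standard Kettle job engine:

// Hedged sketch: run the JobMeta returned by ripDB (org.pentaho.di.job.Job).
// Passing null as the repository assumes a file-based job.
Job job = new Job(null, jobMeta);
job.start();
job.waitUntilFinished();
if (job.getErrors() > 0) {
    // one of the SQL or transformation entries failed; inspect the job log
}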

Example 3 with TableInputMeta

use of org.pentaho.di.trans.steps.tableinput.TableInputMeta in project pentaho-kettle by pentaho.

the class JobGenerator method generateDimensionTransformation.

/**
 * Generates a transformation template that updates the dimension for the given logical table.
 * @param databaseMeta the target (dimension) database
 * @param logicalTable the logical table to generate the dimension transformation for
 * @return the generated transformation metadata
 */
public TransMeta generateDimensionTransformation(DatabaseMeta databaseMeta, LogicalTable logicalTable) {
    TransMeta transMeta = new TransMeta();
    String tableName = ConceptUtil.getName(logicalTable, locale);
    String tableDescription = ConceptUtil.getDescription(logicalTable, locale);
    DimensionType dimensionType = ConceptUtil.getDimensionType(logicalTable);
    transMeta.setName("Update dimension '" + tableName + "'");
    transMeta.setDescription(tableDescription);
    // Let's not forget to add the target database
    // 
    transMeta.addDatabase(databaseMeta);
    Point location = new Point(GRAPH_LEFT, GRAPH_TOP);
    // Find all the source columns and source tables and put them into a table input step...
    // 
    StepMeta inputStep = generateTableInputStepFromLogicalTable(logicalTable);
    DatabaseMeta sourceDatabaseMeta = ((TableInputMeta) inputStep.getStepMetaInterface()).getDatabaseMeta();
    if (sourceDatabaseMeta != null)
        transMeta.addOrReplaceDatabase(sourceDatabaseMeta);
    inputStep.setLocation(location.x, location.y);
    nextLocation(location);
    transMeta.addStep(inputStep);
    StepMeta lastStep = inputStep;
    // Generate an dimension lookup/update step for each table
    // 
    StepMeta dimensionStep;
    if (dimensionType == DimensionType.SLOWLY_CHANGING_DIMENSION) {
        dimensionStep = generateDimensionLookupStepFromLogicalTable(databaseMeta, logicalTable);
    } else {
        dimensionStep = generateCombinationLookupStepFromLogicalTable(databaseMeta, logicalTable);
    }
    dimensionStep.setLocation(location.x, location.y);
    nextLocation(location);
    transMeta.addStep(dimensionStep);
    TransHopMeta transHop = new TransHopMeta(lastStep, dimensionStep);
    transMeta.addTransHop(transHop);
    return transMeta;
}
Also used : DimensionType(org.pentaho.di.starmodeler.DimensionType) TransMeta(org.pentaho.di.trans.TransMeta) Point(org.pentaho.di.core.gui.Point) TransHopMeta(org.pentaho.di.trans.TransHopMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) TableInputMeta(org.pentaho.di.trans.steps.tableinput.TableInputMeta)
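
As a hedged illustration of how the returned transformation can be inspected, the following sketch assumes a jobGenerator instance plus databaseMeta and logicalTable values; none of these names come from the example above:

// Hedged sketch: verify the structure of the generated transformation.
TransMeta generated = jobGenerator.generateDimensionTransformation(databaseMeta, logicalTable);
StepMeta inputStep = generated.getStep(0); // the table input step is added first
TableInputMeta tableInput = (TableInputMeta) inputStep.getStepMetaInterface();
System.out.println("Source SQL: " + tableInput.getSQL());
System.out.println("Hop count: " + generated.nrTransHops()); // expect 1: input -> dimension step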

Example 4 with TableInputMeta

use of org.pentaho.di.trans.steps.tableinput.TableInputMeta in project pentaho-kettle by pentaho.

the class JobGenerator method generateTableInputStepFromLogicalTable.

private StepMeta generateTableInputStepFromLogicalTable(LogicalTable logicalTable) {
    String name = ConceptUtil.getName(logicalTable, locale);
    String description = ConceptUtil.getDescription(logicalTable, locale);
    TableInputMeta meta = new TableInputMeta();
    // Source database, retain first
    // Source table, retain first
    // Source columns, retain all
    // 
    DatabaseMeta sourceDatabaseMeta = null;
    String sourceTable = null;
    List<String> sourceColumns = new ArrayList<String>();
    for (LogicalColumn column : logicalTable.getLogicalColumns()) {
        String phDb = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_SOURCE_DB);
        String phTable = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_SOURCE_TABLE);
        String phCol = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_SOURCE_COLUMN);
        if (!Utils.isEmpty(phDb) && sourceDatabaseMeta == null) {
            sourceDatabaseMeta = DatabaseMeta.findDatabase(databases, phDb);
        }
        if (!Utils.isEmpty(phTable)) {
            sourceTable = phTable;
        }
        if (!Utils.isEmpty(phCol)) {
            sourceColumns.add(phCol);
        }
    }
    String sql = "SELECT * FROM --< Source query for dimension '" + name + "'";
    meta.setDatabaseMeta(sourceDatabaseMeta);
    if (sourceDatabaseMeta != null && !Utils.isEmpty(sourceTable)) {
        sql = "SELECT ";
        if (sourceColumns.isEmpty()) {
            sql += " * ";
        } else {
            sql += Const.CR;
        }
        boolean first = true;
        for (String sourceColumn : sourceColumns) {
            if (first) {
                first = false;
            } else {
                sql += "      , ";
            }
            sql += sourceDatabaseMeta.quoteField(sourceColumn) + Const.CR;
        }
        sql += "FROM " + sourceDatabaseMeta.getQuotedSchemaTableCombination(null, sourceTable);
    }
    meta.setSQL(sql);
    // Wrap it up...
    // 
    StepMeta stepMeta = new StepMeta("Source data for '" + name + "'", meta);
    stepMeta.drawStep();
    stepMeta.setDescription("Reads data for '" + name + "' : " + description);
    return stepMeta;
}
Also used : LogicalColumn(org.pentaho.metadata.model.LogicalColumn) ArrayList(java.util.ArrayList) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) TableInputMeta(org.pentaho.di.trans.steps.tableinput.TableInputMeta)
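
To make the string building above concrete, here is a hedged illustration with hypothetical table and column names; actual quoting depends on the source DatabaseMeta:

// Hedged illustration (hypothetical names): for sourceTable = "CUSTOMER" and
// sourceColumns = [FIRST_NAME, LAST_NAME], the loop above yields roughly:
//
//   SELECT
//   FIRST_NAME
//         , LAST_NAME
//   FROM CUSTOMER
//
// When no source table can be resolved, the placeholder query
// "SELECT * FROM --< Source query for dimension '<name>'" is kept instead.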

Example 5 with TableInputMeta

use of org.pentaho.di.trans.steps.tableinput.TableInputMeta in project pentaho-metaverse by pentaho.

the class MetaverseValidationIT method testTableInput.

@Test
public void testTableInput() throws Exception {
    TableInputStepNode tableNode = root.getTableInputStepNode();
    TableInputMeta meta = (TableInputMeta) getStepMeta(tableNode);
    assertNotNull(tableNode);
    assertNotNull(tableNode.getDatasource(meta.getDatabaseMeta().getName()));
    assertNotNull(tableNode.getDatabaseQueryNode());
    assertEquals(meta.getSQL(), tableNode.getDatabaseQueryNode().getQuery());
    Iterable<StreamFieldNode> outputStreamFields = tableNode.getOutputStreamFields();
    Iterable<DatabaseColumnNode> databaseColumns = tableNode.getDatabaseQueryNode().getDatabaseColumns();
    assertEquals(getIterableSize(outputStreamFields), getIterableSize(databaseColumns));
}
Also used : StreamFieldNode(org.pentaho.metaverse.frames.StreamFieldNode) TableInputStepNode(org.pentaho.metaverse.frames.TableInputStepNode) TableInputMeta(org.pentaho.di.trans.steps.tableinput.TableInputMeta) DatabaseColumnNode(org.pentaho.metaverse.frames.DatabaseColumnNode) Test(org.junit.Test)
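
The test relies on a getIterableSize helper defined elsewhere in MetaverseValidationIT; a plausible sketch of such a helper (an assumption, not the project's actual implementation) is:

// Hedged sketch (assumption): count the elements of a framed graph Iterable.
private int getIterableSize(Iterable<?> iterable) {
    int count = 0;
    for (Object element : iterable) {
        if (element != null) {
            count++;
        }
    }
    return count;
}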

Aggregations

TableInputMeta (org.pentaho.di.trans.steps.tableinput.TableInputMeta) 18
StepMeta (org.pentaho.di.trans.step.StepMeta) 14
TransMeta (org.pentaho.di.trans.TransMeta) 8
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta) 6
Test (org.junit.Test) 5
PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry) 4
TransHopMeta (org.pentaho.di.trans.TransHopMeta) 4
ArrayList (java.util.ArrayList) 3
Date (java.util.Date) 2
HashMap (java.util.HashMap) 2
MessageBox (org.eclipse.swt.widgets.MessageBox) 2
Matchers.anyString (org.mockito.Matchers.anyString) 2
NotePadMeta (org.pentaho.di.core.NotePadMeta) 2
Database (org.pentaho.di.core.database.Database) 2
KettleException (org.pentaho.di.core.exception.KettleException) 2
Point (org.pentaho.di.core.gui.Point) 2
StepPluginType (org.pentaho.di.core.plugins.StepPluginType) 2
ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface) 2
Trans (org.pentaho.di.trans.Trans) 2
BaseStepMeta (org.pentaho.di.trans.step.BaseStepMeta) 2