Example 1 with TableOutputMeta

Use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-kettle by pentaho.

In the class SpoonDBDelegate, method copyTable:

public boolean copyTable(DatabaseMeta sourceDBInfo, DatabaseMeta targetDBInfo, String tablename) {
    try {
        // 
        // Create a new transformation...
        // 
        TransMeta meta = new TransMeta();
        meta.addDatabase(sourceDBInfo);
        meta.addDatabase(targetDBInfo);
        // 
        // Add a note
        // 
        // "Reads information from table ["+tablename+"] on database ["+sourceDBInfo+"]"
        String note = BaseMessages.getString(PKG, "Spoon.Message.Note.ReadInformationFromTableOnDB", tablename, sourceDBInfo.getDatabaseName()) + Const.CR;
        // "Writes the information to table ["+tablename+"] on database ["+targetDBInfo+"]"
        note += BaseMessages.getString(PKG, "Spoon.Message.Note.WriteInformationToTableOnDB", tablename, targetDBInfo.getDatabaseName());
        NotePadMeta ni = new NotePadMeta(note, 150, 10, -1, -1);
        meta.addNote(ni);
        // 
        // create the source step...
        // 
        // "read from ["+tablename+"]"
        String fromstepname = BaseMessages.getString(PKG, "Spoon.Message.Note.ReadFromTable", tablename);
        TableInputMeta tii = new TableInputMeta();
        tii.setDatabaseMeta(sourceDBInfo);
        tii.setSQL("SELECT * FROM " + tablename);
        PluginRegistry registry = PluginRegistry.getInstance();
        String fromstepid = registry.getPluginId(StepPluginType.class, tii);
        StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
        fromstep.setLocation(150, 100);
        fromstep.setDraw(true);
        fromstep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.ReadInformationFromTableOnDB", tablename, sourceDBInfo.getDatabaseName()));
        meta.addStep(fromstep);
        // 
        // add logic to rename fields in case any of the field names contain
        // reserved words...
        // Use metadata logic in SelectValues, use SelectValueInfo...
        // 
        Database sourceDB = new Database(loggingObject, sourceDBInfo);
        sourceDB.shareVariablesWith(meta);
        sourceDB.connect();
        try {
            // Get the fields for the input table...
            RowMetaInterface fields = sourceDB.getTableFields(tablename);
            // See if we need to deal with reserved words...
            int nrReserved = targetDBInfo.getNrReservedWords(fields);
            if (nrReserved > 0) {
                SelectValuesMeta svi = new SelectValuesMeta();
                svi.allocate(0, 0, nrReserved);
                int nr = 0;
                // CHECKSTYLE:Indentation:OFF
                for (int i = 0; i < fields.size(); i++) {
                    ValueMetaInterface v = fields.getValueMeta(i);
                    if (targetDBInfo.isReservedWord(v.getName())) {
                        if (svi.getMeta()[nr] == null) {
                            svi.getMeta()[nr] = new SelectMetadataChange(svi);
                        }
                        svi.getMeta()[nr].setName(v.getName());
                        svi.getMeta()[nr].setRename(targetDBInfo.quoteField(v.getName()));
                        nr++;
                    }
                }
                String selstepname = BaseMessages.getString(PKG, "Spoon.Message.Note.HandleReservedWords");
                String selstepid = registry.getPluginId(StepPluginType.class, svi);
                StepMeta selstep = new StepMeta(selstepid, selstepname, svi);
                selstep.setLocation(350, 100);
                selstep.setDraw(true);
                selstep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.RenamesReservedWords", targetDBInfo.getPluginId()));
                meta.addStep(selstep);
                TransHopMeta shi = new TransHopMeta(fromstep, selstep);
                meta.addTransHop(shi);
                fromstep = selstep;
            }
            // 
            // Create the target step...
            // 
            // 
            // Add the TableOutputMeta step...
            // 
            String tostepname = BaseMessages.getString(PKG, "Spoon.Message.Note.WriteToTable", tablename);
            TableOutputMeta toi = new TableOutputMeta();
            toi.setDatabaseMeta(targetDBInfo);
            toi.setTableName(tablename);
            toi.setCommitSize(200);
            toi.setTruncateTable(true);
            String tostepid = registry.getPluginId(StepPluginType.class, toi);
            StepMeta tostep = new StepMeta(tostepid, tostepname, toi);
            tostep.setLocation(550, 100);
            tostep.setDraw(true);
            tostep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.WriteInformationToTableOnDB2", tablename, targetDBInfo.getDatabaseName()));
            meta.addStep(tostep);
            // 
            // Add a hop between the two steps...
            // 
            TransHopMeta hi = new TransHopMeta(fromstep, tostep);
            meta.addTransHop(hi);
            // OK, if we're still here: overwrite the current transformation...
            // Set a name on this generated transformation
            // 
            String name = "Copy table from [" + sourceDBInfo.getName() + "] to [" + targetDBInfo.getName() + "]";
            String transName = name;
            int nr = 1;
            if (spoon.delegates.trans.getTransformation(transName) != null) {
                nr++;
                transName = name + " " + nr;
            }
            meta.setName(transName);
            spoon.delegates.trans.addTransGraph(meta);
            spoon.refreshGraph();
            spoon.refreshTree();
        } finally {
            sourceDB.disconnect();
        }
    } catch (Exception e) {
        new ErrorDialog(spoon.getShell(), BaseMessages.getString(PKG, "Spoon.Dialog.UnexpectedError.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.UnexpectedError.Message"), new KettleException(e.getMessage(), e));
        return false;
    }
    return true;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) TransMeta(org.pentaho.di.trans.TransMeta) ErrorDialog(org.pentaho.di.ui.core.dialog.ErrorDialog) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) TableOutputMeta(org.pentaho.di.trans.steps.tableoutput.TableOutputMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) TableInputMeta(org.pentaho.di.trans.steps.tableinput.TableInputMeta) KettleException(org.pentaho.di.core.exception.KettleException) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface) SelectValuesMeta(org.pentaho.di.trans.steps.selectvalues.SelectValuesMeta) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) Database(org.pentaho.di.core.database.Database) SelectMetadataChange(org.pentaho.di.trans.steps.selectvalues.SelectMetadataChange) NotePadMeta(org.pentaho.di.core.NotePadMeta) TransHopMeta(org.pentaho.di.trans.TransHopMeta)
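
Stripped of the Spoon UI plumbing, the pattern this example demonstrates is compact: configure a TableInputMeta as the source, a TableOutputMeta as the target, wrap each in a StepMeta, and connect them with a TransHopMeta. The snippet below is only a condensed, hedged sketch of that pattern; sourceDBInfo, targetDBInfo and the table name "my_table" are assumed to be supplied by the caller, and imports are the same ones listed under "Also used" above.

// A condensed sketch of the copy-table pattern above (assumed inputs: sourceDBInfo, targetDBInfo).
TransMeta copyMeta = new TransMeta();
copyMeta.addDatabase(sourceDBInfo);
copyMeta.addDatabase(targetDBInfo);

// Source: read every row from the table.
TableInputMeta input = new TableInputMeta();
input.setDatabaseMeta(sourceDBInfo);
input.setSQL("SELECT * FROM my_table");
StepMeta inputStep = new StepMeta(PluginRegistry.getInstance().getPluginId(StepPluginType.class, input), "Read my_table", input);
inputStep.setLocation(150, 100);
inputStep.setDraw(true);
copyMeta.addStep(inputStep);

// Target: truncate and reload the same table on the target connection.
TableOutputMeta output = new TableOutputMeta();
output.setDatabaseMeta(targetDBInfo);
output.setTableName("my_table");
output.setCommitSize(200);
output.setTruncateTable(true);
StepMeta outputStep = new StepMeta(PluginRegistry.getInstance().getPluginId(StepPluginType.class, output), "Write my_table", output);
outputStep.setLocation(550, 100);
outputStep.setDraw(true);
copyMeta.addStep(outputStep);

// A single hop wires the input step to the output step.
copyMeta.addTransHop(new TransHopMeta(inputStep, outputStep));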

Example 2 with TableOutputMeta

Use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-kettle by pentaho.

In the class SpoonJobDelegate, method ripDB:

public JobMeta ripDB(final List<DatabaseMeta> databases, final String jobname, final RepositoryDirectoryInterface repdir, final String directory, final DatabaseMeta sourceDbInfo, final DatabaseMeta targetDbInfo, final String[] tables) {
    // 
    // Create a new job...
    // 
    final JobMeta jobMeta = new JobMeta();
    jobMeta.setDatabases(databases);
    jobMeta.setFilename(null);
    jobMeta.setName(jobname);
    if (spoon.getRepository() != null) {
        jobMeta.setRepositoryDirectory(repdir);
    } else {
        jobMeta.setFilename(Const.createFilename(directory, jobname, "." + Const.STRING_JOB_DEFAULT_EXT));
    }
    spoon.refreshTree();
    spoon.refreshGraph();
    final Point location = new Point(50, 50);
    // The start entry...
    final JobEntryCopy start = JobMeta.createStartEntry();
    start.setLocation(new Point(location.x, location.y));
    start.setDrawn();
    jobMeta.addJobEntry(start);
    // final Thread parentThread = Thread.currentThread();
    // Create a dialog with a progress indicator!
    IRunnableWithProgress op = monitor -> {
        try {
            // This is running in a new process: copy some KettleVariables
            // info
            // LocalVariables.getInstance().createKettleVariables(Thread.currentThread().getName(),
            // parentThread.getName(), true);
            monitor.beginTask(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.BuildingNewJob"), tables.length);
            monitor.worked(0);
            JobEntryCopy previous = start;
            // Loop over the table-names...
            for (int i = 0; i < tables.length && !monitor.isCanceled(); i++) {
                monitor.setTaskName(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ProcessingTable") + tables[i] + "]...");
                // 
                // Create the new transformation...
                // 
                String transname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Transname1") + sourceDbInfo + "].[" + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Transname2") + targetDbInfo + "]";
                TransMeta transMeta = new TransMeta();
                if (repdir != null) {
                    transMeta.setRepositoryDirectory(repdir);
                } else {
                    transMeta.setFilename(Const.createFilename(directory, transname, "." + Const.STRING_TRANS_DEFAULT_EXT));
                }
                // Add the source & target db
                transMeta.addDatabase(sourceDbInfo);
                transMeta.addDatabase(targetDbInfo);
                // 
                // Add a note
                // 
                String note = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note1") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note2") + sourceDbInfo + "]" + Const.CR;
                note += BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note3") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note4") + targetDbInfo + "]";
                NotePadMeta ni = new NotePadMeta(note, 150, 10, -1, -1);
                transMeta.addNote(ni);
                // 
                // Add the TableInputMeta step...
                // 
                String fromstepname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Name") + tables[i] + "]";
                TableInputMeta tii = new TableInputMeta();
                tii.setDefault();
                tii.setDatabaseMeta(sourceDbInfo);
                // It's already quoted!
                tii.setSQL("SELECT * FROM " + tables[i]);
                String fromstepid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, tii);
                StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
                fromstep.setLocation(150, 100);
                fromstep.setDraw(true);
                fromstep.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Description") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Description2") + sourceDbInfo + "]");
                transMeta.addStep(fromstep);
                // 
                // Add the TableOutputMeta step...
                // 
                String tostepname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Name") + tables[i] + "]";
                TableOutputMeta toi = new TableOutputMeta();
                toi.setDatabaseMeta(targetDbInfo);
                toi.setTableName(tables[i]);
                toi.setCommitSize(100);
                toi.setTruncateTable(true);
                String tostepid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, toi);
                StepMeta tostep = new StepMeta(tostepid, tostepname, toi);
                tostep.setLocation(500, 100);
                tostep.setDraw(true);
                tostep.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Description1") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Description2") + targetDbInfo + "]");
                transMeta.addStep(tostep);
                // 
                // Add a hop between the two steps...
                // 
                TransHopMeta hi = new TransHopMeta(fromstep, tostep);
                transMeta.addTransHop(hi);
                // 
                // Now we generate the SQL needed to run for this
                // transformation.
                // 
                // First set the limit to 1 to speed things up!
                String tmpSql = tii.getSQL();
                tii.setSQL(tii.getSQL() + sourceDbInfo.getLimitClause(1));
                String sql;
                try {
                    sql = transMeta.getSQLStatementsString();
                } catch (KettleStepException kse) {
                    throw new InvocationTargetException(kse, BaseMessages.getString(PKG, "Spoon.RipDB.Exception.ErrorGettingSQLFromTransformation") + transMeta + "] : " + kse.getMessage());
                }
                // remove the limit
                tii.setSQL(tmpSql);
                // 
                // Now, save the transformation...
                // 
                boolean ok;
                if (spoon.getRepository() != null) {
                    ok = spoon.saveToRepository(transMeta, false);
                } else {
                    ok = spoon.saveToFile(transMeta);
                }
                if (!ok) {
                    throw new InvocationTargetException(new Exception(BaseMessages.getString(PKG, "Spoon.RipDB.Exception.UnableToSaveTransformationToRepository")), BaseMessages.getString(PKG, "Spoon.RipDB.Exception.UnableToSaveTransformationToRepository"));
                }
                // We can now continue with the population of the job...
                // //////////////////////////////////////////////////////////////////////
                location.x = 250;
                if (i > 0) {
                    location.y += 100;
                }
                // 
                if (!Utils.isEmpty(sql)) {
                    String jesqlname = BaseMessages.getString(PKG, "Spoon.RipDB.JobEntrySQL.Name") + tables[i] + "]";
                    JobEntrySQL jesql = new JobEntrySQL(jesqlname);
                    jesql.setDatabase(targetDbInfo);
                    jesql.setSQL(sql);
                    jesql.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.JobEntrySQL.Description") + targetDbInfo + "].[" + tables[i] + "]");
                    JobEntryCopy jecsql = new JobEntryCopy();
                    jecsql.setEntry(jesql);
                    jecsql.setLocation(new Point(location.x, location.y));
                    jecsql.setDrawn();
                    jobMeta.addJobEntry(jecsql);
                    // Add the hop too...
                    JobHopMeta jhi = new JobHopMeta(previous, jecsql);
                    jobMeta.addJobHop(jhi);
                    previous = jecsql;
                }
                // 
                // Add the jobentry for the transformation too...
                // 
                String jetransname = BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Name") + tables[i] + "]";
                JobEntryTrans jetrans = new JobEntryTrans(jetransname);
                jetrans.setTransname(transMeta.getName());
                if (spoon.getRepository() != null) {
                    jetrans.setSpecificationMethod(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
                    jetrans.setDirectory(transMeta.getRepositoryDirectory().getPath());
                } else {
                    jetrans.setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
                    jetrans.setFileName(Const.createFilename("${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}", transMeta.getName(), "." + Const.STRING_TRANS_DEFAULT_EXT));
                }
                JobEntryCopy jectrans = new JobEntryCopy(jetrans);
                jectrans.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description1") + Const.CR + BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description2") + sourceDbInfo + "].[" + tables[i] + "]" + Const.CR + BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description3") + targetDbInfo + "].[" + tables[i] + "]");
                jectrans.setDrawn();
                location.x += 400;
                jectrans.setLocation(new Point(location.x, location.y));
                jobMeta.addJobEntry(jectrans);
                // Add a hop between the last 2 job entries.
                JobHopMeta jhi2 = new JobHopMeta(previous, jectrans);
                jobMeta.addJobHop(jhi2);
                previous = jectrans;
                monitor.worked(1);
            }
            monitor.worked(100);
            monitor.done();
        } catch (Exception e) {
            new ErrorDialog(spoon.getShell(), "Error", "An unexpected error occurred!", e);
        }
    };
    try {
        ProgressMonitorDialog pmd = new ProgressMonitorDialog(spoon.getShell());
        pmd.run(false, true, op);
    } catch (InvocationTargetException | InterruptedException e) {
        new ErrorDialog(spoon.getShell(), BaseMessages.getString(PKG, "Spoon.ErrorDialog.RipDB.ErrorRippingTheDatabase.Title"), BaseMessages.getString(PKG, "Spoon.ErrorDialog.RipDB.ErrorRippingTheDatabase.Message"), e);
        return null;
    } finally {
        spoon.refreshGraph();
        spoon.refreshTree();
    }
    return jobMeta;
}
Also used : DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) ObjectType(org.pentaho.di.ui.spoon.TabMapEntry.ObjectType) JobGraph(org.pentaho.di.ui.spoon.job.JobGraph) StepPluginType(org.pentaho.di.core.plugins.StepPluginType) TableOutputMeta(org.pentaho.di.trans.steps.tableoutput.TableOutputMeta) Point(org.pentaho.di.core.gui.Point) Date(java.util.Date) JobEntrySQL(org.pentaho.di.job.entries.sql.JobEntrySQL) GUIResource(org.pentaho.di.ui.core.gui.GUIResource) TransMeta(org.pentaho.di.trans.TransMeta) TabMapEntry(org.pentaho.di.ui.spoon.TabMapEntry) Document(org.w3c.dom.Document) Job(org.pentaho.di.job.Job) Map(java.util.Map) NotePadMeta(org.pentaho.di.core.NotePadMeta) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) BaseMessages(org.pentaho.di.i18n.BaseMessages) StepMeta(org.pentaho.di.trans.step.StepMeta) ProgressMonitorDialog(org.eclipse.jface.dialogs.ProgressMonitorDialog) PropsUI(org.pentaho.di.ui.core.PropsUI) Wizard(org.eclipse.jface.wizard.Wizard) TabItem(org.pentaho.xul.swt.tab.TabItem) Utils(org.pentaho.di.core.util.Utils) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) InvocationTargetException(java.lang.reflect.InvocationTargetException) List(java.util.List) JobEntrySpecial(org.pentaho.di.job.entries.special.JobEntrySpecial) JobEntryDialogInterface(org.pentaho.di.job.entry.JobEntryDialogInterface) WizardDialog(org.eclipse.jface.wizard.WizardDialog) SWT(org.eclipse.swt.SWT) KettleStepException(org.pentaho.di.core.exception.KettleStepException) TransAction(org.pentaho.di.core.undo.TransAction) ErrorDialog(org.pentaho.di.ui.core.dialog.ErrorDialog) ObjectLocationSpecificationMethod(org.pentaho.di.core.ObjectLocationSpecificationMethod) JobHopMeta(org.pentaho.di.job.JobHopMeta) KettleException(org.pentaho.di.core.exception.KettleException) HashMap(java.util.HashMap) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) JobEntryPluginType(org.pentaho.di.core.plugins.JobEntryPluginType) Constructor(java.lang.reflect.Constructor) Spoon(org.pentaho.di.ui.spoon.Spoon) ArrayList(java.util.ArrayList) PluginInterface(org.pentaho.di.core.plugins.PluginInterface) Const(org.pentaho.di.core.Const) XMLHandler(org.pentaho.di.core.xml.XMLHandler) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) Node(org.w3c.dom.Node) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) RipDatabaseWizardPage2(org.pentaho.di.ui.spoon.wizards.RipDatabaseWizardPage2) RipDatabaseWizardPage1(org.pentaho.di.ui.spoon.wizards.RipDatabaseWizardPage1) RipDatabaseWizardPage3(org.pentaho.di.ui.spoon.wizards.RipDatabaseWizardPage3) RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) Shell(org.eclipse.swt.widgets.Shell) Repository(org.pentaho.di.repository.Repository) JobMeta(org.pentaho.di.job.JobMeta) DefaultLogLevel(org.pentaho.di.core.logging.DefaultLogLevel) TransHopMeta(org.pentaho.di.trans.TransHopMeta) IRunnableWithProgress(org.eclipse.jface.operation.IRunnableWithProgress) MessageBox(org.eclipse.swt.widgets.MessageBox) VisibleForTesting(com.google.common.annotations.VisibleForTesting) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) TableInputMeta(org.pentaho.di.trans.steps.tableinput.TableInputMeta) JobExecutionConfigurationDialog(org.pentaho.di.ui.job.dialog.JobExecutionConfigurationDialog) ExtensionPointHandler(org.pentaho.di.core.extension.ExtensionPointHandler) JobMeta(org.pentaho.di.job.JobMeta) JobHopMeta(org.pentaho.di.job.JobHopMeta) 
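
Once the per-table transformation has been saved, the job side of ripDB reduces to a handful of calls: a START entry, an optional JobEntrySQL copy for the generated DDL, a JobEntryTrans copy pointing at the saved transformation, and JobHopMeta links between consecutive entries. The following is only a sketch of that skeleton; targetDbInfo, ddlSql, rippedTransName and rippedTransFilename are placeholders for values the surrounding code would provide.

// Sketch of the generated job skeleton (placeholders: targetDbInfo, ddlSql, rippedTransName, rippedTransFilename).
JobMeta job = new JobMeta();
job.setName("Rip database job");

JobEntryCopy startEntry = JobMeta.createStartEntry();
startEntry.setLocation(new Point(50, 50));
startEntry.setDrawn();
job.addJobEntry(startEntry);
JobEntryCopy previousEntry = startEntry;

// Optional SQL entry: run the DDL produced for the target table before copying rows.
JobEntrySQL createTable = new JobEntrySQL("Create target table");
createTable.setDatabase(targetDbInfo);
createTable.setSQL(ddlSql);
JobEntryCopy createTableCopy = new JobEntryCopy();
createTableCopy.setEntry(createTable);
createTableCopy.setLocation(new Point(250, 50));
createTableCopy.setDrawn();
job.addJobEntry(createTableCopy);
job.addJobHop(new JobHopMeta(previousEntry, createTableCopy));
previousEntry = createTableCopy;

// Transformation entry: execute the saved per-table transformation by file name.
JobEntryTrans copyRows = new JobEntryTrans("Copy rows");
copyRows.setTransname(rippedTransName);
copyRows.setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
copyRows.setFileName(rippedTransFilename);
JobEntryCopy copyRowsCopy = new JobEntryCopy(copyRows);
copyRowsCopy.setLocation(new Point(650, 50));
copyRowsCopy.setDrawn();
job.addJobEntry(copyRowsCopy);
job.addJobHop(new JobHopMeta(previousEntry, copyRowsCopy));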

Example 3 with TableOutputMeta

Use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-kettle by pentaho.

In the class TableAgileMart, method adjustSchema:

@Override
public boolean adjustSchema() {
    TableOutputMeta meta = getMeta();
    TableOutputData data = getData();
    TransMeta transMeta = getTransMeta();
    StepMeta stepMeta = meta.getParentStepMeta();
    DBCache dbcache = transMeta.getDbCache();
    StringBuilder messageBuffer = new StringBuilder();
    try {
        RowMetaInterface prev = transMeta.getPrevStepFields(stepMeta.getName());
        if (log.isDetailed()) {
            logDetailed("Attempting to auto adjust table structure");
        }
        if (log.isDetailed()) {
            logDetailed("getTransMeta: " + getTransMeta());
        }
        if (log.isDetailed()) {
            logDetailed("getStepname: " + getStepname());
        }
        SQLStatement statement = meta.getSQLStatements(transMeta, stepMeta, prev, repository, metaStore);
        if (log.isDetailed()) {
            logDetailed("Statement: " + statement);
        }
        if (log.isDetailed() && statement != null) {
            logDetailed("Statement has SQL: " + statement.hasSQL());
        }
        if (statement != null && statement.hasSQL()) {
            String sql = statement.getSQL();
            if (log.isDetailed()) {
                logDetailed("Trying: " + sql);
            }
            try {
                log.logDetailed("Executing SQL: " + Const.CR + sql);
                data.db.execStatement(sql);
                // Clear the database cache, in case we're using one...
                if (dbcache != null) {
                    dbcache.clear(data.databaseMeta.getName());
                }
            } catch (Exception dbe) {
                String error = BaseMessages.getString(PKG, "SQLEditor.Log.SQLExecError", sql, dbe.toString());
                messageBuffer.append(error).append(Const.CR);
                return false;
            }
            if (log.isDetailed()) {
                logDetailed("Successfull: " + sql);
            }
        } else if (statement.getError() == null) {
            // there were no changes to be made
            return true;
        } else {
            this.message = statement.getError();
            logError(statement.getError());
            return false;
        }
    } catch (Exception e) {
        logError("An error ocurred trying to adjust the table schema", e);
    }
    return true;
}
Also used : TableOutputData(org.pentaho.di.trans.steps.tableoutput.TableOutputData) DBCache(org.pentaho.di.core.DBCache) TransMeta(org.pentaho.di.trans.TransMeta) TableOutputMeta(org.pentaho.di.trans.steps.tableoutput.TableOutputMeta) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) StepMeta(org.pentaho.di.trans.step.StepMeta) SQLStatement(org.pentaho.di.core.SQLStatement) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)
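
The heart of adjustSchema is meta.getSQLStatements(...), which compares the fields arriving at the step with the target table and returns the DDL (if any) needed to reconcile them. Below is a condensed, hedged sketch of that check-and-execute pattern, using the same variables as the method above and assuming the step's Database connection is open in data.db.

// Sketch: ask the TableOutputMeta for the DDL that would align the table with the incoming rows,
// then execute it over the step's open connection.
RowMetaInterface prevFields = transMeta.getPrevStepFields(stepMeta.getName());
SQLStatement statement = meta.getSQLStatements(transMeta, stepMeta, prevFields, repository, metaStore);
if (statement != null && statement.hasSQL()) {
    data.db.execStatement(statement.getSQL());
    transMeta.getDbCache().clear(data.databaseMeta.getName()); // drop stale cached table layouts
} else if (statement != null && statement.getError() != null) {
    logError(statement.getError()); // the step could not generate SQL for this row layout
}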

Example 4 with TableOutputMeta

Use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-metaverse by pentaho.

In the class MetaverseValidationIT, method testTableOutputStepNode:

@Test
public void testTableOutputStepNode() throws Exception {
    // this tests a specific step in a specific transform
    TableOutputStepNode node = root.getTableOutputStepNode();
    // check the table that it writes to
    TableOutputMeta meta = (TableOutputMeta) getStepMeta(node);
    String tableName = meta.getTableName();
    String schema = meta.getSchemaName();
    boolean truncateTable = meta.truncateTable();
    DatabaseTableNode databaseTableNode = node.getDatabaseTable();
    assertEquals(tableName, databaseTableNode.getName());
    assertEquals(schema, node.getSchema());
    assertEquals(truncateTable, node.isTruncateTable());
    Iterable<StreamFieldNode> inputs = node.getInputStreamFields();
    Iterable<StreamFieldNode> outputs = node.getOutputStreamFields();
    assertEquals(getIterableSize(inputs) + meta.getFieldDatabase().length, getIterableSize(outputs));
    for (StreamFieldNode input : inputs) {
        assertEquals(input.getName(), input.getFieldPopulatedByMe().getName());
    }
    DatasourceNode datasource = node.getDatasource(meta.getDatabaseMeta().getName());
    assertEquals(meta.getDatabaseMeta().getHostname(), datasource.getHost());
    assertEquals(meta.getDatabaseMeta().getDatabasePortNumberString(), datasource.getPort());
    assertEquals(meta.getDatabaseMeta().getUsername(), datasource.getUserName());
    assertEquals(meta.getDatabaseMeta().getDatabaseName(), datasource.getDatabaseName());
    assertEquals(DictionaryConst.NODE_TYPE_DATASOURCE, datasource.getType());
    assertEquals(meta.getTableName(), databaseTableNode.getName());
    assertEquals(DictionaryConst.NODE_TYPE_DATA_TABLE, databaseTableNode.getType());
}
Also used : MongoDbDatasourceNode(org.pentaho.metaverse.frames.MongoDbDatasourceNode) DatasourceNode(org.pentaho.metaverse.frames.DatasourceNode) DatabaseTableNode(org.pentaho.metaverse.frames.DatabaseTableNode) TableOutputMeta(org.pentaho.di.trans.steps.tableoutput.TableOutputMeta) StreamFieldNode(org.pentaho.metaverse.frames.StreamFieldNode) TableOutputStepNode(org.pentaho.metaverse.frames.TableOutputStepNode) Test(org.junit.Test)
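
The values asserted above come straight off the step's TableOutputMeta and its DatabaseMeta. For reference, a minimal sketch of the same reads against a freshly configured meta; the table and schema names are illustrative, and setSchemaName is assumed to be the setter paired with the getSchemaName used above.

// Sketch: the TableOutputMeta getters the assertions rely on.
TableOutputMeta meta = new TableOutputMeta();
meta.setDefault();
meta.setTableName("sales_fact");   // illustrative
meta.setSchemaName("dw");          // illustrative, assumed setter
meta.setTruncateTable(true);

String table = meta.getTableName();       // "sales_fact"
String schema = meta.getSchemaName();     // "dw"
boolean truncate = meta.truncateTable();  // true
DatabaseMeta dbMeta = meta.getDatabaseMeta(); // set via setDatabaseMeta(...) in a real transformation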

Example 5 with TableOutputMeta

Use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-metaverse by pentaho.

In the class TableOutputExternalResourceConsumerTest, method testGetResourcesFromMeta_static:

@Test
public void testGetResourcesFromMeta_static() throws Exception {
    TableOutputMeta meta = mock(TableOutputMeta.class);
    DatabaseMeta dbMeta = mock(DatabaseMeta.class);
    DatabaseInterface dbi = mock(DatabaseInterface.class);
    when(meta.getDatabaseMeta()).thenReturn(dbMeta);
    when(meta.getTableName()).thenReturn("tableName");
    when(meta.getSchemaName()).thenReturn("schemaName");
    when(meta.getParentStepMeta()).thenReturn(parentStepMeta);
    when(parentStepMeta.getParentTransMeta()).thenReturn(parentTransMeta);
    when(parentTransMeta.environmentSubstitute("tableName")).thenReturn("tableName");
    when(parentTransMeta.environmentSubstitute("schemaName")).thenReturn("schemaName");
    when(dbMeta.getAccessTypeDesc()).thenReturn("JNDI");
    when(dbMeta.getName()).thenReturn("TestConnection");
    when(dbMeta.getDescription()).thenReturn("my conn description");
    when(dbMeta.getDatabaseInterface()).thenReturn(dbi);
    when(dbi.getPluginId()).thenReturn("POSTGRESQL");
    Collection<IExternalResourceInfo> resources = consumer.getResourcesFromMeta(meta, new AnalysisContext(DictionaryConst.CONTEXT_STATIC));
    assertEquals(1, resources.size());
    IExternalResourceInfo res = resources.iterator().next();
    assertEquals("TestConnection", res.getName());
    assertEquals("tableName", res.getAttributes().get(DictionaryConst.PROPERTY_TABLE));
    assertEquals("schemaName", res.getAttributes().get(DictionaryConst.PROPERTY_SCHEMA));
}
Also used : DatabaseInterface(org.pentaho.di.core.database.DatabaseInterface) IExternalResourceInfo(org.pentaho.metaverse.api.model.IExternalResourceInfo) TableOutputMeta(org.pentaho.di.trans.steps.tableoutput.TableOutputMeta) AnalysisContext(org.pentaho.metaverse.api.AnalysisContext) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Test(org.junit.Test)
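
The environmentSubstitute stubs in this test are what give the consumer resolved table and schema names; TableOutputMeta itself returns whatever raw, possibly variable-bearing strings were configured. A short, hedged variation of the same stubbing with Kettle variable syntax as an illustrative value:

// Sketch: stubbing a variable-bearing table name that the parent TransMeta resolves.
when(meta.getTableName()).thenReturn("${TARGET_TABLE}");
when(parentTransMeta.environmentSubstitute("${TARGET_TABLE}")).thenReturn("resolved_table");
// If the consumer resolves values through the parent TransMeta, as the stubs above suggest,
// the attribute under DictionaryConst.PROPERTY_TABLE would then be "resolved_table".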

Aggregations

TableOutputMeta (org.pentaho.di.trans.steps.tableoutput.TableOutputMeta): 18
StepMeta (org.pentaho.di.trans.step.StepMeta): 9
KettleException (org.pentaho.di.core.exception.KettleException): 6
TransMeta (org.pentaho.di.trans.TransMeta): 6
Test (org.junit.Test): 4
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 4
ArrayList (java.util.ArrayList): 3
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 3
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface): 3
TableInputMeta (org.pentaho.di.trans.steps.tableinput.TableInputMeta): 3
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 3
InputStream (java.io.InputStream): 2
HashMap (java.util.HashMap): 2
NotePadMeta (org.pentaho.di.core.NotePadMeta): 2
SQLStatement (org.pentaho.di.core.SQLStatement): 2
DatabaseInterface (org.pentaho.di.core.database.DatabaseInterface): 2
KettleStepException (org.pentaho.di.core.exception.KettleStepException): 2
PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry): 2
Variables (org.pentaho.di.core.variables.Variables): 2
TransHopMeta (org.pentaho.di.trans.TransHopMeta): 2