Search in sources:

Example 16 with LogTableInterface

use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

The class JobMeta, method loadXML.

/**
 * Load a block of XML from a DOM node.
 *
 * @param jobnode                       The node to load from
 * @param fname                         The filename
 * @param rep                           The reference to a repository to load additional information from
 * @param metaStore                     the MetaStore to use
 * @param ignoreRepositorySharedObjects Do not load shared objects, handled separately
 * @param prompter                      The prompter to use in case a shared object gets overwritten
 * @throws KettleXMLException when the job metadata cannot be loaded from the XML node
 */
public void loadXML(Node jobnode, String fname, Repository rep, IMetaStore metaStore, boolean ignoreRepositorySharedObjects, OverwritePrompter prompter) throws KettleXMLException {
    // Props is only available after UI initialization; null props makes
    // shouldOverwrite() fall back to its default behavior below.
    Props props = null;
    if (Props.isInitialized()) {
        props = Props.getInstance();
    }
    try {
        // clear the jobs;
        clear();
        // Set the filename here so it can be used in variables for ALL aspects of the job FIX: PDI-8890
        if (null == rep) {
            setFilename(fname);
        } else {
            // Set the repository here so it can be used in variables for ALL aspects of the job FIX: PDI-16441
            setRepository(rep);
        }
        // 
        // get job info:
        // 
        setName(XMLHandler.getTagValue(jobnode, "name"));
        // 
        // Resolve the repository directory this job lives in, when loading from a repository.
        if (rep != null) {
            String directoryPath = XMLHandler.getTagValue(jobnode, "directory");
            if (directoryPath != null) {
                directory = rep.findDirectory(directoryPath);
                if (directory == null) {
                    // not found
                    // The root as default
                    directory = new RepositoryDirectory();
                }
            }
        }
        // description
        description = XMLHandler.getTagValue(jobnode, "description");
        // extended description
        extendedDescription = XMLHandler.getTagValue(jobnode, "extended_description");
        // job version
        jobVersion = XMLHandler.getTagValue(jobnode, "job_version");
        // job status (-1 when the tag is absent or not a number)
        jobStatus = Const.toInt(XMLHandler.getTagValue(jobnode, "job_status"), -1);
        // Created user/date
        createdUser = XMLHandler.getTagValue(jobnode, "created_user");
        String createDate = XMLHandler.getTagValue(jobnode, "created_date");
        if (createDate != null) {
            createdDate = XMLHandler.stringToDate(createDate);
        }
        // Changed user/date
        modifiedUser = XMLHandler.getTagValue(jobnode, "modified_user");
        String modDate = XMLHandler.getTagValue(jobnode, "modified_date");
        if (modDate != null) {
            modifiedDate = XMLHandler.stringToDate(modDate);
        }
        // Read objects from the shared XML file & the repository.
        // This is best-effort: a failure is logged and loading continues.
        try {
            sharedObjectsFile = XMLHandler.getTagValue(jobnode, "shared_objects_file");
            if (rep == null || ignoreRepositorySharedObjects) {
                sharedObjects = readSharedObjects();
            } else {
                sharedObjects = rep.readJobMetaSharedObjects(this);
            }
        } catch (Exception e) {
            LogChannel.GENERAL.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
            LogChannel.GENERAL.logError(Const.getStackTracker(e));
        }
        // Load the database connections, slave servers, cluster schemas & partition schemas into this object.
        // 
        importFromMetaStore();
        // Read the named parameters.
        Node paramsNode = XMLHandler.getSubNode(jobnode, XML_TAG_PARAMETERS);
        int nrParams = XMLHandler.countNodes(paramsNode, "parameter");
        for (int i = 0; i < nrParams; i++) {
            Node paramNode = XMLHandler.getSubNodeByNr(paramsNode, "parameter", i);
            String paramName = XMLHandler.getTagValue(paramNode, "name");
            String defValue = XMLHandler.getTagValue(paramNode, "default_value");
            String descr = XMLHandler.getTagValue(paramNode, "description");
            addParameterDefinition(paramName, defValue, descr);
        }
        // 
        // Read the database connections
        // 
        int nr = XMLHandler.countNodes(jobnode, "connection");
        Set<String> privateDatabases = new HashSet<String>(nr);
        for (int i = 0; i < nr; i++) {
            Node dbnode = XMLHandler.getSubNodeByNr(jobnode, "connection", i);
            DatabaseMeta dbcon = new DatabaseMeta(dbnode);
            dbcon.shareVariablesWith(this);
            // Non-shared connections are tracked as private to this job.
            if (!dbcon.isShared()) {
                privateDatabases.add(dbcon.getName());
            }
            DatabaseMeta exist = findDatabase(dbcon.getName());
            if (exist == null) {
                addDatabase(dbcon);
            } else {
                if (!exist.isShared()) {
                    // skip shared connections
                    // Non-shared duplicates may be overwritten, optionally after prompting the user.
                    if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName()), BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"))) {
                        int idx = indexOfDatabase(exist);
                        removeDatabase(idx);
                        addDatabase(idx, dbcon);
                    }
                }
            }
        }
        setPrivateDatabases(privateDatabases);
        // Read the slave servers...
        // 
        Node slaveServersNode = XMLHandler.getSubNode(jobnode, XML_TAG_SLAVESERVERS);
        int nrSlaveServers = XMLHandler.countNodes(slaveServersNode, SlaveServer.XML_TAG);
        for (int i = 0; i < nrSlaveServers; i++) {
            Node slaveServerNode = XMLHandler.getSubNodeByNr(slaveServersNode, SlaveServer.XML_TAG, i);
            SlaveServer slaveServer = new SlaveServer(slaveServerNode);
            slaveServer.shareVariablesWith(this);
            // Check if the object exists and if it's a shared object.
            // If so, then we will keep the shared version, not this one.
            // The stored XML is only for backup purposes.
            SlaveServer check = findSlaveServer(slaveServer.getName());
            if (check != null) {
                if (!check.isShared()) {
                    // we don't overwrite shared objects.
                    if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "JobMeta.Dialog.SlaveServerExistsOverWrite.Message", slaveServer.getName()), BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"))) {
                        addOrReplaceSlaveServer(slaveServer);
                    }
                }
            } else {
                slaveServers.add(slaveServer);
            }
        }
        /*
       * Get the log database connection & log table
       */
        // Backward compatibility...
        // 
        Node jobLogNode = XMLHandler.getSubNode(jobnode, JobLogTable.XML_TAG);
        if (jobLogNode == null) {
            // Load the XML
            // Legacy format: log settings are flat tags on the job node itself.
            jobLogTable.setConnectionName(XMLHandler.getTagValue(jobnode, "logconnection"));
            jobLogTable.setTableName(XMLHandler.getTagValue(jobnode, "logtable"));
            jobLogTable.setBatchIdUsed("Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_batchid")));
            jobLogTable.setLogFieldUsed("Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_logfield")));
            // NOTE(review): presumably these fields did not exist in the legacy
            // format, hence disabled here — confirm against JobLogTable history.
            jobLogTable.findField(JobLogTable.ID.CHANNEL_ID).setEnabled(false);
            jobLogTable.findField(JobLogTable.ID.LINES_REJECTED).setEnabled(false);
        } else {
            jobLogTable.loadXML(jobLogNode, databases, null);
        }
        Node channelLogTableNode = XMLHandler.getSubNode(jobnode, ChannelLogTable.XML_TAG);
        if (channelLogTableNode != null) {
            channelLogTable.loadXML(channelLogTableNode, databases, null);
        }
        jobEntryLogTable.loadXML(jobnode, databases, null);
        // Plugin-contributed log tables read their own settings from the job node.
        for (LogTableInterface extraLogTable : extraLogTables) {
            extraLogTable.loadXML(jobnode, databases, null);
        }
        batchIdPassed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "pass_batchid"));
        /*
       * read the job entries...
       */
        Node entriesnode = XMLHandler.getSubNode(jobnode, "entries");
        int tr = XMLHandler.countNodes(entriesnode, "entry");
        for (int i = 0; i < tr; i++) {
            Node entrynode = XMLHandler.getSubNodeByNr(entriesnode, "entry", i);
            // System.out.println("Reading entry:\n"+entrynode);
            JobEntryCopy je = new JobEntryCopy(entrynode, databases, slaveServers, rep, metaStore);
            // Keep track of entries whose plugin could not be found.
            if (je.isSpecial() && je.isMissing()) {
                addMissingEntry((MissingEntry) je.getEntry());
            }
            JobEntryCopy prev = findJobEntry(je.getName(), 0, true);
            if (prev != null) {
                // 
                if (je.getNr() == 0) {
                    // Replace previous version with this one: remove it first
                    // 
                    int idx = indexOfJobEntry(prev);
                    removeJobEntry(idx);
                } else if (je.getNr() > 0) {
                    // Use previously defined JobEntry info!
                    // 
                    je.setEntry(prev.getEntry());
                    // See if entry already exists...
                    prev = findJobEntry(je.getName(), je.getNr(), true);
                    if (prev != null) {
                        // remove the old one!
                        // 
                        int idx = indexOfJobEntry(prev);
                        removeJobEntry(idx);
                    }
                }
            }
            // Add the JobEntryCopy...
            addJobEntry(je);
        }
        // Read the hops between entries...
        Node hopsnode = XMLHandler.getSubNode(jobnode, "hops");
        int ho = XMLHandler.countNodes(hopsnode, "hop");
        for (int i = 0; i < ho; i++) {
            Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i);
            JobHopMeta hi = new JobHopMeta(hopnode, this);
            jobhops.add(hi);
        }
        // Read the notes...
        Node notepadsnode = XMLHandler.getSubNode(jobnode, "notepads");
        int nrnotes = XMLHandler.countNodes(notepadsnode, "notepad");
        for (int i = 0; i < nrnotes; i++) {
            Node notepadnode = XMLHandler.getSubNodeByNr(notepadsnode, "notepad", i);
            NotePadMeta ni = new NotePadMeta(notepadnode);
            notes.add(ni);
        }
        // Load the attribute groups map
        // 
        attributesMap = AttributesUtil.loadAttributes(XMLHandler.getSubNode(jobnode, AttributesUtil.XML_TAG));
        // Give extension point plugins a chance to react to the loaded job.
        ExtensionPointHandler.callExtensionPoint(LogChannel.GENERAL, KettleExtensionPoint.JobMetaLoaded.id, this);
        clearChanged();
    } catch (Exception e) {
        throw new KettleXMLException(BaseMessages.getString(PKG, "JobMeta.Exception.UnableToLoadJobFromXMLNode"), e);
    } finally {
        // Always refresh internal variables, even on failure, so the object is consistent.
        setInternalKettleVariables();
    }
}
Also used : RepositoryDirectory(org.pentaho.di.repository.RepositoryDirectory) Node(org.w3c.dom.Node) Props(org.pentaho.di.core.Props) SlaveServer(org.pentaho.di.cluster.SlaveServer) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) LookupReferencesException(org.pentaho.di.core.exception.LookupReferencesException) FileSystemException(org.apache.commons.vfs2.FileSystemException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) IdNotFoundException(org.pentaho.di.core.exception.IdNotFoundException) Point(org.pentaho.di.core.gui.Point) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) NotePadMeta(org.pentaho.di.core.NotePadMeta) HashSet(java.util.HashSet)

Example 17 with LogTableInterface

use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

The class JobMeta, method clear.

/**
 * Resets this JobMeta to a pristine state: fresh entry and hop lists, default
 * log tables, freshly discovered log-table plugins, and cleared base state.
 */
@Override
public void clear() {
    jobcopies = new ArrayList<JobEntryCopy>();
    jobhops = new ArrayList<JobHopMeta>();
    jobLogTable = JobLogTable.getDefault(this, this);
    jobEntryLogTable = JobEntryLogTable.getDefault(this, this);
    extraLogTables = new ArrayList<LogTableInterface>();
    // Scan the registry for log-table plugins and attach the job-scoped ones.
    List<PluginInterface> logTablePlugins = PluginRegistry.getInstance().getPlugins(LogTablePluginType.class);
    for (PluginInterface logTablePlugin : logTablePlugins) {
        try {
            LogTablePluginInterface pluginLogTable = (LogTablePluginInterface) PluginRegistry.getInstance().loadClass(logTablePlugin);
            if (pluginLogTable.getType() == TableType.JOB) {
                pluginLogTable.setContext(this, this);
                extraLogTables.add(pluginLogTable);
            }
        } catch (Exception e) {
            // A broken plugin must not prevent the job from being cleared.
            LogChannel.GENERAL.logError("Error loading log table plugin with ID " + logTablePlugin.getIds()[0], e);
        }
    }
    arguments = null;
    super.clear();
    loopCache = new HashMap<String, Boolean>();
    addDefaults();
    jobStatus = -1;
    jobVersion = null;
    // Deliberately NOT calling setInternalKettleVariables() here: clearing the
    // internal variables for ad-hoc jobs would ruin the previews etc.
    log = LogChannel.GENERAL;
}
Also used : LogTablePluginInterface(org.pentaho.di.core.logging.LogTablePluginInterface) LogTablePluginInterface(org.pentaho.di.core.logging.LogTablePluginInterface) PluginInterface(org.pentaho.di.core.plugins.PluginInterface) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) LookupReferencesException(org.pentaho.di.core.exception.LookupReferencesException) FileSystemException(org.apache.commons.vfs2.FileSystemException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) IdNotFoundException(org.pentaho.di.core.exception.IdNotFoundException) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy)

Example 18 with LogTableInterface

use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

The class PurRepositoryUnitTest, method onlyGlobalVariablesOfLogTablesSetToNull.

/**
 * Verifies that with KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT enabled,
 * setGlobalVariablesOfLogTablesNull() nulls only the fields that reference a
 * global variable, leaving hardcoded values untouched.
 */
@Test
public void onlyGlobalVariablesOfLogTablesSetToNull() {
    // Save the previous value so it can be restored exactly; the old code
    // forced the property to "false" in finally, leaking state across tests.
    String previousValue = System.getProperty(Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT);
    try {
        System.setProperty(Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT, "true");
        PurRepositoryExporter purRepoExporter = new PurRepositoryExporter(mock(PurRepository.class));
        String hardcodedString = "hardcoded";
        String globalParam = "${" + Const.KETTLE_TRANS_LOG_TABLE + "}";
        // Mix hardcoded values with one global-variable reference per table.
        StepLogTable stepLogTable = StepLogTable.getDefault(mockedVariableSpace, mockedHasDbInterface);
        stepLogTable.setConnectionName(hardcodedString);
        stepLogTable.setSchemaName(hardcodedString);
        stepLogTable.setTimeoutInDays(hardcodedString);
        stepLogTable.setTableName(globalParam);
        JobEntryLogTable jobEntryLogTable = JobEntryLogTable.getDefault(mockedVariableSpace, mockedHasDbInterface);
        jobEntryLogTable.setConnectionName(hardcodedString);
        jobEntryLogTable.setSchemaName(hardcodedString);
        jobEntryLogTable.setTimeoutInDays(hardcodedString);
        jobEntryLogTable.setTableName(globalParam);
        List<LogTableInterface> logTables = new ArrayList<>();
        logTables.add(jobEntryLogTable);
        logTables.add(stepLogTable);
        purRepoExporter.setGlobalVariablesOfLogTablesNull(logTables);
        for (LogTableInterface logTable : logTables) {
            // JUnit convention: expected value first, actual value second.
            assertEquals(hardcodedString, logTable.getConnectionName());
            assertEquals(hardcodedString, logTable.getSchemaName());
            assertEquals(hardcodedString, logTable.getTimeoutInDays());
            // Only the global-variable field must have been cleared.
            assertEquals(null, logTable.getTableName());
        }
    } finally {
        if (previousValue == null) {
            System.clearProperty(Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT);
        } else {
            System.setProperty(Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT, previousValue);
        }
    }
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) StepLogTable(org.pentaho.di.core.logging.StepLogTable) ArrayList(java.util.ArrayList) JobEntryLogTable(org.pentaho.di.core.logging.JobEntryLogTable) Matchers.anyString(org.mockito.Matchers.anyString) Test(org.junit.Test)

Example 19 with LogTableInterface

use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

The class PurRepositoryUnitTest, method globalVariablesOfLogTablesNotSetToNull.

/**
 * Verifies that with KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT disabled,
 * setGlobalVariablesOfLogTablesNull() leaves all global-variable references intact.
 */
@Test
public void globalVariablesOfLogTablesNotSetToNull() {
    // Make the precondition explicit rather than relying on another test
    // having left the property unset/false; restore the prior value afterwards.
    String previousValue = System.getProperty(Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT);
    try {
        System.setProperty(Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT, "false");
        PurRepositoryExporter purRepoExporter = new PurRepositoryExporter(mock(PurRepository.class));
        String globalParam = "${" + Const.KETTLE_TRANS_LOG_TABLE + "}";
        // Every field references a global variable.
        StepLogTable stepLogTable = StepLogTable.getDefault(mockedVariableSpace, mockedHasDbInterface);
        stepLogTable.setConnectionName(globalParam);
        stepLogTable.setSchemaName(globalParam);
        stepLogTable.setTimeoutInDays(globalParam);
        stepLogTable.setTableName(globalParam);
        JobEntryLogTable jobEntryLogTable = JobEntryLogTable.getDefault(mockedVariableSpace, mockedHasDbInterface);
        jobEntryLogTable.setConnectionName(globalParam);
        jobEntryLogTable.setSchemaName(globalParam);
        jobEntryLogTable.setTimeoutInDays(globalParam);
        jobEntryLogTable.setTableName(globalParam);
        List<LogTableInterface> logTables = new ArrayList<>();
        logTables.add(jobEntryLogTable);
        logTables.add(stepLogTable);
        purRepoExporter.setGlobalVariablesOfLogTablesNull(logTables);
        for (LogTableInterface logTable : logTables) {
            // JUnit convention: expected value first, actual value second.
            assertEquals(globalParam, logTable.getConnectionName());
            assertEquals(globalParam, logTable.getSchemaName());
            assertEquals(globalParam, logTable.getTimeoutInDays());
            assertEquals(globalParam, logTable.getTableName());
        }
    } finally {
        if (previousValue == null) {
            System.clearProperty(Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT);
        } else {
            System.setProperty(Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT, previousValue);
        }
    }
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) StepLogTable(org.pentaho.di.core.logging.StepLogTable) ArrayList(java.util.ArrayList) JobEntryLogTable(org.pentaho.di.core.logging.JobEntryLogTable) Matchers.anyString(org.mockito.Matchers.anyString) Test(org.junit.Test)

Example 20 with LogTableInterface

use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

The class JobDialog, method getLogInfo.

/**
 * Persists the options the user entered for the previously selected log
 * table tab back into the corresponding log table object.
 *
 * @param previousLogTableIndex index of the previously selected tab; negative means none
 */
private void getLogInfo(int previousLogTableIndex) {
    if (previousLogTableIndex < 0) {
        return;
    }
    LogTableInterface selectedLogTable = logTables.get(previousLogTableIndex);
    LogTableUserInterface customUi = logTableUserInterfaces.get(previousLogTableIndex);
    if (customUi != null) {
        // A plugin supplied its own UI; let it read its widgets back itself.
        customUi.retrieveLogTableOptions(selectedLogTable);
        return;
    }
    // Built-in log table types are read back via the dialog's standard readers.
    if (selectedLogTable instanceof JobLogTable) {
        getJobLogTableOptions((JobLogTable) selectedLogTable);
    } else if (selectedLogTable instanceof ChannelLogTable) {
        getChannelLogTableOptions((ChannelLogTable) selectedLogTable);
    } else if (selectedLogTable instanceof JobEntryLogTable) {
        getJobEntryLogTableOptions((JobEntryLogTable) selectedLogTable);
    }
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobLogTable(org.pentaho.di.core.logging.JobLogTable) ChannelLogTable(org.pentaho.di.core.logging.ChannelLogTable) JobEntryLogTable(org.pentaho.di.core.logging.JobEntryLogTable)

Aggregations

LogTableInterface (org.pentaho.di.core.logging.LogTableInterface)28 DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta)11 KettleException (org.pentaho.di.core.exception.KettleException)10 ArrayList (java.util.ArrayList)8 Database (org.pentaho.di.core.database.Database)8 RepositoryAttributeInterface (org.pentaho.di.repository.RepositoryAttributeInterface)7 JobEntryLogTable (org.pentaho.di.core.logging.JobEntryLogTable)6 UnknownParamException (org.pentaho.di.core.parameters.UnknownParamException)6 RowMetaAndData (org.pentaho.di.core.RowMetaAndData)5 KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)5 ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString)5 JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy)5 ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog)5 MessageBox (org.eclipse.swt.widgets.MessageBox)4 NotePadMeta (org.pentaho.di.core.NotePadMeta)4 KettleValueException (org.pentaho.di.core.exception.KettleValueException)4 ChannelLogTable (org.pentaho.di.core.logging.ChannelLogTable)4 JobLogTable (org.pentaho.di.core.logging.JobLogTable)4 FileSystemException (org.apache.commons.vfs2.FileSystemException)3 KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint)3