Example 1 with RepositoryAttributeInterface

Use of org.pentaho.di.repository.RepositoryAttributeInterface in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryTransDelegate, method insertTransformation:

// CHECKSTYLE:LineLength:OFF
private synchronized void insertTransformation(TransMeta transMeta) throws KettleException {
    RowMetaAndData table = new RowMetaAndData();
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_TRANSFORMATION), new LongObjectId(transMeta.getObjectId()));
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME), transMeta.getName());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_DESCRIPTION), transMeta.getDescription());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_EXTENDED_DESCRIPTION), transMeta.getExtendedDescription());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_VERSION), transMeta.getTransversion());
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_TRANS_STATUS), Long.valueOf(transMeta.getTransstatus() < 0 ? -1L : transMeta.getTransstatus()));
    TransLogTable logTable = transMeta.getTransLogTable();
    StepMeta step = (StepMeta) logTable.getSubject(TransLogTable.ID.LINES_READ);
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_READ), step == null ? null : step.getObjectId());
    step = (StepMeta) logTable.getSubject(TransLogTable.ID.LINES_WRITTEN);
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_WRITE), step == null ? null : step.getObjectId());
    step = (StepMeta) logTable.getSubject(TransLogTable.ID.LINES_INPUT);
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_INPUT), step == null ? null : step.getObjectId());
    step = (StepMeta) logTable.getSubject(TransLogTable.ID.LINES_OUTPUT);
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_OUTPUT), step == null ? null : step.getObjectId());
    step = (StepMeta) logTable.getSubject(TransLogTable.ID.LINES_UPDATED);
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_STEP_UPDATE), step == null ? null : step.getObjectId());
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_LOG), logTable.getDatabaseMeta() == null ? new LongObjectId(-1L).longValue() : new LongObjectId(logTable.getDatabaseMeta().getObjectId()).longValue());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_LOG), logTable.getTableName());
    table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_BATCHID), Boolean.valueOf(logTable.isBatchIdUsed()));
    table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_TRANSFORMATION_USE_LOGFIELD), Boolean.valueOf(logTable.isLogFieldUsed()));
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DATABASE_MAXDATE), transMeta.getMaxDateConnection() == null ? new LongObjectId(-1L).longValue() : new LongObjectId(transMeta.getMaxDateConnection().getObjectId()).longValue());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_TABLE_NAME_MAXDATE), transMeta.getMaxDateTable());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_FIELD_NAME_MAXDATE), transMeta.getMaxDateField());
    table.addValue(new ValueMetaNumber(KettleDatabaseRepository.FIELD_TRANSFORMATION_OFFSET_MAXDATE), Double.valueOf(transMeta.getMaxDateOffset()));
    table.addValue(new ValueMetaNumber(KettleDatabaseRepository.FIELD_TRANSFORMATION_DIFF_MAXDATE), Double.valueOf(transMeta.getMaxDateDifference()));
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_USER), transMeta.getCreatedUser());
    table.addValue(new ValueMetaDate(KettleDatabaseRepository.FIELD_TRANSFORMATION_CREATED_DATE), transMeta.getCreatedDate());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_USER), transMeta.getModifiedUser());
    table.addValue(new ValueMetaDate(KettleDatabaseRepository.FIELD_TRANSFORMATION_MODIFIED_DATE), transMeta.getModifiedDate());
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_SIZE_ROWSET), Long.valueOf(transMeta.getSizeRowset()));
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY), transMeta.getRepositoryDirectory().getObjectId());
    repository.connectionDelegate.getDatabase().prepareInsert(table.getRowMeta(), KettleDatabaseRepository.TABLE_R_TRANSFORMATION);
    repository.connectionDelegate.getDatabase().setValuesInsert(table);
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();
    step = (StepMeta) logTable.getSubject(TransLogTable.ID.LINES_REJECTED);
    if (step != null) {
        ObjectId rejectedId = step.getObjectId();
        Preconditions.checkNotNull(rejectedId);
        repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_ID_STEP_REJECTED, Long.valueOf(rejectedId.toString()), null);
    }
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_UNIQUE_CONNECTIONS, 0, transMeta.isUsingUniqueConnections() ? "Y" : "N");
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_FEEDBACK_SHOWN, 0, transMeta.isFeedbackShown() ? "Y" : "N");
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_FEEDBACK_SIZE, transMeta.getFeedbackSize(), "");
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_USING_THREAD_PRIORITIES, 0, transMeta.isUsingThreadPriorityManagment() ? "Y" : "N");
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_SHARED_FILE, 0, transMeta.getSharedObjectsFile());
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_CAPTURE_STEP_PERFORMANCE, 0, transMeta.isCapturingStepPerformanceSnapShots() ? "Y" : "N");
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_STEP_PERFORMANCE_CAPTURING_DELAY, transMeta.getStepPerformanceCapturingDelay(), "");
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_STEP_PERFORMANCE_CAPTURING_SIZE_LIMIT, 0, transMeta.getStepPerformanceCapturingSizeLimit());
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_STEP_PERFORMANCE_LOG_TABLE, 0, transMeta.getPerformanceLogTable().getTableName());
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_LOG_SIZE_LIMIT, 0, transMeta.getTransLogTable().getLogSizeLimit());
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_LOG_INTERVAL, 0, transMeta.getTransLogTable().getLogInterval());
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_TRANSFORMATION_TYPE, 0, transMeta.getTransformationType().getCode());
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_SLEEP_TIME_EMPTY, transMeta.getSleepTimeEmpty(), null);
    repository.connectionDelegate.insertTransAttribute(transMeta.getObjectId(), 0, KettleDatabaseRepository.TRANS_ATTRIBUTE_SLEEP_TIME_FULL, transMeta.getSleepTimeFull(), null);
    // Save the logging connection link...
    if (logTable.getDatabaseMeta() != null) {
        repository.insertStepDatabase(transMeta.getObjectId(), null, logTable.getDatabaseMeta().getObjectId());
    }
    // Save the maxdate connection link...
    if (transMeta.getMaxDateConnection() != null) {
        repository.insertStepDatabase(transMeta.getObjectId(), null, transMeta.getMaxDateConnection().getObjectId());
    }
    // Save the logging tables too..
    // 
    RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryTransAttribute(repository.connectionDelegate, transMeta.getObjectId());
    transMeta.getTransLogTable().saveToRepository(attributeInterface);
    transMeta.getStepLogTable().saveToRepository(attributeInterface);
    transMeta.getPerformanceLogTable().saveToRepository(attributeInterface);
    transMeta.getChannelLogTable().saveToRepository(attributeInterface);
}
Also used: ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) ValueMetaBoolean(org.pentaho.di.core.row.value.ValueMetaBoolean) StepMeta(org.pentaho.di.trans.step.StepMeta) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) ValueMetaNumber(org.pentaho.di.core.row.value.ValueMetaNumber) TransLogTable(org.pentaho.di.core.logging.TransLogTable) ValueMetaInteger(org.pentaho.di.core.row.value.ValueMetaInteger) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate)
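
The method above writes the transformation's scalar fields as a single row into R_TRANSFORMATION, stores overflow settings as keyed attributes, and finally hands a RepositoryAttributeInterface to each log table so the table can persist its own configuration. That interface is essentially a typed key/value sink. The following map-backed stand-in is a minimal sketch of the pattern, assuming method names that mirror the calls seen in these examples; it is not the verified interface from the Kettle codebase.

// Minimal, map-backed sketch of the attribute-sink pattern that
// RepositoryAttributeInterface abstracts. The method names below are
// assumptions chosen to mirror the calls in the examples, not the
// verified interface signatures.
import java.util.HashMap;
import java.util.Map;

public class InMemoryRepositoryAttributes {

    private final Map<String, String> attributes = new HashMap<>();

    public void setAttribute(String code, String value) {
        attributes.put(code, value);
    }

    public void setAttribute(String code, long value) {
        attributes.put(code, Long.toString(value));
    }

    public void setAttribute(String code, boolean value) {
        // Booleans are stored as "Y"/"N", matching the flag convention
        // visible in the insertTransAttribute calls above.
        attributes.put(code, value ? "Y" : "N");
    }

    public String getAttributeString(String code) {
        return attributes.get(code);
    }

    public boolean getAttributeBoolean(String code) {
        return "Y".equalsIgnoreCase(attributes.get(code));
    }

    public long getAttributeInteger(String code) {
        String value = attributes.get(code);
        return value == null ? 0L : Long.parseLong(value);
    }
}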

Example 2 with RepositoryAttributeInterface

Use of org.pentaho.di.repository.RepositoryAttributeInterface in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryJobDelegate, method loadJobMeta:

/**
 * Load a job from a repository directory.
 *
 * @param jobname
 *          the name of the job
 * @param repdir
 *          the directory in which the job resides
 * @param monitor
 *          optional progress monitor, may be null
 * @return the loaded job metadata
 * @throws KettleException
 *           if the job cannot be found or an error occurs while reading it
 */
public JobMeta loadJobMeta(String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor) throws KettleException {
    JobMeta jobMeta = new JobMeta();
    synchronized (repository) {
        try {
            // Clear everything...
            jobMeta.clear();
            jobMeta.setRepositoryDirectory(repdir);
            // Get the job id
            jobMeta.setObjectId(getJobID(jobname, repdir.getObjectId()));
            // If no valid id is available in the database, then give error...
            if (jobMeta.getObjectId() != null) {
                // Load the notes...
                ObjectId[] noteids = repository.getJobNoteIDs(jobMeta.getObjectId());
                ObjectId[] jecids = repository.getJobEntryCopyIDs(jobMeta.getObjectId());
                ObjectId[] hopid = repository.getJobHopIDs(jobMeta.getObjectId());
                int nrWork = 2 + noteids.length + jecids.length + hopid.length;
                if (monitor != null) {
                    monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork);
                }
                // 
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobInformation"));
                }
                RowMetaAndData jobRow = getJob(jobMeta.getObjectId());
                jobMeta.setName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, null));
                jobMeta.setDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null));
                jobMeta.setExtendedDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null));
                jobMeta.setJobversion(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null));
                jobMeta.setJobstatus(Const.toInt(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null), -1));
                jobMeta.setCreatedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null));
                jobMeta.setCreatedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date()));
                jobMeta.setModifiedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null));
                jobMeta.setModifiedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date()));
                long id_logdb = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0);
                if (id_logdb > 0) {
                    // Get the logconnection
                    // 
                    DatabaseMeta logDb = repository.loadDatabaseMeta(new LongObjectId(id_logdb), null);
                    jobMeta.getJobLogTable().setConnectionName(logDb.getName());
                // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith(jobMeta);
                }
                jobMeta.getJobLogTable().setTableName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null));
                jobMeta.getJobLogTable().setBatchIdUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false));
                jobMeta.getJobLogTable().setLogFieldUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false));
                jobMeta.getJobLogTable().setLogSizeLimit(getJobAttributeString(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT));
                jobMeta.setBatchIdPassed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false));
                // Load all the log tables for the job...
                // 
                RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
                for (LogTableInterface logTable : jobMeta.getLogTables()) {
                    logTable.loadFromRepository(attributeInterface);
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                // 
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository"));
                }
                // Read objects from the shared XML file & the repository
                try {
                    jobMeta.setSharedObjectsFile(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null));
                    jobMeta.setSharedObjects(repository != null ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects());
                } catch (Exception e) {
                    log.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
                    // 
                    log.logError(Const.getStackTracker(e));
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + noteids.length + " notes");
                }
                for (int i = 0; i < noteids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length);
                    }
                    NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
                    if (jobMeta.indexOfNote(ni) < 0) {
                        jobMeta.addNote(ni);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the group attributes map
                // 
                jobMeta.setAttributesMap(loadJobAttributesMap(jobMeta.getObjectId()));
                // Load the job entries...
                // 
                // Keep a unique list of job entries to facilitate in the loading.
                // 
                List<JobEntryInterface> jobentries = new ArrayList<JobEntryInterface>();
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + jecids.length + " job entries");
                }
                for (int i = 0; i < jecids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + (jecids.length));
                    }
                    JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy(jobMeta.getObjectId(), jecids[i], jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname);
                    if (jec.isMissing()) {
                        jobMeta.addMissingEntry((MissingEntry) jec.getEntry());
                    }
                    // Also set the copy number...
                    // We count the number of job entry copies that use the job
                    // entry
                    // 
                    int copyNr = 0;
                    for (JobEntryCopy copy : jobMeta.getJobCopies()) {
                        if (jec.getEntry() == copy.getEntry()) {
                            copyNr++;
                        }
                    }
                    jec.setNr(copyNr);
                    int idx = jobMeta.indexOfJobEntry(jec);
                    if (idx < 0) {
                        if (jec.getName() != null && jec.getName().length() > 0) {
                            jobMeta.addJobEntry(jec);
                        }
                    } else {
                        // replace it!
                        jobMeta.setJobEntry(idx, jec);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the hops...
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + hopid.length + " job hops");
                }
                for (int i = 0; i < hopid.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + (jecids.length));
                    }
                    JobHopMeta hi = loadJobHopMeta(hopid[i], jobMeta.getJobCopies());
                    jobMeta.getJobhops().add(hi);
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                loadRepParameters(jobMeta);
                // Finally, clear the changed flags...
                jobMeta.clearChanged();
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.FinishedLoadOfJob"));
                }
                if (monitor != null) {
                    monitor.done();
                }
                // close prepared statements, minimize locking etc.
                // 
                repository.connectionDelegate.closeAttributeLookupPreparedStatements();
                return jobMeta;
            } else {
                throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.CanNotFindJob") + jobname);
            }
        } catch (KettleException dbe) {
            throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
        } finally {
            jobMeta.initializeVariablesFrom(jobMeta.getParentVariableSpace());
            jobMeta.setInternalKettleVariables();
        }
    }
}
Also used: KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) JobHopMeta(org.pentaho.di.job.JobHopMeta) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) ArrayList(java.util.ArrayList) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) NotePadMeta(org.pentaho.di.core.NotePadMeta)
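
In practice loadJobMeta is reached through the public Repository API rather than by calling the delegate directly. The sketch below shows one plausible call path; the directory path and job name are made-up placeholders, and the loadJob signature is quoted from memory rather than verified against this version of the code.

import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;

public class LoadJobSketch {

    // Loads a job by name from a connected repository. The "/home/admin"
    // path and "daily_load" name are hypothetical.
    public static JobMeta load(Repository repository) throws Exception {
        RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
        RepositoryDirectoryInterface dir = tree.findDirectory("/home/admin");
        // null monitor, null revision: assumed to mean "no progress
        // reporting, latest version".
        return repository.loadJob("daily_load", dir, null, null);
    }
}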

Example 3 with RepositoryAttributeInterface

Use of org.pentaho.di.repository.RepositoryAttributeInterface in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryJobDelegate, method insertJob:

private synchronized void insertJob(JobMeta jobMeta) throws KettleException {
    RowMetaAndData table = new RowMetaAndData();
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_ID_JOB), jobMeta.getObjectId());
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY), jobMeta.getRepositoryDirectory().getObjectId());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_NAME), jobMeta.getName());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION), jobMeta.getDescription());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION), jobMeta.getExtendedDescription());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION), jobMeta.getJobversion());
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS), Long.valueOf(jobMeta.getJobstatus() < 0 ? -1L : jobMeta.getJobstatus()));
    table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG), jobMeta.getJobLogTable().getDatabaseMeta() != null ? jobMeta.getJobLogTable().getDatabaseMeta().getObjectId() : -1L);
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG), jobMeta.getJobLogTable().getTableName());
    table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID), jobMeta.getJobLogTable().isBatchIdUsed());
    table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD), jobMeta.getJobLogTable().isLogFieldUsed());
    repository.connectionDelegate.insertJobAttribute(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT, 0, jobMeta.getJobLogTable().getLogSizeLimit());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER), jobMeta.getCreatedUser());
    table.addValue(new ValueMetaDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE), jobMeta.getCreatedDate());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER), jobMeta.getModifiedUser());
    table.addValue(new ValueMetaDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE), jobMeta.getModifiedDate());
    table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID), jobMeta.isBatchIdPassed());
    table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE), jobMeta.getSharedObjectsFile());
    repository.connectionDelegate.getDatabase().prepareInsert(table.getRowMeta(), KettleDatabaseRepository.TABLE_R_JOB);
    repository.connectionDelegate.getDatabase().setValuesInsert(table);
    repository.connectionDelegate.getDatabase().insertRow();
    if (log.isDebug()) {
        log.logDebug("Inserted new record into table " + quoteTable(KettleDatabaseRepository.TABLE_R_JOB) + " with data : " + table);
    }
    repository.connectionDelegate.getDatabase().closeInsert();
    // Save the logging connection link...
    if (jobMeta.getJobLogTable().getDatabaseMeta() != null) {
        repository.insertJobEntryDatabase(jobMeta.getObjectId(), null, jobMeta.getJobLogTable().getDatabaseMeta().getObjectId());
    }
    // Save the logging tables too..
    // 
    RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
    for (LogTableInterface logTable : jobMeta.getLogTables()) {
        logTable.saveToRepository(attributeInterface);
    }
}
Also used: ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) ValueMetaInteger(org.pentaho.di.core.row.value.ValueMetaInteger) ValueMetaBoolean(org.pentaho.di.core.row.value.ValueMetaBoolean) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface)
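
Note the two storage paths in insertJob: strongly typed columns go into the R_JOB row, while settings that did not get a column, such as the log size limit, go through insertJobAttribute. Boolean attributes consistently use the "Y"/"N" string convention seen throughout these delegates; a tiny helper like the hypothetical one below captures that encoding.

// Hypothetical helper for the "Y"/"N" flag convention used by the
// attribute inserts above.
public final class RepoFlags {

    private RepoFlags() {
    }

    public static String encode(boolean flag) {
        return flag ? "Y" : "N";
    }

    public static boolean decode(String value) {
        return "Y".equalsIgnoreCase(value);
    }
}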

Example 4 with RepositoryAttributeInterface

Use of org.pentaho.di.repository.RepositoryAttributeInterface in project pentaho-kettle by pentaho.

From the class TransDelegate, method saveTransformationDetails:

private void saveTransformationDetails(final DataNode rootNode, final TransMeta transMeta) throws KettleException {
    rootNode.setProperty(PROP_EXTENDED_DESCRIPTION, transMeta.getExtendedDescription());
    rootNode.setProperty(PROP_TRANS_VERSION, transMeta.getTransversion());
    rootNode.setProperty(PROP_TRANS_STATUS, transMeta.getTransstatus() < 0 ? -1L : transMeta.getTransstatus());
    rootNode.setProperty(PROP_STEP_READ, transMeta.getTransLogTable().getStepnameRead());
    rootNode.setProperty(PROP_STEP_WRITE, transMeta.getTransLogTable().getStepnameWritten());
    rootNode.setProperty(PROP_STEP_INPUT, transMeta.getTransLogTable().getStepnameInput());
    rootNode.setProperty(PROP_STEP_OUTPUT, transMeta.getTransLogTable().getStepnameOutput());
    rootNode.setProperty(PROP_STEP_UPDATE, transMeta.getTransLogTable().getStepnameUpdated());
    rootNode.setProperty(PROP_STEP_REJECTED, transMeta.getTransLogTable().getStepnameRejected());
    if (transMeta.getTransLogTable().getDatabaseMeta() != null) {
        DataNodeRef ref = new DataNodeRef(transMeta.getTransLogTable().getDatabaseMeta().getObjectId().getId());
        rootNode.setProperty(PROP_DATABASE_LOG, ref);
    }
    rootNode.setProperty(PROP_TABLE_NAME_LOG, transMeta.getTransLogTable().getTableName());
    rootNode.setProperty(PROP_USE_BATCHID, Boolean.valueOf(transMeta.getTransLogTable().isBatchIdUsed()));
    rootNode.setProperty(PROP_USE_LOGFIELD, Boolean.valueOf(transMeta.getTransLogTable().isLogFieldUsed()));
    if (transMeta.getMaxDateConnection() != null) {
        DataNodeRef ref = new DataNodeRef(transMeta.getMaxDateConnection().getObjectId().getId());
        rootNode.setProperty(PROP_ID_DATABASE_MAXDATE, ref);
    }
    rootNode.setProperty(PROP_TABLE_NAME_MAXDATE, transMeta.getMaxDateTable());
    rootNode.setProperty(PROP_FIELD_NAME_MAXDATE, transMeta.getMaxDateField());
    rootNode.setProperty(PROP_OFFSET_MAXDATE, Double.valueOf(transMeta.getMaxDateOffset()));
    rootNode.setProperty(PROP_DIFF_MAXDATE, Double.valueOf(transMeta.getMaxDateDifference()));
    rootNode.setProperty(PROP_CREATED_USER, transMeta.getCreatedUser());
    rootNode.setProperty(PROP_CREATED_DATE, transMeta.getCreatedDate());
    rootNode.setProperty(PROP_MODIFIED_USER, transMeta.getModifiedUser());
    rootNode.setProperty(PROP_MODIFIED_DATE, transMeta.getModifiedDate());
    rootNode.setProperty(PROP_SIZE_ROWSET, transMeta.getSizeRowset());
    rootNode.setProperty(PROP_UNIQUE_CONNECTIONS, transMeta.isUsingUniqueConnections());
    rootNode.setProperty(PROP_FEEDBACK_SHOWN, transMeta.isFeedbackShown());
    rootNode.setProperty(PROP_FEEDBACK_SIZE, transMeta.getFeedbackSize());
    rootNode.setProperty(PROP_USING_THREAD_PRIORITIES, transMeta.isUsingThreadPriorityManagment());
    rootNode.setProperty(PROP_SHARED_FILE, transMeta.getSharedObjectsFile());
    rootNode.setProperty(PROP_CAPTURE_STEP_PERFORMANCE, transMeta.isCapturingStepPerformanceSnapShots());
    rootNode.setProperty(PROP_STEP_PERFORMANCE_CAPTURING_DELAY, transMeta.getStepPerformanceCapturingDelay());
    rootNode.setProperty(PROP_STEP_PERFORMANCE_CAPTURING_SIZE_LIMIT, transMeta.getStepPerformanceCapturingSizeLimit());
    rootNode.setProperty(PROP_STEP_PERFORMANCE_LOG_TABLE, transMeta.getPerformanceLogTable().getTableName());
    rootNode.setProperty(PROP_LOG_SIZE_LIMIT, transMeta.getTransLogTable().getLogSizeLimit());
    rootNode.setProperty(PROP_LOG_INTERVAL, transMeta.getTransLogTable().getLogInterval());
    rootNode.setProperty(PROP_TRANSFORMATION_TYPE, transMeta.getTransformationType().getCode());
    // Save the logging tables too..
    // 
    RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute(rootNode, transMeta.getDatabases());
    for (LogTableInterface logTable : transMeta.getLogTables()) {
        logTable.saveToRepository(attributeInterface);
    }
    // Save the transformation attribute groups map
    // 
    AttributesMapUtil.saveAttributesMap(rootNode, transMeta);
}
Also used: DataNodeRef(org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface)
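
This example points the same LogTableInterface.saveToRepository code at a different backend: PurRepositoryAttribute adapts a DataNode so that attribute writes become node properties instead of database rows. As a rough, hedged illustration of that adapter shape (the setAttribute overloads are assumptions, not the actual PurRepositoryAttribute source):

import org.pentaho.platform.api.repository2.unified.data.node.DataNode;

// Hedged sketch of a DataNode-backed attribute adapter in the spirit of
// PurRepositoryAttribute; each write simply forwards to the matching
// DataNode.setProperty overload, as the example above does directly.
public class DataNodeAttributeAdapter {

    private final DataNode node;

    public DataNodeAttributeAdapter(DataNode node) {
        this.node = node;
    }

    public void setAttribute(String code, String value) {
        node.setProperty(code, value);
    }

    public void setAttribute(String code, long value) {
        node.setProperty(code, value);
    }

    public void setAttribute(String code, boolean value) {
        node.setProperty(code, value);
    }
}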

Example 5 with RepositoryAttributeInterface

Use of org.pentaho.di.repository.RepositoryAttributeInterface in project pentaho-kettle by pentaho.

From the class JobDelegate, method saveJobDetails:

private void saveJobDetails(DataNode rootNode, JobMeta jobMeta) throws KettleException {
    rootNode.setProperty(PROP_EXTENDED_DESCRIPTION, jobMeta.getExtendedDescription());
    rootNode.setProperty(PROP_JOB_VERSION, jobMeta.getJobversion());
    rootNode.setProperty(PROP_JOB_STATUS, jobMeta.getJobstatus() < 0 ? -1L : jobMeta.getJobstatus());
    if (jobMeta.getJobLogTable().getDatabaseMeta() != null) {
        DataNodeRef ref = new DataNodeRef(jobMeta.getJobLogTable().getDatabaseMeta().getObjectId().getId());
        rootNode.setProperty(PROP_DATABASE_LOG, ref);
    }
    rootNode.setProperty(PROP_TABLE_NAME_LOG, jobMeta.getJobLogTable().getTableName());
    rootNode.setProperty(PROP_CREATED_USER, jobMeta.getCreatedUser());
    rootNode.setProperty(PROP_CREATED_DATE, jobMeta.getCreatedDate());
    rootNode.setProperty(PROP_MODIFIED_USER, jobMeta.getModifiedUser());
    rootNode.setProperty(PROP_MODIFIED_DATE, jobMeta.getModifiedDate());
    rootNode.setProperty(PROP_USE_BATCH_ID, jobMeta.getJobLogTable().isBatchIdUsed());
    rootNode.setProperty(PROP_PASS_BATCH_ID, jobMeta.isBatchIdPassed());
    rootNode.setProperty(PROP_USE_LOGFIELD, jobMeta.getJobLogTable().isLogFieldUsed());
    rootNode.setProperty(PROP_SHARED_FILE, jobMeta.getSharedObjectsFile());
    rootNode.setProperty(PROP_LOG_SIZE_LIMIT, jobMeta.getJobLogTable().getLogSizeLimit());
    // Save the logging tables too..
    // 
    RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute(rootNode, jobMeta.getDatabases());
    for (LogTableInterface logTable : jobMeta.getLogTables()) {
        logTable.saveToRepository(attributeInterface);
    }
    // Save the attributes map
    // 
    AttributesMapUtil.saveAttributesMap(rootNode, jobMeta);
}
Also used: DataNodeRef(org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface)
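
Save and load are symmetric: saveToRepository in the save paths and loadFromRepository in loadJobMeta exchange the same attribute codes, so any RepositoryAttributeInterface implementation that round-trips values will work with the log tables. A quick round-trip check, using the map-backed stand-in sketched after Example 1 and made-up attribute codes:

// Round-trip check using the InMemoryRepositoryAttributes stand-in from
// the note after Example 1; the attribute codes are made up.
public class AttributeRoundTrip {

    public static void main(String[] args) {
        InMemoryRepositoryAttributes attrs = new InMemoryRepositoryAttributes();
        attrs.setAttribute("LOG_TABLE_CONNECTION_NAME", "logging_db");
        attrs.setAttribute("LOG_TABLE_BATCH_ID_ENABLED", true);
        attrs.setAttribute("LOG_TABLE_SIZE_LIMIT", 5000L);

        // Values come back exactly as stored.
        System.out.println(attrs.getAttributeString("LOG_TABLE_CONNECTION_NAME")); // logging_db
        System.out.println(attrs.getAttributeBoolean("LOG_TABLE_BATCH_ID_ENABLED")); // true
        System.out.println(attrs.getAttributeInteger("LOG_TABLE_SIZE_LIMIT")); // 5000
    }
}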

Aggregations

RepositoryAttributeInterface (org.pentaho.di.repository.RepositoryAttributeInterface): 8 uses
LogTableInterface (org.pentaho.di.core.logging.LogTableInterface): 7 uses
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 3 uses
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 3 uses
KettleException (org.pentaho.di.core.exception.KettleException): 3 uses
ValueMetaDate (org.pentaho.di.core.row.value.ValueMetaDate): 3 uses
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 3 uses
LongObjectId (org.pentaho.di.repository.LongObjectId): 3 uses
ObjectId (org.pentaho.di.repository.ObjectId): 3 uses
NotePadMeta (org.pentaho.di.core.NotePadMeta): 2 uses
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 2 uses
ValueMetaBoolean (org.pentaho.di.core.row.value.ValueMetaBoolean): 2 uses
ValueMetaInteger (org.pentaho.di.core.row.value.ValueMetaInteger): 2 uses
StringObjectId (org.pentaho.di.repository.StringObjectId): 2 uses
StepMeta (org.pentaho.di.trans.step.StepMeta): 2 uses
DataNodeRef (org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef): 2 uses
ArrayList (java.util.ArrayList): 1 use
Date (java.util.Date): 1 use
TransLogTable (org.pentaho.di.core.logging.TransLogTable): 1 use
ValueMetaNumber (org.pentaho.di.core.row.value.ValueMetaNumber): 1 use