Example 61 with DatabaseMeta

Use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

From the class DataHandlerTest, method testLoadAccessDataWithSelectedItem.

@Test
public void testLoadAccessDataWithSelectedItem() throws Exception {
    when(accessBox.getSelectedItem()).thenReturn("ODBC");
    DatabaseInterface dbInterface = mock(DatabaseInterface.class);
    DatabaseMeta databaseMeta = mock(DatabaseMeta.class);
    when(dbInterface.getAccessTypeList()).thenReturn(new int[] { DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_ODBC });
    when(dbInterface.getDefaultDatabasePort()).thenReturn(5309);
    when(connectionBox.getSelectedItem()).thenReturn("myDb");
    DataHandler.connectionMap.put("myDb", dbInterface);
    dataHandler.cache = databaseMeta;
    dataHandler.getData();
    dataHandler.loadAccessData();
}
Also used: DatabaseInterface (org.pentaho.di.core.database.DatabaseInterface), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), BaseDatabaseMeta (org.pentaho.di.core.database.BaseDatabaseMeta), Test (org.junit.Test)
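
The access-type list stubbed above is built from constants defined on DatabaseMeta itself. As a minimal sketch, the static helpers getAccessTypeDesc() and getAccessType() map those constants to the short codes shown in the access box and back; the class name below is made up, and the helper behavior should be verified against your Kettle version.

import org.pentaho.di.core.database.DatabaseMeta;

public class AccessTypeDemo {
    public static void main(String[] args) {
        // The same constants the test stubs into getAccessTypeList().
        int[] accessTypes = { DatabaseMeta.TYPE_ACCESS_NATIVE, DatabaseMeta.TYPE_ACCESS_ODBC };
        for (int accessType : accessTypes) {
            // getAccessTypeDesc() maps the constant to its code, e.g. "Native" or "ODBC".
            String desc = DatabaseMeta.getAccessTypeDesc(accessType);
            // getAccessType() is the inverse mapping, back to the int constant.
            System.out.println(desc + " -> " + DatabaseMeta.getAccessType(desc));
        }
    }
}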

Example 62 with DatabaseMeta

Use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

From the class DataHandlerTest, method testPushCacheUpdatesDatabaseInterface.

@Test
public void testPushCacheUpdatesDatabaseInterface() throws Exception {
    DatabaseMeta databaseMeta = mock(DatabaseMeta.class);
    when(connectionBox.getSelectedItem()).thenReturn("test");
    dataHandler.cache = databaseMeta;
    dataHandler.getControls();
    dataHandler.getData();
    dataHandler.pushCache();
    verify(databaseMeta).setDatabaseType("test");
}
Also used: DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), BaseDatabaseMeta (org.pentaho.di.core.database.BaseDatabaseMeta), Test (org.junit.Test)
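
Both tests mock DatabaseMeta, which keeps them independent of any real driver. When an actual instance is wanted, the conventional eight-argument constructor builds one from plain connection settings; the sketch below assumes that constructor and uses placeholder connection values throughout.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;

public class DatabaseMetaDemo {
    public static void main(String[] args) throws Exception {
        // Initializes the plugin registry so the "MySQL" type can be resolved.
        KettleEnvironment.init();
        // Arguments: name, type, access, host, database, port, user, password.
        // All values here are placeholders for illustration.
        DatabaseMeta meta = new DatabaseMeta(
            "myDb", "MySQL", "Native", "localhost", "test", "3306", "user", "password");
        System.out.println(meta.getURL());
    }
}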

Example 63 with DatabaseMeta

Use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryTransDelegate, method saveTransformation.

/**
 * Saves the transformation to a repository.
 *
 * @param transMeta
 *          the transformation metadata to store
 * @param versionComment
 *          the comment to attach to this version of the transformation
 * @param monitor
 *          the way we report progress to the user, can be null if no UI is present
 * @param overwriteAssociated
 *          whether to overwrite existing associated objects (connections, schemas, servers)
 * @throws KettleException
 *           if an error occurs
 */
public void saveTransformation(TransMeta transMeta, String versionComment, ProgressMonitorListener monitor, boolean overwriteAssociated) throws KettleException {
    try {
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.LockingRepository"));
        }
        repository.insertLogEntry("save transformation '" + transMeta.getName() + "'");
        // Clear the attribute ID cache to force a repository lookup.
        repository.connectionDelegate.clearNextIDCounters();
        // Do we have a valid directory?
        if (transMeta.getRepositoryDirectory().getObjectId() == null) {
            throw new KettleException(BaseMessages.getString(PKG, "TransMeta.Exception.PlsSelectAValidDirectoryBeforeSavingTheTransformation"));
        }
        int nrWorks = 2 + transMeta.nrDatabases() + transMeta.nrNotes() + transMeta.nrSteps() + transMeta.nrTransHops();
        if (monitor != null) {
            monitor.beginTask(BaseMessages.getString(PKG, "TransMeta.Monitor.SavingTransformationTask.Title") + transMeta.getPathAndName(), nrWorks);
        }
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.SavingOfTransformationStarted"));
        }
        if (monitor != null && monitor.isCanceled()) {
            throw new KettleDatabaseException();
        }
        // Handle any previous version of this transformation, if one exists.
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.HandlingOldVersionTransformationTask.Title"));
        // transMeta.setObjectId(getTransformationID(transMeta.getName(),
        // transMeta.getRepositoryDirectory().getObjectId()));
        }
        // If no valid id is available in the database, assign one...
        if (transMeta.getObjectId() == null) {
            transMeta.setObjectId(repository.connectionDelegate.getNextTransformationID());
        } else {
            // If we do have a valid ID, make sure everything is cleared out
            // of the database for this id_transformation, before we put it back in...
            if (monitor != null) {
                monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.DeletingOldVersionTransformationTask.Title"));
            }
            if (log.isDebug()) {
                log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.DeletingOldVersionTransformation"));
            }
            repository.deleteTransformation(transMeta.getObjectId());
            if (log.isDebug()) {
                log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.OldVersionOfTransformationRemoved"));
            }
        }
        if (monitor != null) {
            monitor.worked(1);
        }
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.SavingNotes"));
        }
        for (int i = 0; i < transMeta.nrNotes(); i++) {
            if (monitor != null && monitor.isCanceled()) {
                throw new KettleDatabaseException(BaseMessages.getString(PKG, "TransMeta.Log.UserCancelledTransSave"));
            }
            // if (monitor != null) monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.SavingNoteTask.Title") +
            // (i + 1) + "/" + transMeta.nrNotes());
            NotePadMeta ni = transMeta.getNote(i);
            repository.saveNotePadMeta(ni, transMeta.getObjectId());
            if (ni.getObjectId() != null) {
                repository.insertTransNote(transMeta.getObjectId(), ni.getObjectId());
            }
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.SavingDatabaseConnections"));
        }
        for (int i = 0; i < transMeta.nrDatabases(); i++) {
            if (monitor != null && monitor.isCanceled()) {
                throw new KettleDatabaseException(BaseMessages.getString(PKG, "TransMeta.Log.UserCancelledTransSave"));
            }
            // if (monitor != null) monitor.subTask(BaseMessages.getString(PKG,
            // "TransMeta.Monitor.SavingDatabaseTask.Title") + (i + 1) + "/" + transMeta.nrDatabases());
            DatabaseMeta databaseMeta = transMeta.getDatabase(i);
            // Only save the connection if we overwrite associated objects,
            // it has changed, or it was never saved to the repository.
            if (overwriteAssociated || databaseMeta.hasChanged() || databaseMeta.getObjectId() == null) {
                repository.save(databaseMeta, versionComment, monitor, overwriteAssociated);
            }
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        // It is possible that we received another step through a plugin.
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.CheckingStepTypes"));
        }
        repository.updateStepTypes();
        repository.updateDatabaseTypes();
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.SavingSteps"));
        }
        for (int i = 0; i < transMeta.nrSteps(); i++) {
            if (monitor != null && monitor.isCanceled()) {
                throw new KettleDatabaseException(BaseMessages.getString(PKG, "TransMeta.Log.UserCancelledTransSave"));
            }
            // if (monitor != null) monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.SavingStepTask.Title") +
            // (i + 1) + "/" + transMeta.nrSteps());
            StepMeta stepMeta = transMeta.getStep(i);
            repository.stepDelegate.saveStepMeta(stepMeta, transMeta.getObjectId());
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        repository.connectionDelegate.closeStepAttributeInsertPreparedStatement();
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.SavingHops"));
        }
        for (int i = 0; i < transMeta.nrTransHops(); i++) {
            if (monitor != null && monitor.isCanceled()) {
                throw new KettleDatabaseException(BaseMessages.getString(PKG, "TransMeta.Log.UserCancelledTransSave"));
            }
            // if (monitor != null) monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.SavingHopTask.Title") +
            // (i + 1) + "/" + transMeta.nrTransHops());
            TransHopMeta hi = transMeta.getTransHop(i);
            saveTransHopMeta(hi, transMeta.getObjectId());
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        // if (monitor != null) monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.FinishingTask.Title"));
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.SavingTransformationInfo"));
        }
        // save the top level information for the transformation
        insertTransformation(transMeta);
        saveTransParameters(transMeta);
        repository.connectionDelegate.closeTransAttributeInsertPreparedStatement();
        // Save the partition schemas used by this transformation.
        for (int i = 0; i < transMeta.getPartitionSchemas().size(); i++) {
            if (monitor != null && monitor.isCanceled()) {
                throw new KettleDatabaseException(BaseMessages.getString(PKG, "TransMeta.Log.UserCancelledTransSave"));
            }
            PartitionSchema partitionSchema = transMeta.getPartitionSchemas().get(i);
            // See if this transformation really is a consumer of this object
            // It might be simply loaded as a shared object from the repository
            // 
            boolean isUsedByTransformation = transMeta.isUsingPartitionSchema(partitionSchema);
            repository.save(partitionSchema, versionComment, null, transMeta.getObjectId(), isUsedByTransformation, overwriteAssociated);
        }
        // Save the slave servers used by this transformation.
        for (int i = 0; i < transMeta.getSlaveServers().size(); i++) {
            if (monitor != null && monitor.isCanceled()) {
                throw new KettleDatabaseException(BaseMessages.getString(PKG, "TransMeta.Log.UserCancelledTransSave"));
            }
            SlaveServer slaveServer = transMeta.getSlaveServers().get(i);
            boolean isUsedByTransformation = transMeta.isUsingSlaveServer(slaveServer);
            repository.save(slaveServer, versionComment, null, transMeta.getObjectId(), isUsedByTransformation, overwriteAssociated);
        }
        // Save the clustering schemas
        for (int i = 0; i < transMeta.getClusterSchemas().size(); i++) {
            if (monitor != null && monitor.isCanceled()) {
                throw new KettleDatabaseException(BaseMessages.getString(PKG, "TransMeta.Log.UserCancelledTransSave"));
            }
            ClusterSchema clusterSchema = transMeta.getClusterSchemas().get(i);
            boolean isUsedByTransformation = transMeta.isUsingClusterSchema(clusterSchema);
            repository.save(clusterSchema, versionComment, null, transMeta.getObjectId(), isUsedByTransformation, overwriteAssociated);
        }
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.SavingDependencies"));
        }
        for (int i = 0; i < transMeta.nrDependencies(); i++) {
            if (monitor != null && monitor.isCanceled()) {
                throw new KettleDatabaseException(BaseMessages.getString(PKG, "TransMeta.Log.UserCancelledTransSave"));
            }
            TransDependency td = transMeta.getDependency(i);
            saveTransDependency(td, transMeta.getObjectId());
        }
        saveTransAttributesMap(transMeta.getObjectId(), transMeta.getAttributesMap());
        // Save the step error handling information as well!
        for (int i = 0; i < transMeta.nrSteps(); i++) {
            StepMeta stepMeta = transMeta.getStep(i);
            StepErrorMeta stepErrorMeta = stepMeta.getStepErrorMeta();
            if (stepErrorMeta != null) {
                repository.stepDelegate.saveStepErrorMeta(stepErrorMeta, transMeta.getObjectId(), stepMeta.getObjectId());
            }
        }
        repository.connectionDelegate.closeStepAttributeInsertPreparedStatement();
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.SavingFinished"));
        }
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.UnlockingRepository"));
        }
        repository.unlockRepository();
        // Perform a commit!
        repository.commit();
        transMeta.clearChanged();
        if (monitor != null) {
            monitor.worked(1);
        }
        if (monitor != null) {
            monitor.done();
        }
    } catch (KettleDatabaseException dbe) {
        // Oops, roll back!
        repository.rollback();
        log.logError(BaseMessages.getString(PKG, "TransMeta.Log.ErrorSavingTransformationToRepository") + Const.CR + dbe.getMessage());
        throw new KettleException(BaseMessages.getString(PKG, "TransMeta.Log.ErrorSavingTransformationToRepository"), dbe);
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), PartitionSchema (org.pentaho.di.partition.PartitionSchema), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), StepErrorMeta (org.pentaho.di.trans.step.StepErrorMeta), TransDependency (org.pentaho.di.trans.TransDependency), SlaveServer (org.pentaho.di.cluster.SlaveServer), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), StepMeta (org.pentaho.di.trans.step.StepMeta), NotePadMeta (org.pentaho.di.core.NotePadMeta), TransHopMeta (org.pentaho.di.trans.TransHopMeta), ClusterSchema (org.pentaho.di.cluster.ClusterSchema)
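
saveTransformation reports progress solely through the ProgressMonitorListener callbacks shown above, so any UI (or none at all) can observe a save. Below is a minimal console-backed sketch of such a listener; the method set follows org.pentaho.di.core.ProgressMonitorListener as used in this method, the class name is made up, and the exact interface should be checked against your Kettle version. Returning true from isCanceled() at any checkpoint makes the save abort and roll back, as the catch block above shows.

import org.pentaho.di.core.ProgressMonitorListener;

// A hypothetical console-based monitor; pass an instance (or null) as the
// monitor argument of saveTransformation().
public class ConsoleProgressMonitor implements ProgressMonitorListener {
    private int total;
    private int completed;

    @Override
    public void beginTask(String message, int nrWorks) {
        total = nrWorks;
        System.out.println("BEGIN: " + message + " (" + nrWorks + " units)");
    }

    @Override
    public void subTask(String message) {
        System.out.println("  " + message);
    }

    @Override
    public boolean isCanceled() {
        // Always false here; a real UI would return its cancel state.
        return false;
    }

    @Override
    public void worked(int nrWorks) {
        completed += nrWorks;
        System.out.println("  progress: " + completed + "/" + total);
    }

    @Override
    public void setTaskName(String taskName) {
        System.out.println("TASK: " + taskName);
    }

    @Override
    public void done() {
        System.out.println("DONE");
    }
}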

Example 64 with DatabaseMeta

Use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryJobDelegate, method loadJobMeta.

/**
 * Loads a job from a repository directory.
 *
 * @param jobname
 *          the name of the job
 * @param repdir
 *          the directory in which the job resides
 * @param monitor
 *          the way we report progress to the user, can be null if no UI is present
 * @return the loaded job metadata
 * @throws KettleException
 *           if the job cannot be found or an error occurs while reading it
 */
public JobMeta loadJobMeta(String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor) throws KettleException {
    JobMeta jobMeta = new JobMeta();
    synchronized (repository) {
        try {
            // Clear everything...
            jobMeta.clear();
            jobMeta.setRepositoryDirectory(repdir);
            // Get the job id
            jobMeta.setObjectId(getJobID(jobname, repdir.getObjectId()));
            // If no valid ID is available in the database, report an error...
            if (jobMeta.getObjectId() != null) {
                // Load the notes...
                ObjectId[] noteids = repository.getJobNoteIDs(jobMeta.getObjectId());
                ObjectId[] jecids = repository.getJobEntryCopyIDs(jobMeta.getObjectId());
                ObjectId[] hopid = repository.getJobHopIDs(jobMeta.getObjectId());
                int nrWork = 2 + noteids.length + jecids.length + hopid.length;
                if (monitor != null) {
                    monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork);
                }
                // Read the basic job information.
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobInformation"));
                }
                RowMetaAndData jobRow = getJob(jobMeta.getObjectId());
                jobMeta.setName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, null));
                jobMeta.setDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null));
                jobMeta.setExtendedDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null));
                jobMeta.setJobversion(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null));
                jobMeta.setJobstatus(Const.toInt(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null), -1));
                jobMeta.setCreatedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null));
                jobMeta.setCreatedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date()));
                jobMeta.setModifiedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null));
                jobMeta.setModifiedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date()));
                long id_logdb = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0);
                if (id_logdb > 0) {
                    // Get the logconnection
                    // 
                    DatabaseMeta logDb = repository.loadDatabaseMeta(new LongObjectId(id_logdb), null);
                    jobMeta.getJobLogTable().setConnectionName(logDb.getName());
                // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith(jobMeta);
                }
                jobMeta.getJobLogTable().setTableName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null));
                jobMeta.getJobLogTable().setBatchIdUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false));
                jobMeta.getJobLogTable().setLogFieldUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false));
                jobMeta.getJobLogTable().setLogSizeLimit(getJobAttributeString(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT));
                jobMeta.setBatchIdPassed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false));
                // Load all the log tables for the job...
                // 
                RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
                for (LogTableInterface logTable : jobMeta.getLogTables()) {
                    logTable.loadFromRepository(attributeInterface);
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                // Read the databases available in the repository.
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository"));
                }
                // Read objects from the shared XML file & the repository
                try {
                    jobMeta.setSharedObjectsFile(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null));
                    jobMeta.setSharedObjects(repository != null ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects());
                } catch (Exception e) {
                    log.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
                    // Also log the stack trace.
                    log.logError(Const.getStackTracker(e));
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + noteids.length + " notes");
                }
                for (int i = 0; i < noteids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length);
                    }
                    NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
                    if (jobMeta.indexOfNote(ni) < 0) {
                        jobMeta.addNote(ni);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the group attributes map
                // 
                jobMeta.setAttributesMap(loadJobAttributesMap(jobMeta.getObjectId()));
                // Load the job entries...
                // 
                // Keep a unique list of job entries to facilitate in the loading.
                // 
                List<JobEntryInterface> jobentries = new ArrayList<JobEntryInterface>();
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + jecids.length + " job entries");
                }
                for (int i = 0; i < jecids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + (jecids.length));
                    }
                    JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy(jobMeta.getObjectId(), jecids[i], jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname);
                    if (jec.isMissing()) {
                        jobMeta.addMissingEntry((MissingEntry) jec.getEntry());
                    }
                    // Also set the copy number...
                    // We count the number of job entry copies that use the job
                    // entry
                    // 
                    int copyNr = 0;
                    for (JobEntryCopy copy : jobMeta.getJobCopies()) {
                        if (jec.getEntry() == copy.getEntry()) {
                            copyNr++;
                        }
                    }
                    jec.setNr(copyNr);
                    int idx = jobMeta.indexOfJobEntry(jec);
                    if (idx < 0) {
                        if (jec.getName() != null && jec.getName().length() > 0) {
                            jobMeta.addJobEntry(jec);
                        }
                    } else {
                        // replace it!
                        jobMeta.setJobEntry(idx, jec);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the hops...
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + hopid.length + " job hops");
                }
                for (int i = 0; i < hopid.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + (jecids.length));
                    }
                    JobHopMeta hi = loadJobHopMeta(hopid[i], jobMeta.getJobCopies());
                    jobMeta.getJobhops().add(hi);
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                loadRepParameters(jobMeta);
                // Finally, clear the changed flags...
                jobMeta.clearChanged();
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.FinishedLoadOfJob"));
                }
                if (monitor != null) {
                    monitor.done();
                }
                // close prepared statements, minimize locking etc.
                // 
                repository.connectionDelegate.closeAttributeLookupPreparedStatements();
                return jobMeta;
            } else {
                throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.CanNotFindJob") + jobname);
            }
        } catch (KettleException dbe) {
            throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
        } finally {
            jobMeta.initializeVariablesFrom(jobMeta.getParentVariableSpace());
            jobMeta.setInternalKettleVariables();
        }
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), JobMeta (org.pentaho.di.job.JobMeta), JobHopMeta (org.pentaho.di.job.JobHopMeta), JobEntryInterface (org.pentaho.di.job.entry.JobEntryInterface), LongObjectId (org.pentaho.di.repository.LongObjectId), ObjectId (org.pentaho.di.repository.ObjectId), ArrayList (java.util.ArrayList), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), Date (java.util.Date), ValueMetaDate (org.pentaho.di.core.row.value.ValueMetaDate), RepositoryAttributeInterface (org.pentaho.di.repository.RepositoryAttributeInterface), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), LogTableInterface (org.pentaho.di.core.logging.LogTableInterface), JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy), RowMetaAndData (org.pentaho.di.core.RowMetaAndData), NotePadMeta (org.pentaho.di.core.NotePadMeta)
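
One easily missed step above is the copy-number assignment: each loaded JobEntryCopy is numbered by how many previously loaded copies wrap the very same entry instance (note the identity comparison jec.getEntry() == copy.getEntry()). Here is a self-contained sketch of that counting rule, using plain stand-in classes rather than the Kettle ones:

import java.util.ArrayList;
import java.util.List;

public class CopyNumberDemo {
    static class Entry { final String name; Entry(String name) { this.name = name; } }
    static class Copy { final Entry entry; int nr; Copy(Entry entry) { this.entry = entry; } }

    public static void main(String[] args) {
        Entry start = new Entry("START");
        Entry dummy = new Entry("DUMMY");
        List<Copy> copies = new ArrayList<>();
        for (Entry e : new Entry[] { start, dummy, dummy, start, dummy }) {
            Copy jec = new Copy(e);
            int copyNr = 0;
            for (Copy existing : copies) {
                // Identity comparison, just like jec.getEntry() == copy.getEntry().
                if (jec.entry == existing.entry) {
                    copyNr++;
                }
            }
            jec.nr = copyNr;
            copies.add(jec);
            System.out.println(e.name + " -> copy nr " + jec.nr);
        }
        // Prints copy numbers 0, 0, 1, 1, 2: the second DUMMY becomes copy 1, etc.
    }
}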

Example 65 with DatabaseMeta

Use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryJobDelegate, method readDatabases.

/**
 * Read the database connections in the repository and add them to this job if they are not yet present.
 *
 * @param jobMeta
 *          the job to put the database connections in
 * @param overWriteShared
 *          set to true if you want to overwrite shared connections while loading.
 * @throws KettleException
 */
public void readDatabases(JobMeta jobMeta, boolean overWriteShared) throws KettleException {
    try {
        ObjectId[] dbids = repository.getDatabaseIDs(false);
        for (int i = 0; i < dbids.length; i++) {
            // reads last version
            DatabaseMeta databaseMeta = repository.loadDatabaseMeta(dbids[i], null);
            databaseMeta.shareVariablesWith(jobMeta);
            // See if there already is one in the transformation
            // 
            DatabaseMeta check = jobMeta.findDatabase(databaseMeta.getName());
            // Add the connection if it's not there yet, or overwrite it when requested.
            if (check == null || overWriteShared) {
                if (databaseMeta.getName() != null) {
                    jobMeta.addOrReplaceDatabase(databaseMeta);
                    if (!overWriteShared) {
                        databaseMeta.setChanged(false);
                    }
                }
            }
        }
        jobMeta.setChanged(false);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Log.UnableToReadDatabaseIDSFromRepository"), dbe);
    } catch (KettleException ke) {
        throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Log.UnableToReadDatabasesFromRepository"), ke);
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), LongObjectId (org.pentaho.di.repository.LongObjectId), ObjectId (org.pentaho.di.repository.ObjectId), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta)
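
A hypothetical call site for this method is sketched below. It assumes the jobDelegate field is publicly reachable on KettleDatabaseRepository, the same way stepDelegate and connectionDelegate are used in the earlier examples; the helper name is made up.

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;

public class ReadDatabasesDemo {
    // Hypothetical helper: refresh a job's connections from the repository,
    // keeping locally edited connections unless overwriting is requested.
    public static void refreshConnections(KettleDatabaseRepository repository, JobMeta jobMeta)
            throws KettleException {
        boolean overWriteShared = false;
        repository.jobDelegate.readDatabases(jobMeta, overWriteShared);
        // After the call: connections missing from jobMeta have been added from
        // the repository; existing ones were left alone (overWriteShared is
        // false); newly added ones are marked unchanged, so a later save will
        // not rewrite them needlessly.
    }
}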

Aggregations

DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 522 usages
Test (org.junit.Test): 133 usages
KettleException (org.pentaho.di.core.exception.KettleException): 131 usages
Database (org.pentaho.di.core.database.Database): 88 usages
MessageBox (org.eclipse.swt.widgets.MessageBox): 66 usages
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 63 usages
TransMeta (org.pentaho.di.trans.TransMeta): 57 usages
StepMeta (org.pentaho.di.trans.step.StepMeta): 54 usages
ArrayList (java.util.ArrayList): 53 usages
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 48 usages
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface): 44 usages
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 42 usages
SlaveServer (org.pentaho.di.cluster.SlaveServer): 33 usages
IMetaStore (org.pentaho.metastore.api.IMetaStore): 30 usages
ObjectId (org.pentaho.di.repository.ObjectId): 29 usages
DatabaseExplorerDialog (org.pentaho.di.ui.core.database.dialog.DatabaseExplorerDialog): 29 usages
JobMeta (org.pentaho.di.job.JobMeta): 26 usages
TransHopMeta (org.pentaho.di.trans.TransHopMeta): 26 usages
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 24 usages
PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry): 24 usages