
Example 61 with SlaveServer

Use of org.pentaho.di.cluster.SlaveServer in the pentaho-kettle project by pentaho.

From the class Spoon, method delSlaveServer.

public void delSlaveServer() {
    final HasSlaveServersInterface hasSlaveServersInterface = (HasSlaveServersInterface) selectionObjectParent;
    final SlaveServer slaveServer = (SlaveServer) selectionObject;
    delSlaveServer(hasSlaveServersInterface, slaveServer);
}
Also used: HasSlaveServersInterface (org.pentaho.di.trans.HasSlaveServersInterface), SlaveServer (org.pentaho.di.cluster.SlaveServer)
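
The two-argument delSlaveServer(HasSlaveServersInterface, SlaveServer) delegate is not shown in this excerpt. As a rough orientation, here is a minimal sketch of such a removal helper, assuming only that the owner exposes its list through HasSlaveServersInterface.getSlaveServers() and the imports listed above plus java.util.List; the confirmation dialog and tree refresh performed by the real Spoon delegate are omitted, and the helper name removeSlaveServer is ours.

public void removeSlaveServer(HasSlaveServersInterface owner, SlaveServer slaveServer) {
    // Hypothetical, simplified sketch: drop the selected slave server from the
    // owning job or transformation metadata. The real Spoon code also asks the
    // user for confirmation and refreshes the object tree afterwards.
    List<SlaveServer> slaveServers = owner.getSlaveServers();
    int index = slaveServers.indexOf(slaveServer);
    if (index >= 0) {
        slaveServers.remove(index);
    }
}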

Example 62 with SlaveServer

Use of org.pentaho.di.cluster.SlaveServer in the pentaho-kettle project by pentaho.

From the class Spoon, method loadSessionInformation.

private void loadSessionInformation(Repository repository, boolean saveOldDatabases) {
    JobMeta[] jobMetas = getLoadedJobs();
    for (JobMeta jobMeta : jobMetas) {
        for (int i = 0; i < jobMeta.nrDatabases(); i++) {
            jobMeta.getDatabase(i).setObjectId(null);
        }
        // Clear the object ID of the existing job.
        jobMeta.setObjectId(null);
        // Keep track of the old databases for now.
        List<DatabaseMeta> oldDatabases = jobMeta.getDatabases();
        // In order to re-match the databases on name (not content), we
        // need to load the databases from the new repository.
        // NOTE: for purposes such as DEVELOP - TEST - PRODUCTION
        // cycles.
        // first clear the list of databases and slave servers
        jobMeta.setDatabases(new ArrayList<DatabaseMeta>());
        jobMeta.setSlaveServers(new ArrayList<SlaveServer>());
        // Read them from the new repository.
        try {
            SharedObjects sharedObjects = repository != null ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects();
            sharedObjectsFileMap.put(sharedObjects.getFilename(), sharedObjects);
        } catch (KettleException e) {
            new ErrorDialog(shell, BaseMessages.getString(PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message", makeTabName(jobMeta, true)), e);
        }
        // Then we need to re-match the databases at save time...
        for (DatabaseMeta oldDatabase : oldDatabases) {
            DatabaseMeta newDatabase = DatabaseMeta.findDatabase(jobMeta.getDatabases(), oldDatabase.getName());
            // If it exists, change the settings...
            if (newDatabase != null) {
                // 
                // A database connection with the same name exists in
                // the new repository.
                // Change the old connections to reflect the settings in
                // the new repository
                // 
                oldDatabase.setDatabaseInterface(newDatabase.getDatabaseInterface());
            } else {
                if (saveOldDatabases) {
                    // 
                    // The old database is not present in the new
                    // repository: simply add it to the list.
                    // When the job gets saved, it will be added
                    // to the repository.
                    // 
                    jobMeta.addDatabase(oldDatabase);
                }
            }
        }
        if (repository != null) {
            try {
                // For the existing job, change the directory too:
                // Try to find the same directory in the new repository...
                RepositoryDirectoryInterface rdi = repository.findDirectory(jobMeta.getRepositoryDirectory().getPath());
                if (rdi != null && !rdi.getPath().equals("/")) {
                    jobMeta.setRepositoryDirectory(rdi);
                } else {
                    // the root is the default!
                    jobMeta.setRepositoryDirectory(repository.loadRepositoryDirectoryTree());
                }
            } catch (KettleException ke) {
                rep = null;
                new ErrorDialog(shell, BaseMessages.getString(PKG, "Spoon.Dialog.ErrorConnectingRepository.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.ErrorConnectingRepository.Message", Const.CR), ke);
            }
        }
    }
    TransMeta[] transMetas = getLoadedTransformations();
    for (TransMeta transMeta : transMetas) {
        for (int i = 0; i < transMeta.nrDatabases(); i++) {
            transMeta.getDatabase(i).setObjectId(null);
        }
        // Clear the object ID of the existing transformation.
        transMeta.setObjectId(null);
        // Keep track of the old databases for now.
        List<DatabaseMeta> oldDatabases = transMeta.getDatabases();
        // In order to re-match the databases on name (not content), we
        // need to load the databases from the new repository.
        // NOTE: for purposes such as DEVELOP - TEST - PRODUCTION
        // cycles.
        // first clear the list of databases, partition schemas, slave
        // servers, clusters
        transMeta.setDatabases(new ArrayList<DatabaseMeta>());
        transMeta.setPartitionSchemas(new ArrayList<PartitionSchema>());
        transMeta.setSlaveServers(new ArrayList<SlaveServer>());
        transMeta.setClusterSchemas(new ArrayList<ClusterSchema>());
        // Read them from the new repository.
        try {
            SharedObjects sharedObjects = repository != null ? repository.readTransSharedObjects(transMeta) : transMeta.readSharedObjects();
            sharedObjectsFileMap.put(sharedObjects.getFilename(), sharedObjects);
        } catch (KettleException e) {
            new ErrorDialog(shell, BaseMessages.getString(PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message", makeTabName(transMeta, true)), e);
        }
        // Then we need to re-match the databases at save time...
        for (DatabaseMeta oldDatabase : oldDatabases) {
            DatabaseMeta newDatabase = DatabaseMeta.findDatabase(transMeta.getDatabases(), oldDatabase.getName());
            // If it exists, change the settings...
            if (newDatabase != null) {
                // 
                // A database connection with the same name exists in
                // the new repository.
                // Change the old connections to reflect the settings in
                // the new repository
                // 
                oldDatabase.setDatabaseInterface(newDatabase.getDatabaseInterface());
            } else {
                if (saveOldDatabases) {
                    // 
                    // The old database is not present in the new
                    // repository: simply add it to the list.
                    // When the transformation gets saved, it will be added
                    // to the repository.
                    // 
                    transMeta.addDatabase(oldDatabase);
                }
            }
        }
        if (repository != null) {
            try {
                // For the existing transformation, change the directory too:
                // Try to find the same directory in the new repository...
                RepositoryDirectoryInterface rdi = repository.findDirectory(transMeta.getRepositoryDirectory().getPath());
                if (rdi != null && !rdi.getPath().equals("/")) {
                    transMeta.setRepositoryDirectory(rdi);
                } else {
                    // the root is the default!
                    transMeta.setRepositoryDirectory(repository.loadRepositoryDirectoryTree());
                }
            } catch (KettleException ke) {
                rep = null;
                new ErrorDialog(shell, BaseMessages.getString(PKG, "Spoon.Dialog.ErrorConnectingRepository.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.ErrorConnectingRepository.Message", Const.CR), ke);
            }
        }
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), RepositoryDirectoryInterface (org.pentaho.di.repository.RepositoryDirectoryInterface), JobMeta (org.pentaho.di.job.JobMeta), PartitionSchema (org.pentaho.di.partition.PartitionSchema), TransMeta (org.pentaho.di.trans.TransMeta), ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog), SharedObjects (org.pentaho.di.shared.SharedObjects), SlaveServer (org.pentaho.di.cluster.SlaveServer), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), Point (org.pentaho.di.core.gui.Point), KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint), ClusterSchema (org.pentaho.di.cluster.ClusterSchema)
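
Both loops above apply the same name-based re-match between the previously loaded connections and the shared objects read from the new repository. A condensed sketch of just that pattern is shown below, assuming the DatabaseMeta.findDatabase(), getDatabases() and addDatabase() calls used above are available on the shared AbstractMeta base class; the helper name rematchDatabases is ours and does not exist in Spoon.

private void rematchDatabases(AbstractMeta meta, List<DatabaseMeta> oldDatabases, boolean saveOldDatabases) {
    // Hypothetical helper isolating the re-match pattern used above for both
    // jobs and transformations: keep the old DatabaseMeta objects, but adopt
    // the settings of any connection with the same name in the new repository,
    // and optionally re-add connections the new repository does not know about.
    for (DatabaseMeta oldDatabase : oldDatabases) {
        DatabaseMeta newDatabase = DatabaseMeta.findDatabase(meta.getDatabases(), oldDatabase.getName());
        if (newDatabase != null) {
            // A connection with the same name exists in the new repository: take over its settings.
            oldDatabase.setDatabaseInterface(newDatabase.getDatabaseInterface());
        } else if (saveOldDatabases) {
            // Unknown in the new repository: keep it so it is added on the next save.
            meta.addDatabase(oldDatabase);
        }
    }
}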

Example 63 with SlaveServer

Use of org.pentaho.di.cluster.SlaveServer in the pentaho-kettle project by pentaho.

From the class Spoon, method editSlaveServer.

public void editSlaveServer() {
    final SlaveServer slaveServer = (SlaveServer) selectionObject;
    editSlaveServer(slaveServer);
}
Also used: SlaveServer (org.pentaho.di.cluster.SlaveServer)
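
The single-argument editSlaveServer(SlaveServer) delegate opens the slave server dialog and is not shown here. For orientation, a small sketch of building and adjusting a SlaveServer definition programmatically, assuming the conventional name/hostname/port/username/password constructor and the matching setters; all values below are placeholders.

// Illustrative only: define a Carte slave server in code instead of through the dialog.
SlaveServer carteServer = new SlaveServer("carte1", "localhost", "8081", "cluster", "cluster");
// Individual fields can be adjusted afterwards with the corresponding setters.
carteServer.setHostname("etl-node-1.example.com");
carteServer.setPort("8082");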

Example 64 with SlaveServer

Use of org.pentaho.di.cluster.SlaveServer in the pentaho-kettle project by pentaho.

From the class AbstractMeta, method addOrReplaceSlaveServer.

/**
 * Add a new slave server to the transformation if it does not exist yet. Otherwise, replace the existing one.
 *
 * @param slaveServer The slave server to be added.
 */
public void addOrReplaceSlaveServer(SlaveServer slaveServer) {
    int index = slaveServers.indexOf(slaveServer);
    if (index < 0) {
        slaveServers.add(slaveServer);
    } else {
        SlaveServer previous = slaveServers.get(index);
        previous.replaceMeta(slaveServer);
    }
    setChanged();
}
Also used: SlaveServer (org.pentaho.di.cluster.SlaveServer), Point (org.pentaho.di.core.gui.Point)
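
The indexOf() call relies on SlaveServer's equals() implementation, which in Kettle is effectively a match on the server name, so adding a second definition with the same name replaces the existing entry rather than duplicating it. A short usage sketch under that assumption, reusing the placeholder constructor from the previous example on a TransMeta:

// Illustrative usage: the second call replaces the metadata of "carte1" in place.
TransMeta transMeta = new TransMeta();
transMeta.addOrReplaceSlaveServer(new SlaveServer("carte1", "localhost", "8081", "cluster", "cluster"));
transMeta.addOrReplaceSlaveServer(new SlaveServer("carte1", "etl-node-1.example.com", "8082", "cluster", "cluster"));
// transMeta.getSlaveServers().size() is still 1, now pointing at the updated host and port.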

Example 65 with SlaveServer

Use of org.pentaho.di.cluster.SlaveServer in the pentaho-kettle project by pentaho.

From the class AbstractMeta, method clear.

public void clear() {
    setName(null);
    setFilename(null);
    notes = new ArrayList<NotePadMeta>();
    databases = new ArrayList<DatabaseMeta>();
    slaveServers = new ArrayList<SlaveServer>();
    channelLogTable = ChannelLogTable.getDefault(this, this);
    attributesMap = new HashMap<String, Map<String, String>>();
    max_undo = Const.MAX_UNDO;
    clearUndo();
    clearChanged();
    setChanged(false);
    channelLogTable = ChannelLogTable.getDefault(this, this);
    createdUser = "-";
    createdDate = new Date();
    modifiedUser = "-";
    modifiedDate = new Date();
    directory = new RepositoryDirectory();
    description = null;
    extendedDescription = null;
}
Also used: RepositoryDirectory (org.pentaho.di.repository.RepositoryDirectory), NotePadMeta (org.pentaho.di.core.NotePadMeta), ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString), SlaveServer (org.pentaho.di.cluster.SlaveServer), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), Map (java.util.Map), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), HashMap (java.util.HashMap), Date (java.util.Date)
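
clear() resets the metadata to its initial state, which includes dropping every registered slave server along with the databases, notes, undo history and descriptive fields. A brief sketch of the observable effect, assuming the TransMeta and SlaveServer usage from the earlier examples:

// Illustrative only: after clear(), previously registered slave servers are gone.
TransMeta transMeta = new TransMeta();
transMeta.addOrReplaceSlaveServer(new SlaveServer("carte1", "localhost", "8081", "cluster", "cluster"));
transMeta.clear();
// transMeta.getSlaveServers() is now an empty list again.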

Aggregations

SlaveServer (org.pentaho.di.cluster.SlaveServer): 110
KettleException (org.pentaho.di.core.exception.KettleException): 35
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 32
Test (org.junit.Test): 22
ClusterSchema (org.pentaho.di.cluster.ClusterSchema): 22
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 18
PartitionSchema (org.pentaho.di.partition.PartitionSchema): 18
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 17
JobMeta (org.pentaho.di.job.JobMeta): 16
ObjectId (org.pentaho.di.repository.ObjectId): 16
StepMeta (org.pentaho.di.trans.step.StepMeta): 14
ArrayList (java.util.ArrayList): 13
TransMeta (org.pentaho.di.trans.TransMeta): 11
Result (org.pentaho.di.core.Result): 10
KettleFileException (org.pentaho.di.core.exception.KettleFileException): 10
UnknownParamException (org.pentaho.di.core.parameters.UnknownParamException): 10
NotePadMeta (org.pentaho.di.core.NotePadMeta): 9
Point (org.pentaho.di.core.gui.Point): 8
List (java.util.List): 7
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 7