Example 6 with DataNode

Use of org.pentaho.platform.api.repository2.unified.data.node.DataNode in project pentaho-kettle by pentaho.

The class AttributesMapUtil, method loadAttributesMap:

public static final void loadAttributesMap(DataNode dataNode, AttributesInterface attributesInterface) throws KettleException {
    // Hand a fresh, empty map to the element up front, then fill it in place.
    Map<String, Map<String, String>> attributesMap = new HashMap<>();
    attributesInterface.setAttributesMap(attributesMap);
    DataNode groupsNode = dataNode.getNode(NODE_ATTRIBUTE_GROUPS);
    if (groupsNode != null) {
        // One child node per attribute group...
        for (DataNode groupNode : groupsNode.getNodes()) {
            Map<String, String> attributes = new HashMap<>();
            attributesMap.put(groupNode.getName(), attributes);
            // ...and one property per attribute within that group.
            for (DataProperty dataProperty : groupNode.getProperties()) {
                String key = dataProperty.getName();
                String value = dataProperty.getString();
                if (key != null && value != null) {
                    attributes.put(key, value);
                }
            }
        }
    }
}
Also used: HashMap (java.util.HashMap), DataNode (org.pentaho.platform.api.repository2.unified.data.node.DataNode), DataProperty (org.pentaho.platform.api.repository2.unified.data.node.DataProperty), Map (java.util.Map)
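
For orientation, here is a minimal, self-contained sketch of the node shape this method expects. It uses only the DataNode/DataProperty calls shown above; the node and attribute names are hypothetical stand-ins (the real code uses the NODE_ATTRIBUTE_GROUPS constant):

import org.pentaho.platform.api.repository2.unified.data.node.DataNode;
import org.pentaho.platform.api.repository2.unified.data.node.DataProperty;

public class AttributesNodeSketch {
    public static void main(String[] args) {
        // Build the structure loadAttributesMap() reads: a parent node holding
        // an attribute-groups child, one node per group, one property per attribute.
        DataNode dataNode = new DataNode("step");
        DataNode groupsNode = dataNode.addNode("attributeGroups"); // stands in for NODE_ATTRIBUTE_GROUPS
        DataNode groupNode = groupsNode.addNode("myGroup"); // hypothetical group name
        groupNode.setProperty("color", "red");
        groupNode.setProperty("size", "42");

        // Read it back the same way loadAttributesMap() does.
        for (DataNode group : dataNode.getNode("attributeGroups").getNodes()) {
            for (DataProperty property : group.getProperties()) {
                System.out.println(group.getName() + "." + property.getName() + " = " + property.getString());
            }
        }
    }
}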

Example 7 with DataNode

Use of org.pentaho.platform.api.repository2.unified.data.node.DataNode in project pentaho-kettle by pentaho.

The class ClusterDelegate, method dataNodeToElement:

public void dataNodeToElement(DataNode rootNode, RepositoryElementInterface element) throws KettleException {
    ClusterSchema clusterSchema = (ClusterSchema) element;
    // The metadata...
    clusterSchema.setBasePort(getString(rootNode, PROP_BASE_PORT));
    clusterSchema.setSocketsBufferSize(getString(rootNode, PROP_SOCKETS_BUFFER_SIZE));
    clusterSchema.setSocketsFlushInterval(getString(rootNode, PROP_SOCKETS_FLUSH_INTERVAL));
    clusterSchema.setSocketsCompressed(rootNode.getProperty(PROP_SOCKETS_COMPRESSED).getBoolean());
    clusterSchema.setDynamic(rootNode.getProperty(PROP_DYNAMIC).getBoolean());
    DataNode attrNode = rootNode.getNode(NODE_ATTRIBUTES);
    // The slaves...
    long nrSlaves = attrNode.getProperty(PROP_NB_SLAVE_SERVERS).getLong();
    for (int i = 0; i < nrSlaves; i++) {
        if (attrNode.hasProperty(String.valueOf(i))) {
            DataNodeRef slaveNodeRef = attrNode.getProperty(String.valueOf(i)).getRef();
            clusterSchema.getSlaveServers().add(findSlaveServer(new StringObjectId(slaveNodeRef.toString())));
        }
    }
}
Also used: DataNodeRef (org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef), DataNode (org.pentaho.platform.api.repository2.unified.data.node.DataNode), StringObjectId (org.pentaho.di.repository.StringObjectId), ClusterSchema (org.pentaho.di.cluster.ClusterSchema)
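
The delegate leans on DataProperty's typed getters (getString(), getBoolean(), getLong(), getRef()). A minimal sketch of that pattern, with hypothetical property names in place of the PROP_* constants:

import org.pentaho.platform.api.repository2.unified.data.node.DataNode;

public class TypedPropertySketch {
    public static void main(String[] args) {
        DataNode rootNode = new DataNode("clusterSchema");
        rootNode.setProperty("BASE_PORT", "40000");
        rootNode.setProperty("DYNAMIC", true);
        DataNode attrNode = rootNode.addNode("attributes");
        attrNode.setProperty("NB_SLAVE_SERVERS", 2L);

        // Each getter converts the stored property value to the requested type.
        String basePort = rootNode.getProperty("BASE_PORT").getString();
        boolean dynamic = rootNode.getProperty("DYNAMIC").getBoolean();
        long nrSlaves = rootNode.getNode("attributes").getProperty("NB_SLAVE_SERVERS").getLong();

        System.out.printf("port=%s dynamic=%b slaves=%d%n", basePort, dynamic, nrSlaves);
    }
}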

Example 8 with DataNode

Use of org.pentaho.platform.api.repository2.unified.data.node.DataNode in project pentaho-kettle by pentaho.

The class ClusterDelegate, method elementToDataNode:

public DataNode elementToDataNode(RepositoryElementInterface element) throws KettleException {
    ClusterSchema clusterSchema = (ClusterSchema) element;
    DataNode rootNode = new DataNode(NODE_ROOT);
    // save the properties...
    rootNode.setProperty(PROP_BASE_PORT, clusterSchema.getBasePort());
    rootNode.setProperty(PROP_SOCKETS_BUFFER_SIZE, clusterSchema.getSocketsBufferSize());
    rootNode.setProperty(PROP_SOCKETS_FLUSH_INTERVAL, clusterSchema.getSocketsFlushInterval());
    rootNode.setProperty(PROP_SOCKETS_COMPRESSED, clusterSchema.isSocketsCompressed());
    rootNode.setProperty(PROP_DYNAMIC, clusterSchema.isDynamic());
    DataNode attrNode = rootNode.addNode(NODE_ATTRIBUTES);
    // Also save the used slave server references.
    attrNode.setProperty(PROP_NB_SLAVE_SERVERS, clusterSchema.getSlaveServers().size());
    for (int i = 0; i < clusterSchema.getSlaveServers().size(); i++) {
        SlaveServer slaveServer = clusterSchema.getSlaveServers().get(i);
        DataNodeRef slaveNodeRef = new DataNodeRef(slaveServer.getObjectId().getId());
        // Save the slave server by reference, this way it becomes impossible to delete the slave by accident when still
        // in use.
        attrNode.setProperty(String.valueOf(i), slaveNodeRef);
    }
    return rootNode;
}
Also used: DataNodeRef (org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef), DataNode (org.pentaho.platform.api.repository2.unified.data.node.DataNode), SlaveServer (org.pentaho.di.cluster.SlaveServer), ClusterSchema (org.pentaho.di.cluster.ClusterSchema)
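
A minimal sketch of the save-by-reference pattern above, paired with the lookup from Example 7; the object ids are hypothetical stand-ins for SlaveServer.getObjectId().getId():

import org.pentaho.platform.api.repository2.unified.data.node.DataNode;
import org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef;

public class NodeRefSketch {
    public static void main(String[] args) {
        DataNode attrNode = new DataNode("attributes");
        String[] slaveIds = { "id-0", "id-1" }; // hypothetical repository object ids

        // Store one reference property per slave server, keyed by its index.
        attrNode.setProperty("NB_SLAVE_SERVERS", (long) slaveIds.length);
        for (int i = 0; i < slaveIds.length; i++) {
            attrNode.setProperty(String.valueOf(i), new DataNodeRef(slaveIds[i]));
        }

        // Resolve the references the way dataNodeToElement() does.
        long nrSlaves = attrNode.getProperty("NB_SLAVE_SERVERS").getLong();
        for (int i = 0; i < nrSlaves; i++) {
            if (attrNode.hasProperty(String.valueOf(i))) {
                DataNodeRef ref = attrNode.getProperty(String.valueOf(i)).getRef();
                System.out.println("slave " + i + " -> " + ref);
            }
        }
    }
}

Storing a DataNodeRef rather than a copy is what lets the repository refuse to delete a slave server that a cluster schema still references, as the comment in the method notes.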

Example 9 with DataNode

Use of org.pentaho.platform.api.repository2.unified.data.node.DataNode in project pentaho-kettle by pentaho.

The class JobDelegate, method saveJobDetails:

private void saveJobDetails(DataNode rootNode, JobMeta jobMeta) throws KettleException {
    rootNode.setProperty(PROP_EXTENDED_DESCRIPTION, jobMeta.getExtendedDescription());
    rootNode.setProperty(PROP_JOB_VERSION, jobMeta.getJobversion());
    rootNode.setProperty(PROP_JOB_STATUS, jobMeta.getJobstatus() < 0 ? -1L : jobMeta.getJobstatus());
    if (jobMeta.getJobLogTable().getDatabaseMeta() != null) {
        DataNodeRef ref = new DataNodeRef(jobMeta.getJobLogTable().getDatabaseMeta().getObjectId().getId());
        rootNode.setProperty(PROP_DATABASE_LOG, ref);
    }
    rootNode.setProperty(PROP_TABLE_NAME_LOG, jobMeta.getJobLogTable().getTableName());
    rootNode.setProperty(PROP_CREATED_USER, jobMeta.getCreatedUser());
    rootNode.setProperty(PROP_CREATED_DATE, jobMeta.getCreatedDate());
    rootNode.setProperty(PROP_MODIFIED_USER, jobMeta.getModifiedUser());
    rootNode.setProperty(PROP_MODIFIED_DATE, jobMeta.getModifiedDate());
    rootNode.setProperty(PROP_USE_BATCH_ID, jobMeta.getJobLogTable().isBatchIdUsed());
    rootNode.setProperty(PROP_PASS_BATCH_ID, jobMeta.isBatchIdPassed());
    rootNode.setProperty(PROP_USE_LOGFIELD, jobMeta.getJobLogTable().isLogFieldUsed());
    rootNode.setProperty(PROP_SHARED_FILE, jobMeta.getSharedObjectsFile());
    rootNode.setProperty(PROP_LOG_SIZE_LIMIT, jobMeta.getJobLogTable().getLogSizeLimit());
    // Save the logging tables too..
    // 
    RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute(rootNode, jobMeta.getDatabases());
    for (LogTableInterface logTable : jobMeta.getLogTables()) {
        logTable.saveToRepository(attributeInterface);
    }
    // Save the attributes map
    // 
    AttributesMapUtil.saveAttributesMap(rootNode, jobMeta);
}
Also used: DataNodeRef (org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef), LogTableInterface (org.pentaho.di.core.logging.LogTableInterface), RepositoryAttributeInterface (org.pentaho.di.repository.RepositoryAttributeInterface)
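
A minimal sketch of the mixed-type writes above, including the null guard on the optional log-database reference; the property names and id are hypothetical:

import java.util.Date;

import org.pentaho.platform.api.repository2.unified.data.node.DataNode;
import org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef;
import org.pentaho.platform.api.repository2.unified.data.node.DataProperty;

public class JobDetailsSketch {
    public static void main(String[] args) {
        DataNode rootNode = new DataNode("job");

        // DataNode properties are typed: strings, booleans, dates, longs, refs.
        rootNode.setProperty("EXTENDED_DESCRIPTION", "nightly load");
        rootNode.setProperty("CREATED_DATE", new Date());
        rootNode.setProperty("USE_BATCH_ID", true);

        // Optional reference: write it only when the target exists, mirroring
        // the null check on the job log table's DatabaseMeta above.
        String logDbId = null; // hypothetical; would come from DatabaseMeta.getObjectId().getId()
        if (logDbId != null) {
            rootNode.setProperty("DATABASE_LOG", new DataNodeRef(logDbId));
        }

        for (DataProperty property : rootNode.getProperties()) {
            System.out.println(property.getName());
        }
    }
}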

Example 10 with DataNode

Use of org.pentaho.platform.api.repository2.unified.data.node.DataNode in project pentaho-kettle by pentaho.

The class SlaveDelegate, method elementToDataNode:

public DataNode elementToDataNode(RepositoryElementInterface element) throws KettleException {
    SlaveServer slaveServer = (SlaveServer) element;
    DataNode rootNode = new DataNode(NODE_ROOT);
    /*
     * // Check for naming collision ObjectId slaveId = repo.getSlaveID(slaveServer.getName()); if (slaveId != null &&
     * slaveServer.getObjectId()!=null && !slaveServer.getObjectId().equals(slaveId)) { // We have a naming collision,
     * abort the save throw new KettleException("Failed to save object to repository. Object [" + slaveServer.getName()
     * + "] already exists."); }
     */
    // Create or version a new slave node
    // 
    rootNode.setProperty(PROP_HOST_NAME, slaveServer.getHostname());
    rootNode.setProperty(PROP_PORT, slaveServer.getPort());
    rootNode.setProperty(PROP_WEBAPP_NAME, slaveServer.getWebAppName());
    rootNode.setProperty(PROP_USERNAME, slaveServer.getUsername());
    rootNode.setProperty(PROP_PASSWORD, Encr.encryptPasswordIfNotUsingVariables(slaveServer.getPassword()));
    rootNode.setProperty(PROP_PROXY_HOST_NAME, slaveServer.getProxyHostname());
    rootNode.setProperty(PROP_PROXY_PORT, slaveServer.getProxyPort());
    rootNode.setProperty(PROP_NON_PROXY_HOSTS, slaveServer.getNonProxyHosts());
    rootNode.setProperty(PROP_MASTER, slaveServer.isMaster());
    return rootNode;
}
Also used: DataNode (org.pentaho.platform.api.repository2.unified.data.node.DataNode), SlaveServer (org.pentaho.di.cluster.SlaveServer)
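
A minimal sketch of the flat property mapping above; the encrypt() helper is a hypothetical stand-in for Encr.encryptPasswordIfNotUsingVariables(), which encrypts the password unless it contains a ${variable} reference:

import org.pentaho.platform.api.repository2.unified.data.node.DataNode;

public class SlaveNodeSketch {
    // Hypothetical stand-in for Encr.encryptPasswordIfNotUsingVariables().
    static String encrypt(String password) {
        return "Encrypted:" + new StringBuilder(password).reverse(); // placeholder, not real encryption
    }

    public static void main(String[] args) {
        DataNode rootNode = new DataNode("slaveServer");
        rootNode.setProperty("HOST_NAME", "worker-1.example.com"); // hypothetical host
        rootNode.setProperty("PORT", "8081");
        rootNode.setProperty("PASSWORD", encrypt("secret"));
        rootNode.setProperty("MASTER", false);

        System.out.println(rootNode.getProperty("HOST_NAME").getString());
    }
}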

Aggregations

DataNode (org.pentaho.platform.api.repository2.unified.data.node.DataNode): 63 usages
NodeRepositoryFileData (org.pentaho.platform.api.repository2.unified.data.node.NodeRepositoryFileData): 22 usages
Test (org.junit.Test): 17 usages
RepositoryFile (org.pentaho.platform.api.repository2.unified.RepositoryFile): 17 usages
DataProperty (org.pentaho.platform.api.repository2.unified.data.node.DataProperty): 15 usages
DataNodeRef (org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef): 11 usages
Matchers.anyString (org.mockito.Matchers.anyString): 7 usages
Date (java.util.Date): 6 usages
IUnifiedRepository (org.pentaho.platform.api.repository2.unified.IUnifiedRepository): 6 usages
ArrayList (java.util.ArrayList): 5 usages
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 5 usages
ITenant (org.pentaho.platform.api.mt.ITenant): 5 usages
HashMap (java.util.HashMap): 4 usages
Map (java.util.Map): 4 usages
NotePadMeta (org.pentaho.di.core.NotePadMeta): 4 usages
KettleException (org.pentaho.di.core.exception.KettleException): 4 usages
JobMeta (org.pentaho.di.job.JobMeta): 4 usages
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 4 usages
StringObjectId (org.pentaho.di.repository.StringObjectId): 4 usages
Properties (java.util.Properties): 3 usages