
Example 11 with StepPartitioningMeta

use of org.pentaho.di.trans.step.StepPartitioningMeta in project pentaho-kettle by pentaho.

the class TransSplitter method verifySlavePartitioningConfiguration.

private void verifySlavePartitioningConfiguration(TransMeta slave, StepMeta stepMeta, ClusterSchema clusterSchema, SlaveServer slaveServer) {
    Map<StepMeta, String> stepPartitionFlag = slaveStepPartitionFlag.get(slave);
    if (stepPartitionFlag == null) {
        stepPartitionFlag = new Hashtable<StepMeta, String>();
        slaveStepPartitionFlag.put(slave, stepPartitionFlag);
    }
    if (stepPartitionFlag.get(stepMeta) != null) {
        // already done, nothing more to verify
        return;
    }
    StepPartitioningMeta partitioningMeta = stepMeta.getStepPartitioningMeta();
    if (partitioningMeta != null && partitioningMeta.getMethodType() != StepPartitioningMeta.PARTITIONING_METHOD_NONE && partitioningMeta.getPartitionSchema() != null) {
        // Find the schemaPartitions map to use
        Map<PartitionSchema, List<String>> schemaPartitionsMap = slaveServerPartitionsMap.get(slaveServer);
        if (schemaPartitionsMap != null) {
            PartitionSchema partitionSchema = partitioningMeta.getPartitionSchema();
            List<String> partitionsList = schemaPartitionsMap.get(partitionSchema);
            if (partitionsList != null) {
                // We found a list of partitions, now let's create a new partition schema with this data.
                String targetSchemaName = createSlavePartitionSchemaName(partitionSchema.getName());
                PartitionSchema targetSchema = slave.findPartitionSchema(targetSchemaName);
                if (targetSchema == null) {
                    targetSchema = new PartitionSchema(targetSchemaName, partitionsList);
                    // add it to the slave if it doesn't exist.
                    slave.getPartitionSchemas().add(targetSchema);
                }
            }
        }
    }
    // Mark this step as done.
    stepPartitionFlag.put(stepMeta, "Y");
}
Also used : PartitionSchema(org.pentaho.di.partition.PartitionSchema) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) List(java.util.List) StepPartitioningMeta(org.pentaho.di.trans.step.StepPartitioningMeta) StepMeta(org.pentaho.di.trans.step.StepMeta)
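
The guard in verifySlavePartitioningConfiguration only fires for steps that actually carry a partition schema. The following is a minimal, hypothetical sketch (not taken from TransSplitter) of how a step ends up in that state; the step name, schema name and partition ids are invented, while the constructors and setters are the same ones used in the examples on this page.

private StepMeta buildPartitionedStep() throws KettlePluginException {
    // Invented names, for illustration only.
    StepMeta step = new StepMeta("sort rows", null);
    PartitionSchema schema = new PartitionSchema("p1", Arrays.asList(new String[] { "part-1", "part-2" }));
    // Same mirror method string as in the test examples further down.
    StepPartitioningMeta partitioning = new StepPartitioningMeta("Mirror to all partitions", schema);
    step.setStepPartitioningMeta(partitioning);
    // Both conditions checked above now hold:
    // getMethodType() != PARTITIONING_METHOD_NONE and getPartitionSchema() != null.
    return step;
}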

Example 12 with StepPartitioningMeta

use of org.pentaho.di.trans.step.StepPartitioningMeta in project pentaho-kettle by pentaho.

the class TransDelegate method elementToDataNode.

public DataNode elementToDataNode(final RepositoryElementInterface element) throws KettleException {
    TransMeta transMeta = (TransMeta) element;
    DataNode rootNode = new DataNode(NODE_TRANS);
    if (transMeta.getPrivateDatabases() != null) {
        // save the names of all the transformation's private databases, see http://jira.pentaho.com/browse/PPP-3405
        String privateDatabaseNames = StringUtils.join(transMeta.getPrivateDatabases(), TRANS_PRIVATE_DATABASE_DELIMITER);
        DataNode privateDatabaseNode = rootNode.addNode(NODE_TRANS_PRIVATE_DATABASES);
        privateDatabaseNode.setProperty(PROP_TRANS_PRIVATE_DATABASE_NAMES, privateDatabaseNames);
    }
    DataNode stepsNode = rootNode.addNode(NODE_STEPS);
    // Also save all the steps in the transformation!
    // 
    int stepNr = 0;
    for (StepMeta step : transMeta.getSteps()) {
        stepNr++;
        // $NON-NLS-1$
        DataNode stepNode = stepsNode.addNode(sanitizeNodeName(step.getName()) + "_" + stepNr + EXT_STEP);
        // Store the main data
        // 
        stepNode.setProperty(PROP_NAME, step.getName());
        stepNode.setProperty(PROP_DESCRIPTION, step.getDescription());
        stepNode.setProperty(PROP_STEP_TYPE, step.getStepID());
        stepNode.setProperty(PROP_STEP_DISTRIBUTE, step.isDistributes());
        stepNode.setProperty(PROP_STEP_ROW_DISTRIBUTION, step.getRowDistribution() == null ? null : step.getRowDistribution().getCode());
        stepNode.setProperty(PROP_STEP_COPIES, step.getCopies());
        stepNode.setProperty(PROP_STEP_COPIES_STRING, step.getCopiesString());
        stepNode.setProperty(PROP_STEP_GUI_LOCATION_X, step.getLocation().x);
        stepNode.setProperty(PROP_STEP_GUI_LOCATION_Y, step.getLocation().y);
        stepNode.setProperty(PROP_STEP_GUI_DRAW, step.isDrawn());
        // Also save the step group attributes map
        // 
        AttributesMapUtil.saveAttributesMap(stepNode, step);
        // Save the step metadata using the repository save method, NOT XML
        // That is because we want to keep the links to databases, conditions, etc by ID, not name.
        // 
        StepMetaInterface stepMetaInterface = step.getStepMetaInterface();
        DataNode stepCustomNode = new DataNode(NODE_STEP_CUSTOM);
        Repository proxy = new RepositoryProxy(stepCustomNode);
        compatibleSaveRep(stepMetaInterface, proxy, null, null);
        stepMetaInterface.saveRep(proxy, proxy.getMetaStore(), null, null);
        stepNode.addNode(stepCustomNode);
        // Save the partitioning information by reference as well...
        // 
        StepPartitioningMeta partitioningMeta = step.getStepPartitioningMeta();
        if (partitioningMeta != null && partitioningMeta.getPartitionSchema() != null && partitioningMeta.isPartitioned()) {
            DataNodeRef ref = new DataNodeRef(partitioningMeta.getPartitionSchema().getObjectId().getId());
            stepNode.setProperty(PROP_PARTITIONING_SCHEMA, ref);
            // method of partitioning
            stepNode.setProperty(PROP_PARTITIONING_METHOD, partitioningMeta.getMethodCode());
            if (partitioningMeta.getPartitioner() != null) {
                DataNode partitionerCustomNode = new DataNode(NODE_PARTITIONER_CUSTOM);
                proxy = new RepositoryProxy(partitionerCustomNode);
                partitioningMeta.getPartitioner().saveRep(proxy, null, null);
                stepNode.addNode(partitionerCustomNode);
            }
        }
        // Save the clustering information as well...
        // 
        stepNode.setProperty(PROP_CLUSTER_SCHEMA,
            step.getClusterSchema() == null ? "" : step.getClusterSchema().getName()); // $NON-NLS-1$
        // Save the error hop metadata
        // 
        StepErrorMeta stepErrorMeta = step.getStepErrorMeta();
        if (stepErrorMeta != null) {
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_SOURCE_STEP,
                stepErrorMeta.getSourceStep() != null ? stepErrorMeta.getSourceStep().getName() : ""); // $NON-NLS-1$
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_TARGET_STEP,
                stepErrorMeta.getTargetStep() != null ? stepErrorMeta.getTargetStep().getName() : ""); // $NON-NLS-1$
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_IS_ENABLED, stepErrorMeta.isEnabled());
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_NR_VALUENAME, stepErrorMeta.getNrErrorsValuename());
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_DESCRIPTIONS_VALUENAME, stepErrorMeta.getErrorDescriptionsValuename());
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_FIELDS_VALUENAME, stepErrorMeta.getErrorFieldsValuename());
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_CODES_VALUENAME, stepErrorMeta.getErrorCodesValuename());
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_MAX_ERRORS, stepErrorMeta.getMaxErrors());
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_MAX_PCT_ERRORS, stepErrorMeta.getMaxPercentErrors());
            stepNode.setProperty(PROP_STEP_ERROR_HANDLING_MIN_PCT_ROWS, stepErrorMeta.getMinPercentRows());
        }
    }
    // Save the notes
    // 
    DataNode notesNode = rootNode.addNode(NODE_NOTES);
    notesNode.setProperty(PROP_NR_NOTES, transMeta.nrNotes());
    for (int i = 0; i < transMeta.nrNotes(); i++) {
        NotePadMeta note = transMeta.getNote(i);
        DataNode noteNode = notesNode.addNode(NOTE_PREFIX + i);
        noteNode.setProperty(PROP_XML, note.getXML());
    }
    // Finally, save the hops
    // 
    DataNode hopsNode = rootNode.addNode(NODE_HOPS);
    hopsNode.setProperty(PROP_NR_HOPS, transMeta.nrTransHops());
    for (int i = 0; i < transMeta.nrTransHops(); i++) {
        TransHopMeta hop = transMeta.getTransHop(i);
        DataNode hopNode = hopsNode.addNode(TRANS_HOP_PREFIX + i);
        hopNode.setProperty(TRANS_HOP_FROM, hop.getFromStep().getName());
        hopNode.setProperty(TRANS_HOP_TO, hop.getToStep().getName());
        hopNode.setProperty(TRANS_HOP_ENABLED, hop.isEnabled());
    }
    // Parameters
    // 
    String[] paramKeys = transMeta.listParameters();
    DataNode paramsNode = rootNode.addNode(NODE_PARAMETERS);
    paramsNode.setProperty(PROP_NR_PARAMETERS, paramKeys == null ? 0 : paramKeys.length);
    for (int idx = 0; idx < paramKeys.length; idx++) {
        DataNode paramNode = paramsNode.addNode(TRANS_PARAM_PREFIX + idx);
        String key = paramKeys[idx];
        String description = transMeta.getParameterDescription(paramKeys[idx]);
        String defaultValue = transMeta.getParameterDefault(paramKeys[idx]);
        // $NON-NLS-1$
        paramNode.setProperty(PARAM_KEY, key != null ? key : "");
        // $NON-NLS-1$
        paramNode.setProperty(PARAM_DEFAULT, defaultValue != null ? defaultValue : "");
        // $NON-NLS-1$
        paramNode.setProperty(PARAM_DESC, description != null ? description : "");
    }
    // Let's not forget to save the details of the transformation itself.
    // This includes logging information, parameters, etc.
    // 
    saveTransformationDetails(rootNode, transMeta);
    return rootNode;
}
Also used : DataNodeRef(org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef) TransMeta(org.pentaho.di.trans.TransMeta) StepMetaInterface(org.pentaho.di.trans.step.StepMetaInterface) StepErrorMeta(org.pentaho.di.trans.step.StepErrorMeta) StepPartitioningMeta(org.pentaho.di.trans.step.StepPartitioningMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) Repository(org.pentaho.di.repository.Repository) IUnifiedRepository(org.pentaho.platform.api.repository2.unified.IUnifiedRepository) DataNode(org.pentaho.platform.api.repository2.unified.data.node.DataNode) NotePadMeta(org.pentaho.di.core.NotePadMeta) TransHopMeta(org.pentaho.di.trans.TransHopMeta)
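
The partitioning block in elementToDataNode persists only a DataNodeRef to the partition schema plus the method code returned by getMethodCode(); dataNodeToElement (Example 13) later resolves that code again with StepPartitioningMeta.getMethod(). Below is a rough sketch of that round trip under the assumption that a plain string property is enough; the key "partitioning_method" is a placeholder, not the real PROP_PARTITIONING_METHOD constant.

private void roundTripPartitioningMethod(DataNode stepNode, StepMeta step) throws KettleException {
    StepPartitioningMeta saved = step.getStepPartitioningMeta();
    if (saved != null && saved.isPartitioned()) {
        // save side: the partitioning method is stored as its string code
        stepNode.setProperty("partitioning_method", saved.getMethodCode());
    }
    // load side: the code is translated back into the method constant
    StepPartitioningMeta loaded = new StepPartitioningMeta();
    if (stepNode.hasProperty("partitioning_method")) {
        String methodCode = stepNode.getProperty("partitioning_method").getString();
        loaded.setMethod(StepPartitioningMeta.getMethod(methodCode));
    }
}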

Example 13 with StepPartitioningMeta

use of org.pentaho.di.trans.step.StepPartitioningMeta in project pentaho-kettle by pentaho.

the class TransDelegate method dataNodeToElement.

public void dataNodeToElement(final DataNode rootNode, final RepositoryElementInterface element) throws KettleException {
    TransMeta transMeta = (TransMeta) element;
    Set<String> privateDatabases = null;
    // read the private databases
    DataNode privateDbsNode = rootNode.getNode(NODE_TRANS_PRIVATE_DATABASES);
    // BACKLOG-6635
    if (privateDbsNode != null) {
        privateDatabases = new HashSet<String>();
        if (privateDbsNode.hasProperty(PROP_TRANS_PRIVATE_DATABASE_NAMES)) {
            for (String privateDatabaseName : getString(privateDbsNode, PROP_TRANS_PRIVATE_DATABASE_NAMES).split(TRANS_PRIVATE_DATABASE_DELIMITER)) {
                if (!privateDatabaseName.isEmpty()) {
                    privateDatabases.add(privateDatabaseName);
                }
            }
        } else {
            for (DataNode privateDatabase : privateDbsNode.getNodes()) {
                privateDatabases.add(privateDatabase.getName());
            }
        }
    }
    transMeta.setPrivateDatabases(privateDatabases);
    // read the steps...
    // 
    DataNode stepsNode = rootNode.getNode(NODE_STEPS);
    for (DataNode stepNode : stepsNode.getNodes()) {
        StepMeta stepMeta = new StepMeta(new StringObjectId(stepNode.getId().toString()));
        // for tracing, retain hierarchy
        stepMeta.setParentTransMeta(transMeta);
        // Read the basics
        // 
        stepMeta.setName(getString(stepNode, PROP_NAME));
        if (stepNode.hasProperty(PROP_DESCRIPTION)) {
            stepMeta.setDescription(getString(stepNode, PROP_DESCRIPTION));
        }
        stepMeta.setDistributes(stepNode.getProperty(PROP_STEP_DISTRIBUTE).getBoolean());
        DataProperty rowDistributionProperty = stepNode.getProperty(PROP_STEP_ROW_DISTRIBUTION);
        String rowDistributionCode = rowDistributionProperty == null ? null : rowDistributionProperty.getString();
        RowDistributionInterface rowDistribution = PluginRegistry.getInstance().loadClass(RowDistributionPluginType.class, rowDistributionCode, RowDistributionInterface.class);
        stepMeta.setRowDistribution(rowDistribution);
        stepMeta.setDraw(stepNode.getProperty(PROP_STEP_GUI_DRAW).getBoolean());
        int copies = (int) stepNode.getProperty(PROP_STEP_COPIES).getLong();
        String copiesString = stepNode.getProperty(PROP_STEP_COPIES_STRING) != null ? stepNode.getProperty(PROP_STEP_COPIES_STRING).getString() : StringUtils.EMPTY;
        if (!Utils.isEmpty(copiesString)) {
            stepMeta.setCopiesString(copiesString);
        } else {
            // for backward compatibility
            stepMeta.setCopies(copies);
        }
        int x = (int) stepNode.getProperty(PROP_STEP_GUI_LOCATION_X).getLong();
        int y = (int) stepNode.getProperty(PROP_STEP_GUI_LOCATION_Y).getLong();
        stepMeta.setLocation(x, y);
        // Load the group attributes map
        // 
        AttributesMapUtil.loadAttributesMap(stepNode, stepMeta);
        String stepType = getString(stepNode, PROP_STEP_TYPE);
        // Create a new StepMetaInterface object...
        // 
        PluginRegistry registry = PluginRegistry.getInstance();
        PluginInterface stepPlugin = registry.findPluginWithId(StepPluginType.class, stepType);
        StepMetaInterface stepMetaInterface = null;
        if (stepPlugin != null) {
            stepMetaInterface = (StepMetaInterface) registry.loadClass(stepPlugin);
            // revert to the default in case we loaded an alternate version
            stepType = stepPlugin.getIds()[0];
        } else {
            stepMeta.setStepMetaInterface((StepMetaInterface) new MissingTrans(stepMeta.getName(), stepType));
            transMeta.addMissingTrans((MissingTrans) stepMeta.getStepMetaInterface());
        }
        stepMeta.setStepID(stepType);
        // Read the metadata from the repository too...
        // 
        RepositoryProxy proxy = new RepositoryProxy(stepNode.getNode(NODE_STEP_CUSTOM));
        if (!stepMeta.isMissing()) {
            readRepCompatibleStepMeta(stepMetaInterface, proxy, null, transMeta.getDatabases());
            stepMetaInterface.readRep(proxy, transMeta.getMetaStore(), null, transMeta.getDatabases());
            stepMeta.setStepMetaInterface(stepMetaInterface);
        }
        // Get the partitioning as well...
        StepPartitioningMeta stepPartitioningMeta = new StepPartitioningMeta();
        if (stepNode.hasProperty(PROP_PARTITIONING_SCHEMA)) {
            String partSchemaId = stepNode.getProperty(PROP_PARTITIONING_SCHEMA).getRef().getId().toString();
            String schemaName = repo.loadPartitionSchema(new StringObjectId(partSchemaId), null).getName();
            stepPartitioningMeta.setPartitionSchemaName(schemaName);
            String methodCode = getString(stepNode, PROP_PARTITIONING_METHOD);
            stepPartitioningMeta.setMethod(StepPartitioningMeta.getMethod(methodCode));
            if (stepPartitioningMeta.getPartitioner() != null) {
                proxy = new RepositoryProxy(stepNode.getNode(NODE_PARTITIONER_CUSTOM));
                stepPartitioningMeta.getPartitioner().loadRep(proxy, null);
            }
            stepPartitioningMeta.hasChanged(true);
        }
        stepMeta.setStepPartitioningMeta(stepPartitioningMeta);
        stepMeta.getStepPartitioningMeta().setPartitionSchemaAfterLoading(transMeta.getPartitionSchemas());
        // Get the cluster schema name
        String clusterSchemaName = getString(stepNode, PROP_CLUSTER_SCHEMA);
        stepMeta.setClusterSchemaName(clusterSchemaName);
        if (clusterSchemaName != null && transMeta.getClusterSchemas() != null) {
            // Get the cluster schema from the given name
            for (ClusterSchema clusterSchema : transMeta.getClusterSchemas()) {
                if (clusterSchema.getName().equals(clusterSchemaName)) {
                    stepMeta.setClusterSchema(clusterSchema);
                    break;
                }
            }
        }
        transMeta.addStep(stepMeta);
    }
    for (DataNode stepNode : stepsNode.getNodes()) {
        ObjectId stepObjectId = new StringObjectId(stepNode.getId().toString());
        StepMeta stepMeta = StepMeta.findStep(transMeta.getSteps(), stepObjectId);
        // Load the step error handling metadata, if any was saved
        //
        if (stepNode.hasProperty(PROP_STEP_ERROR_HANDLING_SOURCE_STEP)) {
            StepErrorMeta meta = new StepErrorMeta(transMeta, stepMeta);
            meta.setTargetStep(StepMeta.findStep(transMeta.getSteps(), stepNode.getProperty(PROP_STEP_ERROR_HANDLING_TARGET_STEP).getString()));
            meta.setEnabled(stepNode.getProperty(PROP_STEP_ERROR_HANDLING_IS_ENABLED).getBoolean());
            meta.setNrErrorsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_NR_VALUENAME));
            meta.setErrorDescriptionsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_DESCRIPTIONS_VALUENAME));
            meta.setErrorFieldsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_FIELDS_VALUENAME));
            meta.setErrorCodesValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_CODES_VALUENAME));
            meta.setMaxErrors(getString(stepNode, PROP_STEP_ERROR_HANDLING_MAX_ERRORS));
            meta.setMaxPercentErrors(getString(stepNode, PROP_STEP_ERROR_HANDLING_MAX_PCT_ERRORS));
            meta.setMinPercentRows(getString(stepNode, PROP_STEP_ERROR_HANDLING_MIN_PCT_ROWS));
            // a bit of a trick, I know.
            meta.getSourceStep().setStepErrorMeta(meta);
        }
    }
    // Let each step's metadata resolve references to its info and target steps
    //
    for (int i = 0; i < transMeta.nrSteps(); i++) {
        StepMeta stepMeta = transMeta.getStep(i);
        StepMetaInterface sii = stepMeta.getStepMetaInterface();
        if (sii != null) {
            sii.searchInfoAndTargetSteps(transMeta.getSteps());
        }
    }
    // Read the notes...
    // 
    DataNode notesNode = rootNode.getNode(NODE_NOTES);
    int nrNotes = (int) notesNode.getProperty(PROP_NR_NOTES).getLong();
    for (DataNode noteNode : notesNode.getNodes()) {
        String xml = getString(noteNode, PROP_XML);
        transMeta.addNote(new NotePadMeta(XMLHandler.getSubNode(XMLHandler.loadXMLString(xml), NotePadMeta.XML_TAG)));
    }
    if (transMeta.nrNotes() != nrNotes) {
        throw new KettleException("The number of notes read [" + transMeta.nrNotes() + "] was not the number we expected [" + nrNotes + "]");
    }
    // Read the hops...
    // 
    DataNode hopsNode = rootNode.getNode(NODE_HOPS);
    int nrHops = (int) hopsNode.getProperty(PROP_NR_HOPS).getLong();
    for (DataNode hopNode : hopsNode.getNodes()) {
        String stepFromName = getString(hopNode, TRANS_HOP_FROM);
        String stepToName = getString(hopNode, TRANS_HOP_TO);
        boolean enabled = true;
        if (hopNode.hasProperty(TRANS_HOP_ENABLED)) {
            enabled = hopNode.getProperty(TRANS_HOP_ENABLED).getBoolean();
        }
        StepMeta stepFrom = StepMeta.findStep(transMeta.getSteps(), stepFromName);
        StepMeta stepTo = StepMeta.findStep(transMeta.getSteps(), stepToName);
        // Only create the hop if both steps could be found
        //
        if (stepFrom != null && stepTo != null) {
            transMeta.addTransHop(new TransHopMeta(stepFrom, stepTo, enabled));
        }
    }
    if (transMeta.nrTransHops() != nrHops) {
        throw new KettleException("The number of hops read [" + transMeta.nrTransHops() + "] was not the number we expected [" + nrHops + "]");
    }
    // Load the details at the end, to make sure we reference the databases correctly, etc.
    // 
    loadTransformationDetails(rootNode, transMeta);
    transMeta.eraseParameters();
    DataNode paramsNode = rootNode.getNode(NODE_PARAMETERS);
    int count = (int) paramsNode.getProperty(PROP_NR_PARAMETERS).getLong();
    for (int idx = 0; idx < count; idx++) {
        DataNode paramNode = paramsNode.getNode(TRANS_PARAM_PREFIX + idx);
        String key = getString(paramNode, PARAM_KEY);
        String def = getString(paramNode, PARAM_DEFAULT);
        String desc = getString(paramNode, PARAM_DESC);
        transMeta.addParameterDefinition(key, def, desc);
    }
    transMeta.activateParameters();
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) StringObjectId(org.pentaho.di.repository.StringObjectId) ObjectId(org.pentaho.di.repository.ObjectId) PluginInterface(org.pentaho.di.core.plugins.PluginInterface) TransMeta(org.pentaho.di.trans.TransMeta) StepMetaInterface(org.pentaho.di.trans.step.StepMetaInterface) StepErrorMeta(org.pentaho.di.trans.step.StepErrorMeta) DataProperty(org.pentaho.platform.api.repository2.unified.data.node.DataProperty) StepPartitioningMeta(org.pentaho.di.trans.step.StepPartitioningMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) StringObjectId(org.pentaho.di.repository.StringObjectId) DataNode(org.pentaho.platform.api.repository2.unified.data.node.DataNode) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) RowDistributionInterface(org.pentaho.di.trans.step.RowDistributionInterface) MissingTrans(org.pentaho.di.trans.steps.missing.MissingTrans) NotePadMeta(org.pentaho.di.core.NotePadMeta) TransHopMeta(org.pentaho.di.trans.TransHopMeta) ClusterSchema(org.pentaho.di.cluster.ClusterSchema)
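
Right after reading the repository node, the loader above only knows the partition schema by name; setPartitionSchemaAfterLoading() then replaces that name with the actual PartitionSchema object registered on the transformation. A small sketch of that resolution step, with invented names and assuming the schema has already been loaded into the TransMeta:

private void resolvePartitionSchema(TransMeta transMeta) {
    PartitionSchema schema = new PartitionSchema("p1", Arrays.asList(new String[] { "part-1", "part-2" }));
    transMeta.getPartitionSchemas().add(schema);

    StepPartitioningMeta partitioning = new StepPartitioningMeta();
    // Only the name is known right after reading the step node...
    partitioning.setPartitionSchemaName("p1");
    // ...until it is matched against the schemas held by the transformation.
    partitioning.setPartitionSchemaAfterLoading(transMeta.getPartitionSchemas());
    // partitioning.getPartitionSchema() should now refer to the schema added above.
}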

Example 14 with StepPartitioningMeta

use of org.pentaho.di.trans.step.StepPartitioningMeta in project pentaho-kettle by pentaho.

the class TransPartitioningTest method prepareStepMetas_x2_cl1.

/**
 * This is a case where the first step runs 2 copies and the next step is partitioned.
 *
 * @throws KettlePluginException
 */
private void prepareStepMetas_x2_cl1() throws KettlePluginException {
    StepMeta dummy1 = new StepMeta(ONE, null);
    StepMeta dummy2 = new StepMeta(TWO, null);
    PartitionSchema schema1 = new PartitionSchema("p1", Arrays.asList(new String[] { PID1, PID2 }));
    StepPartitioningMeta partMeta1 = new StepPartitioningMeta("Mirror to all partitions", schema1);
    dummy2.setStepPartitioningMeta(partMeta1);
    dummy1.setCopies(2);
    chain.add(dummy1);
    chain.add(dummy2);
    for (StepMeta item : chain) {
        item.setStepMetaInterface(new DummyTransMeta());
    }
}
Also used : PartitionSchema(org.pentaho.di.partition.PartitionSchema) StepPartitioningMeta(org.pentaho.di.trans.step.StepPartitioningMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) DummyTransMeta(org.pentaho.di.trans.steps.dummytrans.DummyTransMeta)

Example 15 with StepPartitioningMeta

use of org.pentaho.di.trans.step.StepPartitioningMeta in project pentaho-kettle by pentaho.

the class TransPartitioningTest method prepareStepMetas_1_cl1.

/**
 * This is a case where we have a 1 step to 1 clustered step distribution.
 *
 * @throws KettlePluginException
 */
private void prepareStepMetas_1_cl1() throws KettlePluginException {
    StepMeta dummy1 = new StepMeta(ONE, null);
    StepMeta dummy2 = new StepMeta(TWO, null);
    PartitionSchema schema = new PartitionSchema("p1", Arrays.asList(new String[] { PID1, PID2 }));
    StepPartitioningMeta partMeta = new StepPartitioningMeta("Mirror to all partitions", schema);
    dummy2.setStepPartitioningMeta(partMeta);
    chain.add(dummy1);
    chain.add(dummy2);
    for (StepMeta item : chain) {
        item.setStepMetaInterface(new DummyTransMeta());
    }
}
Also used : PartitionSchema(org.pentaho.di.partition.PartitionSchema) StepPartitioningMeta(org.pentaho.di.trans.step.StepPartitioningMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) DummyTransMeta(org.pentaho.di.trans.steps.dummytrans.DummyTransMeta)
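
Both test helpers only assemble the step chain; the surrounding test still has to register the steps, the schema and a hop on a TransMeta before partitioning can be exercised. The sketch below shows one plausible way to do that wiring, using only calls that already appear in the examples above (the helper name and local variables are invented).

private TransMeta wireChain(List<StepMeta> chain, PartitionSchema schema) {
    TransMeta transMeta = new TransMeta();
    transMeta.getPartitionSchemas().add(schema);
    StepMeta previous = null;
    for (StepMeta step : chain) {
        transMeta.addStep(step);
        if (previous != null) {
            // enabled hop from the previous step to the current one
            transMeta.addTransHop(new TransHopMeta(previous, step, true));
        }
        previous = step;
    }
    return transMeta;
}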

Aggregations

StepPartitioningMeta (org.pentaho.di.trans.step.StepPartitioningMeta): 23 uses
PartitionSchema (org.pentaho.di.partition.PartitionSchema): 18 uses
StepMeta (org.pentaho.di.trans.step.StepMeta): 14 uses
KettleException (org.pentaho.di.core.exception.KettleException): 8 uses
TransMeta (org.pentaho.di.trans.TransMeta): 7 uses
Test (org.junit.Test): 6 uses
StepMetaInterface (org.pentaho.di.trans.step.StepMetaInterface): 5 uses
DummyTransMeta (org.pentaho.di.trans.steps.dummytrans.DummyTransMeta): 5 uses
ArrayList (java.util.ArrayList): 4 uses
LinkedList (java.util.LinkedList): 4 uses
List (java.util.List): 4 uses
ClusterSchema (org.pentaho.di.cluster.ClusterSchema): 4 uses
NotePadMeta (org.pentaho.di.core.NotePadMeta): 4 uses
TransHopMeta (org.pentaho.di.trans.TransHopMeta): 4 uses
StepErrorMeta (org.pentaho.di.trans.step.StepErrorMeta): 4 uses
SlaveServer (org.pentaho.di.cluster.SlaveServer): 3 uses
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 3 uses
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 3 uses
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 3 uses
HashMap (java.util.HashMap): 2 uses