use of org.pentaho.di.trans.TransHopMeta in project pentaho-kettle by pentaho.
the class TransDelegate method dataNodeToElement.
public void dataNodeToElement(final DataNode rootNode, final RepositoryElementInterface element) throws KettleException {
  TransMeta transMeta = (TransMeta) element;
  Set<String> privateDatabases = null;
  // read the private databases
  DataNode privateDbsNode = rootNode.getNode(NODE_TRANS_PRIVATE_DATABASES);
  // BACKLOG-6635
  if (privateDbsNode != null) {
    privateDatabases = new HashSet<String>();
    if (privateDbsNode.hasProperty(PROP_TRANS_PRIVATE_DATABASE_NAMES)) {
      for (String privateDatabaseName : getString(privateDbsNode, PROP_TRANS_PRIVATE_DATABASE_NAMES).split(TRANS_PRIVATE_DATABASE_DELIMITER)) {
        if (!privateDatabaseName.isEmpty()) {
          privateDatabases.add(privateDatabaseName);
        }
      }
    } else {
      for (DataNode privateDatabase : privateDbsNode.getNodes()) {
        privateDatabases.add(privateDatabase.getName());
      }
    }
  }
  transMeta.setPrivateDatabases(privateDatabases);
  // read the steps...
  //
  DataNode stepsNode = rootNode.getNode(NODE_STEPS);
  for (DataNode stepNode : stepsNode.getNodes()) {
    StepMeta stepMeta = new StepMeta(new StringObjectId(stepNode.getId().toString()));
    // for tracing, retain hierarchy
    stepMeta.setParentTransMeta(transMeta);
    // Read the basics
    //
    stepMeta.setName(getString(stepNode, PROP_NAME));
    if (stepNode.hasProperty(PROP_DESCRIPTION)) {
      stepMeta.setDescription(getString(stepNode, PROP_DESCRIPTION));
    }
    stepMeta.setDistributes(stepNode.getProperty(PROP_STEP_DISTRIBUTE).getBoolean());
    DataProperty rowDistributionProperty = stepNode.getProperty(PROP_STEP_ROW_DISTRIBUTION);
    String rowDistributionCode = rowDistributionProperty == null ? null : rowDistributionProperty.getString();
    RowDistributionInterface rowDistribution = PluginRegistry.getInstance().loadClass(RowDistributionPluginType.class, rowDistributionCode, RowDistributionInterface.class);
    stepMeta.setRowDistribution(rowDistribution);
    stepMeta.setDraw(stepNode.getProperty(PROP_STEP_GUI_DRAW).getBoolean());
    int copies = (int) stepNode.getProperty(PROP_STEP_COPIES).getLong();
    String copiesString = stepNode.getProperty(PROP_STEP_COPIES_STRING) != null ? stepNode.getProperty(PROP_STEP_COPIES_STRING).getString() : StringUtils.EMPTY;
    if (!Utils.isEmpty(copiesString)) {
      stepMeta.setCopiesString(copiesString);
    } else {
      // for backward compatibility
      stepMeta.setCopies(copies);
    }
    int x = (int) stepNode.getProperty(PROP_STEP_GUI_LOCATION_X).getLong();
    int y = (int) stepNode.getProperty(PROP_STEP_GUI_LOCATION_Y).getLong();
    stepMeta.setLocation(x, y);
    // Load the group attributes map
    //
    AttributesMapUtil.loadAttributesMap(stepNode, stepMeta);
    String stepType = getString(stepNode, PROP_STEP_TYPE);
    // Create a new StepMetaInterface object...
    //
    PluginRegistry registry = PluginRegistry.getInstance();
    PluginInterface stepPlugin = registry.findPluginWithId(StepPluginType.class, stepType);
    StepMetaInterface stepMetaInterface = null;
    if (stepPlugin != null) {
      stepMetaInterface = (StepMetaInterface) registry.loadClass(stepPlugin);
      // revert to the default in case we loaded an alternate version
      stepType = stepPlugin.getIds()[0];
    } else {
      stepMeta.setStepMetaInterface((StepMetaInterface) new MissingTrans(stepMeta.getName(), stepType));
      transMeta.addMissingTrans((MissingTrans) stepMeta.getStepMetaInterface());
    }
    stepMeta.setStepID(stepType);
    // Read the metadata from the repository too...
    //
    RepositoryProxy proxy = new RepositoryProxy(stepNode.getNode(NODE_STEP_CUSTOM));
    if (!stepMeta.isMissing()) {
      readRepCompatibleStepMeta(stepMetaInterface, proxy, null, transMeta.getDatabases());
      stepMetaInterface.readRep(proxy, transMeta.getMetaStore(), null, transMeta.getDatabases());
      stepMeta.setStepMetaInterface(stepMetaInterface);
    }
    // Get the partitioning as well...
    StepPartitioningMeta stepPartitioningMeta = new StepPartitioningMeta();
    if (stepNode.hasProperty(PROP_PARTITIONING_SCHEMA)) {
      String partSchemaId = stepNode.getProperty(PROP_PARTITIONING_SCHEMA).getRef().getId().toString();
      String schemaName = repo.loadPartitionSchema(new StringObjectId(partSchemaId), null).getName();
      stepPartitioningMeta.setPartitionSchemaName(schemaName);
      String methodCode = getString(stepNode, PROP_PARTITIONING_METHOD);
      stepPartitioningMeta.setMethod(StepPartitioningMeta.getMethod(methodCode));
      if (stepPartitioningMeta.getPartitioner() != null) {
        proxy = new RepositoryProxy(stepNode.getNode(NODE_PARTITIONER_CUSTOM));
        stepPartitioningMeta.getPartitioner().loadRep(proxy, null);
      }
      stepPartitioningMeta.hasChanged(true);
    }
    stepMeta.setStepPartitioningMeta(stepPartitioningMeta);
    stepMeta.getStepPartitioningMeta().setPartitionSchemaAfterLoading(transMeta.getPartitionSchemas());
    // Get the cluster schema name
    String clusterSchemaName = getString(stepNode, PROP_CLUSTER_SCHEMA);
    stepMeta.setClusterSchemaName(clusterSchemaName);
    if (clusterSchemaName != null && transMeta.getClusterSchemas() != null) {
      // Get the cluster schema from the given name
      for (ClusterSchema clusterSchema : transMeta.getClusterSchemas()) {
        if (clusterSchema.getName().equals(clusterSchemaName)) {
          stepMeta.setClusterSchema(clusterSchema);
          break;
        }
      }
    }
    transMeta.addStep(stepMeta);
  }
  for (DataNode stepNode : stepsNode.getNodes()) {
    ObjectId stepObjectId = new StringObjectId(stepNode.getId().toString());
    StepMeta stepMeta = StepMeta.findStep(transMeta.getSteps(), stepObjectId);
    //
    if (stepNode.hasProperty(PROP_STEP_ERROR_HANDLING_SOURCE_STEP)) {
      StepErrorMeta meta = new StepErrorMeta(transMeta, stepMeta);
      meta.setTargetStep(StepMeta.findStep(transMeta.getSteps(), stepNode.getProperty(PROP_STEP_ERROR_HANDLING_TARGET_STEP).getString()));
      meta.setEnabled(stepNode.getProperty(PROP_STEP_ERROR_HANDLING_IS_ENABLED).getBoolean());
      meta.setNrErrorsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_NR_VALUENAME));
      meta.setErrorDescriptionsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_DESCRIPTIONS_VALUENAME));
      meta.setErrorFieldsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_FIELDS_VALUENAME));
      meta.setErrorCodesValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_CODES_VALUENAME));
      meta.setMaxErrors(getString(stepNode, PROP_STEP_ERROR_HANDLING_MAX_ERRORS));
      meta.setMaxPercentErrors(getString(stepNode, PROP_STEP_ERROR_HANDLING_MAX_PCT_ERRORS));
      meta.setMinPercentRows(getString(stepNode, PROP_STEP_ERROR_HANDLING_MIN_PCT_ROWS));
      // a bit of a trick, I know.
      meta.getSourceStep().setStepErrorMeta(meta);
    }
  }
  //
  for (int i = 0; i < transMeta.nrSteps(); i++) {
    StepMeta stepMeta = transMeta.getStep(i);
    StepMetaInterface sii = stepMeta.getStepMetaInterface();
    if (sii != null) {
      sii.searchInfoAndTargetSteps(transMeta.getSteps());
    }
  }
  // Read the notes...
  //
  DataNode notesNode = rootNode.getNode(NODE_NOTES);
  int nrNotes = (int) notesNode.getProperty(PROP_NR_NOTES).getLong();
  for (DataNode noteNode : notesNode.getNodes()) {
    String xml = getString(noteNode, PROP_XML);
    transMeta.addNote(new NotePadMeta(XMLHandler.getSubNode(XMLHandler.loadXMLString(xml), NotePadMeta.XML_TAG)));
  }
  if (transMeta.nrNotes() != nrNotes) {
    throw new KettleException("The number of notes read [" + transMeta.nrNotes() + "] was not the number we expected [" + nrNotes + "]");
  }
  // Read the hops...
  //
  DataNode hopsNode = rootNode.getNode(NODE_HOPS);
  int nrHops = (int) hopsNode.getProperty(PROP_NR_HOPS).getLong();
  for (DataNode hopNode : hopsNode.getNodes()) {
    String stepFromName = getString(hopNode, TRANS_HOP_FROM);
    String stepToName = getString(hopNode, TRANS_HOP_TO);
    boolean enabled = true;
    if (hopNode.hasProperty(TRANS_HOP_ENABLED)) {
      enabled = hopNode.getProperty(TRANS_HOP_ENABLED).getBoolean();
    }
    StepMeta stepFrom = StepMeta.findStep(transMeta.getSteps(), stepFromName);
    StepMeta stepTo = StepMeta.findStep(transMeta.getSteps(), stepToName);
    //
    if (stepFrom != null && stepTo != null) {
      transMeta.addTransHop(new TransHopMeta(stepFrom, stepTo, enabled));
    }
  }
  if (transMeta.nrTransHops() != nrHops) {
    throw new KettleException("The number of hops read [" + transMeta.nrTransHops() + "] was not the number we expected [" + nrHops + "]");
  }
  // Load the details at the end, to make sure we reference the databases correctly, etc.
  //
  loadTransformationDetails(rootNode, transMeta);
  transMeta.eraseParameters();
  DataNode paramsNode = rootNode.getNode(NODE_PARAMETERS);
  int count = (int) paramsNode.getProperty(PROP_NR_PARAMETERS).getLong();
  for (int idx = 0; idx < count; idx++) {
    DataNode paramNode = paramsNode.getNode(TRANS_PARAM_PREFIX + idx);
    String key = getString(paramNode, PARAM_KEY);
    String def = getString(paramNode, PARAM_DEFAULT);
    String desc = getString(paramNode, PARAM_DESC);
    transMeta.addParameterDefinition(key, def, desc);
  }
  transMeta.activateParameters();
}
use of org.pentaho.di.trans.TransHopMeta in project pentaho-kettle by pentaho.
the class TransMetaConverterTest method errorHops.
@Test
public void errorHops() throws Exception {
  TransMeta meta = new TransMeta();
  meta.setFilename("fileName");
  StepMeta from = new StepMeta("step1", stepMetaInterface);
  meta.addStep(from);
  StepMeta to = new StepMeta("step2", stepMetaInterface);
  meta.addStep(to);
  meta.addTransHop(new TransHopMeta(from, to));
  StepMeta error = new StepMeta("errorHandler", stepMetaInterface);
  meta.addStep(error);
  TransHopMeta errorHop = new TransHopMeta(from, error);
  errorHop.setErrorHop(true);
  meta.addTransHop(errorHop);
  Transformation trans = TransMetaConverter.convert(meta);
  Map<String, List<Hop>> hops = trans.getHops().stream().collect(Collectors.groupingBy(Hop::getType));
  List<Hop> normalHops = hops.get(Hop.TYPE_NORMAL);
  assertThat(normalHops.size(), is(1));
  assertThat(normalHops.get(0).getTo().getId(), is("step2"));
  List<Hop> errorHops = hops.get(Hop.TYPE_ERROR);
  assertThat(errorHops.size(), is(1));
  assertThat(errorHops.get(0).getTo().getId(), is("errorHandler"));
  assertThat(hops.values().stream().flatMap(List::stream).map(Hop::getFrom).map(Operation::getId).collect(Collectors.toList()), everyItem(equalTo("step1")));
}
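The test drives TransMetaConverter purely through the hop's error flag (errorHop.setErrorHop(true)). In transformations that are built or loaded normally, the same relationship is also carried on the source step through StepErrorMeta, as the TransDelegate snippet above shows when reading from the repository. A hedged sketch of that complementary, step-level wiring, reusing only the StepErrorMeta calls that appear in that snippet (variable names follow this test):

  // Sketch only: attach step-level error handling so 'from' routes error rows to 'error'.
  StepErrorMeta stepErrorMeta = new StepErrorMeta(meta, from);
  stepErrorMeta.setTargetStep(error);
  stepErrorMeta.setEnabled(true);
  from.setStepErrorMeta(stepErrorMeta);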
use of org.pentaho.di.trans.TransHopMeta in project pentaho-kettle by pentaho.
the class TransMetaConverterTest method testDisabledHops.
@Test
public void testDisabledHops() {
  TransMeta trans = new TransMeta();
  StepMeta start = new StepMeta("Start", stepMetaInterface);
  trans.addStep(start);
  StepMeta withEnabledHop = new StepMeta("WithEnabledHop", stepMetaInterface);
  trans.addStep(withEnabledHop);
  StepMeta withDisabledHop = new StepMeta("WithDisabledHop", stepMetaInterface);
  trans.addStep(withDisabledHop);
  StepMeta shouldStay = new StepMeta("ShouldStay", stepMetaInterface);
  trans.addStep(shouldStay);
  StepMeta shouldNotStay = new StepMeta("ShouldNotStay", stepMetaInterface);
  trans.addStep(shouldNotStay);
  StepMeta withEnabledAndDisabledHops = new StepMeta("WithEnabledAndDisabledHops", stepMetaInterface);
  trans.addStep(withEnabledAndDisabledHops);
  StepMeta afterEnabledDisabled = new StepMeta("AfterEnabledDisabled", stepMetaInterface);
  trans.addStep(afterEnabledDisabled);
  trans.addTransHop(new TransHopMeta(start, withEnabledHop));
  trans.addTransHop(new TransHopMeta(start, withDisabledHop, false));
  trans.addTransHop(new TransHopMeta(withEnabledHop, shouldStay));
  trans.addTransHop(new TransHopMeta(withDisabledHop, shouldStay));
  trans.addTransHop(new TransHopMeta(withDisabledHop, shouldNotStay));
  trans.addTransHop(new TransHopMeta(start, withEnabledAndDisabledHops));
  trans.addTransHop(new TransHopMeta(withEnabledHop, withEnabledAndDisabledHops, false));
  trans.addTransHop(new TransHopMeta(withEnabledAndDisabledHops, afterEnabledDisabled));
  Transformation transformation = TransMetaConverter.convert(trans);
  List<String> steps = transformation.getOperations().stream().map(op -> op.getId()).collect(Collectors.toList());
  assertThat("Only 5 ops should exist", steps.size(), is(5));
  assertThat(steps, hasItems("Start", "WithEnabledHop", "ShouldStay", "WithEnabledAndDisabledHops", "AfterEnabledDisabled"));
  List<String> hops = transformation.getHops().stream().map(hop -> hop.getId()).collect(Collectors.toList());
  assertThat("Only 4 hops should exist", hops.size(), is(4));
  assertThat(hops, hasItems("Start -> WithEnabledHop", "WithEnabledHop -> ShouldStay", "Start -> WithEnabledAndDisabledHops", "WithEnabledAndDisabledHops -> AfterEnabledDisabled"));
}
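The disabled hops above are created with the three-argument TransHopMeta constructor. A hop built with the two-argument constructor can presumably be disabled after the fact as well; a minimal sketch, assuming TransHopMeta exposes a setEnabled(boolean) mutator (an assumption, not shown in this test):

  // Sketch only: assumed equivalent to new TransHopMeta(start, withDisabledHop, false) above.
  TransHopMeta disabledHop = new TransHopMeta(start, withDisabledHop);
  disabledHop.setEnabled(false);
  trans.addTransHop(disabledHop);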
use of org.pentaho.di.trans.TransHopMeta in project pentaho-kettle by pentaho.
the class CheckSumTest method buildHexadecimalChecksumTrans.
private Trans buildHexadecimalChecksumTrans(int checkSumType, boolean compatibilityMode, boolean oldChecksumBehaviour) throws Exception {
  // Create a new transformation...
  TransMeta transMeta = new TransMeta();
  transMeta.setName(getClass().getName());
  // Create a CheckSum Step
  String checkSumStepname = "CheckSum";
  CheckSumMeta meta = new CheckSumMeta();
  // Set the compatibility mode and other required fields
  meta.setCompatibilityMode(compatibilityMode);
  meta.setResultFieldName("hex");
  meta.setCheckSumType(checkSumType);
  meta.setResultType(CheckSumMeta.result_TYPE_HEXADECIMAL);
  meta.setFieldName(new String[] { "test" });
  meta.setOldChecksumBehaviour(oldChecksumBehaviour);
  String checkSumPluginPid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, meta);
  StepMeta checkSumStep = new StepMeta(checkSumPluginPid, checkSumStepname, meta);
  transMeta.addStep(checkSumStep);
  // Create a Dummy step
  String dummyStepname = "Output";
  DummyTransMeta dummyMeta = new DummyTransMeta();
  String dummyStepPid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, dummyMeta);
  StepMeta dummyStep = new StepMeta(dummyStepPid, dummyStepname, dummyMeta);
  transMeta.addStep(dummyStep);
  // Create a hop from CheckSum to Output
  TransHopMeta hop = new TransHopMeta(checkSumStep, dummyStep);
  transMeta.addTransHop(hop);
  return new Trans(transMeta);
}
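This helper only assembles the transformation; execution happens elsewhere in the test. A rough sketch of how the returned Trans might be driven, using the same execution calls that appear in the TableInputIT example further down (prepareExecution, addRowProducer, startThreads, waitUntilFinished). The argument values and the choice to attach the row producer directly to the CheckSum step are assumptions, since this helper creates no injector step:

  // Sketch only: one possible way to execute the returned Trans.
  Trans trans = buildHexadecimalChecksumTrans(0, false, false); // arguments are illustrative
  trans.prepareExecution(null);
  RowProducer rp = trans.addRowProducer("CheckSum", 0);
  trans.startThreads();
  // ...push rows containing a "test" field via rp.putRow(...), then:
  rp.finished();
  trans.waitUntilFinished();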
use of org.pentaho.di.trans.TransHopMeta in project pentaho-kettle by pentaho.
the class TableInputIT method testTableInputWithParam.
/**
 * Test case for table input which is taking its input from a hop. This is a regression test case for JIRA PDI-588.
 *
 * The query in the table input step has one '?' and this parameter is filled by values read from an input hop.
 */
public void testTableInputWithParam() throws Exception {
  KettleEnvironment.init();
  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName("transname");
  // Add the database connections
  for (int i = 0; i < databasesXML.length; i++) {
    DatabaseMeta databaseMeta = new DatabaseMeta(databasesXML[i]);
    transMeta.addDatabase(databaseMeta);
  }
  DatabaseMeta dbInfo = transMeta.findDatabase("db");
  // Execute our setup SQLs in the database.
  Database database = new Database(transMeta, dbInfo);
  database.connect();
  createTables(database);
  createData(database);
  PluginRegistry registry = PluginRegistry.getInstance();
  //
  // create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();
  // Set the information of the injector.
  String injectorPid = registry.getPluginId(StepPluginType.class, im);
  StepMeta injectorStep = new StepMeta(injectorPid, injectorStepname, im);
  transMeta.addStep(injectorStep);
  //
  // create the source step...
  //
  String fromstepname = "read from [" + source_table + "]";
  TableInputMeta tii = new TableInputMeta();
  tii.setDatabaseMeta(transMeta.findDatabase("db"));
  tii.setLookupFromStep(injectorStep);
  tii.setExecuteEachInputRow(true);
  String selectSQL = "SELECT " + Const.CR;
  selectSQL += "ID, CODE ";
  selectSQL += "FROM " + source_table + " WHERE CODE = ? ORDER BY ID, CODE;";
  tii.setSQL(selectSQL);
  String fromstepid = registry.getPluginId(StepPluginType.class, tii);
  StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
  fromstep.setDescription("Reads information from table [" + source_table + "] on database [" + dbInfo + "]");
  transMeta.addStep(fromstep);
  TransHopMeta hi = new TransHopMeta(injectorStep, fromstep);
  transMeta.addTransHop(hi);
  // Now execute the transformation...
  Trans trans = new Trans(transMeta);
  trans.prepareExecution(null);
  StepInterface si = trans.getStepInterface(fromstepname, 0);
  RowStepCollector rc = new RowStepCollector();
  si.addRowListener(rc);
  RowProducer rp = trans.addRowProducer(injectorStepname, 0);
  trans.startThreads();
  // add rows
  List<RowMetaAndData> inputList = createDataRows();
  for (RowMetaAndData rm : inputList) {
    rp.putRow(rm.getRowMeta(), rm.getData());
  }
  rp.finished();
  trans.waitUntilFinished();
  List<RowMetaAndData> resultRows = rc.getRowsWritten();
  List<RowMetaAndData> goldRows = createResultDataRows();
  checkRows(goldRows, resultRows);
}
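The helpers createDataRows(), createResultDataRows() and checkRows() are not shown here. For orientation, injected parameter rows in Kettle tests are typically built from a RowMeta plus an Object array; the hypothetical sketch below illustrates the shape of one such row (the field name "CODE" and the value "A" are illustrative, not the actual test data):

  // Hypothetical sketch of how a row feeding the '?' parameter could be built.
  RowMeta rowMeta = new RowMeta();
  rowMeta.addValueMeta(new ValueMetaString("CODE"));
  RowMetaAndData row = new RowMetaAndData(rowMeta, new Object[] { "A" });
  // such a row would then be handed to RowProducer.putRow(row.getRowMeta(), row.getData()) as in the loop above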