use of org.apache.hop.pipeline.transform.TransformMetaDataCombi in project hop by apache.
the class PipelineDataProbeXp method executeProbingPipeline.
/**
 * Execute a probing pipeline for the current pipeline: add a listener to the transform copies
 * and send the data to the PipelineDataProbe transform(s) in the probing pipeline.
 *
 * @param pipelineProbe The pipeline probe metadata being applied
 * @param dataProbeLocation The data probe location describing the source transform to probe
 * @param loggingPipelineFilename The filename of the probing pipeline to start for the location
 * @param pipeline The parent pipeline to listen to
 * @param variables The variables to use when loading and running the probing pipeline
 * @throws HopException
 */
private synchronized void executeProbingPipeline(
    PipelineProbe pipelineProbe,
    DataProbeLocation dataProbeLocation,
    String loggingPipelineFilename,
    IPipelineEngine<PipelineMeta> pipeline,
    IVariables variables)
    throws HopException {
  PipelineMeta probingPipelineMeta =
      new PipelineMeta(loggingPipelineFilename, pipeline.getMetadataProvider(), true, variables);

  // Create a local pipeline engine...
  //
  LocalPipelineEngine probingPipeline =
      new LocalPipelineEngine(probingPipelineMeta, variables, pipeline);

  // Flag it as a probing and logging pipeline so we don't try to probe or log ourselves...
  //
  probingPipeline.getExtensionDataMap().put(PIPELINE_DATA_PROBE_FLAG, "Y");
  probingPipeline.getExtensionDataMap().put(PipelineStartLoggingXp.PIPELINE_LOGGING_FLAG, "Y");

  // Only log errors
  //
  probingPipeline.setLogLevel(LogLevel.ERROR);
  probingPipeline.prepareExecution();

  List<IEngineComponent> componentCopies =
      pipeline.getComponentCopies(dataProbeLocation.getSourceTransformName());
  for (IEngineComponent componentCopy : componentCopies) {
    for (TransformMetaDataCombi combi : probingPipeline.getTransforms()) {
      if (combi.transform instanceof PipelineDataProbe) {
        // Give the transform a bit more information to work with...
        //
        PipelineDataProbe pipelineDataProbe = (PipelineDataProbe) combi.transform;
        pipelineDataProbe.setSourcePipelineName(pipeline.getPipelineMeta().getName());
        pipelineDataProbe.setSourceTransformLogChannelId(pipeline.getLogChannelId());
        pipelineDataProbe.setSourceTransformName(componentCopy.getName());
        pipelineDataProbe.setSourceTransformCopy(componentCopy.getCopyNr());
        try {
          final RowProducer rowProducer =
              probingPipeline.addRowProducer(combi.transformName, combi.copy);

          // For every copy of the source component, feed the rows it writes into the
          // probing pipeline through the row producer...
          //
          componentCopy.addRowListener(
              new RowAdapter() {
                @Override
                public void rowWrittenEvent(IRowMeta rowMeta, Object[] row)
                    throws HopTransformException {
                  // Pass this row to the row producer...
                  //
                  rowProducer.putRow(rowMeta, row);
                }
              });

          // When the parent pipeline finishes, we can safely stop streaming rows...
          //
          pipeline.addExecutionFinishedListener(pe -> rowProducer.finished());
        } catch (HopException e) {
          throw new HopTransformException(
              "Error adding row producer to transform '" + combi.transformName + "'", e);
        }
      }
    }
  }

  // Start the probing pipeline to process the probed rows
  //
  probingPipeline.startThreads();

  // We'll not wait around until this is finished...
  // The pipeline should stop automatically when the parent does
  //
  pipeline.addExecutionStoppedListener(e -> probingPipeline.stopAll());
}
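The essence of the method above is a bridge between a row listener on the parent pipeline and a row producer on the probing pipeline. Below is a minimal, self-contained sketch of just that bridge, not the actual Hop implementation: the class and method names are illustrative, the import paths assume the usual Hop package layout, and the target pipeline is assumed to have been prepared with prepareExecution() already.

import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.exception.HopTransformException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.RowProducer;
import org.apache.hop.pipeline.engine.IEngineComponent;
import org.apache.hop.pipeline.engine.IPipelineEngine;
import org.apache.hop.pipeline.engines.local.LocalPipelineEngine;
import org.apache.hop.pipeline.transform.RowAdapter;

/** Illustrative helper: stream rows written by a parent component into another pipeline. */
public final class RowBridge {

  private RowBridge() {}

  public static void bridge(
      IPipelineEngine<PipelineMeta> parent,
      IEngineComponent sourceCopy,
      LocalPipelineEngine target,
      String targetTransformName,
      int targetCopyNr)
      throws HopException {
    // The target pipeline must be prepared before a row producer can be attached.
    final RowProducer producer = target.addRowProducer(targetTransformName, targetCopyNr);

    // Forward every row the source copy writes into the target pipeline.
    sourceCopy.addRowListener(
        new RowAdapter() {
          @Override
          public void rowWrittenEvent(IRowMeta rowMeta, Object[] row)
              throws HopTransformException {
            producer.putRow(rowMeta, row);
          }
        });

    // Close the stream once the parent pipeline has finished executing.
    parent.addExecutionFinishedListener(engine -> producer.finished());
  }
}

Attaching the producer before startThreads() and calling finished() from an execution-finished listener is what lets the probing pipeline drain its input and terminate cleanly once the parent stops producing rows.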
use of org.apache.hop.pipeline.transform.TransformMetaDataCombi in project hop by apache.
the class PipelineTest method combi.
private TransformMetaDataCombi combi(
    ITransform transform, ITransformData data, TransformMeta transformMeta) {
  TransformMetaDataCombi transformMetaDataCombi = new TransformMetaDataCombi();
  transformMetaDataCombi.transform = transform;
  transformMetaDataCombi.data = data;
  transformMetaDataCombi.transformMeta = transformMeta;
  return transformMetaDataCombi;
}
use of org.apache.hop.pipeline.transform.TransformMetaDataCombi in project hop by apache.
the class CsvProcessRowInParallelTest method createBaseCombi.
private TransformMetaDataCombi createBaseCombi(
    File sharedFile, boolean headerPresent, String delimiter) {
  TransformMetaDataCombi combi = new TransformMetaDataCombi();
  CsvInputData data = new CsvInputData();
  CsvInputMeta meta =
      createMeta(
          sharedFile, createInputFileFields("Field_000", "Field_001"), headerPresent, delimiter);
  CsvInput csvInput = createCsvInput(meta, data);
  csvInput.init();
  combi.transform = csvInput;
  combi.data = data;
  combi.meta = meta;
  return combi;
}
use of org.apache.hop.pipeline.transform.TransformMetaDataCombi in project hop by apache.
the class InjectDataSetIntoTransformExtensionPoint method injectDataSetIntoTransform.
private void injectDataSetIntoTransform(
    final LocalPipelineEngine pipeline,
    final String dataSetName,
    final IHopMetadataProvider metadataProvider,
    final TransformMeta transformMeta,
    PipelineUnitTestSetLocation inputLocation)
    throws HopException {
  final DataSet dataSet = metadataProvider.getSerializer(DataSet.class).load(dataSetName);
  if (dataSet == null) {
    throw new HopException("Unable to find data set '" + dataSetName + "'");
  }
  final ILogChannel log = pipeline.getLogChannel();
  final RowProducer rowProducer = pipeline.addRowProducer(transformMeta.getName(), 0);

  // Look for the transform into which we'll inject rows...
  //
  TransformMetaDataCombi combi = null;
  for (TransformMetaDataCombi transform : pipeline.getTransforms()) {
    if (transform.transformName.equals(transformMeta.getName())) {
      combi = transform;
      break;
    }
  }
  if (combi != null) {
    // Get the rows of the mapped values, in the mapped order, sorted as requested
    //
    final List<Object[]> dataSetRows = dataSet.getAllRows(pipeline, log, inputLocation);
    IRowMeta dataSetRowMeta = dataSet.getMappedDataSetFieldsRowMeta(inputLocation);

    // The rows to inject are always driven by the data set, NOT the transform it replaces (!),
    // for simplicity
    //
    IRowMeta injectRowMeta = new RowMeta();

    // Figure out which fields to pass:
    // only inject those mentioned in the field mappings...
    //
    int[] fieldIndexes = new int[inputLocation.getFieldMappings().size()];
    for (int i = 0; i < inputLocation.getFieldMappings().size(); i++) {
      PipelineUnitTestFieldMapping fieldMapping = inputLocation.getFieldMappings().get(i);
      fieldIndexes[i] = dataSetRowMeta.indexOfValue(fieldMapping.getDataSetFieldName());
      if (fieldIndexes[i] < 0) {
        throw new HopException(
            "Unable to find mapped field '"
                + fieldMapping.getDataSetFieldName()
                + "' in data set '"
                + dataSet.getName()
                + "'");
      }
      IValueMeta injectValueMeta = dataSetRowMeta.getValueMeta(fieldIndexes[i]).clone();
      // Rename to the transform output names though...
      //
      injectValueMeta.setName(fieldMapping.getTransformFieldName());
      injectRowMeta.addValueMeta(injectValueMeta);
    }

    log.logDetailed(
        "Injecting data set '"
            + dataSetName
            + "' into transform '"
            + transformMeta.getName()
            + "', fields: "
            + Arrays.toString(injectRowMeta.getFieldNames()));

    // Pass rows
    //
    Runnable runnable =
        () -> {
          try {
            for (Object[] dataSetRow : dataSetRows) {
              // Pass the row with the external names, in the right order and with the selected
              // columns from the data set
              //
              Object[] row = RowDataUtil.allocateRowData(injectRowMeta.size());
              for (int i = 0; i < fieldIndexes.length; i++) {
                row[i] = dataSetRow[fieldIndexes[i]];
              }
              rowProducer.putRow(injectRowMeta, row);
            }
            rowProducer.finished();
          } catch (Exception e) {
            throw new RuntimeException(
                "Problem injecting data set '"
                    + dataSetName
                    + "' row into transform '"
                    + transformMeta.getName()
                    + "'",
                e);
          }
        };
    Thread thread = new Thread(runnable);
    thread.start();
  }
}
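The lookup loop in the middle of this method is a recurring pattern when working with TransformMetaDataCombi. As a sketch, it could be factored into a small helper; the name findCombi is hypothetical, and it relies only on the getTransforms() and transformName members used above:

// Illustrative helper extracting the lookup-by-name loop; returns null when not found.
private static TransformMetaDataCombi findCombi(
    LocalPipelineEngine pipeline, String transformName) {
  for (TransformMetaDataCombi candidate : pipeline.getTransforms()) {
    if (candidate.transformName.equals(transformName)) {
      return candidate;
    }
  }
  return null;
}

With such a helper, the injection logic reduces to a single call followed by the existing null check.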
use of org.apache.hop.pipeline.transform.TransformMetaDataCombi in project hop by apache.
the class CsvProcessRowInParallelTest method createAndRunOneTransform.
private int createAndRunOneTransform(
    File sharedFile,
    int transformNr,
    int totalNumberOfTransforms,
    boolean headersPresent,
    String delimiter)
    throws Exception {
  TransformMetaDataCombi combiTransform1 = createBaseCombi(sharedFile, headersPresent, delimiter);
  configureData((CsvInputData) combiTransform1.data, transformNr, totalNumberOfTransforms);
  return processRows(combiTransform1);
}