Usage of org.talend.dataprep.cache.TransformationMetadataCacheKey in the data-prep project by Talend:
the executeDiffOnSample method of the TransformationService class.
/**
 * Runs a preview diff on the cached sample of a preparation.
 * <p>
 * Reads the root-step metadata and sample content from the content cache, reassembles a
 * {@link DataSet} from them, then delegates to {@code executePreview} to stream the diff
 * between the base and the new action lists to the given output.
 *
 * @param previewParameters parameters of the preview (preparation, dataset, actions, row ids).
 * @param output stream that receives the preview result.
 * @throws TDPException with {@code UNABLE_TO_PERFORM_PREVIEW} if reading the cached sample fails.
 */
private void executeDiffOnSample(final PreviewParameters previewParameters, final OutputStream output) {
    // Cache key of the dataset metadata at the preparation's root step.
    final TransformationMetadataCacheKey metadataCacheKey = cacheKeyGenerator.generateMetadataKey( //
            previewParameters.getPreparationId(), //
            Step.ROOT_STEP.id(), //
            previewParameters.getSourceType());
    // Cache key of the sample content; previews apply no filter, hence the empty filter string.
    final ContentCacheKey sampleContentKey = cacheKeyGenerator.generateContentKey( //
            previewParameters.getDataSetId(), //
            previewParameters.getPreparationId(), //
            Step.ROOT_STEP.id(), //
            JSON, //
            previewParameters.getSourceType(), //
            "");
    try (final InputStream metadataStream = contentCache.get(metadataCacheKey);
            final InputStream contentStream = contentCache.get(sampleContentKey);
            final JsonParser sampleParser =
                    mapper.getFactory().createParser(new InputStreamReader(contentStream, UTF_8))) {
        // Rebuild the dataset metadata from the cached row metadata.
        final RowMetadata rowMetadata = mapper.readerFor(RowMetadata.class).readValue(metadataStream);
        final DataSetMetadata dataSetMetadata = new DataSetMetadata();
        dataSetMetadata.setRowMetadata(rowMetadata);
        // Rebuild the dataset itself and attach the metadata to it.
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(sampleParser);
        dataSet.setMetadata(dataSetMetadata);
        // Run the diff between the base actions and the new actions on the sample rows.
        executePreview( //
                previewParameters.getNewActions(), //
                previewParameters.getBaseActions(), //
                previewParameters.getTdpIds(), //
                dataSet, //
                output);
    } catch (final IOException e) {
        throw new TDPException(TransformationErrorCodes.UNABLE_TO_PERFORM_PREVIEW, e);
    }
}
Usage of org.talend.dataprep.cache.TransformationMetadataCacheKey in the data-prep project by Talend:
the buildExecutable method of the PipelineTransformer class.
/**
 * Builds an {@link ExecutableTransformer} that runs the configured transformation pipeline
 * on the given dataset.
 *
 * @param input the dataset to transform.
 * @param configuration the transformation configuration (format, actions, filters, limits...).
 * @return an executable wrapper around the assembled {@link Pipeline}.
 */
@Override
public ExecutableTransformer buildExecutable(DataSet input, Configuration configuration) {
    final RowMetadata rowMetadata = input.getMetadata().getRowMetadata();
    // Empty row metadata with the same columns, used as a fallback by the writer node.
    final RowMetadata fallBackRowMetadata = transformationRowMetadataUtils.getMatchingEmptyRowMetadata(rowMetadata);
    final TransformerWriter writer = writerRegistrationService.getWriter(configuration.formatId(),
            configuration.output(), configuration.getArguments());
    final ConfiguredCacheWriter metadataWriter = new ConfiguredCacheWriter(contentCache, DEFAULT);
    final TransformationMetadataCacheKey metadataKey = cacheKeyGenerator.generateMetadataKey(
            configuration.getPreparationId(), configuration.stepId(), configuration.getSourceType());
    final PreparationMessage preparation = configuration.getPreparation();

    // For a given step, resolves the row metadata of its parent step (null when there is no parent).
    final Function<Step, RowMetadata> previousStepRowMetadataSupplier =
            step -> Optional.ofNullable(step.getParent()).map(preparationUpdater::get).orElse(null);

    final Pipeline pipeline = Pipeline.Builder.builder() //
            .withAnalyzerService(analyzerService) //
            .withActionRegistry(actionRegistry) //
            .withPreparation(preparation) //
            .withActions(actionParser.parse(configuration.getActions())) //
            .withInitialMetadata(rowMetadata, configuration.volume() == SMALL) //
            .withMonitor(configuration.getMonitor()) //
            .withFilter(configuration.getFilter()) //
            .withLimit(configuration.getLimit()) //
            .withFilterOut(configuration.getOutFilter()) //
            .withOutput(() -> new WriterNode(writer, metadataWriter, metadataKey, fallBackRowMetadata)) //
            .withStatisticsAdapter(adapter) //
            .withStepMetadataSupplier(previousStepRowMetadataSupplier) //
            .withGlobalStatistics(configuration.isGlobalStatistics()) //
            .allowMetadataChange(configuration.isAllowMetadataChange()) //
            .build();

    // Adapt the pipeline to the ExecutableTransformer contract.
    return new ExecutableTransformer() {

        @Override
        public void execute() {
            try {
                LOGGER.debug("Before transformation: {}", pipeline);
                pipeline.execute(input);
            } finally {
                LOGGER.debug("After transformation: {}", pipeline);
            }
            // When attached to a preparation, push the updated step metadata back to it.
            if (preparation != null) {
                final UpdatedStepVisitor visitor = new UpdatedStepVisitor(preparationUpdater);
                pipeline.accept(visitor);
            }
        }

        @Override
        public void signal(Signal signal) {
            pipeline.signal(signal);
        }
    };
}
Aggregations