Example usage of org.talend.dataprep.transformation.api.transformer.Transformer in the Talend data-prep project:
class XlsWriterTest, method createSchemaParser.
/**
 * Test utility: exports the {@link DataSet} serialized in the given classpath resource to a
 * temporary XLSX file, then returns a {@link SchemaParser.Request} reading that file back.
 *
 * @param inputFileName classpath resource name containing the serialized {@link DataSet} JSON
 * @return a schema-parser request wrapping an open stream on the exported XLSX file
 *         (the caller is responsible for closing that stream)
 * @throws Exception if the export or any I/O operation fails
 */
public SchemaParser.Request createSchemaParser(String inputFileName) throws Exception {
    // Use a proper ".xlsx" suffix (the original "xlsx" produced names like "datarep-fooxlsx"),
    // and make sure the temp file does not outlive the test JVM.
    Path path = Files.createTempFile("datarep-foo", ".xlsx");
    path.toFile().deleteOnExit();
    Files.deleteIfExists(path); // the export below re-creates the file from scratch
    try (final OutputStream outputStream = Files.newOutputStream(path)) {
        final Configuration configuration = Configuration
                .builder()
                .format(XlsFormat.XLSX)
                .output(outputStream)
                .actions("")
                .build();
        final Transformer exporter = factory.get(configuration);
        // createParser takes ownership of the stream: closing the parser closes the stream.
        final InputStream inputStream = XlsWriterTest.class.getResourceAsStream(inputFileName);
        try (JsonParser parser = mapper.getFactory().createParser(inputStream)) {
            final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
            exporter.buildExecutable(dataSet, configuration).execute();
        }
    }
    DataSetMetadata metadata = metadataBuilder.metadata().id("123").build();
    return new SchemaParser.Request(Files.newInputStream(path), metadata);
}
Example usage of org.talend.dataprep.transformation.api.transformer.Transformer in the Talend data-prep project:
class PipelineTransformer, method buildExecutable.
@Override
public ExecutableTransformer buildExecutable(DataSet input, Configuration configuration) {
    final RowMetadata rowMetadata = input.getMetadata().getRowMetadata();
    // Fallback metadata handed to the writer node when the pipeline cannot supply one.
    final RowMetadata fallbackMetadata = transformationRowMetadataUtils.getMatchingEmptyRowMetadata(rowMetadata);

    final TransformerWriter writer = writerRegistrationService.getWriter(configuration.formatId(),
            configuration.output(), configuration.getArguments());
    final ConfiguredCacheWriter metadataWriter = new ConfiguredCacheWriter(contentCache, DEFAULT);
    final TransformationMetadataCacheKey metadataKey = cacheKeyGenerator.generateMetadataKey(
            configuration.getPreparationId(), configuration.stepId(), configuration.getSourceType());
    final PreparationMessage preparation = configuration.getPreparation();

    // Resolves, for a given step, the row metadata of its previous/parent step (null when none).
    final Function<Step, RowMetadata> previousStepRowMetadataSupplier =
            step -> Optional.ofNullable(step.getParent()).map(preparationUpdater::get).orElse(null);

    final Pipeline pipeline = Pipeline.Builder
            .builder()
            .withAnalyzerService(analyzerService)
            .withActionRegistry(actionRegistry)
            .withPreparation(preparation)
            .withActions(actionParser.parse(configuration.getActions()))
            .withInitialMetadata(rowMetadata, configuration.volume() == SMALL)
            .withMonitor(configuration.getMonitor())
            .withFilter(configuration.getFilter())
            .withLimit(configuration.getLimit())
            .withFilterOut(configuration.getOutFilter())
            .withOutput(() -> new WriterNode(writer, metadataWriter, metadataKey, fallbackMetadata))
            .withStatisticsAdapter(adapter)
            .withStepMetadataSupplier(previousStepRowMetadataSupplier)
            .withGlobalStatistics(configuration.isGlobalStatistics())
            .allowMetadataChange(configuration.isAllowMetadataChange())
            .build();

    // Expose the configured pipeline through the executable-transformer contract.
    return new ExecutableTransformer() {

        @Override
        public void execute() {
            try {
                LOGGER.debug("Before transformation: {}", pipeline);
                pipeline.execute(input);
            } finally {
                LOGGER.debug("After transformation: {}", pipeline);
            }
            // After a run tied to a preparation, propagate updated step metadata.
            if (preparation != null) {
                pipeline.accept(new UpdatedStepVisitor(preparationUpdater));
            }
        }

        @Override
        public void signal(Signal signal) {
            pipeline.signal(signal);
        }
    };
}
Aggregations