Use of org.talend.dataprep.api.export.ExportParameters in project data-prep by Talend.
The class PrepMetadataCacheCondition, method apply:
@Override
public boolean apply(Object... args) {
    // check pre-condition
    Validate.notNull(args);
    Validate.isTrue(args.length == 2);
    Validate.isInstanceOf(String.class, args[0]);
    Validate.isInstanceOf(String.class, args[1]);
    try {
        String preparationId = (String) args[0];
        String headId = (String) args[1];
        ExportParameters exportParameters = new ExportParameters();
        exportParameters.setPreparationId(preparationId);
        exportParameters.setStepId(headId);
        exportParameters = exportParametersUtil.populateFromPreparationExportParameter(exportParameters);
        final TransformationMetadataCacheKey cacheKey =
                cacheKeyGenerator.generateMetadataKey(exportParameters.getPreparationId(), exportParameters.getStepId(), HEAD);
        return cacheCondition.apply(cacheKey);
    } catch (IOException e) {
        LOGGER.error("Cannot get all information from export parameters", e);
    }
    return false;
}
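The varargs precondition pattern used above can be exercised in isolation. The sketch below is illustrative and not part of data-prep: DemoCondition is a made-up name, and the only dependency assumed is commons-lang3, which provides the Validate calls shown in the snippet.

import org.apache.commons.lang3.Validate;

public class DemoCondition {

    // Same argument contract as PrepMetadataCacheCondition#apply:
    // exactly two Strings, the preparation id and the head step id.
    public boolean apply(Object... args) {
        Validate.notNull(args);
        Validate.isTrue(args.length == 2);
        Validate.isInstanceOf(String.class, args[0]);
        Validate.isInstanceOf(String.class, args[1]);
        String preparationId = (String) args[0];
        String headId = (String) args[1];
        // A real condition would derive a metadata cache key here and
        // delegate to a cache lookup; this sketch only validates input.
        return !preparationId.isEmpty() && !headId.isEmpty();
    }

    public static void main(String[] args) {
        DemoCondition condition = new DemoCondition();
        System.out.println(condition.apply("prep-1234", "head")); // prints: true
    }
}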
Use of org.talend.dataprep.api.export.ExportParameters in project data-prep by Talend.
The class PreparationCacheCondition, method apply:
@Override
public boolean apply(Object... args) {
    // check pre-condition
    Validate.notNull(args);
    Validate.isTrue(args.length == 1);
    Validate.isInstanceOf(ExportParameters.class, args[0]);
    try {
        ExportParameters exportParameters =
                exportParametersUtil.populateFromPreparationExportParameter((ExportParameters) args[0]);
        TransformationCacheKey cacheKey = cacheKeyGenerator.generateContentKey(exportParameters);
        return cacheCondition.apply(cacheKey);
    } catch (IOException e) {
        LOGGER.error("Cannot get all information from export parameters", e);
        return false;
    }
}
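A caller hands this condition a single ExportParameters instance. The helper below is a hypothetical sketch: the class and method names are made up, the condition is assumed to arrive as a Spring-injected bean, and the file is assumed to sit in the same package as PreparationCacheCondition (its package is not shown in the excerpt). Only the ExportParameters setters come from the snippets above.

import org.talend.dataprep.api.export.ExportParameters;

public final class PreparationCacheCheck {

    // Hypothetical helper; "condition" stands in for a Spring-injected
    // PreparationCacheCondition bean.
    static boolean isContentCached(PreparationCacheCondition condition, String preparationId) {
        ExportParameters parameters = new ExportParameters();
        parameters.setPreparationId(preparationId); // setters shown in the snippets above
        parameters.setStepId("head");
        // Single-argument contract: exactly one ExportParameters instance.
        return condition.apply(parameters);
    }

    private PreparationCacheCheck() {
        // no instances
    }
}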
Use of org.talend.dataprep.api.export.ExportParameters in project data-prep by Talend.
The class TransformationService, method execute:
@RequestMapping(value = "/apply", method = POST)
@ApiOperation(value = "Run the transformation given the provided export parameters", notes = "This operation transforms the dataset or preparation using parameters in export parameters.")
@VolumeMetered
@AsyncOperation(//
        conditionalClass = GetPrepContentAsyncCondition.class, //
        resultUrlGenerator = PreparationGetContentUrlGenerator.class, //
        executionIdGeneratorClass = ExportParametersExecutionIdGenerator.class)
public StreamingResponseBody execute(
        @ApiParam(value = "Preparation id to apply.") @RequestBody @Valid @AsyncParameter @AsyncExecutionId final ExportParameters parameters)
        throws IOException {
    ExportParameters completeParameters = parameters;
    if (StringUtils.isNotEmpty(completeParameters.getPreparationId())) {
        // we deal with preparation transformation (not dataset)
        completeParameters = exportParametersUtil.populateFromPreparationExportParameter(parameters);
        ContentCacheKey cacheKey = cacheKeyGenerator.generateContentKey(completeParameters);
        if (!contentCache.has(cacheKey)) {
            preparationExportStrategy.performPreparation(completeParameters, new NullOutputStream());
        }
    }
    return executeSampleExportStrategy(completeParameters);
}
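Since the endpoint accepts POSTed JSON, it can be exercised with the standard JDK HttpClient. Everything in the sketch below is illustrative: the host, port, and ids are made up, and the JSON field names (preparationId, stepId, exportType) are assumed to mirror the ExportParameters bean, whose serialized form is not shown in this excerpt.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ApplyEndpointClient {

    public static void main(String[] args) throws Exception {
        // Field names assumed to mirror the ExportParameters bean.
        String body = "{\"preparationId\":\"prep-1234\",\"stepId\":\"head\",\"exportType\":\"JSON\"}";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/apply")) // illustrative host and port
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}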
Use of org.talend.dataprep.api.export.ExportParameters in project data-prep by Talend.
The class TransformationService, method applyOnDataset:
/**
 * Apply the preparation to the dataset out of the given IDs.
 *
 * @param preparationId the preparation id to apply on the dataset.
 * @param datasetId the dataset id to transform.
 * @param formatName the output {@link ExportFormat format}. This format also sets the MIME response type.
 * @param stepId the preparation step id to use (default is 'head').
 * @param name the transformation name.
 * @param exportParams additional (optional) export parameters.
 */
// @formatter:off
@RequestMapping(value = "/apply/preparation/{preparationId}/dataset/{datasetId}/{format}", method = GET)
@ApiOperation(value = "Transform the given preparation to the given format on the given dataset id", notes = "This operation transforms the dataset using preparation id in the provided format.")
@VolumeMetered
public StreamingResponseBody applyOnDataset(
        @ApiParam(value = "Preparation id to apply.") @PathVariable(value = "preparationId") final String preparationId,
        @ApiParam(value = "DataSet id to transform.") @PathVariable(value = "datasetId") final String datasetId,
        @ApiParam(value = "Output format") @PathVariable("format") final String formatName,
        @ApiParam(value = "Step id", defaultValue = "head") @RequestParam(value = "stepId", required = false, defaultValue = "head") final String stepId,
        @ApiParam(value = "Name of the transformation", defaultValue = "untitled") @RequestParam(value = "name", required = false, defaultValue = "untitled") final String name,
        @RequestParam final Map<String, String> exportParams) {
    // @formatter:on
    final ExportParameters exportParameters = new ExportParameters();
    exportParameters.setPreparationId(preparationId);
    exportParameters.setDatasetId(datasetId);
    exportParameters.setExportType(formatName);
    exportParameters.setStepId(stepId);
    exportParameters.setExportName(name);
    exportParameters.getArguments().putAll(exportParams);
    return executeSampleExportStrategy(exportParameters);
}
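The GET variant encodes everything in the URL path and query string, so a call looks like the sketch below; the host, ids, and chosen format name are illustrative.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ApplyOnDatasetClient {

    public static void main(String[] args) throws Exception {
        // Path: /apply/preparation/{preparationId}/dataset/{datasetId}/{format};
        // stepId and name fall back to "head" and "untitled" when omitted.
        URI uri = URI.create("http://localhost:8080/apply/preparation/prep-1234/dataset/ds-5678/JSON"
                + "?stepId=head&name=my-export"); // illustrative ids and format
        HttpRequest request = HttpRequest.newBuilder().uri(uri).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
    }
}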
Use of org.talend.dataprep.api.export.ExportParameters in project data-prep by Talend.
The class TransformationService, method aggregate:
/**
 * Compute the given aggregation.
 *
 * @param rawParams the aggregation parameters, as raw JSON in the request body.
 */
// @formatter:off
@RequestMapping(value = "/aggregate", method = POST, consumes = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Compute the aggregation according to the request body rawParams", consumes = APPLICATION_JSON_VALUE)
@VolumeMetered
public AggregationResult aggregate(@ApiParam(value = "The aggregation rawParams in json") @RequestBody final String rawParams) {
    // @formatter:on
    // parse the aggregation parameters
    final AggregationParameters parameters;
    try {
        parameters = mapper.readerFor(AggregationParameters.class).readValue(rawParams);
        LOG.debug("Aggregation requested {}", parameters);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.BAD_AGGREGATION_PARAMETERS, e);
    }
    InputStream contentToAggregate;
    // get the content of the preparation (internal call with piped streams)
    if (StringUtils.isNotBlank(parameters.getPreparationId())) {
        try {
            PipedOutputStream temp = new PipedOutputStream();
            contentToAggregate = new PipedInputStream(temp);
            // because of piped streams, processing must be asynchronous
            Runnable r = () -> {
                try {
                    final ExportParameters exportParameters = new ExportParameters();
                    exportParameters.setPreparationId(parameters.getPreparationId());
                    exportParameters.setDatasetId(parameters.getDatasetId());
                    final String filter = parameters.getFilter();
                    if (filter != null) {
                        if (filter.isEmpty()) {
                            throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE,
                                    new IllegalArgumentException("Source should not be empty"));
                        }
                        exportParameters.setFilter(mapper.readTree(filter));
                    }
                    exportParameters.setExportType(JSON);
                    exportParameters.setStepId(parameters.getStepId());
                    final StreamingResponseBody body = executeSampleExportStrategy(exportParameters);
                    body.writeTo(temp);
                } catch (IOException e) {
                    throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
                }
            };
            executor.execute(r);
        } catch (IOException e) {
            throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
        }
    } else {
        final DataSetGet dataSetGet = context.getBean(DataSetGet.class, parameters.getDatasetId(), false, true);
        contentToAggregate = dataSetGet.execute();
    }
    // apply the aggregation
    try (JsonParser parser = mapper.getFactory().createParser(new InputStreamReader(contentToAggregate, UTF_8))) {
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
        return aggregationService.aggregate(parameters, dataSet);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.UNABLE_TO_PARSE_JSON, e);
    } finally {
        // don't forget to release the connection
        if (contentToAggregate != null) {
            try {
                contentToAggregate.close();
            } catch (IOException e) {
                LOG.warn("Could not close dataset input stream while aggregating", e);
            }
        }
    }
}
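The notable technique here is the piped-stream handoff: the export is produced on a background thread while the aggregation consumes the same pipe. The self-contained sketch below demonstrates only that JDK pattern; the class name and JSON payload are illustrative.

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class PipedStreamDemo {

    public static void main(String[] args) throws IOException {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        PipedOutputStream producerSide = new PipedOutputStream();
        PipedInputStream consumerSide = new PipedInputStream(producerSide);
        // The producer must run on another thread: using both ends of the
        // pipe from one thread deadlocks once the internal buffer fills up.
        executor.execute(() -> {
            try {
                producerSide.write("{\"records\":[]}".getBytes(StandardCharsets.UTF_8));
                producerSide.close(); // signals end-of-stream to the reader
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });
        try (consumerSide) {
            System.out.println(new String(consumerSide.readAllBytes(), StandardCharsets.UTF_8));
        }
        executor.shutdown();
    }
}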