Usage of org.talend.dataprep.transformation.aggregation.api.AggregationParameters in the Talend data-prep project: class AggregationAPITest, method should_aggregate_on_dataset.
@Test
public void should_aggregate_on_dataset() throws IOException {
    // given: a freshly created dataset and aggregation parameters pointing at it
    // (preparation/step ids are cleared so the aggregation targets the dataset only)
    final String dataSetId = testClient.createDataset("dataset/dataset.csv", "tagada");

    final AggregationParameters params = getAggregationParameters("aggregation/aggregation_parameters.json");
    params.setDatasetId(dataSetId);
    params.setPreparationId(null);
    params.setStepId(null);

    // when: the aggregation is requested through the API
    final String payload = mapper.writer().writeValueAsString(params);
    final String response = given() //
            .contentType(ContentType.JSON) //
            .body(payload) //
            .when() //
            .post("/api/aggregate") //
            .asString();

    // then: the response matches the expected aggregation result
    assertThat(response, sameJSONAsFile(this.getClass().getResourceAsStream("aggregation/aggregation_expected.json")));
}
Usage of org.talend.dataprep.transformation.aggregation.api.AggregationParameters in the Talend data-prep project: class AggregationAPITest, method should_aggregate_on_preparation.
@Test
public void should_aggregate_on_preparation() throws IOException {
    // given: a preparation built from a CSV file, and aggregation parameters
    // targeting that preparation (dataset/step ids cleared)
    final String preparationId = testClient.createPreparationFromFile("dataset/dataset.csv", "testPreparationContentGet", home.getId());

    final AggregationParameters params = getAggregationParameters("aggregation/aggregation_parameters.json");
    params.setDatasetId(null);
    params.setPreparationId(preparationId);
    params.setStepId(null);

    // when: the aggregation is requested through the API
    final String payload = mapper.writer().writeValueAsString(params);
    final String response = given() //
            .contentType(ContentType.JSON) //
            .body(payload) //
            .when() //
            .post("/api/aggregate") //
            .asString();

    // then: the response matches the expected aggregation result
    assertThat(response, sameJSONAsFile(this.getClass().getResourceAsStream("aggregation/aggregation_expected.json")));
}
Usage of org.talend.dataprep.transformation.aggregation.api.AggregationParameters in the Talend data-prep project: class TransformationService, method aggregate.
/**
 * Compute the given aggregation.
 * <p>
 * The aggregation source is either a preparation (its content is exported
 * asynchronously through a pair of piped streams) or, when no preparation id
 * is set, the raw dataset content fetched directly.
 *
 * @param rawParams the aggregation parameters as a raw JSON request body.
 * @return the aggregation result.
 * @throws TDPException with BAD_AGGREGATION_PARAMETERS when the body cannot be parsed,
 *         UNABLE_TO_AGGREGATE when the source content cannot be obtained, or
 *         UNABLE_TO_PARSE_JSON when the source content is not valid JSON.
 */
// @formatter:off
@RequestMapping(value = "/aggregate", method = POST, consumes = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Compute the aggregation according to the request body rawParams", consumes = APPLICATION_JSON_VALUE)
@VolumeMetered
public AggregationResult aggregate(@ApiParam(value = "The aggregation rawParams in json") @RequestBody final String rawParams) {
    // @formatter:on
    // parse the aggregation parameters
    final AggregationParameters parameters;
    try {
        parameters = mapper.readerFor(AggregationParameters.class).readValue(rawParams);
        LOG.debug("Aggregation requested {}", parameters);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.BAD_AGGREGATION_PARAMETERS, e);
    }

    InputStream contentToAggregate;
    if (StringUtils.isNotBlank(parameters.getPreparationId())) {
        // get the content of the preparation (internal call with piped streams)
        try {
            final PipedOutputStream temp = new PipedOutputStream();
            contentToAggregate = new PipedInputStream(temp);

            // because of piped streams, processing must be asynchronous
            Runnable r = () -> {
                try {
                    final ExportParameters exportParameters = new ExportParameters();
                    exportParameters.setPreparationId(parameters.getPreparationId());
                    exportParameters.setDatasetId(parameters.getDatasetId());
                    final String filter = parameters.getFilter();
                    if (filter != null) {
                        if (filter.isEmpty()) {
                            throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE,
                                    new IllegalArgumentException("Source should not be empty"));
                        }
                        exportParameters.setFilter(mapper.readTree(filter));
                    }
                    exportParameters.setExportType(JSON);
                    exportParameters.setStepId(parameters.getStepId());

                    final StreamingResponseBody body = executeSampleExportStrategy(exportParameters);
                    body.writeTo(temp);
                } catch (IOException e) {
                    // NOTE(review): an exception thrown from this task is swallowed by the
                    // executor unless it installs an uncaught-exception handler — TODO confirm.
                    throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
                } finally {
                    // FIX: always close the writing end of the pipe. Without this, when
                    // writeTo() returns without closing (or any exception escapes above),
                    // the reading side never sees EOF and the JSON parse below blocks forever.
                    try {
                        temp.close();
                    } catch (IOException e) {
                        LOG.warn("Could not close preparation content stream while aggregating", e);
                    }
                }
            };
            executor.execute(r);
        } catch (IOException e) {
            throw new TDPException(CommonErrorCodes.UNABLE_TO_AGGREGATE, e);
        }
    } else {
        // no preparation id: aggregate directly over the raw dataset content
        final DataSetGet dataSetGet = context.getBean(DataSetGet.class, parameters.getDatasetId(), false, true);
        contentToAggregate = dataSetGet.execute();
    }

    // apply the aggregation over the streamed JSON content
    try (JsonParser parser = mapper.getFactory().createParser(new InputStreamReader(contentToAggregate, UTF_8))) {
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
        return aggregationService.aggregate(parameters, dataSet);
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.UNABLE_TO_PARSE_JSON, e);
    } finally {
        // don't forget to release the connection
        if (contentToAggregate != null) {
            try {
                contentToAggregate.close();
            } catch (IOException e) {
                LOG.warn("Could not close dataset input stream while aggregating", e);
            }
        }
    }
}
Usage of org.talend.dataprep.transformation.aggregation.api.AggregationParameters in the Talend data-prep project: class AggregationTest, method shouldAggregateFromPreparation.
@Test
public void shouldAggregateFromPreparation() throws IOException {
    // given: a preparation (dataset + uppercase action) to aggregate on
    final String datasetId = createDataset("../aggregation/aggregation_dataset.csv", "for a preparation", "text/csv");
    final String preparationId = createEmptyPreparationFromDataset(datasetId, "preparation");
    applyActionFromFile(preparationId, "../aggregation/uppercase_action.json");

    // when: sum aggregation parameters are loaded and pointed at the preparation only
    final String actionsAsJson = IOUtils.toString(this.getClass().getResourceAsStream("../aggregation/sum.json"), UTF_8);
    final AggregationParameters parameters = mapper.readerFor(AggregationParameters.class).readValue(actionsAsJson);
    parameters.setDatasetId(null);
    parameters.setPreparationId(preparationId);
    parameters.setStepId(null);

    final String requestBody = mapper.writeValueAsString(parameters);
    final String actual = given() //
            .body(requestBody) //
            .contentType(APPLICATION_JSON_VALUE) //
            .when() //
            .post("/aggregate") //
            .asString();

    // then: the aggregation reflects the uppercase action applied by the preparation
    assertThat(actual, sameJSONAsFile(this.getClass().getResourceAsStream("../aggregation/uppercase_sum_expected.json")));
}
Usage of org.talend.dataprep.transformation.aggregation.api.AggregationParameters in the Talend data-prep project: class AggregatorFactoryTest, method shouldReturnEmptyAggregator.
/**
 * The factory must reject parameters that declare a group-by column but no
 * aggregation operation.
 */
@Test(expected = IllegalArgumentException.class)
public void shouldReturnEmptyAggregator() {
    // given: parameters with a group-by column and no operation
    final AggregationParameters parameters = new AggregationParameters();
    parameters.addGroupBy("0000");

    // when / then: the call is expected to throw; the unused local
    // 'Aggregator actual' from the original has been removed.
    factory.get(parameters);
}
Aggregations