use of org.talend.dataprep.api.preparation.PreparationMessage in project data-prep by Talend.
In class APIPreparationConversionsTest, the method shouldDealWithExceptionInDataSetGet.
@Test
public void shouldDealWithExceptionInDataSetGet() {
    // given: every DataSetGet bean obtained from the context fails with "dataset does not exist"
    final PreparationMessage preparation = getPreparationMessage("DS-1234");
    when(applicationContext.getBean(eq(DataSetGet.class), any(Object[].class))).thenAnswer(invocation -> {
        final DataSetGet failingCommand = mock(DataSetGet.class);
        when(failingCommand.execute()).thenThrow(new TDPException(DataSetErrorCodes.DATASET_DOES_NOT_EXIST));
        return failingCommand;
    });

    // when: converting the preparation despite the failing dataset lookup
    final EnrichedPreparation actual = conversionService.convert(preparation, EnrichedPreparation.class);

    // then: conversion still yields a result, with the dataset-derived fields left unset
    assertNotNull(actual);
    assertNull(actual.getSummary());
    assertNull(actual.getFolder());
}
use of org.talend.dataprep.api.preparation.PreparationMessage in project data-prep by Talend.
In class APIPreparationConversionsTest, the method shouldEnrichPreparationWithDataset.
@Test
public void shouldEnrichPreparationWithDataset() {
    // given: a dataset and a folder that the conversion is expected to look up
    DataSetMetadata metadata = getDataSetMetadata("super dataset", 1001L);
    setupHystrixCommand(DataSetGetMetadata.class, metadata);
    final PreparationMessage preparation = getPreparationMessage(metadata.getId());
    Folder folder = getFolder("F-753854");
    setupHystrixCommand(LocatePreparation.class, folder);

    // when
    final EnrichedPreparation actual = conversionService.convert(preparation, EnrichedPreparation.class);

    // then: summary is populated from the dataset metadata
    assertEquals(metadata.getId(), actual.getSummary().getDataSetId());
    assertEquals(metadata.getName(), actual.getSummary().getDataSetName());
    assertEquals(metadata.getContent().getNbRecords(), actual.getSummary().getDataSetNbRow());

    // then: the enriched preparation carries exactly the preparation's step ids
    final List<String> expectedSteps = preparation.getSteps().stream().map(Step::getId).collect(Collectors.toList());
    final List<String> actualSteps = actual.getSteps();
    assertNotNull(actualSteps);
    // BUG FIX: the original asserted expectedSteps.size() against itself, which always passed.
    assertEquals(expectedSteps.size(), actualSteps.size());
    expectedSteps.forEach(s -> assertTrue(actualSteps.contains(s)));
    assertEquals(folder, actual.getFolder());
}
use of org.talend.dataprep.api.preparation.PreparationMessage in project data-prep by Talend.
In class PreparationClientTest, the method getDetails.
/**
 * Return the details of a preparation at a given (optional) step.
 *
 * @param preparationId the wanted preparation id.
 * @param wantedStepId the optional wanted step id; when null or blank, the head version is returned.
 * @return the details of a preparation at a given (optional) step.
 * @throws MockTDPException if the service does not answer with HTTP 200.
 * @throws TDPException if the response body cannot be parsed as a {@link PreparationMessage}.
 */
public PreparationMessage getDetails(String preparationId, String wantedStepId) {
    final RequestSpecification specs = given();
    if (StringUtils.isNotBlank(wantedStepId)) {
        specs.queryParam("stepId", wantedStepId);
    }
    final Response response = specs.when().get("/preparations/{id}/details", preparationId);
    if (response.getStatusCode() != 200) {
        throw new MockTDPException(response);
    }
    try {
        return mapper.readerFor(PreparationMessage.class).readValue(response.asString());
    } catch (IOException e) {
        // BUG FIX: preserve the root cause (the original dropped 'e', hiding the parse failure).
        // This matches how TDPException is constructed elsewhere in the code base.
        throw new TDPException(UNABLE_TO_READ_CONTENT, e);
    }
}
use of org.talend.dataprep.api.preparation.PreparationMessage in project data-prep by Talend.
In class OptimizedExportStrategy, the method performOptimizedTransform.
/**
 * Run the optimized export: resume from the cached content of a previous step and only apply the
 * actions between that step and the requested one, teeing the result into both the caller's stream
 * and the content cache.
 *
 * @param parameters the export parameters (preparation id, step, format, filter...).
 * @param outputStream where the transformed content is written.
 * @throws IOException on stream failures.
 * @throws IllegalStateException if accept() was not called first (no optimized input available).
 */
private void performOptimizedTransform(ExportParameters parameters, OutputStream outputStream) throws IOException {
    // Initial check: the optimized input is only available when accept() succeeded beforehand.
    final OptimizedPreparationInput optimizedPreparationInput = new OptimizedPreparationInput(parameters).invoke();
    if (optimizedPreparationInput == null) {
        throw new IllegalStateException("Unable to use this strategy (call accept() before calling this).");
    }
    final String preparationId = parameters.getPreparationId();
    final String dataSetId = optimizedPreparationInput.getDataSetId();
    final TransformationCacheKey transformationCacheKey = optimizedPreparationInput.getTransformationCacheKey();
    final DataSetMetadata metadata = optimizedPreparationInput.getMetadata();
    final String previousVersion = optimizedPreparationInput.getPreviousVersion();
    final String version = optimizedPreparationInput.getVersion();
    final ExportFormat format = getFormat(parameters.getExportType());
    // Get content from previous step (read from the transformation cache, not from the dataset service)
    try (JsonParser parser = mapper.getFactory().createParser(new InputStreamReader(contentCache.get(transformationCacheKey), UTF_8))) {
        // Create dataset
        final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
        dataSet.setMetadata(metadata);
        // get the actions to apply (no preparation ==> dataset export ==> no actions)
        final String actions = getActions(preparationId, previousVersion, version);
        final PreparationMessage preparation = getPreparation(preparationId);
        // Only keep the steps between the cached version and the requested one.
        preparation.setSteps(getMatchingSteps(preparation.getSteps(), previousVersion, version));
        LOGGER.debug("Running optimized strategy for preparation {} @ step #{}", preparationId, version);
        // create tee to broadcast to cache + service output
        final TransformationCacheKey key = //
                cacheKeyGenerator.generateContentKey(//
                        dataSetId, //
                        preparationId, //
                        version, //
                        parameters.getExportType(), //
                        parameters.getFrom(), //
                        parameters.getArguments(), //
                        parameters.getFilter());
        // FIX: parameterized logging — avoids string concatenation when debug is disabled.
        LOGGER.debug("Cache key: {}", key.getKey());
        LOGGER.debug("Cache key details: {}", key);
        try (final TeeOutputStream tee = new TeeOutputStream(outputStream, contentCache.put(key, ContentCache.TimeToLive.DEFAULT))) {
            final Configuration configuration = //
                    Configuration.builder().args(//
                            parameters.getArguments()).outFilter(//
                            rm -> filterService.build(parameters.getFilter(), rm)).sourceType(parameters.getFrom()).format(//
                            format.getName()).actions(//
                            actions).preparation(//
                            preparation).stepId(//
                            version).volume(//
                            Configuration.Volume.SMALL).output(//
                            tee).limit(//
                            limit).build();
            factory.get(configuration).buildExecutable(dataSet, configuration).execute();
            tee.flush();
        } catch (Throwable e) { // NOSONAR
            // Evict the partially written cache entry so a failed export is not served later.
            contentCache.evict(key);
            throw e;
        }
    } catch (TDPException e) {
        throw e;
    } catch (Exception e) {
        throw new TDPException(TransformationErrorCodes.UNABLE_TO_TRANSFORM_DATASET, e);
    }
}
use of org.talend.dataprep.api.preparation.PreparationMessage in project data-prep by Talend.
In class PreparationExportStrategy, the method performPreparation.
/**
 * Export a preparation from scratch: fetch the full dataset content, apply all preparation actions
 * up to the requested step, and tee the transformed output into both the caller's stream and the
 * content cache.
 *
 * @param parameters the export parameters (preparation id, step, format, filter...).
 * @param outputStream where the transformed content is written.
 */
public void performPreparation(final ExportParameters parameters, final OutputStream outputStream) {
    final String stepId = parameters.getStepId();
    final String preparationId = parameters.getPreparationId();
    final String formatName = parameters.getExportType();
    final PreparationMessage preparation = getPreparation(preparationId, stepId);
    final String dataSetId = preparation.getDataSetId();
    final ExportFormat format = getFormat(parameters.getExportType());
    // get the dataset content (in an auto-closable block to make sure it is properly closed)
    boolean releasedIdentity = false;
    // Allow get dataset and get dataset metadata access whatever share status is
    securityProxy.asTechnicalUser();
    final DataSetGet dataSetGet = applicationContext.getBean(DataSetGet.class, dataSetId, false, true);
    final DataSetGetMetadata dataSetGetMetadata = applicationContext.getBean(DataSetGetMetadata.class, dataSetId);
    try (InputStream datasetContent = dataSetGet.execute()) {
        try (JsonParser parser = mapper.getFactory().createParser(new InputStreamReader(datasetContent, UTF_8))) {
            // head is not allowed as step id
            final String version = getCleanStepId(preparation, stepId);
            // Create dataset
            final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
            dataSet.setMetadata(dataSetGetMetadata.execute());
            // All good, can already release identity
            securityProxy.releaseIdentity();
            releasedIdentity = true;
            // get the actions to apply (no preparation ==> dataset export ==> no actions)
            final String actions = getActions(preparationId, version);
            final TransformationCacheKey key = //
                    cacheKeyGenerator.generateContentKey(//
                            dataSetId, //
                            preparationId, //
                            version, //
                            formatName, //
                            parameters.getFrom(), //
                            parameters.getArguments(), //
                            parameters.getFilter());
            // FIX: parameterized logging — avoids string concatenation when debug is disabled.
            LOGGER.debug("Cache key: {}", key.getKey());
            LOGGER.debug("Cache key details: {}", key);
            try (final TeeOutputStream tee = new TeeOutputStream(outputStream, contentCache.put(key, ContentCache.TimeToLive.DEFAULT))) {
                final Configuration configuration = //
                        Configuration.builder().args(//
                                parameters.getArguments()).outFilter(//
                                rm -> filterService.build(parameters.getFilter(), rm)).sourceType(parameters.getFrom()).format(//
                                format.getName()).actions(//
                                actions).preparation(//
                                preparation).stepId(//
                                version).volume(//
                                Configuration.Volume.SMALL).output(//
                                tee).limit(//
                                limit).build();
            factory.get(configuration).buildExecutable(dataSet, configuration).execute();
                tee.flush();
            } catch (Throwable e) { // NOSONAR
                // Evict the partially written cache entry so a failed export is not served later.
                contentCache.evict(key);
                throw e;
            }
        }
    } catch (TDPException e) {
        throw e;
    } catch (Exception e) {
        throw new TDPException(TransformationErrorCodes.UNABLE_TO_TRANSFORM_DATASET, e);
    } finally {
        if (!releasedIdentity) {
            // Release identity in case of error (the happy path released it right after metadata fetch).
            securityProxy.releaseIdentity();
        }
    }
}
Aggregations