Usage of org.talend.dataprep.command.dataset.DataSetGetMetadata in the Talend data-prep project: the execute method of the DataSetExportStrategy class.
@Override
public StreamingResponseBody execute(ExportParameters parameters) {
    // Resolve the requested export format up front so an unknown format fails early.
    final String requestedFormat = parameters.getExportType();
    final ExportFormat exportFormat = getFormat(requestedFormat);

    // Advertise the export file name and encoding through the HTTP response headers.
    final String encoding = parameters.getArguments().get(ExportFormat.PREFIX + CSVFormat.ParametersCSV.ENCODING);
    ExportUtils.setExportHeaders(parameters.getExportName(), encoding, exportFormat);

    return outputStream -> {
        // Retrieve the dataset content and its metadata; the content stream and the
        // JSON parser are both auto-closed by the try-with-resources block below.
        final String datasetId = parameters.getDatasetId();
        final DataSetGet contentRetriever = applicationContext.getBean(DataSetGet.class, datasetId, false, true);
        final DataSetGetMetadata metadataRetriever = applicationContext.getBean(DataSetGetMetadata.class, datasetId);
        try (InputStream rawContent = contentRetriever.execute();
                JsonParser jsonParser = mapper.getFactory().createParser(new InputStreamReader(rawContent, UTF_8))) {
            // Deserialize the dataset records and attach the separately fetched metadata.
            final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(jsonParser);
            dataSet.setMetadata(metadataRetriever.execute());
            // A plain dataset export involves no preparation, hence no actions to apply.
            final Configuration configuration = Configuration.builder() //
                    .args(parameters.getArguments()) //
                    .outFilter(rm -> filterService.build(parameters.getFilter(), rm)) //
                    .format(exportFormat.getName()) //
                    .volume(Configuration.Volume.SMALL) //
                    .output(outputStream) //
                    .limit(limit) //
                    .build();
            factory.get(configuration).buildExecutable(dataSet, configuration).execute();
        } catch (TDPException e) {
            // Business exceptions are already meaningful to callers: propagate untouched.
            throw e;
        } catch (Exception e) {
            // Wrap any technical failure into the generic transformation error.
            throw new TDPException(TransformationErrorCodes.UNABLE_TO_TRANSFORM_DATASET, e);
        }
    };
}
Usage of org.talend.dataprep.command.dataset.DataSetGetMetadata in the Talend data-prep project: the setUp method of the PreparationExportStrategyTest class.
@Before
public void setUp() throws Exception {
    // Given
    mapper.registerModule(new Jdk8Module());
    // NOTE(review): the strategy receives a fresh ObjectMapper, not the "mapper" field
    // configured above with the Jdk8Module — confirm this is intentional.
    strategy.setMapper(new ObjectMapper());
    when(formatRegistrationService.getByName(eq("JSON"))).thenReturn(new JsonFormat());
    // Stub the Spring context so the strategy gets mocked dataset commands.
    final DataSetGetMetadata dataSetGetMetadata = mock(DataSetGetMetadata.class);
    when(applicationContext.getBean(eq(DataSetGetMetadata.class), anyVararg())).thenReturn(dataSetGetMetadata);
    DataSetGet dataSetGet = mock(DataSetGet.class);
    // Build a minimal serialized dataset ("ds-1234", empty records) as the mocked content.
    final StringWriter dataSetAsString = new StringWriter();
    DataSet dataSet = new DataSet();
    final DataSetMetadata dataSetMetadata = new DataSetMetadata("ds-1234", "", "", 0L, 0L, new RowMetadata(), "");
    final DataSetContent content = new DataSetContent();
    dataSetMetadata.setContent(content);
    dataSet.setMetadata(dataSetMetadata);
    dataSet.setRecords(Stream.empty());
    mapper.writerFor(DataSet.class).writeValue(dataSetAsString, dataSet);
    // Fix: use an explicit charset instead of the platform default when turning the
    // serialized dataset into bytes, so the fixture is stable across environments.
    when(dataSetGet.execute())
            .thenReturn(new ByteArrayInputStream(dataSetAsString.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8)));
    when(applicationContext.getBean(eq(DataSetGet.class), anyVararg())).thenReturn(dataSetGet);
    // Preparation "prep-1234" has no actions ("{}").
    final PreparationGetActions preparationGetActions = mock(PreparationGetActions.class);
    when(preparationGetActions.execute())
            .thenReturn(new ByteArrayInputStream("{}".getBytes(java.nio.charset.StandardCharsets.UTF_8)));
    when(applicationContext.getBean(eq(PreparationGetActions.class), eq("prep-1234"), anyString())).thenReturn(preparationGetActions);
    // Any content-cache key resolves to a fixed value so cache interactions are observable.
    final TransformationCacheKey cacheKey = mock(TransformationCacheKey.class);
    when(cacheKey.getKey()).thenReturn("cache-1234");
    when(cacheKeyGenerator.generateContentKey(anyString(), anyString(), anyString(), anyString(), any(), any(), anyString()))
            .thenReturn(cacheKey);
    // The transformer pipeline is fully mocked; the cache swallows whatever is written.
    final ExecutableTransformer executableTransformer = mock(ExecutableTransformer.class);
    reset(transformer);
    when(transformer.buildExecutable(any(), any())).thenReturn(executableTransformer);
    when(factory.get(any())).thenReturn(transformer);
    when(contentCache.put(any(), any())).thenReturn(new NullOutputStream());
}
Usage of org.talend.dataprep.command.dataset.DataSetGetMetadata in the Talend data-prep project: the performPreparation method of the PreparationExportStrategy class.
/**
 * Applies the preparation identified in {@code parameters} to its backing dataset and
 * streams the formatted result to {@code outputStream}, while also writing the result
 * into the content cache. On any failure during transformation the partial cache entry
 * is evicted and the error is rethrown as a {@code TDPException}.
 */
public void performPreparation(final ExportParameters parameters, final OutputStream outputStream) {
    final String stepId = parameters.getStepId();
    final String preparationId = parameters.getPreparationId();
    final String formatName = parameters.getExportType();
    final PreparationMessage preparation = getPreparation(preparationId, stepId);
    final String dataSetId = preparation.getDataSetId();
    final ExportFormat format = getFormat(parameters.getExportType());
    // get the dataset content (in an auto-closable block to make sure it is properly closed)
    // Tracks whether the technical-user identity below has been released, so the finally
    // block releases it exactly once even on the error path.
    boolean releasedIdentity = false;
    // Allow get dataset and get dataset metadata access whatever share status is
    securityProxy.asTechnicalUser();
    final DataSetGet dataSetGet = applicationContext.getBean(DataSetGet.class, dataSetId, false, true);
    final DataSetGetMetadata dataSetGetMetadata = applicationContext.getBean(DataSetGetMetadata.class, dataSetId);
    try (InputStream datasetContent = dataSetGet.execute()) {
        try (JsonParser parser = mapper.getFactory().createParser(new InputStreamReader(datasetContent, UTF_8))) {
            // head is not allowed as step id
            final String version = getCleanStepId(preparation, stepId);
            // Create dataset
            final DataSet dataSet = mapper.readerFor(DataSet.class).readValue(parser);
            dataSet.setMetadata(dataSetGetMetadata.execute());
            // All good, can already release identity
            securityProxy.releaseIdentity();
            releasedIdentity = true;
            // get the actions to apply (no preparation ==> dataset export ==> no actions)
            final String actions = getActions(preparationId, version);
            // Cache key pins this exact combination of dataset, preparation, step, format,
            // source, arguments and filter.
            final TransformationCacheKey key = //
                    cacheKeyGenerator.generateContentKey(//
                            dataSetId, //
                            preparationId, //
                            version, //
                            formatName, //
                            parameters.getFrom(), //
                            parameters.getArguments(), //
                            parameters.getFilter());
            LOGGER.debug("Cache key: " + key.getKey());
            LOGGER.debug("Cache key details: " + key.toString());
            // Tee: write simultaneously to the caller's stream and to the content cache.
            try (final TeeOutputStream tee = new TeeOutputStream(outputStream, contentCache.put(key, ContentCache.TimeToLive.DEFAULT))) {
                final Configuration configuration = //
                        Configuration.builder().args(//
                                parameters.getArguments()).outFilter(//
                                rm -> filterService.build(parameters.getFilter(), rm)).sourceType(parameters.getFrom()).format(//
                                format.getName()).actions(//
                                actions).preparation(//
                                preparation).stepId(//
                                version).volume(//
                                Configuration.Volume.SMALL).output(//
                                tee).limit(//
                                limit).build();
            factory.get(configuration).buildExecutable(dataSet, configuration).execute();
                tee.flush();
            } catch (Throwable e) {
                // NOSONAR
                // The cached copy is incomplete at this point: evict it before rethrowing.
                contentCache.evict(key);
                throw e;
            }
        }
    } catch (TDPException e) {
        // Business exceptions are already meaningful: propagate untouched.
        throw e;
    } catch (Exception e) {
        throw new TDPException(TransformationErrorCodes.UNABLE_TO_TRANSFORM_DATASET, e);
    } finally {
        if (!releasedIdentity) {
            // Release identity in case of error.
            securityProxy.releaseIdentity();
        }
    }
}
Usage of org.talend.dataprep.command.dataset.DataSetGetMetadata in the Talend data-prep project: the createPreparation method of the PreparationAPI class.
// @formatter:off
@RequestMapping(value = "/api/preparations", method = POST, produces = TEXT_PLAIN_VALUE)
@ApiOperation(value = "Create a new preparation for preparation content in body.", notes = "Returns the created preparation id.")
@Timed
public String createPreparation(@ApiParam(name = "folder", value = "Where to store the preparation.") @RequestParam(value = "folder") String folder, @ApiParam(name = "body", value = "The original preparation. You may set all values, service will override values you can't write to.") @RequestBody Preparation preparation) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Creating a preparation in {} (pool: {} )...", folder, getConnectionStats());
    }
    // Seed the preparation with the row metadata of its backing dataset.
    final DataSetGetMetadata metadataCommand = getCommand(DataSetGetMetadata.class, preparation.getDataSetId());
    final DataSetMetadata dataSetMetadata = metadataCommand.execute();
    preparation.setRowMetadata(dataSetMetadata.getRowMetadata());
    // Delegate the actual creation to the preparation service and return the new id.
    final PreparationCreate creationCommand = getCommand(PreparationCreate.class, preparation, folder);
    final String preparationId = creationCommand.execute();
    LOG.info("New Preparation #{}, name: {}, created in folder {}", preparationId, preparation.getName(), folder);
    return preparationId;
}
Usage of org.talend.dataprep.command.dataset.DataSetGetMetadata in the Talend data-prep project: the toEnrichedPreparation method of the APIPreparationConversions class.
/**
 * Completes the given {@link EnrichedPreparation} with information gathered from other
 * services: the backing dataset summary, the flattened list of step ids, and the folder
 * the preparation lives in. Returns early (without steps/folder) when the preparation
 * has no dataset or its metadata cannot be read.
 */
private EnrichedPreparation toEnrichedPreparation(PreparationMessage preparationMessage, EnrichedPreparation enrichedPreparation, ApplicationContext applicationContext) {
    final SecurityProxy securityProxy = applicationContext.getBean(SecurityProxy.class);
    // Add related dataset information
    if (preparationMessage.getDataSetId() == null) {
        return enrichedPreparation;
    } else {
        // get the dataset metadata
        try {
            // because dataset are not shared
            securityProxy.asTechnicalUser();
            final DataSetGetMetadata bean = applicationContext.getBean(DataSetGetMetadata.class, preparationMessage.getDataSetId());
            final DataSetMetadata dataSetMetadata = bean.execute();
            enrichedPreparation.setSummary(new EnrichedPreparation.DataSetMetadataSummary(dataSetMetadata));
        } catch (Exception e) {
            // Fix: with the former "{} : {}" message SLF4J consumed the exception as a
            // placeholder argument, so its stack trace was never logged. One placeholder
            // plus a trailing throwable logs the full stack trace at debug level.
            LOGGER.debug("error reading dataset metadata {}", enrichedPreparation.getId(), e);
            // Metadata is best-effort: return what we have instead of failing the conversion.
            return enrichedPreparation;
        } finally {
            securityProxy.releaseIdentity();
        }
    }
    // Add step ids, dropping nulls and consecutive duplicates while preserving order.
    LinkedList<String> collected = new LinkedList<>();
    preparationMessage.getSteps().stream().map(Step::getId).forEach(s -> {
        if (s != null && (collected.isEmpty() || !collected.getLast().equals(s))) {
            collected.add(s);
        }
    });
    enrichedPreparation.setSteps(collected);
    // Add folder information
    final LocatePreparation command = applicationContext.getBean(LocatePreparation.class, enrichedPreparation.getId());
    final Folder folder = command.execute();
    enrichedPreparation.setFolder(folder);
    return enrichedPreparation;
}
Aggregations