Use of org.talend.dataprep.dataset.event.DatasetUpdatedEvent in project data-prep by Talend.
The class TransformAPITest, method testShouldEvictPreparationCacheOnDataSetUpdate:
@Test
public void testShouldEvictPreparationCacheOnDataSetUpdate() throws Exception {
    // given
    final String preparationId = testClient.createPreparationFromFile("dataset/dataset_TDP-2165.csv", "testDataset", home.getId());
    testClient.applyAction(preparationId, IOUtils.toString(this.getClass().getResourceAsStream("transformation/TDP-2165.json"), UTF_8));
    testClient.getPreparation(preparationId);
    final Preparation preparation = preparationRepository.get(preparationId, Preparation.class);
    final TransformationCacheKey transformationCacheKey = cacheKeyGenerator.generateContentKey( //
            preparation.getDataSetId(), //
            preparationId, //
            preparation.getHeadId(), //
            "JSON", //
            HEAD, //
            ""); // no filter
    assertTrue(contentCache.has(transformationCacheKey));

    // when
    context.publishEvent(new DatasetUpdatedEvent(dataSetMetadataRepository.get(preparation.getDataSetId())));

    // then
    assertFalse(contentCache.has(transformationCacheKey));
}
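The test publishes the event through the Spring application context, so DatasetUpdatedEvent behaves as an ordinary Spring application event whose payload is the updated dataset's metadata. As a rough sketch only, inferred from the usages shown on this page (the DataSetMetadata import path is assumed, and the real class in org.talend.dataprep.dataset.event may differ in detail), an event class compatible with this code could look like:

    // Minimal sketch of an event class compatible with the usage above: a Spring ApplicationEvent
    // whose source is the metadata of the updated dataset. Inferred from usage, not copied from data-prep.
    import org.springframework.context.ApplicationEvent;
    import org.talend.dataprep.api.dataset.DataSetMetadata; // assumed package for DataSetMetadata

    public class DatasetUpdatedEvent extends ApplicationEvent {

        public DatasetUpdatedEvent(DataSetMetadata metadata) {
            super(metadata);
        }

        @Override
        public DataSetMetadata getSource() {
            // Covariant return: listeners such as DatasetUpdateListener.onUpdate() read the
            // updated dataset's metadata directly from the event.
            return (DataSetMetadata) super.getSource();
        }
    }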
Use of org.talend.dataprep.dataset.event.DatasetUpdatedEvent in project data-prep by Talend.
The class DataSetService, method updateRawDataSet:
/**
 * Updates a data set content and metadata. If no data set exists for the given id, the data set is silently created.
 *
 * @param dataSetId The id of the data set to be updated.
 * @param name The new name for the data set. An empty (or <code>null</code>) name does not update the dataset name.
 * @param size The size of the new data set content, if known; used to check the available storage quota before the upload.
 * @param dataSetContent The new content for the data set. If empty, the existing content will <b>not</b> be replaced.
 * For a delete operation, see {@link #delete(String)}.
 */
@RequestMapping(value = "/datasets/{id}/raw", method = PUT)
@ApiOperation(value = "Update a data set by id", notes = "Update a data set content based on provided id and PUT body. Id should be a UUID returned by the list operation. Not valid or non existing data set id returns empty content. For documentation purposes, body is typed as 'text/plain' but operation accepts binary content too.")
@Timed
@VolumeMetered
public String updateRawDataSet( //
        @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the data set to update") String dataSetId, //
        @RequestParam(value = "name", required = false) @ApiParam(name = "name", value = "New value for the data set name") String name, //
        @RequestParam(value = "size", required = false) @ApiParam(name = "size", value = "The size of the dataSet") Long size, //
        @ApiParam(value = "content") InputStream dataSetContent) {

    LOG.debug("updating dataset content #{}", dataSetId);

    if (name != null) {
        checkDataSetName(name);
    }

    DataSetMetadata currentDataSetMetadata = dataSetMetadataRepository.get(dataSetId);
    if (currentDataSetMetadata == null) {
        return create(name, null, size, TEXT_PLAIN_VALUE, dataSetContent);
    } else {
        // Just like the creation, make sure an invalid size forbids the dataset update
        if (size != null && size < 0) {
            LOG.warn("invalid size provided {}", size);
            throw new TDPException(UNSUPPORTED_CONTENT);
        }

        final UpdateDataSetCacheKey cacheKey = new UpdateDataSetCacheKey(currentDataSetMetadata.getId());
        final DistributedLock lock = dataSetMetadataRepository.createDatasetMetadataLock(currentDataSetMetadata.getId());
        try {
            lock.lock();

            // Check the size if it is available (quick win)
            if (size != null && size > 0) {
                quotaService.checkIfAddingSizeExceedsAvailableStorage(Math.abs(size - currentDataSetMetadata.getDataSetSize()));
            }

            final DataSetMetadataBuilder datasetBuilder = metadataBuilder.metadata().id(currentDataSetMetadata.getId());
            datasetBuilder.copyNonContentRelated(currentDataSetMetadata);
            datasetBuilder.modified(System.currentTimeMillis());
            if (!StringUtils.isEmpty(name)) {
                datasetBuilder.name(name);
            }
            final DataSetMetadata updatedDataSetMetadata = datasetBuilder.build();

            // Save the data set content into the cache to make sure there's enough space in the content store
            final long maxDataSetSizeAllowed = getMaxDataSetSizeAllowed();
            final StrictlyBoundedInputStream sizeCalculator = new StrictlyBoundedInputStream(dataSetContent, maxDataSetSizeAllowed);
            try (OutputStream cacheEntry = cacheManager.put(cacheKey, TimeToLive.DEFAULT)) {
                IOUtils.copy(sizeCalculator, cacheEntry);
            }

            // Once fully copied to the cache, we know for sure that the content store has enough space,
            // so copy from the cache to the content store
            PipedInputStream toContentStore = new PipedInputStream();
            PipedOutputStream fromCache = new PipedOutputStream(toContentStore);
            Runnable r = () -> {
                try (final InputStream input = cacheManager.get(cacheKey)) {
                    IOUtils.copy(input, fromCache);
                    // It's important to close this stream, otherwise the piped stream will never close
                    fromCache.close();
                } catch (IOException e) {
                    throw new TDPException(UNABLE_TO_CREATE_OR_UPDATE_DATASET, e);
                }
            };
            executor.execute(r);
            contentStore.storeAsRaw(updatedDataSetMetadata, toContentStore);

            // Update the dataset metadata with its new size
            updatedDataSetMetadata.setDataSetSize(sizeCalculator.getTotal());
            dataSetMetadataRepository.save(updatedDataSetMetadata);

            // Publish the update event
            publisher.publishEvent(new DatasetUpdatedEvent(updatedDataSetMetadata));
        } catch (StrictlyBoundedInputStream.InputStreamTooLargeException e) {
            LOG.warn("Dataset update {} cannot be done, new content is too big", currentDataSetMetadata.getId());
            throw new TDPException(MAX_STORAGE_MAY_BE_EXCEEDED, e, build().put("limit", e.getMaxSize()));
        } catch (IOException e) {
            LOG.error("Error updating the dataset", e);
            throw new TDPException(UNABLE_TO_CREATE_OR_UPDATE_DATASET, e);
        } finally {
            dataSetContentToNull(dataSetContent);
            // Whatever the outcome, the cache needs to be cleaned
            if (cacheManager.has(cacheKey)) {
                cacheManager.evict(cacheKey);
            }
            lock.unlock();
        }
        // Content was changed, so queue events (format analysis, content indexing for search...)
        analyzeDataSet(currentDataSetMetadata.getId(), true, emptyList());
        return currentDataSetMetadata.getId();
    }
}
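The interesting part of updateRawDataSet is the two-step copy: the new content is first written to a cache entry (which enforces the size limit via StrictlyBoundedInputStream), then streamed from the cache into the content store through a pipe, with the cache-to-pipe copy running on a separate executor thread. The standalone sketch below reproduces just that piped hand-off with plain JDK classes; the in-memory byte array standing in for the cache entry and the single-thread executor are illustrative assumptions, not data-prep APIs.

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.PipedInputStream;
    import java.io.PipedOutputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class PipedCopySketch {

        public static void main(String[] args) throws Exception {
            // Stands in for the cached dataset content.
            byte[] cachedContent = "id;name\n1;john\n".getBytes(StandardCharsets.UTF_8);

            PipedInputStream toContentStore = new PipedInputStream();
            PipedOutputStream fromCache = new PipedOutputStream(toContentStore);

            ExecutorService executor = Executors.newSingleThreadExecutor();
            executor.execute(() -> {
                try (InputStream input = new ByteArrayInputStream(cachedContent)) {
                    input.transferTo(fromCache);
                    // Closing the output side is what lets the reader see end-of-stream;
                    // without it the consumer below would block forever.
                    fromCache.close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });

            // The "content store" side: simply count the bytes read from the pipe.
            long total = 0;
            int read;
            byte[] buffer = new byte[8192];
            while ((read = toContentStore.read(buffer)) != -1) {
                total += read;
            }
            System.out.println("Copied " + total + " bytes through the pipe");
            executor.shutdown();
        }
    }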
Use of org.talend.dataprep.dataset.event.DatasetUpdatedEvent in project data-prep by Talend.
The class DatasetUpdateListener, method onUpdate:
@EventListener
public void onUpdate(DatasetUpdatedEvent event) {
    // When a dataset is updated, the related cache entries need to be cleaned
    final DataSetMetadata dataSetMetadata = event.getSource();

    final ContentCacheKey sampleKey = () -> "dataset-sample_" + dataSetMetadata.getId();
    LOGGER.debug("Evicting sample cache entry for #{}", dataSetMetadata.getId());
    publisher.publishEvent(new CleanCacheEvent(sampleKey));
    LOGGER.debug("Evicting sample cache entry for #{} done.", dataSetMetadata.getId());

    LOGGER.debug("Evicting transformation cache entry for dataset #{}", dataSetMetadata.getId());
    publisher.publishEvent(new CleanCacheEvent(new ContentCacheKey() {

        @Override
        public String getKey() {
            return dataSetMetadata.getId();
        }

        @Override
        public Predicate<String> getMatcher() {
            // Build a regular expression matcher for any cache key that embeds the dataset id
            String regex = ".*_" + getKey() + "_.*";
            final Pattern pattern = Pattern.compile(regex);
            return str -> pattern.matcher(str).matches();
        }
    }, Boolean.TRUE));
    LOGGER.debug("Evicting transformation cache entry for dataset #{} done.", dataSetMetadata.getId());
}
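The anonymous ContentCacheKey does not name a single entry: its matcher selects every cache key that embeds the dataset id between underscores, and the CleanCacheEvent (published with Boolean.TRUE) evicts all of them. A small self-contained sketch of that matcher logic follows; the key names are made up for illustration, since real transformation cache key names are generated by data-prep itself.

    import java.util.List;
    import java.util.function.Predicate;
    import java.util.regex.Pattern;

    public class DatasetCacheKeyMatcherSketch {

        public static void main(String[] args) {
            String dataSetId = "1234-abcd";

            // Same construction as in onUpdate(): match any key that contains "_<datasetId>_".
            Pattern pattern = Pattern.compile(".*_" + dataSetId + "_.*");
            Predicate<String> matcher = str -> pattern.matcher(str).matches();

            List<String> candidateKeys = List.of(
                    "transformation_1234-abcd_step42_JSON_HEAD", // hypothetical key for this dataset: matches
                    "transformation_9999-zzzz_step7_JSON_HEAD",  // hypothetical key for another dataset: no match
                    "dataset-sample_1234-abcd");                 // sample key has no trailing "_": no match (it is evicted via sampleKey instead)

            candidateKeys.forEach(key -> System.out.println(key + " -> evict? " + matcher.test(key)));
        }
    }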
Use of org.talend.dataprep.dataset.event.DatasetUpdatedEvent in project data-prep by Talend.
The class DataSetService, method updateDataSet:
/**
 * Updates a data set metadata. If no data set exists for the given id, a {@link TDPException} is thrown.
 *
 * @param dataSetId The id of the data set to be updated.
 * @param dataSetMetadata The new metadata for the data set. If empty, the existing metadata will <b>not</b> be replaced.
 * For a delete operation, see {@link #delete(String)}.
 */
@RequestMapping(value = "/datasets/{id}", method = PUT)
@ApiOperation(value = "Update a data set metadata by id", notes = "Update a data set metadata according to the content of the PUT body. Id should be a UUID returned by the list operation. Not valid or non existing data set id return an error response.")
@Timed
public void updateDataSet( //
        @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the data set to update") String dataSetId, //
        @RequestBody DataSetMetadata dataSetMetadata) {

    if (dataSetMetadata != null && dataSetMetadata.getName() != null) {
        checkDataSetName(dataSetMetadata.getName());
    }

    final DistributedLock lock = dataSetMetadataRepository.createDatasetMetadataLock(dataSetId);
    lock.lock();
    try {
        DataSetMetadata metadataForUpdate = dataSetMetadataRepository.get(dataSetId);
        if (metadataForUpdate == null) {
            // No need to silently create the data set metadata: associated content will most likely not exist.
            throw new TDPException(DataSetErrorCodes.DATASET_DOES_NOT_EXIST, build().put("id", dataSetId));
        }
        LOG.debug("updateDataSet: {}", dataSetMetadata);
        publisher.publishEvent(new DatasetUpdatedEvent(dataSetMetadata));

        //
        // Only part of the metadata can be updated, so the original dataset metadata is loaded and updated
        //
        DataSetMetadata original = metadataBuilder.metadata().copy(metadataForUpdate).build();
        try {
            // Update the name
            metadataForUpdate.setName(dataSetMetadata.getName());

            // Update the sheet content (in case of a multi-sheet Excel file)
            if (metadataForUpdate.getSchemaParserResult() != null) {
                Optional<Schema.SheetContent> sheetContentFound = metadataForUpdate.getSchemaParserResult().getSheetContents().stream() //
                        .filter(sheetContent -> dataSetMetadata.getSheetName().equals(sheetContent.getName())) //
                        .findFirst();
                if (sheetContentFound.isPresent()) {
                    List<ColumnMetadata> columnMetadatas = sheetContentFound.get().getColumnMetadatas();
                    if (metadataForUpdate.getRowMetadata() == null) {
                        metadataForUpdate.setRowMetadata(new RowMetadata(emptyList()));
                    }
                    metadataForUpdate.getRowMetadata().setColumns(columnMetadatas);
                }
                metadataForUpdate.setSheetName(dataSetMetadata.getSheetName());
                metadataForUpdate.setSchemaParserResult(null);
            }

            // Location updates
            metadataForUpdate.setLocation(dataSetMetadata.getLocation());

            // Update parameters & encoding (so that the user can change import parameters for CSV)
            metadataForUpdate.getContent().setParameters(dataSetMetadata.getContent().getParameters());
            metadataForUpdate.setEncoding(dataSetMetadata.getEncoding());

            // Update the limit
            final Optional<Long> newLimit = dataSetMetadata.getContent().getLimit();
            newLimit.ifPresent(limit -> metadataForUpdate.getContent().setLimit(limit));

            // Validate the new data set metadata and remove the draft status
            final String formatFamilyId = dataSetMetadata.getContent().getFormatFamilyId();
            if (formatFamilyFactory.hasFormatFamily(formatFamilyId)) {
                FormatFamily format = formatFamilyFactory.getFormatFamily(formatFamilyId);
                try {
                    DraftValidator draftValidator = format.getDraftValidator();
                    DraftValidator.Result result = draftValidator.validate(dataSetMetadata);
                    if (result.isDraft()) {
                        // This is not an exception case: a data set may remain a draft after update (although rather unusual)
                        LOG.warn("Data set #{} is still a draft after update.", dataSetId);
                        return;
                    }
                    // The data set metadata to update is no longer a draft
                    metadataForUpdate.setDraft(false);
                } catch (UnsupportedOperationException e) {
                    // No need to validate the draft here
                }
            }

            // Update the schema
            formatAnalyzer.update(original, metadataForUpdate);

            // Save the result
            dataSetMetadataRepository.save(metadataForUpdate);

            // All good, so send that to JMS:
            // ask for an in-depth schema analysis (for column type information).
            analyzeDataSet(dataSetId, true, singletonList(FormatAnalysis.class));
        } catch (TDPException e) {
            throw e;
        } catch (Exception e) {
            throw new TDPException(UNABLE_TO_CREATE_OR_UPDATE_DATASET, e);
        }
    } finally {
        lock.unlock();
    }
}
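Because DatasetUpdatedEvent goes through Spring's ApplicationEventPublisher, reacting to dataset updates only requires another bean with an @EventListener method; DatasetUpdateListener above is one such consumer. The auditing listener below is hypothetical (it is not part of data-prep, and the DataSetMetadata import path is assumed); it only illustrates the wiring.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.context.event.EventListener;
    import org.springframework.stereotype.Component;
    import org.talend.dataprep.api.dataset.DataSetMetadata; // assumed package for DataSetMetadata
    import org.talend.dataprep.dataset.event.DatasetUpdatedEvent;

    // Hypothetical listener: any Spring bean with an @EventListener method receives the
    // DatasetUpdatedEvent published by updateDataSet() and updateRawDataSet() above.
    @Component
    public class DatasetUpdateAuditListener {

        private static final Logger LOGGER = LoggerFactory.getLogger(DatasetUpdateAuditListener.class);

        @EventListener
        public void onDatasetUpdated(DatasetUpdatedEvent event) {
            final DataSetMetadata metadata = event.getSource();
            // Purely illustrative side effect: trace which dataset was updated.
            LOGGER.info("Dataset #{} ('{}') was updated", metadata.getId(), metadata.getName());
        }
    }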