Example usage of org.talend.dataprep.api.preparation.Preparation in the Talend data-prep project:
the getPreparationComparator method of the SortAndOrderHelper class.
/**
 * Builds a {@link Comparator} for sorting {@link Preparation}s in listings.
 *
 * <p>The comparator is the combination of a key extractor (chosen from {@code sortKey})
 * and an ordering (ascending/descending, derived from {@code orderKey} via
 * {@code getOrderComparator}).</p>
 *
 * @param sortKey the field to sort on; {@code null} defaults to a NAME sort.
 * @param orderKey the requested order, turned into a comparator over the extracted keys.
 * @param dataSetFinder resolves the {@link DataSetMetadata} backing a preparation; only
 *        used for the DATASET_NAME sort. When {@code null}, DATASET_NAME falls back to
 *        sorting by preparation name.
 * @return a comparator combining the selected key extractor with the requested order.
 * @throws TDPException with ILLEGAL_SORT_FOR_LIST if {@code sortKey} is an unhandled value.
 */
public static Comparator<Preparation> getPreparationComparator(Sort sortKey, Order orderKey, Function<? super Preparation, ? extends DataSetMetadata> dataSetFinder) {
    Comparator<Comparable> comparisonOrder = getOrderComparator(orderKey);
    // Select comparator for sort (either by name or date)
    Function<Preparation, Comparable> keyExtractor;
    if (sortKey == null) {
        // default to NAME sort
        // NOTE(review): unlike the NAME case below, this branch does not guard against a
        // null preparation — assumes callers never pass null elements here; confirm.
        keyExtractor = preparation -> preparation.getName().toUpperCase();
    } else {
        switch(sortKey) {
            // In case of API call error, default to NAME sort
            // NB_RECORDS intentionally falls through to NAME: there is no record-count key
            // extractor here, so record-count sorting degrades to a name sort.
            case NB_RECORDS:
            case NAME:
                // Null-safe on the preparation itself; NOTE(review): a non-null preparation
                // with a null name would still NPE in getName().toUpperCase() — verify names
                // are always set.
                keyExtractor = preparation -> Optional.ofNullable(preparation).map(p -> p.getName().toUpperCase()).orElse(StringUtils.EMPTY);
                break;
            case AUTHOR:
                keyExtractor = preparation -> {
                    // in order to just call a method to retrieve the author name
                    // UserPreparation carries a richer Owner object; prefer its display name
                    // (upper-cased for case-insensitive ordering) over the raw author id.
                    if (preparation instanceof UserPreparation) {
                        Owner owner = ((UserPreparation) preparation).getOwner();
                        return (owner != null) ? StringUtils.upperCase(owner.getDisplayName()) : StringUtils.EMPTY;
                    }
                    return preparation.getAuthor();
                };
                break;
            // CREATION_DATE and DATE are synonyms: both sort on the creation date.
            case CREATION_DATE:
            case DATE:
                keyExtractor = Preparation::getCreationDate;
                break;
            case LAST_MODIFICATION_DATE:
                keyExtractor = Preparation::getLastModificationDate;
                break;
            case NB_STEPS:
                keyExtractor = preparation -> preparation.getSteps().size();
                break;
            case DATASET_NAME:
                if (dataSetFinder != null) {
                    // Sort on the (upper-cased) name of the dataset backing each preparation.
                    keyExtractor = p -> getUpperCaseNameFromNullable(dataSetFinder.apply(p));
                } else {
                    LOGGER.debug("There is no dataset finding function to sort preparations on dataset name. Default to natural name order.");
                    // default to sort on name
                    // NOTE(review): same unguarded getName() as the sortKey == null branch.
                    keyExtractor = preparation -> preparation.getName().toUpperCase();
                }
                break;
            default:
                // this should not be possible
                throw new TDPException(ILLEGAL_SORT_FOR_LIST, build().put("sort", sortKey));
        }
    }
    return Comparator.comparing(keyExtractor, comparisonOrder);
}
Example usage of org.talend.dataprep.api.preparation.Preparation in the Talend data-prep project:
the run method of the ToPEPersistentIdentifiable class.
@Override
public void run() {
    // Phase 1: migrate every legacy Step into a PersistentStep, counting as we go.
    LOGGER.debug("starting upgrade from {} to {}.", Step.class, PersistentStep.class);
    final AtomicLong counter = new AtomicLong(0L);
    fileSystemPreparationRepository.list(Step.class).forEach(legacyStep -> {
        // Remove the legacy entry first, then store its persistent replacement.
        fileSystemPreparationRepository.remove(legacyStep);
        PersistentStep migratedStep = turnToPersistentStep(legacyStep);
        preparationRepository.add(migratedStep);
        LOGGER.debug("step {} updated to {}", legacyStep, migratedStep);
        counter.incrementAndGet();
    });
    LOGGER.info("Upgrade from {} to {} done, {} steps processed.", Step.class, PersistentStep.class, counter.get());

    // Phase 2: migrate every legacy Preparation into a PersistentPreparation.
    LOGGER.debug("starting upgrade from {} to {}.", Preparation.class, PersistentPreparation.class);
    fileSystemPreparationRepository.list(Preparation.class).forEach(legacyPreparation -> {
        fileSystemPreparationRepository.remove(legacyPreparation);
        PersistentPreparation migratedPreparation = turnToPersistentPreparation(legacyPreparation);
        preparationRepository.add(migratedPreparation);
    });
    LOGGER.info("Upgrade from {} to {} done.", Preparation.class, PersistentPreparation.class);

    // Phase 3: rebuild each persistent preparation's step-id list and attach its
    // dataset row metadata (or an empty one when the dataset is gone).
    LOGGER.info("Migration of step ids in preparation...");
    preparationRepository.list(PersistentPreparation.class).forEach(preparation -> {
        LOGGER.info("Migration of preparation #{}", preparation.getId());
        final List<String> stepsIds = preparationUtils.listStepsIds(preparation.getHeadId(), preparationRepository);
        preparation.setSteps(stepsIds);
        final DataSetMetadata metadata = dataSetMetadataRepository.get(preparation.getDataSetId());
        if (metadata == null) {
            LOGGER.info("Metadata {} not found for preparation {}.", preparation.getDataSetId(), preparation.getId());
            preparation.setRowMetadata(new RowMetadata());
        } else {
            LOGGER.info("Set metadata {} in preparation {}.", preparation.getDataSetId(), preparation.getId());
            preparation.setRowMetadata(metadata.getRowMetadata());
        }
        preparationRepository.add(preparation);
        LOGGER.info("Migration of preparation #{} done ({} steps)", preparation.getId(), stepsIds.size());
    });
    LOGGER.info("Migration of step ids in preparation done.");
}
Example usage of org.talend.dataprep.api.preparation.Preparation in the Talend data-prep project:
the setUp method of the CachedExportStrategyTest class.
@Before
public void setUp() {
    super.setUp();
    // Register a preparation (id "1234", version "1.0") on dataset "1234" with head step "0".
    final Preparation preparation = new Preparation("1234", "1.0");
    preparation.setDataSetId("1234");
    preparation.setHeadId("0");
    preparationRepository.add(preparation);
    // Seed the transformation cache with an empty JSON payload for this preparation so
    // tests can exercise the cached-export path.
    final TransformationCacheKey cacheKey = cacheKeyGenerator.generateContentKey("1234", "1234", "0", "text", HEAD, "");
    try (OutputStream text = cache.put(cacheKey, ContentCache.TimeToLive.DEFAULT)) {
        // Explicit charset: getBytes() without one depends on the platform default.
        text.write("{}".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    } catch (IOException e) {
        // Fail fast instead of printStackTrace(): a silently missing cache entry would
        // make every dependent test fail later with an obscure error.
        throw new java.io.UncheckedIOException("Unable to seed transformation cache in test setup", e);
    }
}
Example usage of org.talend.dataprep.api.preparation.Preparation in the Talend data-prep project:
the testAcceptKO_withMetadataCacheNoContentCache method of the OptimizedExportStrategyTest class.
@Test
public void testAcceptKO_withMetadataCacheNoContentCache() throws Exception {
    // Given: a preparation on dataset "1234" with two actions applied.
    final String preparationId = createEmptyPreparationFromDataset("1234", "test");
    applyAction(preparationId, "[{}]");
    applyAction(preparationId, "[{}]");
    // Fill the metadata cache for every step, but deliberately leave the content cache empty.
    final Preparation details = getPreparation(preparationId);
    for (Step step : details.getSteps()) {
        try (OutputStream metadataEntry = contentCache.put(cacheKeyGenerator.generateMetadataKey(preparationId, step.id(), HEAD), ContentCache.TimeToLive.DEFAULT)) {
            metadataEntry.write("{}".getBytes());
            metadataEntry.flush();
        }
    }
    final ExportParameters parameters = new ExportParameters();
    parameters.setPreparationId(preparationId);
    parameters.setFrom(HEAD);
    // Then: without cached content, the optimized strategy must decline the export.
    assertFalse(optimizedExportStrategy.accept(parameters));
}
Example usage of org.talend.dataprep.api.preparation.Preparation in the Talend data-prep project:
the testExecute method of the OptimizedExportStrategyTest class.
@Test
public void testExecute() throws Exception {
    // Given: a preparation on dataset "1234" with two actions applied.
    final String datasetId = "1234";
    final String format = "JSON";
    final String preparationId = createEmptyPreparationFromDataset(datasetId, "test");
    applyAction(preparationId, "[{}]");
    applyAction(preparationId, "[{}]");
    // Seed both caches for every step: metadata plus transformed content, so the
    // optimized strategy has everything it needs.
    final Preparation details = getPreparation(preparationId);
    for (Step step : details.getSteps()) {
        try (OutputStream metadataEntry = contentCache.put(cacheKeyGenerator.generateMetadataKey(preparationId, step.id(), HEAD), ContentCache.TimeToLive.DEFAULT)) {
            metadataEntry.write("{}".getBytes());
            metadataEntry.flush();
        }
        // Content key: dataset, preparation, step, format, source (HEAD), no filter.
        final TransformationCacheKey contentKey = cacheKeyGenerator.generateContentKey(datasetId, preparationId, step.id(), format, HEAD, "");
        try (OutputStream contentEntry = contentCache.put(contentKey, ContentCache.TimeToLive.DEFAULT)) {
            contentEntry.write("{\"records\": [{\"0000\": \"a\"}]}".getBytes());
            contentEntry.flush();
        }
    }
    final ExportParameters parameters = new ExportParameters();
    parameters.setPreparationId(preparationId);
    parameters.setDatasetId(datasetId);
    parameters.setExportType(format);
    parameters.setFrom(HEAD);
    // Then: with fully seeded caches, the optimized export must run without error.
    optimizedExportStrategy.execute(parameters);
}
Aggregations