Use of org.talend.dataprep.metrics.Timed in the data-prep project by Talend.
The class DataSetService, method setFavorites.
/**
 * Adds or removes the given dataset id from the current user's favorites list, depending on the
 * unset flag. A user-data entry is created for the current user if none exists yet. If no data
 * set exists for the given id, a {@link TDPException} is thrown.
 *
 * @param unset if true, removes dataSetId from the favorites list; if false, adds it.
 * @param dataSetId the id of the data set to add to (or remove from) the favorites.
 */
@RequestMapping(value = "/datasets/{id}/favorite", method = PUT)
@ApiOperation(value = "set or unset a dataset as favorite", notes = "Specify if a dataset is or is not a favorite for the current user.")
@Timed
public void setFavorites(@RequestParam(defaultValue = "false") @ApiParam(name = "unset", value = "if true then unset the dataset as favorite, if false (default value) set the favorite flag") boolean unset, @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the favorite data set, do nothing is the id does not exist.") String dataSetId) {
    final String userId = security.getUserId();
    // guard clause: fail fast when the dataset does not exist
    final DataSetMetadata dataSetMetadata = dataSetMetadataRepository.get(dataSetId);
    if (dataSetMetadata == null) {
        throw new TDPException(DataSetErrorCodes.DATASET_DOES_NOT_EXIST, build().put("id", dataSetId));
    }
    LOG.debug("{} favorite dataset for #{} for user {}", unset ? "Unset" : "Set", dataSetId, userId);
    UserData userData = userDataRepository.get(userId);
    if (unset) {
        // removal is only meaningful when user data already exists; otherwise there is
        // nothing to unset and no save is performed
        if (userData != null) {
            userData.getFavoritesDatasets().remove(dataSetId);
            userDataRepository.save(userData);
        }
        return;
    }
    // adding: lazily create the user-data holder on the user's first favorite
    if (userData == null) {
        userData = new UserData(userId, versionService.version().getVersionId());
    }
    userData.addFavoriteDataset(dataSetId);
    userDataRepository.save(userData);
}
Use of org.talend.dataprep.metrics.Timed in the data-prep project by Talend.
The class FolderService, method getFolderAndHierarchyById.
/**
 * Returns the metadata of the folder identified by the given id, together with its parent
 * hierarchy.
 *
 * @param id the folder id.
 * @return the folder metadata with its hierarchy.
 * @throws TDPException if no folder matches the given id.
 */
@RequestMapping(value = "/folders/{id}", method = GET)
@ApiOperation(value = "Get folder by id", notes = "GET a folder by id")
@Timed
public FolderInfo getFolderAndHierarchyById(@PathVariable(value = "id") final String id) {
    final Folder folder = folderRepository.getFolderById(id);
    if (folder == null) {
        throw new TDPException(FOLDER_NOT_FOUND, ExceptionContext.build().put("path", id));
    }
    return new FolderInfo(folder, folderRepository.getHierarchy(folder));
}
Use of org.talend.dataprep.metrics.Timed in the data-prep project by Talend.
The class FolderService, method search.
/**
 * Search for folders by name.
 *
 * @param name the folder name to search for.
 * @param strict strict mode means the name must match the full folder name.
 * @param path optional: when set, only folders located at this exact path are returned.
 * @return the folders whose name matches the given criteria, with their preparation counts set.
 */
@RequestMapping(value = "/folders/search", method = GET)
@ApiOperation(value = "Search Folders with parameter as part of the name")
@Timed
public Stream<Folder> search(@RequestParam(required = false, defaultValue = "") final String name, @RequestParam(required = false, defaultValue = "false") final Boolean strict, @RequestParam(required = false) final String path) {
    Stream<Folder> folders = folderRepository.searchFolders(name, strict);
    if (path != null) {
        folders = folders.filter(f -> f.getPath().equals(path));
    }
    // Materialize the matches before logging. The previous implementation logged an
    // AtomicInteger incremented from a lazy peek() BEFORE the returned stream was consumed,
    // so the log line always reported 0 folders found.
    final java.util.List<Folder> found = folders.collect(java.util.stream.Collectors.toList());
    // enrich each folder with the number of preparations it contains
    found.forEach(folder -> folder.setNbPreparations(folderRepository.count(folder.getId(), PREPARATION)));
    LOGGER.info("Found {} folder(s) searching for {}", found.size(), name);
    return found.stream();
}
Use of org.talend.dataprep.metrics.Timed in the data-prep project by Talend.
The class TransformationService, method getPreparationExportTypesForPreparation.
/**
 * Get the available export formats for the given preparation.
 *
 * @param preparationId the id of the preparation to inspect.
 * @return the export formats available for the dataset the preparation is based on.
 */
@RequestMapping(value = "/export/formats/preparations/{preparationId}", method = GET)
@ApiOperation(value = "Get the available format types for the preparation")
@Timed
public Stream<ExportFormatMessage> getPreparationExportTypesForPreparation(@PathVariable String preparationId) {
    // the export formats of a preparation are those of its underlying dataset
    final String dataSetId = getPreparation(preparationId).getDataSetId();
    final DataSetMetadata dataSetMetadata = context.getBean(DataSetGetMetadata.class, dataSetId).execute();
    return getPreparationExportTypesForDataSet(dataSetMetadata.getId());
}
Use of org.talend.dataprep.metrics.Timed in the data-prep project by Talend.
The class TransformationService, method getPreparationColumnSemanticCategories.
/**
 * Return the semantic types for a given preparation / column.
 *
 * @param preparationId the preparation id.
 * @param columnId the column id.
 * @param stepId the step id (optional, if not specified, it's 'head').
 * @return the semantic types for a given preparation / column.
 * @throws TDPException if the cached preparation content cannot be read.
 */
@RequestMapping(value = "/preparations/{preparationId}/columns/{columnId}/types", method = GET)
@ApiOperation(value = "list the types of the wanted column", notes = "This list can be used by user to change the column type.")
@Timed
@PublicAPI
public List<SemanticDomain> getPreparationColumnSemanticCategories(@ApiParam(value = "The preparation id") @PathVariable String preparationId, @ApiParam(value = "The column id") @PathVariable String columnId, @ApiParam(value = "The preparation version") @RequestParam(defaultValue = "head") String stepId) {
    LOG.debug("listing preparation semantic categories for preparation #{} column #{}@{}", preparationId, columnId, stepId);
    // get the preparation
    final Preparation preparation = getPreparation(preparationId);
    // resolve the step: 'head' is an alias for the id of the last step of the preparation
    final String version = //
            StringUtils.equals("head", stepId) ? preparation.getSteps().get(preparation.getSteps().size() - 1).getId() : stepId;
    /*
     * OK, this one is a bit tricky so pay attention.
     *
     * To be able to get the semantic types, the analyzer service needs to run on the result of the preparation.
     *
     * The result must be found in the cache, so if the preparation is not cached, the preparation is run so that
     * it gets cached.
     *
     * Then, the analyzer service just gets the data from the cache. That's it.
     */
    // generate the cache keys for both metadata & content
    final ContentCacheKey metadataKey = cacheKeyGenerator.metadataBuilder().preparationId(preparationId).stepId(version).sourceType(HEAD).build();
    final ContentCacheKey contentKey = cacheKeyGenerator.contentBuilder().datasetId(preparation.getDataSetId()).preparationId(preparationId).stepId(//
            version).format(JSON).sourceType(//
            HEAD).build();
    // if either entry is missing, run the preparation now so that both get cached
    if (!contentCache.has(metadataKey) || !contentCache.has(contentKey)) {
        addPreparationInCache(preparation, stepId);
    }
    // run the analyzer service on the cached content
    // NOTE(review): the local 'contentCache' below intentionally shadows the field of the same
    // name; the field is reached through 'this.contentCache'.
    try (final InputStream metadataCache = contentCache.get(metadataKey);
            final InputStream contentCache = this.contentCache.get(contentKey)) {
        final DataSetMetadata metadata = mapper.readerFor(DataSetMetadata.class).readValue(metadataCache);
        final List<SemanticDomain> semanticDomains = getSemanticDomains(metadata, columnId, contentCache);
        LOG.debug("found {} for preparation #{}, column #{}", semanticDomains, preparationId, columnId);
        return semanticDomains;
    } catch (IOException e) {
        throw new TDPException(UNEXPECTED_EXCEPTION, e);
    }
}
Aggregations