Example usage of io.swagger.annotations.ApiParam in the Eclipse Vorto project: class NamespaceController, method requestAccessToNamespace.
@PostMapping("/requestAccess")
@PreAuthorize("isAuthenticated()")
public ResponseEntity<OperationResult> requestAccessToNamespace(@RequestBody @ApiParam(value = "The request body specifying who initiates the request, the namespace, whom the request is intended for, and an optional collection of suggested roles", required = true) NamespaceAccessRequestDTO request) {
    // Reject malformed requests up front.
    Optional<OperationResult> invalid = NamespaceValidator.validateAccessRequest(request);
    if (invalid.isPresent()) {
        return new ResponseEntity<>(invalid.get(), HttpStatus.BAD_REQUEST);
    }
    // Resolve the target namespace. A miss should only occur if the namespace was
    // deleted between the user's search and the submission of this request.
    final Namespace namespace;
    try {
        namespace = namespaceService.getByName(request.getNamespaceName());
    } catch (DoesNotExistException dnee) {
        return new ResponseEntity<>(OperationResult.failure("Namespace not found."), HttpStatus.NOT_FOUND);
    }
    // Collect every user on the namespace who has a non-blank e-mail address on file.
    Set<User> recipients = userNamespaceRoleRepository.findAllByNamespace(namespace)
            .stream()
            .map(UserNamespaceRoles::getUser)
            .filter(user -> !Strings.nullToEmpty(user.getEmailAddress()).trim().isEmpty())
            .collect(Collectors.toSet());
    if (recipients.isEmpty()) {
        return new ResponseEntity<>(OperationResult.failure(String.format("None of the users administrating namespace %s has set their own e-mail. Please contact them directly. ", request.getNamespaceName())), HttpStatus.PRECONDITION_FAILED);
    }
    // Send one message per recipient, counting delivery failures.
    // Exception-driven flow here is dictated by the notification service's design.
    int failures = 0;
    for (User recipient : recipients) {
        IMessage message = new RequestAccessToNamespaceMessage(request, recipient, host);
        try {
            emailNotificationService.sendNotification(message);
        } catch (NotificationProblem np) {
            failures++;
        }
    }
    if (failures == 0) {
        // Delivered to every administrator.
        return new ResponseEntity<>(OperationResult.success(), HttpStatus.OK);
    }
    if (failures < recipients.size()) {
        // Delivered to at least one administrator.
        return new ResponseEntity<>(OperationResult.success("The message could not be sent to all administrators."), HttpStatus.OK);
    }
    // Could not deliver to any administrator.
    return new ResponseEntity<>(OperationResult.failure("The message could not be sent to any administrator."), HttpStatus.SERVICE_UNAVAILABLE);
}
Example usage of io.swagger.annotations.ApiParam in the Talend data-prep project: class DataSetAPI, method list.
@RequestMapping(value = "/api/datasets", method = GET, produces = APPLICATION_JSON_VALUE)
@ApiOperation(value = "List data sets.", produces = APPLICATION_JSON_VALUE, notes = "Returns a list of data sets the user can use.")
@Timed
public Stream<DatasetDTO> list(@ApiParam(value = "Sort key (by name or date), defaults to 'date'.") @RequestParam(defaultValue = "creationDate") Sort sort, @ApiParam(value = "Order for sort key (desc or asc), defaults to 'desc'.") @RequestParam(defaultValue = "desc") Order order, @ApiParam(value = "Filter on name containing the specified name") @RequestParam(defaultValue = "") String name, @ApiParam(value = "Filter on certified data sets") @RequestParam(defaultValue = "false") boolean certified, @ApiParam(value = "Filter on favorite data sets") @RequestParam(defaultValue = "false") boolean favorite, @ApiParam(value = "Filter on recent data sets") @RequestParam(defaultValue = "false") boolean limit) {
    try {
        // Translate the boolean flags into the (nullable) filters the client expects.
        final CertificationState certificationFilter = certified ? CERTIFIED : null;
        final Boolean favoriteFilter = favorite ? Boolean.TRUE : null;
        Stream<DatasetDTO> datasets = datasetClient.listDataSetMetadata(certificationFilter, favoriteFilter);
        // Optional case-insensitive substring match on the data set name.
        if (isNotBlank(name)) {
            datasets = datasets.filter(dto -> containsIgnoreCase(dto.getName(), name));
        }
        // Keep only certified data sets when requested.
        if (certified) {
            datasets = datasets.filter(dto -> dto.getCertification() == CERTIFIED);
        }
        // Cap the number of results when the caller asked for the "recent" subset.
        if (limit) {
            datasets = datasets.limit(datasetListLimit);
        }
        return datasets.sorted(SortAndOrderHelper.getDatasetDTOComparator(sort, order));
    } finally {
        LOG.info("listing datasets done [favorite: {}, certified: {}, name: {}, limit: {}]", favorite, certified, name, limit);
    }
}
Example usage of io.swagger.annotations.ApiParam in the Talend data-prep project: class DataSetService, method get.
/**
 * Returns the <b>full</b> data set content for given id.
 *
 * <p>The work is deferred inside the returned {@link Callable}, so the (potentially large)
 * content stream is only opened when the caller actually executes it. On success, ownership
 * of the open stream transfers to the returned {@code DataSet}; on failure it is closed here.</p>
 *
 * @param metadata If <code>true</code>, includes data set metadata information.
 * @param includeInternalContent If <code>true</code>, rows keep their internal technical properties.
 * @param limit Maximum number of rows to stream; defaults to -1 — presumably meaning "no limit", confirm with the content store.
 * @param filter Filter expression applied to the streamed rows; empty string means no filtering.
 * @param dataSetId A data set id.
 * @return The full data set.
 */
@RequestMapping(value = "/datasets/{id}/content", method = RequestMethod.GET)
@ApiOperation(value = "Get a data set by id", notes = "Get a data set content based on provided id. Id should be a UUID returned by the list operation. Not valid or non existing data set id returns empty content.")
@Timed
@ResponseBody
public Callable<DataSet> get(@RequestParam(defaultValue = "true") @ApiParam(name = "metadata", value = "Include metadata information in the response") boolean metadata, @RequestParam(defaultValue = "false") @ApiParam(name = "includeInternalContent", value = "Include internal content in the response") boolean includeInternalContent, //
        @RequestParam(defaultValue = "-1") @ApiParam(name = STORAGE_LIMIT, value = STORAGE_LIMIT) long limit, @ApiParam(value = "Filter for retrieved content.") @RequestParam(value = "filter", defaultValue = "") String filter, @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the requested data set") String dataSetId) {
    return () -> {
        final Marker marker = Markers.dataset(dataSetId);
        LOG.debug(marker, "Get data set #{}", dataSetId);
        Stream<DataSetRow> stream = null;
        try {
            DataSetMetadata dataSetMetadata = dataSetMetadataRepository.get(dataSetId);
            // Validates the metadata; presumably throws when the data set is missing — confirm against the helper.
            assertDataSetMetadata(dataSetMetadata, dataSetId);
            // Build the result
            DataSet dataSet = new DataSet();
            if (metadata) {
                dataSet.setMetadata(conversionService.convert(dataSetMetadata, UserDataSetMetadata.class));
            }
            // Open the raw content stream; -1 (the default) presumably disables the line limit — TODO confirm.
            stream = contentStore.stream(dataSetMetadata, limit);
            // on-demand analyzer for dataset (See TDP-4404, migration problems)
            if (dataSetMetadata.getRowMetadata().getColumns().stream().anyMatch(c -> c.getStatistics().getWordPatternFrequencyTable().isEmpty())) {
                stream = insertWordPatternAnalysis(dataSetMetadata, stream);
            }
            if (!includeInternalContent) {
                LOG.debug("Skip internal content when serving data set #{} content.", dataSetId);
                stream = stream.map(r -> {
                    final Map<String, Object> values = r.values();
                    // Copy first, then remove: iterating 'values' while removing from it directly would be unsafe.
                    final Map<String, Object> filteredValues = new HashMap<>(values);
                    // Remove technical properties from returned values.
                    values.forEach((k, v) -> {
                        if (k != null && k.startsWith(FlagNames.INTERNAL_PROPERTY_PREFIX)) {
                            filteredValues.remove(k);
                        }
                    });
                    // Include TDP_ID anyway
                    filteredValues.put(FlagNames.TDP_ID, r.getTdpId());
                    return new DataSetRow(r.getRowMetadata(), filteredValues);
                });
            }
            // Filter content
            stream = stream.filter(filterService.build(filter, dataSetMetadata.getRowMetadata()));
            dataSet.setRecords(stream);
            return dataSet;
        } catch (Exception e) {
            // The open stream is handed to the caller on success only; on any failure it must be
            // closed here to avoid leaking the underlying content-store resources.
            if (stream != null) {
                stream.close();
            }
            throw e;
        } finally {
            LOG.debug(marker, "Get done.");
        }
    };
}
Example usage of io.swagger.annotations.ApiParam in the Talend data-prep project: class DataSetService, method getDataSetColumnSemanticCategories.
/**
 * Returns the semantic types for a given dataset / column.
 *
 * <p>Runs a semantic analysis over the column's values and returns the resulting
 * semantic domains, which a user may use to change the column type.</p>
 *
 * @param datasetId the datasetId id.
 * @param columnId the column id.
 * @return the semantic types for a given dataset / column.
 */
@RequestMapping(value = "/datasets/{datasetId}/columns/{columnId}/types", method = GET)
@ApiOperation(value = "list the types of the wanted column", notes = "This list can be used by user to change the column type.")
@Timed
@PublicAPI
public List<SemanticDomain> getDataSetColumnSemanticCategories(@ApiParam(value = "The dataset id") @PathVariable String datasetId, @ApiParam(value = "The column id") @PathVariable String columnId) {
    LOG.debug("listing semantic categories for dataset #{} column #{}", datasetId, columnId);
    final DataSetMetadata dataSetMetadata = dataSetMetadataRepository.get(datasetId);
    // Guard clause: unknown dataset id is an error, not an empty result.
    if (dataSetMetadata == null) {
        throw new TDPException(DataSetErrorCodes.DATASET_DOES_NOT_EXIST, ExceptionContext.withBuilder().put("id", datasetId).build());
    }
    // try-with-resources guarantees the content stream is closed after analysis.
    try (final Stream<DataSetRow> rows = contentStore.stream(dataSetMetadata)) {
        final ColumnMetadata column = dataSetMetadata.getRowMetadata().getById(columnId);
        final Analyzer<Analyzers.Result> semanticAnalyzer = analyzerService.build(column, SEMANTIC);
        semanticAnalyzer.init();
        // Feed every value of the requested column through the analyzer.
        rows.map(row -> row.get(columnId)).forEach(semanticAnalyzer::analyze);
        semanticAnalyzer.end();
        final List<Analyzers.Result> analysisResults = semanticAnalyzer.getResult();
        // Fold the analysis results back into the column metadata.
        new StatisticsAdapter(40).adapt(singletonList(column), analysisResults);
        LOG.debug("found {} for dataset #{}, column #{}", column.getSemanticDomains(), datasetId, columnId);
        return column.getSemanticDomains();
    }
}
Example usage of io.swagger.annotations.ApiParam in the Talend data-prep project: class DataSetService, method updateDataSet.
/**
 * Updates a data set metadata. If no data set exists for given id, a {@link TDPException} is thrown.
 *
 * <p>The update is performed under a distributed per-dataset lock. Only part of the metadata is
 * updated (name, sheet selection, location, import parameters, encoding, row limit); the original
 * metadata is loaded first and mutated in place. A {@code DatasetUpdatedEvent} is published only
 * when the method runs to completion — the "still a draft" early return skips both the save and
 * the event.</p>
 *
 * @param dataSetId The id of data set to be updated.
 * @param dataSetMetadata The new content for the data set. If empty, existing content will <b>not</b> be replaced.
 * For delete operation, look at {@link #delete(String)}.
 */
@RequestMapping(value = "/datasets/{id}", method = PUT)
@ApiOperation(value = "Update a data set metadata by id", notes = "Update a data set metadata according to the content of the PUT body. Id should be a UUID returned by the list operation. Not valid or non existing data set id return an error response.")
@Timed
public void updateDataSet(@PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the data set to update") String dataSetId, @RequestBody DataSetMetadata dataSetMetadata) {
    // NOTE(review): the null check below guards only the name validation; the update path
    // later dereferences dataSetMetadata unconditionally (setName, getSheetName, getContent...)
    // and would NPE on a null body — confirm whether the request body is guaranteed non-null here.
    if (dataSetMetadata != null && dataSetMetadata.getName() != null) {
        checkDataSetName(dataSetMetadata.getName());
    }
    // Serialize concurrent updates to the same data set via a distributed lock.
    final DistributedLock lock = dataSetMetadataRepository.createDatasetMetadataLock(dataSetId);
    lock.lock();
    try {
        DataSetMetadata metadataForUpdate = dataSetMetadataRepository.get(dataSetId);
        if (metadataForUpdate == null) {
            // No need to silently create the data set metadata: associated content will most likely not exist.
            throw new TDPException(DataSetErrorCodes.DATASET_DOES_NOT_EXIST, build().put("id", dataSetId));
        }
        LOG.debug("updateDataSet: {}", dataSetMetadata);
        //
        // Only part of the metadata can be updated, so the original dataset metadata is loaded and updated
        //
        // Snapshot of the pre-update state, used by the schema analyzer below to diff old vs. new.
        DataSetMetadata original = metadataBuilder.metadata().copy(metadataForUpdate).build();
        try {
            // update the name
            metadataForUpdate.setName(dataSetMetadata.getName());
            // update the sheet content (in case of a multi-sheet excel file)
            if (metadataForUpdate.getSchemaParserResult() != null) {
                Optional<Schema.SheetContent> sheetContentFound = metadataForUpdate.getSchemaParserResult().getSheetContents().stream().filter(sheetContent -> dataSetMetadata.getSheetName().equals(//
                sheetContent.getName())).findFirst();
                if (sheetContentFound.isPresent()) {
                    List<ColumnMetadata> columnMetadatas = sheetContentFound.get().getColumnMetadatas();
                    if (metadataForUpdate.getRowMetadata() == null) {
                        metadataForUpdate.setRowMetadata(new RowMetadata(emptyList()));
                    }
                    metadataForUpdate.getRowMetadata().setColumns(columnMetadatas);
                }
                metadataForUpdate.setSheetName(dataSetMetadata.getSheetName());
                // Sheet has been chosen; the parser result is no longer needed.
                metadataForUpdate.setSchemaParserResult(null);
            }
            // Location updates
            if (dataSetMetadata.getLocation() != null) {
                metadataForUpdate.setLocation(dataSetMetadata.getLocation());
            }
            // update parameters & encoding (so that user can change import parameters for CSV)
            metadataForUpdate.getContent().setParameters(dataSetMetadata.getContent().getParameters());
            metadataForUpdate.setEncoding(dataSetMetadata.getEncoding());
            // update limit
            final Optional<Long> newLimit = dataSetMetadata.getContent().getLimit();
            newLimit.ifPresent(limit -> metadataForUpdate.getContent().setLimit(limit));
            // Validate that the new data set metadata and removes the draft status
            final String formatFamilyId = dataSetMetadata.getContent().getFormatFamilyId();
            if (formatFamilyFactory.hasFormatFamily(formatFamilyId)) {
                FormatFamily format = formatFamilyFactory.getFormatFamily(formatFamilyId);
                try {
                    DraftValidator draftValidator = format.getDraftValidator();
                    DraftValidator.Result result = draftValidator.validate(dataSetMetadata);
                    if (result.isDraft()) {
                        // This is not an exception case: data set may remain a draft after update (although rather
                        // unusual)
                        LOG.warn("Data set #{} is still a draft after update.", dataSetId);
                        // Early return: nothing is saved and no update event is published for a still-draft data set.
                        return;
                    }
                    // Data set metadata to update is no longer a draft
                    metadataForUpdate.setDraft(false);
                } catch (UnsupportedOperationException e) {
                    // no need to validate draft here
                }
            }
            // update schema
            formatAnalyzer.update(original, metadataForUpdate);
            // save the result
            metadataForUpdate.getLifecycle().setInProgress(true);
            metadataForUpdate.getContent().setNbRecords(0);
            dataSetMetadataRepository.save(metadataForUpdate);
            // Asks for a in depth schema analysis (for column type information).
            analyzeDataSet(dataSetId, singletonList(FormatAnalysis.class));
        } catch (TDPException e) {
            // Domain errors propagate untouched; anything else is wrapped below.
            throw e;
        } catch (Exception e) {
            throw new TDPException(UNABLE_TO_CREATE_OR_UPDATE_DATASET, e);
        }
    } finally {
        lock.unlock();
    }
    publisher.publishEvent(new DatasetUpdatedEvent(dataSetMetadata));
}
Aggregations