Use of org.springframework.beans.factory.annotation.Value in project data-prep by Talend:
the class DataSetAPI, method getPreparation.
/**
 * Lists the preparations that are based on a given dataset.
 *
 * @param id the wanted dataset.
 * @return the list of preparation using the dataset
 */
@RequestMapping(value = "/api/datasets/{id}/preparations", method = GET, produces = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Get the list of preparation using a dataset by the dataset id.", produces = APPLICATION_JSON_VALUE, notes = "Get the list of preparation using a dataset by the dataset id.")
@Timed
public List<DatasetDetailsDTO.Preparation> getPreparation(@ApiParam(value = "Id of the data set to get") @PathVariable(value = "id") String id) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Requesting preparations using dataset #{} (pool: {})...", id, getConnectionStats());
    }
    try {
        // Resolve the dataset details first so the search command uses its canonical id.
        final DatasetDetailsDTO datasetDetails = datasetClient.getDataSetDetails(id);
        final PreparationSearchByDataSetId searchCommand = getCommand(PreparationSearchByDataSetId.class, datasetDetails.getId());
        final List<DatasetDetailsDTO.Preparation> result = new ArrayList<>();
        // Keep only preparations that actually have steps, then project each one onto the DTO.
        toStream(PreparationDTO.class, mapper, searchCommand) //
                .filter(preparation -> preparation.getSteps() != null) //
                .map(preparation -> new DatasetDetailsDTO.Preparation(preparation.getId(), preparation.getName(),
                        (long) preparation.getSteps().size(), preparation.getLastModificationDate())) //
                .forEach(result::add);
        return result;
    } finally {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Request preparations using dataset #{} (pool: {}) done.", id, getConnectionStats());
        }
    }
}
Use of org.springframework.beans.factory.annotation.Value in project data-prep by Talend:
the class XlsSchemaParser, method guessColumnType.
/**
 * Guesses the type of a column from the types previously guessed for each of its cells.
 * <p>
 * Only the rows below the detected header are considered. The most frequent guessed type
 * wins; when several types tie for the maximum occurrence count, ANY is returned.
 *
 * @param colId the column id (used for logging only).
 * @param columnRows all rows with previously guessed type: key=row number, value=guessed type.
 * @param averageHeaderSize number of leading rows considered part of the header, and therefore skipped.
 * @return the dominant {@link Type} of the column, or ANY when there is no data or no clear winner.
 */
private Type guessColumnType(Integer colId, SortedMap<Integer, String> columnRows, int averageHeaderSize) {
    // Count the occurrences of each guessed type, ignoring the header rows.
    Map<String, Long> perTypeNumber = columnRows.tailMap(averageHeaderSize).values().stream().collect(Collectors.groupingBy(w -> w, Collectors.counting()));
    OptionalLong maxOccurrence = perTypeNumber.values().stream().mapToLong(Long::longValue).max();
    if (!maxOccurrence.isPresent()) {
        // No data rows at all: nothing to base a guess on.
        return ANY;
    }
    final long max = maxOccurrence.getAsLong();
    // Collect every type that reaches the maximum occurrence count.
    List<String> duplicatedMax = perTypeNumber.entrySet().stream() //
            .filter(entry -> entry.getValue() == max) //
            .map(Map.Entry::getKey) //
            .collect(Collectors.toList());
    String guessedType;
    if (duplicatedMax.size() == 1) {
        guessedType = duplicatedMax.get(0);
    } else {
        // As we have more than one dominant type we guess ANY.
        guessedType = ANY.getName();
    }
    LOGGER.debug("guessed type for column #{} is {}", colId, guessedType);
    return Type.get(guessedType);
}
Use of org.springframework.beans.factory.annotation.Value in project data-prep by Talend:
the class DataSetService, method preview.
/**
 * Returns a preview of the data set content for given id (first 100 rows). Service might return
 * {@link org.apache.http.HttpStatus#SC_ACCEPTED} if the data set exists but analysis is not yet fully
 * completed so content is not yet ready to be served.
 *
 * @param metadata If <code>true</code>, includes data set metadata information.
 * @param sheetName the sheet name to preview
 * @param dataSetId A data set id.
 * @return the preview content (at most 100 records), or an empty data set when the id is unknown,
 * the requested sheet does not exist, or the data set is no longer a draft.
 */
@RequestMapping(value = "/datasets/{id}/preview", method = RequestMethod.GET)
@ApiOperation(value = "Get a data preview set by id", notes = "Get a data set preview content based on provided id. Not valid or non existing data set id returns empty content. Data set not in draft status will return a redirect 301")
@Timed
@ResponseBody
public DataSet preview(@RequestParam(defaultValue = "true") @ApiParam(name = "metadata", value = "Include metadata information in the response") boolean metadata, @RequestParam(defaultValue = "") @ApiParam(name = "sheetName", value = "Sheet name to preview") String sheetName, @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the requested data set") String dataSetId) {
    DataSetMetadata dataSetMetadata = dataSetMetadataRepository.get(dataSetId);
    if (dataSetMetadata == null) {
        // No data set, returns empty content.
        HttpResponseContext.status(HttpStatus.NO_CONTENT);
        return DataSet.empty();
    }
    if (!dataSetMetadata.isDraft()) {
        // Dataset is no longer a draft so a preview doesn't make sense:
        // permanently redirect to the regular "get data set content" operation.
        HttpResponseContext.status(HttpStatus.MOVED_PERMANENTLY);
        HttpResponseContext.header("Location", "/datasets/" + dataSetId + "/content");
        return DataSet.empty();
    }
    if (StringUtils.isNotEmpty(sheetName)) {
        // A sheet name explicitly requested by the caller overrides the stored one.
        dataSetMetadata.setSheetName(sheetName);
    }
    // Take care of previous data without schema parser result.
    if (dataSetMetadata.getSchemaParserResult() != null) {
        // Sheet not yet set correctly so use the first one.
        if (StringUtils.isEmpty(dataSetMetadata.getSheetName())) {
            String theSheetName = dataSetMetadata.getSchemaParserResult().getSheetContents().get(0).getName();
            LOG.debug("preview for dataSetMetadata: {} with sheetName: {}", dataSetId, theSheetName);
            dataSetMetadata.setSheetName(theSheetName);
        }
        String theSheetName = dataSetMetadata.getSheetName();
        Optional<Schema.SheetContent> sheetContentFound = dataSetMetadata.getSchemaParserResult().getSheetContents().stream().filter(//
        sheetContent -> theSheetName.equals(sheetContent.getName())).findFirst();
        if (!sheetContentFound.isPresent()) {
            // No sheet found, returns empty content.
            HttpResponseContext.status(HttpStatus.NO_CONTENT);
            return DataSet.empty();
        }
        // Expose the columns of the selected sheet through the row metadata.
        List<ColumnMetadata> columnMetadatas = sheetContentFound.get().getColumnMetadatas();
        if (dataSetMetadata.getRowMetadata() == null) {
            dataSetMetadata.setRowMetadata(new RowMetadata(emptyList()));
        }
        dataSetMetadata.getRowMetadata().setColumns(columnMetadatas);
    } else {
        LOG.warn("dataset#{} has draft status but no SchemaParserResult", dataSetId);
    }
    // Build the result, limited to the first 100 records.
    DataSet dataSet = new DataSet();
    if (metadata) {
        dataSet.setMetadata(conversionService.convert(dataSetMetadata, UserDataSetMetadata.class));
    }
    dataSet.setRecords(contentStore.stream(dataSetMetadata).limit(100));
    return dataSet;
}
Use of org.springframework.beans.factory.annotation.Value in project topcom-cloud by 545314690:
the class DataXUtil, method setDataxLogPath.
/**
 * Injects the DataX log directory from configuration and makes sure the directory exists.
 * <p>
 * Fix: the previous placeholder default contained a trailing space
 * ({@code /home/topcom/dataxlog/ }) which Spring does not trim, so the default
 * path literally ended with a space character.
 *
 * @param dataxLogPath the log directory path (defaults to {@code /home/topcom/dataxlog/}).
 */
@Value("${dataxLogPath:/home/topcom/dataxlog/}")
public void setDataxLogPath(String dataxLogPath) {
    DataXUtil.dataxLogPath = dataxLogPath;
    File parentLogPath = new File(DataXUtil.dataxLogPath);
    if (!parentLogPath.exists()) {
        // Best effort: mkdirs() result is deliberately not checked — if creation fails,
        // later log writes will fail with a more explicit I/O error.
        parentLogPath.mkdirs();
    }
}
Use of org.springframework.beans.factory.annotation.Value in project ArachneCentralAPI by OHDSI:
the class BaseAnalysisController, method doAddCommonEntityToAnalysis.
/**
 * Saves the uploaded files into the analysis and, for COHORT analyses, additionally builds
 * a zip archive in which each OHDSI SQL file is translated into several SQL dialects.
 *
 * @param analysis the analysis the files are attached to.
 * @param dataReference reference to the data the files originate from.
 * @param user the user performing the upload.
 * @param analysisType the analysis type; COHORT triggers the SQL translation step.
 * @param files the uploaded files.
 * @throws IOException declared, though per-file save errors are only logged below.
 */
protected void doAddCommonEntityToAnalysis(T analysis, DataReference dataReference, IUser user, CommonAnalysisType analysisType, List<MultipartFile> files) throws IOException {
    // Save every file, except OHDSI JSON files when the analysis is a COHORT.
    // NOTE(review): MultipartFile.getName() returns the form-field name, not the uploaded
    // file name — getOriginalFilename() was presumably intended here (and on the two
    // getName() calls further down); confirm against the callers before changing.
    files.stream().filter(f -> !CommonAnalysisType.COHORT.equals(analysisType) || !f.getName().endsWith(CommonFileUtils.OHDSI_JSON_EXT)).forEach(f -> {
        try {
            analysisService.saveFile(f, user, analysis, f.getName(), detectExecutable(analysisType, f), dataReference);
        } catch (IOException e) {
            // Best effort: a single failed file does not abort the other saves.
            LOGGER.error("Failed to save file", e);
        }
    });
    if (analysisType.equals(CommonAnalysisType.COHORT)) {
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Mutable holder so the lambda below can accumulate the generated archive name.
        class StringContainer {
            String value = CommonAnalysisType.COHORT.getTitle();
        }
        final StringContainer generatedFileName = new StringContainer();
        try (final ZipOutputStream zos = new ZipOutputStream(out)) {
            files.forEach(file -> {
                try {
                    if (file.getName().endsWith(CommonFileUtils.OHDSI_SQL_EXT)) {
                        // Render the OHDSI SQL and translate it into each supported dialect,
                        // adding one zip entry per dialect.
                        String statement = org.apache.commons.io.IOUtils.toString(file.getInputStream(), "UTF-8");
                        String renderedSql = SqlRender.renderSql(statement, null, null);
                        DBMSType[] dbTypes = new DBMSType[] { DBMSType.POSTGRESQL, DBMSType.ORACLE, DBMSType.MS_SQL_SERVER, DBMSType.REDSHIFT, DBMSType.PDW };
                        String baseName = FilenameUtils.getBaseName(file.getOriginalFilename());
                        String extension = FilenameUtils.getExtension(file.getOriginalFilename());
                        for (final DBMSType dialect : dbTypes) {
                            // Source statements are translated from the MS SQL Server dialect.
                            final String sql = SqlTranslate.translateSql(renderedSql, DBMSType.MS_SQL_SERVER.getOhdsiDB(), dialect.getOhdsiDB());
                            final String fileName = baseName + "." + dialect.getLabel().replaceAll(" ", "-") + "." + extension;
                            ZipUtil.addZipEntry(zos, fileName, new ByteArrayInputStream(sql.getBytes("UTF-8")));
                        }
                        // Append each distinct base name (minus the ".ohdsi" marker) to the archive name.
                        final String shortBaseName = baseName.replaceAll("\\.ohdsi", "");
                        if (!generatedFileName.value.contains(shortBaseName)) {
                            generatedFileName.value += "_" + shortBaseName;
                        }
                    } else {
                        // Non-SQL files are copied into the archive unchanged.
                        String fileName = file.getName();
                        ZipUtil.addZipEntry(zos, fileName, file.getInputStream());
                    }
                } catch (IOException e) {
                    // Unlike the save loop above, an archive failure aborts the whole build:
                    // rethrown as unchecked so it can escape the lambda.
                    LOGGER.error("Failed to add file to archive", e);
                    throw new RuntimeIOException(e.getMessage(), e);
                }
            });
        }
        // Store the generated multi-dialect SQL archive alongside the other analysis files.
        String fileName = generatedFileName.value + ".zip";
        final MultipartFile sqlArchive = new MockMultipartFile(fileName, fileName, "application/zip", out.toByteArray());
        analysisService.saveFile(sqlArchive, user, analysis, fileName, false, dataReference);
    }
}
Aggregations