Use of com.odysseusinc.arachne.portal.model.DataReference in the OHDSI project ArachneCentralAPI: class BaseAnalysisController, method addCommonEntityToAnalysis.
@ApiOperation("Add common entity to analysis")
@RequestMapping(value = "/api/v1/analysis-management/analyses/{analysisId}/entities", method = POST)
public JsonResult addCommonEntityToAnalysis(@PathVariable("analysisId") Long analysisId, @RequestBody @Valid DataReferenceDTO entityReference, @RequestParam(value = "type", required = false, defaultValue = "COHORT") CommonAnalysisType analysisType, Principal principal) throws NotExistException, JMSException, IOException, PermissionDeniedException, URISyntaxException {

    // Imports the files of a common entity (cohort, estimation, …) from its origin
    // data node into the given analysis. The lock set acts as a per-analysis mutex:
    // add() returns false when an import for this analysis is already running.
    if (!analysisModificationLock.add(analysisId)) {
        throw new ValidationRuntimeException("Analysis import rejected", Collections.singletonMap(entityReference.getEntityGuid(), Collections.singletonList("Another import into this analysis is in progress")));
    }
    try {
        LOGGER.debug("Started import into analysis {}", analysisId);
        final IUser currentUser = getUser(principal);
        final DataNode originNode = dataNodeService.getById(entityReference.getDataNodeId());
        final T targetAnalysis = analysisService.getById(analysisId);
        // Register (or refresh) the link between the entity GUID and its data node.
        final DataReference reference = dataReferenceService.addOrUpdate(entityReference.getEntityGuid(), originNode);
        final List<MultipartFile> importedFiles = getEntityFiles(entityReference, originNode, analysisType);
        final String description = doAddCommonEntityToAnalysis(targetAnalysis, reference, currentUser, analysisType, importedFiles);
        return new JsonResult(NO_ERROR, description);
    } finally {
        // Always release the per-analysis lock, whether the import succeeded or not.
        analysisModificationLock.remove(analysisId);
        LOGGER.debug("Completed import into analysis {}", analysisId);
    }
}
Use of com.odysseusinc.arachne.portal.model.DataReference in the OHDSI project ArachneCentralAPI: class BaseAnalysisController, method doAddCommonEntityToAnalysis.
/**
 * Persists the files of an imported common entity into the analysis. For COHORT
 * imports it additionally renders the generic (MS SQL Server) cohort SQL, translates
 * it into each supported dialect, and stores all results as a single zip archive.
 *
 * <p>Fix: this method was declared {@code void}, but both visible call sites assign
 * its result to a {@code String description} — it now returns that description
 * (the generated archive file name for COHORT imports).
 *
 * @param analysis      target analysis receiving the files
 * @param dataReference link between the imported entity and its origin data node
 * @param user          user performing the import (recorded as file owner)
 * @param analysisType  kind of entity being imported; COHORT triggers SQL translation
 * @param files         files received from the data node
 * @return the generated archive file name for COHORT imports, {@code null} otherwise
 * @throws IOException if the archive cannot be written or saved
 */
protected String doAddCommonEntityToAnalysis(T analysis, DataReference dataReference, IUser user, CommonAnalysisType analysisType, List<MultipartFile> files) throws IOException {

    // Save every received file; for cohort imports the OHDSI JSON definition is skipped here.
    files.stream().filter(f -> !CommonAnalysisType.COHORT.equals(analysisType) || !f.getName().endsWith(CommonFileUtils.OHDSI_JSON_EXT)).forEach(f -> {
        try {
            analysisService.saveFile(f, user, analysis, f.getName(), detectExecutable(analysisType, f), dataReference);
        } catch (IOException e) {
            // Best-effort per file: a failed save is logged but does not abort the rest.
            LOGGER.error("Failed to save file", e);
        }
    });

    if (!analysisType.equals(CommonAnalysisType.COHORT)) {
        // NOTE(review): no archive is produced for non-cohort imports, so there is no
        // generated name to report — confirm the expected description text with callers.
        return null;
    }

    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    // One-element holder so the lambda below can accumulate the archive base name.
    final String[] archiveName = { CommonAnalysisType.COHORT.getTitle() };
    try (final ZipOutputStream zos = new ZipOutputStream(out)) {
        files.forEach(file -> {
            try {
                if (file.getName().endsWith(CommonFileUtils.OHDSI_SQL_EXT)) {
                    String statement = org.apache.commons.io.IOUtils.toString(file.getInputStream(), "UTF-8");
                    String renderedSql = SqlRender.renderSql(statement, null, null);
                    DBMSType[] dbTypes = new DBMSType[] { DBMSType.POSTGRESQL, DBMSType.ORACLE, DBMSType.MS_SQL_SERVER, DBMSType.REDSHIFT, DBMSType.PDW };
                    String baseName = FilenameUtils.getBaseName(file.getOriginalFilename());
                    String extension = FilenameUtils.getExtension(file.getOriginalFilename());
                    for (final DBMSType dialect : dbTypes) {
                        // The generic SQL is authored against MS SQL Server; translate per dialect.
                        final String sql = SqlTranslate.translateSql(renderedSql, DBMSType.MS_SQL_SERVER.getOhdsiDB(), dialect.getOhdsiDB());
                        final String fileName = baseName + "." + dialect.getLabel().replaceAll(" ", "-") + "." + extension;
                        ZipUtil.addZipEntry(zos, fileName, new ByteArrayInputStream(sql.getBytes("UTF-8")));
                    }
                    // Append each distinct cohort name once to the archive file name.
                    final String shortBaseName = baseName.replaceAll("\\.ohdsi", "");
                    if (!archiveName[0].contains(shortBaseName)) {
                        archiveName[0] += "_" + shortBaseName;
                    }
                } else {
                    // Non-SQL files go into the archive unchanged.
                    ZipUtil.addZipEntry(zos, file.getName(), file.getInputStream());
                }
            } catch (IOException e) {
                LOGGER.error("Failed to add file to archive", e);
                throw new RuntimeIOException(e.getMessage(), e);
            }
        });
    }
    final String fileName = archiveName[0] + ".zip";
    final MultipartFile sqlArchive = new MockMultipartFile(fileName, fileName, "application/zip", out.toByteArray());
    // The archive itself is never executable.
    analysisService.saveFile(sqlArchive, user, analysis, fileName, false, dataReference);
    return fileName;
}
Use of com.odysseusinc.arachne.portal.model.DataReference in the OHDSI project ArachneCentralAPI: class AnalysisFilesSavingServiceImpl, method saveFiles.
@PreAuthorize("hasPermission(#analysis, " + "T(com.odysseusinc.arachne.portal.security.ArachnePermission).UPLOAD_ANALYSIS_FILES)")
protected List<AnalysisFile> saveFiles(List<MultipartFile> multipartFiles, IUser user, A analysis, DataReference dataReference, BiPredicate<String, CommonAnalysisType> checkFileExecutabilityPredicate) {

    // Saves the given uploads to the analysis, skipping OHDSI JSON cohort definitions
    // (for cohort analyses) and the analysis-info descriptor. Duplicate-name failures
    // are collected and reported together as a single validation error.
    final List<AnalysisFile> savedFiles = new ArrayList<>();
    final List<String> errorFileMessages = new ArrayList<>();
    for (final MultipartFile file : multipartFiles) {
        final String name = file.getName();
        final boolean skippedCohortJson = CommonAnalysisType.COHORT.equals(analysis.getType()) && name.endsWith(OHDSI_JSON_EXT);
        if (skippedCohortJson || name.startsWith(ANALYSIS_INFO_FILE_DESCRIPTION)) {
            continue;
        }
        try {
            // The predicate decides executability from the original file name and analysis type.
            final boolean executable = checkFileExecutabilityPredicate.test(file.getOriginalFilename(), analysis.getType());
            savedFiles.add(saveFile(file, user, analysis, name, executable, dataReference));
        } catch (AlreadyExistException e) {
            errorFileMessages.add(e.getMessage());
        }
    }
    if (!errorFileMessages.isEmpty()) {
        throw new ValidationRuntimeException("Failed to save files", ImmutableMap.of(dataReference.getGuid(), errorFileMessages));
    }
    return savedFiles;
}
Use of com.odysseusinc.arachne.portal.model.DataReference in the OHDSI project ArachneCentralAPI: class AnalysisFilesSavingServiceImpl, method saveCohortAnalysisArchive.
@PreAuthorize("hasPermission(#analysis, " + "T(com.odysseusinc.arachne.portal.security.ArachnePermission).UPLOAD_ANALYSIS_FILES)")
public void saveCohortAnalysisArchive(A analysis, DataReference dataReference, IUser user, List<MultipartFile> files) {

    // Builds and stores a zip archive for a cohort analysis: the mandatory generic SQL
    // file is expanded per dialect, and every other file (except the analysis-info
    // descriptor) is added to the archive unchanged.
    final MultipartFile genericSqlFile = files.stream()
            .filter(file -> file.getName().endsWith(OHDSI_SQL_EXT))
            .findAny()
            .orElseThrow(() -> new ArachneSystemRuntimeException(String.format("There is no sql file for %s analysis.", analysis.getId())));

    final Collection<MultipartFile> filesForArchive = files.stream()
            .filter(file -> ObjectUtils.notEqual(file, genericSqlFile))
            .filter(file -> !StringUtils.equals(ANALYSIS_INFO_FILE_DESCRIPTION, file.getName()))
            .collect(Collectors.toList());

    final ByteArrayOutputStream archiveBytes = new ByteArrayOutputStream();
    try (ZipOutputStream zip = new ZipOutputStream(archiveBytes)) {
        generateFilesForEachDialectAndAddToZip(zip, genericSqlFile);
        ZipUtil.addZipEntries(zip, filesForArchive);
    } catch (IOException e) {
        log.error("Failed to create zip file for {} analysis", analysis.getId(), e);
        throw new ArachneSystemRuntimeException(e);
    }

    final String fileName = AnalysisArchiveUtils.getArchiveFileName(CommonAnalysisType.COHORT, getAnalysisName(genericSqlFile));
    final MultipartFile sqlArchive = new MockMultipartFile(fileName, fileName, "application/zip", archiveBytes.toByteArray());
    try {
        // The archive is stored as a regular, non-executable analysis file.
        saveFile(sqlArchive, user, analysis, fileName, false, dataReference);
    } catch (Exception e) {
        log.error("Failed to save zip file for {} analysis", analysis.getId(), e);
        throw new ArachneSystemRuntimeException(e);
    }
}
Use of com.odysseusinc.arachne.portal.model.DataReference in the OHDSI project ArachneCentralAPI: class BaseAnalysisController, method updateCommonEntityInAnalysis.
@ApiOperation("update common entity in analysis")
@RequestMapping(value = "/api/v1/analysis-management/analyses/{analysisId}/entities/{fileUuid}", method = PUT)
public JsonResult updateCommonEntityInAnalysis(@PathVariable("analysisId") Long analysisId, @PathVariable("fileUuid") String fileUuid, @RequestParam(value = "type", required = false, defaultValue = "COHORT") CommonAnalysisType analysisType, Principal principal) throws IOException, JMSException, PermissionDeniedException, URISyntaxException {

    // Re-imports a previously-imported entity: fetches a fresh copy of its files from
    // the origin data node, removes all files tied to the stored data reference, then
    // re-runs the import.
    final IUser requester = getUser(principal);
    final AnalysisFile analysisFile = analysisService.getAnalysisFile(analysisId, fileUuid);
    @SuppressWarnings("unchecked")
    final T analysis = (T) analysisFile.getAnalysis();
    final DataReference dataReference = analysisFile.getDataReference();
    // Rebuild the reference DTO from the stored link so the origin node can be re-queried.
    final DataReferenceDTO entityReference = new DataReferenceDTO(dataReference.getDataNode().getId(), dataReference.getGuid());
    final List<MultipartFile> refreshedFiles = getEntityFiles(entityReference, dataReference.getDataNode(), analysisType);
    // Drop every file previously imported under this reference before re-importing.
    analysisService.findAnalysisFilesByDataReference(analysis, dataReference).forEach(existing -> {
        analysisService.deleteAnalysisFile(analysis, existing);
        analysis.getFiles().remove(existing);
    });
    return new JsonResult(NO_ERROR, doAddCommonEntityToAnalysis(analysis, dataReference, requester, analysisType, refreshedFiles));
}
Aggregations