use of org.molgenis.data.importer.ImportService in project molgenis by molgenis.
the class EmxImportServiceIT method testDoImportAddEmx.
private void testDoImportAddEmx(
    File file, Map<String, Integer> entityCountMap, Set<String> addedEntityTypes, Runnable entityValidationMethod) {
  // Wrap the EMX file in a repository collection and resolve the matching import service
  FileRepositoryCollection repoCollection =
      fileRepositoryCollectionFactory.createFileRepositoryCollection(file);
  ImportService importService = importServiceFactory.getImportService(file, repoCollection);
  // Import in ADD mode into the default package, then verify the report and the imported data
  EntityImportReport importReport = importService.doImport(repoCollection, ADD, PACKAGE_DEFAULT);
  validateImportReport(importReport, entityCountMap, addedEntityTypes);
  entityValidationMethod.run();
}
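A hypothetical invocation of this helper might look as follows; the file path, expected counts, entity type names, and the validation method are illustrative placeholders, not taken from the MOLGENIS test suite:
// Hypothetical call; all argument values below are made up for illustration
File emxFile = getFile("/xls/example_datatypes.xlsx");
testDoImportAddEmx(
    emxFile,
    ImmutableMap.of("example_TypeTest", 38),
    ImmutableSet.of("example_TypeTest"),
    this::verifyExampleDatatypes);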
use of org.molgenis.data.importer.ImportService in project molgenis by molgenis.
the class OntologyImportServiceIT method testDoImportObo.
private void testDoImportObo() {
  String fileName = "ontology-small.obo.zip";
  File file = getFile("/obo/" + fileName);
  // Wrap the OBO archive in a repository collection and resolve the matching import service
  FileRepositoryCollection repoCollection =
      fileRepositoryCollectionFactory.createFileRepositoryCollection(file);
  ImportService importService = importServiceFactory.getImportService(file, repoCollection);
  EntityImportReport importReport = importService.doImport(repoCollection, ADD, PACKAGE_DEFAULT);
  // Expect the ontology entity types to be filled with the counts from the small test ontology
  validateImportReport(
      importReport,
      ImmutableMap.of(
          "sys_ont_OntologyTermDynamicAnnotation", 0,
          "sys_ont_OntologyTermSynonym", 5,
          "sys_ont_OntologyTermNodePath", 5,
          "sys_ont_Ontology", 1,
          "sys_ont_OntologyTerm", 5),
      emptySet());
  // Verify the import as system, as we need write permissions on sys tables to carry out the verification
  runAsSystem(this::verifyOboAsSystem);
}
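The verifyOboAsSystem helper is not shown on this page; a minimal sketch of the kind of check it could run (the dataService field, QueryImpl, and the TestNG-style assertEquals are assumptions) might be:
// Hypothetical verification sketch; not the actual MOLGENIS test code
private void verifyOboAsSystem() {
  // Expect the five ontology terms reported by the import above to be queryable
  long termCount = dataService.count("sys_ont_OntologyTerm", new QueryImpl<>());
  assertEquals(termCount, 5);
}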
use of org.molgenis.data.importer.ImportService in project molgenis by molgenis.
the class AmazonBucketIngester method ingest.
public FileMeta ingest(
    String jobExecutionID, String targetEntityTypeName, String bucket, String key, String extension,
    String accessKey, String secretKey, String region, boolean isExpression, Progress progress) {
  FileMeta fileMeta;
  try {
    progress.setProgressMax(3);
    progress.progress(0, "Connection to Amazon Bucket with accessKey '" + accessKey + "'");
    AmazonS3 client = amazonBucketClient.getClient(accessKey, secretKey, region);
    progress.progress(1, "downloading...");
    File file =
        amazonBucketClient.downloadFile(
            client, fileStore, jobExecutionID, bucket, key, extension, isExpression, targetEntityTypeName);
    // When importing into a specific entity type, an Excel file must contain exactly one sheet,
    // which is renamed to the target entity type so the importer maps it correctly
    if (targetEntityTypeName != null && ExcelUtils.isExcelFile(file.getName())) {
      if (ExcelUtils.getNumberOfSheets(file) == 1) {
        ExcelUtils.renameSheet(targetEntityTypeName, file, 0);
      } else {
        throw new MolgenisDataException(
            "Amazon Bucket imports to a specified entityType are only possible with CSV files or Excel files with one sheet");
      }
    }
    progress.progress(2, "Importing...");
    ImportService importService = importServiceFactory.getImportService(file.getName());
    // Copy the download to a file named after the target entity type: the file name
    // determines the name of the imported entity type
    File renamed =
        new File(String.format("%s%s%s.%s", file.getParent(), File.separatorChar, targetEntityTypeName, extension));
    Files.copy(file.toPath(), renamed.toPath(), StandardCopyOption.REPLACE_EXISTING);
    RepositoryCollection repositoryCollection =
        fileRepositoryCollectionFactory.createFileRepositoryCollection(renamed);
    EntityImportReport report =
        importService.doImport(repositoryCollection, DatabaseAction.ADD_UPDATE_EXISTING, "base");
    progress.status("Download and import from Amazon Bucket done.");
    progress.progress(3, "Successfully imported " + report.getNrImportedEntitiesMap().keySet().toString() + " entities.");
    fileMeta = createFileMeta(jobExecutionID, file);
  } catch (Exception e) {
    throw new MolgenisDataException(e);
  }
  return fileMeta;
}
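A hypothetical call site could pass arguments like the following; the bucket, key, credentials, and entity type name are placeholders, not values from the MOLGENIS codebase:
// Hypothetical invocation; all argument values are placeholders
FileMeta meta =
    amazonBucketIngester.ingest(
        "job-42",             // jobExecutionID
        "my_target_entity",   // targetEntityTypeName
        "my-bucket",          // bucket
        "data/patients.xlsx", // key
        "xlsx",               // extension
        "ACCESS_KEY",         // accessKey
        "SECRET_KEY",         // secretKey
        "eu-west-1",          // region
        false,                // isExpression
        progress);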
use of org.molgenis.data.importer.ImportService in project molgenis by molgenis.
the class FileIngester method ingest.
/**
 * Imports a CSV file defined in the fileIngest entity
 *
 * @see FileIngestJobExecutionMetaData
 */
public FileMeta ingest(String entityTypeId, String url, String loader, String jobExecutionID, Progress progress) {
  // Only the CSV loader is supported
  if (!"CSV".equals(loader)) {
    throw new FileIngestException("Unknown loader '" + loader + "'");
  }
  progress.setProgressMax(2);
  progress.progress(0, "Downloading url '" + url + "'");
  File file = fileStoreDownload.downloadFile(url, jobExecutionID, entityTypeId + ".csv");
  progress.progress(1, "Importing...");
  FileRepositoryCollection repoCollection =
      fileRepositoryCollectionFactory.createFileRepositoryCollection(file);
  ImportService importService = importServiceFactory.getImportService(file, repoCollection);
  EntityImportReport report = importService.doImport(repoCollection, ADD_UPDATE_EXISTING, PACKAGE_DEFAULT);
  progress.status("Ingestion of url '" + url + "' done.");
  // The report may not contain an entry for the entity type when nothing was imported
  Integer count = report.getNrImportedEntitiesMap().get(entityTypeId);
  count = count != null ? count : 0;
  progress.progress(2, "Successfully imported " + count + " " + entityTypeId + " entities.");
  // Attach the downloaded file to the job execution and persist its metadata
  FileMeta fileMeta = createFileMeta(jobExecutionID, file);
  FileIngestJobExecution fileIngestJobExecution = (FileIngestJobExecution) progress.getJobExecution();
  fileIngestJobExecution.setFile(fileMeta);
  dataService.add(FILE_META, fileMeta);
  return fileMeta;
}
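A hypothetical call could look like this; the URL, entity type ID, and job ID are placeholders, not values from the MOLGENIS codebase:
// Hypothetical invocation; all argument values are placeholders
FileMeta meta =
    fileIngester.ingest(
        "my_entity",                       // entityTypeId, also used as the CSV file name
        "https://example.org/data/my.csv", // url to download
        "CSV",                             // loader; anything else throws FileIngestException
        "job-7",                           // jobExecutionID
        progress);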