Use of org.molgenis.data.meta.model.EntityType in project molgenis by molgenis.
The class ElasticsearchService, method rebuildIndex.
@Override
public void rebuildIndex(Repository<? extends Entity> repository) {
    EntityType entityType = repository.getEntityType();
    if (hasIndex(entityType)) {
        deleteIndex(entityType);
    }
    createIndex(entityType);
    repository.forEachBatched(createFetchForReindexing(entityType),
            entities -> index(entityType, entities.stream()), BATCH_SIZE);
}
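For orientation, a minimal usage sketch (not part of the project source): rebuildIndex drops any existing index for the repository's EntityType, recreates it, and re-indexes the repository contents in batches. The caller below and its injected fields are assumptions for illustration.

// Hypothetical caller: re-index one repository by entity type id.
// 'dataService' and 'elasticsearchService' are assumed injected dependencies.
public void reindex(String entityTypeId) {
    Repository<Entity> repository = dataService.getRepository(entityTypeId);
    elasticsearchService.rebuildIndex(repository);
}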
Use of org.molgenis.data.meta.model.EntityType in project molgenis by molgenis.
The class QueryGenerator, method createQueryClauseIn.
private QueryBuilder createQueryClauseIn(QueryRule queryRule, EntityType entityType) {
    List<Attribute> attributePath = getAttributePath(queryRule.getField(), entityType);
    Attribute attr = attributePath.get(attributePath.size() - 1);
    Object queryRuleValue = queryRule.getValue();
    if (queryRuleValue == null) {
        throw new MolgenisQueryException("Query value cannot be null");
    }
    if (!(queryRuleValue instanceof Iterable<?>)) {
        throw new MolgenisQueryException(
                "Query value must be a Iterable instead of [" + queryRuleValue.getClass().getSimpleName() + "]");
    }
    Object[] queryValues = StreamSupport.stream(((Iterable<?>) queryRuleValue).spliterator(), false)
            .map(aQueryRuleValue -> getQueryValue(attr, aQueryRuleValue))
            .toArray();
    QueryBuilder queryBuilder;
    String fieldName = getQueryFieldName(attr);
    AttributeType dataType = attr.getDataType();
    switch (dataType) {
        case BOOL:
        case DATE:
        case DATE_TIME:
        case DECIMAL:
        case EMAIL:
        case ENUM:
        case HTML:
        case HYPERLINK:
        case INT:
        case LONG:
        case SCRIPT:
        case STRING:
        case TEXT:
            // scalar types: match the values against the attribute's own field,
            // using the not-analyzed subfield where available
            if (useNotAnalyzedField(attr)) {
                fieldName = fieldName + '.' + FIELD_NOT_ANALYZED;
            }
            // note: inFilter expects array, not iterable
            queryBuilder = QueryBuilders.termsQuery(fieldName, queryValues);
            queryBuilder = nestedQueryBuilder(attributePath, queryBuilder);
            break;
        case CATEGORICAL:
        case CATEGORICAL_MREF:
        case MREF:
        case XREF:
        case FILE:
        case ONE_TO_MANY:
            // reference types: match against the referenced entity's id attribute via a nested query
            if (attributePath.size() > 1) {
                throw new UnsupportedOperationException("Can not filter on references deeper than 1.");
            }
            Attribute refIdAttr = attr.getRefEntity().getIdAttribute();
            List<Attribute> refAttributePath = concat(attributePath.stream(), of(refIdAttr)).collect(toList());
            String indexFieldName = getQueryFieldName(refAttributePath);
            if (useNotAnalyzedField(refIdAttr)) {
                indexFieldName = indexFieldName + '.' + FIELD_NOT_ANALYZED;
            }
            queryBuilder = QueryBuilders.termsQuery(indexFieldName, queryValues);
            queryBuilder = QueryBuilders.nestedQuery(fieldName, queryBuilder, ScoreMode.Avg);
            break;
        case COMPOUND:
            throw new MolgenisQueryException(
                    "Illegal data type [" + dataType + "] for operator [" + QueryRule.Operator.IN + "]");
        default:
            throw new UnexpectedEnumException(dataType);
    }
    return QueryBuilders.constantScoreQuery(queryBuilder);
}
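As a hedged illustration (not part of the project source), the scalar branch for an IN rule such as country IN (NL, DE, BE) produces roughly the query builder chain below; the field name and the use of the not-analyzed suffix are assumptions that depend on the index mapping.

// Sketch only: hand-built equivalent of the generated query for an IN clause on a STRING attribute.
QueryBuilder sketch = QueryBuilders.constantScoreQuery(
        QueryBuilders.termsQuery("country." + FIELD_NOT_ANALYZED, "NL", "DE", "BE"));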
Use of org.molgenis.data.meta.model.EntityType in project molgenis by molgenis.
The class SortaController, method createJobExecution.
private SortaJobExecution createJobExecution(Repository<Entity> inputData, String jobName, String ontologyIri) {
    String resultEntityName = idGenerator.generateId();
    SortaJobExecution sortaJobExecution = sortaJobExecutionFactory.create();
    sortaJobExecution.setIdentifier(resultEntityName);
    sortaJobExecution.setName(jobName);
    sortaJobExecution.setUser(userAccountService.getCurrentUser());
    sortaJobExecution.setSourceEntityName(inputData.getName());
    sortaJobExecution.setDeleteUrl(getSortaServiceMenuUrl() + "/delete/" + resultEntityName);
    sortaJobExecution.setResultEntityName(resultEntityName);
    sortaJobExecution.setThreshold(DEFAULT_THRESHOLD);
    sortaJobExecution.setOntologyIri(ontologyIri);
    RunAsSystemAspect.runAsSystem(() -> {
        createInputRepository(inputData);
        createEmptyResultRepository(jobName, resultEntityName, inputData.getEntityType());
        dataService.add(SORTA_JOB_EXECUTION, sortaJobExecution);
    });
    EntityType resultEntityType = entityTypeFactory.create(resultEntityName);
    permissionSystemService.giveUserWriteMetaPermissions(asList(inputData.getEntityType(), resultEntityType));
    return sortaJobExecution;
}
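The run-as-system wrapper is the part of this method that needs elevated rights; a minimal sketch of that pattern in isolation, using only the calls already shown above:

// Sketch: run a privileged DataService operation outside the current user's security context.
RunAsSystemAspect.runAsSystem(() -> {
    // e.g. persist a job execution entity the current user may not be allowed to write directly
    dataService.add(SORTA_JOB_EXECUTION, sortaJobExecution);
});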
Use of org.molgenis.data.meta.model.EntityType in project molgenis by molgenis.
The class SortaController, method createEmptyResultRepository.
private void createEmptyResultRepository(String jobName, String resultEntityName, EntityType sourceMetaData) {
    EntityType resultEntityType = EntityType.newInstance(matchingTaskContentMetaData, DEEP_COPY_ATTRS, attrMetaFactory);
    resultEntityType.setId(resultEntityName);
    resultEntityType.setPackage(null);
    resultEntityType.setAbstract(false);
    resultEntityType.addAttribute(attrMetaFactory.create()
            .setName(INPUT_TERM)
            .setDataType(XREF)
            .setRefEntity(sourceMetaData)
            .setDescription("Reference to the input term")
            .setNillable(false));
    resultEntityType.setLabel(jobName + " output");
    dataService.getMeta().addEntityType(resultEntityType);
}
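For comparison, a minimal sketch of building an EntityType from scratch with the same factories. The id, label, and attribute names are made up for illustration, and marking the id attribute via addAttribute(attr, ROLE_ID) is an assumption; the methods otherwise mirror the calls shown above.

// Hypothetical sketch using the meta model factories shown above.
EntityType bookType = entityTypeFactory.create("example_Book");
bookType.setLabel("Book");
// ROLE_ID marking of the id attribute is an assumption for this sketch
bookType.addAttribute(attrMetaFactory.create().setName("id").setNillable(false), ROLE_ID);
bookType.addAttribute(attrMetaFactory.create().setName("title").setDataType(STRING).setNillable(true));
dataService.getMeta().addEntityType(bookType);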
Use of org.molgenis.data.meta.model.EntityType in project molgenis by molgenis.
The class OneClickImportJobTest, method testGetEntityTypeWithZip.
@Test
public void testGetEntityTypeWithZip()
        throws InvalidFormatException, IOException, URISyntaxException, UnknownFileTypeException, EmptySheetException {
    Progress progress = mock(Progress.class);
    String filename = "simple-valid.zip";
    when(oneClickImporterNamingService.createValidIdFromFileName(filename)).thenReturn("simple_valid");
    File file = loadFile(OneClickImportJobTest.class, "/" + filename);
    when(fileStore.getFile(filename)).thenReturn(file);

    File zipFile1 = loadFile(OneClickImportJobTest.class, "/zip_file_1.csv");
    when(oneClickImporterNamingService.createValidIdFromFileName("zip_file_1.csv")).thenReturn("zip_file_1");
    File zipFile2 = loadFile(OneClickImportJobTest.class, "/zip_file_2.csv");
    when(oneClickImporterNamingService.createValidIdFromFileName("zip_file_2.csv")).thenReturn("zip_file_2");
    File zipFile3 = loadFile(OneClickImportJobTest.class, "/zip_file_3.csv");
    when(oneClickImporterNamingService.createValidIdFromFileName("zip_file_3.csv")).thenReturn("zip_file_3");
    File zipFile4 = loadFile(OneClickImportJobTest.class, "/zip_file_4.csv");
    when(oneClickImporterNamingService.createValidIdFromFileName("zip_file_4.csv")).thenReturn("zip_file_4");

    List<String[]> lines1 = new ArrayList<>();
    lines1.add(new String[] { "name,age", "piet,25" });
    when(csvService.buildLinesFromFile(zipFile1)).thenReturn(lines1);
    List<String[]> lines2 = new ArrayList<>();
    lines2.add(new String[] { "name,age", "klaas,30" });
    when(csvService.buildLinesFromFile(zipFile2)).thenReturn(lines2);
    List<String[]> lines3 = new ArrayList<>();
    lines3.add(new String[] { "name,age", "Jan,35" });
    when(csvService.buildLinesFromFile(zipFile3)).thenReturn(lines3);
    List<String[]> lines4 = new ArrayList<>();
    lines4.add(new String[] { "name,age", "Henk,40" });
    when(csvService.buildLinesFromFile(zipFile4)).thenReturn(lines4);

    DataCollection dataCollection1 = mock(DataCollection.class);
    when(dataCollection1.getName()).thenReturn("zip_file_1");
    when(oneClickImporterService.buildDataCollectionFromCsv("zip_file_1", lines1)).thenReturn(dataCollection1);
    DataCollection dataCollection2 = mock(DataCollection.class);
    when(dataCollection2.getName()).thenReturn("zip_file_2");
    when(oneClickImporterService.buildDataCollectionFromCsv("zip_file_2", lines2)).thenReturn(dataCollection2);
    DataCollection dataCollection3 = mock(DataCollection.class);
    when(dataCollection3.getName()).thenReturn("zip_file_3");
    when(oneClickImporterService.buildDataCollectionFromCsv("zip_file_3", lines3)).thenReturn(dataCollection3);
    DataCollection dataCollection4 = mock(DataCollection.class);
    when(dataCollection4.getName()).thenReturn("zip_file_4");
    when(oneClickImporterService.buildDataCollectionFromCsv("zip_file_4", lines4)).thenReturn(dataCollection4);

    EntityType entityType1 = mock(EntityType.class);
    when(entityService.createEntityType(dataCollection1, "simple_valid")).thenReturn(entityType1);
    EntityType entityType2 = mock(EntityType.class);
    when(entityService.createEntityType(dataCollection2, "simple_valid")).thenReturn(entityType2);
    EntityType entityType3 = mock(EntityType.class);
    when(entityService.createEntityType(dataCollection3, "simple_valid")).thenReturn(entityType3);
    EntityType entityType4 = mock(EntityType.class);
    when(entityService.createEntityType(dataCollection4, "simple_valid")).thenReturn(entityType4);

    oneClickImporterJob = new OneClickImportJob(excelService, csvService, oneClickImporterService,
            oneClickImporterNamingService, entityService, fileStore);
    oneClickImporterJob.getEntityType(progress, filename);

    verify(progress).status("Preparing import");
    verify(csvService).buildLinesFromFile(zipFile1);
    verify(oneClickImporterService).buildDataCollectionFromCsv("zip_file_1", lines1);
    verify(csvService).buildLinesFromFile(zipFile2);
    verify(oneClickImporterService).buildDataCollectionFromCsv("zip_file_2", lines2);
    verify(csvService).buildLinesFromFile(zipFile3);
    verify(oneClickImporterService).buildDataCollectionFromCsv("zip_file_3", lines3);
    verify(csvService).buildLinesFromFile(zipFile4);
    verify(oneClickImporterService).buildDataCollectionFromCsv("zip_file_4", lines4);
    verify(progress).status("Importing [zip_file_1] into package [simple_valid]");
    verify(entityService).createEntityType(dataCollection1, "simple_valid");
    verify(progress).status("Importing [zip_file_2] into package [simple_valid]");
    verify(entityService).createEntityType(dataCollection2, "simple_valid");
    verify(progress).status("Importing [zip_file_3] into package [simple_valid]");
    verify(entityService).createEntityType(dataCollection3, "simple_valid");
    verify(progress).status("Importing [zip_file_4] into package [simple_valid]");
    verify(entityService).createEntityType(dataCollection4, "simple_valid");
}
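The loadFile helper used in this test is not shown on this page; a plausible sketch (the project's real helper may differ) that resolves a classpath test resource to a File:

// Hypothetical helper: resolve a classpath resource next to the test class to a java.io.File.
private static File loadFile(Class<?> testClass, String resourceName) throws URISyntaxException {
    return new File(testClass.getResource(resourceName).toURI());
}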