Usage of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in the project conquery by bakdata: class FilterResolutionExactTest, method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // Read the test specification from the classpath resource.
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
    DatasetId dataset = conquery.getDataset().getId();

    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));

    CSVConfig csvConf = conquery.getConfig().getCsv();
    test.importRequiredData(conquery);

    // Rebuild the filter search over the dataset so the filter below has values to resolve against.
    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();

    // Grab the first (and, per the test fixture, presumably only) concept/connector/filter.
    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();

    // Write the search values to a temporary csv the filter template can point at.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    // Fix: encode with an explicit charset instead of the platform default, so the test
    // behaves identically regardless of the JVM's file.encoding.
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(java.nio.charset.StandardCharsets.UTF_8), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);

    filter.setSearchType(FilterSearch.FilterSearchType.EXACT);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));

    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();

    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());

    // Values backed by the search csv: EXACT search resolves only verbatim entries.
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "aaa", "unknown"));
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }

    // Values backed by imported column values.
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
Usage of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in the project conquery by bakdata: class FilterResolutionPrefixTest, method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // Read the test specification from the classpath resource.
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
    DatasetId dataset = conquery.getDataset().getId();

    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
    test.importRequiredData(conquery);

    CSVConfig csvConf = conquery.getConfig().getCsv();

    // Rebuild the filter search over the dataset so the filter below has values to resolve against.
    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();

    // Grab the first (and, per the test fixture, presumably only) concept/connector/filter.
    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();

    // Write the search values to a temporary csv the filter template can point at.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    // Fix: encode with an explicit charset instead of the platform default, so the test
    // behaves identically regardless of the JVM's file.encoding.
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(java.nio.charset.StandardCharsets.UTF_8), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);

    filter.setSearchType(FilterSearch.FilterSearchType.PREFIX);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));

    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();

    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());

    // Values backed by the search csv: PREFIX search resolves every entry starting with "a".
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "unknown"));
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa", "aab");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }

    // Values backed by imported column values.
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
Usage of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in the project conquery by bakdata: class MetadataCollectionTest, method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // Read the test specification from the classpath resource.
    final String specJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final DatasetId datasetId = conquery.getDataset().getId();

    final ConqueryTestSpec spec = JsonIntegrationTest.readJson(datasetId, specJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(spec));
    spec.importRequiredData(conquery);

    // Trigger matching-stats collection on all workers and wait until it is done.
    conquery.getNamespace().sendToAll(new UpdateMatchingStatsMessage());
    conquery.waitUntilWorkDone();

    final TreeConcept tree = (TreeConcept) conquery.getNamespace().getStorage().getAllConcepts().iterator().next();

    // Event counts: four events overall, split evenly between the children.
    assertThat(tree.getMatchingStats().countEvents()).isEqualTo(4);
    assertThat(tree.getChildren()).allSatisfy(child -> assertThat(child.getMatchingStats().countEvents()).isEqualTo(2));

    // Date spans of the matched events, for the root and for each child.
    assertThat(tree.getMatchingStats().spanEvents()).isEqualTo(CDateRange.of(LocalDate.parse("2010-07-15"), LocalDate.parse("2013-11-10")));
    assertThat(tree.getChildren().get(0).getMatchingStats().spanEvents()).isEqualTo(CDateRange.of(LocalDate.parse("2012-01-01"), LocalDate.parse("2013-11-10")));
    assertThat(tree.getChildren().get(1).getMatchingStats().spanEvents()).isEqualTo(CDateRange.of(LocalDate.parse("2010-07-15"), LocalDate.parse("2012-11-11")));
}
Usage of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in the project conquery by bakdata: class RoleUITest, method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    final MetaStorage storage = conquery.getMetaStorage();

    final Role role = new Role("testMandatorName", "testMandatorLabel", storage);
    final RoleId roleId = role.getId();
    final User user = new User("testUser@test.de", "testUserName", storage);
    final UserId userId = user.getId();

    try {
        final ConqueryPermission permission = DatasetPermission.onInstance(Ability.READ.asSet(), new DatasetId("testDatasetId"));

        storage.addRole(role);
        storage.addUser(user);
        // Attach the permission after registration: the stored permission object
        // might have been changed by the subject owning it.
        role.addPermission(permission);
        user.addRole(role);

        // Fetch the admin UI page for the role.
        final URI roleUri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), RoleUIResource.class, "getRole").buildFromMap(Map.of(ROLE_ID, roleId.toString()));
        final Response response = conquery.getClient().target(roleUri).request().get();

        assertThat(response.getStatus()).isEqualTo(200);
        // The rendered page must not contain Freemarker error markers.
        assertThat(response.readEntity(String.class).toLowerCase()).doesNotContain(List.of("freemarker", "debug"));
    }
    finally {
        // Always clean up, even when an assertion above failed.
        storage.removeRole(roleId);
        storage.removeUser(userId);
    }
}
Usage of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in the project conquery by bakdata: class FormConfigTest, method setupTestClass.
@BeforeAll
public void setupTestClass() throws Exception {
    datasetId = dataset.getId();
    datasetId1 = dataset1.getId();

    // Mock the DatasetRegistry used for id translation.
    namespacesMock = Mockito.mock(DatasetRegistry.class);

    // getOptional is not exercised by these tests; fail loudly if it ever is.
    doAnswer(invocation -> {
        throw new UnsupportedOperationException("Not yet implemented");
    }).when(namespacesMock).getOptional(any());

    // Resolve the two known dataset ids to namespace mocks backed by the test datasets.
    doAnswer(invocation -> {
        final DatasetId id = invocation.getArgument(0);
        Namespace namespaceMock = Mockito.mock(Namespace.class);
        if (id.equals(datasetId)) {
            when(namespaceMock.getDataset()).thenReturn(dataset);
        }
        else if (id.equals(datasetId1)) {
            when(namespaceMock.getDataset()).thenReturn(dataset1);
        }
        else {
            // Fix: corrected the typo "Unkown" and included the offending id for debuggability.
            throw new IllegalStateException("Unknown dataset id: " + id);
        }
        return namespaceMock;
    }).when(namespacesMock).get(any(DatasetId.class));

    when(namespacesMock.getAllDatasets()).thenReturn(List.of(dataset, dataset1));
    when(namespacesMock.injectIntoNew(any(ObjectMapper.class))).thenCallRealMethod();
    when(namespacesMock.inject(any(MutableInjectableValues.class))).thenCallRealMethod();

    storage = new NonPersistentStoreFactory().createMetaStorage();

    // Let the form-config mapper resolve ids through the mocked registry.
    ((MutableInjectableValues) FormConfigProcessor.getMAPPER().getInjectableValues()).add(IdResolveContext.class, namespacesMock);
    processor = new FormConfigProcessor(validator, storage, namespacesMock);
    controller = new AuthorizationController(storage, new DevelopmentAuthorizationConfig());
    controller.start();
}
Aggregations