use of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in project conquery by bakdata.
the class IdTests method testJacksonSerialization.
@Test
public void testJacksonSerialization() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException {
ConceptTreeChildId id = new ConceptTreeChildId(new ConceptTreeChildId(new ConceptId(new DatasetId("1"), "2"), "3"), "4");
ObjectMapper mapper = Jackson.MAPPER;
ConceptTreeChildId copy = mapper.readValue(mapper.writeValueAsBytes(id), ConceptTreeChildId.class);
assertThat(copy).isEqualTo(id);
assertThat(copy).hasSameHashCodeAs(id);
assertThat(copy.toString()).isEqualTo(id.toString());
}
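The same round trip also applies to a bare DatasetId, the id type this page tracks; a minimal sketch, assuming Jackson.MAPPER serializes a DatasetId the same way it serializes the composed ConceptTreeChildId above:
// Sketch (assumption): a bare DatasetId survives the same Jackson round-trip as the composed id.
DatasetId dataset = new DatasetId("1");
DatasetId datasetCopy = Jackson.MAPPER.readValue(Jackson.MAPPER.writeValueAsBytes(dataset), DatasetId.class);
assertThat(datasetCopy).isEqualTo(dataset);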
use of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in project conquery by bakdata.
the class IdTests method testStringSerialization.
@Test
public void testStringSerialization() {
ConceptTreeChildId id = new ConceptTreeChildId(new ConceptTreeChildId(new ConceptId(new DatasetId("1"), "2"), "3"), "4");
ConceptTreeChildId copy = ConceptTreeChildId.Parser.INSTANCE.parse(id.toString());
assertThat(copy).isEqualTo(id);
assertThat(copy).hasSameHashCodeAs(id);
assertThat(copy.toString()).isEqualTo(id.toString());
}
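For orientation, the nested constructors used in both tests compose the components into a single dot-joined id string, the same format as the concept ids resolved further down (e.g. ConceptResolutionTest.test_tree.test_child1); a minimal sketch of that expectation, assuming the dot separator:
// Sketch (assumption): components are joined with '.', so this id prints as "1.2.3.4" and parses back.
ConceptTreeChildId id = new ConceptTreeChildId(new ConceptTreeChildId(new ConceptId(new DatasetId("1"), "2"), "3"), "4");
assertThat(id.toString()).isEqualTo("1.2.3.4");
assertThat(ConceptTreeChildId.Parser.INSTANCE.parse("1.2.3.4")).isEqualTo(id);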
use of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in project conquery by bakdata.
the class ApiTokenRealmTest method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
final User testUser = conquery.getTestUser();
final ApiTokenRealm realm = conquery.getAuthorizationController().getAuthenticationRealms().stream()
	.filter(ApiTokenRealm.class::isInstance)
	.map(ApiTokenRealm.class::cast)
	.collect(MoreCollectors.onlyElement());
final ConqueryTokenRealm conqueryTokenRealm = conquery.getAuthorizationController().getConqueryTokenRealm();
final String userToken = conqueryTokenRealm.createTokenForUser(testUser.getId());
// Request ApiToken
final ApiTokenDataRepresentation.Request tokenRequest1 = new ApiTokenDataRepresentation.Request();
tokenRequest1.setName("test-token");
tokenRequest1.setScopes(EnumSet.of(Scopes.DATASET));
tokenRequest1.setExpirationDate(LocalDate.now().plus(1, ChronoUnit.DAYS));
ApiToken apiToken1 = requestApiToken(conquery, userToken, tokenRequest1);
assertThat(apiToken1.getToken()).isNotBlank();
// List ApiToken
List<ApiTokenDataRepresentation.Response> apiTokens = conquery.getClient()
	.target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "listUserTokens"))
	.request(MediaType.APPLICATION_JSON_TYPE)
	.header("Authorization", "Bearer " + userToken)
	.get(new GenericType<List<ApiTokenDataRepresentation.Response>>() {
	});
final ApiTokenDataRepresentation.Response expected = new ApiTokenDataRepresentation.Response();
expected.setLastUsed(null);
expected.setCreationDate(LocalDate.now());
expected.setExpirationDate(LocalDate.now().plus(1, ChronoUnit.DAYS));
expected.setScopes(EnumSet.of(Scopes.DATASET));
expected.setName("test-token");
assertThat(apiTokens).hasSize(1);
assertThat(apiTokens.get(0)).usingRecursiveComparison().ignoringFields("id").isEqualTo(expected);
// Request ApiToken 2
final ApiTokenDataRepresentation.Request tokenRequest2 = new ApiTokenDataRepresentation.Request();
tokenRequest2.setName("test-token");
tokenRequest2.setScopes(EnumSet.of(Scopes.ADMIN));
tokenRequest2.setExpirationDate(LocalDate.now().plus(1, ChronoUnit.DAYS));
ApiToken apiToken2 = requestApiToken(conquery, userToken, tokenRequest2);
assertThat(apiToken2.getToken()).isNotBlank();
// List ApiToken 2
apiTokens = requestTokenList(conquery, userToken);
assertThat(apiTokens).hasSize(2);
// Use ApiToken1 to get Datasets
List<IdLabel<DatasetId>> datasets = requestDatasets(conquery, apiToken1);
assertThat(datasets).isNotEmpty();
// Use ApiToken2 to get Datasets
datasets = requestDatasets(conquery, apiToken2);
assertThat(datasets).as("The second token has no scope for dataset").isEmpty();
// Use ApiToken2 to access Admin
List<DatasetId> adminDatasets = conquery.getClient()
	.target(HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminDatasetsResource.class, "listDatasets"))
	.request(MediaType.APPLICATION_JSON_TYPE)
	.header("Authorization", "Bearer " + apiToken2.getToken())
	.get(new GenericType<>() {
	});
assertThat(adminDatasets).as("The second token has scope for admin").isNotEmpty();
// Try to delete ApiToken2 with ApiToken (should fail)
final UUID id2 = apiTokens.stream()
	.filter(t -> t.getScopes().contains(Scopes.ADMIN))
	.map(ApiTokenDataRepresentation.Response::getId)
	.collect(MoreCollectors.onlyElement());
Response response = conquery.getClient()
	.target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "deleteToken"))
	.resolveTemplate(ApiTokenResource.TOKEN, id2)
	.request(MediaType.APPLICATION_JSON_TYPE)
	.header("Authorization", "Bearer " + apiToken2.getToken())
	.delete(Response.class);
assertThat(response.getStatus()).as("It is forbidden to act on ApiTokens with ApiTokens").isEqualTo(403);
// Delete ApiToken2 with user token
response = conquery.getClient()
	.target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "deleteToken"))
	.resolveTemplate(ApiTokenResource.TOKEN, id2)
	.request(MediaType.APPLICATION_JSON_TYPE)
	.header("Authorization", "Bearer " + userToken)
	.delete(Response.class);
assertThat(response.getStatus()).as("It is okay to act on ApiTokens with UserTokens").isEqualTo(200);
assertThat(realm.listUserToken(testUser)).hasSize(1);
// Try to use the deleted token to access Admin
response = conquery.getClient()
	.target(HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminDatasetsResource.class, "listDatasets"))
	.request(MediaType.APPLICATION_JSON_TYPE)
	.header("Authorization", "Bearer " + apiToken2.getToken())
	.get(Response.class);
assertThat(response.getStatus()).as("Cannot use deleted token").isEqualTo(401);
// Try to act on tokens from another user
final MetaStorage metaStorage = conquery.getMetaStorage();
final User user2 = new User("TestUser2", "TestUser2", metaStorage);
metaStorage.addUser(user2);
final String user2Token = conqueryTokenRealm.createTokenForUser(user2.getId());
// Try to delete ApiToken2 with ApiToken (should fail)
final UUID id1 = apiTokens.stream()
	.filter(t -> t.getScopes().contains(Scopes.DATASET))
	.map(ApiTokenDataRepresentation.Response::getId)
	.collect(MoreCollectors.onlyElement());
response = conquery.getClient()
	.target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "deleteToken"))
	.resolveTemplate(ApiTokenResource.TOKEN, id1)
	.request(MediaType.APPLICATION_JSON_TYPE)
	.header("Authorization", "Bearer " + user2Token)
	.delete(Response.class);
assertThat(response.getStatus()).as("It is forbidden to act on someone else ApiTokens").isEqualTo(403);
// Request ApiToken 3 (expired)
final ApiTokenDataRepresentation.Request tokenRequest3 = new ApiTokenDataRepresentation.Request();
tokenRequest3.setName("test-token");
tokenRequest3.setScopes(EnumSet.of(Scopes.DATASET));
tokenRequest3.setExpirationDate(LocalDate.now().minus(1, ChronoUnit.DAYS));
assertThatThrownBy(() -> requestApiToken(conquery, userToken, tokenRequest3)).as("Expiration date is in the past").isExactlyInstanceOf(ClientErrorException.class).hasMessageContaining("HTTP 422");
// Craft expired token behind validation to simulate the use of an expired token
ApiToken apiToken3 = realm.createApiToken(user2, tokenRequest3);
assertThatThrownBy(() -> requestDatasets(conquery, apiToken3)).as("Expired token").isExactlyInstanceOf(NotAuthorizedException.class);
}
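The helpers requestApiToken, requestTokenList and requestDatasets are not part of this excerpt. A hedged sketch of what requestApiToken could look like, following the client call pattern used elsewhere in this test; the resource method name "createToken" is an assumption, and javax.ws.rs.client.Entity is needed for the POST body:
private static ApiToken requestApiToken(StandaloneSupport conquery, String userToken, ApiTokenDataRepresentation.Request tokenRequest) {
	// Hypothetical endpoint name "createToken"; the Bearer header mirrors the requests above.
	return conquery.getClient()
		.target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "createToken"))
		.request(MediaType.APPLICATION_JSON_TYPE)
		.header("Authorization", "Bearer " + userToken)
		.post(Entity.entity(tokenRequest, MediaType.APPLICATION_JSON_TYPE), ApiToken.class);
}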
use of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in project conquery by bakdata.
the class ConceptResolutionTest method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
// read test specification
String testJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
DatasetId dataset = conquery.getDataset().getId();
ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
test.importRequiredData(conquery);
FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), conquery.getConfig().getCsv());
conquery.waitUntilWorkDone();
ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());
TreeConcept concept = (TreeConcept) conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
ResolvedConceptsResult resolved = processor.resolveConceptElements(concept, List.of("A1", "unknown"));
// check the resolved values
assertThat(resolved).isNotNull();
assertThat(resolved.getResolvedConcepts().stream().map(IId::toString)).containsExactlyInAnyOrder("ConceptResolutionTest.test_tree.test_child1");
assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
}
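The resolved ids above are plain dot-joined strings; a minimal sketch showing that such a string parses back into a ConceptTreeChildId with the Parser used in IdTests, assuming the same parser applies to ids taken from a resolution result:
// Sketch (assumption): a resolved concept id string re-parses with the Parser shown in IdTests above.
ConceptTreeChildId parsed = ConceptTreeChildId.Parser.INSTANCE.parse("ConceptResolutionTest.test_tree.test_child1");
assertThat(parsed.toString()).isEqualTo("ConceptResolutionTest.test_tree.test_child1");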
use of com.bakdata.conquery.models.identifiable.ids.specific.DatasetId in project conquery by bakdata.
the class FilterResolutionContainsTest method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
// read test specification
String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
DatasetId dataset = conquery.getDataset().getId();
ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
test.importRequiredData(conquery);
CSVConfig csvConf = conquery.getConfig().getCsv();
FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
conquery.waitUntilWorkDone();
Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
Connector connector = concept.getConnectors().iterator().next();
AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();
// Write the search values to a temporary csv file.
final Path tmpCSv = Files.createTempFile("conquery_search", ".csv");
Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
filter.setSearchType(FilterSearch.FilterSearchType.CONTAINS);
filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));
FilterSearch.createSourceSearch(filter, csvConf);
assertThat(filter.getSourceSearch()).isNotNull();
ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());
// from csv
{
ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "unknown"));
// check the resolved values
assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa", "aab", "baaa");
assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
}
// from column values
{
ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));
// check the resolved values
assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
}
}
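The lines array written into the temporary csv is a field of the test class and not shown in this excerpt. A hypothetical sketch of its shape, derived from the header name and the values the "contains" assertions expect to resolve; the exact contents in the original test are an assumption:
// Hypothetical contents: a HEADER row plus values matching the contains-search assertions above.
private final String[] lines = new String[] { "HEADER", "a", "aab", "aaa", "baaa", "b" };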