Search in sources :

Example 6 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class DatasetDeletionTest method execute.

/**
 * End-to-end dataset-deletion test: imports a small dataset, asserts the
 * pre-deletion state on meta storage and all shard workers, deletes the
 * dataset (tables first, since deleting a dataset that still has tables with
 * connectors must be rejected), asserts the post-deletion state, re-imports
 * the data into the same dataset, and finally restarts the whole cluster to
 * verify the re-imported state survives persistence.
 */
@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    final MetaStorage storage = conquery.getMetaStorage();
    final Dataset dataset = conquery.getDataset();
    Namespace namespace = conquery.getNamespace();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    // Each import step is asynchronous, hence the waitUntilWorkDone() after every one.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // Remember the import count so the re-import further down can be checked against it.
    final int nImports = namespace.getStorage().getAllImports().size();
    log.info("Checking state before deletion");
    // Assert state before deletion.
    {
        // The dataset itself must be registered.
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(dataset.getId())).isNotEmpty();
        // Every worker serving this dataset must hold buckets and cblocks.
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query before deletion");
        // Baseline: the query succeeds and returns 2 results before anything is deleted.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete Dataset.
    {
        log.info("Issuing deletion of import {}", dataset);
        // Deleting a dataset that still has tables with associated connectors
        // is not allowed, so this first attempt must throw.
        assertThatThrownBy(() -> conquery.getDatasetsProcessor().deleteDataset(dataset)).isInstanceOf(WebApplicationException.class);
        // TODO use api
        // Force-delete all tables first (force=true drops dependent connectors).
        conquery.getNamespace().getStorage().getTables().forEach(tableId -> conquery.getDatasetsProcessor().deleteTable(tableId, true));
        conquery.waitUntilWorkDone();
        // Finally delete dataset
        conquery.getDatasetsProcessor().deleteDataset(dataset);
        conquery.waitUntilWorkDone();
        assertThat(storage.getCentralRegistry().getOptional(dataset.getId())).isEmpty();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // After deleting the dataset, no imports must remain at all.
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(0);
        // The deleted import should not be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().getTable().getDataset().equals(dataset.getId())).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).filteredOn(bucket -> bucket.getTable().getDataset().getId().equals(dataset.getId())).isEmpty();
                // No CBlock associated with import may exist
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getTable().getDataset().getId().equals(dataset.getId())).isEmpty();
            }
        }
        // It's not exactly possible to issue a query for a non-existent dataset,
        // so we assert that parsing the query fails instead.
        assertThatThrownBy(() -> {
            IntegrationUtils.parseQuery(conquery, test.getRawQuery());
        }).isNotNull();
        // The previously parsed query must now fail with 404.
        IntegrationUtils.assertQueryResult(conquery, query, 0, ExecutionState.FAILED, conquery.getTestUser(), 404);
    }
    // Reload the dataset and assert the state.
    // We have to do some weird trix with StandaloneSupport to open it with another Dataset
    final StandaloneSupport conqueryReimport = testConquery.getSupport(namespace.getDataset().getName());
    {
        // only import the deleted import/table
        LoadingUtil.importTables(conqueryReimport, test.getContent().getTables());
        assertThat(conqueryReimport.getNamespace().getStorage().getTables()).isNotEmpty();
        conqueryReimport.waitUntilWorkDone();
        LoadingUtil.importTableContents(conqueryReimport, test.getContent().getTables());
        conqueryReimport.waitUntilWorkDone();
        LoadingUtil.importConcepts(conqueryReimport, test.getRawConcepts());
        conqueryReimport.waitUntilWorkDone();
        assertThat(conqueryReimport.getDatasetsProcessor().getDatasetRegistry().get(conqueryReimport.getDataset().getId())).describedAs("Dataset after re-import.").isNotNull();
        // Re-import must restore exactly the original number of imports.
        assertThat(conqueryReimport.getNamespace().getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conqueryReimport.getShardNodes()) {
            assertThat(node.getWorkers().getWorkers().values()).filteredOn(w -> w.getInfo().getDataset().equals(conqueryReimport.getDataset().getId())).describedAs("Workers for node {}", node.getName()).isNotEmpty();
        }
        log.info("Executing query after re-import");
        final Query query2 = IntegrationUtils.parseQuery(conqueryReimport, test.getRawQuery());
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conqueryReimport, query2, 2L, ExecutionState.DONE, conqueryReimport.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again, that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        final StandaloneSupport conqueryRestart = testConquery.openDataset(conqueryReimport.getDataset().getId());
        log.info("Checking state after re-start");
        // NOTE(review): hard-coded 2 presumably equals nImports for this fixture —
        // consider asserting against nImports instead. TODO confirm.
        assertThat(conqueryRestart.getNamespace().getStorage().getAllImports().size()).isEqualTo(2);
        for (ShardNode node : conqueryRestart.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getTable().getDataset().getId().equals(dataset.getId()))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query after restart");
        final Query query3 = IntegrationUtils.parseQuery(conqueryRestart, test.getRawQuery());
        // Issue a query and assert that it has the same content as the first time around.
        // NOTE(review): uses the pre-restart support's test user (conquery.getTestUser());
        // presumably equivalent to conqueryRestart.getTestUser() — verify.
        IntegrationUtils.assertQueryResult(conqueryRestart, query3, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
}
Also used : IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) TestConquery(com.bakdata.conquery.util.support.TestConquery) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) ShardNode(com.bakdata.conquery.commands.ShardNode) QueryTest(com.bakdata.conquery.integration.json.QueryTest) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Worker(com.bakdata.conquery.models.worker.Worker) Slf4j(lombok.extern.slf4j.Slf4j) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) WebApplicationException(javax.ws.rs.WebApplicationException) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Namespace(com.bakdata.conquery.models.worker.Namespace) Query(com.bakdata.conquery.apiv1.query.Query) WebApplicationException(javax.ws.rs.WebApplicationException) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Dataset(com.bakdata.conquery.models.datasets.Dataset) Namespace(com.bakdata.conquery.models.worker.Namespace) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) ShardNode(com.bakdata.conquery.commands.ShardNode) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Worker(com.bakdata.conquery.models.worker.Worker) 
StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport)

Example 7 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class FilterTest method parseQuery.

/**
 * Builds a {@link ConceptQuery} for the test's single concept/connector/filter
 * combination, optionally wrapped in a {@link CQDateRestriction} when a date
 * range is configured on this test.
 *
 * @param support the running standalone instance whose dataset name qualifies the filter id
 * @return the assembled query, ready for execution
 */
private Query parseQuery(StandaloneSupport support) throws JSONException, IOException {
    // Point the raw filter value at the fully qualified filter id of this dataset.
    rawFilterValue.put("filter", support.getDataset().getName() + ".concept.connector.filter");
    final FilterValue<?> filterValue = parseSubTree(support, rawFilterValue, Jackson.MAPPER.getTypeFactory().constructType(FilterValue.class));

    final CQConcept conceptNode = new CQConcept();
    final CQTable table = new CQTable();

    // Wire table and concept node to each other and to the configured connector/filter.
    table.setConnector(connector);
    table.setFilters(Collections.singletonList(filterValue));
    table.setConcept(conceptNode);
    conceptNode.setElements(Collections.singletonList(concept));
    conceptNode.setTables(Collections.singletonList(table));

    if (dateRange == null) {
        return new ConceptQuery(conceptNode);
    }

    // A configured date range wraps the concept in a date restriction.
    final CQDateRestriction dateRestriction = new CQDateRestriction();
    dateRestriction.setDateRange(dateRange);
    dateRestriction.setChild(conceptNode);
    return new ConceptQuery(dateRestriction);
}
Also used : CQDateRestriction(com.bakdata.conquery.apiv1.query.concept.specific.CQDateRestriction) CQTable(com.bakdata.conquery.apiv1.query.concept.filter.CQTable) CQConcept(com.bakdata.conquery.apiv1.query.concept.specific.CQConcept) ConceptQuery(com.bakdata.conquery.apiv1.query.ConceptQuery) FilterValue(com.bakdata.conquery.apiv1.query.concept.filter.FilterValue)

Example 8 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class ConceptPermissionTest method execute.

/**
 * Verifies concept-level authorization: a user with only dataset READ access
 * cannot execute a query referencing a concept (403/FAILED); after granting
 * READ on the concept, the same query succeeds (201/DONE).
 */
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    final MetaStorage storage = conquery.getMetaStorage();
    final Dataset dataset = conquery.getDataset();
    final String testJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset.getId(), testJson);
    final QueryProcessor processor = new QueryProcessor(conquery.getDatasetRegistry(), storage, conquery.getConfig());
    final User user = new User("testUser", "testUserLabel", storage);
    // Manually import data, so we can do our own work.
    // Each import step is asynchronous, hence the waitUntilWorkDone() after every one.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        // The user starts with dataset READ only — deliberately no concept permission yet.
        storage.addUser(user);
        user.addPermission(DatasetPermission.onInstance(Ability.READ, dataset.getId()));
    }
    // Query cannot be deserialized without Namespace set up
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // Id of the lone concept that is used in the test.
    // NOTE(review): the variable actually holds the Concept itself, not its id —
    // a rename to `concept` would be clearer.
    Concept<?> conceptId = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    // Without the concept permission the execution must be rejected with 403.
    IntegrationUtils.assertQueryResult(conquery, query, -1, ExecutionState.FAILED, user, 403);
    // Add the necessary Permission
    {
        final ConqueryPermission permission = conceptId.createPermission(Ability.READ.asSet());
        log.info("Adding the Permission[{}] to User[{}]", permission, user);
        user.addPermission(permission);
    }
    // Only assert permissions
    IntegrationUtils.assertQueryResult(conquery, query, -1, ExecutionState.DONE, user, 201);
    conquery.waitUntilWorkDone();
    // Clean up
    {
        storage.removeUser(user.getId());
    }
}
Also used : ConqueryPermission(com.bakdata.conquery.models.auth.permissions.ConqueryPermission) User(com.bakdata.conquery.models.auth.entities.User) Query(com.bakdata.conquery.apiv1.query.Query) QueryTest(com.bakdata.conquery.integration.json.QueryTest) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Dataset(com.bakdata.conquery.models.datasets.Dataset) QueryProcessor(com.bakdata.conquery.apiv1.QueryProcessor)

Example 9 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class AbstractQueryEngineTest method executeTest.

/**
 * Executes the test's query against the standalone instance, then checks:
 * each result row has as many values as there are result infos, the CSV
 * rendering matches the expected CSV resource (order-insensitive), and the
 * recorded result count matches the expected data row count.
 */
@Override
public void executeTest(StandaloneSupport standaloneSupport) throws IOException {
    Query query = getQuery();
    assertThat(standaloneSupport.getValidator().validate(query)).describedAs("Query Validation Errors").isEmpty();
    log.info("{} QUERY INIT", getLabel());
    final User testUser = standaloneSupport.getTestUser();
    // Run the query to completion (expects HTTP 201 and state DONE).
    final ManagedExecutionId executionId = IntegrationUtils.assertQueryResult(standaloneSupport, query, -1, ExecutionState.DONE, testUser, 201);
    final ManagedQuery execution = (ManagedQuery) standaloneSupport.getMetaStorage().getExecution(executionId);
    // check result info size
    List<ResultInfo> resultInfos = execution.getResultInfos();
    assertThat(execution.streamResults().flatMap(EntityResult::streamValues)).as("Should have same size as result infos").allSatisfy(v -> assertThat(v).hasSameSizeAs(resultInfos));
    // Get the actual response and compare with expected result.
    // Fetches the result via the real CSV REST endpoint (pretty-printing off, English locale).
    final Response csvResponse = standaloneSupport.getClient().target(HierarchyHelper.hierarchicalPath(standaloneSupport.defaultApiURIBuilder(), ResultCsvResource.class, "getAsCsv").buildFromMap(Map.of(DATASET, standaloneSupport.getDataset().getName(), QUERY, execution.getId().toString()))).queryParam("pretty", false).request(AdditionalMediaTypes.CSV).acceptLanguage(Locale.ENGLISH).get();
    List<String> actual = In.stream(((InputStream) csvResponse.getEntity())).readLines();
    ResourceFile expectedCsv = getExpectedCsv();
    List<String> expected = In.stream(expectedCsv.stream()).readLines();
    // Row order is not guaranteed, so compare as unordered line sets.
    assertThat(actual).as("Results for %s are not as expected.", this).containsExactlyInAnyOrderElementsOf(expected);
    // check that getLastResultCount returns the correct size
    // (only meaningful when no entity spans multiple lines; -1 skips the CSV header row)
    if (execution.streamResults().noneMatch(MultilineEntityResult.class::isInstance)) {
        assertThat(execution.getLastResultCount()).as("Result count for %s is not as expected.", this).isEqualTo(expected.size() - 1);
    }
    log.info("INTEGRATION TEST SUCCESSFUL {} {} on {} rows", getClass().getSimpleName(), this, expected.size());
}
Also used : Response(javax.ws.rs.core.Response) ResourceFile(com.bakdata.conquery.integration.common.ResourceFile) User(com.bakdata.conquery.models.auth.entities.User) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) Query(com.bakdata.conquery.apiv1.query.Query) ManagedExecutionId(com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) ResultInfo(com.bakdata.conquery.models.query.resultinfo.ResultInfo) MultilineEntityResult(com.bakdata.conquery.models.query.results.MultilineEntityResult)

Example 10 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class LoadingUtil method importPreviousQueries.

/**
 * Loads a test's "previous query" fixtures into the namespace's execution
 * manager so subsequent test queries can reference them as stored executions.
 *
 * Two fixture flavours are supported:
 * CSV result files (imported as CQExternal uploads with ID/DATE_SET columns)
 * and raw JSON query definitions (deserialized with namespace injection).
 *
 * @param support the running standalone instance to load into
 * @param content the test's required data (fixture sources)
 * @param user    the user that owns the created executions and receives permissions on them
 * @throws IOException if reading a fixture fails
 */
public static void importPreviousQueries(StandaloneSupport support, RequiredData content, User user) throws IOException {
    // Deterministic UUIDs (0,1), (0,2), ... keep execution ids stable across runs.
    int id = 1;
    // Load previous query results if available
    for (ResourceFile queryResults : content.getPreviousQueryResults()) {
        UUID queryId = new UUID(0L, id++);
        // Parse the raw CSV without header handling — the fixture has no header row.
        final CsvParser parser = support.getConfig().getCsv().withParseHeaders(false).withSkipHeader(false).createParser();
        String[][] data = parser.parseAll(queryResults.stream()).toArray(new String[0][]);
        ConceptQuery q = new ConceptQuery(new CQExternal(Arrays.asList("ID", "DATE_SET"), data));
        ManagedExecution<?> managed = support.getNamespace().getExecutionManager().createQuery(support.getNamespace().getNamespaces(), q, queryId, user, support.getNamespace().getDataset());
        // NOTE(review): this branch grants a permission on the execution instance while
        // the JSON branch below uses ExecutionPermission.onInstance — presumably
        // equivalent, but the styles should be unified. TODO confirm.
        user.addPermission(managed.createPermission(AbilitySets.QUERY_CREATOR));
        if (managed.getState() == ExecutionState.FAILED) {
            fail("Query failed");
        }
    }
    // Load raw JSON query definitions, resolving ids against this namespace's registry.
    for (JsonNode queryNode : content.getPreviousQueries()) {
        ObjectMapper mapper = new SingletonNamespaceCollection(support.getNamespaceStorage().getCentralRegistry()).injectIntoNew(Jackson.MAPPER);
        mapper = support.getDataset().injectIntoNew(mapper);
        Query query = mapper.readerFor(Query.class).readValue(queryNode);
        UUID queryId = new UUID(0L, id++);
        ManagedExecution<?> managed = support.getNamespace().getExecutionManager().createQuery(support.getNamespace().getNamespaces(), query, queryId, user, support.getNamespace().getDataset());
        user.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR, managed.getId()));
        if (managed.getState() == ExecutionState.FAILED) {
            fail("Query failed");
        }
    }
    // Wait only if we actually did anything.
    // BUGFIX: previously only CSV result fixtures triggered the wait, although
    // JSON-defined previous queries also enqueue work.
    if (!content.getPreviousQueryResults().isEmpty() || !content.getPreviousQueries().isEmpty()) {
        support.waitUntilWorkDone();
    }
}
Also used : ConceptQuery(com.bakdata.conquery.apiv1.query.ConceptQuery) Query(com.bakdata.conquery.apiv1.query.Query) SingletonNamespaceCollection(com.bakdata.conquery.models.worker.SingletonNamespaceCollection) CsvParser(com.univocity.parsers.csv.CsvParser) CQExternal(com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal) JsonNode(com.fasterxml.jackson.databind.JsonNode) UUID(java.util.UUID) ConceptQuery(com.bakdata.conquery.apiv1.query.ConceptQuery) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)

Aggregations

ConceptQuery (com.bakdata.conquery.apiv1.query.ConceptQuery)12 Dataset (com.bakdata.conquery.models.datasets.Dataset)12 Query (com.bakdata.conquery.apiv1.query.Query)11 ManagedQuery (com.bakdata.conquery.models.query.ManagedQuery)11 MetaStorage (com.bakdata.conquery.io.storage.MetaStorage)10 QueryTest (com.bakdata.conquery.integration.json.QueryTest)8 ExecutionState (com.bakdata.conquery.models.execution.ExecutionState)8 Slf4j (lombok.extern.slf4j.Slf4j)8 Namespace (com.bakdata.conquery.models.worker.Namespace)7 CQAnd (com.bakdata.conquery.apiv1.query.concept.specific.CQAnd)6 CQConcept (com.bakdata.conquery.apiv1.query.concept.specific.CQConcept)6 CQReusedQuery (com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery)6 User (com.bakdata.conquery.models.auth.entities.User)6 Response (javax.ws.rs.core.Response)6 CQExternal (com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal)5 ShardNode (com.bakdata.conquery.commands.ShardNode)5 IntegrationUtils (com.bakdata.conquery.integration.common.IntegrationUtils)5 LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil)5 LoadingUtil.importSecondaryIds (com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds)5 JsonIntegrationTest (com.bakdata.conquery.integration.json.JsonIntegrationTest)5