Search in sources :

Example 6 with QueryTest

use of com.bakdata.conquery.integration.json.QueryTest in project conquery by bakdata.

The method execute of the class TableDeletionTest.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    // End-to-end test: import data, verify a table cannot be deleted while connectors
    // reference it, delete concept then table, re-import, and finally restart the
    // cluster to verify the persisted state survives.
    final StandaloneSupport conquery = testConquery.getSupport(name);
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final TableId tableId = TableId.Parser.INSTANCE.parse(dataset.getName(), "test_table2");
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // Capture the import count after the initial load so later assertions can be relative to it.
    final int nImports = namespace.getStorage().getAllImports().size();
    // State before deletion.
    {
        log.info("Checking state before deletion");
        // Must contain the table.
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(tableId)).isNotEmpty();
        // Every worker of this dataset must hold buckets and CBlocks.
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query before deletion");
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete the import.
    {
        log.info("Issuing deletion of import {}", tableId);
        // Delete the table via API.
        // But, we do not allow deletion of tables with associated connectors, so this should throw!
        final URI deleteTable = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "remove").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName(), ResourceConstants.TABLE, tableId.toString()));
        final Response failed = conquery.getClient().target(deleteTable).request().delete();
        assertThat(failed.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.CLIENT_ERROR);
        // Remove the concept first, then the table deletion must succeed.
        conquery.getDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
        final Response success = conquery.getClient().target(deleteTable).request().delete();
        assertThat(success.getStatusInfo().getStatusCode()).isEqualTo(Response.Status.OK.getStatusCode());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // We have deleted one import, so there should be one less!
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
        // The deleted import should not be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().getTable().equals(tableId)).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getTable().getId().equals(tableId)).isEmpty();
                // No CBlock associated with import may exist
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().getId().equals(tableId)).isEmpty();
            }
        }
        log.info("Executing query after deletion");
        // Issue a query and assert that it fails, since its concept was deleted.
        IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
    }
    conquery.waitUntilWorkDone();
    // Load the same import into the same table, with only the deleted import/table
    {
        // only import the deleted import/table
        LoadingUtil.importTables(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        assertThat(namespace.getStorage().getTable(tableId)).describedAs("Table after re-import.").isNotNull();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                assertThat(value.getStorage().getCentralRegistry().resolve(tableId)).describedAs("Table in worker storage.").isNotNull();
            }
        }
    }
    // Test state after reimport.
    {
        log.info("Checking state after re-import");
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query after re-import");
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again, that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            // Import count must be unchanged by the restart (was hard-coded to 2 before,
            // which only worked because nImports happened to be 2).
            assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
            for (ShardNode node : conquery2.getShardNodes()) {
                for (Worker value : node.getWorkers().getWorkers().values()) {
                    if (!value.getInfo().getDataset().equals(dataset.getId())) {
                        continue;
                    }
                    final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                    assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
                }
            }
            log.info("Executing query after re-start");
            // Issue a query and assert that it has the same content as the first time around.
            // NOTE(review): the user is taken from the pre-restart support object — confirm this is intended.
            IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
    }
}
Also used : TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) Map(java.util.Map) TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) URI(java.net.URI) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) Collectors(java.util.stream.Collectors) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) Namespace(com.bakdata.conquery.models.worker.Namespace) Query(com.bakdata.conquery.apiv1.query.Query) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Dataset(com.bakdata.conquery.models.datasets.Dataset) URI(java.net.URI) 
Namespace(com.bakdata.conquery.models.worker.Namespace) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) Response(javax.ws.rs.core.Response) ShardNode(com.bakdata.conquery.commands.ShardNode) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Worker(com.bakdata.conquery.models.worker.Worker) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport)

Example 7 with QueryTest

use of com.bakdata.conquery.integration.json.QueryTest in project conquery by bakdata.

The method execute of the class DownloadLinkGeneration.

@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // Verifies that result-download URLs only appear on an execution's status once the
    // execution is DONE AND the user holds the DOWNLOAD permission on the dataset.
    final MetaStorage storage = conquery.getMetaStorage();
    final User user = new User("testU", "testU", storage);
    final String testJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(conquery.getDataset(), testJson);
    storage.updateUser(user);
    // Manually import data
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
    test.importRequiredData(conquery);
    // Create execution for download; it is stored directly and never actually executed.
    ManagedQuery exec = new ManagedQuery(test.getQuery(), user, conquery.getDataset());
    storage.addExecution(exec);
    // READ permission lets the user see the execution status at all.
    user.addPermission(DatasetPermission.onInstance(Set.of(Ability.READ), conquery.getDataset().getId()));
    {
        // Try to generate a download link: should not be possible, because the execution isn't run yet
        FullExecutionStatus status = IntegrationUtils.getExecutionStatus(conquery, exec.getId(), user, 200);
        assertThat(status.getResultUrls()).isEmpty();
    }
    {
        // Tinker with the state of the execution and try again: still not possible because of missing permissions
        exec.setState(ExecutionState.DONE);
        FullExecutionStatus status = IntegrationUtils.getExecutionStatus(conquery, exec.getId(), user, 200);
        assertThat(status.getResultUrls()).isEmpty();
    }
    {
        // Add permission to download: now it should be possible
        user.addPermission(DatasetPermission.onInstance(Set.of(Ability.DOWNLOAD), conquery.getDataset().getId()));
        FullExecutionStatus status = IntegrationUtils.getExecutionStatus(conquery, exec.getId(), user, 200);
        // This Url is missing the `/api` path part, because we use the standard UriBuilder here
        assertThat(status.getResultUrls()).contains(new URL(String.format("%s/datasets/%s/result/%s.csv", conquery.defaultApiURIBuilder().toString(), conquery.getDataset().getId(), exec.getId())));
    }
}
Also used : User(com.bakdata.conquery.models.auth.entities.User) QueryTest(com.bakdata.conquery.integration.json.QueryTest) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) FullExecutionStatus(com.bakdata.conquery.apiv1.FullExecutionStatus) URL(java.net.URL)

Example 8 with QueryTest

use of com.bakdata.conquery.integration.json.QueryTest in project conquery by bakdata.

The method execute of the class ReusedQueryTest.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    // Exercises query-reuse (CQReusedQuery) in several shapes: plain reuse, reuse via the
    // reexecute API endpoint, reuse inside a SecondaryIdQuery (with and without exclusion),
    // repeated reuse across query types, and reuse by a different user.
    final StandaloneSupport conquery = testConquery.getSupport(name);
    final String testJson = In.resource("/tests/query/SECONDARY_ID_MIXED/SECONDARY_IDS_MIXED.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    // Run the base SecondaryIdQuery once; every later reuse refers back to this execution.
    final SecondaryIdQuery query = (SecondaryIdQuery) IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    final ManagedExecutionId id = IntegrationUtils.assertQueryResult(conquery, query, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
    assertThat(id).isNotNull();
    final MetaStorage metaStorage = conquery.getMetaStorage();
    final ManagedQuery execution = (ManagedQuery) metaStorage.getExecution(id);
    // Normal reuse
    {
        final ConceptQuery reused = new ConceptQuery(new CQReusedQuery(execution.getId()));
        IntegrationUtils.assertQueryResult(conquery, reused, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Reuse by API
    {
        final URI reexecuteUri = HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), QueryResource.class, "reexecute").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName(), ResourceConstants.QUERY, execution.getId().toString()));
        final FullExecutionStatus status = conquery.getClient().target(reexecuteUri).request(MediaType.APPLICATION_JSON).post(Entity.entity(null, MediaType.APPLICATION_JSON_TYPE)).readEntity(FullExecutionStatus.class);
        // Reexecution may still be running or already finished — both are acceptable here.
        assertThat(status.getStatus()).isIn(ExecutionState.RUNNING, ExecutionState.DONE);
    }
    // Reuse in SecondaryId
    {
        final SecondaryIdQuery reused = new SecondaryIdQuery();
        reused.setRoot(new CQReusedQuery(execution.getId()));
        reused.setSecondaryId(query.getSecondaryId());
        IntegrationUtils.assertQueryResult(conquery, reused, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Reuse in SecondaryId, but do exclude
    {
        final SecondaryIdQuery reused = new SecondaryIdQuery();
        final CQAnd root = new CQAnd();
        reused.setRoot(root);
        final CQReusedQuery reuse = new CQReusedQuery(execution.getId());
        reuse.setExcludeFromSecondaryId(true);
        // We select only a single event of the query by the exact filtering.
        final CQConcept cqConcept = new CQConcept();
        final ConceptId conceptId = new ConceptId(conquery.getDataset().getId(), "concept");
        final Concept<?> concept = conquery.getNamespaceStorage().getConcept(conceptId);
        cqConcept.setElements(List.of(concept));
        final CQTable cqTable = new CQTable();
        cqTable.setConcept(cqConcept);
        final CentralRegistry centralRegistry = conquery.getNamespaceStorage().getCentralRegistry();
        final Connector connector = centralRegistry.resolve(new ConnectorId(conceptId, "connector1"));
        cqTable.setConnector(connector);
        // Range filter pinned to exactly 1.01 so only one event matches.
        cqTable.setFilters(List.of(new FilterValue.CQRealRangeFilter((Filter<Range<BigDecimal>>) centralRegistry.resolve(new FilterId(connector.getId(), "filter")), new Range<>(BigDecimal.valueOf(1.01d), BigDecimal.valueOf(1.01d)))));
        cqConcept.setTables(List.of(cqTable));
        cqConcept.setExcludeFromSecondaryId(false);
        root.setChildren(List.of(reuse, cqConcept));
        reused.setSecondaryId(query.getSecondaryId());
        IntegrationUtils.assertQueryResult(conquery, reused, 1L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Reuse Multiple times with different query types
    {
        final SecondaryIdQuery reused1 = new SecondaryIdQuery();
        reused1.setRoot(new CQReusedQuery(execution.getId()));
        reused1.setSecondaryId(query.getSecondaryId());
        final ManagedExecutionId reused1Id = IntegrationUtils.assertQueryResult(conquery, reused1, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
        final ManagedQuery execution1 = (ManagedQuery) metaStorage.getExecution(reused1Id);
        {
            // An identical reuse (same root, same secondary id) must resolve to the same execution.
            final SecondaryIdQuery reused2 = new SecondaryIdQuery();
            reused2.setRoot(new CQReusedQuery(execution1.getId()));
            reused2.setSecondaryId(query.getSecondaryId());
            final ManagedExecutionId reused2Id = IntegrationUtils.assertQueryResult(conquery, reused2, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
            final ManagedQuery execution2 = (ManagedQuery) metaStorage.getExecution(reused2Id);
            assertThat(reused2Id).as("Query should be reused.").isEqualTo(reused1Id);
            // Now we change to ConceptQuery
            final ConceptQuery reused3 = new ConceptQuery(new CQReusedQuery(execution2.getId()));
            IntegrationUtils.assertQueryResult(conquery, reused3, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
        {
            // A different secondary id must NOT be collapsed into the prior execution.
            final SecondaryIdQuery reusedDiffId = new SecondaryIdQuery();
            reusedDiffId.setRoot(new CQReusedQuery(execution1.getId()));
            // ignored is a single global value and therefore the same as by-PID
            reusedDiffId.setSecondaryId(conquery.getNamespace().getStorage().getSecondaryId(new SecondaryIdDescriptionId(conquery.getDataset().getId(), "ignored")));
            final ManagedExecutionId executionId = IntegrationUtils.assertQueryResult(conquery, reusedDiffId, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
            assertThat(executionId).as("Query should NOT be reused.").isNotEqualTo(reused1Id);
        }
        {
            // Reuse by another user (create a copy of the actual query)
            final SecondaryIdQuery reused = new SecondaryIdQuery();
            reused.setRoot(new CQReusedQuery(execution.getId()));
            reused.setSecondaryId(query.getSecondaryId());
            User shareHolder = new User("shareholder", "ShareHolder", conquery.getMetaStorage());
            conquery.getMetaProcessor().addUser(shareHolder);
            shareHolder.addPermissions(Set.of(dataset.createPermission(Set.of(Ability.READ)), execution.createPermission(Set.of(Ability.READ))));
            ManagedExecutionId copyId = IntegrationUtils.assertQueryResult(conquery, reused, 4L, ExecutionState.DONE, shareHolder, 201);
            ManagedExecution<?> copy = metaStorage.getExecution(copyId);
            // Contentwise the label and tags should be the same
            assertThat(copy).usingRecursiveComparison().comparingOnlyFields("label", "tags").isEqualTo(execution);
            // However the Object holding the tags must be different, so the two are not linked here
            assertThat(copy.getTags()).isNotSameAs(execution.getTags());
            // And the ids must be different
            // NOTE(review): isNotSameAs only compares references; isNotEqualTo would be the stricter check — confirm intent.
            assertThat(copy.getId()).isNotSameAs(execution.getId());
        }
    }
}
Also used : Connector(com.bakdata.conquery.models.datasets.concepts.Connector) User(com.bakdata.conquery.models.auth.entities.User) CQTable(com.bakdata.conquery.apiv1.query.concept.filter.CQTable) CQConcept(com.bakdata.conquery.apiv1.query.concept.specific.CQConcept) CentralRegistry(com.bakdata.conquery.models.identifiable.CentralRegistry) URI(java.net.URI) FilterId(com.bakdata.conquery.models.identifiable.ids.specific.FilterId) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) CQAnd(com.bakdata.conquery.apiv1.query.concept.specific.CQAnd) ConnectorId(com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId) Concept(com.bakdata.conquery.models.datasets.concepts.Concept) CQConcept(com.bakdata.conquery.apiv1.query.concept.specific.CQConcept) SecondaryIdDescriptionId(com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId) CQReusedQuery(com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Dataset(com.bakdata.conquery.models.datasets.Dataset) Range(com.bakdata.conquery.models.common.Range) FullExecutionStatus(com.bakdata.conquery.apiv1.FullExecutionStatus) BigDecimal(java.math.BigDecimal) ConceptId(com.bakdata.conquery.models.identifiable.ids.specific.ConceptId) FilterValue(com.bakdata.conquery.apiv1.query.concept.filter.FilterValue) Filter(com.bakdata.conquery.models.datasets.concepts.filters.Filter) SecondaryIdQuery(com.bakdata.conquery.apiv1.query.SecondaryIdQuery) ManagedExecutionId(com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) ConceptQuery(com.bakdata.conquery.apiv1.query.ConceptQuery)

Aggregations

QueryTest (com.bakdata.conquery.integration.json.QueryTest)8 MetaStorage (com.bakdata.conquery.io.storage.MetaStorage)7 Dataset (com.bakdata.conquery.models.datasets.Dataset)7 Query (com.bakdata.conquery.apiv1.query.Query)6 StandaloneSupport (com.bakdata.conquery.util.support.StandaloneSupport)6 ShardNode (com.bakdata.conquery.commands.ShardNode)5 IntegrationUtils (com.bakdata.conquery.integration.common.IntegrationUtils)5 LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil)5 LoadingUtil.importSecondaryIds (com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds)5 JsonIntegrationTest (com.bakdata.conquery.integration.json.JsonIntegrationTest)5 ModificationShieldedWorkerStorage (com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage)5 ValidatorHelper (com.bakdata.conquery.models.exceptions.ValidatorHelper)5 ExecutionState (com.bakdata.conquery.models.execution.ExecutionState)5 Namespace (com.bakdata.conquery.models.worker.Namespace)5 Worker (com.bakdata.conquery.models.worker.Worker)5 TestConquery (com.bakdata.conquery.util.support.TestConquery)5 In (com.github.powerlibraries.io.In)5 Slf4j (lombok.extern.slf4j.Slf4j)5 Assertions.assertThat (org.assertj.core.api.Assertions.assertThat)5 ProgrammaticIntegrationTest (com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest)4