
Example 6 with StandaloneSupport

Use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class ImportDeletionTest, method execute.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    MetaStorage storage = conquery.getMetaStorage();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final ImportId importId = ImportId.Parser.INSTANCE.parse(dataset.getName(), "test_table2", "test_table2");
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    final int nImports = namespace.getStorage().getAllImports().size();
    // State before deletion.
    {
        log.info("Checking state before deletion");
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", worker.getInfo().getId()).filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).describedAs("Buckets for Worker %s", worker.getInfo().getId()).isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId)).isNotNull();
            }
        }
        log.info("Executing query before deletion");
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete the import.
    {
        log.info("Issuing deletion of import {}", importId);
        final URI deleteImportUri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "deleteImport").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getId(), ResourceConstants.TABLE, importId.getTable(), ResourceConstants.IMPORT_ID, importId));
        final Response delete = conquery.getClient().target(deleteImportUri).request(MediaType.APPLICATION_JSON).delete();
        assertThat(delete.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.SUCCESSFUL);
        conquery.waitUntilWorkDone();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // We have deleted an import, so there should be one fewer!
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
        // The deleted import should not be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId)).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", worker.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getId().equals(importId)).isEmpty();
                // No CBlock associated with import may exist
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", worker.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getId().getImp().equals(importId)).isEmpty();
                // The import should not exist anymore.
                assertThat(workerStorage.getImport(importId)).describedAs("Import for Worker %s", worker.getInfo().getId()).isNull();
            }
        }
        log.info("Executing query after deletion");
        // Issue the query again and assert that it returns fewer results.
        IntegrationUtils.assertQueryResult(conquery, query, 1L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    conquery.waitUntilWorkDone();
    // Load more data under the same import name into the same table; only the previously deleted import is re-created.
    {
        // only import the deleted import/table
        final RequiredTable import2Table = test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(importId.getTable().getTable())).findFirst().orElseThrow();
        final ResourceFile csv = import2Table.getCsv();
        final String path = csv.getPath();
        // copy csv to tmp folder
        // Content 2.2 contains an extra entry of a value that hasn't been seen before.
        FileUtils.copyInputStreamToFile(In.resource(path.substring(0, path.lastIndexOf('/')) + "/" + "content2.2.csv").asStream(), new File(conquery.getTmpDir(), csv.getName()));
        File descriptionFile = new File(conquery.getTmpDir(), import2Table.getName() + ConqueryConstants.EXTENSION_DESCRIPTION);
        File preprocessedFile = new File(conquery.getTmpDir(), import2Table.getName() + ConqueryConstants.EXTENSION_PREPROCESSED);
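        // Two sibling files are derived from the table name: the import
        // descriptor written below, and the preprocessed output that
        // preprocessTmp produces from it before it is uploaded to the namespace.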
        // create import descriptor
        TableImportDescriptor desc = new TableImportDescriptor();
        desc.setName(import2Table.getName());
        desc.setTable(import2Table.getName());
        TableInputDescriptor input = new TableInputDescriptor();
        {
            input.setPrimary(import2Table.getPrimaryColumn().createOutput());
            input.setSourceFile(import2Table.getCsv().getName());
            input.setOutput(new OutputDescription[import2Table.getColumns().length]);
            for (int i = 0; i < import2Table.getColumns().length; i++) {
                input.getOutput()[i] = import2Table.getColumns()[i].createOutput();
            }
        }
        desc.setInputs(new TableInputDescriptor[] { input });
        Jackson.MAPPER.writeValue(descriptionFile, desc);
        // preprocess
        conquery.preprocessTmp(conquery.getTmpDir(), List.of(descriptionFile));
        // import the preprocessed file
        conquery.getDatasetsProcessor().addImport(conquery.getNamespace(), new GZIPInputStream(new FileInputStream(preprocessedFile)));
        conquery.waitUntilWorkDone();
    }
    // State after reimport.
    {
        log.info("Checking state after re-import");
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", worker.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getId().equals(importId)).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).isNotEmpty();
            }
        }
        log.info("Executing query after re-import");
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again, that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2);
            for (ShardNode node : conquery2.getShardNodes()) {
                for (Worker worker : node.getWorkers().getWorkers().values()) {
                    if (!worker.getInfo().getDataset().equals(dataset.getId()))
                        continue;
                    final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                    assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", worker.getInfo().getId()).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).filteredOn(bucket -> bucket.getImp().getId().equals(importId)).isNotEmpty();
                }
            }
            log.info("Executing query after re-import");
            // Issue a query and assert that it has the same content as the first time around.
            IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
    }
}
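
Both the before- and after-deletion checks above repeat the same walk over every shard worker of the dataset. A hypothetical helper (the name assertImportPurged is made up for illustration) could fold the after-deletion variant into one reusable assertion; this sketch reuses only calls already shown in the test:

private static void assertImportPurged(StandaloneSupport conquery, Dataset dataset, ImportId importId) {
    for (ShardNode node : conquery.getShardNodes()) {
        for (Worker worker : node.getWorkers().getWorkers().values()) {
            // Only inspect workers that belong to the dataset under test.
            if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                continue;
            }
            final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
            // No bucket may still reference the deleted import.
            assertThat(workerStorage.getAllBuckets()).filteredOn(bucket -> bucket.getImp().getId().equals(importId)).isEmpty();
            // No CBlock may still reference the deleted import.
            assertThat(workerStorage.getAllCBlocks()).filteredOn(cBlock -> cBlock.getBucket().getId().getImp().equals(importId)).isEmpty();
            // The import itself must be gone from worker storage.
            assertThat(workerStorage.getImport(importId)).isNull();
        }
    }
}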
Also used : GZIPInputStream(java.util.zip.GZIPInputStream) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) RequiredTable(com.bakdata.conquery.integration.common.RequiredTable) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) MediaType(javax.ws.rs.core.MediaType) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) Map(java.util.Map) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) URI(java.net.URI) ConqueryConstants(com.bakdata.conquery.ConqueryConstants) ResourceFile(com.bakdata.conquery.integration.common.ResourceFile) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) TableImportDescriptor(com.bakdata.conquery.models.preproc.TableImportDescriptor) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) FileUtils(org.apache.commons.io.FileUtils) FileInputStream(java.io.FileInputStream) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) File(java.io.File) ImportId(com.bakdata.conquery.models.identifiable.ids.specific.ImportId) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) OutputDescription(com.bakdata.conquery.models.preproc.outputs.OutputDescription) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) TableInputDescriptor(com.bakdata.conquery.models.preproc.TableInputDescriptor) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) Jackson(com.bakdata.conquery.io.jackson.Jackson) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) Namespace(com.bakdata.conquery.models.worker.Namespace)

Example 7 with StandaloneSupport

Use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class TableDeletionTest, method execute.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    final MetaStorage storage = conquery.getMetaStorage();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final TableId tableId = TableId.Parser.INSTANCE.parse(dataset.getName(), "test_table2");
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    final int nImports = namespace.getStorage().getAllImports().size();
    // State before deletion.
    {
        log.info("Checking state before deletion");
        // Must contain the import.
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(tableId)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query before deletion");
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete the table.
    {
        log.info("Issuing deletion of table {}", tableId);
        // Delete the table via API.
        // Deletion of tables with associated connectors is not allowed, so this must fail!
        final URI deleteTable = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "remove").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName(), ResourceConstants.TABLE, tableId.toString()));
        final Response failed = conquery.getClient().target(deleteTable).request().delete();
        assertThat(failed.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.CLIENT_ERROR);
        conquery.getDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
        final Response success = conquery.getClient().target(deleteTable).request().delete();
        assertThat(success.getStatusInfo().getStatusCode()).isEqualTo(Response.Status.OK.getStatusCode());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // We have deleted the table's only import, so there should be one fewer!
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
        // The deleted import should not be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().getTable().equals(tableId)).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getTable().getId().equals(tableId)).isEmpty();
                // No CBlock associated with import may exist
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().getId().equals(tableId)).isEmpty();
            }
        }
        log.info("Executing query after deletion");
        // Issue the query again and assert that it now fails, since its concept was deleted.
        IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
    }
    conquery.waitUntilWorkDone();
    // Reload the same data into the same table; only the deleted table and its import are re-created.
    {
        // only import the deleted import/table
        LoadingUtil.importTables(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        assertThat(namespace.getStorage().getTable(tableId)).describedAs("Table after re-import.").isNotNull();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                assertThat(value.getStorage().getCentralRegistry().resolve(tableId)).describedAs("Table in worker storage.").isNotNull();
            }
        }
    }
    // Test state after reimport.
    {
        log.info("Checking state after re-import");
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query after re-import");
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again, that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2);
            for (ShardNode node : conquery2.getShardNodes()) {
                for (Worker value : node.getWorkers().getWorkers().values()) {
                    if (!value.getInfo().getDataset().equals(dataset.getId())) {
                        continue;
                    }
                    final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                    assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
                }
            }
            log.info("Executing query after re-import");
            // Issue a query and assert that it has the same content as the first time around.
            IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
    }
}
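
The two Thread.sleep(100) calls above are fixed waits and can race on slow machines. A hedged alternative, assuming the suite is willing to add the Awaitility test dependency, is to poll until the table has actually vanished from the namespace storage before re-issuing the delete:

import static org.awaitility.Awaitility.await;
import java.time.Duration;

// Sketch: poll instead of sleeping; continues as soon as the namespace
// storage no longer resolves the table, or fails after five seconds.
await().atMost(Duration.ofSeconds(5)).until(() -> namespace.getStorage().getTable(tableId) == null);
conquery.waitUntilWorkDone();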
Also used : TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) Map(java.util.Map) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) URI(java.net.URI) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) Collectors(java.util.stream.Collectors) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) Namespace(com.bakdata.conquery.models.worker.Namespace)

Example 8 with StandaloneSupport

Use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class RestartTest, method execute.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    // read test specification
    String testJson = In.resource("/tests/query/RESTART_TEST_DATA/SIMPLE_TREECONCEPT_Query.json").withUTF8().readAll();
    Validator validator = Validators.newValidator();
    EntityIdMap entityIdMap = IdMapSerialisationTest.createTestPersistentMap();
    ManagerNode manager = testConquery.getStandaloneCommand().getManager();
    AdminDatasetProcessor adminDatasetProcessor = manager.getAdmin().getAdminDatasetProcessor();
    AdminProcessor adminProcessor = manager.getAdmin().getAdminProcessor();
    StandaloneSupport conquery = testConquery.getSupport(name);
    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, validator.validate(test));
    test.importRequiredData(conquery);
    test.executeTest(conquery);
    final int numberOfExecutions = conquery.getMetaStorage().getAllExecutions().size();
    // IDMapping Testing
    NamespaceStorage namespaceStorage = conquery.getNamespaceStorage();
    namespaceStorage.updateIdMapping(entityIdMap);
    final Dataset dataset1 = adminDatasetProcessor.addDataset(TEST_DATASET_1);
    final Dataset dataset2 = adminDatasetProcessor.addDataset(TEST_DATASET_2);
    final Dataset dataset3 = adminDatasetProcessor.addDataset(TEST_DATASET_3);
    final Dataset dataset4 = adminDatasetProcessor.addDataset(TEST_DATASET_4);
    final Dataset dataset5 = adminDatasetProcessor.addDataset(TEST_DATASET_5);
    final Dataset dataset6 = adminDatasetProcessor.addDataset(TEST_DATASET_6);
    MetaStorage storage = conquery.getMetaStorage();
    Role role = new Role("role", "ROLE", storage);
    Role roleToDelete = new Role("roleDelete", "ROLE_DELETE", storage);
    User user = new User("user@test.email", "USER", storage);
    User userToDelete = new User("userDelete@test.email", "USER_DELETE", storage);
    Group group = new Group("group", "GROUP", storage);
    Group groupToDelete = new Group("groupDelete", "GROUP_DELETE", storage);
    {
        // Auth testing (deletion and permission grant)
        // build constellation
        // TODO USE APIS
        adminProcessor.addUser(user);
        adminProcessor.addUser(userToDelete);
        adminProcessor.addRole(role);
        adminProcessor.addRole(roleToDelete);
        adminProcessor.addGroup(group);
        adminProcessor.addGroup(groupToDelete);
        adminProcessor.addRoleTo(user, role);
        adminProcessor.addRoleTo(user, roleToDelete);
        adminProcessor.addRoleTo(userToDelete, role);
        adminProcessor.addRoleTo(userToDelete, roleToDelete);
        adminProcessor.addRoleTo(group, role);
        adminProcessor.addRoleTo(group, roleToDelete);
        adminProcessor.addRoleTo(groupToDelete, role);
        adminProcessor.addRoleTo(groupToDelete, roleToDelete);
        adminProcessor.addUserToGroup(group, user);
        adminProcessor.addUserToGroup(group, userToDelete);
        adminProcessor.addUserToGroup(groupToDelete, user);
        adminProcessor.addUserToGroup(groupToDelete, userToDelete);
        // Adding Permissions
        adminProcessor.createPermission(user, dataset1.createPermission(Ability.READ.asSet()));
        adminProcessor.createPermission(userToDelete, dataset2.createPermission(Ability.READ.asSet()));
        adminProcessor.createPermission(role, dataset3.createPermission(Ability.READ.asSet()));
        adminProcessor.createPermission(roleToDelete, dataset4.createPermission(Ability.READ.asSet()));
        adminProcessor.createPermission(group, dataset5.createPermission(Ability.READ.asSet()));
        adminProcessor.createPermission(groupToDelete, dataset6.createPermission(Ability.READ.asSet()));
        // Delete entities
        // TODO use API
        adminProcessor.deleteUser(userToDelete);
        adminProcessor.deleteRole(roleToDelete);
        adminProcessor.deleteGroup(groupToDelete);
    }
    log.info("Shutting down for restart");
    testConquery.shutdown();
    log.info("Restarting");
    testConquery.beforeAll();
    final StandaloneSupport support = testConquery.openDataset(dataset);
    log.info("Restart complete");
    DatasetRegistry datasetRegistry = support.getDatasetsProcessor().getDatasetRegistry();
    assertThat(support.getMetaStorage().getAllExecutions().size()).as("Executions after restart").isEqualTo(numberOfExecutions);
    test.executeTest(support);
    {
        // Auth actual tests
        User userStored = storage.getUser(user.getId());
        assertThat(userStored).isEqualTo(user);
        assertThat(storage.getRole(role.getId())).isEqualTo(role);
        assertThat(storage.getGroup(group.getId())).isEqualTo(group);
        assertThat(storage.getUser(userToDelete.getId())).as("deleted user should stay deleted").isNull();
        assertThat(storage.getRole(roleToDelete.getId())).as("deleted role should stay deleted").isNull();
        assertThat(storage.getGroup(groupToDelete.getId())).as("deleted group should stay deleted").isNull();
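        // The six datasets encode the grant matrix set up before the restart:
        // dataset1 -> granted directly to user             => permitted
        // dataset2 -> granted to the deleted userToDelete  => not permitted
        // dataset3 -> granted to role, which user holds    => permitted
        // dataset4 -> granted to the deleted roleToDelete  => not permitted
        // dataset5 -> granted to group, containing user    => permitted
        // dataset6 -> granted to the deleted groupToDelete => not permitted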
        assertThat(userStored.isPermitted(datasetRegistry.get(TEST_DATASET_1.getId()).getDataset(), Ability.READ)).isTrue();
        assertThat(userStored.isPermitted(datasetRegistry.get(TEST_DATASET_2.getId()).getDataset(), Ability.READ)).isFalse();
        assertThat(userStored.isPermitted(datasetRegistry.get(TEST_DATASET_3.getId()).getDataset(), Ability.READ)).isTrue();
        assertThat(userStored.isPermitted(datasetRegistry.get(TEST_DATASET_4.getId()).getDataset(), Ability.READ)).isFalse();
        assertThat(userStored.isPermitted(datasetRegistry.get(TEST_DATASET_5.getId()).getDataset(), Ability.READ)).isTrue();
        assertThat(userStored.isPermitted(datasetRegistry.get(TEST_DATASET_6.getId()).getDataset(), Ability.READ)).isFalse();
    }
    EntityIdMap entityIdMapAfterRestart = conquery.getNamespaceStorage().getIdMapping();
    assertThat(entityIdMapAfterRestart).isEqualTo(entityIdMap);
    // We need to reassign the dataset processor because the instance prior to the restart became invalid
    adminDatasetProcessor = testConquery.getStandaloneCommand().getManager().getAdmin().getAdminDatasetProcessor();
    // Cleanup
    adminDatasetProcessor.deleteDataset(dataset1);
    adminDatasetProcessor.deleteDataset(dataset2);
    adminDatasetProcessor.deleteDataset(dataset3);
    adminDatasetProcessor.deleteDataset(dataset4);
    adminDatasetProcessor.deleteDataset(dataset5);
    adminDatasetProcessor.deleteDataset(dataset6);
}
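
The reassignment above generalizes: any handle obtained before testConquery.shutdown() may be stale after the restart. A minimal sketch of the same rule, reusing only calls that appear in this test:

// Sketch: after testConquery.beforeAll(), re-resolve handles from the new
// support object instead of reusing pre-restart instances.
final StandaloneSupport support = testConquery.openDataset(dataset);
final MetaStorage freshStorage = support.getMetaStorage();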
Also used : Group(com.bakdata.conquery.models.auth.entities.Group) User(com.bakdata.conquery.models.auth.entities.User) Dataset(com.bakdata.conquery.models.datasets.Dataset) AdminProcessor(com.bakdata.conquery.resources.admin.rest.AdminProcessor) EntityIdMap(com.bakdata.conquery.models.identifiable.mapping.EntityIdMap) DatasetId(com.bakdata.conquery.models.identifiable.ids.specific.DatasetId) Role(com.bakdata.conquery.models.auth.entities.Role) ManagerNode(com.bakdata.conquery.commands.ManagerNode) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) ConqueryTestSpec(com.bakdata.conquery.integration.json.ConqueryTestSpec) DatasetRegistry(com.bakdata.conquery.models.worker.DatasetRegistry) NamespaceStorage(com.bakdata.conquery.io.storage.NamespaceStorage) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) AdminDatasetProcessor(com.bakdata.conquery.resources.admin.rest.AdminDatasetProcessor) Validator(javax.validation.Validator)

Example 9 with StandaloneSupport

Use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class ReusedQueryTest, method execute.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    final String testJson = In.resource("/tests/query/SECONDARY_ID_MIXED/SECONDARY_IDS_MIXED.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final SecondaryIdQuery query = (SecondaryIdQuery) IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    final ManagedExecutionId id = IntegrationUtils.assertQueryResult(conquery, query, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
    assertThat(id).isNotNull();
    final MetaStorage metaStorage = conquery.getMetaStorage();
    final ManagedQuery execution = (ManagedQuery) metaStorage.getExecution(id);
    // Normal reuse
    {
        final ConceptQuery reused = new ConceptQuery(new CQReusedQuery(execution.getId()));
        IntegrationUtils.assertQueryResult(conquery, reused, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Reuse by API
    {
        final URI reexecuteUri = HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), QueryResource.class, "reexecute").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName(), ResourceConstants.QUERY, execution.getId().toString()));
        final FullExecutionStatus status = conquery.getClient().target(reexecuteUri).request(MediaType.APPLICATION_JSON).post(Entity.entity(null, MediaType.APPLICATION_JSON_TYPE)).readEntity(FullExecutionStatus.class);
        assertThat(status.getStatus()).isIn(ExecutionState.RUNNING, ExecutionState.DONE);
    }
    // Reuse in SecondaryId
    {
        final SecondaryIdQuery reused = new SecondaryIdQuery();
        reused.setRoot(new CQReusedQuery(execution.getId()));
        reused.setSecondaryId(query.getSecondaryId());
        IntegrationUtils.assertQueryResult(conquery, reused, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Reuse in SecondaryId, but do exclude
    {
        final SecondaryIdQuery reused = new SecondaryIdQuery();
        final CQAnd root = new CQAnd();
        reused.setRoot(root);
        final CQReusedQuery reuse = new CQReusedQuery(execution.getId());
        reuse.setExcludeFromSecondaryId(true);
        // We select only a single event of the query by the exact filtering.
        final CQConcept cqConcept = new CQConcept();
        final ConceptId conceptId = new ConceptId(conquery.getDataset().getId(), "concept");
        final Concept<?> concept = conquery.getNamespaceStorage().getConcept(conceptId);
        cqConcept.setElements(List.of(concept));
        final CQTable cqTable = new CQTable();
        cqTable.setConcept(cqConcept);
        final CentralRegistry centralRegistry = conquery.getNamespaceStorage().getCentralRegistry();
        final Connector connector = centralRegistry.resolve(new ConnectorId(conceptId, "connector1"));
        cqTable.setConnector(connector);
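        // The closed range [1.01, 1.01] below is deliberately degenerate: it
        // matches exactly one event value, narrowing the reused query down to
        // a single record (hence the expected result of 1L).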
        cqTable.setFilters(List.of(new FilterValue.CQRealRangeFilter((Filter<Range<BigDecimal>>) centralRegistry.resolve(new FilterId(connector.getId(), "filter")), new Range<>(BigDecimal.valueOf(1.01d), BigDecimal.valueOf(1.01d)))));
        cqConcept.setTables(List.of(cqTable));
        cqConcept.setExcludeFromSecondaryId(false);
        root.setChildren(List.of(reuse, cqConcept));
        reused.setSecondaryId(query.getSecondaryId());
        IntegrationUtils.assertQueryResult(conquery, reused, 1L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Reuse Multiple times with different query types
    {
        final SecondaryIdQuery reused1 = new SecondaryIdQuery();
        reused1.setRoot(new CQReusedQuery(execution.getId()));
        reused1.setSecondaryId(query.getSecondaryId());
        final ManagedExecutionId reused1Id = IntegrationUtils.assertQueryResult(conquery, reused1, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
        final ManagedQuery execution1 = (ManagedQuery) metaStorage.getExecution(reused1Id);
        {
            final SecondaryIdQuery reused2 = new SecondaryIdQuery();
            reused2.setRoot(new CQReusedQuery(execution1.getId()));
            reused2.setSecondaryId(query.getSecondaryId());
            final ManagedExecutionId reused2Id = IntegrationUtils.assertQueryResult(conquery, reused2, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
            final ManagedQuery execution2 = (ManagedQuery) metaStorage.getExecution(reused2Id);
            assertThat(reused2Id).as("Query should be reused.").isEqualTo(reused1Id);
            // Now we change to ConceptQuery
            final ConceptQuery reused3 = new ConceptQuery(new CQReusedQuery(execution2.getId()));
            IntegrationUtils.assertQueryResult(conquery, reused3, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
        {
            final SecondaryIdQuery reusedDiffId = new SecondaryIdQuery();
            reusedDiffId.setRoot(new CQReusedQuery(execution1.getId()));
            // ignored is a single global value and therefore the same as by-PID
            reusedDiffId.setSecondaryId(conquery.getNamespace().getStorage().getSecondaryId(new SecondaryIdDescriptionId(conquery.getDataset().getId(), "ignored")));
            final ManagedExecutionId executionId = IntegrationUtils.assertQueryResult(conquery, reusedDiffId, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
            assertThat(executionId).as("Query should NOT be reused.").isNotEqualTo(reused1Id);
        }
        {
            // Reuse by another user (create a copy of the actual query)
            final SecondaryIdQuery reused = new SecondaryIdQuery();
            reused.setRoot(new CQReusedQuery(execution.getId()));
            reused.setSecondaryId(query.getSecondaryId());
            User shareHolder = new User("shareholder", "ShareHolder", conquery.getMetaStorage());
            conquery.getMetaProcessor().addUser(shareHolder);
            shareHolder.addPermissions(Set.of(dataset.createPermission(Set.of(Ability.READ)), execution.createPermission(Set.of(Ability.READ))));
            ManagedExecutionId copyId = IntegrationUtils.assertQueryResult(conquery, reused, 4L, ExecutionState.DONE, shareHolder, 201);
            ManagedExecution<?> copy = metaStorage.getExecution(copyId);
            // Content-wise, the label and tags should be the same
            assertThat(copy).usingRecursiveComparison().comparingOnlyFields("label", "tags").isEqualTo(execution);
            // However, the object holding the tags must be different, so the two executions are not linked here
            assertThat(copy.getTags()).isNotSameAs(execution.getTags());
            // And the ids must be different
            assertThat(copy.getId()).isNotEqualTo(execution.getId());
        }
    }
}
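
Condensed, the reuse contract exercised above: resubmitting an identical reused query is deduplicated to the same execution id, while changing the secondary id, switching the query type, or submitting as another user produces a distinct execution. A short recap of those assertions (the variable names refer to the blocks above):

// Identical SecondaryIdQuery over the same base execution => deduplicated.
assertThat(reused2Id).isEqualTo(reused1Id);
// Different secondaryId over the same base execution => new execution.
assertThat(executionId).isNotEqualTo(reused1Id);
// Reuse by another user yields a copy with its own id.
assertThat(copy.getId()).isNotEqualTo(execution.getId());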
Also used : Connector(com.bakdata.conquery.models.datasets.concepts.Connector) User(com.bakdata.conquery.models.auth.entities.User) CQTable(com.bakdata.conquery.apiv1.query.concept.filter.CQTable) CQConcept(com.bakdata.conquery.apiv1.query.concept.specific.CQConcept) CentralRegistry(com.bakdata.conquery.models.identifiable.CentralRegistry) URI(java.net.URI) FilterId(com.bakdata.conquery.models.identifiable.ids.specific.FilterId) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) CQAnd(com.bakdata.conquery.apiv1.query.concept.specific.CQAnd) ConnectorId(com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId) Concept(com.bakdata.conquery.models.datasets.concepts.Concept) SecondaryIdDescriptionId(com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId) CQReusedQuery(com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Dataset(com.bakdata.conquery.models.datasets.Dataset) Range(com.bakdata.conquery.models.common.Range) FullExecutionStatus(com.bakdata.conquery.apiv1.FullExecutionStatus) BigDecimal(java.math.BigDecimal) ConceptId(com.bakdata.conquery.models.identifiable.ids.specific.ConceptId) FilterValue(com.bakdata.conquery.apiv1.query.concept.filter.FilterValue) Filter(com.bakdata.conquery.models.datasets.concepts.filters.Filter) SecondaryIdQuery(com.bakdata.conquery.apiv1.query.SecondaryIdQuery) ManagedExecutionId(com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) ConceptQuery(com.bakdata.conquery.apiv1.query.ConceptQuery)

Aggregations

StandaloneSupport (com.bakdata.conquery.util.support.StandaloneSupport): 9 usages
Dataset (com.bakdata.conquery.models.datasets.Dataset): 8 usages
Slf4j (lombok.extern.slf4j.Slf4j): 7 usages
Query (com.bakdata.conquery.apiv1.query.Query): 6 usages
LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil): 6 usages
QueryTest (com.bakdata.conquery.integration.json.QueryTest): 6 usages
MetaStorage (com.bakdata.conquery.io.storage.MetaStorage): 6 usages
Assertions.assertThat (org.assertj.core.api.Assertions.assertThat): 6 usages
ShardNode (com.bakdata.conquery.commands.ShardNode): 5 usages
IntegrationUtils (com.bakdata.conquery.integration.common.IntegrationUtils): 5 usages
LoadingUtil.importSecondaryIds (com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds): 5 usages
JsonIntegrationTest (com.bakdata.conquery.integration.json.JsonIntegrationTest): 5 usages
ModificationShieldedWorkerStorage (com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage): 5 usages
ValidatorHelper (com.bakdata.conquery.models.exceptions.ValidatorHelper): 5 usages
ExecutionState (com.bakdata.conquery.models.execution.ExecutionState): 5 usages
Namespace (com.bakdata.conquery.models.worker.Namespace): 5 usages
Worker (com.bakdata.conquery.models.worker.Worker): 5 usages
TestConquery (com.bakdata.conquery.util.support.TestConquery): 5 usages
In (com.github.powerlibraries.io.In): 5 usages
ProgrammaticIntegrationTest (com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest): 4 usages