
Example 16 with MetaStorage

Use of com.bakdata.conquery.io.storage.MetaStorage in project conquery by bakdata.

Class ImportDeletionTest, method execute().

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    MetaStorage storage = conquery.getMetaStorage();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final ImportId importId = ImportId.Parser.INSTANCE.parse(dataset.getName(), "test_table2", "test_table2");
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    final int nImports = namespace.getStorage().getAllImports().size();
    // State before deletion.
    {
        log.info("Checking state before deletion");
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", worker.getInfo().getId()).filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).describedAs("Buckets for Worker %s", worker.getInfo().getId()).isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId)).isNotNull();
            }
        }
        log.info("Executing query before deletion");
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete the import.
    {
        log.info("Issuing deletion of import {}", importId);
        final URI deleteImportUri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "deleteImport").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getId(), ResourceConstants.TABLE, importId.getTable(), ResourceConstants.IMPORT_ID, importId));
        final Response delete = conquery.getClient().target(deleteImportUri).request(MediaType.APPLICATION_JSON).delete();
        assertThat(delete.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.SUCCESSFUL);
        conquery.waitUntilWorkDone();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // We have deleted an import, so there should be one fewer now!
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
        // The deleted import should not be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId)).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", worker.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getId().equals(importId)).isEmpty();
                // No CBlock associated with the import may exist.
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", worker.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getId().getImp().equals(importId)).isEmpty();
                // The import should not exist anymore.
                assertThat(workerStorage.getImport(importId)).describedAs("Import for Worker %s", worker.getInfo().getId()).isNull();
            }
        }
        log.info("Executing query after deletion");
        // Issue a query and assert that it has less content.
        IntegrationUtils.assertQueryResult(conquery, query, 1L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    conquery.waitUntilWorkDone();
    // Load more data under the same name into the same table, containing only the previously deleted import.
    {
        // only import the deleted import/table
        final RequiredTable import2Table = test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(importId.getTable().getTable())).findFirst().orElseThrow();
        final ResourceFile csv = import2Table.getCsv();
        final String path = csv.getPath();
        // Copy the CSV to the tmp folder.
        // Content 2.2 contains an extra entry with a value that hasn't been seen before.
        FileUtils.copyInputStreamToFile(In.resource(path.substring(0, path.lastIndexOf('/')) + "/" + "content2.2.csv").asStream(), new File(conquery.getTmpDir(), csv.getName()));
        File descriptionFile = new File(conquery.getTmpDir(), import2Table.getName() + ConqueryConstants.EXTENSION_DESCRIPTION);
        File preprocessedFile = new File(conquery.getTmpDir(), import2Table.getName() + ConqueryConstants.EXTENSION_PREPROCESSED);
        // create import descriptor
        TableImportDescriptor desc = new TableImportDescriptor();
        desc.setName(import2Table.getName());
        desc.setTable(import2Table.getName());
        TableInputDescriptor input = new TableInputDescriptor();
        {
            input.setPrimary(import2Table.getPrimaryColumn().createOutput());
            input.setSourceFile(import2Table.getCsv().getName());
            input.setOutput(new OutputDescription[import2Table.getColumns().length]);
            for (int i = 0; i < import2Table.getColumns().length; i++) {
                input.getOutput()[i] = import2Table.getColumns()[i].createOutput();
            }
        }
        desc.setInputs(new TableInputDescriptor[] { input });
        Jackson.MAPPER.writeValue(descriptionFile, desc);
        // preprocess
        conquery.preprocessTmp(conquery.getTmpDir(), List.of(descriptionFile));
        // import preprocessedFiles
        conquery.getDatasetsProcessor().addImport(conquery.getNamespace(), new GZIPInputStream(new FileInputStream(preprocessedFile)));
        conquery.waitUntilWorkDone();
    }
    // State after reimport.
    {
        log.info("Checking state after re-import");
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", worker.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getId().equals(importId)).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).isNotEmpty();
            }
        }
        log.info("Executing query after re-import");
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2);
            for (ShardNode node : conquery2.getShardNodes()) {
                for (Worker worker : node.getWorkers().getWorkers().values()) {
                    if (!worker.getInfo().getDataset().equals(dataset.getId()))
                        continue;
                    final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                    assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", worker.getInfo().getId()).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).filteredOn(bucket -> bucket.getImp().getId().equals(importId)).isNotEmpty();
                }
            }
            log.info("Executing query after re-import");
            // Issue a query and assert that it has the same content as the first time around.
            IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
    }
}
Also used : GZIPInputStream(java.util.zip.GZIPInputStream) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) RequiredTable(com.bakdata.conquery.integration.common.RequiredTable) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) MediaType(javax.ws.rs.core.MediaType) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) Map(java.util.Map) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) URI(java.net.URI) ConqueryConstants(com.bakdata.conquery.ConqueryConstants) ResourceFile(com.bakdata.conquery.integration.common.ResourceFile) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) TableImportDescriptor(com.bakdata.conquery.models.preproc.TableImportDescriptor) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) FileUtils(org.apache.commons.io.FileUtils) FileInputStream(java.io.FileInputStream) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) File(java.io.File) ImportId(com.bakdata.conquery.models.identifiable.ids.specific.ImportId) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) OutputDescription(com.bakdata.conquery.models.preproc.outputs.OutputDescription) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) TableInputDescriptor(com.bakdata.conquery.models.preproc.TableInputDescriptor) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) Jackson(com.bakdata.conquery.io.jackson.Jackson) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) Namespace(com.bakdata.conquery.models.worker.Namespace)

Example 17 with MetaStorage

Use of com.bakdata.conquery.io.storage.MetaStorage in project conquery by bakdata.

Class TableDeletionTest, method execute().

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    final MetaStorage storage = conquery.getMetaStorage();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final TableId tableId = TableId.Parser.INSTANCE.parse(dataset.getName(), "test_table2");
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    final int nImports = namespace.getStorage().getAllImports().size();
    // State before deletion.
    {
        log.info("Checking state before deletion");
        // Must contain the table.
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(tableId)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query before deletion");
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete the table.
    {
        log.info("Issuing deletion of table {}", tableId);
        // Try to delete the table via the API.
        // But we do not allow deletion of tables with associated connectors, so this should fail!
        final URI deleteTable = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "remove").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName(), ResourceConstants.TABLE, tableId.toString()));
        final Response failed = conquery.getClient().target(deleteTable).request().delete();
        assertThat(failed.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.CLIENT_ERROR);
        conquery.getDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
        final Response success = conquery.getClient().target(deleteTable).request().delete();
        assertThat(success.getStatusInfo().getStatusCode()).isEqualTo(Response.Status.OK.getStatusCode());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // We have deleted the table's import, so there should be one fewer now!
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
        // No import referencing the deleted table should be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().getTable().equals(tableId)).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getTable().getId().equals(tableId)).isEmpty();
                // No CBlock associated with the import may exist.
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().getId().equals(tableId)).isEmpty();
            }
        }
        log.info("Executing query after deletion");
        // Issue the query and assert that it now fails, since the underlying table is gone.
        IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
    }
    conquery.waitUntilWorkDone();
    // Load the same import into the same table, restoring only the deleted import/table.
    {
        // only import the deleted import/table
        LoadingUtil.importTables(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        assertThat(namespace.getStorage().getTable(tableId)).describedAs("Table after re-import.").isNotNull();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                assertThat(value.getStorage().getCentralRegistry().resolve(tableId)).describedAs("Table in worker storage.").isNotNull();
            }
        }
    }
    // Test state after reimport.
    {
        log.info("Checking state after re-import");
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query after re-import");
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2);
            for (ShardNode node : conquery2.getShardNodes()) {
                for (Worker value : node.getWorkers().getWorkers().values()) {
                    if (!value.getInfo().getDataset().equals(dataset.getId())) {
                        continue;
                    }
                    final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                    assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
                }
            }
            log.info("Executing query after re-import");
            // Issue a query and assert that it has the same content as the first time around.
            IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
    }
}
Also used : TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) Map(java.util.Map) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) URI(java.net.URI) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) Collectors(java.util.stream.Collectors) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) Namespace(com.bakdata.conquery.models.worker.Namespace)

Example 18 with MetaStorage

Use of com.bakdata.conquery.io.storage.MetaStorage in project conquery by bakdata.

Class NonPersistentStoreFactory, method createMetaStorage().

/**
 * @implNote Intended for unit tests.
 */
public MetaStorage createMetaStorage() {
    final MetaStorage metaStorage = new MetaStorage(null);
    metaStorage.openStores(this);
    return metaStorage;
}
Also used : MetaStorage(com.bakdata.conquery.io.storage.MetaStorage)
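
A minimal sketch of how a unit test might consume this factory, assuming only the MetaStorage methods already shown on this page (the User constructor, addUser, updateUser); the user name and label are illustrative:

public void exampleMetaStorageUnitTestSetup() {
    // Non-persistent stores, so the test never touches the file system.
    final MetaStorage storage = new NonPersistentStoreFactory().createMetaStorage();
    // The User constructor takes a name, a label and the backing storage.
    final User user = new User("testUser", "Test User", storage);
    // Register the user and persist it in the in-memory MetaStorage.
    storage.addUser(user);
    storage.updateUser(user);
}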

Example 19 with MetaStorage

Use of com.bakdata.conquery.io.storage.MetaStorage in project conquery by bakdata.

Class ApiTokenRealmTest, method execute().

@Override
public void execute(StandaloneSupport conquery) throws Exception {
    final User testUser = conquery.getTestUser();
    final ApiTokenRealm realm = conquery.getAuthorizationController().getAuthenticationRealms().stream().filter(ApiTokenRealm.class::isInstance).map(ApiTokenRealm.class::cast).collect(MoreCollectors.onlyElement());
    final ConqueryTokenRealm conqueryTokenRealm = conquery.getAuthorizationController().getConqueryTokenRealm();
    final String userToken = conqueryTokenRealm.createTokenForUser(testUser.getId());
    // Request ApiToken
    final ApiTokenDataRepresentation.Request tokenRequest1 = new ApiTokenDataRepresentation.Request();
    tokenRequest1.setName("test-token");
    tokenRequest1.setScopes(EnumSet.of(Scopes.DATASET));
    tokenRequest1.setExpirationDate(LocalDate.now().plus(1, ChronoUnit.DAYS));
    ApiToken apiToken1 = requestApiToken(conquery, userToken, tokenRequest1);
    assertThat(apiToken1.getToken()).isNotBlank();
    // List ApiToken
    List<ApiTokenDataRepresentation.Response> apiTokens = conquery.getClient().target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "listUserTokens")).request(MediaType.APPLICATION_JSON_TYPE).header("Authorization", "Bearer " + userToken).get(new GenericType<List<ApiTokenDataRepresentation.Response>>() {
    });
    final ApiTokenDataRepresentation.Response expected = new ApiTokenDataRepresentation.Response();
    expected.setLastUsed(null);
    expected.setCreationDate(LocalDate.now());
    expected.setExpirationDate(LocalDate.now().plus(1, ChronoUnit.DAYS));
    expected.setScopes(EnumSet.of(Scopes.DATASET));
    expected.setName("test-token");
    assertThat(apiTokens).hasSize(1);
    assertThat(apiTokens.get(0)).usingRecursiveComparison().ignoringFields("id").isEqualTo(expected);
    // Request ApiToken 2
    final ApiTokenDataRepresentation.Request tokenRequest2 = new ApiTokenDataRepresentation.Request();
    tokenRequest2.setName("test-token");
    tokenRequest2.setScopes(EnumSet.of(Scopes.ADMIN));
    tokenRequest2.setExpirationDate(LocalDate.now().plus(1, ChronoUnit.DAYS));
    ApiToken apiToken2 = requestApiToken(conquery, userToken, tokenRequest2);
    assertThat(apiToken2.getToken()).isNotBlank();
    // List ApiToken 2
    apiTokens = requestTokenList(conquery, userToken);
    assertThat(apiTokens).hasSize(2);
    // Use ApiToken1 to get Datasets
    List<IdLabel<DatasetId>> datasets = requestDatasets(conquery, apiToken1);
    assertThat(datasets).isNotEmpty();
    // Use ApiToken2 to get Datasets
    datasets = requestDatasets(conquery, apiToken2);
    assertThat(datasets).as("The second token has no scope for dataset").isEmpty();
    // Use ApiToken2 to access Admin
    List<DatasetId> adminDatasets = conquery.getClient().target(HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminDatasetsResource.class, "listDatasets")).request(MediaType.APPLICATION_JSON_TYPE).header("Authorization", "Bearer " + apiToken2.getToken()).get(new GenericType<>() {
    });
    assertThat(adminDatasets).as("The second token has scope for admin").isNotEmpty();
    // Try to delete ApiToken2 with ApiToken (should fail)
    final UUID id2 = apiTokens.stream().filter(t -> t.getScopes().contains(Scopes.ADMIN)).map(ApiTokenDataRepresentation.Response::getId).collect(MoreCollectors.onlyElement());
    Response response = conquery.getClient().target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "deleteToken")).resolveTemplate(ApiTokenResource.TOKEN, id2).request(MediaType.APPLICATION_JSON_TYPE).header("Authorization", "Bearer " + apiToken2.getToken()).delete(Response.class);
    assertThat(response.getStatus()).as("It is forbidden to act on ApiTokens with ApiTokens").isEqualTo(403);
    // Delete ApiToken2 with user token
    response = conquery.getClient().target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "deleteToken")).resolveTemplate(ApiTokenResource.TOKEN, id2).request(MediaType.APPLICATION_JSON_TYPE).header("Authorization", "Bearer " + userToken).delete(Response.class);
    assertThat(response.getStatus()).as("It is okay to act on ApiTokens with UserTokens").isEqualTo(200);
    assertThat(realm.listUserToken(testUser)).hasSize(1);
    // Try to use the deleted token to access Admin
    response = conquery.getClient().target(HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminDatasetsResource.class, "listDatasets")).request(MediaType.APPLICATION_JSON_TYPE).header("Authorization", "Bearer " + apiToken2.getToken()).get(Response.class);
    assertThat(response.getStatus()).as("Cannot use deleted token").isEqualTo(401);
    // Try to act on tokens from another user
    final MetaStorage metaStorage = conquery.getMetaStorage();
    final User user2 = new User("TestUser2", "TestUser2", metaStorage);
    metaStorage.addUser(user2);
    final String user2Token = conqueryTokenRealm.createTokenForUser(user2.getId());
    // Try to delete ApiToken1 with another user's token (should fail)
    final UUID id1 = apiTokens.stream().filter(t -> t.getScopes().contains(Scopes.DATASET)).map(ApiTokenDataRepresentation.Response::getId).collect(MoreCollectors.onlyElement());
    response = conquery.getClient().target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "deleteToken")).resolveTemplate(ApiTokenResource.TOKEN, id1).request(MediaType.APPLICATION_JSON_TYPE).header("Authorization", "Bearer " + user2Token).delete(Response.class);
    assertThat(response.getStatus()).as("It is forbidden to act on someone else ApiTokens").isEqualTo(403);
    // Request ApiToken 3 (expired)
    final ApiTokenDataRepresentation.Request tokenRequest3 = new ApiTokenDataRepresentation.Request();
    tokenRequest3.setName("test-token");
    tokenRequest3.setScopes(EnumSet.of(Scopes.DATASET));
    tokenRequest3.setExpirationDate(LocalDate.now().minus(1, ChronoUnit.DAYS));
    assertThatThrownBy(() -> requestApiToken(conquery, userToken, tokenRequest3)).as("Expiration date is in the past").isExactlyInstanceOf(ClientErrorException.class).hasMessageContaining("HTTP 422");
    // Craft an expired token behind the validation to simulate the use of an expired token
    ApiToken apiToken3 = realm.createApiToken(user2, tokenRequest3);
    assertThatThrownBy(() -> requestDatasets(conquery, apiToken3)).as("Expired token").isExactlyInstanceOf(NotAuthorizedException.class);
}
Also used : User(com.bakdata.conquery.models.auth.entities.User) ConqueryTokenRealm(com.bakdata.conquery.models.auth.conquerytoken.ConqueryTokenRealm) ApiTokenDataRepresentation(com.bakdata.conquery.apiv1.auth.ApiTokenDataRepresentation) DatasetId(com.bakdata.conquery.models.identifiable.ids.specific.DatasetId) ApiTokenResource(com.bakdata.conquery.resources.api.ApiTokenResource) Response(javax.ws.rs.core.Response) IdLabel(com.bakdata.conquery.apiv1.IdLabel) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) ApiToken(com.bakdata.conquery.models.auth.apitoken.ApiToken) ClientErrorException(javax.ws.rs.ClientErrorException) ApiTokenRealm(com.bakdata.conquery.models.auth.apitoken.ApiTokenRealm) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) UUID(java.util.UUID) AdminDatasetsResource(com.bakdata.conquery.resources.admin.rest.AdminDatasetsResource)
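
The helpers requestApiToken, requestTokenList, and requestDatasets are not shown in this excerpt. A plausible sketch of requestApiToken, following the client pattern used above; the resource method name "createToken" is an assumption, not confirmed by this excerpt:

private ApiToken requestApiToken(StandaloneSupport conquery, String userToken, ApiTokenDataRepresentation.Request request) {
    // Build the token-creation endpoint; "createToken" is a hypothetical resource method name.
    return conquery.getClient()
                   .target(HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), ApiTokenResource.class, "createToken"))
                   .request(MediaType.APPLICATION_JSON_TYPE)
                   .header("Authorization", "Bearer " + userToken)
                   .post(javax.ws.rs.client.Entity.json(request), ApiToken.class);
}

requestTokenList and requestDatasets would follow the same target/request/header pattern with GET calls.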

Example 20 with MetaStorage

Use of com.bakdata.conquery.io.storage.MetaStorage in project conquery by bakdata.

Class DownloadLinkGeneration, method execute().

@Override
public void execute(StandaloneSupport conquery) throws Exception {
    final MetaStorage storage = conquery.getMetaStorage();
    final User user = new User("testU", "testU", storage);
    final String testJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(conquery.getDataset(), testJson);
    storage.updateUser(user);
    // Manually import data
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
    test.importRequiredData(conquery);
    // Create execution for download
    ManagedQuery exec = new ManagedQuery(test.getQuery(), user, conquery.getDataset());
    storage.addExecution(exec);
    user.addPermission(DatasetPermission.onInstance(Set.of(Ability.READ), conquery.getDataset().getId()));
    {
        // Try to generate a download link: should not be possible, because the execution hasn't been run yet
        FullExecutionStatus status = IntegrationUtils.getExecutionStatus(conquery, exec.getId(), user, 200);
        assertThat(status.getResultUrls()).isEmpty();
    }
    {
        // Tinker with the state of the execution and try again: still not possible because of missing permissions
        exec.setState(ExecutionState.DONE);
        FullExecutionStatus status = IntegrationUtils.getExecutionStatus(conquery, exec.getId(), user, 200);
        assertThat(status.getResultUrls()).isEmpty();
    }
    {
        // Add permission to download: now it should be possible
        user.addPermission(DatasetPermission.onInstance(Set.of(Ability.DOWNLOAD), conquery.getDataset().getId()));
        FullExecutionStatus status = IntegrationUtils.getExecutionStatus(conquery, exec.getId(), user, 200);
        // This URL is missing the `/api` path part, because we use the standard UriBuilder here
        assertThat(status.getResultUrls()).contains(new URL(String.format("%s/datasets/%s/result/%s.csv", conquery.defaultApiURIBuilder().toString(), conquery.getDataset().getId(), exec.getId())));
    }
}
Also used : User(com.bakdata.conquery.models.auth.entities.User) QueryTest(com.bakdata.conquery.integration.json.QueryTest) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) FullExecutionStatus(com.bakdata.conquery.apiv1.FullExecutionStatus) URL(java.net.URL)

Aggregations

MetaStorage (com.bakdata.conquery.io.storage.MetaStorage): 23 usages
Dataset (com.bakdata.conquery.models.datasets.Dataset): 12 usages
User (com.bakdata.conquery.models.auth.entities.User): 9 usages
Role (com.bakdata.conquery.models.auth.entities.Role): 8 usages
QueryTest (com.bakdata.conquery.integration.json.QueryTest): 7 usages
StandaloneSupport (com.bakdata.conquery.util.support.StandaloneSupport): 6 usages
Response (javax.ws.rs.core.Response): 6 usages
Query (com.bakdata.conquery.apiv1.query.Query): 5 usages
Group (com.bakdata.conquery.models.auth.entities.Group): 5 usages
ExecutionState (com.bakdata.conquery.models.execution.ExecutionState): 5 usages
DatasetId (com.bakdata.conquery.models.identifiable.ids.specific.DatasetId): 5 usages
Namespace (com.bakdata.conquery.models.worker.Namespace): 5 usages
ShardNode (com.bakdata.conquery.commands.ShardNode): 4 usages
IntegrationUtils (com.bakdata.conquery.integration.common.IntegrationUtils): 4 usages
LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil): 4 usages
LoadingUtil.importSecondaryIds (com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds): 4 usages
JsonIntegrationTest (com.bakdata.conquery.integration.json.JsonIntegrationTest): 4 usages
ModificationShieldedWorkerStorage (com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage): 4 usages
ValidatorHelper (com.bakdata.conquery.models.exceptions.ValidatorHelper): 4 usages
DatasetRegistry (com.bakdata.conquery.models.worker.DatasetRegistry): 4 usages