
Example 1 with TableId

Use of com.bakdata.conquery.models.identifiable.ids.specific.TableId in project conquery by bakdata.

The class AdminDatasetProcessor, method deleteSecondaryId:

/**
 * Delete SecondaryId if it does not have any dependents.
 */
public synchronized void deleteSecondaryId(@NonNull SecondaryIdDescription secondaryId) {
    final Namespace namespace = datasetRegistry.get(secondaryId.getDataset().getId());
    // Before we commit this deletion, we check if this SecondaryId still has dependent Columns.
    final List<Column> dependents = namespace.getStorage().getTables().stream()
                                             .map(Table::getColumns)
                                             .flatMap(Arrays::stream)
                                             .filter(column -> secondaryId.equals(column.getSecondaryId()))
                                             .collect(Collectors.toList());
    if (!dependents.isEmpty()) {
        final Set<TableId> tables = dependents.stream()
                                              .map(Column::getTable)
                                              .map(Identifiable::getId)
                                              .collect(Collectors.toSet());
        log.error("SecondaryId[{}] still present on {}", secondaryId, tables);
        throw new ForbiddenException(String.format("SecondaryId still has dependencies. %s", tables));
    }
    log.info("Deleting SecondaryId[{}]", secondaryId);
    namespace.getStorage().removeSecondaryId(secondaryId.getId());
    namespace.sendToAll(new RemoveSecondaryId(secondaryId));
}
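
Seen from the caller's side, this guard surfaces as an HTTP 403. A minimal sketch of handling it (processor and secondaryId are hypothetical stand-in variables, and log is assumed to be an Slf4j logger):

try {
    processor.deleteSecondaryId(secondaryId);
} catch (ForbiddenException e) {
    // Thrown while dependent columns still exist; the message lists the
    // dependent tables, e.g. "SecondaryId still has dependencies. [dataset.table]".
    log.warn("Deletion rejected: {}", e.getMessage());
}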
Also used : Arrays(java.util.Arrays) ConqueryConfig(com.bakdata.conquery.models.config.ConqueryConfig) SneakyThrows(lombok.SneakyThrows) RequiredArgsConstructor(lombok.RequiredArgsConstructor) EntityIdMap(com.bakdata.conquery.models.identifiable.mapping.EntityIdMap) Import(com.bakdata.conquery.models.datasets.Import) DictionaryId(com.bakdata.conquery.models.identifiable.ids.specific.DictionaryId) ConceptId(com.bakdata.conquery.models.identifiable.ids.specific.ConceptId) DatasetId(com.bakdata.conquery.models.identifiable.ids.specific.DatasetId) InternalOnly(com.bakdata.conquery.io.jackson.InternalOnly) RemoveImportJob(com.bakdata.conquery.models.messages.namespaces.specific.RemoveImportJob) NonNull(lombok.NonNull) Table(com.bakdata.conquery.models.datasets.Table) Concept(com.bakdata.conquery.models.datasets.concepts.Concept) RemoveConcept(com.bakdata.conquery.models.messages.namespaces.specific.RemoveConcept) Set(java.util.Set) Validator(javax.validation.Validator) Collectors(java.util.stream.Collectors) NotFoundException(javax.ws.rs.NotFoundException) RemoveSecondaryId(com.bakdata.conquery.models.messages.namespaces.specific.RemoveSecondaryId) Dataset(com.bakdata.conquery.models.datasets.Dataset) Objects(java.util.Objects) SecondaryIdDescription(com.bakdata.conquery.models.datasets.SecondaryIdDescription) JobManager(com.bakdata.conquery.models.jobs.JobManager) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) WebApplicationException(javax.ws.rs.WebApplicationException) Jackson(com.bakdata.conquery.io.jackson.Jackson) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Column(com.bakdata.conquery.models.datasets.Column) Namespace(com.bakdata.conquery.models.worker.Namespace) RemoveTable(com.bakdata.conquery.models.messages.namespaces.specific.RemoveTable) Getter(lombok.Getter) AddWorker(com.bakdata.conquery.models.messages.network.specific.AddWorker) UpdateTable(com.bakdata.conquery.models.messages.namespaces.specific.UpdateTable) Connector(com.bakdata.conquery.models.datasets.concepts.Connector) UpdateSecondaryId(com.bakdata.conquery.models.messages.namespaces.specific.UpdateSecondaryId) NamespaceStorage(com.bakdata.conquery.io.storage.NamespaceStorage) IdMutex(com.bakdata.conquery.models.identifiable.IdMutex) TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) CsvParser(com.univocity.parsers.csv.CsvParser) UpdateMatchingStatsMessage(com.bakdata.conquery.models.messages.namespaces.specific.UpdateMatchingStatsMessage) Identifiable(com.bakdata.conquery.models.identifiable.Identifiable) SimpleJob(com.bakdata.conquery.models.jobs.SimpleJob) ForbiddenException(javax.ws.rs.ForbiddenException) ImportJob(com.bakdata.conquery.models.jobs.ImportJob) IOException(java.io.IOException) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) UpdateConcept(com.bakdata.conquery.models.messages.namespaces.specific.UpdateConcept) ShardNodeInformation(com.bakdata.conquery.models.worker.ShardNodeInformation) StructureNode(com.bakdata.conquery.models.datasets.concepts.StructureNode) FilterSearch(com.bakdata.conquery.apiv1.FilterSearch) DatasetRegistry(com.bakdata.conquery.models.worker.DatasetRegistry) Collections(java.util.Collections) InputStream(java.io.InputStream)

Example 2 with TableId

Use of com.bakdata.conquery.models.identifiable.ids.specific.TableId in project conquery by bakdata.

The class SecondaryIdEndpointTest, method execute:

@Override
public void execute(StandaloneSupport conquery) throws Exception {
    final SecondaryIdDescription description = new SecondaryIdDescription();
    description.setDescription("description-DESCRIPTION");
    description.setName("description-NAME");
    description.setLabel("description-LABEL");
    final SecondaryIdDescriptionId id = new SecondaryIdDescriptionId(conquery.getDataset().getId(), description.getName());
    final Response post = uploadDescription(conquery, description);
    log.info("{}", post);
    assertThat(post).describedAs("Response = `%s`", post).returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());
    {
        final Set<FESecondaryId> secondaryIds = fetchSecondaryIdDescriptions(conquery);
        log.info("{}", secondaryIds);
        description.setDataset(conquery.getDataset());
        assertThat(secondaryIds).extracting(FESecondaryId::getId).containsExactly(description.getId().toString());
        // Upload Table referencing SecondaryId
        {
            // Build data manually so the content is minimal (i.e. no dataset prefixes etc.)
            ObjectNode tableNode = Jackson.MAPPER.createObjectNode();
            tableNode.put("name", "table");
            ObjectNode columnNode = Jackson.MAPPER.createObjectNode();
            columnNode.put("name", "column");
            columnNode.put("type", MajorTypeId.INTEGER.name());
            columnNode.put("secondaryId", description.getId().toStringWithoutDataset());
            tableNode.put("columns", columnNode);
            final Response response = uploadTable(conquery, tableNode);
            assertThat(response.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.SUCCESSFUL);
        }
    }
    {
        final URI uri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), DatasetsUIResource.class, "getDataset").buildFromMap(Map.of("dataset", conquery.getDataset().getName()));
        final Response actual = conquery.getClient().target(uri).request().get();
        assertThat(actual).returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());
    }
    {
        // First one fails because table depends on it
        assertThat(deleteDescription(conquery, id)).returns(Response.Status.Family.CLIENT_ERROR, response -> response.getStatusInfo().getFamily());
        deleteTable(conquery, new TableId(conquery.getDataset().getId(), "table"));
        // We've deleted the table, now it should be successful
        assertThat(deleteDescription(conquery, id)).returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());
        final Set<FESecondaryId> secondaryIds = fetchSecondaryIdDescriptions(conquery);
        log.info("{}", secondaryIds);
        assertThat(secondaryIds).isEmpty();
    }
}
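
The test above constructs a TableId directly for the delete call; Example 3 below obtains one from the parser instead. A minimal sketch of the two equivalent routes (the name-only DatasetId constructor is an assumption here, not something this page shows):

// Direct construction, as in the deleteTable call above; DatasetId's
// name-only constructor is assumed.
TableId direct = new TableId(new DatasetId("dataset"), "table");
// Parsed from strings, as in TableDeletionTest below.
TableId parsed = TableId.Parser.INSTANCE.parse("dataset", "table");
// Both should render as "dataset.table" via toString(); getTable() yields the bare table name.
System.out.println(direct + " / " + parsed.getTable());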
Also used : Response(javax.ws.rs.core.Response) SecondaryIdDescriptionId(com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) AdminDatasetResource(com.bakdata.conquery.resources.admin.rest.AdminDatasetResource) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) MediaType(javax.ws.rs.core.MediaType) Map(java.util.Map) TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) URI(java.net.URI) TypeReference(com.fasterxml.jackson.core.type.TypeReference) MajorTypeId(com.bakdata.conquery.models.events.MajorTypeId) IntegrationTest(com.bakdata.conquery.integration.IntegrationTest) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Set(java.util.Set) DatasetsUIResource(com.bakdata.conquery.resources.admin.ui.DatasetsUIResource) FESecondaryId(com.bakdata.conquery.apiv1.frontend.FESecondaryId) Entity(javax.ws.rs.client.Entity) DatasetResource(com.bakdata.conquery.resources.api.DatasetResource) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) SecondaryIdDescription(com.bakdata.conquery.models.datasets.SecondaryIdDescription) Slf4j(lombok.extern.slf4j.Slf4j) Jackson(com.bakdata.conquery.io.jackson.Jackson) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper)

Example 3 with TableId

Use of com.bakdata.conquery.models.identifiable.ids.specific.TableId in project conquery by bakdata.

The class TableDeletionTest, method execute:

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    final MetaStorage storage = conquery.getMetaStorage();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final TableId tableId = TableId.Parser.INSTANCE.parse(dataset.getName(), "test_table2");
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    final int nImports = namespace.getStorage().getAllImports().size();
    // State before deletion.
    {
        log.info("Checking state before deletion");
        // Must contain the table.
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(tableId)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query before deletion");
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete the table.
    {
        log.info("Issuing deletion of table {}", tableId);
        // Delete the table via the admin API.
        // But we do not allow deletion of tables with associated connectors, so this should throw!
        final URI deleteTable = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "remove")
                                               .buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName(), ResourceConstants.TABLE, tableId.toString()));
        final Response failed = conquery.getClient().target(deleteTable).request().delete();
        assertThat(failed.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.CLIENT_ERROR);
        conquery.getDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
        final Response success = conquery.getClient().target(deleteTable).request().delete();
        assertThat(success.getStatusInfo().getStatusCode()).isEqualTo(Response.Status.OK.getStatusCode());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // We have deleted the table, so there should be one import less than before!
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
        // No import referencing the deleted table should remain.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().getTable().equals(tableId)).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getTable().getId().equals(tableId)).isEmpty();
                // No CBlock associated with import may exist
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().getId().equals(tableId)).isEmpty();
            }
        }
        log.info("Executing query after deletion");
        // Issue a query and assert that it fails, now that the table is gone.
        IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
    }
    conquery.waitUntilWorkDone();
    // Load the same data into the same table; only the deleted table is re-imported.
    {
        // Only import the deleted import/table.
        LoadingUtil.importTables(conquery, test.getContent().getTables().stream()
                                               .filter(table -> table.getName().equalsIgnoreCase(tableId.getTable()))
                                               .collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables().stream()
                                                      .filter(table -> table.getName().equalsIgnoreCase(tableId.getTable()))
                                                      .collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        assertThat(namespace.getStorage().getTable(tableId)).describedAs("Table after re-import.").isNotNull();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                assertThat(value.getStorage().getCentralRegistry().resolve(tableId)).describedAs("Table in worker storage.").isNotNull();
            }
        }
    }
    // Test state after reimport.
    {
        log.info("Checking state after re-import");
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query after re-import");
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2);
            for (ShardNode node : conquery2.getShardNodes()) {
                for (Worker value : node.getWorkers().getWorkers().values()) {
                    if (!value.getInfo().getDataset().equals(dataset.getId())) {
                        continue;
                    }
                    final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                    assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
                }
            }
            log.info("Executing query after re-import");
            // Issue a query and assert that it has the same content as the first time around.
            IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
    }
}
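
The per-worker assertions in this test repeat one predicate: does a bucket (or a CBlock's bucket) belong to the deleted table? A small helper capturing that check could look like the following sketch; the Bucket import path is an assumption, while the call chain mirrors the test above:

// Sketch only: the Bucket import path is assumed.
import com.bakdata.conquery.models.events.Bucket;
import com.bakdata.conquery.models.identifiable.ids.specific.TableId;

final class BucketChecks {
    /** True iff the bucket's import belongs to the given table. */
    static boolean belongsTo(Bucket bucket, TableId tableId) {
        return bucket.getImp().getTable().getId().equals(tableId);
    }
}

With such a helper, the repeated filteredOn lambdas above collapse to bucket -> BucketChecks.belongsTo(bucket, tableId).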
Also used : TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) Map(java.util.Map) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) URI(java.net.URI) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) Collectors(java.util.stream.Collectors) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) Namespace(com.bakdata.conquery.models.worker.Namespace)

Aggregations

TableId (com.bakdata.conquery.models.identifiable.ids.specific.TableId) 3
Response (javax.ws.rs.core.Response) 3
Slf4j (lombok.extern.slf4j.Slf4j) 3
Jackson (com.bakdata.conquery.io.jackson.Jackson) 2
MetaStorage (com.bakdata.conquery.io.storage.MetaStorage) 2
Dataset (com.bakdata.conquery.models.datasets.Dataset) 2
SecondaryIdDescription (com.bakdata.conquery.models.datasets.SecondaryIdDescription) 2
ValidatorHelper (com.bakdata.conquery.models.exceptions.ValidatorHelper) 2
Namespace (com.bakdata.conquery.models.worker.Namespace) 2
ResourceConstants (com.bakdata.conquery.resources.ResourceConstants) 2
AdminTablesResource (com.bakdata.conquery.resources.admin.rest.AdminTablesResource) 2
HierarchyHelper (com.bakdata.conquery.resources.hierarchies.HierarchyHelper) 2
StandaloneSupport (com.bakdata.conquery.util.support.StandaloneSupport) 2
URI (java.net.URI) 2
Map (java.util.Map) 2
Set (java.util.Set) 2
Collectors (java.util.stream.Collectors) 2
Assertions.assertThat (org.assertj.core.api.Assertions.assertThat) 2
FilterSearch (com.bakdata.conquery.apiv1.FilterSearch) 1
FESecondaryId (com.bakdata.conquery.apiv1.frontend.FESecondaryId) 1