Search in sources :

Example 1 with StandaloneSupport

use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class ImportUpdateTest, method execute.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    MetaStorage storage = conquery.getMetaStorage();
    String testJson = In.resource("/tests/query/UPDATE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final ImportId importId1 = ImportId.Parser.INSTANCE.parse(dataset.getName(), "table1", "table1");
    final ImportId importId2 = ImportId.Parser.INSTANCE.parse(dataset.getName(), "table2", "table2");
    QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    final List<RequiredTable> tables = test.getContent().getTables();
    assertThat(tables.size()).isEqualTo(2);
    List<File> cqpps;
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, tables);
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        cqpps = LoadingUtil.generateCqpp(conquery, tables);
        conquery.waitUntilWorkDone();
        assertThat(cqpps.size()).isEqualTo(tables.size());
        LoadingUtil.importCqppFiles(conquery, List.of(cqpps.get(0)));
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // State before update.
    {
        log.info("Checking state before update");
        assertThat(namespace.getStorage().getAllImports()).hasSize(1);
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId1)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", worker.getInfo().getId()).filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).describedAs("Buckets for Worker %s", worker.getInfo().getId()).isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId1)).isNotNull();
            }
        }
        assertThat(namespace.getNumberOfEntities()).isEqualTo(4);
        // assert that the query can be executed after the import
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Try to update an import that does not exist should throw a Not-Found Webapplication Exception
    LoadingUtil.updateCqppFile(conquery, cqpps.get(1), Response.Status.Family.CLIENT_ERROR, "Not Found");
    conquery.waitUntilWorkDone();
    // Load manually new data for import and update the concerned import
    {
        log.info("Manually loading new data for import");
        final RequiredTable importTable = test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(importId1.getTable().getTable())).findFirst().orElseThrow();
        final String csvName = importTable.getCsv().getName();
        final String path = importTable.getCsv().getPath();
        // copy new content of the importTable into the csv-File used by the preprocessor to avoid creating multiple files withe same names
        FileUtils.copyInputStreamToFile(In.resource(path.substring(0, path.lastIndexOf('/')) + "/" + csvName.replace(".csv", ".update.csv")).asStream(), new File(conquery.getTmpDir(), csvName));
        File descriptionFile = new File(conquery.getTmpDir(), importTable.getName() + ConqueryConstants.EXTENSION_DESCRIPTION);
        File newPreprocessedFile = new File(conquery.getTmpDir(), importTable.getName() + ConqueryConstants.EXTENSION_PREPROCESSED);
        // create import descriptor
        {
            TableImportDescriptor desc = new TableImportDescriptor();
            desc.setName(importTable.getName());
            desc.setTable(importTable.getName());
            TableInputDescriptor input = new TableInputDescriptor();
            {
                input.setPrimary(importTable.getPrimaryColumn().createOutput());
                input.setSourceFile(csvName);
                input.setOutput(new OutputDescription[importTable.getColumns().length]);
                for (int i = 0; i < importTable.getColumns().length; i++) {
                    input.getOutput()[i] = importTable.getColumns()[i].createOutput();
                }
            }
            desc.setInputs(new TableInputDescriptor[] { input });
            Jackson.MAPPER.writeValue(descriptionFile, desc);
        }
        // preprocess
        conquery.preprocessTmp(conquery.getTmpDir(), List.of(descriptionFile));
        log.info("updating import");
        // correct update of the import
        LoadingUtil.updateCqppFile(conquery, newPreprocessedFile, Response.Status.Family.SUCCESSFUL, "No Content");
        conquery.waitUntilWorkDone();
    }
    // State after update.
    {
        log.info("Checking state after update");
        assertThat(namespace.getStorage().getAllImports()).hasSize(1);
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId1)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", worker.getInfo().getId()).filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).describedAs("Buckets for Worker %s", worker.getInfo().getId()).isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId1)).isNotNull();
            }
        }
        assertThat(namespace.getNumberOfEntities()).isEqualTo(9);
        // Issue a query and assert that it has more content.
        IntegrationUtils.assertQueryResult(conquery, query, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
}
Also used : ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) RequiredTable(com.bakdata.conquery.integration.common.RequiredTable) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) ConqueryConstants(com.bakdata.conquery.ConqueryConstants) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) TableImportDescriptor(com.bakdata.conquery.models.preproc.TableImportDescriptor) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) FileUtils(org.apache.commons.io.FileUtils) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) File(java.io.File) ImportId(com.bakdata.conquery.models.identifiable.ids.specific.ImportId) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) OutputDescription(com.bakdata.conquery.models.preproc.outputs.OutputDescription) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) TableInputDescriptor(com.bakdata.conquery.models.preproc.TableInputDescriptor) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) Jackson(com.bakdata.conquery.io.jackson.Jackson) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Namespace(com.bakdata.conquery.models.worker.Namespace) Query(com.bakdata.conquery.apiv1.query.Query) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Dataset(com.bakdata.conquery.models.datasets.Dataset) 
ImportId(com.bakdata.conquery.models.identifiable.ids.specific.ImportId) Namespace(com.bakdata.conquery.models.worker.Namespace) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) OutputDescription(com.bakdata.conquery.models.preproc.outputs.OutputDescription) ShardNode(com.bakdata.conquery.commands.ShardNode) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Worker(com.bakdata.conquery.models.worker.Worker) TableInputDescriptor(com.bakdata.conquery.models.preproc.TableInputDescriptor) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) File(java.io.File) TableImportDescriptor(com.bakdata.conquery.models.preproc.TableImportDescriptor) RequiredTable(com.bakdata.conquery.integration.common.RequiredTable)

Example 2 with StandaloneSupport

use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class DatasetDeletionTest, method execute.

/**
 * Tests deletion of an entire dataset: imports data, verifies the populated state, deletes all
 * tables and then the dataset itself, asserts that no imports/buckets/CBlocks remain, re-imports
 * the data into a fresh support, and finally restarts conquery to verify the restored state
 * survives a restart.
 *
 * @param name         name of the standalone support instance to obtain
 * @param testConquery running test harness providing the standalone support
 * @throws Exception if any import, deletion or query step fails
 */
@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    final MetaStorage storage = conquery.getMetaStorage();
    final Dataset dataset = conquery.getDataset();
    Namespace namespace = conquery.getNamespace();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // Remember the number of imports so the re-import below can be checked against it.
    final int nImports = namespace.getStorage().getAllImports().size();
    log.info("Checking state before deletion");
    // Assert state before deletion.
    {
        // The dataset itself must be registered.
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(dataset.getId())).isNotEmpty();
        // Every worker responsible for this dataset must hold CBlocks and buckets.
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query before deletion");
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete Dataset.
    {
        // NOTE(review): log message says "import" but it is the dataset being deleted.
        log.info("Issuing deletion of import {}", dataset);
        // Deleting the dataset while tables with associated connectors still exist is
        // not allowed, so this first attempt must throw!
        assertThatThrownBy(() -> conquery.getDatasetsProcessor().deleteDataset(dataset)).isInstanceOf(WebApplicationException.class);
        // TODO use api
        // Drop all tables (forced) so the dataset itself can be removed.
        conquery.getNamespace().getStorage().getTables().forEach(tableId -> conquery.getDatasetsProcessor().deleteTable(tableId, true));
        conquery.waitUntilWorkDone();
        // Finally delete dataset
        conquery.getDatasetsProcessor().deleteDataset(dataset);
        conquery.waitUntilWorkDone();
        assertThat(storage.getCentralRegistry().getOptional(dataset.getId())).isEmpty();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // The dataset is gone, so no imports should remain at all.
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(0);
        // The deleted import should not be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().getTable().getDataset().equals(dataset.getId())).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).filteredOn(bucket -> bucket.getTable().getDataset().getId().equals(dataset.getId())).isEmpty();
                // No CBlock associated with import may exist
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getTable().getDataset().getId().equals(dataset.getId())).isEmpty();
            }
        }
        // It's not exactly possible to issue a query for a non-existent dataset, so we assert that parsing fails.
        assertThatThrownBy(() -> {
            IntegrationUtils.parseQuery(conquery, test.getRawQuery());
        }).isNotNull();
        IntegrationUtils.assertQueryResult(conquery, query, 0, ExecutionState.FAILED, conquery.getTestUser(), 404);
    }
    // Reload the dataset and assert the state.
    // We have to do some weird trix with StandaloneSupport to open it with another Dataset
    final StandaloneSupport conqueryReimport = testConquery.getSupport(namespace.getDataset().getName());
    {
        // only import the deleted import/table
        LoadingUtil.importTables(conqueryReimport, test.getContent().getTables());
        assertThat(conqueryReimport.getNamespace().getStorage().getTables()).isNotEmpty();
        conqueryReimport.waitUntilWorkDone();
        LoadingUtil.importTableContents(conqueryReimport, test.getContent().getTables());
        conqueryReimport.waitUntilWorkDone();
        LoadingUtil.importConcepts(conqueryReimport, test.getRawConcepts());
        conqueryReimport.waitUntilWorkDone();
        assertThat(conqueryReimport.getDatasetsProcessor().getDatasetRegistry().get(conqueryReimport.getDataset().getId())).describedAs("Dataset after re-import.").isNotNull();
        // Re-import must restore exactly as many imports as existed before the deletion.
        assertThat(conqueryReimport.getNamespace().getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conqueryReimport.getShardNodes()) {
            assertThat(node.getWorkers().getWorkers().values()).filteredOn(w -> w.getInfo().getDataset().equals(conqueryReimport.getDataset().getId())).describedAs("Workers for node {}", node.getName()).isNotEmpty();
        }
        log.info("Executing query after re-import");
        final Query query2 = IntegrationUtils.parseQuery(conqueryReimport, test.getRawQuery());
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conqueryReimport, query2, 2L, ExecutionState.DONE, conqueryReimport.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again, that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        final StandaloneSupport conqueryRestart = testConquery.openDataset(conqueryReimport.getDataset().getId());
        log.info("Checking state after re-start");
        // NOTE(review): the literal 2 presumably equals nImports for this test spec — confirm.
        assertThat(conqueryRestart.getNamespace().getStorage().getAllImports().size()).isEqualTo(2);
        for (ShardNode node : conqueryRestart.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getTable().getDataset().getId().equals(dataset.getId()))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query after restart");
        final Query query3 = IntegrationUtils.parseQuery(conqueryRestart, test.getRawQuery());
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conqueryRestart, query3, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
}
Also used : IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) TestConquery(com.bakdata.conquery.util.support.TestConquery) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) ShardNode(com.bakdata.conquery.commands.ShardNode) QueryTest(com.bakdata.conquery.integration.json.QueryTest) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Worker(com.bakdata.conquery.models.worker.Worker) Slf4j(lombok.extern.slf4j.Slf4j) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) WebApplicationException(javax.ws.rs.WebApplicationException) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Namespace(com.bakdata.conquery.models.worker.Namespace) Query(com.bakdata.conquery.apiv1.query.Query) WebApplicationException(javax.ws.rs.WebApplicationException) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Dataset(com.bakdata.conquery.models.datasets.Dataset) Namespace(com.bakdata.conquery.models.worker.Namespace) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) ShardNode(com.bakdata.conquery.commands.ShardNode) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Worker(com.bakdata.conquery.models.worker.Worker) 
StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport)

Example 3 with StandaloneSupport

use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class FilterTest, method importConcepts.

/**
 * Builds the test TreeConcept with a single connector parsed from the raw connector
 * JSON (naming the connector, its table and its filter first), then registers the
 * concept with the dataset processor.
 *
 * @param support standalone conquery instance supplying dataset, validator and processor
 * @throws JSONException if the connector sub-tree cannot be parsed
 * @throws IOException   if reading the sub-tree fails
 */
private void importConcepts(StandaloneSupport support) throws JSONException, IOException {
    final Dataset targetDataset = support.getDataset();
    // Assemble the concept shell first; its connector is attached afterwards.
    concept = new TreeConcept();
    concept.setLabel("concept");
    concept.setValidator(support.getValidator());
    concept.setDataset(support.getDataset());
    // Name the connector and its backing table, and name the filter node as well.
    rawConnector.put("name", "connector");
    rawConnector.put("table", "table");
    final ObjectNode filterNode = (ObjectNode) rawConnector.get("filters");
    filterNode.put("name", "filter");
    // Parse the connector from JSON, wiring it back to the concept during parsing.
    connector = parseSubTree(support, rawConnector, ConceptTreeConnector.class, conn -> conn.setConcept(concept));
    concept.setConnectors(Collections.singletonList((ConceptTreeConnector) connector));
    support.getDatasetsProcessor().addConcept(targetDataset, concept);
}
Also used : JsonProperty(com.fasterxml.jackson.annotation.JsonProperty) Setter(lombok.Setter) Getter(lombok.Getter) RequiredData(com.bakdata.conquery.integration.common.RequiredData) ConceptTreeConnector(com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Connector(com.bakdata.conquery.models.datasets.concepts.Connector) JsonIgnore(com.fasterxml.jackson.annotation.JsonIgnore) JSONException(com.bakdata.conquery.models.exceptions.JSONException) CQTable(com.bakdata.conquery.apiv1.query.concept.filter.CQTable) TreeConcept(com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept) Range(com.bakdata.conquery.models.common.Range) ResourceFile(com.bakdata.conquery.integration.common.ResourceFile) CPSType(com.bakdata.conquery.io.cps.CPSType) IOException(java.io.IOException) NotNull(javax.validation.constraints.NotNull) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) AbstractQueryEngineTest(com.bakdata.conquery.integration.json.AbstractQueryEngineTest) Dataset(com.bakdata.conquery.models.datasets.Dataset) FilterValue(com.bakdata.conquery.apiv1.query.concept.filter.FilterValue) CQDateRestriction(com.bakdata.conquery.apiv1.query.concept.specific.CQDateRestriction) ConceptQuery(com.bakdata.conquery.apiv1.query.ConceptQuery) Slf4j(lombok.extern.slf4j.Slf4j) LocalDate(java.time.LocalDate) Query(com.bakdata.conquery.apiv1.query.Query) Jackson(com.bakdata.conquery.io.jackson.Jackson) ConqueryTestSpec(com.bakdata.conquery.integration.json.ConqueryTestSpec) CQConcept(com.bakdata.conquery.apiv1.query.concept.specific.CQConcept) Collections(java.util.Collections) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Dataset(com.bakdata.conquery.models.datasets.Dataset) TreeConcept(com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept) 
ConceptTreeConnector(com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector)

Example 4 with StandaloneSupport

use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class SecondaryIdEndpointTest, method execute.

/**
 * Exercises the SecondaryIdDescription admin endpoints: uploads a description, uploads a
 * table referencing it, fetches the dataset admin UI page, then verifies that deleting the
 * description is rejected while the table exists and succeeds once the table is deleted.
 *
 * @param conquery standalone conquery instance under test
 * @throws Exception if any HTTP interaction or assertion fails
 */
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    final SecondaryIdDescription description = new SecondaryIdDescription();
    description.setDescription("description-DESCRIPTION");
    description.setName("description-NAME");
    description.setLabel("description-LABEL");
    // Id under which the description will be registered once uploaded.
    final SecondaryIdDescriptionId id = new SecondaryIdDescriptionId(conquery.getDataset().getId(), description.getName());
    final Response post = uploadDescription(conquery, description);
    log.info("{}", post);
    assertThat(post).describedAs("Response = `%s`", post).returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());
    {
        final Set<FESecondaryId> secondaryIds = fetchSecondaryIdDescriptions(conquery);
        log.info("{}", secondaryIds);
        // Attach the dataset locally so description.getId() resolves to the uploaded id.
        description.setDataset(conquery.getDataset());
        assertThat(secondaryIds).extracting(FESecondaryId::getId).containsExactly(description.getId().toString());
        // Upload Table referencing SecondaryId
        {
            // Build data manually so content is minimal (ie no dataset prefixes etc)
            ObjectNode tableNode = Jackson.MAPPER.createObjectNode();
            tableNode.put("name", "table");
            ObjectNode columnNode = Jackson.MAPPER.createObjectNode();
            columnNode.put("name", "column");
            columnNode.put("type", MajorTypeId.INTEGER.name());
            columnNode.put("secondaryId", description.getId().toStringWithoutDataset());
            // set() replaces the deprecated ObjectNode.put(String, JsonNode) overload.
            // NOTE(review): a single object (not an array) is supplied for "columns";
            // presumably deserialization accepts a single value as an array — confirm.
            tableNode.set("columns", columnNode);
            final Response response = uploadTable(conquery, tableNode);
            assertThat(response.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.SUCCESSFUL);
        }
    }
    {
        // The dataset admin UI page must render successfully while the secondary id exists.
        final URI uri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), DatasetsUIResource.class, "getDataset").buildFromMap(Map.of("dataset", conquery.getDataset().getName()));
        final Response actual = conquery.getClient().target(uri).request().get();
        assertThat(actual).returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());
    }
    {
        // First one fails because table depends on it
        assertThat(deleteDescription(conquery, id)).returns(Response.Status.Family.CLIENT_ERROR, response -> response.getStatusInfo().getFamily());
        deleteTable(conquery, new TableId(conquery.getDataset().getId(), "table"));
        // We've deleted the table, now it should be successful
        assertThat(deleteDescription(conquery, id)).returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());
        final Set<FESecondaryId> secondaryIds = fetchSecondaryIdDescriptions(conquery);
        log.info("{}", secondaryIds);
        assertThat(secondaryIds).isEmpty();
    }
}
Also used : Response(javax.ws.rs.core.Response) SecondaryIdDescriptionId(com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) AdminDatasetResource(com.bakdata.conquery.resources.admin.rest.AdminDatasetResource) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) MediaType(javax.ws.rs.core.MediaType) Map(java.util.Map) TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) URI(java.net.URI) TypeReference(com.fasterxml.jackson.core.type.TypeReference) MajorTypeId(com.bakdata.conquery.models.events.MajorTypeId) IntegrationTest(com.bakdata.conquery.integration.IntegrationTest) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Set(java.util.Set) DatasetsUIResource(com.bakdata.conquery.resources.admin.ui.DatasetsUIResource) FESecondaryId(com.bakdata.conquery.apiv1.frontend.FESecondaryId) Entity(javax.ws.rs.client.Entity) DatasetResource(com.bakdata.conquery.resources.api.DatasetResource) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) SecondaryIdDescription(com.bakdata.conquery.models.datasets.SecondaryIdDescription) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) Jackson(com.bakdata.conquery.io.jackson.Jackson) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) SecondaryIdDescriptionId(com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId) Set(java.util.Set) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) FESecondaryId(com.bakdata.conquery.apiv1.frontend.FESecondaryId) URI(java.net.URI) SecondaryIdDescription(com.bakdata.conquery.models.datasets.SecondaryIdDescription)

Example 5 with StandaloneSupport

use of com.bakdata.conquery.util.support.StandaloneSupport in project conquery by bakdata.

From class ConceptUpdateAndDeletionTest, method execute.

/**
 * Integration test covering the full lifecycle of a concept:
 * import → query → in-place update → restart persistence → deletion → restart persistence.
 * <p>
 * The update path loads a second concept with the SAME id but different children;
 * since an update is implemented as delete-then-add, concept deletion is implicitly
 * covered by the update step as well. Query result counts (1 before, 2 after the
 * update) pin the semantic difference between the two concept versions.
 *
 * @param name         name of the test, used to select the standalone support instance
 * @param testConquery running standalone cluster handle; restarted twice during the test
 * @throws Exception on any setup, import, or restart failure
 */
@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    StandaloneSupport conquery = testConquery.getSupport(name);
    // Read two JSONs with different Trees: the second declares the same concept id
    // "test_tree" but with different children, so loading it acts as an update.
    final String testJson = In.resource("/tests/query/UPDATE_CONCEPT_TESTS/SIMPLE_TREECONCEPT_Query.json").withUTF8().readAll();
    final String testJson2 = In.resource("/tests/query/UPDATE_CONCEPT_TESTS/SIMPLE_TREECONCEPT_2_Query.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final ConceptId conceptId = ConceptId.Parser.INSTANCE.parse(dataset.getName(), "test_tree");
    final Concept<?> concept;
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    final QueryTest test2 = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson2);
    // Manually import data, so we can do our own work.
    // Each step is followed by waitUntilWorkDone() because imports are processed
    // asynchronously on the shard nodes; the next step depends on the previous one.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        assertThat(namespace.getStorage().getConcept(conceptId)).isNotNull();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // State before update.
    {
        log.info("Checking state before update");
        // Must contain the concept — both in the namespace storage and its central registry.
        assertThat(namespace.getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(conceptId)).isNotEmpty();
        // Every worker of this dataset must know the concept and have CBlocks built for it.
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)).isNotEmpty();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)).isNotEmpty();
            }
        }
        log.info("Executing query before update");
        // The first concept version matches exactly 1 result.
        IntegrationUtils.assertQueryResult(conquery, query, 1L, ExecutionState.DONE, conquery.getTestUser(), 201);
        conquery.waitUntilWorkDone();
        log.info("Query before update executed");
    }
    // Load a different concept with the same id (it has different children "C1" that are more than "A1")
    // To perform the update, the old concept will be deleted first and the new concept will be added. That means the deletion of concept is also covered here
    {
        log.info("Executing update");
        LoadingUtil.updateConcepts(conquery, test2.getRawConcepts(), Response.Status.Family.SUCCESSFUL);
        conquery.waitUntilWorkDone();
        log.info("Update executed");
    }
    // Check state after update.
    {
        log.info("Checking state after update");
        // Must contain the concept now.
        assertThat(namespace.getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(conceptId)).isNotEmpty();
        // CBlocks must have been rebuilt for the updated concept on every relevant worker.
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)).isNotEmpty();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)).isNotEmpty();
            }
        }
        log.info("Executing query after update");
        // Assert that it now contains 2 instead of 1.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        conquery.waitUntilWorkDone();
        log.info("Query after update executed");
    }
    // Restart conquery and assert again, that the data is correct
    // (a new StandaloneSupport is obtained after restarting).
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        conquery = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            // Must contain the concept.
            assertThat(conquery.getNamespace().getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isNotEmpty();
            assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)).isNotEmpty();
            for (ShardNode node : conquery.getShardNodes()) {
                for (Worker value : node.getWorkers().getWorkers().values()) {
                    if (!value.getInfo().getDataset().equals(dataset.getId())) {
                        continue;
                    }
                    final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                    assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)).isNotEmpty();
                    assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)).isNotEmpty();
                }
            }
            log.info("Executing query after restart.");
            // Re-assert state: the updated concept (2 results) must have been persisted.
            IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
            conquery.waitUntilWorkDone();
        }
    }
    // Delete the Concept.
    {
        // NOTE: was "Issuing deletion of import {}" — copy-paste from the import test; this deletes a concept.
        log.info("Issuing deletion of concept {}", conceptId);
        concept = Objects.requireNonNull(conquery.getNamespace().getStorage().getConcept(conceptId));
        conquery.getDatasetsProcessor().deleteConcept(concept);
        conquery.waitUntilWorkDone();
    }
    // Check state after deletion.
    {
        log.info("Checking state after deletion");
        // We've deleted the concept so it and its associated cblocks should be gone.
        assertThat(conquery.getNamespace().getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isEmpty();
        assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)).isEmpty();
        assertThat(conquery.getShardNodes().stream().flatMap(node -> node.getWorkers().getWorkers().values().stream()).filter(worker -> worker.getInfo().getDataset().equals(dataset.getId())).map(Worker::getStorage)).noneMatch(workerStorage -> workerStorage.getConcept(conceptId) != null).noneMatch(workerStorage -> workerStorage.getAllCBlocks().stream().anyMatch(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)));
        log.info("Executing query after deletion (EXPECTING AN EXCEPTION IN THE LOGS!)");
        // Issue a query and assert that it is failing.
        IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
    }
    // Restart conquery and assert again, that the state after deletion was maintained.
    {
        {
            testConquery.shutdown();
            // restart
            testConquery.beforeAll();
            conquery = testConquery.openDataset(dataset.getId());
        }
        // Check state after restart.
        {
            log.info("Checking state after restart");
            // We've deleted the concept so it and its associated cblocks should be gone.
            assertThat(conquery.getNamespace().getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isEmpty();
            assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)).isEmpty();
            assertThat(conquery.getShardNodes().stream().flatMap(node -> node.getWorkers().getWorkers().values().stream()).filter(worker -> worker.getInfo().getDataset().equals(dataset.getId())).map(Worker::getStorage)).noneMatch(workerStorage -> workerStorage.getConcept(conceptId) != null).noneMatch(workerStorage -> workerStorage.getAllCBlocks().stream().anyMatch(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)));
            log.info("Executing query after restart (EXPECTING AN EXCEPTION IN THE LOGS!)");
            // Issue a query and assert that it is failing.
            IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
        }
    }
}
Also used : IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) TestConquery(com.bakdata.conquery.util.support.TestConquery) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) ShardNode(com.bakdata.conquery.commands.ShardNode) Concept(com.bakdata.conquery.models.datasets.concepts.Concept) QueryTest(com.bakdata.conquery.integration.json.QueryTest) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Objects(java.util.Objects) Worker(com.bakdata.conquery.models.worker.Worker) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) ConceptId(com.bakdata.conquery.models.identifiable.ids.specific.ConceptId) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) Namespace(com.bakdata.conquery.models.worker.Namespace) Query(com.bakdata.conquery.apiv1.query.Query) QueryTest(com.bakdata.conquery.integration.json.QueryTest) ShardNode(com.bakdata.conquery.commands.ShardNode) Dataset(com.bakdata.conquery.models.datasets.Dataset) Worker(com.bakdata.conquery.models.worker.Worker) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Namespace(com.bakdata.conquery.models.worker.Namespace) ConceptId(com.bakdata.conquery.models.identifiable.ids.specific.ConceptId) 
ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage)

Aggregations

StandaloneSupport (com.bakdata.conquery.util.support.StandaloneSupport)9 Dataset (com.bakdata.conquery.models.datasets.Dataset)8 Slf4j (lombok.extern.slf4j.Slf4j)7 Query (com.bakdata.conquery.apiv1.query.Query)6 LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil)6 QueryTest (com.bakdata.conquery.integration.json.QueryTest)6 MetaStorage (com.bakdata.conquery.io.storage.MetaStorage)6 Assertions.assertThat (org.assertj.core.api.Assertions.assertThat)6 ShardNode (com.bakdata.conquery.commands.ShardNode)5 IntegrationUtils (com.bakdata.conquery.integration.common.IntegrationUtils)5 LoadingUtil.importSecondaryIds (com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds)5 JsonIntegrationTest (com.bakdata.conquery.integration.json.JsonIntegrationTest)5 ModificationShieldedWorkerStorage (com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage)5 ValidatorHelper (com.bakdata.conquery.models.exceptions.ValidatorHelper)5 ExecutionState (com.bakdata.conquery.models.execution.ExecutionState)5 Namespace (com.bakdata.conquery.models.worker.Namespace)5 Worker (com.bakdata.conquery.models.worker.Worker)5 TestConquery (com.bakdata.conquery.util.support.TestConquery)5 In (com.github.powerlibraries.io.In)5 ProgrammaticIntegrationTest (com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest)4