Search in sources :

Example 36 with Dataset

use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.

The following example is taken from class TableDeletionTest, method execute.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    // Integration test for table deletion: import data, verify the cluster state,
    // delete the table via the admin API, verify the state again, re-import the
    // same table, and finally restart the whole cluster to check persistence.
    final StandaloneSupport conquery = testConquery.getSupport(name);
    // NOTE(review): 'storage' is never read in this method — candidate for removal.
    final MetaStorage storage = conquery.getMetaStorage();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    // The table that will be deleted and later re-imported.
    final TableId tableId = TableId.Parser.INSTANCE.parse(dataset.getName(), "test_table2");
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        // Each import step is asynchronous; wait for the cluster to settle after each one.
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // Baseline import count, used below to verify the delta after deletion and re-import.
    final int nImports = namespace.getStorage().getAllImports().size();
    // State before deletion.
    {
        log.info("Checking state before deletion");
        // Must contain the import.
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(tableId)).isNotEmpty();
        // Every worker that belongs to this dataset must hold buckets and CBlocks.
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query before deletion");
        // Expect 2 results while the table is present.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete the import.
    {
        log.info("Issuing deletion of import {}", tableId);
        // Delete the import via API.
        // But, we do not allow deletion of tables with associated connectors, so this should throw!
        final URI deleteTable = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "remove").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName(), ResourceConstants.TABLE, tableId.toString()));
        final Response failed = conquery.getClient().target(deleteTable).request().delete();
        assertThat(failed.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.CLIENT_ERROR);
        // Remove the concept first so the table no longer has connectors attached,
        // then the deletion is expected to succeed.
        conquery.getDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next());
        // NOTE(review): fixed sleep before waiting — presumably gives the async deletion
        // time to be enqueued before waitUntilWorkDone() samples the queues; confirm.
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
        final Response success = conquery.getClient().target(deleteTable).request().delete();
        assertThat(success.getStatusInfo().getStatusCode()).isEqualTo(Response.Status.OK.getStatusCode());
        Thread.sleep(100);
        conquery.waitUntilWorkDone();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // The deleted table contributed exactly one import, so the count drops by one.
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
        // The deleted import should not be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().getTable().equals(tableId)).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getTable().getId().equals(tableId)).isEmpty();
                // No CBlock associated with import may exist
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().getId().equals(tableId)).isEmpty();
            }
        }
        log.info("Executing query after deletion");
        // The table is gone, so the query is expected to fail (FAILED / HTTP 400).
        IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
    }
    conquery.waitUntilWorkDone();
    // Load the same import into the same table, with only the deleted import/table
    {
        // only import the deleted import/table
        LoadingUtil.importTables(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
        conquery.waitUntilWorkDone();
        // Concepts were deleted above, so they must be re-imported as well.
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        assertThat(namespace.getStorage().getTable(tableId)).describedAs("Table after re-import.").isNotNull();
        // The table must also be resolvable on every worker of this dataset.
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                assertThat(value.getStorage().getCentralRegistry().resolve(tableId)).describedAs("Table in worker storage.").isNotNull();
            }
        }
    }
    // Test state after reimport.
    {
        log.info("Checking state after re-import");
        // Re-import restores the original import count.
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
            }
        }
        log.info("Executing query after re-import");
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again, that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            // NOTE(review): hard-coded 2 instead of nImports — confirm this matches
            // the fixture's import count; the earlier checks use nImports.
            assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2);
            for (ShardNode node : conquery2.getShardNodes()) {
                for (Worker value : node.getWorkers().getWorkers().values()) {
                    if (!value.getInfo().getDataset().equals(dataset.getId())) {
                        continue;
                    }
                    final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                    assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
                }
            }
            log.info("Executing query after re-import");
            // Issue a query and assert that it has the same content as the first time around.
            IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
    }
}
Also used : TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) Map(java.util.Map) TableId(com.bakdata.conquery.models.identifiable.ids.specific.TableId) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) URI(java.net.URI) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) Collectors(java.util.stream.Collectors) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) Namespace(com.bakdata.conquery.models.worker.Namespace) Query(com.bakdata.conquery.apiv1.query.Query) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Dataset(com.bakdata.conquery.models.datasets.Dataset) URI(java.net.URI) 
Namespace(com.bakdata.conquery.models.worker.Namespace) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) Response(javax.ws.rs.core.Response) ShardNode(com.bakdata.conquery.commands.ShardNode) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Worker(com.bakdata.conquery.models.worker.Worker) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport)

Example 37 with Dataset

use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.

The following example is taken from class FEValueTest, method sortedValidityDates.

@Test
public void sortedValidityDates() {
    // Minimal dataset/table/column scaffolding for the connector under test.
    Dataset dataset = new Dataset();
    dataset.setName("testDataset");

    Table table = new Table();
    table.setName("testTable");
    table.setDataset(dataset);

    Column column = new Column();
    column.setTable(table);
    column.setName("testColumn");

    TreeConcept concept = new TreeConcept();
    concept.setName("testConcept");
    concept.setDataset(dataset);

    ConceptTreeConnector connector = new ConceptTreeConnector();
    connector.setName("testConnector");

    // Three validity dates, named val0..val2 in declaration order.
    ValidityDate[] dates = new ValidityDate[3];
    for (int i = 0; i < dates.length; i++) {
        ValidityDate date = new ValidityDate();
        date.setName("val" + i);
        date.setConnector(connector);
        dates[i] = date;
    }

    connector.setColumn(column);
    connector.setConcept(concept);
    connector.setValidityDates(List.of(dates));

    FETable feTable = FrontEndConceptBuilder.createTable(connector);

    // The frontend must expose the validity dates in their declared order.
    assertThat(feTable.getDateColumn().getOptions())
            .containsExactly(
                    new FEValue("val0", dates[0].getId().toString()),
                    new FEValue("val1", dates[1].getId().toString()),
                    new FEValue("val2", dates[2].getId().toString()));
}
Also used : FETable(com.bakdata.conquery.apiv1.frontend.FETable) ValidityDate(com.bakdata.conquery.models.datasets.concepts.ValidityDate) FETable(com.bakdata.conquery.apiv1.frontend.FETable) Table(com.bakdata.conquery.models.datasets.Table) Column(com.bakdata.conquery.models.datasets.Column) Dataset(com.bakdata.conquery.models.datasets.Dataset) TreeConcept(com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept) ConceptTreeConnector(com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector) FEValue(com.bakdata.conquery.apiv1.frontend.FEValue) Test(org.junit.jupiter.api.Test)

Example 38 with Dataset

use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.

The following example is taken from class IdMapTest, method generalTest.

@Test
public void generalTest() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException, JSONException {
    // Round-trips an IdMap of two datasets through Jackson and verifies the
    // entry sets survive serialization + deserialization unchanged.
    Dataset d1 = new Dataset();
    d1.setName("d1");
    Dataset d2 = new Dataset();
    d2.setName("d2");
    // Diamond operator — no need to repeat the type arguments on the right-hand side.
    IdMap<DatasetId, Dataset> idMap = new IdMap<>();
    idMap.add(d1);
    idMap.add(d2);
    ContainingDataset containedDataset = new ContainingDataset(idMap);
    // Serialize to a JSON tree rather than a string so no intermediate parsing is needed.
    JsonNode json = Jackson.MAPPER.valueToTree(containedDataset);
    ContainingDataset constructed = Jackson.MAPPER.treeToValue(json, ContainingDataset.class);
    // Entry-set equality covers both the ids and the mapped dataset values.
    assertThat(constructed.idMap.entrySet()).isEqualTo(containedDataset.idMap.entrySet());
}
Also used : Dataset(com.bakdata.conquery.models.datasets.Dataset) JsonNode(com.fasterxml.jackson.databind.JsonNode) DatasetId(com.bakdata.conquery.models.identifiable.ids.specific.DatasetId) Test(org.junit.jupiter.api.Test)

Example 39 with Dataset

use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.

The following example is taken from class PermissionCleanupTaskTest, method createManagedQuery.

/**
 * Builds a {@link ManagedQuery} wrapping an empty AND node, back-dates its
 * creation time by one day, registers it in {@code STORAGE} and returns it.
 */
private ManagedQuery createManagedQuery() {
    final CQAnd emptyAnd = new CQAnd();
    emptyAnd.setChildren(new ArrayList<>());

    final ManagedQuery execution = new ManagedQuery(new ConceptQuery(emptyAnd), null, new Dataset("test"));
    // Back-dated by a day — presumably so the cleanup task under test treats
    // the execution as stale; confirm against the task's age threshold.
    execution.setCreationTime(LocalDateTime.now().minusDays(1));

    STORAGE.addExecution(execution);
    return execution;
}
Also used : Dataset(com.bakdata.conquery.models.datasets.Dataset) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) CQAnd(com.bakdata.conquery.apiv1.query.concept.specific.CQAnd) ConceptQuery(com.bakdata.conquery.apiv1.query.ConceptQuery)

Example 40 with Dataset

use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.

The following example is taken from class GroovyIndexedTest, method init.

@BeforeAll
public static void init() throws IOException, JSONException, ConfigurationException {
    // Reads the concept JSON once, then deserializes it twice: once into the
    // prefix-indexed concept under test and once into the plain ("old") one.
    ObjectNode node = Jackson.MAPPER.readerFor(ObjectNode.class).readValue(In.resource(GroovyIndexedTest.class, CONCEPT_SOURCE).asStream());
    // load concept tree from json
    // Build a minimal registry with one dataset, one table and one STRING column
    // so the concept's references can be resolved during deserialization.
    CentralRegistry registry = new CentralRegistry();
    Table table = new Table();
    table.setName("the_table");
    Dataset dataset = new Dataset();
    dataset.setName("the_dataset");
    registry.register(dataset);
    table.setDataset(dataset);
    Column column = new Column();
    column.setName("the_column");
    column.setType(MajorTypeId.STRING);
    table.setColumns(new Column[] { column });
    column.setTable(table);
    registry.register(table);
    registry.register(column);
    // Prepare Serdes injections
    // The reader needs a Validator plus the registry/dataset injected into the
    // mapper so that @JsonCreator-style resolution works while reading Concept.
    final Validator validator = Validators.newValidator();
    final ObjectReader conceptReader = new Injectable() {

        @Override
        public MutableInjectableValues inject(MutableInjectableValues values) {
            return values.add(Validator.class, validator);
        }
    }.injectInto(registry.injectIntoNew(dataset.injectIntoNew(Jackson.MAPPER))).readerFor(Concept.class);
    // load the tree twice to avoid shared references between the two concepts
    indexedConcept = conceptReader.readValue(node);
    indexedConcept.setDataset(dataset);
    indexedConcept.initElements();
    // Only the first concept gets the prefix index — the second stays un-indexed
    // so the test can compare the two behaviors.
    TreeChildPrefixIndex.putIndexInto(indexedConcept);
    oldConcept = conceptReader.readValue(node);
    oldConcept.setDataset(dataset);
    oldConcept.initElements();
}
Also used : Injectable(com.bakdata.conquery.io.jackson.Injectable) Table(com.bakdata.conquery.models.datasets.Table) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Column(com.bakdata.conquery.models.datasets.Column) Dataset(com.bakdata.conquery.models.datasets.Dataset) MutableInjectableValues(com.bakdata.conquery.io.jackson.MutableInjectableValues) ObjectReader(com.fasterxml.jackson.databind.ObjectReader) CentralRegistry(com.bakdata.conquery.models.identifiable.CentralRegistry) Validator(javax.validation.Validator) BeforeAll(org.junit.jupiter.api.BeforeAll)

Aggregations

Dataset (com.bakdata.conquery.models.datasets.Dataset)46 MetaStorage (com.bakdata.conquery.io.storage.MetaStorage)14 CentralRegistry (com.bakdata.conquery.models.identifiable.CentralRegistry)12 Test (org.junit.jupiter.api.Test)12 Slf4j (lombok.extern.slf4j.Slf4j)10 Table (com.bakdata.conquery.models.datasets.Table)9 Namespace (com.bakdata.conquery.models.worker.Namespace)9 Query (com.bakdata.conquery.apiv1.query.Query)8 User (com.bakdata.conquery.models.auth.entities.User)8 IdMapSerialisationTest (com.bakdata.conquery.models.identifiable.IdMapSerialisationTest)8 StandaloneSupport (com.bakdata.conquery.util.support.StandaloneSupport)8 Response (javax.ws.rs.core.Response)8 QueryTest (com.bakdata.conquery.integration.json.QueryTest)7 TreeConcept (com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept)7 ManagedQuery (com.bakdata.conquery.models.query.ManagedQuery)7 List (java.util.List)7 LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil)6 Role (com.bakdata.conquery.models.auth.entities.Role)6 ExecutionState (com.bakdata.conquery.models.execution.ExecutionState)6 DatasetId (com.bakdata.conquery.models.identifiable.ids.specific.DatasetId)6