Search in sources:

Example 1 with ResourceFile

Use of com.bakdata.conquery.integration.common.ResourceFile in project conquery by bakdata.

The class AbstractQueryEngineTest, method executeTest:

@Override
public void executeTest(StandaloneSupport standaloneSupport) throws IOException {
    Query query = getQuery();
    assertThat(standaloneSupport.getValidator().validate(query)).describedAs("Query Validation Errors").isEmpty();
    log.info("{} QUERY INIT", getLabel());
    final User testUser = standaloneSupport.getTestUser();
    final ManagedExecutionId executionId = IntegrationUtils.assertQueryResult(standaloneSupport, query, -1, ExecutionState.DONE, testUser, 201);
    final ManagedQuery execution = (ManagedQuery) standaloneSupport.getMetaStorage().getExecution(executionId);
    // check result info size
    List<ResultInfo> resultInfos = execution.getResultInfos();
    assertThat(execution.streamResults().flatMap(EntityResult::streamValues))
            .as("Should have same size as result infos")
            .allSatisfy(v -> assertThat(v).hasSameSizeAs(resultInfos));
    // Get the actual response and compare with expected result.
    final Response csvResponse = standaloneSupport.getClient()
            .target(HierarchyHelper.hierarchicalPath(standaloneSupport.defaultApiURIBuilder(), ResultCsvResource.class, "getAsCsv")
                                   .buildFromMap(Map.of(
                                           DATASET, standaloneSupport.getDataset().getName(),
                                           QUERY, execution.getId().toString())))
            .queryParam("pretty", false)
            .request(AdditionalMediaTypes.CSV)
            .acceptLanguage(Locale.ENGLISH)
            .get();
    List<String> actual = In.stream((InputStream) csvResponse.getEntity()).readLines();
    ResourceFile expectedCsv = getExpectedCsv();
    List<String> expected = In.stream(expectedCsv.stream()).readLines();
    assertThat(actual).as("Results for %s are not as expected.", this).containsExactlyInAnyOrderElementsOf(expected);
    // check that getLastResultCount returns the correct size
    if (execution.streamResults().noneMatch(MultilineEntityResult.class::isInstance)) {
        assertThat(execution.getLastResultCount()).as("Result count for %s is not as expected.", this).isEqualTo(expected.size() - 1);
    }
    log.info("INTEGRATION TEST SUCCESSFUL {} {} on {} rows", getClass().getSimpleName(), this, expected.size());
}
Also used: Response(javax.ws.rs.core.Response) ResourceFile(com.bakdata.conquery.integration.common.ResourceFile) User(com.bakdata.conquery.models.auth.entities.User) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) Query(com.bakdata.conquery.apiv1.query.Query) ManagedExecutionId(com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId) ResultInfo(com.bakdata.conquery.models.query.resultinfo.ResultInfo) MultilineEntityResult(com.bakdata.conquery.models.query.results.MultilineEntityResult)
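
The core of this example is the comparison against a ResourceFile: the expected result ships as a CSV on the classpath, and the test reads both the HTTP response body and the ResourceFile into line lists before comparing them order-insensitively. Below is a minimal sketch of that pattern; the class and method names (CsvAssertions, assertCsvMatches) are hypothetical, while ResourceFile#stream(), In.stream(...).readLines() and the AssertJ assertion are taken verbatim from the example above.

import static org.assertj.core.api.Assertions.assertThat;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;

import com.bakdata.conquery.integration.common.ResourceFile;
import com.github.powerlibraries.io.In;

final class CsvAssertions {

    private CsvAssertions() {
    }

    /**
     * Compares an actual CSV (e.g. an HTTP response body) against the expected
     * CSV bundled as a classpath ResourceFile.
     */
    static void assertCsvMatches(InputStream actualCsv, ResourceFile expectedCsv) throws IOException {
        // In.readLines() buffers and splits the stream into one string per line.
        List<String> actual = In.stream(actualCsv).readLines();
        List<String> expected = In.stream(expectedCsv.stream()).readLines();
        // Result row order is not guaranteed, so compare as an unordered collection.
        assertThat(actual).containsExactlyInAnyOrderElementsOf(expected);
    }
}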

Example 2 with ResourceFile

Use of com.bakdata.conquery.integration.common.ResourceFile in project conquery by bakdata.

The class ImportDeletionTest, method execute:

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    MetaStorage storage = conquery.getMetaStorage();
    final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final ImportId importId = ImportId.Parser.INSTANCE.parse(dataset.getName(), "test_table2", "test_table2");
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    final int nImports = namespace.getStorage().getAllImports().size();
    // State before deletion.
    {
        log.info("Checking state before deletion");
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks())
                        .describedAs("CBlocks for Worker %s", worker.getInfo().getId())
                        .filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId()))
                        .isNotEmpty();
                assertThat(workerStorage.getAllBuckets())
                        .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId()))
                        .describedAs("Buckets for Worker %s", worker.getInfo().getId())
                        .isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId)).isNotNull();
            }
        }
        log.info("Executing query before deletion");
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Delete the import.
    {
        log.info("Issuing deletion of import {}", importId);
        final URI deleteImportUri = HierarchyHelper
                .hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "deleteImport")
                .buildFromMap(Map.of(
                        ResourceConstants.DATASET, conquery.getDataset().getId(),
                        ResourceConstants.TABLE, importId.getTable(),
                        ResourceConstants.IMPORT_ID, importId));
        final Response delete = conquery.getClient().target(deleteImportUri).request(MediaType.APPLICATION_JSON).delete();
        assertThat(delete.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.SUCCESSFUL);
        conquery.waitUntilWorkDone();
    }
    // State after deletion.
    {
        log.info("Checking state after deletion");
        // We have deleted an import, so there should be one fewer.
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
        // The deleted import should not be found.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId)).isEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                // No bucket should be found referencing the import.
                assertThat(workerStorage.getAllBuckets())
                        .describedAs("Buckets for Worker %s", worker.getInfo().getId())
                        .filteredOn(bucket -> bucket.getImp().getId().equals(importId))
                        .isEmpty();
                // No CBlock associated with the import may exist.
                assertThat(workerStorage.getAllCBlocks())
                        .describedAs("CBlocks for Worker %s", worker.getInfo().getId())
                        .filteredOn(cBlock -> cBlock.getBucket().getId().getImp().equals(importId))
                        .isEmpty();
                // The import itself should not exist anymore.
                assertThat(workerStorage.getImport(importId))
                        .describedAs("Import for Worker %s", worker.getInfo().getId())
                        .isNull();
            }
        }
        log.info("Executing query after deletion");
        // Issue a query and assert that it has less content.
        IntegrationUtils.assertQueryResult(conquery, query, 1L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    conquery.waitUntilWorkDone();
    // Load more data under the same name into the same table, with only the deleted import/table
    {
        // only import the deleted import/table
        final RequiredTable import2Table = test.getContent().getTables().stream()
                .filter(table -> table.getName().equalsIgnoreCase(importId.getTable().getTable()))
                .findFirst()
                .orElseThrow();
        final ResourceFile csv = import2Table.getCsv();
        final String path = csv.getPath();
        // copy csv to tmp folder
        // Content 2.2 contains an extra entry of a value that hasn't been seen before.
        FileUtils.copyInputStreamToFile(
                In.resource(path.substring(0, path.lastIndexOf('/')) + "/content2.2.csv").asStream(),
                new File(conquery.getTmpDir(), csv.getName()));
        File descriptionFile = new File(conquery.getTmpDir(), import2Table.getName() + ConqueryConstants.EXTENSION_DESCRIPTION);
        File preprocessedFile = new File(conquery.getTmpDir(), import2Table.getName() + ConqueryConstants.EXTENSION_PREPROCESSED);
        // create import descriptor
        TableImportDescriptor desc = new TableImportDescriptor();
        desc.setName(import2Table.getName());
        desc.setTable(import2Table.getName());
        TableInputDescriptor input = new TableInputDescriptor();
        {
            input.setPrimary(import2Table.getPrimaryColumn().createOutput());
            input.setSourceFile(import2Table.getCsv().getName());
            input.setOutput(new OutputDescription[import2Table.getColumns().length]);
            for (int i = 0; i < import2Table.getColumns().length; i++) {
                input.getOutput()[i] = import2Table.getColumns()[i].createOutput();
            }
        }
        desc.setInputs(new TableInputDescriptor[] { input });
        Jackson.MAPPER.writeValue(descriptionFile, desc);
        // preprocess
        conquery.preprocessTmp(conquery.getTmpDir(), List.of(descriptionFile));
        // import preprocessedFiles
        conquery.getDatasetsProcessor().addImport(conquery.getNamespace(), new GZIPInputStream(new FileInputStream(preprocessedFile)));
        conquery.waitUntilWorkDone();
    }
    // State after reimport.
    {
        log.info("Checking state after re-import");
        assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllBuckets())
                        .describedAs("Buckets for Worker %s", worker.getInfo().getId())
                        .filteredOn(bucket -> bucket.getImp().getId().equals(importId))
                        .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId()))
                        .isNotEmpty();
            }
        }
        log.info("Executing query after re-import");
        // Issue a query and assert that it has the same content as the first time around.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Finally, restart conquery and assert again, that the data is correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2);
            for (ShardNode node : conquery2.getShardNodes()) {
                for (Worker worker : node.getWorkers().getWorkers().values()) {
                    if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                        continue;
                    }
                    final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                    assertThat(workerStorage.getAllBuckets())
                            .describedAs("Buckets for Worker %s", worker.getInfo().getId())
                            .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId()))
                            .filteredOn(bucket -> bucket.getImp().getId().equals(importId))
                            .isNotEmpty();
                }
            }
            log.info("Executing query after re-import");
            // Issue a query and assert that it has the same content as the first time around.
            IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        }
    }
}
Also used: GZIPInputStream(java.util.zip.GZIPInputStream) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) AdminTablesResource(com.bakdata.conquery.resources.admin.rest.AdminTablesResource) RequiredTable(com.bakdata.conquery.integration.common.RequiredTable) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) MediaType(javax.ws.rs.core.MediaType) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) Map(java.util.Map) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) URI(java.net.URI) ConqueryConstants(com.bakdata.conquery.ConqueryConstants) ResourceFile(com.bakdata.conquery.integration.common.ResourceFile) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) TableImportDescriptor(com.bakdata.conquery.models.preproc.TableImportDescriptor) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) ResourceConstants(com.bakdata.conquery.resources.ResourceConstants) FileUtils(org.apache.commons.io.FileUtils) FileInputStream(java.io.FileInputStream) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) File(java.io.File) ImportId(com.bakdata.conquery.models.identifiable.ids.specific.ImportId) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) OutputDescription(com.bakdata.conquery.models.preproc.outputs.OutputDescription) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) TableInputDescriptor(com.bakdata.conquery.models.preproc.TableInputDescriptor) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) Jackson(com.bakdata.conquery.io.jackson.Jackson) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) HierarchyHelper(com.bakdata.conquery.resources.hierarchies.HierarchyHelper) Namespace(com.bakdata.conquery.models.worker.Namespace)
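
The re-import block in this example compresses a recurring pattern: copy the CSV named by the ResourceFile into a temp directory, derive a TableImportDescriptor from the RequiredTable, serialize it with Jackson, then preprocess and upload. Below is a minimal sketch of the descriptor-building step, using only calls that appear in the test above; the class and method names (DescriptorUtil, writeImportDescriptor) are hypothetical.

import java.io.File;
import java.io.IOException;

import com.bakdata.conquery.ConqueryConstants;
import com.bakdata.conquery.integration.common.RequiredTable;
import com.bakdata.conquery.io.jackson.Jackson;
import com.bakdata.conquery.models.preproc.TableImportDescriptor;
import com.bakdata.conquery.models.preproc.TableInputDescriptor;
import com.bakdata.conquery.models.preproc.outputs.OutputDescription;

final class DescriptorUtil {

    private DescriptorUtil() {
    }

    /**
     * Builds a TableImportDescriptor for the given table and writes it into
     * tmpDir, returning the description file so it can be handed to the
     * preprocessor. The CSV named by table.getCsv().getName() must already
     * have been copied into tmpDir, as the test does with FileUtils.
     */
    static File writeImportDescriptor(RequiredTable table, File tmpDir) throws IOException {
        TableImportDescriptor descriptor = new TableImportDescriptor();
        descriptor.setName(table.getName());
        descriptor.setTable(table.getName());

        TableInputDescriptor input = new TableInputDescriptor();
        input.setPrimary(table.getPrimaryColumn().createOutput());
        input.setSourceFile(table.getCsv().getName());
        // One output per declared column, mirroring the loop in the test.
        input.setOutput(new OutputDescription[table.getColumns().length]);
        for (int i = 0; i < table.getColumns().length; i++) {
            input.getOutput()[i] = table.getColumns()[i].createOutput();
        }
        descriptor.setInputs(new TableInputDescriptor[] { input });

        File descriptionFile = new File(tmpDir, table.getName() + ConqueryConstants.EXTENSION_DESCRIPTION);
        Jackson.MAPPER.writeValue(descriptionFile, descriptor);
        return descriptionFile;
    }
}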

Aggregations

Query (com.bakdata.conquery.apiv1.query.Query) 2
ResourceFile (com.bakdata.conquery.integration.common.ResourceFile) 2
Response (javax.ws.rs.core.Response) 2
ConqueryConstants (com.bakdata.conquery.ConqueryConstants) 1
ShardNode (com.bakdata.conquery.commands.ShardNode) 1
IntegrationUtils (com.bakdata.conquery.integration.common.IntegrationUtils) 1
LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil) 1
LoadingUtil.importSecondaryIds (com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) 1
RequiredTable (com.bakdata.conquery.integration.common.RequiredTable) 1
JsonIntegrationTest (com.bakdata.conquery.integration.json.JsonIntegrationTest) 1
QueryTest (com.bakdata.conquery.integration.json.QueryTest) 1
ProgrammaticIntegrationTest (com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) 1
Jackson (com.bakdata.conquery.io.jackson.Jackson) 1
MetaStorage (com.bakdata.conquery.io.storage.MetaStorage) 1
ModificationShieldedWorkerStorage (com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) 1
User (com.bakdata.conquery.models.auth.entities.User) 1
Dataset (com.bakdata.conquery.models.datasets.Dataset) 1
ValidatorHelper (com.bakdata.conquery.models.exceptions.ValidatorHelper) 1
ExecutionState (com.bakdata.conquery.models.execution.ExecutionState) 1
ImportId (com.bakdata.conquery.models.identifiable.ids.specific.ImportId) 1