Example 1 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class ManagedExecution method buildStatusFull.

/**
 * Renders an extensive status of this query (see {@link FullExecutionStatus}). The rendering can be computation intensive and can produce a large
 * object. Use of the full status is only intended when a client has requested specific information about this execution.
 */
public FullExecutionStatus buildStatusFull(@NonNull MetaStorage storage, Subject subject, DatasetRegistry datasetRegistry, ConqueryConfig config) {
    initExecutable(datasetRegistry, config);
    FullExecutionStatus status = new FullExecutionStatus();
    setStatusBase(subject, status);
    setAdditionalFieldsForStatusWithColumnDescription(storage, subject, status, datasetRegistry);
    setAdditionalFieldsForStatusWithSource(subject, status);
    setAdditionalFieldsForStatusWithGroups(storage, status);
    setAvailableSecondaryIds(status);
    status.setProgress(progress);
    if (getState().equals(ExecutionState.FAILED) && error != null) {
        // Use plain format here to have a uniform serialization.
        status.setError(error.asPlain());
    }
    return status;
}
Also used : FullExecutionStatus(com.bakdata.conquery.apiv1.FullExecutionStatus)
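
A usage sketch for context: an API resource might render this status on demand. Looking the execution up via storage.getExecution(...) is an assumption about the surrounding code, as is getError() being the counterpart of the setError(...) call above; only buildStatusFull itself is taken from the method shown.

// Hedged usage sketch; storage.getExecution(executionId) is an assumed lookup.
// buildStatusFull is the method shown above; the error field it sets is only
// populated for executions in state FAILED.
ManagedExecution<?> execution = storage.getExecution(executionId);
FullExecutionStatus status = execution.buildStatusFull(storage, subject, datasetRegistry, config);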

Example 2 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class ManagedExecution method buildStatusOverview.

/**
 * Renders a lightweight status with meta information about this query. Computation and size should be small for this.
 */
public OverviewExecutionStatus buildStatusOverview(UriBuilder url, Subject subject) {
    OverviewExecutionStatus status = new OverviewExecutionStatus();
    setStatusBase(subject, status);
    return status;
}
Also used : OverviewExecutionStatus(com.bakdata.conquery.apiv1.OverviewExecutionStatus)
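
Because this variant is cheap, it suits listings of many executions. A minimal sketch, assuming a MetaStorage accessor getAllExecutions() and a JAX-RS-provided UriBuilder (both assumptions, along with the java.util.ArrayList/List imports); buildStatusOverview is the method above.

// Hedged usage sketch; getAllExecutions() and uriBuilder are assumptions.
List<OverviewExecutionStatus> statuses = new ArrayList<>();
for (ManagedExecution<?> execution : storage.getAllExecutions()) {
    statuses.add(execution.buildStatusOverview(uriBuilder, subject));
}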

Example 3 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class FilterResolutionExactTest method execute.

@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // read test specification
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
    CSVConfig csvConf = conquery.getConfig().getCsv();
    test.importRequiredData(conquery);
    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();
    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();
    // Write the search values from `lines` to a temporary CSV file.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
    filter.setSearchType(FilterSearch.FilterSearchType.EXACT);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));
    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();
    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());
    // from csv
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "aaa", "unknown"));
        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
    // from column values
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));
        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
Also used : Path(java.nio.file.Path) Connector(com.bakdata.conquery.models.datasets.concepts.Connector) AbstractSelectFilter(com.bakdata.conquery.models.datasets.concepts.filters.specific.AbstractSelectFilter) ConqueryTestSpec(com.bakdata.conquery.integration.json.ConqueryTestSpec) ConceptsProcessor(com.bakdata.conquery.resources.api.ConceptsProcessor) CSVConfig(com.bakdata.conquery.models.config.CSVConfig) DatasetId(com.bakdata.conquery.models.identifiable.ids.specific.DatasetId) FilterTemplate(com.bakdata.conquery.apiv1.FilterTemplate) ResolvedConceptsResult(com.bakdata.conquery.resources.api.ConceptsProcessor.ResolvedConceptsResult)
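
The search-source wiring in the middle of this test is the part it shares with the next example: write a small CSV, point a FilterTemplate at it, and build the source search. A minimal sketch of just that wiring; the fixture values are hypothetical stand-ins for the `lines` field that is not part of this excerpt, and the FilterTemplate arguments are passed exactly as the test passes them.

// Sketch of the shared search-source setup; the fixture values are hypothetical.
String[] lines = { "HEADER", "a", "aaa", "aab", "b" };
Path tmpCsv = Files.createTempFile("conquery_search", "csv");
Files.write(tmpCsv, String.join(csvConf.getLineSeparator(), lines).getBytes(),
        StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
filter.setTemplate(new FilterTemplate(tmpCsv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));
FilterSearch.createSourceSearch(filter, csvConf);
assertThat(filter.getSourceSearch()).isNotNull();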

Example 4 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class FilterResolutionPrefixTest method execute.

@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // read test specification
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
    test.importRequiredData(conquery);
    CSVConfig csvConf = conquery.getConfig().getCsv();
    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();
    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();
    // Write the search values from `lines` to a temporary CSV file.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
    filter.setSearchType(FilterSearch.FilterSearchType.PREFIX);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));
    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();
    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());
    // from csv
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "unknown"));
        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa", "aab");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
    // from column values
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));
        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
Also used : Path(java.nio.file.Path) Connector(com.bakdata.conquery.models.datasets.concepts.Connector) AbstractSelectFilter(com.bakdata.conquery.models.datasets.concepts.filters.specific.AbstractSelectFilter) ConqueryTestSpec(com.bakdata.conquery.integration.json.ConqueryTestSpec) ConceptsProcessor(com.bakdata.conquery.resources.api.ConceptsProcessor) CSVConfig(com.bakdata.conquery.models.config.CSVConfig) DatasetId(com.bakdata.conquery.models.identifiable.ids.specific.DatasetId) FilterTemplate(com.bakdata.conquery.apiv1.FilterTemplate) ResolvedConceptsResult(com.bakdata.conquery.resources.api.ConceptsProcessor.ResolvedConceptsResult)
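
Taken together, Examples 3 and 4 isolate the difference between the two search types: EXACT resolves a query value only if it matches a source value verbatim, while PREFIX expands it to every source value that starts with it. A compact restatement, using only calls and expectations that appear in the two tests above:

// EXACT  (Example 3): List.of("a", "aaa", "unknown") resolves to "a" and "aaa";
//                     "unknown" is reported via getUnknownCodes().
// PREFIX (Example 4): List.of("a", "unknown") resolves to "a", "aaa" and "aab",
//                     because all three share the prefix "a".
filter.setSearchType(FilterSearch.FilterSearchType.PREFIX);
FilterSearch.createSourceSearch(filter, csvConf);
ResolvedConceptsResult byPrefix = processor.resolveFilterValues(filter, List.of("a"));
assertThat(byPrefix.getResolvedFilter().getValue().stream().map(FEValue::getValue))
        .containsExactlyInAnyOrder("a", "aaa", "aab");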

Example 5 with Query

use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.

the class ImportUpdateTest method execute.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    MetaStorage storage = conquery.getMetaStorage();
    String testJson = In.resource("/tests/query/UPDATE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final ImportId importId1 = ImportId.Parser.INSTANCE.parse(dataset.getName(), "table1", "table1");
    final ImportId importId2 = ImportId.Parser.INSTANCE.parse(dataset.getName(), "table2", "table2");
    QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    final List<RequiredTable> tables = test.getContent().getTables();
    assertThat(tables.size()).isEqualTo(2);
    List<File> cqpps;
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, tables);
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        cqpps = LoadingUtil.generateCqpp(conquery, tables);
        conquery.waitUntilWorkDone();
        assertThat(cqpps.size()).isEqualTo(tables.size());
        LoadingUtil.importCqppFiles(conquery, List.of(cqpps.get(0)));
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // State before update.
    {
        log.info("Checking state before update");
        assertThat(namespace.getStorage().getAllImports()).hasSize(1);
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId1)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", worker.getInfo().getId()).filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).describedAs("Buckets for Worker %s", worker.getInfo().getId()).isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId1)).isNotNull();
            }
        }
        assertThat(namespace.getNumberOfEntities()).isEqualTo(4);
        // assert that the query can be executed after the import
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Trying to update an import that does not exist should throw a Not-Found WebApplicationException.
    LoadingUtil.updateCqppFile(conquery, cqpps.get(1), Response.Status.Family.CLIENT_ERROR, "Not Found");
    conquery.waitUntilWorkDone();
    // Manually load new data and update the affected import
    {
        log.info("Manually loading new data for import");
        final RequiredTable importTable = test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(importId1.getTable().getTable())).findFirst().orElseThrow();
        final String csvName = importTable.getCsv().getName();
        final String path = importTable.getCsv().getPath();
        // copy the new content for the importTable into the CSV file used by the preprocessor, to avoid creating multiple files with the same names
        FileUtils.copyInputStreamToFile(In.resource(path.substring(0, path.lastIndexOf('/')) + "/" + csvName.replace(".csv", ".update.csv")).asStream(), new File(conquery.getTmpDir(), csvName));
        File descriptionFile = new File(conquery.getTmpDir(), importTable.getName() + ConqueryConstants.EXTENSION_DESCRIPTION);
        File newPreprocessedFile = new File(conquery.getTmpDir(), importTable.getName() + ConqueryConstants.EXTENSION_PREPROCESSED);
        // create import descriptor
        {
            TableImportDescriptor desc = new TableImportDescriptor();
            desc.setName(importTable.getName());
            desc.setTable(importTable.getName());
            TableInputDescriptor input = new TableInputDescriptor();
            {
                input.setPrimary(importTable.getPrimaryColumn().createOutput());
                input.setSourceFile(csvName);
                input.setOutput(new OutputDescription[importTable.getColumns().length]);
                for (int i = 0; i < importTable.getColumns().length; i++) {
                    input.getOutput()[i] = importTable.getColumns()[i].createOutput();
                }
            }
            desc.setInputs(new TableInputDescriptor[] { input });
            Jackson.MAPPER.writeValue(descriptionFile, desc);
        }
        // preprocess
        conquery.preprocessTmp(conquery.getTmpDir(), List.of(descriptionFile));
        log.info("updating import");
        // correct update of the import
        LoadingUtil.updateCqppFile(conquery, newPreprocessedFile, Response.Status.Family.SUCCESSFUL, "No Content");
        conquery.waitUntilWorkDone();
    }
    // State after update.
    {
        log.info("Checking state after update");
        assertThat(namespace.getStorage().getAllImports()).hasSize(1);
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId1)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", worker.getInfo().getId()).filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())).isNotEmpty();
                assertThat(workerStorage.getAllBuckets()).filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())).describedAs("Buckets for Worker %s", worker.getInfo().getId()).isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId1)).isNotNull();
            }
        }
        assertThat(namespace.getNumberOfEntities()).isEqualTo(9);
        // Issue a query and assert that it has more content.
        IntegrationUtils.assertQueryResult(conquery, query, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
}
Also used : ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) RequiredTable(com.bakdata.conquery.integration.common.RequiredTable) QueryTest(com.bakdata.conquery.integration.json.QueryTest) Worker(com.bakdata.conquery.models.worker.Worker) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) ConqueryConstants(com.bakdata.conquery.ConqueryConstants) IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) TableImportDescriptor(com.bakdata.conquery.models.preproc.TableImportDescriptor) TestConquery(com.bakdata.conquery.util.support.TestConquery) ShardNode(com.bakdata.conquery.commands.ShardNode) FileUtils(org.apache.commons.io.FileUtils) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) File(java.io.File) ImportId(com.bakdata.conquery.models.identifiable.ids.specific.ImportId) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) OutputDescription(com.bakdata.conquery.models.preproc.outputs.OutputDescription) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) TableInputDescriptor(com.bakdata.conquery.models.preproc.TableInputDescriptor) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) Jackson(com.bakdata.conquery.io.jackson.Jackson) MetaStorage(com.bakdata.conquery.io.storage.MetaStorage) Namespace(com.bakdata.conquery.models.worker.Namespace)
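
The descriptor construction in the middle of Example 5 is the most reusable part of the test. A sketch of it as a standalone helper: the wrapper method is hypothetical, but every descriptor call inside it is the one the test itself makes.

// Hypothetical helper distilled from the test above; only the wrapper is invented.
static TableImportDescriptor buildDescriptor(RequiredTable importTable, String csvName) {
    TableImportDescriptor desc = new TableImportDescriptor();
    desc.setName(importTable.getName());
    desc.setTable(importTable.getName());
    TableInputDescriptor input = new TableInputDescriptor();
    input.setPrimary(importTable.getPrimaryColumn().createOutput());
    input.setSourceFile(csvName);
    input.setOutput(new OutputDescription[importTable.getColumns().length]);
    for (int i = 0; i < importTable.getColumns().length; i++) {
        input.getOutput()[i] = importTable.getColumns()[i].createOutput();
    }
    desc.setInputs(new TableInputDescriptor[] { input });
    return desc;
}

Serializing it then stays a one-liner, as in the test: Jackson.MAPPER.writeValue(descriptionFile, buildDescriptor(importTable, csvName));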

Aggregations

ConceptQuery (com.bakdata.conquery.apiv1.query.ConceptQuery): 12
Dataset (com.bakdata.conquery.models.datasets.Dataset): 12
Query (com.bakdata.conquery.apiv1.query.Query): 11
ManagedQuery (com.bakdata.conquery.models.query.ManagedQuery): 11
MetaStorage (com.bakdata.conquery.io.storage.MetaStorage): 10
QueryTest (com.bakdata.conquery.integration.json.QueryTest): 8
ExecutionState (com.bakdata.conquery.models.execution.ExecutionState): 8
Slf4j (lombok.extern.slf4j.Slf4j): 8
Namespace (com.bakdata.conquery.models.worker.Namespace): 7
CQAnd (com.bakdata.conquery.apiv1.query.concept.specific.CQAnd): 6
CQConcept (com.bakdata.conquery.apiv1.query.concept.specific.CQConcept): 6
CQReusedQuery (com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery): 6
User (com.bakdata.conquery.models.auth.entities.User): 6
Response (javax.ws.rs.core.Response): 6
CQExternal (com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal): 5
ShardNode (com.bakdata.conquery.commands.ShardNode): 5
IntegrationUtils (com.bakdata.conquery.integration.common.IntegrationUtils): 5
LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil): 5
LoadingUtil.importSecondaryIds (com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds): 5
JsonIntegrationTest (com.bakdata.conquery.integration.json.JsonIntegrationTest): 5