Example 21 with Namespace

Use of com.bakdata.conquery.models.worker.Namespace in project conquery by bakdata.

The class AdminDatasetProcessor, method deleteDataset.

/**
 * Deletes the dataset if it is empty, otherwise fails with 409 CONFLICT listing the remaining tables.
 */
public synchronized void deleteDataset(Dataset dataset) {
    final Namespace namespace = datasetRegistry.get(dataset.getId());
    if (!namespace.getStorage().getTables().isEmpty()) {
        throw new WebApplicationException(
                String.format(
                        "Cannot delete dataset `%s`, because it still has tables: `%s`",
                        dataset.getId(),
                        namespace.getStorage().getTables().stream()
                                 .map(Table::getId)
                                 .map(Objects::toString)
                                 .collect(Collectors.joining(","))),
                Response.Status.CONFLICT);
    }
    datasetRegistry.removeNamespace(dataset.getId());
}
Also used : Table(com.bakdata.conquery.models.datasets.Table) RemoveTable(com.bakdata.conquery.models.messages.namespaces.specific.RemoveTable) UpdateTable(com.bakdata.conquery.models.messages.namespaces.specific.UpdateTable) WebApplicationException(javax.ws.rs.WebApplicationException) Namespace(com.bakdata.conquery.models.worker.Namespace)
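
The guard clause above is a general JAX-RS pattern rather than anything conquery-specific: refuse a delete with 409 CONFLICT while dependent resources still exist, and name the blockers in the message. Below is a minimal, hedged sketch of that pattern with illustrative names (not conquery API); running it requires a JAX-RS implementation such as Jersey on the classpath, because WebApplicationException builds a Response internally.

import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;

public class DeletionGuard {

    // Mirrors the guard in deleteDataset: abort with 409 CONFLICT and list the blocking ids.
    public static void requireNoDependents(String resourceId, List<String> dependentIds) {
        if (!dependentIds.isEmpty()) {
            throw new WebApplicationException(
                    String.format("Cannot delete `%s`, because it still has dependents: `%s`",
                                  resourceId, String.join(",", dependentIds)),
                    Response.Status.CONFLICT);
        }
    }

    public static void main(String[] args) {
        requireNoDependents("dataset1", List.of()); // no dependents, passes silently
        try {
            requireNoDependents("dataset2", List.of("dataset2.table_a"));
        }
        catch (WebApplicationException e) {
            System.out.println(e.getMessage()); // Cannot delete `dataset2`, because it still has dependents: `dataset2.table_a`
        }
    }
}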

Example 22 with Namespace

Use of com.bakdata.conquery.models.worker.Namespace in project conquery by bakdata.

The class ManagerNode, method loadNamespaces.

public void loadNamespaces() {
    final Collection<NamespaceStorage> storages = config.getStorage().loadNamespaceStorages();
    final ObjectWriter objectWriter = config.configureObjectMapper(Jackson.copyMapperAndInjectables(Jackson.BINARY_MAPPER)).writerWithView(InternalOnly.class);
    for (NamespaceStorage namespaceStorage : storages) {
        Namespace ns = new Namespace(namespaceStorage, config.isFailOnError(), objectWriter);
        datasetRegistry.add(ns);
    }
}
Also used : ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) NamespaceStorage(com.bakdata.conquery.io.storage.NamespaceStorage) Namespace(com.bakdata.conquery.models.worker.Namespace)
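
The notable part of loadNamespaces is the Jackson setup: copy a shared mapper, then create an ObjectWriter restricted to an internal serialization view. A small standalone sketch of that technique, assuming nothing about conquery's actual InternalOnly class (the marker class below is a stand-in):

import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;

public class ViewWriterExample {

    // Stand-in for a view marker such as conquery's InternalOnly.
    static class InternalView {}

    static class Payload {
        @JsonView(InternalView.class)
        public String internalField = "only serialized with the internal view";
        public String publicField = "serialized in every view (default view inclusion)";
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper shared = new ObjectMapper();
        // copy() leaves the shared mapper untouched, similar to Jackson.copyMapperAndInjectables above.
        ObjectWriter internalWriter = shared.copy().writerWithView(InternalView.class);
        System.out.println(internalWriter.writeValueAsString(new Payload()));
    }
}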

Example 23 with Namespace

Use of com.bakdata.conquery.models.worker.Namespace in project conquery by bakdata.

The class ResultArrowProcessor, method getArrowResult.

public static <E extends ManagedExecution<?> & SingleTableResult> Response getArrowResult(
        Function<OutputStream, Function<VectorSchemaRoot, ArrowWriter>> writerProducer,
        Subject subject,
        E exec,
        Dataset dataset,
        DatasetRegistry datasetRegistry,
        boolean pretty,
        String fileExtension,
        MediaType mediaType,
        ConqueryConfig config) {
    final Namespace namespace = datasetRegistry.get(dataset.getId());
    ConqueryMDC.setLocation(subject.getName());
    log.info("Downloading results for {} on dataset {}", exec, dataset);
    subject.authorize(dataset, Ability.READ);
    subject.authorize(dataset, Ability.DOWNLOAD);
    subject.authorize(exec, Ability.READ);
    // Check if subject is permitted to download on all datasets that were referenced by the query
    authorizeDownloadDatasets(subject, exec);
    if (!(exec instanceof ManagedQuery || (exec instanceof ManagedForm && ((ManagedForm) exec).getSubQueries().size() == 1))) {
        return Response.status(HttpStatus.SC_UNPROCESSABLE_ENTITY, "Execution result is not a single Table").build();
    }
    IdPrinter idPrinter = config.getFrontend().getQueryUpload().getIdPrinter(subject, exec, namespace);
    // Get the locale extracted by the LocaleFilter
    final Locale locale = I18n.LOCALE.get();
    PrintSettings settings = new PrintSettings(pretty, locale, datasetRegistry, config, idPrinter::createId);
    // Collect ResultInfos for id columns and result columns
    final List<ResultInfo> resultInfosId = config.getFrontend().getQueryUpload().getIdResultInfos();
    final List<ResultInfo> resultInfosExec = exec.getResultInfos();
    StreamingOutput out = output -> renderToStream(writerProducer.apply(output), settings, config.getArrow().getBatchSize(), resultInfosId, resultInfosExec, exec.streamResults());
    return makeResponseWithFileName(out, exec.getLabelWithoutAutoLabelSuffix(), fileExtension, mediaType, ResultUtil.ContentDispositionOption.ATTACHMENT);
}
Also used : IdPrinter(com.bakdata.conquery.models.identifiable.mapping.IdPrinter) Locale(java.util.Locale) ManagedQuery(com.bakdata.conquery.models.query.ManagedQuery) ConqueryConfig(com.bakdata.conquery.models.config.ConqueryConfig) ManagedForm(com.bakdata.conquery.models.forms.managed.ManagedForm) Subject(com.bakdata.conquery.models.auth.entities.Subject) AuthorizationHelper.authorizeDownloadDatasets(com.bakdata.conquery.models.auth.AuthorizationHelper.authorizeDownloadDatasets) HttpStatus(org.apache.http.HttpStatus) Function(java.util.function.Function) ArrayList(java.util.ArrayList) PrintSettings(com.bakdata.conquery.models.query.PrintSettings) UtilityClass(lombok.experimental.UtilityClass) MediaType(javax.ws.rs.core.MediaType) ManagedExecution(com.bakdata.conquery.models.execution.ManagedExecution) ArrowWriter(org.apache.arrow.vector.ipc.ArrowWriter) ArrowRenderer.renderToStream(com.bakdata.conquery.io.result.arrow.ArrowRenderer.renderToStream) I18n(com.bakdata.conquery.models.i18n.I18n) ResultInfo(com.bakdata.conquery.models.query.resultinfo.ResultInfo) OutputStream(java.io.OutputStream) ResultUtil(com.bakdata.conquery.io.result.ResultUtil) ConqueryMDC(com.bakdata.conquery.util.io.ConqueryMDC) ResultUtil.makeResponseWithFileName(com.bakdata.conquery.io.result.ResultUtil.makeResponseWithFileName) VectorSchemaRoot(org.apache.arrow.vector.VectorSchemaRoot) StreamingOutput(javax.ws.rs.core.StreamingOutput) SingleTableResult(com.bakdata.conquery.models.query.SingleTableResult) Dataset(com.bakdata.conquery.models.datasets.Dataset) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) Ability(com.bakdata.conquery.models.auth.permissions.Ability) DatasetRegistry(com.bakdata.conquery.models.worker.DatasetRegistry) Namespace(com.bakdata.conquery.models.worker.Namespace)
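
The last two statements are the interesting part: the result is rendered lazily through a StreamingOutput, and the Response is decorated so browsers download it under a file name. Below is a hedged sketch of that response step with illustrative names; the behavior of ResultUtil.makeResponseWithFileName is assumed here, not copied, and building a Response requires a JAX-RS runtime on the classpath.

import java.nio.charset.StandardCharsets;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

public class AttachmentResponse {

    // Wraps a StreamingOutput into a Response that downloads as "<label>.<extension>".
    public static Response asAttachment(StreamingOutput out, String label, String extension, MediaType mediaType) {
        return Response.ok(out, mediaType)
                       .header("Content-Disposition", "attachment; filename=\"" + label + "." + extension + "\"")
                       .build();
    }

    // The entity is only rendered when the container consumes the StreamingOutput,
    // so nothing is written until the client actually downloads.
    public static Response exampleDownload() {
        StreamingOutput out = os -> os.write("id;value\n1;42\n".getBytes(StandardCharsets.UTF_8));
        return asAttachment(out, "my-query", "csv", new MediaType("text", "csv"));
    }
}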

Example 24 with Namespace

Use of com.bakdata.conquery.models.worker.Namespace in project conquery by bakdata.

The class ResultCsvProcessor, method getResult.

public <E extends ManagedExecution<?> & SingleTableResult> Response getResult(Subject subject, Dataset dataset, E exec, String userAgent, String queryCharset, boolean pretty) {
    final Namespace namespace = datasetRegistry.get(dataset.getId());
    ConqueryMDC.setLocation(subject.getName());
    log.info("Downloading results for {} on dataset {}", exec, dataset);
    subject.authorize(namespace.getDataset(), Ability.READ);
    subject.authorize(namespace.getDataset(), Ability.DOWNLOAD);
    subject.authorize(exec, Ability.READ);
    // Check if subject is permitted to download on all datasets that were referenced by the query
    authorizeDownloadDatasets(subject, exec);
    IdPrinter idPrinter = config.getFrontend().getQueryUpload().getIdPrinter(subject, exec, namespace);
    // Get the locale extracted by the LocaleFilter
    final Locale locale = I18n.LOCALE.get();
    PrintSettings settings = new PrintSettings(pretty, locale, datasetRegistry, config, idPrinter::createId);
    Charset charset = determineCharset(userAgent, queryCharset);
    StreamingOutput out = os -> {
        try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, charset))) {
            CsvRenderer renderer = new CsvRenderer(config.getCsv().createWriter(writer), settings);
            renderer.toCSV(config.getFrontend().getQueryUpload().getIdResultInfos(), exec.getResultInfos(), exec.streamResults());
        } catch (EofException e) {
            log.info("User canceled download");
        } catch (Exception e) {
            throw new WebApplicationException("Failed to load result", e);
        }
    };
    return makeResponseWithFileName(out, exec.getLabelWithoutAutoLabelSuffix(), "csv", new MediaType("text", "csv", charset.toString()), ResultUtil.ContentDispositionOption.ATTACHMENT);
}
Also used : Locale(java.util.Locale) ConqueryConfig(com.bakdata.conquery.models.config.ConqueryConfig) Subject(com.bakdata.conquery.models.auth.entities.Subject) RequiredArgsConstructor(lombok.RequiredArgsConstructor) AuthorizationHelper.authorizeDownloadDatasets(com.bakdata.conquery.models.auth.AuthorizationHelper.authorizeDownloadDatasets) EofException(org.eclipse.jetty.io.EofException) PrintSettings(com.bakdata.conquery.models.query.PrintSettings) MediaType(javax.ws.rs.core.MediaType) Charset(java.nio.charset.Charset) ManagedExecution(com.bakdata.conquery.models.execution.ManagedExecution) OutputStreamWriter(java.io.OutputStreamWriter) I18n(com.bakdata.conquery.models.i18n.I18n) IdPrinter(com.bakdata.conquery.models.identifiable.mapping.IdPrinter) ResultUtil(com.bakdata.conquery.io.result.ResultUtil) ConqueryMDC(com.bakdata.conquery.util.io.ConqueryMDC) ResultUtil.makeResponseWithFileName(com.bakdata.conquery.io.result.ResultUtil.makeResponseWithFileName) BufferedWriter(java.io.BufferedWriter) ResultUtil.determineCharset(com.bakdata.conquery.io.result.ResultUtil.determineCharset) StreamingOutput(javax.ws.rs.core.StreamingOutput) SingleTableResult(com.bakdata.conquery.models.query.SingleTableResult) Dataset(com.bakdata.conquery.models.datasets.Dataset) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) Ability(com.bakdata.conquery.models.auth.permissions.Ability) WebApplicationException(javax.ws.rs.WebApplicationException) DatasetRegistry(com.bakdata.conquery.models.worker.DatasetRegistry) Namespace(com.bakdata.conquery.models.worker.Namespace)
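
Stripped of the conquery-specific pieces (determineCharset, CsvRenderer, EofException handling for cancelled downloads), the body of the StreamingOutput is a plain "stream CSV through an OutputStreamWriter in an explicit charset" pattern. A minimal runnable sketch of just that part, with naive joining instead of a real CSV writer:

import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.List;

public class CsvStreamer {

    // Streams rows through an OutputStreamWriter in the requested charset, flushing on close.
    public static void writeCsv(OutputStream os, Charset charset, List<List<String>> rows) throws IOException {
        try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, charset))) {
            for (List<String> row : rows) {
                writer.write(String.join(";", row)); // no quoting/escaping; a real CSV writer handles that
                writer.newLine();
            }
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        writeCsv(buffer, StandardCharsets.UTF_8, List.of(List.of("id", "value"), List.of("1", "42")));
        System.out.print(buffer.toString(StandardCharsets.UTF_8));
    }
}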

Example 25 with Namespace

Use of com.bakdata.conquery.models.worker.Namespace in project conquery by bakdata.

The class ConceptUpdateAndDeletionTest, method execute.

@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    StandaloneSupport conquery = testConquery.getSupport(name);
    // Read two JSONs with different Trees
    final String testJson = In.resource("/tests/query/UPDATE_CONCEPT_TESTS/SIMPLE_TREECONCEPT_Query.json").withUTF8().readAll();
    final String testJson2 = In.resource("/tests/query/UPDATE_CONCEPT_TESTS/SIMPLE_TREECONCEPT_2_Query.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final ConceptId conceptId = ConceptId.Parser.INSTANCE.parse(dataset.getName(), "test_tree");
    final Concept<?> concept;
    final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    final QueryTest test2 = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson2);
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        assertThat(namespace.getStorage().getConcept(conceptId)).isNotNull();
        LoadingUtil.importTableContents(conquery, test.getContent().getTables());
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // State before update.
    {
        log.info("Checking state before update");
        // Must contain the concept.
        assertThat(namespace.getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(conceptId)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)).isNotEmpty();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)).isNotEmpty();
            }
        }
        log.info("Executing query before update");
        IntegrationUtils.assertQueryResult(conquery, query, 1L, ExecutionState.DONE, conquery.getTestUser(), 201);
        conquery.waitUntilWorkDone();
        log.info("Query before update executed");
    }
    // Load a different concept with the same id (it has different, and more, children "C1" than the first concept's "A1").
    // To perform the update, the old concept is deleted first and the new one is added, so concept deletion is also covered here.
    {
        log.info("Executing  update");
        LoadingUtil.updateConcepts(conquery, test2.getRawConcepts(), Response.Status.Family.SUCCESSFUL);
        conquery.waitUntilWorkDone();
        log.info("Update executed");
    }
    // Check state after update.
    {
        log.info("Checking state after update");
        // Must contain the concept now.
        assertThat(namespace.getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(conceptId)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker value : node.getWorkers().getWorkers().values()) {
                if (!value.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)).isNotEmpty();
                assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)).isNotEmpty();
            }
        }
        log.info("Executing query after update");
        // Assert that the query result now contains 2 entities instead of 1.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
        conquery.waitUntilWorkDone();
        log.info("Query after update executed");
    }
    // Restart conquery (a new StandaloneSupport is created) and assert again that the data is still correct.
    {
        testConquery.shutdown();
        // restart
        testConquery.beforeAll();
        conquery = testConquery.openDataset(dataset.getId());
        log.info("Checking state after re-start");
        {
            // Must contain the concept.
            assertThat(conquery.getNamespace().getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isNotEmpty();
            assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)).isNotEmpty();
            for (ShardNode node : conquery.getShardNodes()) {
                for (Worker value : node.getWorkers().getWorkers().values()) {
                    if (!value.getInfo().getDataset().equals(dataset.getId())) {
                        continue;
                    }
                    final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
                    assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)).isNotEmpty();
                    assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)).isNotEmpty();
                }
            }
            log.info("Executing query after restart.");
            // Re-assert state.
            IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
            conquery.waitUntilWorkDone();
        }
    }
    // Delete the Concept.
    {
        log.info("Issuing deletion of import {}", conceptId);
        concept = Objects.requireNonNull(conquery.getNamespace().getStorage().getConcept(conceptId));
        conquery.getDatasetsProcessor().deleteConcept(concept);
        conquery.waitUntilWorkDone();
    }
    // Check state after deletion.
    {
        log.info("Checking state after deletion");
        // We've deleted the concept, so it and its associated CBlocks should be gone.
        assertThat(conquery.getNamespace().getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isEmpty();
        assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)).isEmpty();
        assertThat(conquery.getShardNodes().stream()
                           .flatMap(node -> node.getWorkers().getWorkers().values().stream())
                           .filter(worker -> worker.getInfo().getDataset().equals(dataset.getId()))
                           .map(Worker::getStorage))
                .noneMatch(workerStorage -> workerStorage.getConcept(conceptId) != null)
                .noneMatch(workerStorage -> workerStorage.getAllCBlocks().stream()
                                                         .anyMatch(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)));
        log.info("Executing query after deletion (EXPECTING AN EXCEPTION IN THE LOGS!)");
        // Issue a query and assert that it is failing.
        IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
    }
    // Restart conquery and assert again, that the state after deletion was maintained.
    {
        {
            testConquery.shutdown();
            // restart
            testConquery.beforeAll();
            conquery = testConquery.openDataset(dataset.getId());
        }
        // Check state after restart.
        {
            log.info("Checking state after restart");
            // We've deleted the concept, so it and its associated CBlocks should be gone.
            assertThat(conquery.getNamespace().getStorage().getAllConcepts()).filteredOn(con -> con.getId().equals(conceptId)).isEmpty();
            assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)).isEmpty();
            assertThat(conquery.getShardNodes().stream()
                               .flatMap(node -> node.getWorkers().getWorkers().values().stream())
                               .filter(worker -> worker.getInfo().getDataset().equals(dataset.getId()))
                               .map(Worker::getStorage))
                    .noneMatch(workerStorage -> workerStorage.getConcept(conceptId) != null)
                    .noneMatch(workerStorage -> workerStorage.getAllCBlocks().stream()
                                                             .anyMatch(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)));
            log.info("Executing query after restart (EXPECTING AN EXCEPTION IN THE LOGS!)");
            // Issue a query and assert that it is failing.
            IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
        }
    }
}
Also used : IntegrationUtils(com.bakdata.conquery.integration.common.IntegrationUtils) ProgrammaticIntegrationTest(com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest) ExecutionState(com.bakdata.conquery.models.execution.ExecutionState) TestConquery(com.bakdata.conquery.util.support.TestConquery) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) ShardNode(com.bakdata.conquery.commands.ShardNode) Concept(com.bakdata.conquery.models.datasets.concepts.Concept) QueryTest(com.bakdata.conquery.integration.json.QueryTest) LoadingUtil(com.bakdata.conquery.integration.common.LoadingUtil) StandaloneSupport(com.bakdata.conquery.util.support.StandaloneSupport) Dataset(com.bakdata.conquery.models.datasets.Dataset) ValidatorHelper(com.bakdata.conquery.models.exceptions.ValidatorHelper) Objects(java.util.Objects) Worker(com.bakdata.conquery.models.worker.Worker) Slf4j(lombok.extern.slf4j.Slf4j) Response(javax.ws.rs.core.Response) ModificationShieldedWorkerStorage(com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage) In(com.github.powerlibraries.io.In) ConceptId(com.bakdata.conquery.models.identifiable.ids.specific.ConceptId) JsonIntegrationTest(com.bakdata.conquery.integration.json.JsonIntegrationTest) Query(com.bakdata.conquery.apiv1.query.Query) LoadingUtil.importSecondaryIds(com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds) Namespace(com.bakdata.conquery.models.worker.Namespace)
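
The assertions above lean on two AssertJ idioms: filteredOn to check that a concept with a given id is present, and noneMatch to check that nothing references it after deletion. A tiny standalone sketch with plain strings standing in for concept ids (names are illustrative, not conquery types):

import static org.assertj.core.api.Assertions.assertThat;

import java.util.List;

public class AssertionStyleExample {

    public static void main(String[] args) {
        // Before deletion: the concept id must be present.
        List<String> conceptsBefore = List.of("dataset.test_tree", "dataset.other");
        assertThat(conceptsBefore).filteredOn(id -> id.equals("dataset.test_tree")).isNotEmpty();

        // After deletion: no element may match the deleted id.
        List<String> conceptsAfter = List.of("dataset.other");
        assertThat(conceptsAfter).noneMatch(id -> id.equals("dataset.test_tree"));
    }
}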

Aggregations

Namespace (com.bakdata.conquery.models.worker.Namespace): 28
Dataset (com.bakdata.conquery.models.datasets.Dataset): 11
Response (javax.ws.rs.core.Response): 10
Slf4j (lombok.extern.slf4j.Slf4j): 10
MetaStorage (com.bakdata.conquery.io.storage.MetaStorage): 8
ConqueryConfig (com.bakdata.conquery.models.config.ConqueryConfig): 8
DatasetRegistry (com.bakdata.conquery.models.worker.DatasetRegistry): 8
ValidatorHelper (com.bakdata.conquery.models.exceptions.ValidatorHelper): 7
ExecutionState (com.bakdata.conquery.models.execution.ExecutionState): 7
List (java.util.List): 7
WebApplicationException (javax.ws.rs.WebApplicationException): 7
Query (com.bakdata.conquery.apiv1.query.Query): 6
ShardNode (com.bakdata.conquery.commands.ShardNode): 6
IntegrationUtils (com.bakdata.conquery.integration.common.IntegrationUtils): 5
LoadingUtil (com.bakdata.conquery.integration.common.LoadingUtil): 5
LoadingUtil.importSecondaryIds (com.bakdata.conquery.integration.common.LoadingUtil.importSecondaryIds): 5
JsonIntegrationTest (com.bakdata.conquery.integration.json.JsonIntegrationTest): 5
QueryTest (com.bakdata.conquery.integration.json.QueryTest): 5
ModificationShieldedWorkerStorage (com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage): 5
SimpleJob (com.bakdata.conquery.models.jobs.SimpleJob): 5