Use of com.bakdata.conquery.models.datasets.Dataset in the conquery project by bakdata.
Example: the addTable method of the AdminDatasetProcessor class.
/**
 * Registers a new {@link Table} in the given {@link Namespace}.
 *
 * <p>If the table is not yet bound to a dataset it is bound to the namespace's dataset;
 * if it is bound to a different dataset, registration is rejected.</p>
 *
 * @param table     the table to add; must not be null.
 * @param namespace the namespace (and therefore dataset) the table belongs to.
 * @throws IllegalArgumentException if the table is already bound to a different dataset.
 * @throws WebApplicationException  with status 409 (CONFLICT) if a table with the same id already exists.
 */
@SneakyThrows
public synchronized void addTable(@NonNull Table table, Namespace namespace) {
	Dataset dataset = namespace.getDataset();

	// Bind the table to this namespace's dataset, or reject a table built for another dataset.
	if (table.getDataset() == null) {
		table.setDataset(dataset);
	}
	else if (!table.getDataset().equals(dataset)) {
		// Include both ids so the caller can see which dataset the table actually belongs to.
		throw new IllegalArgumentException(String.format("Table `%s` belongs to Dataset `%s`, not `%s`", table.getId(), table.getDataset().getId(), dataset.getId()));
	}

	if (namespace.getStorage().getTable(table.getId()) != null) {
		throw new WebApplicationException("Table already exists", Response.Status.CONFLICT);
	}

	// Fail fast on an invalid table definition before persisting anything.
	ValidatorHelper.failOnError(log, validator.validate(table));

	namespace.getStorage().addTable(table);

	// Broadcast the new table to all workers of this namespace.
	namespace.sendToAll(new UpdateTable(table));
}
Use of com.bakdata.conquery.models.datasets.Dataset in the conquery project by bakdata.
Example: the getArrowResult method of the ResultArrowProcessor class.
public static <E extends ManagedExecution<?> & SingleTableResult> Response getArrowResult(Function<OutputStream, Function<VectorSchemaRoot, ArrowWriter>> writerProducer, Subject subject, E exec, Dataset dataset, DatasetRegistry datasetRegistry, boolean pretty, String fileExtension, MediaType mediaType, ConqueryConfig config) {
	final Namespace namespace = datasetRegistry.get(dataset.getId());

	ConqueryMDC.setLocation(subject.getName());
	log.info("Downloading results for {} on dataset {}", exec, dataset);

	// The subject needs read+download rights on the dataset and read rights on the execution.
	subject.authorize(dataset, Ability.READ);
	subject.authorize(dataset, Ability.DOWNLOAD);
	subject.authorize(exec, Ability.READ);

	// Check if subject is permitted to download on all datasets that were referenced by the query
	authorizeDownloadDatasets(subject, exec);

	// Only plain queries, or forms with exactly one sub-query, produce a single table.
	final boolean isSingleTable = exec instanceof ManagedQuery || (exec instanceof ManagedForm && ((ManagedForm) exec).getSubQueries().size() == 1);
	if (!isSingleTable) {
		return Response.status(HttpStatus.SC_UNPROCESSABLE_ENTITY, "Execution result is not a single Table").build();
	}

	// Get the locale extracted by the LocaleFilter
	final IdPrinter printer = config.getFrontend().getQueryUpload().getIdPrinter(subject, exec, namespace);
	final Locale locale = I18n.LOCALE.get();
	final PrintSettings printSettings = new PrintSettings(pretty, locale, datasetRegistry, config, printer::createId);

	// Collect ResultInfos for id columns and result columns
	final List<ResultInfo> idInfos = config.getFrontend().getQueryUpload().getIdResultInfos();
	final List<ResultInfo> executionInfos = exec.getResultInfos();

	final StreamingOutput out = output -> renderToStream(
			writerProducer.apply(output),
			printSettings,
			config.getArrow().getBatchSize(),
			idInfos,
			executionInfos,
			exec.streamResults()
	);

	return makeResponseWithFileName(out, exec.getLabelWithoutAutoLabelSuffix(), fileExtension, mediaType, ResultUtil.ContentDispositionOption.ATTACHMENT);
}
Use of com.bakdata.conquery.models.datasets.Dataset in the conquery project by bakdata.
Example: the getResult method of the ResultCsvProcessor class.
public <E extends ManagedExecution<?> & SingleTableResult> Response getResult(Subject subject, Dataset dataset, E exec, String userAgent, String queryCharset, boolean pretty) {
	final Namespace namespace = datasetRegistry.get(dataset.getId());

	ConqueryMDC.setLocation(subject.getName());
	log.info("Downloading results for {} on dataset {}", exec, dataset);

	// The subject needs read+download rights on the dataset and read rights on the execution.
	subject.authorize(namespace.getDataset(), Ability.READ);
	subject.authorize(namespace.getDataset(), Ability.DOWNLOAD);
	subject.authorize(exec, Ability.READ);

	// Check if subject is permitted to download on all datasets that were referenced by the query
	authorizeDownloadDatasets(subject, exec);

	final IdPrinter printer = config.getFrontend().getQueryUpload().getIdPrinter(subject, exec, namespace);

	// Get the locale extracted by the LocaleFilter
	final Locale locale = I18n.LOCALE.get();
	final PrintSettings printSettings = new PrintSettings(pretty, locale, datasetRegistry, config, printer::createId);

	final Charset charset = determineCharset(userAgent, queryCharset);

	final StreamingOutput out = stream -> {
		try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(stream, charset))) {
			final CsvRenderer renderer = new CsvRenderer(config.getCsv().createWriter(writer), printSettings);
			renderer.toCSV(config.getFrontend().getQueryUpload().getIdResultInfos(), exec.getResultInfos(), exec.streamResults());
		}
		catch (EofException e) {
			// Client closed the connection mid-download; this is not an error.
			log.info("User canceled download");
		}
		catch (Exception e) {
			throw new WebApplicationException("Failed to load result", e);
		}
	};

	return makeResponseWithFileName(out, exec.getLabelWithoutAutoLabelSuffix(), "csv", new MediaType("text", "csv", charset.toString()), ResultUtil.ContentDispositionOption.ATTACHMENT);
}
Use of com.bakdata.conquery.models.datasets.Dataset in the conquery project by bakdata.
Example: the decorateConceptStore method of the NamespacedStorage class.
/**
 * Wires the concept store so that adding/removing a concept also registers/unregisters
 * all of its dependent objects (selects, connectors, filters, validity dates, tree children)
 * with the central registry. Registration order mirrors removal order in reverse.
 */
private void decorateConceptStore(IdentifiableStore<Concept<?>> store) {
	store.onAdd(concept -> {
		// A concept may only be added to the dataset it was built for.
		if (concept.getDataset() != null && !concept.getDataset().equals(dataset.get())) {
			throw new IllegalStateException("Concept is not for this dataset.");
		}
		concept.setDataset(dataset.get());

		concept.initElements();

		concept.getSelects().forEach(centralRegistry::register);

		for (Connector con : concept.getConnectors()) {
			centralRegistry.register(con);
			con.collectAllFilters().forEach(centralRegistry::register);
			con.getSelects().forEach(centralRegistry::register);
			con.getValidityDates().forEach(centralRegistry::register);
		}

		// add imports of table
		if (isRegisterImports()) {
			for (Import imp : getAllImports()) {
				for (Connector con : concept.getConnectors()) {
					if (con.getTable().equals(imp.getTable())) {
						con.addImport(imp);
					}
				}
			}
		}

		if (concept instanceof TreeConcept) {
			((TreeConcept) concept).getAllChildren().values().forEach(centralRegistry::register);
		}
	})
	     .onRemove(concept -> {
		     concept.getSelects().forEach(centralRegistry::remove);

		     // see #146 remove from Dataset.concepts
		     for (Connector con : concept.getConnectors()) {
			     con.getSelects().forEach(centralRegistry::remove);
			     con.collectAllFilters().forEach(centralRegistry::remove);
			     con.getValidityDates().forEach(centralRegistry::remove);
			     centralRegistry.remove(con);
		     }

		     if (concept instanceof TreeConcept) {
			     ((TreeConcept) concept).getAllChildren().values().forEach(centralRegistry::remove);
		     }
	     });
}
Use of com.bakdata.conquery.models.datasets.Dataset in the conquery project by bakdata.
Example: the dataset test method of the SerializationTests class.
/**
 * Round-trip serialization test for a minimal {@link Dataset} with only a name set.
 */
@Test
public void dataset() throws IOException, JSONException {
	final Dataset dataset = new Dataset();
	dataset.setName("dataset");

	SerializationTestUtil
			.forType(Dataset.class)
			.test(dataset);
}
Aggregations