Use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.
The class ManagedExecution, method buildStatusFull.
/**
 * Renders an extensive status of this query (see {@link FullExecutionStatus}). The rendering can be computationally intensive and can produce a large
 * object. The full status is only intended for clients that request specific information about this execution.
 */
public FullExecutionStatus buildStatusFull(@NonNull MetaStorage storage, Subject subject, DatasetRegistry datasetRegistry, ConqueryConfig config) {
    initExecutable(datasetRegistry, config);
    FullExecutionStatus status = new FullExecutionStatus();
    setStatusBase(subject, status);
    setAdditionalFieldsForStatusWithColumnDescription(storage, subject, status, datasetRegistry);
    setAdditionalFieldsForStatusWithSource(subject, status);
    setAdditionalFieldsForStatusWithGroups(storage, status);
    setAvailableSecondaryIds(status);
    status.setProgress(progress);
    if (getState().equals(ExecutionState.FAILED) && error != null) {
        // Use the plain format here for uniform serialization.
        status.setError(error.asPlain());
    }
    return status;
}
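A minimal caller sketch, assuming a hypothetical resource method in which storage, datasetRegistry, and config are injected; the method name getFullStatus is an illustration, not part of conquery:
// Hypothetical caller: render the full, potentially expensive status only when a
// client explicitly asks for the details of this one execution.
public FullExecutionStatus getFullStatus(ManagedExecution execution, Subject subject) {
    return execution.buildStatusFull(storage, subject, datasetRegistry, config);
}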
Use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.
The class ManagedExecution, method buildStatusOverview.
/**
 * Renders a lightweight status with meta information about this query. Computation and size should be small for this.
 */
public OverviewExecutionStatus buildStatusOverview(UriBuilder url, Subject subject) {
    OverviewExecutionStatus status = new OverviewExecutionStatus();
    setStatusBase(subject, status);
    return status;
}
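Because the overview is cheap to build, it fits listing endpoints that render many executions at once; a sketch, assuming a hypothetical listStatuses method with the executions, a UriBuilder, and a Subject at hand:
// Hypothetical listing sketch: build one lightweight status per stored execution.
public List<OverviewExecutionStatus> listStatuses(Collection<ManagedExecution> executions, UriBuilder uriBuilder, Subject subject) {
    return executions.stream()
            .map(execution -> execution.buildStatusOverview(uriBuilder, subject))
            .collect(Collectors.toList());
}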
Use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.
The class FilterResolutionExactTest, method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // read the test specification
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
    CSVConfig csvConf = conquery.getConfig().getCsv();
    test.importRequiredData(conquery);
    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();
    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();
    // Copy the search csv from the resources to a tmp folder.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
    filter.setSearchType(FilterSearch.FilterSearchType.EXACT);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));
    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();
    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());
    // from csv
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "aaa", "unknown"));
        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
    // from column values
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));
        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
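The temp-CSV setup in this test is a reusable pattern; a self-contained sketch of just that step, using only the JDK (the class and method names are illustrative, not from conquery):
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.List;

// Illustrative helper mirroring the test setup: join the given lines with the
// configured separator and write them to a fresh temporary search csv.
public final class SearchCsvFixture {
    public static Path writeTempCsv(List<String> lines, String lineSeparator) throws IOException {
        final Path tmpCsv = Files.createTempFile("conquery_search", "csv");
        Files.write(tmpCsv, String.join(lineSeparator, lines).getBytes(),
                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
        return tmpCsv;
    }
}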
Use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.
The class FilterResolutionPrefixTest, method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // read the test specification
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
    test.importRequiredData(conquery);
    CSVConfig csvConf = conquery.getConfig().getCsv();
    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();
    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();
    // Copy the search csv from the resources to a tmp folder.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
    filter.setSearchType(FilterSearch.FilterSearchType.PREFIX);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));
    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();
    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());
    // from csv
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "unknown"));
        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa", "aab");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
    // from column values
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));
        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
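Compared with the exact test above, the only functional difference is the configured search type, which determines how the query terms match the source values; the assertions of the two tests show the effect:
// The two tests differ only in this one line:
filter.setSearchType(FilterSearch.FilterSearchType.EXACT);  // "a" matches only the value "a"
filter.setSearchType(FilterSearch.FilterSearchType.PREFIX); // "a" also matches "aaa" and "aab"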
Use of com.bakdata.conquery.apiv1.query.Query in project conquery by bakdata.
The class ImportUpdateTest, method execute.
@Override
public void execute(String name, TestConquery testConquery) throws Exception {
    final StandaloneSupport conquery = testConquery.getSupport(name);
    MetaStorage storage = conquery.getMetaStorage();
    String testJson = In.resource("/tests/query/UPDATE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.json").withUTF8().readAll();
    final Dataset dataset = conquery.getDataset();
    final Namespace namespace = conquery.getNamespace();
    final ImportId importId1 = ImportId.Parser.INSTANCE.parse(dataset.getName(), "table1", "table1");
    final ImportId importId2 = ImportId.Parser.INSTANCE.parse(dataset.getName(), "table2", "table2");
    QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
    final List<RequiredTable> tables = test.getContent().getTables();
    assertThat(tables.size()).isEqualTo(2);
    List<File> cqpps;
    // Manually import data, so we can do our own work.
    {
        ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
        importSecondaryIds(conquery, test.getContent().getSecondaryIds());
        conquery.waitUntilWorkDone();
        LoadingUtil.importTables(conquery, tables);
        conquery.waitUntilWorkDone();
        LoadingUtil.importConcepts(conquery, test.getRawConcepts());
        conquery.waitUntilWorkDone();
        cqpps = LoadingUtil.generateCqpp(conquery, tables);
        conquery.waitUntilWorkDone();
        assertThat(cqpps.size()).isEqualTo(tables.size());
        LoadingUtil.importCqppFiles(conquery, List.of(cqpps.get(0)));
        conquery.waitUntilWorkDone();
    }
    final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
    // State before update.
    {
        log.info("Checking state before update");
        assertThat(namespace.getStorage().getAllImports()).hasSize(1);
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId1)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks())
                        .describedAs("CBlocks for Worker %s", worker.getInfo().getId())
                        .filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId()))
                        .isNotEmpty();
                assertThat(workerStorage.getAllBuckets())
                        .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId()))
                        .describedAs("Buckets for Worker %s", worker.getInfo().getId())
                        .isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId1)).isNotNull();
            }
        }
        assertThat(namespace.getNumberOfEntities()).isEqualTo(4);
        // Assert that the query can be executed after the import.
        IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
    // Trying to update an import that does not exist should yield a Not-Found WebApplicationException.
    LoadingUtil.updateCqppFile(conquery, cqpps.get(1), Response.Status.Family.CLIENT_ERROR, "Not Found");
    conquery.waitUntilWorkDone();
    // Manually load new data and update the concerned import.
    {
        log.info("Manually loading new data for import");
        final RequiredTable importTable = test.getContent().getTables().stream()
                .filter(table -> table.getName().equalsIgnoreCase(importId1.getTable().getTable()))
                .findFirst()
                .orElseThrow();
        final String csvName = importTable.getCsv().getName();
        final String path = importTable.getCsv().getPath();
        // Copy the new content of the importTable into the csv file used by the preprocessor, to avoid creating multiple files with the same name.
        FileUtils.copyInputStreamToFile(In.resource(path.substring(0, path.lastIndexOf('/')) + "/" + csvName.replace(".csv", ".update.csv")).asStream(), new File(conquery.getTmpDir(), csvName));
        File descriptionFile = new File(conquery.getTmpDir(), importTable.getName() + ConqueryConstants.EXTENSION_DESCRIPTION);
        File newPreprocessedFile = new File(conquery.getTmpDir(), importTable.getName() + ConqueryConstants.EXTENSION_PREPROCESSED);
        // Create the import descriptor.
        {
            TableImportDescriptor desc = new TableImportDescriptor();
            desc.setName(importTable.getName());
            desc.setTable(importTable.getName());
            TableInputDescriptor input = new TableInputDescriptor();
            {
                input.setPrimary(importTable.getPrimaryColumn().createOutput());
                input.setSourceFile(csvName);
                input.setOutput(new OutputDescription[importTable.getColumns().length]);
                for (int i = 0; i < importTable.getColumns().length; i++) {
                    input.getOutput()[i] = importTable.getColumns()[i].createOutput();
                }
            }
            desc.setInputs(new TableInputDescriptor[] { input });
            Jackson.MAPPER.writeValue(descriptionFile, desc);
        }
        // Preprocess.
        conquery.preprocessTmp(conquery.getTmpDir(), List.of(descriptionFile));
        log.info("updating import");
        // Correct update of the import.
        LoadingUtil.updateCqppFile(conquery, newPreprocessedFile, Response.Status.Family.SUCCESSFUL, "No Content");
        conquery.waitUntilWorkDone();
    }
    // State after update.
    {
        log.info("Checking state after update");
        assertThat(namespace.getStorage().getAllImports()).hasSize(1);
        // Must contain the import.
        assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().equals(importId1)).isNotEmpty();
        assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1)).isNotEmpty();
        for (ShardNode node : conquery.getShardNodes()) {
            for (Worker worker : node.getWorkers().getWorkers().values()) {
                if (!worker.getInfo().getDataset().equals(dataset.getId())) {
                    continue;
                }
                final ModificationShieldedWorkerStorage workerStorage = worker.getStorage();
                assertThat(workerStorage.getAllCBlocks())
                        .describedAs("CBlocks for Worker %s", worker.getInfo().getId())
                        .filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId()))
                        .isNotEmpty();
                assertThat(workerStorage.getAllBuckets())
                        .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId()))
                        .describedAs("Buckets for Worker %s", worker.getInfo().getId())
                        .isNotEmpty();
                // Must contain the import.
                assertThat(workerStorage.getImport(importId1)).isNotNull();
            }
        }
        assertThat(namespace.getNumberOfEntities()).isEqualTo(9);
        // Issue the query again and assert that it now sees more content.
        IntegrationUtils.assertQueryResult(conquery, query, 4L, ExecutionState.DONE, conquery.getTestUser(), 201);
    }
}
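The descriptor-building block in the middle of the test is the core of the update flow; as a standalone sketch using the same types as above (the helper name describeTable is illustrative):
// Illustrative refactoring of the descriptor block above: map a RequiredTable and
// its csv onto a TableImportDescriptor that the preprocessor can consume.
private static TableImportDescriptor describeTable(RequiredTable table, String csvName) {
    TableImportDescriptor desc = new TableImportDescriptor();
    desc.setName(table.getName());
    desc.setTable(table.getName());

    TableInputDescriptor input = new TableInputDescriptor();
    input.setPrimary(table.getPrimaryColumn().createOutput());
    input.setSourceFile(csvName);
    input.setOutput(new OutputDescription[table.getColumns().length]);
    for (int i = 0; i < table.getColumns().length; i++) {
        input.getOutput()[i] = table.getColumns()[i].createOutput();
    }

    desc.setInputs(new TableInputDescriptor[] { input });
    return desc;
}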