Use of com.bakdata.conquery.models.query.ManagedQuery in the project conquery by bakdata: the class AbstractQueryEngineTest, method executeTest.
/**
 * Runs the integration query test end-to-end: validates the query, submits and awaits it,
 * checks the per-entity result values against the declared result infos, and compares the
 * CSV result endpoint's output to the expected CSV resource.
 */
@Override
public void executeTest(StandaloneSupport standaloneSupport) throws IOException {
	Query query = getQuery();

	// The query must pass bean validation before being submitted.
	assertThat(standaloneSupport.getValidator().validate(query))
			.describedAs("Query Validation Errors")
			.isEmpty();

	log.info("{} QUERY INIT", getLabel());

	final User testUser = standaloneSupport.getTestUser();

	// Submit the query and wait until it reaches DONE (expects HTTP 201 on creation).
	final ManagedExecutionId executionId =
			IntegrationUtils.assertQueryResult(standaloneSupport, query, -1, ExecutionState.DONE, testUser, 201);

	final ManagedQuery execution = (ManagedQuery) standaloneSupport.getMetaStorage().getExecution(executionId);

	// Every value row must have exactly one entry per declared result info.
	List<ResultInfo> resultInfos = execution.getResultInfos();
	assertThat(execution.streamResults().flatMap(EntityResult::streamValues))
			.as("Should have same size as result infos")
			.allSatisfy(v -> assertThat(v).hasSameSizeAs(resultInfos));

	// Get the actual response and compare with expected result.
	final Response csvResponse = standaloneSupport.getClient()
			.target(HierarchyHelper.hierarchicalPath(standaloneSupport.defaultApiURIBuilder(), ResultCsvResource.class, "getAsCsv")
					.buildFromMap(Map.of(
							DATASET, standaloneSupport.getDataset().getName(),
							QUERY, execution.getId().toString()
					)))
			.queryParam("pretty", false)
			.request(AdditionalMediaTypes.CSV)
			.acceptLanguage(Locale.ENGLISH)
			.get();

	// Fix: the Response was previously never closed (leaks the underlying connection)
	// and its status was never checked before reading the entity.
	try {
		assertThat(csvResponse.getStatusInfo().getFamily())
				.as("CSV response status family")
				.isEqualTo(Response.Status.Family.SUCCESSFUL);

		List<String> actual = In.stream(((InputStream) csvResponse.getEntity())).readLines();

		ResourceFile expectedCsv = getExpectedCsv();
		List<String> expected = In.stream(expectedCsv.stream()).readLines();

		assertThat(actual)
				.as("Results for %s are not as expected.", this)
				.containsExactlyInAnyOrderElementsOf(expected);

		// getLastResultCount only matches the CSV line count (minus the header line)
		// when no entity produced multiple result lines.
		if (execution.streamResults().noneMatch(MultilineEntityResult.class::isInstance)) {
			assertThat(execution.getLastResultCount())
					.as("Result count for %s is not as expected.", this)
					.isEqualTo(expected.size() - 1);
		}

		log.info("INTEGRATION TEST SUCCESSFUL {} {} on {} rows", getClass().getSimpleName(), this, expected.size());
	}
	finally {
		csvResponse.close();
	}
}
Use of com.bakdata.conquery.models.query.ManagedQuery in the project conquery by bakdata: the class QueryCleanupTask, method execute.
/**
 * Deletes stale executions: those that are older than the expiration duration, not shared,
 * untagged, and carry no label (or only a default label) — unless they are still referenced
 * (reused) by another query or form.
 *
 * An optional EXPIRATION_PARAM task parameter (parsed via {@link Duration#parse}) overrides
 * the configured expiration; only the first supplied value is respected.
 *
 * Runs in repeated passes until a pass deletes nothing, because deleting a referencing
 * execution can make the executions it referenced deletable in the next pass.
 */
@Override
public void execute(Map<String, List<String>> parameters, PrintWriter output) throws Exception {
	Duration queryExpiration = this.queryExpiration;

	// An explicitly supplied expiration overrides the configured one.
	if (parameters.containsKey(EXPIRATION_PARAM)) {
		if (parameters.get(EXPIRATION_PARAM).size() > 1) {
			log.warn("Will not respect more than one expiration time. Have `{}`", parameters.get(EXPIRATION_PARAM));
		}
		queryExpiration = Duration.parse(parameters.get(EXPIRATION_PARAM).get(0));
	}

	if (queryExpiration == null) {
		throw new IllegalArgumentException("Query Expiration may not be null");
	}

	log.info("Starting deletion of queries older than {} of {}", queryExpiration, storage.getAllExecutions().size());

	// Iterate for as long as no changes are needed (this is because queries can be referenced by other queries)
	while (true) {
		final QueryUtils.AllReusedFinder reusedChecker = new QueryUtils.AllReusedFinder();
		Set<ManagedExecution<?>> toDelete = new HashSet<>();

		for (ManagedExecution<?> execution : storage.getAllExecutions()) {

			// Gather all referenced queries via reused checker.
			// NOTE: every execution is visited here, even ones the checks below skip —
			// references held by kept executions must still protect what they reuse.
			if (execution instanceof ManagedQuery) {
				((ManagedQuery) execution).getQuery().visit(reusedChecker);
			}
			else if (execution instanceof ManagedForm) {
				((ManagedForm) execution).getFlatSubQueries().values().forEach(q -> q.getQuery().visit(reusedChecker));
			}

			// Each trace below logs the condition that was just ruled out for this execution.
			if (execution.isShared()) {
				continue;
			}
			log.trace("{} is not shared", execution.getId());

			if (ArrayUtils.isNotEmpty(execution.getTags())) {
				continue;
			}
			log.trace("{} has no tags", execution.getId());

			// Executions whose label is the auto-generated default still count as unlabeled.
			if (execution.getLabel() != null && !isDefaultLabel(execution.getLabel())) {
				continue;
			}
			log.trace("{} has no label", execution.getId());

			if (LocalDateTime.now().minus(queryExpiration).isBefore(execution.getCreationTime())) {
				continue;
			}
			log.trace("{} is not older than {}.", execution.getId(), queryExpiration);

			toDelete.add(execution);
		}

		// remove all queries referenced in reused queries.
		final Collection<ManagedExecution<?>> referenced = reusedChecker.getReusedElements().stream().map(CQReusedQuery::getQueryId).map(storage::getExecution).collect(Collectors.toSet());
		toDelete.removeAll(referenced);

		// Nothing left to delete: the stored set is stable, so stop iterating.
		if (toDelete.isEmpty()) {
			log.info("No queries to delete");
			break;
		}

		log.info("Deleting {} Executions", toDelete.size());

		for (ManagedExecution<?> execution : toDelete) {
			log.trace("Deleting Execution[{}]", execution.getId());
			storage.removeExecution(execution.getId());
		}
	}
}
Use of com.bakdata.conquery.models.query.ManagedQuery in the project conquery by bakdata: the class QueryProcessor, method uploadEntities.
/**
 * Try to resolve the external upload, if successful, create query for the subject and return id and statistics for that.
 */
public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, ExternalUpload upload) {
	final CQExternal.ResolveStatistic resolveStatistic = CQExternal.resolveEntities(
			upload.getValues(),
			upload.getFormat(),
			datasetRegistry.get(dataset.getId()).getStorage().getIdMapping(),
			config.getFrontend().getQueryUpload(),
			config.getLocale().getDateReader()
	);

	// If not a single entity could be resolved, answer 400 with the resolution statistics.
	if (resolveStatistic.getResolved().isEmpty()) {
		final ExternalUploadResult emptyResult =
				new ExternalUploadResult(null, 0, resolveStatistic.getUnresolvedId(), resolveStatistic.getUnreadableDate());
		throw new BadRequestException(Response.status(Response.Status.BAD_REQUEST).entity(emptyResult).build());
	}

	// Wrap the upload in a plain concept query; it is never executed, only stored so it can be reused in compositions.
	final ConceptQuery uploadQuery = new ConceptQuery(new CQExternal(upload.getFormat(), upload.getValues()));

	final ManagedQuery managedQuery = (ManagedQuery) datasetRegistry.get(dataset.getId())
			.getExecutionManager()
			.createExecution(datasetRegistry, uploadQuery, subject.getUser(), dataset);

	if (upload.getLabel() != null) {
		managedQuery.setLabel(upload.getLabel());
	}
	managedQuery.setLastResultCount((long) resolveStatistic.getResolved().size());

	managedQuery.initExecutable(datasetRegistry, config);

	return new ExternalUploadResult(
			managedQuery.getId(),
			resolveStatistic.getResolved().size(),
			resolveStatistic.getUnresolvedId(),
			resolveStatistic.getUnreadableDate()
	);
}
Use of com.bakdata.conquery.models.query.ManagedQuery in the project conquery by bakdata: the class FullExportForm, method createSubQueries.
/**
 * Builds the single sub-query of a full export: a table-export over every table referenced
 * by any CQConcept in the submitted queries, with all filtering stripped.
 */
@Override
public Map<String, List<ManagedQuery>> createSubQueries(DatasetRegistry datasets, User user, Dataset submittedDataset) {
	// Forms are sent as an array of standard queries containing AND/OR of CQConcepts;
	// we ignore everything else and only keep each concept's tables, unfiltered, for the export.
	final List<CQUnfilteredTable> exportTables =
			tables.stream()
				  .flatMap(Visitable::stream)
				  .filter(CQConcept.class::isInstance)
				  .map(CQConcept.class::cast)
				  .flatMap(cqConcept -> cqConcept.getTables().stream())
				  .map(cqTable -> new CQUnfilteredTable(cqTable.getConnector(), cqTable.getDateColumn()))
				  .collect(Collectors.toList());

	// Export over the referenced query group, restricted to the form's date range.
	final TableExportQuery tableExportQuery = new TableExportQuery(queryGroup.getQuery());
	tableExportQuery.setDateRange(getDateRange());
	tableExportQuery.setTables(exportTables);

	return Map.of(
			ConqueryConstants.SINGLE_RESULT_TABLE_NAME,
			List.of(new ManagedQuery(tableExportQuery, user, submittedDataset))
	);
}
Use of com.bakdata.conquery.models.query.ManagedQuery in the project conquery by bakdata: the class CsvResultGenerationTest, method writeAndRead.
/**
 * Renders the test entity results to CSV via {@code CsvRenderer} and checks the produced
 * output against an independently generated expected CSV string.
 */
@Test
void writeAndRead() throws IOException {
	// Prepare every input data
	PrintSettings printSettings = new PrintSettings(
			true,
			Locale.GERMAN,
			null,
			CONFIG,
			(cer) -> EntityPrintId.from(Integer.toString(cer.getEntityId()), Integer.toString(cer.getEntityId())),
			(selectInfo) -> selectInfo.getSelect().getLabel()
	);

	// The Shard nodes send Object[] but since Jackson is used for deserialization,
	// nested collections are always a list because they are not further specialized.
	List<EntityResult> results = getTestEntityResults();

	ManagedQuery mquery = getTestQuery();

	// First we write to the buffer, then we read from it and parse it as TSV.
	StringWriter writer = new StringWriter();
	CsvRenderer renderer = new CsvRenderer(CONFIG.getCsv().createWriter(writer), printSettings);
	renderer.toCSV(ResultTestUtil.ID_FIELDS, mquery.getResultInfos(), mquery.streamResults());

	String computed = writer.toString();
	String expected = generateExpectedCSV(results, mquery.getResultInfos(), printSettings);

	// Fix: log message previously read "than" instead of "then".
	log.info("Wrote and then read this csv data: {}", computed);

	assertThat(computed).isNotEmpty();
	assertThat(computed).isEqualTo(expected);
}
Aggregations