Use of com.bakdata.conquery.models.query.results.EntityResult in project conquery by bakdata: class ExcelResultRenderTest, method writeAndRead().
@Test
void writeAndRead() throws IOException {
	// Set up all input data for the round trip.
	PrintSettings printSettings = new PrintSettings(
			true,
			Locale.GERMAN,
			null,
			CONFIG,
			(cer) -> EntityPrintId.from(Integer.toString(cer.getEntityId()), Integer.toString(cer.getEntityId())),
			(selectInfo) -> selectInfo.getSelect().getLabel());

	// The Shard nodes send Object[] but since Jackson is used for deserialization, nested collections are always a list because they are not further specialized
	List<EntityResult> results = getTestEntityResults();

	ManagedQuery mquery = new ManagedQuery(null, null, null) {
		public List<ResultInfo> getResultInfos() {
			return getResultTypes().stream()
					.map(ResultTestUtil.TypedSelectDummy::new)
					.map(select -> new SelectResultInfo(select, new CQConcept()))
					.collect(Collectors.toList());
		}

		@Override
		public Stream<EntityResult> streamResults() {
			return results.stream();
		}
	};

	// First render the query into an in-memory buffer, then read the buffer back and parse it as TSV.
	ByteArrayOutputStream output = new ByteArrayOutputStream();
	ExcelRenderer renderer = new ExcelRenderer(new ExcelConfig(), printSettings);
	renderer.renderToStream(ResultTestUtil.ID_FIELDS, mquery, output);

	InputStream inputStream = new ByteArrayInputStream(output.toByteArray());
	List<String> computed = readComputed(inputStream, printSettings);
	List<String> expected = generateExpectedTSV(results, mquery.getResultInfos(), printSettings);

	log.info("Wrote and than read this excel data: {}", computed);

	// The rendered output must be non-empty and match the expected TSV line for line.
	assertThat(computed).isNotEmpty();
	assertThat(computed).isEqualTo(expected);
}
Use of com.bakdata.conquery.models.query.results.EntityResult in project conquery by bakdata: class ArrowResultGenerationTest, method writeAndRead().
@Test
void writeAndRead() throws IOException {
	// Initialize internationalization
	I18n.init();

	// Set up all input data for the round trip.
	PrintSettings printSettings = new PrintSettings(
			false,
			Locale.ROOT,
			null,
			CONFIG,
			(cer) -> EntityPrintId.from(Integer.toString(cer.getEntityId()), Integer.toString(cer.getEntityId())),
			(selectInfo) -> selectInfo.getSelect().getLabel());

	// The Shard nodes send Object[] but since Jackson is used for deserialization, nested collections are always a list because they are not further specialized
	List<EntityResult> results = getTestEntityResults();
	ManagedQuery mquery = getTestQuery();

	// First render the results into an in-memory Arrow stream, then read it back and parse it as TSV.
	ByteArrayOutputStream output = new ByteArrayOutputStream();
	renderToStream(
			(root) -> new ArrowStreamWriter(root, new DictionaryProvider.MapDictionaryProvider(), output),
			printSettings,
			BATCH_SIZE,
			ResultTestUtil.ID_FIELDS,
			mquery.getResultInfos(),
			mquery.streamResults());

	InputStream inputStream = new ByteArrayInputStream(output.toByteArray());
	String computed = readTSV(inputStream);

	// The rendered output must be non-blank and match the expected TSV exactly.
	assertThat(computed).isNotBlank();
	assertThat(computed).isEqualTo(generateExpectedTSV(results, mquery.getResultInfos(), printSettings));
}
Use of com.bakdata.conquery.models.query.results.EntityResult in project conquery by bakdata: class EntityDateQueryPlan, method execute().
@Override
public Optional<MultilineEntityResult> execute(QueryExecutionContext ctx, Entity entity) {
	// The query date aggregator is intentionally not set here: the subqueries
	// register their aggregators independently.

	// Run the prerequisite query; bail out early if the entity is not contained.
	final Optional<EntityResult> preResult = query.execute(ctx, entity);
	if (preResult.isEmpty()) {
		return Optional.empty();
	}

	// Without a validity date aggregator there are no dates to build contexts from.
	final Optional<DateAggregator> validityDateAggregator = query.getValidityDateAggregator();
	if (validityDateAggregator.isEmpty()) {
		return Optional.empty();
	}

	// Restrict the aggregated validity dates to the configured date restriction.
	final CDateSet aggregationResult = validityDateAggregator.get().createAggregationResult();
	aggregationResult.retainAll(dateRestriction);

	// Generate DateContexts for every remaining range in the provided resolutions.
	final List<DateContext> contexts = new ArrayList<>();
	aggregationResult.asRanges()
					 .forEach(range -> contexts.addAll(DateContext.generateAbsoluteContexts(range, resolutionsAndAlignments)));

	return new FormQueryPlan(contexts, features).execute(ctx, entity);
}
Use of com.bakdata.conquery.models.query.results.EntityResult in project conquery by bakdata: class RelativeFormQueryPlan, method execute().
@Override
public Optional<MultilineEntityResult> execute(QueryExecutionContext ctx, Entity entity) {
	// Don't set the query date aggregator here because the subqueries should set their aggregator independently

	// Execute the prerequisite query; an entity that is not contained yields no result.
	Optional<? extends EntityResult> preResult = query.execute(ctx, entity);
	if (preResult.isEmpty()) {
		return Optional.empty();
	}
	int size = calculateCompleteLength();
	EntityResult contained = preResult.get();
	// Gather all validity dates from prerequisite
	CDateSet dateSet = query.getValidityDateAggregator().map(Aggregator::createAggregationResult).orElseGet(CDateSet::create);
	final OptionalInt sampled = indexSelector.sample(dateSet);

	// dateset is empty or sampling failed: emit a single all-null line, with the
	// exist-aggregator columns explicitly set.
	if (sampled.isEmpty()) {
		log.warn("Sampled empty result for Entity[{}]: `{}({})`", contained.getEntityId(), indexSelector, dateSet);
		List<Object[]> results = new ArrayList<>();
		results.add(new Object[size]);
		return Optional.of(ResultModifier.modify(new MultilineEntityResult(entity.getId(), results), ResultModifier.existAggValuesSetterFor(getAggregators(), OptionalInt.of(getFirstAggregatorPosition()))));
	}

	int sample = sampled.getAsInt();
	// Build the relative date contexts around the sampled index day.
	List<DateContext> contexts = DateContext.generateRelativeContexts(sample, indexPlacement, timeCountBefore, timeCountAfter, timeUnit, resolutionsAndAlignmentMap);

	// create feature and outcome plans
	featureSubquery = createSubQuery(featurePlan, contexts, FeatureGroup.FEATURE);
	outcomeSubquery = createSubQuery(outcomePlan, contexts, FeatureGroup.OUTCOME);

	// determine result length and check against aggregators in query
	int featureLength = featureSubquery.columnCount();
	int outcomeLength = outcomeSubquery.columnCount();

	featureSubquery.init(ctx, entity);
	outcomeSubquery.init(ctx, entity);

	Optional<MultilineEntityResult> featureResult = featureSubquery.execute(ctx, entity);
	Optional<MultilineEntityResult> outcomeResult = outcomeSubquery.execute(ctx, entity);

	// FIX: the original called Optional.get() unconditionally below, which throws
	// NoSuchElementException if either subquery produced no result. Guard explicitly
	// and treat a missing subresult as "entity not contained".
	if (featureResult.isEmpty() || outcomeResult.isEmpty()) {
		return Optional.empty();
	}

	// determine result length and check against aggregators in query
	assertResultWidth(featureResult.get(), featureLength);
	assertResultWidth(outcomeResult.get(), outcomeLength);

	List<Object[]> featureResultValues = featureResult.get().getValues();
	List<Object[]> outcomeResultValues = outcomeResult.get().getValues();

	int resultStartIndex = 0;
	List<Object[]> values = new ArrayList<>();
	if (hasCompleteDateContexts(contexts)) {
		// merge a line for the complete daterange, when two dateContext were generated
		// that don't target the same feature group,
		// which would be a mistake by the generation
		// Since the DateContexts are primarily ordered by their coarseness and COMPLETE
		// is the most coarse resolution it must be at the first
		// to indexes of the list.
		Object[] mergedFull = new Object[size];
		if (featurePlan.getAggregatorSize() > 0) {
			setFeatureValues(mergedFull, featureResultValues.get(resultStartIndex));
		}
		if (outcomePlan.getAggregatorSize() > 0) {
			setOutcomeValues(mergedFull, outcomeResultValues.get(resultStartIndex));
		}
		values.add(mergedFull);
		resultStartIndex++;
	}

	// append all other lines directly
	for (int i = resultStartIndex; i < featureResultValues.size(); i++) {
		Object[] result = new Object[size];
		setFeatureValues(result, featureResultValues.get(i));
		values.add(result);
	}
	for (int i = resultStartIndex; i < outcomeResultValues.size(); i++) {
		Object[] result = new Object[size];
		setOutcomeValues(result, outcomeResultValues.get(i));
		values.add(result);
	}

	return Optional.of(new MultilineEntityResult(entity.getId(), values));
}
Aggregations