use of com.bakdata.conquery.models.query.resultinfo.ResultInfo in project conquery by bakdata.
the class AbstractQueryEngineTest method executeTest.
@Override
public void executeTest(StandaloneSupport standaloneSupport) throws IOException {
    Query query = getQuery();

    assertThat(standaloneSupport.getValidator().validate(query)).describedAs("Query Validation Errors").isEmpty();

    log.info("{} QUERY INIT", getLabel());

    final User testUser = standaloneSupport.getTestUser();
    final ManagedExecutionId executionId = IntegrationUtils.assertQueryResult(standaloneSupport, query, -1, ExecutionState.DONE, testUser, 201);
    final ManagedQuery execution = (ManagedQuery) standaloneSupport.getMetaStorage().getExecution(executionId);

    // Check that every result line has exactly as many values as there are result infos
    List<ResultInfo> resultInfos = execution.getResultInfos();
    assertThat(execution.streamResults().flatMap(EntityResult::streamValues))
            .as("Should have same size as result infos")
            .allSatisfy(v -> assertThat(v).hasSameSizeAs(resultInfos));

    // Get the actual response and compare it with the expected result.
    final Response csvResponse = standaloneSupport.getClient()
            .target(HierarchyHelper.hierarchicalPath(standaloneSupport.defaultApiURIBuilder(), ResultCsvResource.class, "getAsCsv")
                    .buildFromMap(Map.of(DATASET, standaloneSupport.getDataset().getName(), QUERY, execution.getId().toString())))
            .queryParam("pretty", false)
            .request(AdditionalMediaTypes.CSV)
            .acceptLanguage(Locale.ENGLISH)
            .get();

    List<String> actual = In.stream(((InputStream) csvResponse.getEntity())).readLines();

    ResourceFile expectedCsv = getExpectedCsv();
    List<String> expected = In.stream(expectedCsv.stream()).readLines();

    assertThat(actual).as("Results for %s are not as expected.", this).containsExactlyInAnyOrderElementsOf(expected);

    // Check that getLastResultCount returns the correct size (the header line is not counted)
    if (execution.streamResults().noneMatch(MultilineEntityResult.class::isInstance)) {
        assertThat(execution.getLastResultCount()).as("Result count for %s is not as expected.", this).isEqualTo(expected.size() - 1);
    }

    log.info("INTEGRATION TEST SUCCESSFUL {} {} on {} rows", getClass().getSimpleName(), this, expected.size());
}
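The core of the test is the order-insensitive line comparison between the downloaded CSV and the expected resource, followed by the header-excluding row count check. Below is a minimal, self-contained sketch of that assertion pattern, assuming only AssertJ on the classpath; the class name and file paths are hypothetical placeholders, not part of conquery.

// Minimal sketch of the CSV comparison pattern used above, outside the conquery test harness.
import static org.assertj.core.api.Assertions.assertThat;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

class CsvComparisonSketch {

    void compareCsv() throws IOException {
        List<String> actual = Files.readAllLines(Path.of("actual.csv"));     // hypothetical path
        List<String> expected = Files.readAllLines(Path.of("expected.csv")); // hypothetical path

        // Order-insensitive line comparison, as in the integration test above
        assertThat(actual)
                .as("Results are not as expected.")
                .containsExactlyInAnyOrderElementsOf(expected);

        // The expected result count excludes the header row
        int lastResultCount = expected.size() - 1;
        assertThat(actual.size() - 1).isEqualTo(lastResultCount);
    }
}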
use of com.bakdata.conquery.models.query.resultinfo.ResultInfo in project conquery by bakdata.
the class ArrayConceptQuery method getResultInfos.
@Override
public List<ResultInfo> getResultInfos() {
    final List<ResultInfo> resultInfos = new ArrayList<>();
    ResultInfo dateInfo = ConqueryConstants.DATES_INFO;

    if (!DateAggregationMode.NONE.equals(getResolvedDateAggregationMode())) {
        // Add one DateInfo for the whole Query
        resultInfos.add(0, dateInfo);
    }

    int lastIndex = resultInfos.size();

    childQueries.forEach(q -> resultInfos.addAll(q.getResultInfos()));

    if (!resultInfos.isEmpty()) {
        // Remove DateInfo from each childQuery
        resultInfos.subList(lastIndex, resultInfos.size()).removeAll(List.of(dateInfo));
    }

    return resultInfos;
}
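The method prepends a single dates entry, appends every child query's result infos, and then strips the dates entry each child contributed, so only the leading one survives. Below is a standalone sketch of that list handling using plain strings instead of ResultInfo; all names and values are illustrative only.

// Illustration of the "add shared entry up front, remove it from the child sections" pattern.
import java.util.ArrayList;
import java.util.List;

class DateInfoDedupSketch {

    static List<String> merge(List<List<String>> childInfos, boolean aggregateDates) {
        final String dateInfo = "dates";
        final List<String> result = new ArrayList<>();

        if (aggregateDates) {
            // A single dates entry for the whole query
            result.add(0, dateInfo);
        }

        int lastIndex = result.size();
        childInfos.forEach(result::addAll);

        if (!result.isEmpty()) {
            // Drop the dates entry each child contributed; only the leading one survives
            result.subList(lastIndex, result.size()).removeAll(List.of(dateInfo));
        }
        return result;
    }

    public static void main(String[] args) {
        List<List<String>> children = List.of(
                List.of("dates", "age"),
                List.of("dates", "diagnosis"));
        // Prints [dates, age, diagnosis]
        System.out.println(merge(children, true));
    }
}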
use of com.bakdata.conquery.models.query.resultinfo.ResultInfo in project conquery by bakdata.
the class TableExportQuery method createResultInfos.
private static List<ResultInfo> createResultInfos(int size, Map<SecondaryIdDescription, Integer> secondaryIdPositions, Map<Column, Integer> positions) {
    ResultInfo[] infos = new ResultInfo[size];

    infos[0] = ConqueryConstants.DATES_INFO;

    for (Map.Entry<SecondaryIdDescription, Integer> e : secondaryIdPositions.entrySet()) {
        SecondaryIdDescription desc = e.getKey();
        Integer pos = e.getValue();
        infos[pos] = new SimpleResultInfo(desc.getLabel(), ResultType.IdT.INSTANCE);
    }

    for (Map.Entry<Column, Integer> entry : positions.entrySet()) {
        final int position = entry.getValue();
        final Column column = entry.getKey();

        // 0 Position is date, already covered
        // SecondaryIds are pulled to the front, already covered.
        if (position == 0 || column.getSecondaryId() != null) {
            continue;
        }

        infos[position] = new SimpleResultInfo(column.getTable().getLabel() + " " + column.getLabel(), ResultType.resolveResultType(column.getType()));
    }

    return List.of(infos);
}
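The resulting array is purely positional: index 0 holds the dates column, secondary-id descriptions occupy the next slots, and the remaining table columns fill the positions after that, skipping any column already represented by its secondary id. Below is a hypothetical sketch of that layout with plain strings; the column names and positions are made up.

// Positional layout analogous to createResultInfos(): dates first, then secondary ids, then plain columns.
import java.util.List;
import java.util.Map;

class ExportLayoutSketch {

    static List<String> buildHeader(int size, Map<String, Integer> secondaryIdPositions, Map<String, Integer> columnPositions) {
        String[] header = new String[size];
        header[0] = "dates";

        // Secondary ids were assigned the leading positions after the date column
        secondaryIdPositions.forEach((label, pos) -> header[pos] = label);

        // Ordinary columns fill the rest; position 0 and already-occupied secondary-id slots are skipped
        columnPositions.forEach((label, pos) -> {
            if (pos != 0 && header[pos] == null) {
                header[pos] = label;
            }
        });
        return List.of(header);
    }

    public static void main(String[] args) {
        // Prints [dates, insurance_id, icd code, icd date]
        System.out.println(buildHeader(4, Map.of("insurance_id", 1), Map.of("icd code", 2, "icd date", 3)));
    }
}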
use of com.bakdata.conquery.models.query.resultinfo.ResultInfo in project conquery by bakdata.
the class ExcelRenderer method writeBody.
private int writeBody(SXSSFSheet sheet, List<ResultInfo> infos, Stream<EntityResult> resultLines) {
    // Row 0 is the header; the data starts at row 1
    final AtomicInteger currentRow = new AtomicInteger(1);
    final int writtenLines = resultLines.mapToInt(l -> this.writeRowsForEntity(infos, l, currentRow, cfg, sheet)).sum();

    // The result was shorter than the number of rows to track, so we auto-size here explicitly
    if (writtenLines < config.getLastRowToAutosize()) {
        setColumnWidthsAndUntrack(sheet);
    }

    return writtenLines;
}
use of com.bakdata.conquery.models.query.resultinfo.ResultInfo in project conquery by bakdata.
the class ArrowUtil method listField.
private static Field listField(ResultInfo info, @NonNull String uniqueName) {
    if (!(info.getType() instanceof ResultType.ListT)) {
        throw new IllegalStateException("Expected result type of " + ResultType.ListT.class.getName() + " but got " + info.getType().getClass().getName());
    }

    final ResultType elementType = ((ResultType.ListT) info.getType()).getElementType();
    BiFunction<ResultInfo, String, Field> nestedFieldCreator = FIELD_MAP.getOrDefault(elementType.getClass(), ArrowUtil::stringField);
    final Field nestedField = nestedFieldCreator.apply(info, uniqueName);

    return new Field(uniqueName, FieldType.nullable(ArrowType.List.INSTANCE), List.of(nestedField));
}