Usage example of com.bakdata.conquery.models.query.resultinfo.ResultInfo in the conquery project by bakdata.
Taken from the class ArrowResultGenerationTest, method generateFieldsValue:
@Test
void generateFieldsValue() {
	// Wrap every known result type in a dummy select so we can build
	// SelectResultInfos without a fully initialized model.
	final List<ResultInfo> resultInfos = getResultTypes().stream()
			.map(ResultTestUtil.TypedSelectDummy::new)
			.map(select -> new SelectResultInfo(select, new CQConcept()))
			.collect(Collectors.toList());

	// Custom column namer so we don't require a dataset registry
	final List<Field> fields = generateFields(resultInfos, UNIQUE_NAMER);

	// Reusable pieces of the expected Arrow schema. Field/FieldType equality is
	// structural, so sharing instances does not change the assertion.
	final FieldType nullableDay = FieldType.nullable(new ArrowType.Date(DateUnit.DAY));
	final FieldType nullableUtf8 = FieldType.nullable(new ArrowType.Utf8());
	final FieldType nullableInt32 = FieldType.nullable(new ArrowType.Int(32, true));
	final List<Field> dateRangeChildren = List.of(
			new Field("min", nullableDay, null),
			new Field("max", nullableDay, null));

	final List<Field> expected = List.of(
			new Field("BOOLEAN", FieldType.nullable(ArrowType.Bool.INSTANCE), null),
			new Field("INTEGER", nullableInt32, null),
			new Field("NUMERIC", FieldType.nullable(new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)), null),
			new Field("CATEGORICAL", nullableUtf8, null),
			new Field("RESOLUTION", nullableUtf8, null),
			new Field("DATE", nullableDay, null),
			// Date ranges are rendered as a struct of min/max day fields
			new Field("DATE_RANGE", FieldType.nullable(ArrowType.Struct.INSTANCE), dateRangeChildren),
			new Field("STRING", nullableUtf8, null),
			new Field("MONEY", nullableInt32, null),
			// List types carry a single child field named like the list itself
			new Field("LIST[BOOLEAN]", FieldType.nullable(ArrowType.List.INSTANCE),
					List.of(new Field("LIST[BOOLEAN]", FieldType.nullable(ArrowType.Bool.INSTANCE), null))),
			new Field("LIST[DATE_RANGE]", FieldType.nullable(ArrowType.List.INSTANCE),
					List.of(new Field("LIST[DATE_RANGE]", FieldType.nullable(ArrowType.Struct.INSTANCE), dateRangeChildren))),
			new Field("LIST[STRING]", FieldType.nullable(ArrowType.List.INSTANCE),
					List.of(new Field("LIST[STRING]", nullableUtf8, null))));

	assertThat(fields).containsExactlyElementsOf(expected);
}
Usage example of com.bakdata.conquery.models.query.resultinfo.ResultInfo in the conquery project by bakdata.
Taken from the class CsvResultGenerationTest, method generateExpectedCSV:
/**
 * Builds the CSV text that the result renderer is expected to produce for the
 * given entity results: one header line (id columns followed by one column per
 * result type), then one line per result row.
 *
 * @param results     the entity results to render
 * @param resultInfos per-column type information, indexed like the result lines
 * @param settings    print settings used for column naming and value rendering
 * @return the expected CSV content, lines joined with "\n" terminators
 */
private String generateExpectedCSV(List<EntityResult> results, List<ResultInfo> resultInfos, PrintSettings settings) {
	List<String> expected = new ArrayList<>();
	// Header: id columns first, then one column per result type
	expected.add(ResultTestUtil.ID_FIELDS.stream().map(info -> info.defaultColumnName(settings)).collect(Collectors.joining(",")) + "," + getResultTypes().stream().map(ResultType::typeInfo).collect(Collectors.joining(",")) + "\n");

	// Hoisted out of the loops: the delimiter does not change per cell
	final String delimiter = String.valueOf(CONFIG.getCsv().getDelimeter());

	// `results` is already List<EntityResult>; the original stream with
	// `.map(EntityResult.class::cast)` was a redundant identity cast.
	for (EntityResult res : results) {
		for (Object[] line : res.listResultLines()) {
			StringJoiner valueJoiner = new StringJoiner(",");
			// The entity id occupies two id columns in the header, so it is added twice
			valueJoiner.add(String.valueOf(res.getEntityId()));
			valueJoiner.add(String.valueOf(res.getEntityId()));
			for (int lIdx = 0; lIdx < line.length; lIdx++) {
				Object val = line[lIdx];
				if (val == null) {
					// Null values render as empty cells
					valueJoiner.add("");
					continue;
				}
				ResultInfo info = resultInfos.get(lIdx);
				final String printVal = info.getType().printNullable(settings, val);
				// Quote values that contain the configured delimiter
				valueJoiner.add(printVal.contains(delimiter) ? "\"" + printVal + "\"" : printVal);
			}
			expected.add(valueJoiner + "\n");
		}
	}
	return String.join("", expected);
}
Usage example of com.bakdata.conquery.models.query.resultinfo.ResultInfo in the conquery project by bakdata.
Taken from the class ManagedQuery, method generateColumnDescriptions:
/**
 * Generates a description of each column that will appear in the resulting csv.
 * Id columns (as configured for query upload) come first, followed by one
 * descriptor per result info of this execution.
 */
public List<ColumnDescriptor> generateColumnDescriptions(DatasetRegistry datasetRegistry) {
	Preconditions.checkArgument(isInitialized(), "The execution must have been initialized first");
	List<ColumnDescriptor> columnDescriptions = new ArrayList<>();
	final Locale locale = I18n.LOCALE.get();
	// Pretty printing enabled; no id-printer function is needed for descriptions (hence null)
	PrintSettings settings = new PrintSettings(true, locale, datasetRegistry, config, null);
	UniqueNamer uniqNamer = new UniqueNamer(settings);
	// First add the id columns to the descriptor list. They are the first columns.
	for (ResultInfo header : config.getFrontend().getQueryUpload().getIdResultInfos()) {
		columnDescriptions.add(ColumnDescriptor.builder().label(uniqNamer.getUniqueName(header)).type(ResultType.IdT.INSTANCE.typeInfo()).build());
	}
	// NOTE(review): a second, independent UniqueNamer is used for the result columns,
	// so result-column labels are only deduplicated among themselves, not against the
	// id columns added above — confirm that this is intended.
	final UniqueNamer collector = new UniqueNamer(settings);
	getResultInfos().forEach(info -> columnDescriptions.add(info.asColumnDescriptor(settings, collector)));
	return columnDescriptions;
}
Usage example of com.bakdata.conquery.models.query.resultinfo.ResultInfo in the conquery project by bakdata.
Taken from the class CQExternal, method getResultInfos:
@Override
public List<ResultInfo> getResultInfos() {
	// Without uploaded extra data there are no additional result columns.
	if (extra == null) {
		return Collections.emptyList();
	}

	final List<ResultInfo> infos = new ArrayList<>();
	for (int columnIndex = 0; columnIndex < format.size(); columnIndex++) {
		// Only columns marked as EXTRA in the upload format contribute a result column.
		if (format.get(columnIndex).equals(FORMAT_EXTRA)) {
			// NOTE(review): the column label is taken from the first row of the
			// uploaded values — presumably the header row; confirm against the upload format.
			final String columnLabel = values[0][columnIndex];
			infos.add(new SimpleResultInfo(columnLabel, new ResultType.ListT(ResultType.StringT.INSTANCE)));
		}
	}
	return infos;
}
Usage example of com.bakdata.conquery.models.query.resultinfo.ResultInfo in the conquery project by bakdata.
Taken from the class ResultArrowProcessor, method getArrowResult:
/**
 * Streams an execution's result as an Arrow file/stream response.
 * Authorizes the subject, verifies the execution yields a single table,
 * then renders id columns and result columns through the given writer.
 */
public static <E extends ManagedExecution<?> & SingleTableResult> Response getArrowResult(Function<OutputStream, Function<VectorSchemaRoot, ArrowWriter>> writerProducer, Subject subject, E exec, Dataset dataset, DatasetRegistry datasetRegistry, boolean pretty, String fileExtension, MediaType mediaType, ConqueryConfig config) {
	final Namespace namespace = datasetRegistry.get(dataset.getId());
	ConqueryMDC.setLocation(subject.getName());
	log.info("Downloading results for {} on dataset {}", exec, dataset);
	// Authorization: dataset read+download, plus read on the execution itself
	subject.authorize(dataset, Ability.READ);
	subject.authorize(dataset, Ability.DOWNLOAD);
	subject.authorize(exec, Ability.READ);
	// Check if subject is permitted to download on all datasets that were referenced by the query
	authorizeDownloadDatasets(subject, exec);
	// Arrow output requires a single table: either a plain query or a form with exactly one sub-query
	if (!(exec instanceof ManagedQuery || (exec instanceof ManagedForm && ((ManagedForm) exec).getSubQueries().size() == 1))) {
		return Response.status(HttpStatus.SC_UNPROCESSABLE_ENTITY, "Execution result is not a single Table").build();
	}
	// Get the locale extracted by the LocaleFilter
	IdPrinter idPrinter = config.getFrontend().getQueryUpload().getIdPrinter(subject, exec, namespace);
	final Locale locale = I18n.LOCALE.get();
	PrintSettings settings = new PrintSettings(pretty, locale, datasetRegistry, config, idPrinter::createId);
	// Collect ResultInfos for id columns and result columns
	final List<ResultInfo> resultInfosId = config.getFrontend().getQueryUpload().getIdResultInfos();
	final List<ResultInfo> resultInfosExec = exec.getResultInfos();
	// Lazy rendering: results are streamed batch-wise into the response body
	StreamingOutput out = output -> renderToStream(writerProducer.apply(output), settings, config.getArrow().getBatchSize(), resultInfosId, resultInfosExec, exec.streamResults());
	return makeResponseWithFileName(out, exec.getLabelWithoutAutoLabelSuffix(), fileExtension, mediaType, ResultUtil.ContentDispositionOption.ATTACHMENT);
}
Aggregations