Use of com.bakdata.conquery.models.datasets.concepts.Connector in project conquery by bakdata.
The class FilterResolutionExactTest, method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // read test specification
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();

    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));

    CSVConfig csvConf = conquery.getConfig().getCsv();
    test.importRequiredData(conquery);

    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();

    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();

    // Write the search csv content (the `lines` field of this test class) to a tmp file.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);

    filter.setSearchType(FilterSearch.FilterSearchType.EXACT);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));

    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();

    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());

    // from csv
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "aaa", "unknown"));

        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }

    // from column values
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));

        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
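The `lines` array written to the temp file is a field of the test class that the snippet does not show. A plausible definition, inferred from the assertions here and in the prefix variant below (these values are an assumption, not the project's actual fixture):

    // Hypothetical search csv content, consistent with the assertions above.
    private final String[] lines = {"HEADER", "a", "aaa", "aab", "b"};

With FilterSearchType.EXACT, resolveFilterValues only matches values that occur verbatim in the search source, so "a" and "aaa" resolve while "unknown" is reported back in getUnknownCodes().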
Use of com.bakdata.conquery.models.datasets.concepts.Connector in project conquery by bakdata.
The class FilterResolutionPrefixTest, method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // read test specification
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();

    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));

    test.importRequiredData(conquery);
    CSVConfig csvConf = conquery.getConfig().getCsv();

    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();

    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();

    // Write the search csv content (the `lines` field of this test class) to a tmp file.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);

    filter.setSearchType(FilterSearch.FilterSearchType.PREFIX);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));

    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();

    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());

    // from csv
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "unknown"));

        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa", "aab");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }

    // from column values
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));

        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
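The only difference to the exact-match test above is the search type: with FilterSearchType.PREFIX, the single query "a" expands to every entry in the search csv that starts with "a" ("a", "aaa", "aab"), while values that only exist in the column data ("f") still resolve exactly as before.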
Use of com.bakdata.conquery.models.datasets.concepts.Connector in project conquery by bakdata.
The class FilterTest, method importConcepts.
private void importConcepts(StandaloneSupport support) throws JSONException, IOException {
    Dataset dataset = support.getDataset();

    concept = new TreeConcept();
    concept.setLabel("concept");
    concept.setValidator(support.getValidator());
    concept.setDataset(support.getDataset());

    rawConnector.put("name", "connector");
    rawConnector.put("table", "table");

    ((ObjectNode) rawConnector.get("filters")).put("name", "filter");

    connector = parseSubTree(support, rawConnector, ConceptTreeConnector.class, conn -> conn.setConcept(concept));

    concept.setConnectors(Collections.singletonList((ConceptTreeConnector) connector));
    support.getDatasetsProcessor().addConcept(dataset, concept);
}
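`concept`, `connector` and `rawConnector` are fields of FilterTest; `rawConnector` is a Jackson ObjectNode carrying the connector part of the raw test spec. A minimal sketch, assuming Jackson's ObjectMapper, of a node with the shape the method expects (the real field is populated from the test's JSON spec, and "SOME_FILTER" is a placeholder):

    ObjectMapper mapper = new ObjectMapper();
    ObjectNode rawConnector = mapper.createObjectNode();
    // The "filters" subtree must already exist, since importConcepts only puts the name into it.
    rawConnector.set("filters", mapper.createObjectNode().put("type", "SOME_FILTER"));

The cast `(ObjectNode) rawConnector.get("filters")` shows that the spec carries a single filter object here, not an array.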
Use of com.bakdata.conquery.models.datasets.concepts.Connector in project conquery by bakdata.
The class TableExportQuery, method resolve.
@Override
public void resolve(QueryResolveContext context) {
    query.resolve(context);

    // First is dates
    AtomicInteger currentPosition = new AtomicInteger(1);

    positions = new HashMap<>();

    Map<SecondaryIdDescription, Integer> secondaryIdPositions = new HashMap<>();

    // SecondaryIds are pulled to the front and grouped over all tables
    tables.stream()
          .map(cqUnfilteredTable -> cqUnfilteredTable.getTable().getTable().getColumns())
          .flatMap(Arrays::stream)
          .map(Column::getSecondaryId)
          .filter(Objects::nonNull)
          .distinct()
          .sorted(Comparator.comparing(SecondaryIdDescription::getLabel))
          .forEach(secondaryId -> secondaryIdPositions.put(secondaryId, currentPosition.getAndIncrement()));

    for (CQUnfilteredTable table : tables) {
        Connector connector = table.getTable();

        final Column validityDateColumn = findValidityDateColumn(connector, table.getDateColumn());

        if (validityDateColumn != null) {
            positions.putIfAbsent(validityDateColumn, 0);
        }

        // Set column positions, set SecondaryId positions to precomputed ones.
        for (Column column : connector.getTable().getColumns()) {
            positions.computeIfAbsent(column, col -> col.getSecondaryId() != null
                                                     ? secondaryIdPositions.get(col.getSecondaryId())
                                                     : currentPosition.getAndIncrement());
        }
    }

    resultInfos = createResultInfos(currentPosition.get(), secondaryIdPositions, positions);
}
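The resulting column layout puts the validity date at position 0, the distinct secondary ids (sorted by label, shared across all exported tables) at positions 1..k, and every remaining column at the next free position. For two tables sharing one secondary id, a hypothetical layout would be:

    // pos 0:    validity date
    // pos 1:    secondaryId (grouped over both tables)
    // pos 2..3: remaining columns of the first table
    // pos 4..n: remaining columns of the second table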
Use of com.bakdata.conquery.models.datasets.concepts.Connector in project conquery by bakdata.
The class AdminDatasetProcessor, method deleteTable.
/**
 * Deletes a table, unless it has dependent concepts and deletion is not forced.
 * Returns the ids of the dependent concepts either way.
 */
public synchronized List<ConceptId> deleteTable(Table table, boolean force) {
    final Namespace namespace = datasetRegistry.get(table.getDataset().getId());

    final List<Concept<?>> dependentConcepts = namespace.getStorage().getAllConcepts().stream()
                                                        .flatMap(c -> c.getConnectors().stream())
                                                        .filter(con -> con.getTable().equals(table))
                                                        .map(Connector::getConcept)
                                                        .collect(Collectors.toList());

    if (force || dependentConcepts.isEmpty()) {
        for (Concept<?> concept : dependentConcepts) {
            deleteConcept(concept);
        }

        namespace.getStorage().getAllImports().stream()
                 .filter(imp -> imp.getTable().equals(table))
                 .forEach(this::deleteImport);

        namespace.getStorage().removeTable(table.getId());
        namespace.sendToAll(new RemoveTable(table));
    }

    return dependentConcepts.stream().map(Concept::getId).collect(Collectors.toList());
}
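Because the dependent concept ids are returned even when nothing was deleted, a caller can report exactly what blocks the deletion. A minimal sketch of a hypothetical caller (the exception handling is an assumption, not conquery's actual admin API):

    List<ConceptId> dependents = processor.deleteTable(table, /* force */ false);
    if (!dependents.isEmpty()) {
        // Nothing was deleted; surface the blocking concepts to the admin.
        throw new WebApplicationException("Table is still in use by: " + dependents, Response.Status.CONFLICT);
    }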