use of com.bakdata.conquery.apiv1.frontend.FEValue in project conquery by bakdata.
the class FilterResolutionExactTest method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // read test specification
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));

    CSVConfig csvConf = conquery.getConfig().getCsv();
    test.importRequiredData(conquery);

    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();

    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();

    // Copy search csv from resources to tmp folder.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);

    filter.setSearchType(FilterSearch.FilterSearchType.EXACT);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));

    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();

    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());

    // from csv
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "aaa", "unknown"));

        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }

    // from column values
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));

        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
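The test writes a temporary search CSV from a `lines` field that is not part of this excerpt (the prefix variant below uses the same field). A minimal sketch of what such a field could look like, assuming a single HEADER column whose values are consistent with the assertions in both tests; the concrete contents in the conquery repository may differ:

// Hypothetical example only: the real `lines` field is declared in the test class and is not shown here.
private final String[] lines = new String[]{
        "HEADER",   // column header referenced by the FilterTemplate
        "a",
        "aaa",
        "aab",
        "b",
        "baaa"
};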
use of com.bakdata.conquery.apiv1.frontend.FEValue in project conquery by bakdata.
the class FilterResolutionPrefixTest method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    // read test specification
    String testJson = In.resource("/tests/query/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY/MULTI_SELECT_DATE_RESTRICTION_OR_CONCEPT_QUERY.test.json").withUTF8().readAll();
    DatasetId dataset = conquery.getDataset().getId();
    ConqueryTestSpec test = JsonIntegrationTest.readJson(dataset, testJson);
    ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));

    test.importRequiredData(conquery);
    CSVConfig csvConf = conquery.getConfig().getCsv();

    FilterSearch.updateSearch(conquery.getNamespace().getNamespaces(), Collections.singleton(conquery.getNamespace().getDataset()), conquery.getDatasetsProcessor().getJobManager(), csvConf);
    conquery.waitUntilWorkDone();

    Concept<?> concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next();
    Connector connector = concept.getConnectors().iterator().next();
    AbstractSelectFilter<?> filter = (AbstractSelectFilter<?>) connector.getFilters().iterator().next();

    // Copy search csv from resources to tmp folder.
    final Path tmpCSv = Files.createTempFile("conquery_search", "csv");
    Files.write(tmpCSv, String.join(csvConf.getLineSeparator(), lines).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE);

    filter.setSearchType(FilterSearch.FilterSearchType.PREFIX);
    filter.setTemplate(new FilterTemplate(tmpCSv.toString(), Arrays.asList("HEADER"), "HEADER", "", ""));

    FilterSearch.createSourceSearch(filter, csvConf);
    assertThat(filter.getSourceSearch()).isNotNull();

    ConceptsProcessor processor = new ConceptsProcessor(conquery.getNamespace().getNamespaces());

    // from csv
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "unknown"));

        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("a", "aaa", "aab");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }

    // from column values
    {
        ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("f", "unknown"));

        // check the resolved values
        assertThat(resolved.getResolvedFilter().getValue().stream().map(FEValue::getValue)).containsExactlyInAnyOrder("f");
        assertThat(resolved.getUnknownCodes()).containsExactlyInAnyOrder("unknown");
    }
}
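The only differences between the two tests above are the configured FilterSearch.FilterSearchType (EXACT vs. PREFIX) and the expected resolutions. A standalone illustration (plain Java, not conquery's scorer) of why the prefix variant resolves the single term "a" to "a", "aaa" and "aab", while exact matching only accepts literal hits for each uploaded term:

import java.util.List;
import java.util.stream.Collectors;

public class MatchModes {
    // Keep only values that equal the term literally.
    static List<String> exact(List<String> values, String term) {
        return values.stream().filter(term::equals).collect(Collectors.toList());
    }

    // Keep all values that start with the term.
    static List<String> prefix(List<String> values, String term) {
        return values.stream().filter(v -> v.startsWith(term)).collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> csvValues = List.of("a", "aaa", "aab", "b", "baaa");
        System.out.println(exact(csvValues, "a"));   // [a]
        System.out.println(prefix(csvValues, "a"));  // [a, aaa, aab]
    }
}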
use of com.bakdata.conquery.apiv1.frontend.FEValue in project conquery by bakdata.
the class ConceptsProcessor method resolveFilterValues.
/**
 * Search for all search terms at once, with stricter scoring.
 * The user will upload a file and expect only well-corresponding resolutions.
 */
public ResolvedConceptsResult resolveFilterValues(AbstractSelectFilter<?> filter, List<String> searchTerms) {
    // search in the full text engine
    Set<String> searchResult = createSourceSearchResult(filter.getSourceSearch(), searchTerms, OptionalInt.empty(), filter.getSearchType()::score)
            .stream()
            .map(FEValue::getValue)
            .collect(Collectors.toSet());

    Set<String> openSearchTerms = new HashSet<>(searchTerms);
    openSearchTerms.removeAll(searchResult);

    // Iterate over all unresolved search terms. Gather all that match labels into searchResults. Keep the unresolvable ones.
    for (Iterator<String> it = openSearchTerms.iterator(); it.hasNext(); ) {
        String searchTerm = it.next();

        // Test if any of the values occurs directly in the filter's values or their labels (for when we don't have a provided file).
        if (filter.getValues().contains(searchTerm)) {
            searchResult.add(searchTerm);
            it.remove();
        }
        else {
            String matchingValue = filter.getLabels().inverse().get(searchTerm);
            if (matchingValue != null) {
                searchResult.add(matchingValue);
                it.remove();
            }
        }
    }

    return new ResolvedConceptsResult(
            null,
            new ResolvedFilterResult(
                    filter.getConnector().getId(),
                    filter.getId(),
                    searchResult.stream().map(v -> new FEValue(filter.getLabelFor(v), v)).collect(Collectors.toList())
            ),
            new ArrayList<>(openSearchTerms)
    );
}
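A minimal sketch of how a caller might consume the result of resolveFilterValues, using only the getters already visible in the test snippets above (getResolvedFilter().getValue() and getUnknownCodes()); the variable names and the log statement are illustrative:

// Sketch: resolving user-uploaded codes against a select filter and reporting what could not be matched.
// `processor` and `filter` are assumed to be set up as in the tests above.
ResolvedConceptsResult resolved = processor.resolveFilterValues(filter, List.of("a", "aaa", "unknown"));

List<String> resolvedValues = resolved.getResolvedFilter()
        .getValue()
        .stream()
        .map(FEValue::getValue)
        .collect(Collectors.toList());

log.info("Resolved {} values, {} codes could not be matched: {}",
         resolvedValues.size(), resolved.getUnknownCodes().size(), resolved.getUnknownCodes());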
use of com.bakdata.conquery.apiv1.frontend.FEValue in project conquery by bakdata.
the class ConceptsProcessor method autocompleteTextFilter.
public AutoCompleteResult autocompleteTextFilter(AbstractSelectFilter<?> filter, Optional<String> maybeText, OptionalInt pageNumberOpt, OptionalInt itemsPerPageOpt) {
    final int pageNumber = pageNumberOpt.orElse(0);
    final int itemsPerPage = itemsPerPageOpt.orElse(50);
    final String text = maybeText.orElse("");

    Preconditions.checkArgument(pageNumber >= 0, "Page number must be 0 or a positive integer.");
    Preconditions.checkArgument(itemsPerPage > 1, "Must have at least one item per page.");

    log.trace("Searching for the term \"{}\". (Page = {}, Items = {})", text, pageNumber, itemsPerPage);

    List<FEValue> fullResult = null;
    try {
        fullResult = searchCache.get(Pair.of(filter, text));
    }
    catch (ExecutionException e) {
        log.warn("Failed to search for \"{}\".", text, (Throwable) (log.isTraceEnabled() ? e : null));
        return new AutoCompleteResult(Collections.emptyList(), 0);
    }

    int startIncl = Math.min(itemsPerPage * pageNumber, fullResult.size());
    int endExcl = Math.min(startIncl + itemsPerPage, fullResult.size());

    log.trace("Preparing subresult for search term \"{}\" in the index range [{}-{})", text, startIncl, endExcl);

    return new AutoCompleteResult(fullResult.subList(startIncl, endExcl), fullResult.size());
}
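Because both indices are clamped to the result size with Math.min, an out-of-range page yields an empty sublist rather than an exception. A quick worked example of that arithmetic (plain Java, independent of conquery):

// Worked example of the clamped paging arithmetic used above.
int itemsPerPage = 50;
int fullResultSize = 120;

for (int pageNumber : new int[]{0, 1, 2, 3}) {
    int startIncl = Math.min(itemsPerPage * pageNumber, fullResultSize);
    int endExcl = Math.min(startIncl + itemsPerPage, fullResultSize);
    System.out.printf("page %d -> [%d-%d)%n", pageNumber, startIncl, endExcl);
}
// page 0 -> [0-50), page 1 -> [50-100), page 2 -> [100-120), page 3 -> [120-120) (empty)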
use of com.bakdata.conquery.apiv1.frontend.FEValue in project conquery by bakdata.
the class ConceptsProcessor method autocompleteTextFilter.
/**
 * Autocompletion for search terms, i.e. for the values of an {@link AbstractSelectFilter}.
 * Used by the search cache to load missing items.
 */
private static List<FEValue> autocompleteTextFilter(AbstractSelectFilter<?> filter, String text) {
    if (Strings.isNullOrEmpty(text)) {
        // If no text is provided, we just list the values.
        // The filter might not have a source search (since none might be defined).
        // TODO unify these code paths, they are quite the mess, maybe also create a source search for key-value
        final Stream<FEValue> fromSearch = filter.getSourceSearch() == null
                ? Stream.empty()
                : filter.getSourceSearch().listItems().stream().map(item -> new FEValue(item.getLabel(), item.getValue(), item.getTemplateValues(), item.getOptionValue()));

        final Stream<FEValue> fromLabels = filter.getLabels().entrySet().stream().map(entry -> new FEValue(entry.getValue(), entry.getKey()));

        return Stream.concat(fromLabels, fromSearch).sorted().collect(Collectors.toList());
    }

    List<FEValue> result = new LinkedList<>();

    QuickSearch<FilterSearchItem> search = filter.getSourceSearch();
    if (search != null) {
        result = createSourceSearchResult(filter.getSourceSearch(), Collections.singletonList(text), OptionalInt.empty(), FilterSearch.FilterSearchType.CONTAINS::score);
    }

    String value = filter.getValueFor(text);
    if (value != null) {
        result.add(new FEValue(text, value));
    }

    return result;
}
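The `searchCache` consulted in the public method above behaves like a Guava LoadingCache (its get throws ExecutionException and it is keyed by a Pair of filter and text). A minimal sketch of how such a cache could be wired to this private loader, assuming Guava's CacheBuilder/CacheLoader and Apache Commons Lang's Pair; the actual field declaration in ConceptsProcessor may use different limits or expiry:

// Sketch only - assumed wiring, not necessarily the exact declaration in ConceptsProcessor.
// Assumed imports: com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache},
// org.apache.commons.lang3.tuple.Pair, java.util.concurrent.TimeUnit.
private final LoadingCache<Pair<AbstractSelectFilter<?>, String>, List<FEValue>> searchCache =
        CacheBuilder.newBuilder()
                    .expireAfterAccess(10, TimeUnit.MINUTES) // illustrative expiry
                    .build(new CacheLoader<Pair<AbstractSelectFilter<?>, String>, List<FEValue>>() {
                        @Override
                        public List<FEValue> load(Pair<AbstractSelectFilter<?>, String> key) {
                            // Delegate cache misses to the private autocomplete loader above.
                            return autocompleteTextFilter(key.getLeft(), key.getRight());
                        }
                    });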