Use of com.apple.foundationdb.record.query.expressions.QueryComponent in the project fdb-record-layer by FoundationDB.
From the class FilterVisitor, method postVisit:
/**
 * Rewrite a {@link RecordQueryFilterPlan} so that any filters which can be evaluated using only
 * the fields available from the underlying index are pushed below the record fetch, leaving the
 * remaining ("residual") filters above it. If no filter can be pushed down, or the child plan
 * cannot operate on partial records, the plan is returned unchanged.
 *
 * @param recordQueryPlan the plan being visited
 * @return the (possibly rewritten) plan
 */
@Nonnull
@Override
public RecordQueryPlan postVisit(@Nonnull RecordQueryPlan recordQueryPlan) {
    if (recordQueryPlan instanceof RecordQueryFilterPlan) {
        final RecordQueryFilterPlan filterPlan = (RecordQueryFilterPlan) recordQueryPlan;
        final List<QueryComponent> filters = filterPlan.getFilters();
        // Reuse the already-cast filterPlan rather than re-casting recordQueryPlan.
        final AvailableFields availableFields = availableFields(filterPlan.getInnerPlan());
        // Partition the filters according to whether they can be evaluated using just the fields from the index or
        // if they need a full record.
        final List<QueryComponent> indexFilters = Lists.newArrayListWithCapacity(filters.size());
        final List<QueryComponent> residualFilters = Lists.newArrayListWithCapacity(filters.size());
        final Set<KeyExpression> allReferencedFields = new HashSet<>();
        partitionFilters(filters, availableFields, indexFilters, residualFilters, allReferencedFields);
        Verify.verify(indexFilters.size() + residualFilters.size() == filters.size());
        if (indexFilters.isEmpty()) {
            // Nothing can be evaluated from index fields alone; no rewrite is possible.
            return recordQueryPlan;
        }
        @Nullable RecordQueryPlan removedFetchPlan = removeIndexFetch(filterPlan.getChild(), allReferencedFields);
        if (removedFetchPlan == null) {
            // The child plan cannot produce the referenced fields without a full fetch.
            return recordQueryPlan;
        }
        // Apply the index-only filters below the fetch, then re-apply residual filters above it.
        recordQueryPlan = new RecordQueryFetchFromPartialRecordPlan(new RecordQueryFilterPlan(removedFetchPlan, indexFilters), TranslateValueFunction.unableToTranslate());
        if (!residualFilters.isEmpty()) {
            recordQueryPlan = new RecordQueryFilterPlan(recordQueryPlan, residualFilters);
        }
    }
    return recordQueryPlan;
}
Use of com.apple.foundationdb.record.query.expressions.QueryComponent in the project fdb-record-layer by FoundationDB.
From the class TextIndexTest, method queryComplexDocumentsWithOr:
/**
 * Run a query combining the given OR filter with an equality on {@code group}, expecting a
 * grouped text index scan somewhere in the resulting plan.
 */
@Nonnull
private List<Tuple> queryComplexDocumentsWithOr(@Nonnull OrComponent orFilter, long group, int planHash) throws InterruptedException, ExecutionException {
    final String groupRange = "[[" + group + "],[" + group + "]]";
    final Matcher<RecordQueryPlan> scanMatcher = textIndexScan(allOf(
            indexName(COMPLEX_TEXT_BY_GROUP.getName()),
            groupingBounds(allOf(notNullValue(), hasTupleString(groupRange))),
            textComparison(any(Comparisons.TextComparison.class))));
    final QueryComponent groupedFilter = Query.and(orFilter, Query.field("group").equalsValue(group));
    return queryComplexDocumentsWithPlan(groupedFilter, planHash, descendant(scanMatcher));
}
Use of com.apple.foundationdb.record.query.expressions.QueryComponent in the project fdb-record-layer by FoundationDB.
From the class TextIndexTest, method queryScanEquivalence:
/**
 * Generate random documents and then make sure that querying them using the index
 * produces the same result as performing a full scan of all records.
 */
@MethodSource("indexArguments")
@ParameterizedTest
public void queryScanEquivalence(@Nonnull Index index) throws Exception {
    final Random r = new Random(0xba5eba1L + index.getName().hashCode());
    final int recordCount = 100;
    final int recordBatch = 25;
    final int queryCount = 25;
    final List<String> lexicon = getStandardLexicon();
    TextTokenizerRegistryImpl.instance().register(FILTERING_TOKENIZER);
    final TextTokenizer tokenizer = TextIndexMaintainer.getTokenizer(index);
    final RecordMetaDataHook hook = metaDataBuilder -> {
        metaDataBuilder.removeIndex(TextIndexTestUtils.SIMPLE_DEFAULT_NAME);
        metaDataBuilder.addIndex(SIMPLE_DOC, index);
    };
    // Log the seed so a failing run can be reproduced deterministically.
    long seed = r.nextLong();
    LOGGER.info(KeyValueLogMessage.of("initializing random number generator", TestLogMessageKeys.SEED, seed));
    r.setSeed(seed);
    // Save the random documents in batches, one transaction per batch.
    for (int i = 0; i < recordCount; i += recordBatch) {
        List<SimpleDocument> records = getRandomRecords(r, recordBatch, lexicon);
        LOGGER.info(KeyValueLogMessage.of("creating and saving random records", TestLogMessageKeys.BATCH_SIZE, recordBatch));
        try (FDBRecordContext context = openContext()) {
            openRecordStore(context, hook);
            records.forEach(recordStore::saveRecord);
            commit(context);
        }
    }
    double[] proportions = getZipfProportions(lexicon);
    long totalScanningTime = 0;
    long totalQueryingTime = 0;
    long totalResults = 0;
    for (int i = 0; i < queryCount; i++) {
        // Generate a random text query
        List<String> tokens = getRandomWords(r, lexicon, proportions, 6, 3);
        String tokenString = String.join(" ", tokens);
        // Pick the predicate type at random so every comparison kind gets coverage.
        double filterChoice = r.nextDouble();
        final QueryComponent filter;
        if (filterChoice < 0.2) {
            filter = Query.field("text").text(tokenizer.getName()).containsAll(tokenString);
        } else if (filterChoice < 0.4) {
            filter = Query.field("text").text(tokenizer.getName()).containsAny(tokenString);
        } else if (filterChoice < 0.6) {
            filter = Query.field("text").text(tokenizer.getName()).containsPhrase(tokenString);
        } else if (filterChoice < 0.8) {
            int maxDistance = r.nextInt(10) + tokens.size();
            filter = Query.field("text").text(tokenizer.getName()).containsAll(tokenString, maxDistance);
        } else if (filterChoice < 0.9) {
            filter = Query.field("text").text(tokenizer.getName()).containsAnyPrefix(tokenString);
        } else if (filterChoice < 0.95) {
            filter = Query.field("text").text(tokenizer.getName()).containsAllPrefixes(tokenString);
        } else {
            if (tokens.isEmpty()) {
                continue;
            }
            // Choose the first non-empty token from the iterator
            Iterator<? extends CharSequence> tokenIterator = tokenizer.tokenize(tokenString, tokenizer.getMaxVersion(), TextTokenizer.TokenizerMode.QUERY);
            String firstToken = null;
            while (tokenIterator.hasNext()) {
                String nextToken = tokenIterator.next().toString();
                if (!nextToken.isEmpty()) {
                    firstToken = nextToken;
                    break;
                }
            }
            if (firstToken == null) {
                continue;
            }
            // Take a random non-empty prefix of the token (at least one character).
            int prefixEnd;
            if (firstToken.length() > 1) {
                prefixEnd = r.nextInt(firstToken.length() - 1) + 1;
            } else {
                prefixEnd = 1;
            }
            filter = Query.field("text").text(tokenizer.getName()).containsPrefix(firstToken.substring(0, prefixEnd));
        }
        LOGGER.info(KeyValueLogMessage.of("generated random filter", TestLogMessageKeys.ITERATION, i, LogMessageKeys.FILTER, filter));
        // Manual scan all of the records
        long startTime = System.nanoTime();
        final Set<Long> manualRecordIds = performQueryWithRecordStoreScan(hook, filter);
        long endTime = System.nanoTime();
        LOGGER.info(KeyValueLogMessage.of("manual scan completed", TestLogMessageKeys.SCAN_MILLIS, TimeUnit.MILLISECONDS.convert(endTime - startTime, TimeUnit.NANOSECONDS)));
        totalScanningTime += endTime - startTime;
        // Generate a query and use the index
        startTime = System.nanoTime();
        final Set<Long> queryRecordIds = performQueryWithIndexScan(hook, index, filter);
        endTime = System.nanoTime();
        LOGGER.info(KeyValueLogMessage.of("query completed", TestLogMessageKeys.SCAN_MILLIS, TimeUnit.MILLISECONDS.convert(endTime - startTime, TimeUnit.NANOSECONDS)));
        totalQueryingTime += endTime - startTime;
        if (!manualRecordIds.equals(queryRecordIds)) {
            // Compute the symmetric difference for the diagnostic log message.
            Set<Long> onlyManual = new HashSet<>(manualRecordIds);
            onlyManual.removeAll(queryRecordIds);
            Set<Long> onlyQuery = new HashSet<>(queryRecordIds);
            // Bug fix: this previously removed manualRecordIds from onlyManual (emptying it and
            // leaving onlyQuery as a full copy), so the logged counts were meaningless.
            onlyQuery.removeAll(manualRecordIds);
            LOGGER.warn(KeyValueLogMessage.of("results did not match", LogMessageKeys.FILTER, filter, TestLogMessageKeys.MANUAL_RESULT_COUNT, manualRecordIds.size(), TestLogMessageKeys.QUERY_RESULT_COUNT, queryRecordIds.size(), TestLogMessageKeys.ONLY_MANUAL_COUNT, onlyManual.size(), TestLogMessageKeys.ONLY_QUERY_COUNT, onlyQuery.size()));
        }
        assertEquals(manualRecordIds, queryRecordIds);
        LOGGER.info(KeyValueLogMessage.of("results matched", LogMessageKeys.FILTER, filter, TestLogMessageKeys.RESULT_COUNT, manualRecordIds.size()));
        totalResults += queryRecordIds.size();
    }
    LOGGER.info(KeyValueLogMessage.of("test completed", TestLogMessageKeys.TOTAL_SCAN_MILLIS, TimeUnit.MILLISECONDS.convert(totalScanningTime, TimeUnit.NANOSECONDS), TestLogMessageKeys.TOTAL_QUERY_MILLIS, TimeUnit.MILLISECONDS.convert(totalQueryingTime, TimeUnit.NANOSECONDS), TestLogMessageKeys.TOTAL_RESULT_COUNT, totalResults));
}
Use of com.apple.foundationdb.record.query.expressions.QueryComponent in the project fdb-record-layer by FoundationDB.
From the class TextIndexTest, method querySimpleDocumentsMaybeCovering:
/**
 * Plan the same two text filters (a phrase filter and a prefix filter) with different sets of
 * required result fields, verifying when the planner produces a covering index scan, when it
 * must fetch full records, and how additional non-index filters are applied. Removed leftover
 * debug {@code System.out.println} calls and corrected the plan-description comments, which
 * were swapped relative to the plans actually asserted.
 */
@Test
public void querySimpleDocumentsMaybeCovering() throws Exception {
    final List<SimpleDocument> documents = TextIndexTestUtils.toSimpleDocuments(Arrays.asList(TextSamples.ANGSTROM, TextSamples.AETHELRED, TextSamples.ROMEO_AND_JULIET_PROLOGUE, TextSamples.FRENCH));
    try (FDBRecordContext context = openContext()) {
        openRecordStore(context);
        documents.forEach(recordStore::saveRecord);
        final QueryComponent filter1 = Query.field("text").text().containsPhrase("civil blood makes civil hands unclean");
        final Comparisons.Comparison comparison1 = new Comparisons.TextComparison(Comparisons.Type.TEXT_CONTAINS_PHRASE, "civil blood makes civil hands unclean", null, DefaultTextTokenizer.NAME);
        final QueryComponent filter2 = Query.field("text").text().containsPrefix("th");
        final Comparisons.Comparison comparison2 = new Comparisons.TextComparison(Comparisons.Type.TEXT_CONTAINS_PREFIX, Collections.singletonList("th"), null, DefaultTextTokenizer.NAME);
        // Query for full records
        RecordQuery query = RecordQuery.newBuilder().setRecordType(SIMPLE_DOC).setFilter(filter1).build();
        // TextIndex(SimpleDocument$text null, TEXT_CONTAINS_PHRASE civil blood makes civil hands unclean, null)
        RecordQueryPlan plan = planner.plan(query);
        assertThat(plan, textIndexScan(allOf(indexName(TextIndexTestUtils.SIMPLE_DEFAULT_NAME), textComparison(equalTo(comparison1)))));
        assertEquals(814602491, plan.planHash(PlanHashable.PlanHashKind.LEGACY));
        assertEquals(1101247748, plan.planHash(PlanHashable.PlanHashKind.FOR_CONTINUATION));
        assertEquals(-1215587201, plan.planHash(PlanHashable.PlanHashKind.STRUCTURAL_WITHOUT_LITERALS));
        List<Long> primaryKeys = recordStore.executeQuery(plan).map(FDBQueriedRecord::getPrimaryKey).map(t -> t.getLong(0)).asList().get();
        assertEquals(Collections.singletonList(2L), primaryKeys);
        query = RecordQuery.newBuilder().setRecordType(SIMPLE_DOC).setFilter(filter2).build();
        // TextIndex(SimpleDocument$text null, TEXT_CONTAINS_PREFIX [th], null) | UnorderedPrimaryKeyDistinct()
        plan = planner.plan(query);
        assertThat(plan, primaryKeyDistinct(textIndexScan(allOf(indexName(TextIndexTestUtils.SIMPLE_DEFAULT_NAME), textComparison(equalTo(comparison2))))));
        assertEquals(1032989149, plan.planHash(PlanHashable.PlanHashKind.LEGACY));
        assertEquals(-1513880131, plan.planHash(PlanHashable.PlanHashKind.FOR_CONTINUATION));
        assertEquals(-1570861632, plan.planHash(PlanHashable.PlanHashKind.STRUCTURAL_WITHOUT_LITERALS));
        primaryKeys = recordStore.executeQuery(plan).map(FDBQueriedRecord::getPrimaryKey).map(t -> t.getLong(0)).asList().get();
        assertEquals(Arrays.asList(0L, 1L, 2L, 3L), primaryKeys);
        // Query for just primary key
        query = RecordQuery.newBuilder().setRecordType(SIMPLE_DOC).setRequiredResults(Collections.singletonList(field("doc_id"))).setFilter(filter1).build();
        // Covering(TextIndex(SimpleDocument$text null, TEXT_CONTAINS_PHRASE civil blood makes civil hands unclean, null) -> [doc_id: KEY[1]])
        plan = planner.plan(query);
        assertThat(plan, coveringIndexScan(textIndexScan(allOf(indexName(TextIndexTestUtils.SIMPLE_DEFAULT_NAME), textComparison(equalTo(comparison1))))));
        assertEquals(814602491, plan.planHash(PlanHashable.PlanHashKind.LEGACY));
        assertEquals(-786467136, plan.planHash(PlanHashable.PlanHashKind.FOR_CONTINUATION));
        assertEquals(1191665211, plan.planHash(PlanHashable.PlanHashKind.STRUCTURAL_WITHOUT_LITERALS));
        primaryKeys = recordStore.executeQuery(plan).map(FDBQueriedRecord::getPrimaryKey).map(t -> t.getLong(0)).asList().get();
        assertEquals(Collections.singletonList(2L), primaryKeys);
        query = RecordQuery.newBuilder().setRecordType(SIMPLE_DOC).setRequiredResults(Collections.singletonList(field("doc_id"))).setFilter(filter2).build();
        // Covering(TextIndex(SimpleDocument$text null, TEXT_CONTAINS_PREFIX [th], null) -> [doc_id: KEY[1]]) | UnorderedPrimaryKeyDistinct()
        plan = planner.plan(query);
        assertThat(plan, primaryKeyDistinct(coveringIndexScan(textIndexScan(allOf(indexName(TextIndexTestUtils.SIMPLE_DEFAULT_NAME), textComparison(equalTo(comparison2)))))));
        assertEquals(1032989149, plan.planHash(PlanHashable.PlanHashKind.LEGACY));
        assertEquals(893372281, plan.planHash(PlanHashable.PlanHashKind.FOR_CONTINUATION));
        assertEquals(836390780, plan.planHash(PlanHashable.PlanHashKind.STRUCTURAL_WITHOUT_LITERALS));
        primaryKeys = recordStore.executeQuery(plan).map(FDBQueriedRecord::getPrimaryKey).map(t -> t.getLong(0)).asList().get();
        assertEquals(Arrays.asList(0L, 1L, 2L, 3L), primaryKeys);
        // Query for primary key but also have a filter on something outside the index
        query = RecordQuery.newBuilder().setRecordType(SIMPLE_DOC).setRequiredResults(Collections.singletonList(field("doc_id"))).setFilter(Query.and(filter1, Query.field("group").equalsValue(0L))).build();
        // TextIndex(SimpleDocument$text null, TEXT_CONTAINS_PHRASE civil blood makes civil hands unclean, null) | group EQUALS 0
        plan = planner.plan(query);
        assertThat(plan, filter(Query.field("group").equalsValue(0L), textIndexScan(allOf(indexName(TextIndexTestUtils.SIMPLE_DEFAULT_NAME), textComparison(equalTo(comparison1))))));
        assertEquals(-1328921799, plan.planHash(PlanHashable.PlanHashKind.LEGACY));
        assertEquals(390154904, plan.planHash(PlanHashable.PlanHashKind.FOR_CONTINUATION));
        assertEquals(-611539723, plan.planHash(PlanHashable.PlanHashKind.STRUCTURAL_WITHOUT_LITERALS));
        primaryKeys = recordStore.executeQuery(plan).map(FDBQueriedRecord::getPrimaryKey).map(t -> t.getLong(0)).asList().get();
        assertEquals(Collections.singletonList(2L), primaryKeys);
        query = RecordQuery.newBuilder().setRecordType(SIMPLE_DOC).setRequiredResults(Collections.singletonList(field("doc_id"))).setFilter(Query.and(filter2, Query.field("group").equalsValue(0L))).build();
        // Fetch(Covering(TextIndex(SimpleDocument$text null, TEXT_CONTAINS_PREFIX [th], null) -> [doc_id: KEY[1]]) | UnorderedPrimaryKeyDistinct()) | group EQUALS 0
        plan = planner.plan(query);
        assertThat(plan, filter(Query.field("group").equalsValue(0L), fetch(primaryKeyDistinct(coveringIndexScan(textIndexScan(allOf(indexName(TextIndexTestUtils.SIMPLE_DEFAULT_NAME), textComparison(equalTo(comparison2)))))))));
        assertEquals(792432470, plan.planHash(PlanHashable.PlanHashKind.LEGACY));
        assertEquals(-879354804, plan.planHash(PlanHashable.PlanHashKind.FOR_CONTINUATION));
        assertEquals(-545069279, plan.planHash(PlanHashable.PlanHashKind.STRUCTURAL_WITHOUT_LITERALS));
        primaryKeys = recordStore.executeQuery(plan).map(FDBQueriedRecord::getPrimaryKey).map(t -> t.getLong(0)).asList().get();
        assertEquals(Arrays.asList(0L, 2L), primaryKeys);
        // Query for the text field, which produces the first token that matches
        // Arguably, this should produce an error, but that requires a more sophisticated
        // check when trying to determine if the index covers the query
        final Descriptors.FieldDescriptor docIdDescriptor = SimpleDocument.getDescriptor().findFieldByNumber(SimpleDocument.DOC_ID_FIELD_NUMBER);
        final Descriptors.FieldDescriptor textDescriptor = SimpleDocument.getDescriptor().findFieldByNumber(SimpleDocument.TEXT_FIELD_NUMBER);
        query = RecordQuery.newBuilder().setRecordType(SIMPLE_DOC).setRequiredResults(Collections.singletonList(field("text"))).setFilter(filter1).build();
        // TextIndex(SimpleDocument$text null, TEXT_CONTAINS_PHRASE civil blood makes civil hands unclean, null)
        plan = planner.plan(query);
        assertThat(plan, textIndexScan(allOf(indexName(TextIndexTestUtils.SIMPLE_DEFAULT_NAME), textComparison(equalTo(comparison1)))));
        assertEquals(814602491, plan.planHash(PlanHashable.PlanHashKind.LEGACY));
        assertEquals(1101247748, plan.planHash(PlanHashable.PlanHashKind.FOR_CONTINUATION));
        assertEquals(-1215587201, plan.planHash(PlanHashable.PlanHashKind.STRUCTURAL_WITHOUT_LITERALS));
        List<Tuple> idTextTuples = recordStore.executeQuery(plan).map(record -> {
            final Object docId = record.getRecord().getField(docIdDescriptor);
            final Object text = record.getRecord().getField(textDescriptor);
            return Tuple.from(docId, text);
        }).asList().get();
        assertEquals(Collections.singletonList(Tuple.from(2L, TextSamples.ROMEO_AND_JULIET_PROLOGUE)), idTextTuples);
        query = RecordQuery.newBuilder().setRecordType(SIMPLE_DOC).setRequiredResults(Collections.singletonList(field("text"))).setFilter(filter2).build();
        // Fetch(Covering(TextIndex(SimpleDocument$text null, TEXT_CONTAINS_PREFIX [th], null) -> [doc_id: KEY[1]]) | UnorderedPrimaryKeyDistinct())
        plan = planner.plan(query);
        assertThat(plan, fetch(primaryKeyDistinct(coveringIndexScan(textIndexScan(allOf(indexName(TextIndexTestUtils.SIMPLE_DEFAULT_NAME), textComparison(equalTo(comparison2))))))));
        assertEquals(-1359010536, plan.planHash(PlanHashable.PlanHashKind.LEGACY));
        assertEquals(-1017914160, plan.planHash(PlanHashable.PlanHashKind.FOR_CONTINUATION));
        assertEquals(-1074895661, plan.planHash(PlanHashable.PlanHashKind.STRUCTURAL_WITHOUT_LITERALS));
        idTextTuples = recordStore.executeQuery(plan).map(record -> {
            final Object docId = record.getRecord().getField(docIdDescriptor);
            final Object text = record.getRecord().getField(textDescriptor);
            return Tuple.from(docId, text);
        }).asList().get();
        assertEquals(Arrays.asList(Tuple.from(0L, TextSamples.ANGSTROM), Tuple.from(1L, TextSamples.AETHELRED), Tuple.from(2L, TextSamples.ROMEO_AND_JULIET_PROLOGUE), Tuple.from(3L, TextSamples.FRENCH)), idTextTuples);
        commit(context);
    }
}
Use of com.apple.foundationdb.record.query.expressions.QueryComponent in the project fdb-record-layer by FoundationDB.
From the class TextIndexTest, method queryMapDocuments:
@Test
public void queryMapDocuments() throws Exception {
// Build three MapDocuments; each takes two consecutive text samples as its "a" and "b"
// entries, and group alternates 0/1 by document id.
final List<String> textSamples = Arrays.asList(TextSamples.ROMEO_AND_JULIET_PROLOGUE, TextSamples.AETHELRED, TextSamples.ROMEO_AND_JULIET_PROLOGUE, TextSamples.ANGSTROM, TextSamples.AETHELRED, TextSamples.FRENCH);
final List<MapDocument> documents = IntStream.range(0, textSamples.size() / 2).mapToObj(i -> MapDocument.newBuilder().setDocId(i).addEntry(MapDocument.Entry.newBuilder().setKey("a").setValue(textSamples.get(i * 2)).build()).addEntry(MapDocument.Entry.newBuilder().setKey("b").setValue(textSamples.get(i * 2 + 1)).build()).setGroup(i % 2).build()).collect(Collectors.toList());
try (FDBRecordContext context = openContext()) {
openRecordStore(context, metaDataBuilder -> metaDataBuilder.addIndex(MAP_DOC, MAP_ON_VALUE_INDEX));
documents.forEach(recordStore::saveRecord);
// Each call plans a text filter keyed to one map entry; the expected primary keys and
// plan hash are asserted inside queryMapDocumentsWithIndex.
assertEquals(Collections.singletonList(2L), queryMapDocumentsWithIndex("a", Query.field("value").text().containsAny("king unknown_token"), 1059912699, true));
assertEquals(Arrays.asList(0L, 1L), queryMapDocumentsWithIndex("a", Query.field("value").text().containsPhrase("civil blood makes civil hands unclean"), 1085034960, true));
assertEquals(Collections.emptyList(), queryMapDocumentsWithIndex("b", Query.field("value").text().containsPhrase("civil blood makes civil hands unclean"), 1085034991, true));
assertEquals(Arrays.asList(1L, 2L), queryMapDocumentsWithIndex("b", Query.field("value").text().containsPrefix("na"), 1125182095, true));
assertEquals(Arrays.asList(0L, 1L), queryMapDocumentsWithIndex("a", Query.field("value").text().containsAllPrefixes("civ mut ha"), 0, false));
assertEquals(Arrays.asList(1L, 2L), queryMapDocumentsWithIndex("b", Query.field("value").text().containsAnyPrefix("civ mut na"), 0, true));
// A non-index predicate (group) combined with an entry text predicate should plan as a
// residual filter over a text index scan.
RecordQuery queryWithAdditionalFilter = RecordQuery.newBuilder().setRecordType(MAP_DOC).setFilter(Query.and(Query.field("group").equalsValue(0L), Query.field("entry").oneOfThem().matches(Query.and(Query.field("key").equalsValue("b"), Query.field("value").text().containsAny("anders king"))))).build();
RecordQueryPlan planWithAdditionalFilter = recordStore.planQuery(queryWithAdditionalFilter);
assertThat(planWithAdditionalFilter, filter(Query.field("group").equalsValue(0L), descendant(textIndexScan(anything()))));
List<Long> queryResults = recordStore.executeQuery(planWithAdditionalFilter).map(FDBQueriedRecord::getPrimaryKey).map(tuple -> tuple.getLong(0)).asList().join();
assertEquals(Collections.singletonList(0L), queryResults);
// An OR of two entry text predicates should plan as a distinct unordered union of two
// scans over the same map-on-value index.
queryWithAdditionalFilter = RecordQuery.newBuilder().setRecordType(MAP_DOC).setFilter(Query.or(Query.field("entry").oneOfThem().matches(Query.and(Query.field("key").equalsValue("a"), Query.field("value").text().containsPhrase("bury their parents strife"))), Query.field("entry").oneOfThem().matches(Query.and(Query.field("key").equalsValue("b"), Query.field("value").text().containsPrefix("th"))))).build();
planWithAdditionalFilter = recordStore.planQuery(queryWithAdditionalFilter);
assertThat(planWithAdditionalFilter, primaryKeyDistinct(unorderedUnion(descendant(textIndexScan(indexName(equalTo(MAP_ON_VALUE_INDEX.getName())))), descendant(textIndexScan(indexName(equalTo(MAP_ON_VALUE_INDEX.getName())))))));
queryResults = recordStore.executeQuery(planWithAdditionalFilter).map(FDBQueriedRecord::getPrimaryKey).map(tuple -> tuple.getLong(0)).asList().join();
assertEquals(3, queryResults.size());
assertEquals(ImmutableSet.of(0L, 1L, 2L), ImmutableSet.copyOf(queryResults));
// Planner bug that can happen with certain malformed queries. This plan actually
// returns records where the key and the value match in the same entry, but it is
// asking for all records where *any* entry has a key matching "a" and *any* entry
// has a value matching the text predicate. In reality, this is probably a sign
// the user didn't input their query correctly, but it requires more work from the
// planner not to plan this kind of query.
// FIXME: Full Text: The Planner doesn't always correctly handle ands with nesteds (https://github.com/FoundationDB/fdb-record-layer/issues/53)
final QueryComponent malformedMapFilter = Query.and(Query.field("entry").oneOfThem().matches(Query.field("key").equalsValue("a")), Query.field("entry").oneOfThem().matches(Query.field("value").text().containsAll("civil hands unclean")));
RecordQueryPlan malformedMapPlan = planner.plan(RecordQuery.newBuilder().setRecordType(MAP_DOC).setFilter(malformedMapFilter).build());
assertThat(malformedMapPlan, descendant(textIndexScan(allOf(indexName(MAP_ON_VALUE_INDEX.getName()), groupingBounds(allOf(notNullValue(), hasTupleString("[[a],[a]]"))), textComparison(equalTo(new Comparisons.TextComparison(Comparisons.Type.TEXT_CONTAINS_ALL, "civil hands unclean", null, DefaultTextTokenizer.NAME)))))));
commit(context);
}
}
Aggregations