Use of com.apple.foundationdb.record.TestRecordsTextProto.SimpleDocument in project fdb-record-layer by FoundationDB.
From the class TextIndexTest, method saveCombinedByGroup.
@Test
public void saveCombinedByGroup() throws Exception {
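// Both documents use group 0, so their entries land in the same grouping of the
// combined multi-type text index added below.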
final SimpleDocument simpleDocument = SimpleDocument.newBuilder().setGroup(0).setDocId(1907L).setText(TextSamples.ANGSTROM).build();
final ComplexDocument complexDocument = ComplexDocument.newBuilder().setGroup(0).setDocId(966L).setText(TextSamples.AETHELRED).build();
try (FDBRecordContext context = openContext()) {
openRecordStore(context, metaDataBuilder -> {
metaDataBuilder.removeIndex(TextIndexTestUtils.SIMPLE_DEFAULT_NAME);
metaDataBuilder.addMultiTypeIndex(Arrays.asList(metaDataBuilder.getRecordType(COMPLEX_DOC), metaDataBuilder.getRecordType(SIMPLE_DOC)), COMBINED_TEXT_BY_GROUP);
});
recordStore.saveRecord(simpleDocument);
int firstKeys = getSaveIndexKeyCount(recordStore);
assertEquals(8, firstKeys);
recordStore.saveRecord(complexDocument);
int secondKeys = getSaveIndexKeyCount(recordStore) - firstKeys;
assertEquals(11, secondKeys);
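// Scan the grouped index for the token "was" in group 0: the complex document is keyed by
// (group, doc_id) and the simple document by doc_id alone, so both appear in a single scan.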
List<Map.Entry<Tuple, List<Integer>>> entryList = scanMapEntries(recordStore, COMBINED_TEXT_BY_GROUP, Tuple.from(0, "was"));
assertEquals(Arrays.asList(entryOf(Tuple.from(0L, 966L), Collections.singletonList(7)), entryOf(Tuple.from(1907L), Collections.singletonList(4))), entryList);
commit(context);
}
}
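The COMBINED_TEXT_BY_GROUP index used above is a constant defined elsewhere in TextIndexTest. A minimal sketch of how such a grouped, multi-type text index could be declared, assuming the usual Key.Expressions helpers; the index name below is illustrative rather than the test's actual constant:
import com.apple.foundationdb.record.metadata.Index;
import com.apple.foundationdb.record.metadata.IndexTypes;

import static com.apple.foundationdb.record.metadata.Key.Expressions.field;

// A TEXT index on the "text" field, grouped by the "group" field. Registering it with
// addMultiTypeIndex (as in the hook above) makes SimpleDocument and ComplexDocument
// share one grouped keyspace, which is why a single scan on (group, token) returns both.
final Index combinedTextByGroup = new Index(
        "Combined$text_by_group",                    // illustrative name, not the test constant
        field("text").groupBy(field("group")),
        IndexTypes.TEXT);
With that expression each entry is keyed by (group, token) and maps primary keys to token offsets, which is what the Tuple.from(0, "was") scan and the offset lists in the assertions above rely on.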
Use of com.apple.foundationdb.record.TestRecordsTextProto.SimpleDocument in project fdb-record-layer by FoundationDB.
From the class TextIndexTest, method tokenizerVersionChange.
@Test
public void tokenizerVersionChange() throws Exception {
final SimpleDocument shakespeareDocument = SimpleDocument.newBuilder().setDocId(1623L).setText(TextSamples.ROMEO_AND_JULIET_PROLOGUE).build();
final SimpleDocument aethelredDocument1 = SimpleDocument.newBuilder().setDocId(966L).setText(TextSamples.AETHELRED).build();
final SimpleDocument aethelredDocument2 = SimpleDocument.newBuilder().setDocId(1016L).setText(TextSamples.AETHELRED).build();
try (FDBRecordContext context = openContext()) {
// Use a version of the prefix filter that only keeps first 3 letters
openRecordStore(context, metaDataBuilder -> {
metaDataBuilder.removeIndex(TextIndexTestUtils.SIMPLE_DEFAULT_NAME);
metaDataBuilder.addIndex(SIMPLE_DOC, SIMPLE_TEXT_PREFIX_LEGACY);
});
recordStore.saveRecord(shakespeareDocument);
recordStore.saveRecord(aethelredDocument1);
List<Map.Entry<Tuple, List<Integer>>> scannedEntries = scanMapEntries(recordStore, SIMPLE_TEXT_PREFIX_LEGACY, Tuple.from("the"));
assertEquals(Arrays.asList(entryOf(Tuple.from(966L), Arrays.asList(2, 5)), entryOf(Tuple.from(1623L), Arrays.asList(30, 34, 44, 53, 56, 59, 63, 68, 71, 76, 85, 92))), scannedEntries);
assertEquals(Arrays.asList(Pair.of(Tuple.from(966L), 0), Pair.of(Tuple.from(1623L), 0)), scanTokenizerVersions(recordStore, SIMPLE_TEXT_PREFIX_LEGACY));
commit(context);
}
try (FDBRecordContext context = openContext()) {
// Use a version of the prefix filter that keeps the first 4 letters instead
openRecordStore(context, metaDataBuilder -> {
metaDataBuilder.removeIndex(TextIndexTestUtils.SIMPLE_DEFAULT_NAME);
metaDataBuilder.addIndex(SIMPLE_DOC, SIMPLE_TEXT_PREFIX);
});
// check saving new document
recordStore.saveRecord(aethelredDocument2);
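// Under the 4-letter tokenizer the new document indexes the token "ency", while document 966,
// written with the legacy 3-letter tokenizer, still carries its old "enc" entry.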
List<Map.Entry<Tuple, List<Integer>>> scannedEntries = toMapEntries(scanIndex(recordStore, SIMPLE_TEXT_PREFIX, TupleRange.prefixedBy("enc")), TupleHelpers.EMPTY);
assertEquals(Arrays.asList(entryOf(Tuple.from("enc", 966L), Collections.singletonList(3)), entryOf(Tuple.from("ency", 1016L), Collections.singletonList(3))), scannedEntries);
assertEquals(Arrays.asList(Pair.of(Tuple.from(966L), 0), Pair.of(Tuple.from(1016L), 1), Pair.of(Tuple.from(1623L), 0)), scanTokenizerVersions(recordStore, SIMPLE_TEXT_PREFIX));
// check document is re-indexed
recordStore.saveRecord(aethelredDocument1);
scannedEntries = scanMapEntries(recordStore, SIMPLE_TEXT_PREFIX, Tuple.from("ency"));
assertEquals(Arrays.asList(entryOf(Tuple.from(966L), Collections.singletonList(3)), entryOf(Tuple.from(1016L), Collections.singletonList(3))), scannedEntries);
scannedEntries = scanMapEntries(recordStore, SIMPLE_TEXT_PREFIX, Tuple.from("enc"));
assertEquals(Collections.emptyList(), scannedEntries);
assertEquals(Arrays.asList(Pair.of(Tuple.from(966L), 1), Pair.of(Tuple.from(1016L), 1), Pair.of(Tuple.from(1623L), 0)), scanTokenizerVersions(recordStore, SIMPLE_TEXT_PREFIX));
// check document that matches tokenizer version is *not* re-indexed
int beforeSaveKeys = getSaveIndexKeyCount(recordStore);
recordStore.saveRecord(aethelredDocument1);
int afterSaveKeys = getSaveIndexKeyCount(recordStore);
assertEquals(afterSaveKeys, beforeSaveKeys);
assertEquals(Arrays.asList(Pair.of(Tuple.from(966L), 1), Pair.of(Tuple.from(1016L), 1), Pair.of(Tuple.from(1623L), 0)), scanTokenizerVersions(recordStore, SIMPLE_TEXT_PREFIX));
// check old index entries are the ones deleted
scannedEntries = scanMapEntries(recordStore, SIMPLE_TEXT_PREFIX, Tuple.from("civ"));
assertEquals(Collections.singletonList(entryOf(Tuple.from(1623L), Arrays.asList(22, 25))), scannedEntries);
recordStore.deleteRecord(Tuple.from(1623L));
scannedEntries = scanMapEntries(recordStore, SIMPLE_TEXT_PREFIX, Tuple.from("civ"));
assertEquals(Collections.emptyList(), scannedEntries);
assertEquals(Arrays.asList(Pair.of(Tuple.from(966L), 1), Pair.of(Tuple.from(1016L), 1)), scanTokenizerVersions(recordStore, SIMPLE_TEXT_PREFIX));
commit(context);
}
}
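SIMPLE_TEXT_PREFIX_LEGACY and SIMPLE_TEXT_PREFIX are likewise constants defined elsewhere in the test class. A plausible sketch of how two indexes can pin different versions of the same tokenizer through index options, assuming the standard IndexOptions.TEXT_TOKENIZER_NAME_OPTION and IndexOptions.TEXT_TOKENIZER_VERSION_OPTION; the tokenizer and index names here are illustrative:
import com.google.common.collect.ImmutableMap;
import com.apple.foundationdb.record.metadata.Index;
import com.apple.foundationdb.record.metadata.IndexOptions;
import com.apple.foundationdb.record.metadata.IndexTypes;

import static com.apple.foundationdb.record.metadata.Key.Expressions.field;

// Version 0 of the prefix tokenizer keeps the first 3 letters of each token and
// version 1 keeps the first 4, as the comments in the test above describe.
final Index simpleTextPrefixLegacy = new Index("Simple$text_prefix_legacy", field("text"), IndexTypes.TEXT,
        ImmutableMap.of(IndexOptions.TEXT_TOKENIZER_NAME_OPTION, "prefix_filter",    // illustrative tokenizer name
                        IndexOptions.TEXT_TOKENIZER_VERSION_OPTION, "0"));
final Index simpleTextPrefix = new Index("Simple$text_prefix", field("text"), IndexTypes.TEXT,
        ImmutableMap.of(IndexOptions.TEXT_TOKENIZER_NAME_OPTION, "prefix_filter",
                        IndexOptions.TEXT_TOKENIZER_VERSION_OPTION, "1"));
Re-saving a record under an index whose tokenizer version is newer than the version recorded for its existing entries rewrites those entries, while a save at the same version writes nothing new; that is the re-index and no-op behavior the scanTokenizerVersions assertions above verify.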
Use of com.apple.foundationdb.record.TestRecordsTextProto.SimpleDocument in project fdb-record-layer by FoundationDB.
From the class TextIndexTest, method saveSimpleWithAggressiveConflictRanges.
@Test
public void saveSimpleWithAggressiveConflictRanges() throws Exception {
// These two documents are in different languages and share no tokens, so they
// would not conflict with each other were it not for the aggressive conflict ranges
final SimpleDocument shakespeareDocument = SimpleDocument.newBuilder().setDocId(1623L).setGroup(0).setText(TextSamples.ROMEO_AND_JULIET_PROLOGUE).build();
final SimpleDocument yiddishDocument = SimpleDocument.newBuilder().setDocId(1945L).setGroup(0).setText(TextSamples.YIDDISH).build();
final RecordMetaDataHook hook = metaDataBuilder -> {
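// Replace the default text index with an equivalent index that enables aggressive conflict ranges.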
final Index oldIndex = metaDataBuilder.getIndex(TextIndexTestUtils.SIMPLE_DEFAULT_NAME);
metaDataBuilder.removeIndex(TextIndexTestUtils.SIMPLE_DEFAULT_NAME);
final Index newIndex = new Index(TextIndexTestUtils.SIMPLE_DEFAULT_NAME + "-new", oldIndex.getRootExpression(), IndexTypes.TEXT, ImmutableMap.of(IndexOptions.TEXT_ADD_AGGRESSIVE_CONFLICT_RANGES_OPTION, "true"));
metaDataBuilder.addIndex(SIMPLE_DOC, newIndex);
};
saveTwoRecordsConcurrently(hook, shakespeareDocument, yiddishDocument, false);
}
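saveTwoRecordsConcurrently is a helper defined elsewhere in TextIndexTest. A rough sketch of what such a helper plausibly does, reusing the openContext and commit helpers seen in the snippets above; the openStoreFor helper, the meaning of the boolean parameter, and the exception type asserted on are all assumptions rather than the test's actual code:
// Assumed imports beyond those already used by TextIndexTest:
// import com.apple.foundationdb.record.provider.foundationdb.FDBExceptions;
// import static org.junit.jupiter.api.Assertions.assertThrows;
private void saveTwoRecordsConcurrentlySketch(@Nonnull RecordMetaDataHook hook,
                                              @Nonnull Message record1,
                                              @Nonnull Message record2,
                                              boolean expectSuccess) throws Exception {
    try (FDBRecordContext context1 = openContext(); FDBRecordContext context2 = openContext()) {
        // Hypothetical helper that applies the hook and builds a store for each context.
        final FDBRecordStore store1 = openStoreFor(context1, hook);
        final FDBRecordStore store2 = openStoreFor(context2, hook);
        store1.saveRecord(record1);
        store2.saveRecord(record2);
        commit(context1);          // the first transaction commits cleanly
        if (expectSuccess) {
            commit(context2);      // disjoint writes, so both commits succeed
        } else {
            // With TEXT_ADD_AGGRESSIVE_CONFLICT_RANGES_OPTION the whole index range is in the
            // write-conflict set, so the second commit is expected to collide with the first.
            assertThrows(FDBExceptions.FDBStoreTransactionConflictException.class, () -> commit(context2));
        }
    }
}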