Use of org.apache.jackrabbit.oak.spi.query.QueryIndex.IndexPlan in the Apache jackrabbit-oak project: class LuceneIndexTest, method assertFilter.
/**
 * Executes the first available index plan for {@code filter} against the
 * {@code indexed} state and verifies that the result contains exactly the
 * {@code expected} paths (order-insensitive; the actual paths are sorted
 * before comparison).
 *
 * @param filter     the query filter to plan and execute
 * @param queryIndex the index implementation under test
 * @param indexed    the node state holding the index data
 * @param expected   the paths the query is expected to return
 * @return the sorted list of paths actually returned by the cursor
 */
private static List<String> assertFilter(Filter filter, AdvancedQueryIndex queryIndex, NodeState indexed, List<String> expected) {
    // Always execute the first plan offered by the index.
    Cursor cursor = queryIndex.query(queryIndex.getPlans(filter, null, indexed).get(0), indexed);
    List<String> actualPaths = new ArrayList<>();
    while (cursor.hasNext()) {
        actualPaths.add(cursor.next().getPath());
    }
    // Sort so the comparison (and any failure output) is deterministic.
    Collections.sort(actualPaths);
    for (String expectedPath : expected) {
        assertTrue("Expected path " + expectedPath + " not found", actualPaths.contains(expectedPath));
    }
    // Same size + every expected path present => result sets match.
    assertEquals("Result set size is different \nExpected: " + expected + "\nActual: " + actualPaths, expected.size(), actualPaths.size());
    return actualPaths;
}
Use of org.apache.jackrabbit.oak.spi.query.QueryIndex.IndexPlan in the Apache jackrabbit-oak project: class LuceneIndexTest, method testLuceneV1NonExistentProperty.
@Test
public void testLuceneV1NonExistentProperty() throws Exception {
    // Define a Lucene index forced into the legacy V1 index format.
    NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
    NodeBuilder defn = newLuceneIndexDefinition(index, "lucene", ImmutableSet.of("String"));
    defn.setProperty(LuceneIndexConstants.COMPAT_MODE, IndexFormatVersion.V1.getVersion());
    NodeState before = builder.getNodeState();
    // Set "foo" on the root node; this is the only indexed content.
    builder.setProperty("foo", "value-with-dash");
    NodeState after = builder.getNodeState();
    // Run the commit hook so the diff gets indexed.
    NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    tracker = new IndexTracker();
    tracker.update(indexed);
    AdvancedQueryIndex queryIndex = new LuceneIndex(tracker, null);
    // Full-text wildcard query on the existing property "foo" must match
    // the root node where the property was set.
    FilterImpl filter = createFilter(NT_BASE);
    filter.restrictPath("/", Filter.PathRestriction.EXACT);
    filter.setFullTextConstraint(FullTextParser.parse("foo", "value-with*"));
    List<IndexPlan> plans = queryIndex.getPlans(filter, null, builder.getNodeState());
    Cursor cursor = queryIndex.query(plans.get(0), indexed);
    assertTrue(cursor.hasNext());
    assertEquals("/", cursor.next().getPath());
    assertFalse(cursor.hasNext());
    //Now perform a query against a field which does not exist
    // The same wildcard value restricted to the non-existent field "baz"
    // must return an empty cursor rather than fail.
    FilterImpl filter2 = createFilter(NT_BASE);
    filter2.restrictPath("/", Filter.PathRestriction.EXACT);
    filter2.setFullTextConstraint(FullTextParser.parse("baz", "value-with*"));
    List<IndexPlan> plans2 = queryIndex.getPlans(filter2, null, builder.getNodeState());
    Cursor cursor2 = queryIndex.query(plans2.get(0), indexed);
    assertFalse(cursor2.hasNext());
}
Use of org.apache.jackrabbit.oak.spi.query.QueryIndex.IndexPlan in the Apache jackrabbit-oak project: class LuceneIndexTest, method indexNodeLockHandling.
@Test
public void indexNodeLockHandling() throws Exception {
    tracker = new IndexTracker();
    //Create 2 indexes. /oak:index/lucene and /test/oak:index/lucene
    //The way LuceneIndexLookup works: it collects the child index first
    //and then the parent
    NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
    NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene", of(TYPENAME_STRING));
    nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
    nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"), STRINGS));
    // Second, identical index definition nested under /test.
    index = builder.child("test").child(INDEX_DEFINITIONS_NAME);
    NodeBuilder nb2 = newLuceneIndexDefinitionV2(index, "lucene", of(TYPENAME_STRING));
    nb2.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
    nb2.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"), STRINGS));
    NodeState before = builder.getNodeState();
    builder.child("test").setProperty("foo", "fox is jumping");
    NodeState after = builder.getNodeState();
    NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    tracker.update(indexed);
    QueryIndex.AdvancedQueryIndex queryIndex = new LucenePropertyIndex(tracker);
    FilterImpl filter = createFilter(NT_BASE);
    filter.restrictPath("/test", Filter.PathRestriction.EXACT);
    filter.restrictProperty("foo", Operator.EQUAL, PropertyValues.newString("bar"));
    builder = indexed.builder();
    NodeBuilder dir = builder.child("oak:index").child("lucene").child(":data");
    //Mutate the blob to fail on access i.e. create corrupt index
    // Only the top-level index (/oak:index/lucene) is corrupted; the nested
    // /test index stays intact. "segments_1" is the Lucene segments file.
    List<Blob> blobs = new ArrayList<Blob>();
    Blob b = dir.child("segments_1").getProperty(JCR_DATA).getValue(Type.BINARY, 0);
    FailingBlob fb = new FailingBlob(IOUtils.toByteArray(b.getNewStream()));
    blobs.add(fb);
    dir.child("segments_1").setProperty(JCR_DATA, blobs, BINARIES);
    indexed = builder.getNodeState();
    tracker.update(indexed);
    // Planning must skip the corrupt index and offer only the healthy
    // nested one — i.e. a failing index must not lock up plan creation.
    List<IndexPlan> list = queryIndex.getPlans(filter, null, indexed);
    assertEquals("There must be only one plan", 1, list.size());
    IndexPlan plan = list.get(0);
    assertEquals("Didn't get the expected plan", "/test/oak:index/lucene", plan.getPlanName());
}
Use of org.apache.jackrabbit.oak.spi.query.QueryIndex.IndexPlan in the Apache jackrabbit-oak project: class LuceneIndexTest, overloaded method assertFilter (with ordered flag).
/**
 * Executes the first available index plan for {@code filter} against the
 * {@code indexed} state and verifies the result matches {@code expected}.
 *
 * @param filter     the query filter to plan and execute
 * @param queryIndex the index implementation under test
 * @param indexed    the node state holding the index data
 * @param expected   the paths the query is expected to return
 * @param ordered    when {@code true} the result must match {@code expected}
 *                   in the exact same order; when {@code false} the check is
 *                   order-insensitive (delegates to the sorting overload)
 * @return the list of paths returned by the cursor, in cursor order
 */
private static List<String> assertFilter(Filter filter, AdvancedQueryIndex queryIndex, NodeState indexed, List<String> expected, boolean ordered) {
    if (!ordered) {
        // Order-insensitive comparison: reuse the sorting overload.
        return assertFilter(filter, queryIndex, indexed, expected);
    }
    List<IndexPlan> plans = queryIndex.getPlans(filter, null, indexed);
    Cursor cursor = queryIndex.query(plans.get(0), indexed);
    List<String> paths = newArrayList();
    while (cursor.hasNext()) {
        paths.add(cursor.next().getPath());
    }
    for (String p : expected) {
        assertTrue("Expected path " + p + " not found", paths.contains(p));
    }
    assertEquals("Result set size is different", expected.size(), paths.size());
    // BUG FIX: the ordered variant previously never verified order — a
    // permuted result with the right members would pass. Enforce sequence
    // equality so the "ordered" contract actually holds.
    assertEquals("Result order is different", expected, paths);
    return paths;
}
Use of org.apache.jackrabbit.oak.spi.query.QueryIndex.IndexPlan in the Apache jackrabbit-oak project: class LuceneIndexTest, method testCursorStability.
@Test
public void testCursorStability() throws Exception {
    // Property index on "foo" for nt:base.
    NodeBuilder index = newLucenePropertyIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "lucene", ImmutableSet.of("foo"), null);
    NodeBuilder rules = index.child(INDEX_RULES);
    NodeBuilder fooProp = rules.child("nt:base").child(LuceneIndexConstants.PROP_NODE).child("foo");
    fooProp.setProperty(LuceneIndexConstants.PROP_PROPERTY_INDEX, true);
    //1. Create LUCENE_QUERY_BATCH_SIZE + 10 nodes, so traversal needs
    //more than one batch
    NodeState before = builder.getNodeState();
    int noOfDocs = LucenePropertyIndex.LUCENE_QUERY_BATCH_SIZE + 10;
    for (int i = 0; i < noOfDocs; i++) {
        builder.child("a" + i).setProperty("foo", (long) i);
    }
    NodeState after = builder.getNodeState();
    NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    tracker = new IndexTracker();
    tracker.update(indexed);
    //Perform query and get hold of cursor
    AdvancedQueryIndex queryIndex = new LucenePropertyIndex(tracker);
    FilterImpl filter = createFilter(NT_BASE);
    filter.restrictProperty("foo", Operator.GREATER_OR_EQUAL, PropertyValues.newLong(0L));
    List<IndexPlan> plans = queryIndex.getPlans(filter, null, indexed);
    Cursor cursor = queryIndex.query(plans.get(0), indexed);
    //Trigger loading of cursor
    assertTrue(cursor.hasNext());
    //Now before traversing further go ahead and delete all but 10 nodes
    before = indexed;
    builder = indexed.builder();
    for (int i = 0; i < noOfDocs - 10; i++) {
        builder.child("a" + i).remove();
    }
    after = builder.getNodeState();
    indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
    builder = indexed.builder();
    //Ensure that Lucene actually removes deleted docs
    NodeBuilder idx = builder.child(INDEX_DEFINITIONS_NAME).child("lucene");
    purgeDeletedDocs(idx, new IndexDefinition(root, idx.getNodeState(), "/foo"));
    int numDeletes = getDeletedDocCount(idx, new IndexDefinition(root, idx.getNodeState(), "/foo"));
    Assert.assertEquals(0, numDeletes);
    //Update the IndexSearcher
    tracker.update(builder.getNodeState());
    //its hard to get correct size estimate as post deletion cursor
    // would have already picked up a batch of docs which would not be
    //considered deleted by QE for the revision at which query was triggered
    //So just checking for >
    // Continue iterating the cursor that was opened before the deletion:
    // it must keep working and must never return a path twice.
    List<String> resultPaths = Lists.newArrayList();
    while (cursor.hasNext()) {
        resultPaths.add(cursor.next().getPath());
    }
    // No duplicates: each path is returned at most once despite the index
    // changing underneath the live cursor.
    Set<String> uniquePaths = Sets.newHashSet(resultPaths);
    assertEquals(resultPaths.size(), uniquePaths.size());
    assertTrue(!uniquePaths.isEmpty());
}
Aggregations