Use of org.apache.jackrabbit.oak.query.index.FilterImpl in the project jackrabbit-oak by Apache: class LuceneIndexTest, method testLuceneV1NonExistentProperty.
@Test
public void testLuceneV1NonExistentProperty() throws Exception {
    // Define a Lucene index in V1 compatibility mode over string properties.
    NodeBuilder indexRoot = builder.child(INDEX_DEFINITIONS_NAME);
    NodeBuilder indexDefn = newLuceneIndexDefinition(indexRoot, "lucene", ImmutableSet.of("String"));
    indexDefn.setProperty(LuceneIndexConstants.COMPAT_MODE, IndexFormatVersion.V1.getVersion());

    // Commit a single property on the root so the hook indexes it.
    NodeState stateBefore = builder.getNodeState();
    builder.setProperty("foo", "value-with-dash");
    NodeState stateAfter = builder.getNodeState();
    NodeState indexedState = HOOK.processCommit(stateBefore, stateAfter, CommitInfo.EMPTY);

    tracker = new IndexTracker();
    tracker.update(indexedState);
    AdvancedQueryIndex luceneIndex = new LuceneIndex(tracker, null);

    // A prefix full-text constraint on the indexed property must match "/" exactly once.
    FilterImpl existingPropFilter = createFilter(NT_BASE);
    existingPropFilter.restrictPath("/", Filter.PathRestriction.EXACT);
    existingPropFilter.setFullTextConstraint(FullTextParser.parse("foo", "value-with*"));
    List<IndexPlan> existingPropPlans = luceneIndex.getPlans(existingPropFilter, null, builder.getNodeState());
    Cursor existingPropCursor = luceneIndex.query(existingPropPlans.get(0), indexedState);
    assertTrue(existingPropCursor.hasNext());
    assertEquals("/", existingPropCursor.next().getPath());
    assertFalse(existingPropCursor.hasNext());

    // The same constraint against a property that was never set must yield no results.
    FilterImpl missingPropFilter = createFilter(NT_BASE);
    missingPropFilter.restrictPath("/", Filter.PathRestriction.EXACT);
    missingPropFilter.setFullTextConstraint(FullTextParser.parse("baz", "value-with*"));
    List<IndexPlan> missingPropPlans = luceneIndex.getPlans(missingPropFilter, null, builder.getNodeState());
    Cursor missingPropCursor = luceneIndex.query(missingPropPlans.get(0), indexedState);
    assertFalse(missingPropCursor.hasNext());
}
Use of org.apache.jackrabbit.oak.query.index.FilterImpl in the project jackrabbit-oak by Apache: class LuceneIndexTest, method analyzerWithStopWords.
@Test
public void analyzerWithStopWords() throws Exception {
// Verifies that switching the index's default analyzer to one with a stopword
// filter (plus a reindex) changes full-text matching: "was" becomes ignorable.
NodeBuilder nb = newLuceneIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "lucene", of(TYPENAME_STRING));
TestUtil.useV2(nb);
// Commit a node with a two-word value so the hook builds the initial index.
NodeState before = builder.getNodeState();
builder.setProperty("foo", "fox jumping");
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker = new IndexTracker();
tracker.update(indexed);
AdvancedQueryIndex queryIndex = new LucenePropertyIndex(tracker);
FilterImpl filter = createFilter("nt:base");
// Exact phrase matches with the default analyzer.
filter.setFullTextConstraint(new FullTextTerm(null, "fox jumping", false, false, null));
assertFilter(filter, queryIndex, indexed, ImmutableList.of("/"));
//No stop word configured so default analyzer would also check for 'was'
filter.setFullTextConstraint(new FullTextTerm(null, "fox was jumping", false, false, null));
assertFilter(filter, queryIndex, indexed, Collections.<String>emptyList());
//Change the default analyzer to use the default stopword set
//and trigger a reindex such that new analyzer is used
NodeBuilder anlnb = nb.child(ANALYZERS).child(ANL_DEFAULT);
anlnb.child(ANL_TOKENIZER).setProperty(ANL_NAME, "whitespace");
// Adding a "stop" filter child enables the default stopword set for this analyzer.
anlnb.child(ANL_FILTERS).child("stop");
nb.setProperty(IndexConstants.REINDEX_PROPERTY_NAME, true);
// Reprocess the diff from the previously indexed state so the reindex runs.
before = after;
after = builder.getNodeState();
indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
queryIndex = new LucenePropertyIndex(tracker);
// Sanity check: the plain phrase still matches after reindexing.
filter.setFullTextConstraint(new FullTextTerm(null, "fox jumping", false, false, null));
assertFilter(filter, queryIndex, indexed, ImmutableList.of("/"));
//Now this should get passed as the analyzer would ignore 'was'
filter.setFullTextConstraint(new FullTextTerm(null, "fox was jumping", false, false, null));
assertFilter(filter, queryIndex, indexed, ImmutableList.of("/"));
}
Use of org.apache.jackrabbit.oak.query.index.FilterImpl in the project jackrabbit-oak by Apache: class LuceneIndexTest, method testPropertyNonExistence.
@Test
public void testPropertyNonExistence() throws Exception {
    // Register the test node type and index "foo" with null-check support enabled,
    // so the index can answer "property is null" queries.
    root = TestUtil.registerTestNodeType(builder).getNodeState();
    NodeBuilder indexDefn = newLucenePropertyIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "lucene", ImmutableSet.of("foo"), null);
    NodeBuilder fooRule = indexDefn.child(INDEX_RULES)
            .child(NT_TEST)
            .child(LuceneIndexConstants.PROP_NODE)
            .child("foo");
    fooRule.setProperty(LuceneIndexConstants.PROP_PROPERTY_INDEX, true);
    fooRule.setProperty(LuceneIndexConstants.PROP_NULL_CHECK_ENABLED, true);

    // Commit two nodes that have "foo" and one ("/c") that does not.
    NodeState stateBefore = builder.getNodeState();
    createNodeWithType(builder, "a", NT_TEST).setProperty("foo", "bar");
    createNodeWithType(builder, "b", NT_TEST).setProperty("foo", "bar");
    createNodeWithType(builder, "c", NT_TEST);
    NodeState stateAfter = builder.getNodeState();
    NodeState indexedState = HOOK.processCommit(stateBefore, stateAfter, CommitInfo.EMPTY);

    tracker = new IndexTracker();
    tracker.update(indexedState);
    AdvancedQueryIndex propertyIndex = new LucenePropertyIndex(tracker);

    // "foo = null" must return exactly the node lacking the property.
    FilterImpl nullCheckFilter = createFilter(NT_TEST);
    nullCheckFilter.restrictProperty("foo", Operator.EQUAL, null);
    assertFilter(nullCheckFilter, propertyIndex, indexedState, ImmutableList.of("/c"));
}
Use of org.apache.jackrabbit.oak.query.index.FilterImpl in the project jackrabbit-oak by Apache: class LuceneIndexTest, method indexNodeLockHandling.
@Test
public void indexNodeLockHandling() throws Exception {
// Verifies that query planning skips an index whose backing data is corrupt
// and falls back to a sibling index covering the same property.
tracker = new IndexTracker();
//Create 2 indexes. /oak:index/lucene and /test/oak:index/lucene
//The way LuceneIndexLookup works is. It collect child first and then
//parent
NodeBuilder index = builder.child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb = newLuceneIndexDefinitionV2(index, "lucene", of(TYPENAME_STRING));
nb.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
nb.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"), STRINGS));
// Second, identically configured index nested under /test.
index = builder.child("test").child(INDEX_DEFINITIONS_NAME);
NodeBuilder nb2 = newLuceneIndexDefinitionV2(index, "lucene", of(TYPENAME_STRING));
nb2.setProperty(LuceneIndexConstants.FULL_TEXT_ENABLED, false);
nb2.setProperty(createProperty(INCLUDE_PROPERTY_NAMES, of("foo"), STRINGS));
// Commit content under /test so both indexes have data for "foo".
NodeState before = builder.getNodeState();
builder.child("test").setProperty("foo", "fox is jumping");
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker.update(indexed);
QueryIndex.AdvancedQueryIndex queryIndex = new LucenePropertyIndex(tracker);
FilterImpl filter = createFilter(NT_BASE);
filter.restrictPath("/test", Filter.PathRestriction.EXACT);
filter.restrictProperty("foo", Operator.EQUAL, PropertyValues.newString("bar"));
builder = indexed.builder();
NodeBuilder dir = builder.child("oak:index").child("lucene").child(":data");
//Mutate the blob to fail on access i.e. create corrupt index
List<Blob> blobs = new ArrayList<Blob>();
Blob b = dir.child("segments_1").getProperty(JCR_DATA).getValue(Type.BINARY, 0);
// FailingBlob throws on read, simulating unreadable index storage.
FailingBlob fb = new FailingBlob(IOUtils.toByteArray(b.getNewStream()));
blobs.add(fb);
dir.child("segments_1").setProperty(JCR_DATA, blobs, BINARIES);
indexed = builder.getNodeState();
tracker.update(indexed);
// Only the intact nested index should survive planning.
List<IndexPlan> list = queryIndex.getPlans(filter, null, indexed);
assertEquals("There must be only one plan", 1, list.size());
IndexPlan plan = list.get(0);
assertEquals("Didn't get the expected plan", "/test/oak:index/lucene", plan.getPlanName());
}
Use of org.apache.jackrabbit.oak.query.index.FilterImpl in the project jackrabbit-oak by Apache: class LuceneIndexTest, method testCursorStability.
@Test
public void testCursorStability() throws Exception {
// Verifies that a cursor opened before a batch of deletions keeps returning
// stable, non-duplicated paths even after the index is updated underneath it.
NodeBuilder index = newLucenePropertyIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "lucene", ImmutableSet.of("foo"), null);
NodeBuilder rules = index.child(INDEX_RULES);
NodeBuilder fooProp = rules.child("nt:base").child(LuceneIndexConstants.PROP_NODE).child("foo");
fooProp.setProperty(LuceneIndexConstants.PROP_PROPERTY_INDEX, true);
//1. Create 60 nodes
// (batch size + 10, so iteration must span more than one fetch batch)
NodeState before = builder.getNodeState();
int noOfDocs = LucenePropertyIndex.LUCENE_QUERY_BATCH_SIZE + 10;
for (int i = 0; i < noOfDocs; i++) {
builder.child("a" + i).setProperty("foo", (long) i);
}
NodeState after = builder.getNodeState();
NodeState indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
tracker = new IndexTracker();
tracker.update(indexed);
//Perform query and get hold of cursor
AdvancedQueryIndex queryIndex = new LucenePropertyIndex(tracker);
FilterImpl filter = createFilter(NT_BASE);
filter.restrictProperty("foo", Operator.GREATER_OR_EQUAL, PropertyValues.newLong(0L));
List<IndexPlan> plans = queryIndex.getPlans(filter, null, indexed);
Cursor cursor = queryIndex.query(plans.get(0), indexed);
//Trigger loading of cursor
assertTrue(cursor.hasNext());
//Now before traversing further go ahead and delete all but 10 nodes
before = indexed;
builder = indexed.builder();
for (int i = 0; i < noOfDocs - 10; i++) {
builder.child("a" + i).remove();
}
after = builder.getNodeState();
indexed = HOOK.processCommit(before, after, CommitInfo.EMPTY);
builder = indexed.builder();
//Ensure that Lucene actually removes deleted docs
NodeBuilder idx = builder.child(INDEX_DEFINITIONS_NAME).child("lucene");
purgeDeletedDocs(idx, new IndexDefinition(root, idx.getNodeState(), "/foo"));
int numDeletes = getDeletedDocCount(idx, new IndexDefinition(root, idx.getNodeState(), "/foo"));
Assert.assertEquals(0, numDeletes);
//Update the IndexSearcher
tracker.update(builder.getNodeState());
//its hard to get correct size estimate as post deletion cursor
// would have already picked up 50 docs which would not be considered
//deleted by QE for the revision at which query was triggered
//So just checking for >
List<String> resultPaths = Lists.newArrayList();
while (cursor.hasNext()) {
resultPaths.add(cursor.next().getPath());
}
// The cursor must never hand out the same path twice, and must return something.
Set<String> uniquePaths = Sets.newHashSet(resultPaths);
assertEquals(resultPaths.size(), uniquePaths.size());
assertTrue(!uniquePaths.isEmpty());
}
Aggregations