Use of com.yahoo.prelude.IndexFacts in project vespa by vespa-engine.
From the class TokenizerTestCase, the method testExactMatchTokenizationEndsByColon:
@Test
public void testExactMatchTokenizationEndsByColon() {
    Index index1 = new Index("testexact1");
    index1.setExact(true, null);
    Index index2 = new Index("testexact2");
    index2.setExact(true, "()/aa*::*&");
    IndexFacts facts = new IndexFacts();
    facts.addIndex("testsd", index1);
    facts.addIndex("testsd", index2);
    Tokenizer tokenizer = new Tokenizer(new SimpleLinguistics());
    IndexFacts.Session session = facts.newSession(Collections.emptySet(), Collections.emptySet());
    List<?> tokens = tokenizer.tokenize("normal a:b (normal testexact1:!/%#%&+-+ ) testexact2:ho_/&%&/()/aa*::*&b:", session);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(SPACE, " "), tokens.get(1));
    assertEquals(new Token(WORD, "a"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, "b"), tokens.get(4));
    assertEquals(new Token(SPACE, " "), tokens.get(5));
    assertEquals(new Token(LBRACE, "("), tokens.get(6));
    assertEquals(new Token(WORD, "normal"), tokens.get(7));
    assertEquals(new Token(SPACE, " "), tokens.get(8));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(9));
    assertEquals(new Token(COLON, ":"), tokens.get(10));
    assertEquals(new Token(WORD, "!/%#%&+-+"), tokens.get(11));
    assertEquals(new Token(SPACE, " "), tokens.get(12));
    assertEquals(new Token(RBRACE, ")"), tokens.get(13));
    assertEquals(new Token(SPACE, " "), tokens.get(14));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(15));
    assertEquals(new Token(COLON, ":"), tokens.get(16));
    assertEquals(new Token(WORD, "ho_/&%&/"), tokens.get(17));
    assertEquals(new Token(WORD, "b"), tokens.get(18));
    assertEquals(new Token(COLON, ":"), tokens.get(19));
}
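These TokenizerTestCase excerpts omit the import section of the test class. A plausible reconstruction is sketched below; the package names are best-effort assumptions based on the Vespa source layout and may differ between versions.

// Assumed imports for the TokenizerTestCase excerpts (not taken from the page itself):
import java.util.Collections;
import java.util.List;

import org.junit.Test;

import com.yahoo.language.simple.SimpleLinguistics;
import com.yahoo.prelude.Index;
import com.yahoo.prelude.IndexFacts;
import com.yahoo.prelude.query.parser.Token;
import com.yahoo.prelude.query.parser.Tokenizer;

import static com.yahoo.prelude.query.parser.Token.Kind.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;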
Use of com.yahoo.prelude.IndexFacts in project vespa by vespa-engine.
From the class TokenizerTestCase, the method testExactMatchTokenizationWithTerminatorTerminatedByEndOfString:
@Test
public void testExactMatchTokenizationWithTerminatorTerminatedByEndOfString() {
    Index index1 = new Index("testexact1");
    index1.setExact(true, null);
    Index index2 = new Index("testexact2");
    index2.setExact(true, "()/aa*::*&");
    IndexFacts facts = new IndexFacts();
    facts.addIndex("testsd", index1);
    facts.addIndex("testsd", index2);
    Tokenizer tokenizer = new Tokenizer(new SimpleLinguistics());
    IndexFacts.Session session = facts.newSession(Collections.emptySet(), Collections.emptySet());
    List<?> tokens = tokenizer.tokenize("normal a:b (normal testexact1:/,%#%&+-+ ) testexact2:ho_/&%&/()/aa*::*", session);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(SPACE, " "), tokens.get(1));
    assertEquals(new Token(WORD, "a"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, "b"), tokens.get(4));
    assertEquals(new Token(SPACE, " "), tokens.get(5));
    assertEquals(new Token(LBRACE, "("), tokens.get(6));
    assertEquals(new Token(WORD, "normal"), tokens.get(7));
    assertEquals(new Token(SPACE, " "), tokens.get(8));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(9));
    assertEquals(new Token(COLON, ":"), tokens.get(10));
    assertEquals(new Token(WORD, "/,%#%&+-+"), tokens.get(11));
    assertEquals(new Token(SPACE, " "), tokens.get(12));
    assertEquals(new Token(RBRACE, ")"), tokens.get(13));
    assertEquals(new Token(SPACE, " "), tokens.get(14));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(15));
    assertEquals(new Token(COLON, ":"), tokens.get(16));
    assertEquals(new Token(WORD, "ho_/&%&/()/aa*::*"), tokens.get(17));
    assertTrue(((Token) tokens.get(17)).isSpecial());
}
Use of com.yahoo.prelude.IndexFacts in project vespa by vespa-engine.
From the class TokenizerTestCase, the method testExactMatchHeuristics:
@Test
public void testExactMatchHeuristics() {
    Index index1 = new Index("testexact1");
    index1.setExact(true, null);
    Index index2 = new Index("testexact2");
    index2.setExact(true, "()/aa*::*&");
    IndexFacts indexFacts = new IndexFacts();
    indexFacts.addIndex("testsd", index1);
    indexFacts.addIndex("testsd", index2);
    IndexFacts.Session facts = indexFacts.newSession(Collections.emptySet(), Collections.emptySet());
    Tokenizer tokenizer = new Tokenizer(new SimpleLinguistics());

    // Exact-match fields mixed with normal fields and parentheses
    List<?> tokens = tokenizer.tokenize("normal a:b (normal testexact1:foo) testexact2:bar", facts);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(SPACE, " "), tokens.get(1));
    assertEquals(new Token(WORD, "a"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, "b"), tokens.get(4));
    assertEquals(new Token(SPACE, " "), tokens.get(5));
    assertEquals(new Token(LBRACE, "("), tokens.get(6));
    assertEquals(new Token(WORD, "normal"), tokens.get(7));
    assertEquals(new Token(SPACE, " "), tokens.get(8));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(9));
    assertEquals(new Token(COLON, ":"), tokens.get(10));
    assertEquals(new Token(WORD, "foo"), tokens.get(11));
    assertEquals(new Token(RBRACE, ")"), tokens.get(12));
    assertEquals(new Token(SPACE, " "), tokens.get(13));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(14));
    assertEquals(new Token(COLON, ":"), tokens.get(15));
    assertEquals(new Token(WORD, "bar"), tokens.get(16));

    // '*', '"' and '!' inside an exact token are kept as part of the word
    tokens = tokenizer.tokenize("testexact1:a*teens", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "a*teens"), tokens.get(2));
    tokens = tokenizer.tokenize("testexact1:foo\"bar", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo\"bar"), tokens.get(2));
    tokens = tokenizer.tokenize("testexact1:foo!bar", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo!bar"), tokens.get(2));

    // Trailing '!' markers (optionally repeated or followed by a number) are split out of the exact token
    tokens = tokenizer.tokenize("testexact1:foo! ", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo"), tokens.get(2));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(3));
    assertEquals(new Token(SPACE, " "), tokens.get(4));
    tokens = tokenizer.tokenize("testexact1:foo!! ", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo"), tokens.get(2));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(3));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(4));
    assertEquals(new Token(SPACE, " "), tokens.get(5));
    tokens = tokenizer.tokenize("testexact1:foo!100 ", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo"), tokens.get(2));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(3));
    assertEquals(new Token(NUMBER, "100"), tokens.get(4));
    assertEquals(new Token(SPACE, " "), tokens.get(5));
    tokens = tokenizer.tokenize("testexact1:foo*!100 ", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo"), tokens.get(2));
    assertEquals(new Token(STAR, "*"), tokens.get(3));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(4));
    assertEquals(new Token(NUMBER, "100"), tokens.get(5));
    assertEquals(new Token(SPACE, " "), tokens.get(6));

    // Leading/trailing '*' markers, quoted phrases and '!'-weights combined
    tokens = tokenizer.tokenize("testexact1: *\"foo bar\"*!100 ", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(STAR, "*"), tokens.get(2));
    assertEquals(new Token(WORD, "foo bar"), tokens.get(3));
    assertEquals(new Token(STAR, "*"), tokens.get(4));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(5));
    assertEquals(new Token(NUMBER, "100"), tokens.get(6));
    assertEquals(new Token(SPACE, " "), tokens.get(7));
    tokens = tokenizer.tokenize("testexact1: *\"foo bar\"*!100", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(STAR, "*"), tokens.get(2));
    assertEquals(new Token(WORD, "foo bar"), tokens.get(3));
    assertEquals(new Token(STAR, "*"), tokens.get(4));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(5));
    assertEquals(new Token(NUMBER, "100"), tokens.get(6));
    tokens = tokenizer.tokenize("testexact1: *foobar*!100", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(STAR, "*"), tokens.get(2));
    assertEquals(new Token(WORD, "foobar"), tokens.get(3));
    assertEquals(new Token(STAR, "*"), tokens.get(4));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(5));
    assertEquals(new Token(NUMBER, "100"), tokens.get(6));
    tokens = tokenizer.tokenize("testexact1: *foobar*!100!", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(STAR, "*"), tokens.get(2));
    assertEquals(new Token(WORD, "foobar*!100"), tokens.get(3));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(4));

    // Parentheses inside an exact token are kept; quotes around a single word are stripped
    tokens = tokenizer.tokenize("testexact1:foo(bar)", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo(bar)"), tokens.get(2));
    tokens = tokenizer.tokenize("testexact1:\"foo\"", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo"), tokens.get(2));
    tokens = tokenizer.tokenize("testexact1: foo", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo"), tokens.get(2));
    tokens = tokenizer.tokenize("testexact1: \"foo\"", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo"), tokens.get(2));
    tokens = tokenizer.tokenize("testexact1: \"foo\"", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "foo"), tokens.get(2));

    // Quoted phrases and multiple exact-match fields in one query
    tokens = tokenizer.tokenize("testexact1:vespa testexact2:resolved", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "vespa"), tokens.get(2));
    assertEquals(new Token(SPACE, " "), tokens.get(3));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(4));
    assertEquals(new Token(COLON, ":"), tokens.get(5));
    assertEquals(new Token(WORD, "resolved"), tokens.get(6));
    tokens = tokenizer.tokenize("testexact1:\"news search\" testexact2:resolved", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "news search"), tokens.get(2));
    assertEquals(new Token(SPACE, " "), tokens.get(3));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(4));
    assertEquals(new Token(COLON, ":"), tokens.get(5));
    assertEquals(new Token(WORD, "resolved"), tokens.get(6));
    tokens = tokenizer.tokenize("(testexact1:\"news search\" testexact1:vespa)", facts);
    assertEquals(new Token(LBRACE, "("), tokens.get(0));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(1));
    assertEquals(new Token(COLON, ":"), tokens.get(2));
    assertEquals(new Token(WORD, "news search"), tokens.get(3));
    assertEquals(new Token(SPACE, " "), tokens.get(4));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(5));
    assertEquals(new Token(COLON, ":"), tokens.get(6));
    assertEquals(new Token(WORD, "vespa"), tokens.get(7));
    assertEquals(new Token(RBRACE, ")"), tokens.get(8));

    // Trailing wildcards and '!'-weights after plain and quoted exact tokens
    tokens = tokenizer.tokenize("testexact1:news*", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "news"), tokens.get(2));
    assertEquals(new Token(STAR, "*"), tokens.get(3));
    tokens = tokenizer.tokenize("testexact1:\"news\"*", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "news"), tokens.get(2));
    assertEquals(new Token(STAR, "*"), tokens.get(3));
    tokens = tokenizer.tokenize("testexact1:\"news search\"!200", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "news search"), tokens.get(2));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(3));
    assertEquals(new Token(NUMBER, "200"), tokens.get(4));
    tokens = tokenizer.tokenize("testexact1:vespa!200", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(WORD, "vespa"), tokens.get(2));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(3));
    assertEquals(new Token(NUMBER, "200"), tokens.get(4));
    tokens = tokenizer.tokenize("testexact1:*\"news\"*", facts);
    assertEquals(new Token(WORD, "testexact1"), tokens.get(0));
    assertEquals(new Token(COLON, ":"), tokens.get(1));
    assertEquals(new Token(STAR, "*"), tokens.get(2));
    assertEquals(new Token(WORD, "news"), tokens.get(3));
    assertEquals(new Token(STAR, "*"), tokens.get(4));

    // Exact-match fields combined with normal terms, parentheses, '!' and '*'
    tokens = tokenizer.tokenize("normal(testexact1:foo) testexact2:bar", facts);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(LBRACE, "("), tokens.get(1));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, "foo"), tokens.get(4));
    assertEquals(new Token(RBRACE, ")"), tokens.get(5));
    assertEquals(new Token(SPACE, " "), tokens.get(6));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(7));
    assertEquals(new Token(COLON, ":"), tokens.get(8));
    assertEquals(new Token(WORD, "bar"), tokens.get(9));
    tokens = tokenizer.tokenize("normal testexact1:(foo testexact2:bar", facts);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(SPACE, " "), tokens.get(1));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, "(foo"), tokens.get(4));
    assertEquals(new Token(SPACE, " "), tokens.get(5));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(6));
    assertEquals(new Token(COLON, ":"), tokens.get(7));
    assertEquals(new Token(WORD, "bar"), tokens.get(8));
    tokens = tokenizer.tokenize("normal testexact1:foo! testexact2:bar", facts);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(SPACE, " "), tokens.get(1));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, "foo"), tokens.get(4));
    assertEquals(new Token(EXCLAMATION, "!"), tokens.get(5));
    assertEquals(new Token(SPACE, " "), tokens.get(6));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(7));
    assertEquals(new Token(COLON, ":"), tokens.get(8));
    assertEquals(new Token(WORD, "bar"), tokens.get(9));
    tokens = tokenizer.tokenize("normal testexact1:foo* testexact2:bar", facts);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(SPACE, " "), tokens.get(1));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, "foo"), tokens.get(4));
    assertEquals(new Token(STAR, "*"), tokens.get(5));
    assertEquals(new Token(SPACE, " "), tokens.get(6));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(7));
    assertEquals(new Token(COLON, ":"), tokens.get(8));
    assertEquals(new Token(WORD, "bar"), tokens.get(9));
    tokens = tokenizer.tokenize("normal testexact1: foo* testexact2:bar", facts);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(SPACE, " "), tokens.get(1));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, "foo"), tokens.get(4));
    assertEquals(new Token(STAR, "*"), tokens.get(5));
    assertEquals(new Token(SPACE, " "), tokens.get(6));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(7));
    assertEquals(new Token(COLON, ":"), tokens.get(8));
    assertEquals(new Token(WORD, "bar"), tokens.get(9));
    tokens = tokenizer.tokenize("normal testexact1:\" foo\"* testexact2:bar", facts);
    assertEquals(new Token(WORD, "normal"), tokens.get(0));
    assertEquals(new Token(SPACE, " "), tokens.get(1));
    assertEquals(new Token(WORD, "testexact1"), tokens.get(2));
    assertEquals(new Token(COLON, ":"), tokens.get(3));
    assertEquals(new Token(WORD, " foo"), tokens.get(4));
    assertEquals(new Token(STAR, "*"), tokens.get(5));
    assertEquals(new Token(SPACE, " "), tokens.get(6));
    assertEquals(new Token(WORD, "testexact2"), tokens.get(7));
    assertEquals(new Token(COLON, ":"), tokens.get(8));
    assertEquals(new Token(WORD, "bar"), tokens.get(9));
}
Use of com.yahoo.prelude.IndexFacts in project vespa by vespa-engine.
From the class IndexCombinatorTestCase, the method setUp:
@Before
public void setUp() throws Exception {
    transformer = new IndexCombinatorSearcher();
    f = new IndexFacts();
    f.addIndex("one", "z");
    Index i = new Index("default");
    i.addCommand("match-group a i");
    f.addIndex("one", i);
}
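The setUp method assigns to two fields that the excerpt does not declare. Their declarations, inferred from the assignments above (the types follow directly; private visibility is an assumption), would look like this:

// Fields assumed by the setUp() excerpt; types are inferred from the assignments,
// the access modifier is a guess.
private IndexCombinatorSearcher transformer;
private IndexFacts f;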
Use of com.yahoo.prelude.IndexFacts in project vespa by vespa-engine.
From the class ExecutionTestCase, the method testContextCacheSingleLengthSearchChain:
public void testContextCacheSingleLengthSearchChain() {
    IndexFacts[] contextsBefore = new IndexFacts[1];
    IndexFacts[] contextsAfter = new IndexFacts[1];
    List<Searcher> l = new ArrayList<>(1);
    l.add(new ContextCacheSearcher(0, contextsBefore, contextsAfter));
    Chain<Searcher> chain = new Chain<>(l);
    Query query = new Query("?mutatecontext=0");
    new Execution(chain, Execution.Context.createContextStub()).search(query);
    assertEquals(contextsBefore[0], contextsAfter[0]);
    assertSame(contextsBefore[0], contextsAfter[0]);
}
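ContextCacheSearcher is a test helper that this excerpt does not show. Below is a minimal sketch of what such a searcher could look like, assuming the standard com.yahoo.search.Searcher API (search(Query, Execution)) and the Execution.Context.getIndexFacts() accessor; the real helper in ExecutionTestCase also reacts to the mutatecontext query property, which this sketch leaves out.

// Hypothetical sketch, not the actual Vespa test helper.
// Assumed imports: com.yahoo.search.Query, com.yahoo.search.Result,
// com.yahoo.search.Searcher, com.yahoo.search.searchchain.Execution.
static class ContextCacheSearcher extends Searcher {

    private final int index;
    private final IndexFacts[] contextsBefore;
    private final IndexFacts[] contextsAfter;

    ContextCacheSearcher(int index, IndexFacts[] contextsBefore, IndexFacts[] contextsAfter) {
        this.index = index;
        this.contextsBefore = contextsBefore;
        this.contextsAfter = contextsAfter;
    }

    @Override
    public Result search(Query query, Execution execution) {
        // Record the IndexFacts held by the execution context before invoking the rest of the chain
        contextsBefore[index] = execution.context().getIndexFacts();
        Result result = execution.search(query);
        // Record the IndexFacts again after the chain has run, so the test can compare the two captures
        contextsAfter[index] = execution.context().getIndexFacts();
        return result;
    }
}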