Search in sources:

Example 6 with PayloadAttribute

Use of org.apache.lucene.analysis.tokenattributes.PayloadAttribute in the Apache lucene-solr project.

From the class SpellingQueryConverter, method analyze.

/**
 * Runs {@code text} through this converter's analyzer and converts every emitted
 * token into a {@link Token}, appending each one to {@code result}.
 *
 * @param result        collection that receives the converted tokens
 * @param text          raw text to analyze
 * @param offset        base offset added to each token's start/end offsets
 * @param flagsAttValue flags value stamped onto every token (overwrites any
 *                      flags the analyzer may have set)
 * @throws IOException if the underlying TokenStream fails
 */
protected void analyze(Collection<Token> result, String text, int offset, int flagsAttValue) throws IOException {
    // TODO: support custom attributes
    // try-with-resources guarantees the stream is closed even if reset() or
    // incrementToken() throws; the original leaked the stream on that path.
    try (TokenStream stream = analyzer.tokenStream("", text)) {
        CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
        TypeAttribute typeAtt = stream.addAttribute(TypeAttribute.class);
        PayloadAttribute payloadAtt = stream.addAttribute(PayloadAttribute.class);
        PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class);
        OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
        stream.reset();
        while (stream.incrementToken()) {
            Token token = new Token();
            token.copyBuffer(termAtt.buffer(), 0, termAtt.length());
            // Shift the analyzer-relative offsets by the caller-supplied base offset.
            token.setOffset(offset + offsetAtt.startOffset(), offset + offsetAtt.endOffset());
            //overwriting any flags already set...
            token.setFlags(flagsAttValue);
            token.setType(typeAtt.type());
            token.setPayload(payloadAtt.getPayload());
            token.setPositionIncrement(posIncAtt.getPositionIncrement());
            result.add(token);
        }
        // end() must be called after the last incrementToken(), before close().
        stream.end();
    }
}
Also used : TokenStream(org.apache.lucene.analysis.TokenStream) PayloadAttribute(org.apache.lucene.analysis.tokenattributes.PayloadAttribute) CharTermAttribute(org.apache.lucene.analysis.tokenattributes.CharTermAttribute) TypeAttribute(org.apache.lucene.analysis.tokenattributes.TypeAttribute) OffsetAttribute(org.apache.lucene.analysis.tokenattributes.OffsetAttribute) Token(org.apache.lucene.analysis.Token) PositionIncrementAttribute(org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute)

Example 7 with PayloadAttribute

Use of org.apache.lucene.analysis.tokenattributes.PayloadAttribute in the Apache lucene-solr project.

From the class DelimitedPayloadTokenFilterTest, method testIntEncoding.

/**
 * Verifies that DelimitedPayloadTokenFilter with an IntegerEncoder attaches the
 * integer after the '|' delimiter as each token's payload, and leaves tokens
 * without a delimiter payload-free.
 */
public void testIntEncoding() throws Exception {
    String test = "The quick|1 red|2 fox|3 jumped over the lazy|5 brown|99 dogs|83";
    DelimitedPayloadTokenFilter filter = new DelimitedPayloadTokenFilter(whitespaceMockTokenizer(test), '|', new IntegerEncoder());
    CharTermAttribute termAtt = filter.getAttribute(CharTermAttribute.class);
    PayloadAttribute payAtt = filter.getAttribute(PayloadAttribute.class);
    // Expected tokens in order; a negative payload value means "no payload".
    String[] expectedTerms = { "The", "quick", "red", "fox", "jumped", "over", "the", "lazy", "brown", "dogs" };
    int[] expectedPayloads = { -1, 1, 2, 3, -1, -1, -1, 5, 99, 83 };
    filter.reset();
    for (int i = 0; i < expectedTerms.length; i++) {
        byte[] payload = expectedPayloads[i] < 0 ? null : PayloadHelper.encodeInt(expectedPayloads[i]);
        assertTermEquals(expectedTerms[i], filter, termAtt, payAtt, payload);
    }
    // The stream must be exhausted after the last expected token.
    assertFalse(filter.incrementToken());
    filter.end();
    filter.close();
}
Also used : PayloadAttribute(org.apache.lucene.analysis.tokenattributes.PayloadAttribute) CharTermAttribute(org.apache.lucene.analysis.tokenattributes.CharTermAttribute)

Example 8 with PayloadAttribute

Use of org.apache.lucene.analysis.tokenattributes.PayloadAttribute in the Apache lucene-solr project.

From the class NumericPayloadTokenFilterTest, method test.

/**
 * Verifies that NumericPayloadTokenFilter attaches the configured numeric payload
 * (3.0, encoded as a float) to tokens whose type matches "D" (here: "dogs", typed
 * by the WordTokenFilter), and leaves all other tokens as plain "word" type.
 */
public void test() throws IOException {
    String test = "The quick red fox jumped over the lazy brown dogs";
    final MockTokenizer input = new MockTokenizer(MockTokenizer.WHITESPACE, false);
    input.setReader(new StringReader(test));
    NumericPayloadTokenFilter nptf = new NumericPayloadTokenFilter(new WordTokenFilter(input), 3, "D");
    boolean seenDogs = false;
    CharTermAttribute termAtt = nptf.getAttribute(CharTermAttribute.class);
    TypeAttribute typeAtt = nptf.getAttribute(TypeAttribute.class);
    PayloadAttribute payloadAtt = nptf.getAttribute(PayloadAttribute.class);
    nptf.reset();
    while (nptf.incrementToken()) {
        if (termAtt.toString().equals("dogs")) {
            seenDogs = true;
            assertTrue(typeAtt.type() + " is not equal to " + "D", typeAtt.type().equals("D"));
            assertTrue("payloadAtt.getPayload() is null and it shouldn't be", payloadAtt.getPayload() != null);
            //safe here to just use the bytes, otherwise we should use offset, length
            byte[] bytes = payloadAtt.getPayload().bytes;
            assertTrue(bytes.length + " does not equal: " + payloadAtt.getPayload().length, bytes.length == payloadAtt.getPayload().length);
            assertTrue(payloadAtt.getPayload().offset + " does not equal: " + 0, payloadAtt.getPayload().offset == 0);
            float pay = PayloadHelper.decodeFloat(bytes);
            assertTrue(pay + " does not equal: " + 3, pay == 3);
        } else {
            assertTrue(typeAtt.type() + " is not null and it should be", typeAtt.type().equals("word"));
        }
    }
    assertTrue(seenDogs + " does not equal: " + true, seenDogs);
    // Complete the TokenStream contract; the original never released the stream.
    nptf.end();
    nptf.close();
}
Also used : MockTokenizer(org.apache.lucene.analysis.MockTokenizer) PayloadAttribute(org.apache.lucene.analysis.tokenattributes.PayloadAttribute) CharTermAttribute(org.apache.lucene.analysis.tokenattributes.CharTermAttribute) TypeAttribute(org.apache.lucene.analysis.tokenattributes.TypeAttribute) StringReader(java.io.StringReader)

Example 9 with PayloadAttribute

Use of org.apache.lucene.analysis.tokenattributes.PayloadAttribute in the Apache lucene-solr project.

From the class TestDelimitedPayloadTokenFilterFactory, method testDelim.

/**
 * Verifies that the DelimitedPayload factory, configured with a float encoder and a
 * custom '*' delimiter, attaches 0.1f as the payload of every produced token.
 */
public void testDelim() throws Exception {
    Reader reader = new StringReader("the*0.1 quick*0.1 red*0.1");
    TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false);
    ((Tokenizer) stream).setReader(reader);
    stream = tokenFilterFactory("DelimitedPayload", "encoder", "float", "delimiter", "*").create(stream);
    // Attributes are per-stream singletons; look the instance up once instead of
    // on every loop iteration.
    PayloadAttribute payAttr = stream.getAttribute(PayloadAttribute.class);
    assertNotNull(payAttr);
    int tokenCount = 0;
    stream.reset();
    while (stream.incrementToken()) {
        tokenCount++;
        // Fail with an assertion (not an NPE) if a token carries no payload.
        assertNotNull("token " + tokenCount + " has no payload", payAttr.getPayload());
        byte[] payData = payAttr.getPayload().bytes;
        assertNotNull(payData);
        float payFloat = PayloadHelper.decodeFloat(payData);
        assertEquals(0.1f, payFloat, 0.0f);
    }
    // Guard against the loop never running, which would let the test pass vacuously.
    assertEquals(3, tokenCount);
    stream.end();
    stream.close();
}
Also used : MockTokenizer(org.apache.lucene.analysis.MockTokenizer) TokenStream(org.apache.lucene.analysis.TokenStream) PayloadAttribute(org.apache.lucene.analysis.tokenattributes.PayloadAttribute) StringReader(java.io.StringReader) StringReader(java.io.StringReader) Reader(java.io.Reader) Tokenizer(org.apache.lucene.analysis.Tokenizer) MockTokenizer(org.apache.lucene.analysis.MockTokenizer)

Example 10 with PayloadAttribute

Use of org.apache.lucene.analysis.tokenattributes.PayloadAttribute in the Apache lucene-solr project.

From the class TestDelimitedPayloadTokenFilterFactory, method testEncoder.

/**
 * Verifies that the DelimitedPayload factory with a float encoder (and the default
 * '|' delimiter) attaches 0.1f as the payload of every produced token.
 */
public void testEncoder() throws Exception {
    Reader reader = new StringReader("the|0.1 quick|0.1 red|0.1");
    TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false);
    ((Tokenizer) stream).setReader(reader);
    stream = tokenFilterFactory("DelimitedPayload", "encoder", "float").create(stream);
    // Attributes are per-stream singletons; look the instance up once instead of
    // on every loop iteration.
    PayloadAttribute payAttr = stream.getAttribute(PayloadAttribute.class);
    assertNotNull(payAttr);
    int tokenCount = 0;
    stream.reset();
    while (stream.incrementToken()) {
        tokenCount++;
        // Fail with an assertion (not an NPE) if a token carries no payload.
        assertNotNull("token " + tokenCount + " has no payload", payAttr.getPayload());
        byte[] payData = payAttr.getPayload().bytes;
        assertNotNull(payData);
        float payFloat = PayloadHelper.decodeFloat(payData);
        assertEquals(0.1f, payFloat, 0.0f);
    }
    // Guard against the loop never running, which would let the test pass vacuously.
    assertEquals(3, tokenCount);
    stream.end();
    stream.close();
}
Also used : MockTokenizer(org.apache.lucene.analysis.MockTokenizer) TokenStream(org.apache.lucene.analysis.TokenStream) PayloadAttribute(org.apache.lucene.analysis.tokenattributes.PayloadAttribute) StringReader(java.io.StringReader) StringReader(java.io.StringReader) Reader(java.io.Reader) Tokenizer(org.apache.lucene.analysis.Tokenizer) MockTokenizer(org.apache.lucene.analysis.MockTokenizer)

Aggregations

PayloadAttribute (org.apache.lucene.analysis.tokenattributes.PayloadAttribute)27 CharTermAttribute (org.apache.lucene.analysis.tokenattributes.CharTermAttribute)16 TokenStream (org.apache.lucene.analysis.TokenStream)14 BytesRef (org.apache.lucene.util.BytesRef)13 OffsetAttribute (org.apache.lucene.analysis.tokenattributes.OffsetAttribute)12 PositionIncrementAttribute (org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute)11 TypeAttribute (org.apache.lucene.analysis.tokenattributes.TypeAttribute)10 FlagsAttribute (org.apache.lucene.analysis.tokenattributes.FlagsAttribute)7 StringReader (java.io.StringReader)6 IOException (java.io.IOException)5 Document (org.apache.lucene.document.Document)5 Reader (java.io.Reader)4 Token (org.apache.lucene.analysis.Token)4 Field (org.apache.lucene.document.Field)4 MockTokenizer (org.apache.lucene.analysis.MockTokenizer)3 TermToBytesRefAttribute (org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute)3 LinkedHashMap (java.util.LinkedHashMap)2 LinkedList (java.util.LinkedList)2 Map (java.util.Map)2 TreeMap (java.util.TreeMap)2