Example use of org.apache.lucene.analysis.tokenattributes.FlagsAttributeImpl in the project sukija by ahomansikka.
Shown below: the test method of the class BaseFormTester.
/**
 * Reads text from {@code reader}, runs it through an {@code HVTokenizer} and a
 * {@code BaseFormFilter}, and writes one line per original word to {@code writer}:
 * {@code "Sana: <word>"}, followed by {@code " M <baseForms>"} when the FOUND flag
 * is set for that word.
 *
 * @param reader      input text to tokenize
 * @param writer      destination for the per-word report lines (flushed after each line)
 * @param voikko      morphological analyzer passed to {@code BaseFormFilter}
 * @param successOnly passed through to {@code BaseFormFilter}
 * @throws IOException if tokenization or writing fails
 */
public static void test(Reader reader, Writer writer, Voikko voikko, boolean successOnly) throws IOException {
    TokenStream t = new HVTokenizer();
    ((Tokenizer) t).setReader(reader);
    t = new BaseFormFilter(t, voikko, successOnly);
    // CharTermAttribute is registered only for its side effect on the stream;
    // the term text itself is not printed here.
    t.addAttribute(CharTermAttribute.class);
    BaseFormAttribute baseFormAtt = t.addAttribute(BaseFormAttribute.class);
    FlagsAttribute flagsAtt = t.addAttribute(FlagsAttribute.class);
    OriginalWordAttribute originalWordAtt = t.addAttribute(OriginalWordAttribute.class);

    String orig = "";
    TreeSet<String> tset = new TreeSet<String>();
    FlagsAttribute flagsA = new FlagsAttributeImpl();
    try {
        t.reset();
        while (t.incrementToken()) {
            // A token for a new original word arrived: flush the previous word first.
            if (!orig.equals("") && !orig.equals(originalWordAtt.getOriginalWord())) {
                writeWord(writer, orig, flagsA, tset);
                tset.clear();
            }
            orig = originalWordAtt.getOriginalWord();
            // Base forms accumulate across all tokens of the same original word.
            tset.addAll(baseFormAtt.getBaseForms());
            // NOTE(review): flags are overwritten per token, not OR-ed — the last
            // token of a word decides whether FOUND is reported. TODO confirm intended.
            flagsA.setFlags(flagsAtt.getFlags());
        }
        // Flush the last word. Skipped when the input produced no tokens at all
        // (the previous version printed a spurious "Sana: " line for empty input).
        if (!orig.equals("")) {
            writeWord(writer, orig, flagsA, tset);
        }
        t.end();
    } finally {
        t.close();
    }
}

/**
 * Writes one report line for {@code orig}: the word itself, plus its base forms
 * when the FOUND flag is set. Flushes the writer so output is visible immediately.
 */
private static void writeWord(Writer writer, String orig, FlagsAttribute flags, TreeSet<String> tset) throws IOException {
    writer.write("Sana: " + orig);
    if (Constants.hasFlag(flags, Constants.FOUND)) {
        writer.write(" M " + toString(tset));
    }
    writer.write("\n");
    writer.flush();
}
Aggregations