Use of at.ac.univie.mminf.luceneSKOS.analysis.SKOSTypeAttribute.SKOSType in project lucene-skos by behas: the class SKOSFilterFactory, method inform().
@Override
public void inform(ResourceLoader loader) {
  try {
    // only N3, RDF/XML, Turtle and ZIP thesaurus files are accepted
    if (skosFile.endsWith(".n3") || skosFile.endsWith(".rdf")
        || skosFile.endsWith(".ttl") || skosFile.endsWith(".zip")) {
      skosEngine = SKOSEngineFactory.getSKOSEngine(
          indexPath != null ? indexPath : "",
          ((SolrResourceLoader) loader).getConfigDir() + skosFile,
          languageString != null ? Arrays.asList(languageString.split(" ")) : null);
    } else {
      throw new IOException(
          "Allowed file suffixes are: .n3 (N3), .rdf (RDF/XML), .ttl (TURTLE) and .zip (ZIP)");
    }
  } catch (IOException e) {
    throw new RuntimeException("Could not instantiate SKOS engine", e);
  }
  // resolve the expansion type (URI or LABEL)
  if (expansionTypeString.equalsIgnoreCase(ExpansionType.URI.toString())) {
    expansionType = ExpansionType.URI;
  } else if (expansionTypeString.equalsIgnoreCase(ExpansionType.LABEL.toString())) {
    expansionType = ExpansionType.LABEL;
  } else {
    throw new IllegalArgumentException(
        "The property 'expansionType' must be either URI or LABEL");
  }
  // optional buffer size; must be a positive integer
  if (bufferSizeString != null) {
    bufferSize = Integer.parseInt(bufferSizeString);
    if (bufferSize < 1) {
      throw new IllegalArgumentException(
          "The property 'bufferSize' must be a positive (smallish) integer");
    }
  }
  // optional whitespace-separated list of SKOS types to expand
  if (typeString != null) {
    List<SKOSType> types = new ArrayList<>();
    for (String s : typeString.split(" ")) {
      SKOSType st = SKOSType.valueOf(s.toUpperCase(Locale.ROOT));
      if (st != null) {
        types.add(st);
      }
    }
    type = types;
  }
}
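For illustration only (not part of the project source): a minimal standalone sketch of the whitespace-split SKOSType parsing that inform() performs for its type property. The class name and the literal type string are hypothetical, and it assumes the enum defines PREF, ALT and BROADER constants, as the comments in processTermOnStack() below suggest.

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import at.ac.univie.mminf.luceneSKOS.analysis.SKOSTypeAttribute.SKOSType;

public class SKOSTypeParsingSketch {

  public static void main(String[] args) {
    // hypothetical value of the whitespace-separated type property
    String typeString = "pref alt broader";
    List<SKOSType> types = new ArrayList<>();
    for (String s : typeString.split(" ")) {
      // valueOf throws IllegalArgumentException for names that are not
      // enum constants, so in practice it never returns null
      types.add(SKOSType.valueOf(s.toUpperCase(Locale.ROOT)));
    }
    System.out.println(types); // expected: [PREF, ALT, BROADER]
  }
}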
Use of at.ac.univie.mminf.luceneSKOS.analysis.SKOSTypeAttribute.SKOSType in project lucene-skos by behas: the class AbstractSKOSFilter, method processTermOnStack().
/**
 * Replaces the current term (attributes) with term (attributes) from the stack.
 *
 * @throws IOException if the analyzer fails
 */
protected void processTermOnStack() throws IOException {
  ExpandedTerm expandedTerm = termStack.pop();
  String term = expandedTerm.getTerm();
  SKOSType termType = expandedTerm.getTermType();
  String sTerm;
  try {
    CharsRefBuilder builder = new CharsRefBuilder();
    sTerm = analyze(analyzer, term, builder).toString();
  } catch (IllegalArgumentException e) {
    // skip this term
    return;
  }
  // copies the values of all attribute implementations from this state into
  // the implementations of the target stream
  restoreState(current);
  // adds the expanded term to the term buffer
  termAtt.setEmpty().append(sTerm);
  // set position increment to zero to put multiple terms into the same position
  posIncrAtt.setPositionIncrement(0);
  // set offset of the original expression (useful for highlighting)
  if (expandedTerm.getStart() >= 0 && expandedTerm.getEnd() >= 0) {
    offsettAtt.setOffset(expandedTerm.getStart(), expandedTerm.getEnd());
  }
  // sets the type of the expanded term (pref, alt, broader, narrower, etc.)
  skosAtt.setSkosType(termType);
  // converts the SKOS attribute to a payload, which is propagated to the index
  byte[] bytes = PayloadHelper.encodeInt(skosAtt.getSkosType().ordinal());
  payloadAtt.setPayload(new BytesRef(bytes));
}
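Not part of the project source: a minimal sketch, assuming Lucene's PayloadHelper and BytesRef, of how a payload-aware consumer (for example a custom scorer) could decode the SKOSType ordinal that processTermOnStack() stores as a payload. SKOSType.BROADER is used purely as an example constant.

import org.apache.lucene.analysis.payloads.PayloadHelper;
import org.apache.lucene.util.BytesRef;

import at.ac.univie.mminf.luceneSKOS.analysis.SKOSTypeAttribute.SKOSType;

public class SKOSPayloadDecodeSketch {

  public static void main(String[] args) {
    // encode the ordinal of a SKOS type, as processTermOnStack() does
    byte[] bytes = PayloadHelper.encodeInt(SKOSType.BROADER.ordinal());
    BytesRef payload = new BytesRef(bytes);

    // recover the type on the consuming side
    int ordinal = PayloadHelper.decodeInt(payload.bytes, payload.offset);
    SKOSType decoded = SKOSType.values()[ordinal];
    System.out.println(decoded); // expected: BROADER
  }
}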