Example 36 with RDFTerm

Use of org.apache.clerezza.commons.rdf.RDFTerm in the Apache Stanbol project.

Class TestEnhancementInterfaces, method checkTextAnnotation.

/**
 * Checks if a text annotation is valid.
 */
private void checkTextAnnotation(Graph g, IRI textAnnotation) {
    Iterator<Triple> selectedTextIterator = g.filter(textAnnotation, ENHANCER_SELECTED_TEXT, null);
    // check if the selected text is added
    assertTrue(selectedTextIterator.hasNext());
    // test if the selected text is part of the SINGLE_SENTENCE
    RDFTerm object = selectedTextIterator.next().getObject();
    assertTrue(object instanceof Literal);
    assertTrue(SINGLE_SENTENCE.contains(((Literal) object).getLexicalForm()));
    // check if the selection context is added
    Iterator<Triple> selectionContextIterator = g.filter(textAnnotation, ENHANCER_SELECTION_CONTEXT, null);
    assertTrue(selectionContextIterator.hasNext());
    // test if the selection context is part of the SINGLE_SENTENCE
    object = selectionContextIterator.next().getObject();
    assertTrue(object instanceof Literal);
    assertTrue(SINGLE_SENTENCE.contains(((Literal) object).getLexicalForm()));
}
Also used : Triple(org.apache.clerezza.commons.rdf.Triple) Literal(org.apache.clerezza.commons.rdf.Literal) RDFTerm(org.apache.clerezza.commons.rdf.RDFTerm)
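
The assertions above rely on the generic Graph.filter(subject, predicate, object) lookup, where null acts as a wildcard. The following is a minimal standalone sketch of that pattern (not taken from the Stanbol sources); the annotation and property IRIs are illustrative placeholders standing in for the test's ENHANCER_SELECTED_TEXT constant:

import java.util.Iterator;

import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class SelectedTextSketch {

    public static void main(String[] args) {
        // illustrative IRIs, standing in for the text annotation and the
        // ENHANCER_SELECTED_TEXT property used in the test above
        IRI annotation = new IRI("urn:example:textAnnotation");
        IRI selectedText = new IRI("urn:example:selected-text");

        Graph g = new SimpleGraph();
        g.add(new TripleImpl(annotation, selectedText, new PlainLiteralImpl("Paris")));

        // filter(subject, predicate, object): null is a wildcard for the object
        Iterator<Triple> it = g.filter(annotation, selectedText, null);
        while (it.hasNext()) {
            RDFTerm object = it.next().getObject();
            if (object instanceof Literal) {
                System.out.println(((Literal) object).getLexicalForm()); // prints "Paris"
            }
        }
    }
}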

Example 37 with RDFTerm

Use of org.apache.clerezza.commons.rdf.RDFTerm in the Apache Stanbol project.

Class RdfEntityFactoryTest, method getRdfTypes.

private static Set<String> getRdfTypes(Graph graph, IRI node) {
    Iterator<Triple> typeStatements = graph.filter(node, Properties.RDF_TYPE, null);
    Set<String> typeStrings = new HashSet<String>();
    while (typeStatements.hasNext()) {
        RDFTerm type = typeStatements.next().getObject();
        assertTrue(type instanceof IRI);
        typeStrings.add(((IRI) type).getUnicodeString());
    }
    return typeStrings;
}
Also used : Triple(org.apache.clerezza.commons.rdf.Triple) IRI(org.apache.clerezza.commons.rdf.IRI) RDFTerm(org.apache.clerezza.commons.rdf.RDFTerm) HashSet(java.util.HashSet)
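
As a standalone illustration of the same lookup applied to a multi-valued property, the sketch below (not from the Stanbol sources; all node and type IRIs are made up) adds two rdf:type statements to an in-memory SimpleGraph and collects the type IRIs the way getRdfTypes does:

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class RdfTypeSketch {

    private static final IRI RDF_TYPE =
            new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");

    public static void main(String[] args) {
        IRI node = new IRI("urn:example:entity");
        Graph graph = new SimpleGraph();
        graph.add(new TripleImpl(node, RDF_TYPE, new IRI("urn:example:Person")));
        graph.add(new TripleImpl(node, RDF_TYPE, new IRI("urn:example:Agent")));

        // same pattern as getRdfTypes(..): collect the unicode string of each type IRI
        Set<String> typeStrings = new HashSet<String>();
        Iterator<Triple> typeStatements = graph.filter(node, RDF_TYPE, null);
        while (typeStatements.hasNext()) {
            RDFTerm type = typeStatements.next().getObject();
            if (type instanceof IRI) {
                typeStrings.add(((IRI) type).getUnicodeString());
            }
        }
        System.out.println(typeStrings); // both type IRIs, in no particular order
    }
}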

Example 38 with RDFTerm

Use of org.apache.clerezza.commons.rdf.RDFTerm in the Apache Stanbol project.

Class ExecutionMetadataHelper, method getExecutionPlanNode.

/**
 * Getter for the ep:ExecutionPlan node of the parsed em:ChainExecution node.
 * @param em the execution metadata
 * @param chainExecutionNode the chain execution node
 * @return the execution plan node, or <code>null</code> if not present
 */
public static BlankNodeOrIRI getExecutionPlanNode(Graph em, BlankNodeOrIRI chainExecutionNode) {
    Iterator<Triple> it = em.filter(chainExecutionNode, EXECUTION_PLAN, null);
    if (it.hasNext()) {
        Triple t = it.next();
        RDFTerm r = t.getObject();
        if (r instanceof BlankNodeOrIRI) {
            return (BlankNodeOrIRI) r;
        } else {
            throw new IllegalStateException("Value of the property " + EXECUTION_PLAN + " MUST BE a BlankNodeOrIRI (triple: '" + t + "')!");
        }
    } else {
        return null;
    }
}
Also used : Triple(org.apache.clerezza.commons.rdf.Triple) BlankNodeOrIRI(org.apache.clerezza.commons.rdf.BlankNodeOrIRI) RDFTerm(org.apache.clerezza.commons.rdf.RDFTerm)
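
A hypothetical, self-contained variant of this single-value lookup is sketched below: it resolves the one BlankNodeOrIRI value of a property, returns null when the property is absent, and fails if the value is not a BlankNodeOrIRI. The hasPlan property IRI is a placeholder, not the Stanbol EXECUTION_PLAN constant:

import java.util.Iterator;

import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class SingleReferenceSketch {

    /** Returns the single BlankNodeOrIRI value of the property, or null if missing. */
    static BlankNodeOrIRI getReference(Graph g, BlankNodeOrIRI subject, IRI property) {
        Iterator<Triple> it = g.filter(subject, property, null);
        if (!it.hasNext()) {
            return null;
        }
        Triple t = it.next();
        RDFTerm r = t.getObject();
        if (r instanceof BlankNodeOrIRI) {
            return (BlankNodeOrIRI) r;
        }
        throw new IllegalStateException("Value of " + property
                + " must be a BlankNodeOrIRI (triple: " + t + ")!");
    }

    public static void main(String[] args) {
        IRI hasPlan = new IRI("urn:example:hasPlan"); // placeholder property
        BlankNodeOrIRI chainExecution = new BlankNode();
        BlankNodeOrIRI executionPlan = new BlankNode();

        Graph em = new SimpleGraph();
        em.add(new TripleImpl(chainExecution, hasPlan, executionPlan));

        System.out.println(getReference(em, chainExecution, hasPlan) != null); // true
        System.out.println(getReference(em, executionPlan, hasPlan));          // null
    }
}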

Example 39 with RDFTerm

Use of org.apache.clerezza.commons.rdf.RDFTerm in the Apache Stanbol project.

Class ExecutionPlanHelper, method getExecutionNodes.

/**
 * Getter for the set of ExecutionNodes that are part of an execution plan.
 * @param ep the execution plan graph
 * @param executionPlanNode the execution plan node
 * @return the set of execution nodes referenced by the execution plan node
 */
public static Set<BlankNodeOrIRI> getExecutionNodes(Graph ep, final BlankNodeOrIRI executionPlanNode) {
    if (ep == null) {
        throw new IllegalArgumentException("The parsed graph with the ExecutionPlan MUST NOT be NULL!");
    }
    if (executionPlanNode == null) {
        throw new IllegalArgumentException("The parsed execution plan node MUST NOT be NULL!");
    }
    Set<BlankNodeOrIRI> executionNodes = new HashSet<BlankNodeOrIRI>();
    Iterator<Triple> it = ep.filter(executionPlanNode, HAS_EXECUTION_NODE, null);
    while (it.hasNext()) {
        Triple t = it.next();
        RDFTerm node = t.getObject();
        if (node instanceof BlankNodeOrIRI) {
            executionNodes.add((BlankNodeOrIRI) node);
        } else {
            throw new IllegalStateException("The value of the " + HAS_EXECUTION_NODE + " property MUST BE a BlankNodeOrIRI (triple: " + t + ")!");
        }
    }
    return executionNodes;
}
Also used : Triple(org.apache.clerezza.commons.rdf.Triple) BlankNodeOrIRI(org.apache.clerezza.commons.rdf.BlankNodeOrIRI) RDFTerm(org.apache.clerezza.commons.rdf.RDFTerm) HashSet(java.util.HashSet)
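
To show how such a set of nodes is typically consumed (compare the loop over getExecutionNodes in the next example), here is a small self-contained sketch. The hasExecutionNode and engineName properties and the engine names are made up for illustration; they are not the Stanbol vocabulary:

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class ExecutionNodesSketch {

    // placeholder properties, standing in for HAS_EXECUTION_NODE and an engine-name property
    private static final IRI HAS_NODE = new IRI("urn:example:hasExecutionNode");
    private static final IRI NAME = new IRI("urn:example:engineName");

    public static void main(String[] args) {
        Graph ep = new SimpleGraph();
        BlankNodeOrIRI plan = new BlankNode();
        for (String engine : new String[] { "langdetect", "linking" }) {
            BlankNodeOrIRI node = new BlankNode();
            ep.add(new TripleImpl(plan, HAS_NODE, node));
            ep.add(new TripleImpl(node, NAME, new PlainLiteralImpl(engine)));
        }
        // collect the execution nodes (same pattern as getExecutionNodes above)
        Set<BlankNodeOrIRI> nodes = new HashSet<BlankNodeOrIRI>();
        Iterator<Triple> it = ep.filter(plan, HAS_NODE, null);
        while (it.hasNext()) {
            RDFTerm node = it.next().getObject();
            if (node instanceof BlankNodeOrIRI) {
                nodes.add((BlankNodeOrIRI) node);
            }
        }
        // consume the set: read the name literal attached to each node
        for (BlankNodeOrIRI node : nodes) {
            Iterator<Triple> names = ep.filter(node, NAME, null);
            if (names.hasNext()) {
                System.out.println(((Literal) names.next().getObject()).getLexicalForm());
            }
        }
    }
}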

Example 40 with RDFTerm

Use of org.apache.clerezza.commons.rdf.RDFTerm in the Apache Stanbol project.

Class EnhancementPropertyTest, method testExecutionPropertySupportOfExecutionPlanHelper.

/**
 * Tests if the {@link ExecutionPlanHelper} correctly adds Enhancement
 * Properties to generated Execution plans. <p>
 * NOTE: If this fails, tests for chain-level properties are also expected
 * to fail. It is only present to validate that the ExecutionPlan is correctly
 * generated by the {@link ExecutionPlanHelper}.
 * @throws ChainException
 */
@Test
public void testExecutionPropertySupportOfExecutionPlanHelper() throws ChainException {
    //the values we are setting
    Collection<String> dereferenceLanguages = Arrays.asList("en", "de");
    Integer maxSuggestions = Integer.valueOf(5);
    IRI maxSuggestionsProperty = new IRI(NamespaceEnum.ehp + PROPERTY_MAX_SUGGESTIONS);
    IRI dereferenceLanguagesProperty = new IRI(NamespaceEnum.ehp + PROPERTY_DEREFERENCE_LANGUAGES);
    //set up the map with the enhancement properties we want to set for the
    //Enhancement Chain
    Map<String, Map<String, Object>> enhancementProperties = new HashMap<String, Map<String, Object>>();
    Map<String, Object> chainProperties = new HashMap<String, Object>();
    chainProperties.put(PROPERTY_MAX_SUGGESTIONS, maxSuggestions);
    enhancementProperties.put(null, chainProperties);
    Map<String, Object> linkingProperties = new HashMap<String, Object>();
    linkingProperties.put(PROPERTY_DEREFERENCE_LANGUAGES, dereferenceLanguages);
    enhancementProperties.put(linking.getName(), linkingProperties);
    //create the ExecutionPlan
    ImmutableGraph ep = ExecutionPlanHelper.calculateExecutionPlan("test", engines, Collections.<String>emptySet(), Collections.<String>emptySet(), enhancementProperties);
    //now assert that the enhancement properties were correctly written
    //first the property we set on the chain level
    BlankNodeOrIRI epNode = ExecutionPlanHelper.getExecutionPlan(ep, "test");
    assertNotNull(epNode);
    Iterator<Triple> maxSuggestionValues = ep.filter(epNode, maxSuggestionsProperty, null);
    assertTrue(maxSuggestionValues.hasNext());
    RDFTerm maxSuggestionValue = maxSuggestionValues.next().getObject();
    assertFalse(maxSuggestionValues.hasNext());
    assertTrue(maxSuggestionValue instanceof Literal);
    assertEquals(maxSuggestions.toString(), ((Literal) maxSuggestionValue).getLexicalForm());
    assertEquals(maxSuggestions, LiteralFactory.getInstance().createObject(Integer.class, (Literal) maxSuggestionValue));
    //second the property we set for the linking engine
    boolean found = false;
    for (BlankNodeOrIRI ee : ExecutionPlanHelper.getExecutionNodes(ep, epNode)) {
        String engineName = ExecutionPlanHelper.getEngine(ep, ee);
        if (linking.getName().equals(engineName)) {
            found = true;
            Iterator<Triple> derefLangValues = ep.filter(ee, dereferenceLanguagesProperty, null);
            assertTrue(derefLangValues.hasNext());
            int numValues = 0;
            while (derefLangValues.hasNext()) {
                RDFTerm r = derefLangValues.next().getObject();
                assertTrue(r instanceof Literal);
                assertTrue(dereferenceLanguages.contains(((Literal) r).getLexicalForm()));
                numValues++;
            }
            assertEquals(dereferenceLanguages.size(), numValues);
        }
    }
    assertTrue("ExecutionNode for the Linking Engine was not present!", found);
    //NOTE: this does not validate that there are no other (unexpected)
    //      enhancement properties in the executionPlan
}
Also used : IRI(org.apache.clerezza.commons.rdf.IRI) BlankNodeOrIRI(org.apache.clerezza.commons.rdf.BlankNodeOrIRI) HashMap(java.util.HashMap) RDFTerm(org.apache.clerezza.commons.rdf.RDFTerm) Triple(org.apache.clerezza.commons.rdf.Triple) Literal(org.apache.clerezza.commons.rdf.Literal) Map(java.util.Map) ImmutableGraph(org.apache.clerezza.commons.rdf.ImmutableGraph) Test(org.junit.Test)
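
The maxSuggestions assertions above depend on LiteralFactory converting between Java objects and typed RDF literals. Below is a minimal standalone sketch of that round trip, outside the test fixture and with nothing Stanbol-specific assumed beyond the Clerezza LiteralFactory API:

import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.rdf.core.LiteralFactory;

public class TypedLiteralSketch {

    public static void main(String[] args) {
        LiteralFactory lf = LiteralFactory.getInstance();

        // Java Integer -> typed Literal
        Integer maxSuggestions = Integer.valueOf(5);
        Literal literal = lf.createTypedLiteral(maxSuggestions);
        System.out.println(literal.getLexicalForm()); // "5"

        // typed Literal -> Java Integer
        Integer restored = lf.createObject(Integer.class, literal);
        System.out.println(maxSuggestions.equals(restored)); // true
    }
}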

Aggregations

RDFTerm (org.apache.clerezza.commons.rdf.RDFTerm): 126
IRI (org.apache.clerezza.commons.rdf.IRI): 84
Triple (org.apache.clerezza.commons.rdf.Triple): 70
BlankNodeOrIRI (org.apache.clerezza.commons.rdf.BlankNodeOrIRI): 48
Literal (org.apache.clerezza.commons.rdf.Literal): 35
Test (org.junit.Test): 35
HashSet (java.util.HashSet): 30
HashMap (java.util.HashMap): 28
TripleImpl (org.apache.clerezza.commons.rdf.impl.utils.TripleImpl): 26
Graph (org.apache.clerezza.commons.rdf.Graph): 24
ContentItem (org.apache.stanbol.enhancer.servicesapi.ContentItem): 18
ArrayList (java.util.ArrayList): 17
PlainLiteralImpl (org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl): 16
EngineException (org.apache.stanbol.enhancer.servicesapi.EngineException): 13
OWLOntologyID (org.semanticweb.owlapi.model.OWLOntologyID): 13
SimpleGraph (org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph): 12
Collection (java.util.Collection): 10
IndexedGraph (org.apache.stanbol.commons.indexedgraph.IndexedGraph): 10
Lock (java.util.concurrent.locks.Lock): 9
IOException (java.io.IOException): 5