
Example 56 with Literal

use of org.apache.clerezza.commons.rdf.Literal in project stanbol by apache.

the class ExecutionMetadataHelper method setExecutionInProgress.

/**
 * Sets an execution node to in-progress. This also sets the start time to
 * the current time.
 * @param graph the execution metadata graph
 * @param execution the execution node to set to in-progress
 */
public static void setExecutionInProgress(Graph graph, BlankNodeOrIRI execution) {
    Literal dateTime = lf.createTypedLiteral(new Date());
    setStatus(graph, execution, STATUS_IN_PROGRESS);
    graph.add(new TripleImpl(execution, STARTED, dateTime));
}
Also used : Literal(org.apache.clerezza.commons.rdf.Literal) TripleImpl(org.apache.clerezza.commons.rdf.impl.utils.TripleImpl) Date(java.util.Date)
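
The helper above depends on a static LiteralFactory (lf) and on vocabulary constants (STATUS_IN_PROGRESS, STARTED) that the excerpt does not show. Below is a minimal, self-contained sketch of the same pattern (turning a java.util.Date into an xsd:dateTime typed Literal and attaching it to a node), assuming Clerezza commons-rdf and rdf.core are on the classpath; the urn:example IRIs are placeholders, not the real execution-metadata vocabulary.

import java.util.Date;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
import org.apache.clerezza.rdf.core.LiteralFactory;

public class ExecutionStatusSketch {

    // placeholder predicate, not the real ExecutionMetadata vocabulary
    private static final IRI STARTED = new IRI("urn:example:started");

    public static void main(String[] args) {
        Graph graph = new SimpleGraph();
        IRI execution = new IRI("urn:example:execution-1");
        // the Date is converted to a typed (xsd:dateTime) Literal by the factory
        Literal dateTime = LiteralFactory.getInstance().createTypedLiteral(new Date());
        graph.add(new TripleImpl(execution, STARTED, dateTime));
        System.out.println("started: " + dateTime.getLexicalForm());
    }
}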

Example 57 with Literal

use of org.apache.clerezza.commons.rdf.Literal in project stanbol by apache.

the class MetaGraphManager method buildResource.

protected IRI buildResource(final OWLOntologyID publicKey) {
    if (publicKey == null)
        throw new IllegalArgumentException("Cannot build a IRI resource on a null public key!");
    // The IRI is of the form ontologyIRI[:::versionIRI] (TODO use something less conventional?)
    // XXX should versionIRI also include the version IRI set by owners? Currently not
    // Remember not to sanitize logical identifiers.
    org.semanticweb.owlapi.model.IRI ontologyIri = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
    if (ontologyIri == null)
        throw new IllegalArgumentException("Cannot build a IRI resource on an anonymous public key!");
    log.debug("Searching for a meta graph entry for public key:");
    log.debug(" -- {}", publicKey);
    IRI match = null;
    LiteralFactory lf = LiteralFactory.getInstance();
    Literal oiri = lf.createTypedLiteral(new IRI(ontologyIri.toString()));
    Literal viri = versionIri == null ? null : lf.createTypedLiteral(new IRI(versionIri.toString()));
    for (Iterator<Triple> it = graph.filter(null, HAS_ONTOLOGY_IRI_URIREF, oiri); it.hasNext(); ) {
        RDFTerm subj = it.next().getSubject();
        log.debug(" -- Ontology IRI match found. Scanning");
        log.debug(" -- RDFTerm : {}", subj);
        if (!(subj instanceof IRI)) {
            log.debug(" ---- (uncomparable: skipping...)");
            continue;
        }
        if (viri != null) {
            // Must find matching versionIRI
            if (graph.contains(new TripleImpl((IRI) subj, HAS_VERSION_IRI_URIREF, viri))) {
                log.debug(" ---- Version IRI match!");
                match = (IRI) subj;
                // Found
                break;
            } else {
                log.debug(" ---- Expected version IRI match not found.");
                // There could be another with the right versionIRI.
                continue;
            }
        } else {
            // Must find unversioned resource
            if (graph.filter((IRI) subj, HAS_VERSION_IRI_URIREF, null).hasNext()) {
                log.debug(" ---- Unexpected version IRI found. Skipping.");
                continue;
            } else {
                log.debug(" ---- Unversioned match!");
                match = (IRI) subj;
                // Found
                break;
            }
        }
    }
    log.debug("Matching IRI in graph : {}", match);
    if (match == null)
        return new IRI(OntologyUtils.encode(publicKey));
    else
        return match;
}
Also used : Triple(org.apache.clerezza.commons.rdf.Triple) IRI(org.apache.clerezza.commons.rdf.IRI) Literal(org.apache.clerezza.commons.rdf.Literal) RDFTerm(org.apache.clerezza.commons.rdf.RDFTerm) TripleImpl(org.apache.clerezza.commons.rdf.impl.utils.TripleImpl) LiteralFactory(org.apache.clerezza.rdf.core.LiteralFactory)
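
The lookup above combines two Clerezza idioms: the ontology IRI is wrapped as a typed Literal so it can appear as a triple object, and candidates returned by graph.filter(...) are narrowed with graph.contains(...). The following is a self-contained sketch of that pattern against an in-memory SimpleGraph; the urn:example predicates are stand-ins for HAS_ONTOLOGY_IRI_URIREF and HAS_VERSION_IRI_URIREF, which are not part of the excerpt.

import java.util.Iterator;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
import org.apache.clerezza.rdf.core.LiteralFactory;

public class MetaGraphLookupSketch {

    // stand-in predicates for the meta-graph vocabulary used above
    private static final IRI HAS_ONTOLOGY_IRI = new IRI("urn:example:hasOntologyIRI");
    private static final IRI HAS_VERSION_IRI = new IRI("urn:example:hasVersionIRI");

    public static void main(String[] args) {
        LiteralFactory lf = LiteralFactory.getInstance();
        Graph graph = new SimpleGraph();
        IRI entry = new IRI("urn:example:entry-1");
        // the ontology/version IRIs are stored as typed Literals, not as IRI objects
        Literal oiri = lf.createTypedLiteral(new IRI("http://example.org/onto"));
        Literal viri = lf.createTypedLiteral(new IRI("http://example.org/onto/1.0"));
        graph.add(new TripleImpl(entry, HAS_ONTOLOGY_IRI, oiri));
        graph.add(new TripleImpl(entry, HAS_VERSION_IRI, viri));

        IRI match = null;
        for (Iterator<Triple> it = graph.filter(null, HAS_ONTOLOGY_IRI, oiri); it.hasNext(); ) {
            RDFTerm subj = it.next().getSubject();
            // only IRI subjects are comparable; require a matching version IRI as well
            if (subj instanceof IRI && graph.contains(new TripleImpl((IRI) subj, HAS_VERSION_IRI, viri))) {
                match = (IRI) subj;
                break;
            }
        }
        System.out.println("match: " + match);
    }
}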

Example 58 with Literal

use of org.apache.clerezza.commons.rdf.Literal in project stanbol by apache.

the class SpotlightEngineUtils method createEntityAnnotation.

/**
 * Creates a fise:EntityAnnotation for the parsed parameter and
 * adds it to the {@link ContentItem#getMetadata()}. <p>
 * This method assumes a write lock on the parsed content item.
 * @param annotation the Annotation
 * @param engine the engine
 * @param ci the content item
 * @param textAnnotation the TextAnnotation the created
 * EntityAnnotation links to by using dc:relation
 * @param language the language of the label of the referenced
 * Entity (or <code>null</code> if none).
 */
public static void createEntityAnnotation(Annotation annotation, EnhancementEngine engine, ContentItem ci, IRI textAnnotation, Language language) {
    Graph model = ci.getMetadata();
    IRI entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, engine);
    Literal label = new PlainLiteralImpl(annotation.surfaceForm.name, language);
    model.add(new TripleImpl(entityAnnotation, DC_RELATION, textAnnotation));
    model.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_LABEL, label));
    model.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_REFERENCE, annotation.uri));
    //set the fise:entity-type
    for (String type : annotation.getTypeNames()) {
        IRI annotationType = new IRI(type);
        model.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_TYPE, annotationType));
    }
    //TODO (rwesten): Please check: I use the similarityScore as the fise:confidence value
    model.add(new TripleImpl(entityAnnotation, ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(annotation.similarityScore)));
    //add spotlight specific information
    model.add(new TripleImpl(entityAnnotation, PROPERTY_PERCENTAGE_OF_SECOND_RANK, literalFactory.createTypedLiteral(annotation.percentageOfSecondRank)));
    model.add(new TripleImpl(entityAnnotation, PROPERTY_SUPPORT, literalFactory.createTypedLiteral(annotation.support)));
    model.add(new TripleImpl(entityAnnotation, PROPERTY_SIMILARITY_SCORE, literalFactory.createTypedLiteral(annotation.similarityScore)));
}
Also used : IRI(org.apache.clerezza.commons.rdf.IRI) Graph(org.apache.clerezza.commons.rdf.Graph) PlainLiteralImpl(org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl) Literal(org.apache.clerezza.commons.rdf.Literal) TripleImpl(org.apache.clerezza.commons.rdf.impl.utils.TripleImpl)
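
Two Literal variants are created above: a language-tagged label via PlainLiteralImpl plus a Language object, and a typed confidence value via LiteralFactory. Here is a stripped-down sketch of just that part, using urn:example properties instead of the real fise/enhancer vocabulary, which the excerpt only references through constants.

import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Language;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
import org.apache.clerezza.rdf.core.LiteralFactory;

public class EntityAnnotationSketch {

    // placeholder properties; the engine uses ENHANCER_ENTITY_LABEL, ENHANCER_CONFIDENCE, ...
    private static final IRI ENTITY_LABEL = new IRI("urn:example:entity-label");
    private static final IRI CONFIDENCE = new IRI("urn:example:confidence");

    public static void main(String[] args) {
        Graph model = new SimpleGraph();
        IRI entityAnnotation = new IRI("urn:example:entityAnnotation-1");
        // language-tagged plain literal: "Paris"@en
        Literal label = new PlainLiteralImpl("Paris", new Language("en"));
        // typed literal (xsd:double) for the confidence value
        Literal confidence = LiteralFactory.getInstance().createTypedLiteral(0.85d);
        model.add(new TripleImpl(entityAnnotation, ENTITY_LABEL, label));
        model.add(new TripleImpl(entityAnnotation, CONFIDENCE, confidence));
        System.out.println(label.getLanguage() + " / " + confidence.getDataType());
    }
}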

Example 59 with Literal

use of org.apache.clerezza.commons.rdf.Literal in project stanbol by apache.

the class OntologyMappings method addMediaResourceOntologyMappings.

public static void addMediaResourceOntologyMappings(OntologyMappings mappings) {
    mappings.addMappings(new PropertyMapping(ma + "hasContributor", DublinCore.CONTRIBUTOR.getName(), XMPDM.ARTIST.getName(), XMPDM.COMPOSER.getName()));
    mappings.addMapping(new ResourceMapping(ma + "hasLocation", new TypeMapping(ma + "Location"), new PropertyMapping(ma + "locationName", DublinCore.COVERAGE.getName())));
    mappings.addMappings(new PropertyMapping(ma + "hasCreator", DublinCore.CREATOR.getName(), MSOffice.AUTHOR, "initial-creator"));
    mappings.addMappings(new PropertyMapping(ma + "description", DublinCore.DESCRIPTION.getName()));
    mappings.addMappings(new PropertyMapping(ma + "hasFormat", DublinCore.FORMAT.getName(), HttpHeaders.CONTENT_TYPE));
    /*
         * Excerpt of the MA recommendation:
         *   The identifier of a media resource is represented in RDF by the URI 
         *   of the node representing that media resource. If a resource is 
         *   identified by several URI, owl:sameAs should be used.
         */
    mappings.addMappings(new PropertyMapping(OWL.sameAs, RDFS.Resource, DublinCore.IDENTIFIER.getName()));
    mappings.addMappings(new PropertyMapping(ma + "hasLanguage", DublinCore.LANGUAGE.getName(), HttpHeaders.CONTENT_LANGUAGE));
    mappings.addMappings(new PropertyMapping(ma + "editDate", XSD.dateTime, DublinCore.MODIFIED.getName(), MSOffice.LAST_SAVED.getName()));
    mappings.addMappings(new PropertyMapping(ma + "hasPublisher", DublinCore.PUBLISHER.getName()));
    mappings.addMappings(new PropertyMapping(ma + "hasRelatedResource", DublinCore.RELATION.getName()));
    mappings.addMappings(new PropertyMapping(ma + "copyright", RDFS.Resource, //DC:rights and cc:license
    DublinCore.RIGHTS.getName(), CreativeCommons.LICENSE_LOCATION, CreativeCommons.LICENSE_URL, XMPDM.COPYRIGHT.getName()));
    mappings.addMappings(new PropertyMapping(ma + "isMemberOf", DublinCore.SOURCE.getName()));
    mappings.addMappings(new PropertyMapping(ma + "hasKeyword", DublinCore.SUBJECT.getName(), MSOffice.KEYWORDS));
    mappings.addMappings(new PropertyMapping(ma + "title", DublinCore.TITLE.getName(), XMPDM.SCENE.getName(), XMPDM.TAPE_NAME.getName(), XMPDM.SHOT_NAME.getName()));
    mappings.addMapping(new PropertyMapping(ma + "alternativeTitle", XMPDM.ALT_TAPE_NAME.getName()));
    mappings.addMapping(new PropertyMapping(ma + "mainOriginalTitle", XMPDM.ALBUM.getName()));
    mappings.addMappings(new PropertyMapping(ma + "hasGenre", DublinCore.TYPE.getName(), XMPDM.GENRE.getName()));
    mappings.addMappings(new PropertyMapping(ma + "creationDate", XSD.dateTime, DublinCore.DATE.getName(), MSOffice.CREATION_DATE.getName(), "created"));
    mappings.addMapping(new PropertyMapping(ma + "description", DublinCore.DESCRIPTION.getName(), MSOffice.COMMENTS));
    mappings.addMappings(new PropertyMapping(ma + "hasContributor", MSOffice.LAST_AUTHOR, MSOffice.AUTHOR, XMPDM.ENGINEER.getName()));
    //other properties -> Media Ontology
    mappings.addMappings(new PropertyMapping(ma + "hasCreator", "producer", "initial-creator"));
    //EXIF -> Media Ontology
    mappings.addMappings(new PropertyMapping(ma + "frameHeight", XSD.int_, TIFF.IMAGE_LENGTH.getName()));
    mappings.addMappings(new PropertyMapping(ma + "frameWidth", XSD.int_, TIFF.IMAGE_WIDTH.getName()));
    mappings.addMappings(new PropertyMapping(ma + "creationDate", XSD.dateTime, TIFF.ORIGINAL_DATE.getName(), XMPDM.SHOT_DATE.getName()));
    //XMP -> Media Ontology
    //here we need to split up the metadata for the audio and video
    mappings.addMapping(new PropertyMapping(ma + "releaseDate", XSD.dateTime, XMPDM.RELEASE_DATE.getName()));
    mappings.addMapping(new ResourceMapping(ma + "hasTrack", new Mapping[] {}, new Mapping[] { //optional
    new PropertyMapping(ma + "hasFormat", XSD.string, XMPDM.AUDIO_CHANNEL_TYPE.getName()), new PropertyMapping(ma + "hasCompression", XSD.string, XMPDM.AUDIO_COMPRESSOR.getName()), new PropertyMapping(ma + "editDate", XMPDM.AUDIO_MOD_DATE.getName()), new PropertyMapping(ma + "samplingRate", XSD.int_, XMPDM.AUDIO_SAMPLE_RATE.getName()) }, new Mapping[] { new TypeMapping(ma + "MediaFragment"), new TypeMapping(ma + "Track"), new TypeMapping(ma + "AudioTrack") }));
    mappings.addMapping(new ResourceMapping(ma + "hasTrack", new Mapping[] {}, new Mapping[] { //optional
    new PropertyMapping(ma + "hasCompression", XSD.string, XMPDM.VIDEO_COMPRESSOR.getName()), new PropertyMapping(ma + "editDate", XMPDM.VIDEO_MOD_DATE.getName()), new PropertyMapping(ma + "frameRate", XSD.double_, XMPDM.VIDEO_FRAME_RATE.getName()) }, new Mapping[] { //additioanl
    new TypeMapping(ma + "MediaFragment"), new TypeMapping(ma + "Track"), new TypeMapping(ma + "VideoTrack"), new PropertyMapping(ma + "frameHeight", XSD.int_, TIFF.IMAGE_LENGTH.getName()), new PropertyMapping(ma + "frameWidth", XSD.int_, TIFF.IMAGE_WIDTH.getName()) }));
    mappings.addMapping(new PropertyMapping(ma + "numberOfTracks", XSD.int_, XMPDM.TRACK_NUMBER.getName()));
    mappings.addMapping(new PropertyMapping(ma + "averageBitRate", XSD.double_, new //we need to convert from MByte/min to kByte/sec
    Mapping.Converter() {

        @Override
        public RDFTerm convert(RDFTerm value) {
            if (value instanceof Literal && XSD.double_.equals(((Literal) value).getDataType())) {
                LiteralFactory lf = LiteralFactory.getInstance();
                double mm = lf.createObject(Double.class, (Literal) value);
                return lf.createTypedLiteral(Double.valueOf(mm * 1024 / 60));
            } else {
                //do not convert
                return value;
            }
        }
    }, XMPDM.FILE_DATA_RATE.getName()));
    //GEO -> Media Resource Ontology
    mappings.addMapping(new ResourceMapping(ma + "hasLocation",
        new Mapping[] { //required
            new PropertyMapping(ma + "locationLatitude", XSD.double_, Geographic.LATITUDE.getName()),
            new PropertyMapping(ma + "locationLongitude", XSD.double_, Geographic.LONGITUDE.getName()) },
        new Mapping[] { //optional
            new PropertyMapping(ma + "locationAltitude", XSD.double_, Geographic.ALTITUDE.getName()) },
        new Mapping[] { //additional
            new TypeMapping(ma + "Location") }));
}
Also used : Literal(org.apache.clerezza.commons.rdf.Literal) RDFTerm(org.apache.clerezza.commons.rdf.RDFTerm) LiteralFactory(org.apache.clerezza.rdf.core.LiteralFactory)
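
The anonymous Mapping.Converter above is where a Literal value is read back into Java: LiteralFactory.createObject(Double.class, literal) parses the xsd:double, the number is rescaled from MByte/min to kByte/sec (multiply by 1024, divide by 60), and a new typed Literal is returned. A standalone sketch of that round trip, without the Stanbol Mapping machinery:

import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.rdf.core.LiteralFactory;

public class BitRateConversionSketch {

    /** Converts an xsd:double literal holding MByte/min into one holding kByte/sec. */
    static Literal toKBytePerSec(Literal mbytePerMin) {
        LiteralFactory lf = LiteralFactory.getInstance();
        // read the literal back as a Java double ...
        double mm = lf.createObject(Double.class, mbytePerMin);
        // ... rescale (1 MByte = 1024 kByte, 1 min = 60 sec) and re-wrap as a typed literal
        return lf.createTypedLiteral(Double.valueOf(mm * 1024 / 60));
    }

    public static void main(String[] args) {
        Literal in = LiteralFactory.getInstance().createTypedLiteral(6.0d); // 6 MByte/min
        System.out.println(toKBytePerSec(in).getLexicalForm() + " kByte/sec"); // 102.4
    }
}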

Example 60 with Literal

use of org.apache.clerezza.commons.rdf.Literal in project stanbol by apache.

the class RdfRepresentation method addTypedLiteral.

private void addTypedLiteral(IRI field, Object literalValue) {
    Literal literal;
    try {
        literal = RdfResourceUtils.createLiteral(literalValue);
    } catch (NoConvertorException e) {
        log.info("No Converter for value type " + literalValue.getClass() + " (parsed for field " + field + ") use toString() to get String representation");
        literal = RdfResourceUtils.createLiteral(literalValue.toString(), null);
    }
    graphNode.addProperty(field, literal);
}
Also used : Literal(org.apache.clerezza.commons.rdf.Literal) NoConvertorException(org.apache.clerezza.rdf.core.NoConvertorException)
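
RdfResourceUtils.createLiteral is an Entityhub-internal helper, but the fallback pattern itself needs only Clerezza: attempt a typed conversion and, if no convertor is registered for the Java type, fall back to a plain literal of the toString() form. A sketch of that pattern, assuming that LiteralFactory.createTypedLiteral throws NoConvertorException for unsupported types, as the catch block above suggests:

import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.rdf.core.LiteralFactory;
import org.apache.clerezza.rdf.core.NoConvertorException;

public class LiteralFallbackSketch {

    /** Typed literal when a convertor exists, otherwise a plain literal of toString(). */
    static Literal toLiteral(Object value) {
        try {
            return LiteralFactory.getInstance().createTypedLiteral(value);
        } catch (NoConvertorException e) {
            // no convertor registered for value.getClass(): fall back to the string form
            return new PlainLiteralImpl(value.toString());
        }
    }

    public static void main(String[] args) {
        System.out.println(toLiteral(Integer.valueOf(42)).getDataType()); // typed literal
        System.out.println(toLiteral(new Object()).getLexicalForm());     // plain fallback
    }
}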

Aggregations

Literal (org.apache.clerezza.commons.rdf.Literal) 71
IRI (org.apache.clerezza.commons.rdf.IRI) 35
RDFTerm (org.apache.clerezza.commons.rdf.RDFTerm) 35
Triple (org.apache.clerezza.commons.rdf.Triple) 30
BlankNodeOrIRI (org.apache.clerezza.commons.rdf.BlankNodeOrIRI) 22
TripleImpl (org.apache.clerezza.commons.rdf.impl.utils.TripleImpl) 20
ArrayList (java.util.ArrayList) 16
PlainLiteralImpl (org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl) 16
Language (org.apache.clerezza.commons.rdf.Language) 12
Graph (org.apache.clerezza.commons.rdf.Graph) 11
Test (org.junit.Test) 10
HashSet (java.util.HashSet) 9
Date (java.util.Date) 8
Lock (java.util.concurrent.locks.Lock) 6
Entity (org.apache.stanbol.enhancer.engines.entitylinking.Entity) 5
HashMap (java.util.HashMap) 4
SimpleGraph (org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph) 4
NoConvertorException (org.apache.clerezza.rdf.core.NoConvertorException) 4
Representation (org.apache.stanbol.entityhub.servicesapi.model.Representation) 4
Collection (java.util.Collection) 3