Use of org.openrdf.model.impl.LiteralImpl in project incubator-rya by apache.
In the class RdfFileInputFormatTest, the method testTrigInput:
@Test
public void testTrigInput() throws Exception {
    RdfFileInputFormat.setRDFFormat(job, RDFFormat.TRIG);
    init(TRIG_INPUT);
    Assert.assertTrue(reader.nextKeyValue());
    Assert.assertEquals(1, reader.getCurrentKey().get());
    Statement expected = new ContextStatementImpl(
            new URIImpl("http://www.example.org/exampleDocument#Monica"),
            new URIImpl("http://www.example.org/vocabulary#name"),
            new LiteralImpl("Monica Murphy"),
            new URIImpl("http://www.example.org/exampleDocument#G1"));
    Statement actual = RyaToRdfConversions.convertStatement(reader.getCurrentValue().getRyaStatement());
    Assert.assertEquals(expected, actual);
}
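For reference, org.openrdf.model.impl.LiteralImpl (the OpenRDF Sesame 2.x value class used throughout these examples) can be built as a plain, language-tagged, or datatyped literal. A minimal sketch, not taken from the Rya tests, showing the three constructor forms:

import org.openrdf.model.Literal;
import org.openrdf.model.impl.LiteralImpl;
import org.openrdf.model.vocabulary.XMLSchema;

public class LiteralImplSketch {
    public static void main(final String[] args) {
        // Plain literal, the form used for "Monica Murphy" in the test above.
        final Literal plain = new LiteralImpl("Monica Murphy");
        // Language-tagged literal (illustrative only, not used by the test).
        final Literal tagged = new LiteralImpl("Monica Murphy", "en");
        // Datatyped literal (illustrative only, not used by the test).
        final Literal typed = new LiteralImpl("42", XMLSchema.INTEGER);
        System.out.println(plain + " / " + tagged + " / " + typed);
    }
}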
Use of org.openrdf.model.impl.LiteralImpl in project incubator-rya by apache.
In the class PcjDocumentsWithMockTest, the method populatePcj:
@Test
public void populatePcj() throws Exception {
    final RdfCloudTripleStore ryaStore = new RdfCloudTripleStore();
    final MongoDBRyaDAO dao = new MongoDBRyaDAO();
    dao.setConf(new StatefulMongoDBRdfConfiguration(conf, getMongoClient()));
    dao.init();
    ryaStore.setRyaDAO(dao);
    ryaStore.initialize();
    final SailRepositoryConnection ryaConn = new RyaSailRepository(ryaStore).getConnection();
    try {
        // Load some Triples into Rya.
        final Set<Statement> triples = new HashSet<>();
        triples.add(new StatementImpl(new URIImpl("http://Alice"), new URIImpl("http://hasAge"), new NumericLiteralImpl(14, XMLSchema.INTEGER)));
        triples.add(new StatementImpl(new URIImpl("http://Alice"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")));
        triples.add(new StatementImpl(new URIImpl("http://Bob"), new URIImpl("http://hasAge"), new NumericLiteralImpl(16, XMLSchema.INTEGER)));
        triples.add(new StatementImpl(new URIImpl("http://Bob"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")));
        triples.add(new StatementImpl(new URIImpl("http://Charlie"), new URIImpl("http://hasAge"), new NumericLiteralImpl(12, XMLSchema.INTEGER)));
        triples.add(new StatementImpl(new URIImpl("http://Charlie"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")));
        triples.add(new StatementImpl(new URIImpl("http://Eve"), new URIImpl("http://hasAge"), new NumericLiteralImpl(43, XMLSchema.INTEGER)));
        triples.add(new StatementImpl(new URIImpl("http://Eve"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")));
        for (final Statement triple : triples) {
            ryaConn.add(triple);
        }

        // Create a PCJ table that will include those triples in its results.
        final String sparql =
                "SELECT ?name ?age " +
                "{" +
                "?name <http://hasAge> ?age." +
                "?name <http://playsSport> \"Soccer\" " +
                "}";
        final String pcjTableName = new PcjTableNameFactory().makeTableName(conf.getRyaInstanceName(), "testPcj");
        final MongoPcjDocuments pcjs = new MongoPcjDocuments(getMongoClient(), conf.getRyaInstanceName());
        pcjs.createAndPopulatePcj(ryaConn, pcjTableName, sparql);

        // Make sure the cardinality was updated.
        final PcjMetadata metadata = pcjs.getPcjMetadata(pcjTableName);
        assertEquals(4, metadata.getCardinality());
    } finally {
        ryaConn.close();
        ryaStore.shutDown();
    }
}
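The same triples could also be created through the Sesame ValueFactory API instead of the *Impl constructors; a minimal sketch, assuming only the standard OpenRDF classes (the class name ValueFactorySketch is illustrative):

import org.openrdf.model.Statement;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.XMLSchema;

public class ValueFactorySketch {
    public static void main(final String[] args) {
        final ValueFactory vf = ValueFactoryImpl.getInstance();
        // "14"^^xsd:integer, comparable to new NumericLiteralImpl(14, XMLSchema.INTEGER) above.
        final Statement age = vf.createStatement(
                vf.createURI("http://Alice"),
                vf.createURI("http://hasAge"),
                vf.createLiteral("14", XMLSchema.INTEGER));
        // Plain literal, comparable to new LiteralImpl("Soccer") above.
        final Statement sport = vf.createStatement(
                vf.createURI("http://Alice"),
                vf.createURI("http://playsSport"),
                vf.createLiteral("Soccer"));
        System.out.println(age + "\n" + sport);
    }
}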
Use of org.openrdf.model.impl.LiteralImpl in project incubator-rya by apache.
In the class AccumuloPcjIT, the method testEvaluateTwoIndexThreeVarOrder4ThreeBindingSet:
@Test
public void testEvaluateTwoIndexThreeVarOrder4ThreeBindingSet() throws PcjException, RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException, TupleQueryResultHandlerException, QueryEvaluationException, MalformedQueryException, SailException {
    final URI sub3 = new URIImpl("uri:entity3");
    final URI subclass3 = new URIImpl("uri:class3");
    final URI obj3 = new URIImpl("uri:obj3");
    final URI superclass = new URIImpl("uri:superclass");
    final URI superclass2 = new URIImpl("uri:superclass2");
    final URI superclass3 = new URIImpl("uri:superclass3");
    conn.add(sub3, RDF.TYPE, subclass3);
    conn.add(sub3, RDFS.LABEL, new LiteralImpl("label3"));
    conn.add(sub3, talksTo, obj3);
    conn.add(subclass, RDF.TYPE, superclass);
    conn.add(subclass2, RDF.TYPE, superclass2);
    conn.add(subclass3, RDF.TYPE, superclass3);
    conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
    conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
    conn.add(obj3, RDFS.LABEL, new LiteralImpl("label3"));
    conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
    conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
    final String indexSparqlString = //
            "" + //
            "SELECT ?c ?e ?l " + //
            "{" + //
            " ?e a ?c . " + //
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l " + //
            "}";
    final String indexSparqlString2 = //
            "" + //
            "SELECT ?o ?f ?c ?e ?l " + //
            "{" + //
            " ?e <uri:talksTo> ?o . " + //
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. " + //
            " ?c a ?f . " + //
            "}";
    final String queryString = //
            "" + //
            "SELECT ?e ?c ?l ?f ?o " + //
            "{" + //
            " ?e a ?c . " + //
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l. " + //
            " ?e <uri:talksTo> ?o . " + //
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. " + //
            " ?c a ?f . " + //
            "}";
    final CountingResultHandler crh1 = new CountingResultHandler();
    final CountingResultHandler crh2 = new CountingResultHandler();
    PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablename + 1, indexSparqlString,
            new String[] { "c", "e", "l" }, Optional.<PcjVarOrderFactory>absent());
    PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablename + 2, indexSparqlString2,
            new String[] { "o", "f", "c", "e", "l" }, Optional.<PcjVarOrderFactory>absent());
    conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1);
    PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, prefix);
    pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2);
    Assert.assertEquals(3, crh1.getCount());
    Assert.assertEquals(3, crh2.getCount());
}
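CountingResultHandler is a helper defined elsewhere in AccumuloPcjIT and its implementation is not shown on this page. A hypothetical sketch of a handler that simply counts solutions, assuming only the standard Sesame TupleQueryResultHandlerBase adapter:

import org.openrdf.query.BindingSet;
import org.openrdf.query.TupleQueryResultHandlerBase;

// Hypothetical stand-in for the CountingResultHandler used in the tests above.
public class CountingResultHandlerSketch extends TupleQueryResultHandlerBase {

    private int count = 0;

    @Override
    public void handleSolution(final BindingSet bindingSet) {
        // Count each solution (result row) pushed by evaluate().
        count++;
    }

    public int getCount() {
        return count;
    }
}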
Use of org.openrdf.model.impl.LiteralImpl in project incubator-rya by apache.
In the class AccumuloPcjIT, the method testEvaluateThreeIndexValidate:
@Test
public void testEvaluateThreeIndexValidate() throws Exception {
    final URI superclass = new URIImpl("uri:superclass");
    final URI superclass2 = new URIImpl("uri:superclass2");
    final URI sub = new URIImpl("uri:entity");
    subclass = new URIImpl("uri:class");
    obj = new URIImpl("uri:obj");
    talksTo = new URIImpl("uri:talksTo");
    final URI howlsAt = new URIImpl("uri:howlsAt");
    final URI subType = new URIImpl("uri:subType");
    final URI superSuperclass = new URIImpl("uri:super_superclass");
    conn.add(subclass, RDF.TYPE, superclass);
    conn.add(subclass2, RDF.TYPE, superclass2);
    conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
    conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
    conn.add(sub, howlsAt, superclass);
    conn.add(superclass, subType, superSuperclass);
    conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
    conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
    final String indexSparqlString = //
            "" + //
            "SELECT ?dog ?pig ?duck " + //
            "{" + //
            " ?pig a ?dog . " + //
            " ?pig <http://www.w3.org/2000/01/rdf-schema#label> ?duck " + //
            "}";
    final String indexSparqlString2 = //
            "" + //
            "SELECT ?o ?f ?e ?c ?l " + //
            "{" + //
            " ?e <uri:talksTo> ?o . " + //
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. " + //
            " ?c a ?f . " + //
            "}";
    final String indexSparqlString3 = //
            "" + //
            "SELECT ?wolf ?sheep ?chicken " + //
            "{" + //
            " ?wolf <uri:howlsAt> ?sheep . " + //
            " ?sheep <uri:subType> ?chicken. " + //
            "}";
    final String queryString = //
            "" + //
            "SELECT ?e ?c ?l ?f ?o " + //
            "{" + //
            " ?e a ?c . " + //
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l. " + //
            " ?e <uri:talksTo> ?o . " + //
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l. " + //
            " ?c a ?f . " + //
            " ?e <uri:howlsAt> ?f. " + //
            " ?f <uri:subType> ?o. " + //
            "}";
    PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablename + 1, indexSparqlString,
            new String[] { "dog", "pig", "duck" }, Optional.<PcjVarOrderFactory>absent());
    final AccumuloIndexSet ais1 = new AccumuloIndexSet(conf, tablename + 1);
    PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablename + 2, indexSparqlString2,
            new String[] { "o", "f", "e", "c", "l" }, Optional.<PcjVarOrderFactory>absent());
    final AccumuloIndexSet ais2 = new AccumuloIndexSet(conf, tablename + 2);
    PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablename + 3, indexSparqlString3,
            new String[] { "wolf", "sheep", "chicken" }, Optional.<PcjVarOrderFactory>absent());
    final AccumuloIndexSet ais3 = new AccumuloIndexSet(conf, tablename + 3);
    final List<ExternalTupleSet> index = new ArrayList<>();
    index.add(ais1);
    index.add(ais3);
    index.add(ais2);
    final SPARQLParser sp = new SPARQLParser();
    final ParsedQuery pq = sp.parseQuery(queryString, null);
    final List<TupleExpr> teList = Lists.newArrayList();
    final TupleExpr te = pq.getTupleExpr();
    final PCJOptimizer pcj = new PCJOptimizer(index, false, new AccumuloIndexSetProvider(conf));
    pcj.optimize(te, null, null);
    teList.add(te);
    final IndexPlanValidator ipv = new IndexPlanValidator(false);
    Assert.assertTrue(ipv.isValid(te));
}
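The query string above is parsed into an OpenRDF algebra tree (a TupleExpr) before the PCJOptimizer rewrites it and the IndexPlanValidator checks it. A minimal sketch, using only the standard Sesame parser classes, that prints the algebra for the small howlsAt/subType pattern used above:

import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.parser.ParsedQuery;
import org.openrdf.query.parser.sparql.SPARQLParser;

public class SparqlParseSketch {
    public static void main(final String[] args) throws Exception {
        final String sparql =
                "SELECT ?wolf ?sheep ?chicken " +
                "{" +
                " ?wolf <uri:howlsAt> ?sheep . " +
                " ?sheep <uri:subType> ?chicken. " +
                "}";
        // Parse the query and print its algebra; this is the kind of TupleExpr
        // that the PCJOptimizer rewrites and the IndexPlanValidator validates.
        final ParsedQuery pq = new SPARQLParser().parseQuery(sparql, null);
        final TupleExpr te = pq.getTupleExpr();
        System.out.println(te);
    }
}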
Use of org.openrdf.model.impl.LiteralImpl in project incubator-rya by apache.
In the class AccumuloPcjIT, the method testEvaluateTwoIndexTwoVarInvalidOrder:
@Test
public void testEvaluateTwoIndexTwoVarInvalidOrder() throws PcjException, RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException, TupleQueryResultHandlerException, QueryEvaluationException, MalformedQueryException, SailException, TableNotFoundException {
    conn.add(obj, RDFS.LABEL, new LiteralImpl("label"));
    conn.add(obj2, RDFS.LABEL, new LiteralImpl("label2"));
    final String indexSparqlString = //
            "" + //
            "SELECT ?e ?c ?l " + //
            "{" + //
            " ?e a ?c . " + //
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l " + //
            "}";
    final String indexSparqlString2 = //
            "" + //
            "SELECT ?e ?o ?l " + //
            "{" + //
            " ?e <uri:talksTo> ?o . " + //
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l " + //
            "}";
    final String queryString = //
            "" + //
            "SELECT ?e ?c ?l ?o " + //
            "{" + //
            " ?e a ?c . " + //
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . " + //
            " ?e <uri:talksTo> ?o . " + //
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l " + //
            "}";
    final CountingResultHandler crh1 = new CountingResultHandler();
    final CountingResultHandler crh2 = new CountingResultHandler();
    PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablename + 1, indexSparqlString,
            new String[] { "e", "c", "l" }, Optional.<PcjVarOrderFactory>absent());
    PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablename + 2, indexSparqlString2,
            new String[] { "e", "o", "l" }, Optional.<PcjVarOrderFactory>absent());
    conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh1);
    PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, prefix);
    pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(crh2);
    Assert.assertEquals(crh1.getCount(), crh2.getCount());
}