use of org.openrdf.model.Statement in project incubator-rya by apache.
the class AccumuloIndexSetTest method accumuloIndexSetTestWithTwoDirectProductBindingSetsWithConstantMapping.
@Test
public void accumuloIndexSetTestWithTwoDirectProductBindingSetsWithConstantMapping() throws Exception {
// Load some Triples into Rya.
final Set<Statement> triples = new HashSet<>();
triples.add(new StatementImpl(new URIImpl("http://Alice"), new URIImpl("http://hasAge"), new NumericLiteralImpl(14, XMLSchema.INTEGER)));
triples.add(new StatementImpl(new URIImpl("http://Alice"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")));
triples.add(new StatementImpl(new URIImpl("http://Bob"), new URIImpl("http://hasAge"), new NumericLiteralImpl(16, XMLSchema.INTEGER)));
triples.add(new StatementImpl(new URIImpl("http://Bob"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")));
triples.add(new StatementImpl(new URIImpl("http://Charlie"), new URIImpl("http://hasAge"), new NumericLiteralImpl(12, XMLSchema.INTEGER)));
triples.add(new StatementImpl(new URIImpl("http://Charlie"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")));
triples.add(new StatementImpl(new URIImpl("http://Eve"), new URIImpl("http://hasAge"), new NumericLiteralImpl(43, XMLSchema.INTEGER)));
triples.add(new StatementImpl(new URIImpl("http://Eve"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")));
for (final Statement triple : triples) {
ryaConn.add(triple);
}
// Create a PCJ table that will include those triples in its results.
final String sparql = "SELECT ?name ?age " + "{" + "?name <http://hasAge> ?age." + "?name <http://playsSport> \"Soccer\" " + "}";
final String pcjTableName = new PcjTableNameFactory().makeTableName(prefix, "testPcj");
// Create and populate the PCJ table.
PcjIntegrationTestingUtil.createAndPopulatePcj(ryaConn, accumuloConn, pcjTableName, sparql, new String[] { "name", "age" }, Optional.<PcjVarOrderFactory>absent());
final String sparql2 = "SELECT ?x " + "{" + "?x <http://hasAge> 16 ." + "?x <http://playsSport> \"Soccer\" " + "}";
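// The PCJ table was built for the first query; QueryVariableNormalizer matches the
// second query against it, mapping ?x onto ?name and pinning ?age to the constant 16.
// That constant binding is the "constant mapping" the test name refers to.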
final SPARQLParser p = new SPARQLParser();
final ParsedQuery pq1 = p.parseQuery(sparql, null);
final ParsedQuery pq2 = p.parseQuery(sparql2, null);
final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);
ais.setProjectionExpr((Projection) QueryVariableNormalizer.getNormalizedIndex(pq2.getTupleExpr(), pq1.getTupleExpr()).get(0));
final QueryBindingSet bs = new QueryBindingSet();
bs.addBinding("birthDate", new LiteralImpl("1983-03-17", new URIImpl("http://www.w3.org/2001/XMLSchema#date")));
bs.addBinding("x", new URIImpl("http://Alice"));
final QueryBindingSet bs2 = new QueryBindingSet();
bs2.addBinding("birthDate", new LiteralImpl("1983-04-18", new URIImpl("http://www.w3.org/2001/XMLSchema#date")));
bs2.addBinding("x", new URIImpl("http://Bob"));
final Set<BindingSet> bSets = Sets.<BindingSet>newHashSet(bs, bs2);
final CloseableIteration<BindingSet, QueryEvaluationException> results = ais.evaluate(bSets);
final Set<BindingSet> fetchedResults = new HashSet<>();
while (results.hasNext()) {
final BindingSet next = results.next();
fetchedResults.add(next);
}
Assert.assertEquals(Sets.<BindingSet>newHashSet(bs2), fetchedResults);
}
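The snippet above builds its triples with the concrete StatementImpl/URIImpl/LiteralImpl classes. Below is a minimal sketch of the same construction through a ValueFactory, the style the periodic tests later on this page use; Alice's two triples mirror the ones the test loads.
import org.openrdf.model.Statement;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.XMLSchema;

// Build the same kind of triples via a ValueFactory instead of direct *Impl constructors.
final ValueFactory vf = new ValueFactoryImpl();
final Statement age = vf.createStatement(
        vf.createURI("http://Alice"),
        vf.createURI("http://hasAge"),
        vf.createLiteral("14", XMLSchema.INTEGER));
final Statement sport = vf.createStatement(
        vf.createURI("http://Alice"),
        vf.createURI("http://playsSport"),
        vf.createLiteral("Soccer"));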
use of org.openrdf.model.Statement in project incubator-rya by apache.
the class PeriodicNotificationApplicationIT method periodicApplicationWithAggTest.
@Test
public void periodicApplicationWithAggTest() throws Exception {
final String sparql =
        "prefix function: <http://org.apache.rya/function#> " +
        "prefix time: <http://www.w3.org/2006/time#> " +
        "select (count(?obs) as ?total) where {" +
        "Filter(function:periodic(?time, 1, .25, time:minutes)) " +
        "?obs <uri:hasTime> ?time. " +
        "?obs <uri:hasId> ?id } ";
// make data
final int periodMult = 15;
final ValueFactory vf = new ValueFactoryImpl();
final DatatypeFactory dtf = DatatypeFactory.newInstance();
// Busy-wait until just past a period boundary so the bin results are more predictable.
while (System.currentTimeMillis() % (periodMult * 1000) > 500) {
;
}
final ZonedDateTime time = ZonedDateTime.now();
final ZonedDateTime zTime1 = time.minusSeconds(2 * periodMult);
final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
final Collection<Statement> statements = Sets.newHashSet(
        vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
        vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
        vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
        vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")),
        vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
        vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")));
try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
final Connector connector = ConfigUtils.getConnector(conf);
final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
addData(statements);
app.start();
// Group the binding sets that actually arrive over Kafka by period bin.
final Multimap<Long, BindingSet> actual = HashMultimap.create();
try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
consumer.subscribe(Arrays.asList(id));
final long end = System.currentTimeMillis() + 4 * periodMult * 1000;
long lastBinId = 0L;
long binId = 0L;
final List<Long> ids = new ArrayList<>();
while (System.currentTimeMillis() < end) {
final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult * 1000);
for (final ConsumerRecord<String, BindingSet> record : records) {
final BindingSet result = record.value();
binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
if (lastBinId != binId) {
lastBinId = binId;
ids.add(binId);
}
actual.put(binId, result);
}
}
Assert.assertEquals(3, actual.asMap().size());
int i = 0;
for (final Long ident : ids) {
Assert.assertEquals(1, actual.get(ident).size());
final BindingSet bs = actual.get(ident).iterator().next();
final Value val = bs.getValue("total");
final int total = Integer.parseInt(val.stringValue());
Assert.assertEquals(3 - i, total);
i++;
}
}
// After processing, no results should remain in the periodic storage.
final Set<BindingSet> remainingResults = new HashSet<>();
try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
results.forEachRemaining(x -> remainingResults.add(x));
Assert.assertEquals(0, remainingResults.size());
}
}
}
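All three periodic tests share the same Kafka consumption pattern: poll until a deadline and bucket each binding set by its PERIODIC_BIN_ID. A condensed sketch of that pattern follows; the broker address and topic are placeholders (the tests use their own kafkaProps and subscribe to the PCJ id), and the Rya imports for BindingSetSerDe and IncrementalUpdateConstants are the same ones the tests use.
import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.openrdf.query.BindingSet;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
// plus the Rya imports for BindingSetSerDe and IncrementalUpdateConstants used above

final Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092"); // placeholder broker address
final String topic = "pcj-id"; // placeholder; the tests subscribe to the PCJ id
final long pollMs = 15_000L;
final Multimap<Long, BindingSet> byBin = HashMultimap.create();
try (KafkaConsumer<String, BindingSet> consumer =
        new KafkaConsumer<>(props, new StringDeserializer(), new BindingSetSerDe())) {
    consumer.subscribe(Arrays.asList(topic));
    final long deadline = System.currentTimeMillis() + 4 * pollMs;
    while (System.currentTimeMillis() < deadline) {
        for (final ConsumerRecord<String, BindingSet> record : consumer.poll(pollMs)) {
            final BindingSet result = record.value();
            // Every periodic result carries the id of the period bin it belongs to.
            final long binId = Long.parseLong(result
                    .getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID)
                    .getValue().stringValue());
            byBin.put(binId, result);
        }
    }
}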
use of org.openrdf.model.Statement in project incubator-rya by apache.
the class PeriodicNotificationApplicationIT method periodicApplicationTest.
@Test
public void periodicApplicationTest() throws Exception {
final String sparql =
        "prefix function: <http://org.apache.rya/function#> " +
        "prefix time: <http://www.w3.org/2006/time#> " +
        "select ?obs ?id where {" +
        "Filter(function:periodic(?time, 1, .25, time:minutes)) " +
        "?obs <uri:hasTime> ?time. " +
        "?obs <uri:hasId> ?id } ";
// make data
final int periodMult = 15;
final ValueFactory vf = new ValueFactoryImpl();
final DatatypeFactory dtf = DatatypeFactory.newInstance();
// Busy-wait until just past a period boundary so the bin results are more predictable.
while (System.currentTimeMillis() % (periodMult * 1000) > 500) {
;
}
final ZonedDateTime time = ZonedDateTime.now();
final ZonedDateTime zTime1 = time.minusSeconds(2 * periodMult);
final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
final Collection<Statement> statements = Sets.newHashSet(
        vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
        vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
        vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
        vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")),
        vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
        vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")));
try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
final Connector connector = ConfigUtils.getConnector(conf);
final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
addData(statements);
app.start();
// Group the binding sets that actually arrive over Kafka by period bin.
final Multimap<Long, BindingSet> actual = HashMultimap.create();
try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
consumer.subscribe(Arrays.asList(id));
final long end = System.currentTimeMillis() + 4 * periodMult * 1000;
long lastBinId = 0L;
long binId = 0L;
final List<Long> ids = new ArrayList<>();
while (System.currentTimeMillis() < end) {
final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult * 1000);
for (final ConsumerRecord<String, BindingSet> record : records) {
final BindingSet result = record.value();
binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
if (lastBinId != binId) {
lastBinId = binId;
ids.add(binId);
}
actual.put(binId, result);
}
}
Assert.assertEquals(3, actual.asMap().size());
int i = 0;
for (final Long ident : ids) {
Assert.assertEquals(3 - i, actual.get(ident).size());
i++;
}
}
// After processing, no results should remain in the periodic storage.
final Set<BindingSet> remainingResults = new HashSet<>();
try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
results.forEachRemaining(x -> remainingResults.add(x));
Assert.assertEquals(0, remainingResults.size());
}
}
}
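The timestamp plumbing in these tests is reusable on its own: format a ZonedDateTime as an ISO instant, parse it into an XMLGregorianCalendar, and wrap that as an xsd:dateTime literal. A minimal sketch of the chain, inside a method that declares Exception as the tests do (DatatypeFactory.newInstance() throws a checked exception):
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import javax.xml.datatype.DatatypeFactory;
import org.openrdf.model.Literal;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;

final ValueFactory vf = new ValueFactoryImpl();
final DatatypeFactory dtf = DatatypeFactory.newInstance();
// "Now minus 30 seconds" rendered as an xsd:dateTime literal, the same chain
// the tests use for their observation times.
final ZonedDateTime zTime = ZonedDateTime.now().minusSeconds(30);
final String lexical = zTime.format(DateTimeFormatter.ISO_INSTANT);
final Literal timestamp = vf.createLiteral(dtf.newXMLGregorianCalendar(lexical));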
use of org.openrdf.model.Statement in project incubator-rya by apache.
the class PeriodicNotificationApplicationIT method periodicApplicationWithAggAndGroupByTest.
@Test
public void periodicApplicationWithAggAndGroupByTest() throws Exception {
final String sparql =
        "prefix function: <http://org.apache.rya/function#> " +
        "prefix time: <http://www.w3.org/2006/time#> " +
        "select ?type (count(?obs) as ?total) where {" +
        "Filter(function:periodic(?time, 1, .25, time:minutes)) " +
        "?obs <uri:hasTime> ?time. " +
        "?obs <uri:hasObsType> ?type } group by ?type";
// make data
final int periodMult = 15;
final ValueFactory vf = new ValueFactoryImpl();
final DatatypeFactory dtf = DatatypeFactory.newInstance();
// Busy-wait until just past a period boundary so the bin results are more predictable.
while (System.currentTimeMillis() % (periodMult * 1000) > 500) {
;
}
final ZonedDateTime time = ZonedDateTime.now();
final ZonedDateTime zTime1 = time.minusSeconds(2 * periodMult);
final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
final Collection<Statement> statements = Sets.newHashSet(
        vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
        vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasObsType"), vf.createLiteral("ship")),
        vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
        vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasObsType"), vf.createLiteral("airplane")),
        vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
        vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasObsType"), vf.createLiteral("ship")),
        vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
        vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasObsType"), vf.createLiteral("airplane")),
        vf.createStatement(vf.createURI("urn:obs_5"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
        vf.createStatement(vf.createURI("urn:obs_5"), vf.createURI("uri:hasObsType"), vf.createLiteral("automobile")));
try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
final Connector connector = ConfigUtils.getConnector(conf);
final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
addData(statements);
app.start();
final Multimap<Long, BindingSet> actual = HashMultimap.create();
try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
consumer.subscribe(Arrays.asList(id));
final long end = System.currentTimeMillis() + 4 * periodMult * 1000;
long lastBinId = 0L;
long binId = 0L;
final List<Long> ids = new ArrayList<>();
while (System.currentTimeMillis() < end) {
final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult * 1000);
for (final ConsumerRecord<String, BindingSet> record : records) {
final BindingSet result = record.value();
binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
if (lastBinId != binId) {
lastBinId = binId;
ids.add(binId);
}
actual.put(binId, result);
}
}
final Map<Long, Set<BindingSet>> expected = new HashMap<>();
final Set<BindingSet> expected1 = new HashSet<>();
final QueryBindingSet bs1 = new QueryBindingSet();
bs1.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
bs1.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
bs1.addBinding("type", vf.createLiteral("airplane"));
final QueryBindingSet bs2 = new QueryBindingSet();
bs2.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
bs2.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
bs2.addBinding("type", vf.createLiteral("ship"));
final QueryBindingSet bs3 = new QueryBindingSet();
bs3.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
bs3.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
bs3.addBinding("type", vf.createLiteral("automobile"));
expected1.add(bs1);
expected1.add(bs2);
expected1.add(bs3);
final Set<BindingSet> expected2 = new HashSet<>();
final QueryBindingSet bs4 = new QueryBindingSet();
bs4.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(1)));
bs4.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
bs4.addBinding("type", vf.createLiteral("airplane"));
final QueryBindingSet bs5 = new QueryBindingSet();
bs5.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(1)));
bs5.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
bs5.addBinding("type", vf.createLiteral("ship"));
expected2.add(bs4);
expected2.add(bs5);
final Set<BindingSet> expected3 = new HashSet<>();
final QueryBindingSet bs6 = new QueryBindingSet();
bs6.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(2)));
bs6.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
bs6.addBinding("type", vf.createLiteral("ship"));
final QueryBindingSet bs7 = new QueryBindingSet();
bs7.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(2)));
bs7.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
bs7.addBinding("type", vf.createLiteral("airplane"));
expected3.add(bs6);
expected3.add(bs7);
expected.put(ids.get(0), expected1);
expected.put(ids.get(1), expected2);
expected.put(ids.get(2), expected3);
Assert.assertEquals(3, actual.asMap().size());
for (final Long ident : ids) {
Assert.assertEquals(expected.get(ident), actual.get(ident));
}
}
// After processing, no results should remain in the periodic storage.
final Set<BindingSet> remainingResults = new HashSet<>();
try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
results.forEachRemaining(x -> remainingResults.add(x));
Assert.assertEquals(0, remainingResults.size());
}
}
}
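Reading the filter function:periodic(?time, 1, .25, time:minutes) as (window, period) is consistent with the arithmetic these tests assert: a bin opens every .25 minutes (15 s, matching periodMult) and covers a 1 minute window of observations. The following is a hypothetical illustration of that arithmetic only, not Rya API; all names here are illustrative.
// Hypothetical bin arithmetic for function:periodic(?time, 1, .25, time:minutes).
final long periodMs = 15_000L; // .25 minutes: a new bin opens every 15 s
final long windowMs = 60_000L; // 1 minute: each bin covers the trailing 60 s
final long observationTime = System.currentTimeMillis() - 30_000L; // e.g. a 30 s old observation
final long binStart = (System.currentTimeMillis() / periodMs) * periodMs; // latest bin boundary
final boolean inCurrentBin =
        observationTime > binStart - windowMs && observationTime <= binStart;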
use of org.openrdf.model.Statement in project incubator-rya by apache.
the class ConformanceTest method run.
@Override
public int run(final String[] args) throws Exception {
// Validate command
if (args.length < 1 || args.length > 2) {
System.out.println("Usage:\n");
System.out.println("\tConformanceTest [configuration options] " + "<test-file> <temp-dir>\n");
System.out.println("to load test data from an RDF file " + "(configuration property " + MRUtils.FORMAT_PROP + " specifies the format, default RDF/XML); or\n");
System.out.println("\tConformanceTest [configuration options] <temp-dir>\n");
System.out.println("to load test data from a Rya instance (specified " + "using standard configuration properties).\n");
System.out.println("For each test given, run the reasoner over the " + "premise ontology using a temporary Mini Accumulo instance " + "at <temp-dir>, then report conformance results.");
System.exit(1);
}
final Set<Value> conformanceTestURIs = new HashSet<>();
Collection<OwlTest> conformanceTests = new LinkedList<>();
final List<OwlTest> successes = new LinkedList<>();
final List<OwlTest> failures = new LinkedList<>();
final Configuration conf = getConf();
Repository repo;
File workingDir;
// If tests are in a file, stick them in a repository for querying
if (args.length == 2) {
workingDir = new File(PathUtils.clean(args[1]));
RDFFormat inputFormat = RDFFormat.RDFXML;
final String formatString = conf.get(MRUtils.FORMAT_PROP);
if (formatString != null) {
inputFormat = RDFFormat.valueOf(formatString);
}
repo = new SailRepository(new MemoryStore());
repo.initialize();
final RepositoryConnection conn = repo.getConnection();
try (FileInputStream fileInput = new FileInputStream(PathUtils.clean(args[0]))) {
conn.add(fileInput, "", inputFormat);
}
conn.close();
} else {
// Otherwise, get a Rya repository
workingDir = new File(PathUtils.clean(args[0]));
repo = MRReasoningUtils.getRepository(conf);
repo.initialize();
}
// Query for the tests we're interested in
final RepositoryConnection conn = repo.getConnection();
conformanceTestURIs.addAll(getTestURIs(conn, TEST_INCONSISTENCY));
conformanceTestURIs.addAll(getTestURIs(conn, TEST_CONSISTENCY));
conformanceTestURIs.addAll(getTestURIs(conn, TEST_ENTAILMENT));
conformanceTestURIs.addAll(getTestURIs(conn, TEST_NONENTAILMENT));
conformanceTests = getTests(conn, conformanceTestURIs);
conn.close();
repo.shutDown();
// Set up a MiniAccumulo cluster and set up conf to connect to it
final String username = "root";
final String password = "root";
final MiniAccumuloCluster mini = new MiniAccumuloCluster(workingDir, password);
mini.start();
conf.set(MRUtils.AC_INSTANCE_PROP, mini.getInstanceName());
conf.set(MRUtils.AC_ZK_PROP, mini.getZooKeepers());
conf.set(MRUtils.AC_USERNAME_PROP, username);
conf.set(MRUtils.AC_PWD_PROP, password);
conf.setBoolean(MRUtils.AC_MOCK_PROP, false);
conf.set(MRUtils.TABLE_PREFIX_PROPERTY, "temp_");
// Run the conformance tests
int result;
for (final OwlTest test : conformanceTests) {
System.out.println(test.uri);
result = runTest(conf, args, test);
if (result != 0) {
return result;
}
if (test.success) {
successes.add(test);
System.out.println("(SUCCESS)");
} else {
failures.add(test);
System.out.println("(FAIL)");
}
}
mini.stop();
System.out.println("\n" + successes.size() + " successful tests:");
for (final OwlTest test : successes) {
System.out.println("\t[SUCCESS] " + test.type() + " " + test.name);
}
System.out.println("\n" + failures.size() + " failed tests:");
for (final OwlTest test : failures) {
System.out.println("\t[FAIL] " + test.type() + " " + test.name);
System.out.println("\t\t(" + test.description + ")");
for (final Statement triple : test.error) {
if (test.types.contains(TEST_ENTAILMENT)) {
System.out.println("\t\tExpected: " + triple);
} else if (test.types.contains(TEST_NONENTAILMENT)) {
System.out.println("\t\tUnexpected: " + triple);
}
}
}
return 0;
}
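The MiniAccumulo bootstrap in the middle of run() is a self-contained pattern worth isolating. A minimal sketch, assuming accumulo-minicluster is on the classpath and an enclosing method that declares Exception (start() and stop() throw checked exceptions); the temp directory path is a placeholder:
import java.io.File;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;

// Stand up a throwaway Accumulo instance rooted at tempDir.
final File tempDir = new File("/tmp/mini-accumulo"); // placeholder path
final MiniAccumuloCluster mini = new MiniAccumuloCluster(tempDir, "root");
mini.start();
try {
    final String instance = mini.getInstanceName(); // feed these into client config,
    final String zookeepers = mini.getZooKeepers(); // e.g. MRUtils.AC_INSTANCE_PROP / AC_ZK_PROP
    // ... run work against the cluster ...
} finally {
    mini.stop();
}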