Use of org.openrdf.model.ValueFactory in the project incubator-rya by Apache.
Source: class MongoFreeTextIndexerIT, method testContextSearch.
/**
 * Verifies that a statement stored with an explicit context is returned by a
 * free-text query with no constraints and with a matching context constraint,
 * but not when the constraint names a different context.
 */
@Test
public void testContextSearch() throws Exception {
    try (MongoFreeTextIndexer f = new MongoFreeTextIndexer()) {
        f.setConf(conf);
        f.init();
        // Build all values through the ValueFactory for consistency (the
        // original mixed direct URIImpl construction with vf.createURI).
        final ValueFactory vf = new ValueFactoryImpl();
        final URI subject = vf.createURI("foo:subj");
        final URI predicate = vf.createURI(RDFS.COMMENT.toString());
        final Value object = vf.createLiteral("this is a new hat");
        final URI context = vf.createURI("foo:context");
        final Statement statement = vf.createStatement(subject, predicate, object, context);
        f.storeStatement(RdfToRyaConversions.convertStatement(statement));
        f.flush();
        // Found with no constraints...
        assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
        // ...and when constrained to the context it was stored under...
        assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", new StatementConstraints().setContext(context))));
        // ...but not when constrained to a different context.
        assertEquals(Sets.newHashSet(), getSet(f.queryText("hat", new StatementConstraints().setContext(vf.createURI("foo:context2")))));
    }
}
Use of org.openrdf.model.ValueFactory in the project incubator-rya by Apache.
Source: class RyaDirectExample, method testAddAndFreeTextSearchWithPCJ.
/**
 * Exercises free-text (useekm fts:text) search backed by a precomputed join:
 * loads two labelled Person resources, then validates the match count of
 * several filter combinations.
 *
 * @param conn repository connection used to add data and run queries
 * @throws Exception if adding data or evaluating a query fails
 */
private static void testAddAndFreeTextSearchWithPCJ(final SailRepositoryConnection conn) throws Exception {
    // add data to the repository using the SailRepository add methods
    final ValueFactory f = conn.getValueFactory();
    final URI person = f.createURI("http://example.org/ontology/Person");
    String uuid;

    uuid = "urn:people:alice";
    conn.add(f.createURI(uuid), RDF.TYPE, person);
    // NOTE(review): the datatype is the literal string "xsd:string", not the
    // full XML Schema namespace URI — confirm this is intentional before
    // "fixing" it; the free-text indexer may treat it specially.
    conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string")));

    uuid = "urn:people:bobss";
    conn.add(f.createURI(uuid), RDF.TYPE, person);
    conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));

    // ///////////// search for alice: prefix "pal*" matches only "Palace"
    runCountQuery(conn, //
            "PREFIX fts: <http://rdf.useekm.com/fts#> " + //
            "SELECT ?person ?match ?e ?c ?l ?o " + //
            "{" + //
            "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . " + //
            "  FILTER(fts:text(?match, \"pal*\")) " + //
            "}", 1);

    // ///////////// search for alice and bob: both labels end in "SE" (Hose)
    runCountQuery(conn, //
            "PREFIX fts: <http://rdf.useekm.com/fts#> " + //
            "SELECT ?person ?match " + //
            "{" + //
            "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . " + //
            "  ?person a <http://example.org/ontology/Person> . " + //
            "  FILTER(fts:text(?match, \"(alice | bob) *SE\")) " + //
            "}", 2);

    // ///////////// search for alice only: the label must pass BOTH filters
    runCountQuery(conn, //
            "PREFIX fts: <http://rdf.useekm.com/fts#> " + //
            "SELECT ?person ?match " + //
            "{" + //
            "  ?person a <http://example.org/ontology/Person> . " + //
            "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . " + //
            "  FILTER(fts:text(?match, \"(alice | bob) *SE\")) " + //
            "  FILTER(fts:text(?match, \"pal*\")) " + //
            "}", 1);

    // ///////////// search for bob: "!alice & hose" excludes Alice's label
    runCountQuery(conn, //
            "PREFIX fts: <http://rdf.useekm.com/fts#> " + //
            "SELECT ?person ?match ?e ?c ?l ?o " + //
            "{" + //
            "  ?e a ?c . " + //
            "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . " + //
            "  ?e <uri:talksTo> ?o . " + //
            "  ?person a <http://example.org/ontology/Person> . " + //
            "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . " + //
            "  FILTER(fts:text(?match, \"!alice & hose\")) " + //
            "}", 1);
}

/**
 * Prepares and evaluates a SPARQL tuple query, logs the result count, and
 * validates that it equals {@code expectedCount}.
 */
private static void runCountQuery(final SailRepositoryConnection conn, final String queryString, final int expectedCount) throws Exception {
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    final CountingResultHandler tupleHandler = new CountingResultHandler();
    tupleQuery.evaluate(tupleHandler);
    log.info("Result count : " + tupleHandler.getCount());
    Validate.isTrue(tupleHandler.getCount() == expectedCount);
}
Use of org.openrdf.model.ValueFactory in the project incubator-rya by Apache.
Source: class PeriodicNotificationApplicationIT, method periodicApplicationWithAggTest.
/**
 * End-to-end test of the periodic notification application with an aggregating
 * query: counts observations inside a 1-minute window that advances every
 * 15 seconds, and checks the per-bin counts delivered over Kafka.
 */
@Test
public void periodicApplicationWithAggTest() throws Exception {
    // Periodic query: count of observations (?total) within a 1 minute
    // window advanced every .25 minutes (15 seconds).
    final String sparql =
            "prefix function: <http://org.apache.rya/function#> " +
            "prefix time: <http://www.w3.org/2006/time#> " +
            "select (count(?obs) as ?total) where {" +
            "Filter(function:periodic(?time, 1, .25, time:minutes)) " +
            "?obs <uri:hasTime> ?time. " +
            "?obs <uri:hasId> ?id } ";
    // make data
    final int periodMult = 15;
    final ValueFactory vf = new ValueFactoryImpl();
    final DatatypeFactory dtf = DatatypeFactory.newInstance();
    // Busy-wait until just past a period boundary so that bin assignment
    // (and therefore the expected counts below) is predictable.
    while (System.currentTimeMillis() % (periodMult * 1000) > 500) {
        ;
    }
    final ZonedDateTime time = ZonedDateTime.now();
    // Three observation timestamps, one period apart, walking back from now.
    final ZonedDateTime zTime1 = time.minusSeconds(2 * periodMult);
    final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
    final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
    final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
    final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
    final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
    // Each observation has a timestamp and an id.
    final Collection<Statement> statements = Sets.newHashSet(
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")),
            vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
            vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")));
    try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
        final Connector connector = ConfigUtils.getConnector(conf);
        final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
        final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
        // Register the periodic query; its PCJ id is also the Kafka topic name.
        final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
        addData(statements);
        app.start();
        final Multimap<Long, BindingSet> expected = HashMultimap.create();
        try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
            consumer.subscribe(Arrays.asList(id));
            // Collect notification results for roughly four periods.
            final long end = System.currentTimeMillis() + 4 * periodMult * 1000;
            long lastBinId = 0L;
            long binId = 0L;
            final List<Long> ids = new ArrayList<>();
            while (System.currentTimeMillis() < end) {
                final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult * 1000);
                for (final ConsumerRecord<String, BindingSet> record : records) {
                    final BindingSet result = record.value();
                    binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
                    // Track each distinct bin id in arrival order.
                    if (lastBinId != binId) {
                        lastBinId = binId;
                        ids.add(binId);
                    }
                    expected.put(binId, result);
                }
            }
            // Three bins should have produced results, each with a single
            // aggregated binding set.
            Assert.assertEquals(3, expected.asMap().size());
            int i = 0;
            for (final Long ident : ids) {
                Assert.assertEquals(1, expected.get(ident).size());
                final BindingSet bs = expected.get(ident).iterator().next();
                final Value val = bs.getValue("total");
                final int total = Integer.parseInt(val.stringValue());
                // Older observations age out of the window, so successive
                // bins count 3, then 2, then 1 observations.
                Assert.assertEquals(3 - i, total);
                i++;
            }
        }
        // After notification the results should have been purged from storage.
        final Set<BindingSet> expectedResults = new HashSet<>();
        try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
            results.forEachRemaining(x -> expectedResults.add(x));
            Assert.assertEquals(0, expectedResults.size());
        }
    }
}
Use of org.openrdf.model.ValueFactory in the project incubator-rya by Apache.
Source: class PeriodicNotificationApplicationIT, method periodicApplicationTest.
/**
 * End-to-end test of the periodic notification application with a
 * non-aggregating query: selects the observations inside a 1-minute window
 * that advances every 15 seconds, and checks the per-bin result counts
 * delivered over Kafka.
 */
@Test
public void periodicApplicationTest() throws Exception {
    // Periodic query: observations and their ids within a 1 minute window
    // advanced every .25 minutes (15 seconds).
    final String sparql =
            "prefix function: <http://org.apache.rya/function#> " +
            "prefix time: <http://www.w3.org/2006/time#> " +
            "select ?obs ?id where {" +
            "Filter(function:periodic(?time, 1, .25, time:minutes)) " +
            "?obs <uri:hasTime> ?time. " +
            "?obs <uri:hasId> ?id } ";
    // make data
    final int periodMult = 15;
    final ValueFactory vf = new ValueFactoryImpl();
    final DatatypeFactory dtf = DatatypeFactory.newInstance();
    // Busy-wait until just past a period boundary so that bin assignment
    // (and therefore the expected counts below) is predictable.
    while (System.currentTimeMillis() % (periodMult * 1000) > 500) {
        ;
    }
    final ZonedDateTime time = ZonedDateTime.now();
    // Three observation timestamps, one period apart, walking back from now.
    final ZonedDateTime zTime1 = time.minusSeconds(2 * periodMult);
    final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
    final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
    final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
    final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
    final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
    // Each observation has a timestamp and an id.
    final Collection<Statement> statements = Sets.newHashSet(
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")),
            vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
            vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")));
    try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
        final Connector connector = ConfigUtils.getConnector(conf);
        final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
        final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
        // Register the periodic query; its PCJ id is also the Kafka topic name.
        final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
        addData(statements);
        app.start();
        final Multimap<Long, BindingSet> expected = HashMultimap.create();
        try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
            consumer.subscribe(Arrays.asList(id));
            // Collect notification results for roughly four periods.
            final long end = System.currentTimeMillis() + 4 * periodMult * 1000;
            long lastBinId = 0L;
            long binId = 0L;
            final List<Long> ids = new ArrayList<>();
            while (System.currentTimeMillis() < end) {
                final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult * 1000);
                for (final ConsumerRecord<String, BindingSet> record : records) {
                    final BindingSet result = record.value();
                    binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
                    // Track each distinct bin id in arrival order.
                    if (lastBinId != binId) {
                        lastBinId = binId;
                        ids.add(binId);
                    }
                    expected.put(binId, result);
                }
            }
            // Three bins should have produced results; older observations age
            // out of the window, so successive bins hold 3, then 2, then 1
            // binding sets.
            Assert.assertEquals(3, expected.asMap().size());
            int i = 0;
            for (final Long ident : ids) {
                Assert.assertEquals(3 - i, expected.get(ident).size());
                i++;
            }
        }
        // After notification the results should have been purged from storage.
        final Set<BindingSet> expectedResults = new HashSet<>();
        try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
            results.forEachRemaining(x -> expectedResults.add(x));
            Assert.assertEquals(0, expectedResults.size());
        }
    }
}
Use of org.openrdf.model.ValueFactory in the project incubator-rya by Apache.
Source: class PeriodicNotificationApplicationIT, method periodicApplicationWithAggAndGroupByTest.
/**
 * End-to-end test of the periodic notification application with an
 * aggregating, grouped query: counts observations per observation type inside
 * a 1-minute window that advances every 15 seconds, and compares the grouped
 * per-bin binding sets delivered over Kafka against hand-built expectations.
 */
@Test
public void periodicApplicationWithAggAndGroupByTest() throws Exception {
    // Periodic query: per-type observation counts within a 1 minute window
    // advanced every .25 minutes (15 seconds).
    final String sparql =
            "prefix function: <http://org.apache.rya/function#> " +
            "prefix time: <http://www.w3.org/2006/time#> " +
            "select ?type (count(?obs) as ?total) where {" +
            "Filter(function:periodic(?time, 1, .25, time:minutes)) " +
            "?obs <uri:hasTime> ?time. " +
            "?obs <uri:hasObsType> ?type } group by ?type";
    // make data
    final int periodMult = 15;
    final ValueFactory vf = new ValueFactoryImpl();
    final DatatypeFactory dtf = DatatypeFactory.newInstance();
    // Busy-wait until just past a period boundary so that bin assignment
    // (and therefore the expectations below) is predictable.
    while (System.currentTimeMillis() % (periodMult * 1000) > 500) {
        ;
    }
    final ZonedDateTime time = ZonedDateTime.now();
    // Three observation timestamps, one period apart, walking back from now.
    final ZonedDateTime zTime1 = time.minusSeconds(2 * periodMult);
    final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
    final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
    final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
    final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
    final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
    // Five observations: a ship and an airplane at time1 and again at time2,
    // and one automobile at time3.
    final Collection<Statement> statements = Sets.newHashSet(
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasObsType"), vf.createLiteral("ship")),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasObsType"), vf.createLiteral("airplane")),
            vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
            vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasObsType"), vf.createLiteral("ship")),
            vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
            vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasObsType"), vf.createLiteral("airplane")),
            vf.createStatement(vf.createURI("urn:obs_5"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
            vf.createStatement(vf.createURI("urn:obs_5"), vf.createURI("uri:hasObsType"), vf.createLiteral("automobile")));
    try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
        final Connector connector = ConfigUtils.getConnector(conf);
        final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
        final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
        // Register the periodic query; its PCJ id is also the Kafka topic name.
        final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
        addData(statements);
        app.start();
        final Multimap<Long, BindingSet> actual = HashMultimap.create();
        try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
            consumer.subscribe(Arrays.asList(id));
            // Collect notification results for roughly four periods.
            final long end = System.currentTimeMillis() + 4 * periodMult * 1000;
            long lastBinId = 0L;
            long binId = 0L;
            final List<Long> ids = new ArrayList<>();
            while (System.currentTimeMillis() < end) {
                final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult * 1000);
                for (final ConsumerRecord<String, BindingSet> record : records) {
                    final BindingSet result = record.value();
                    binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
                    // Track each distinct bin id in arrival order.
                    if (lastBinId != binId) {
                        lastBinId = binId;
                        ids.add(binId);
                    }
                    actual.put(binId, result);
                }
            }
            // Build the expected grouped counts per bin. NOTE(review): this
            // assumes at least three distinct bins arrived within the polling
            // window; ids.get(...) throws IndexOutOfBoundsException otherwise.
            final Map<Long, Set<BindingSet>> expected = new HashMap<>();
            // Bin 1 still sees every observation: 2 airplanes, 2 ships, 1 automobile.
            final Set<BindingSet> expected1 = new HashSet<>();
            final QueryBindingSet bs1 = new QueryBindingSet();
            bs1.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
            bs1.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
            bs1.addBinding("type", vf.createLiteral("airplane"));
            final QueryBindingSet bs2 = new QueryBindingSet();
            bs2.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
            bs2.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
            bs2.addBinding("type", vf.createLiteral("ship"));
            final QueryBindingSet bs3 = new QueryBindingSet();
            bs3.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
            bs3.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
            bs3.addBinding("type", vf.createLiteral("automobile"));
            expected1.add(bs1);
            expected1.add(bs2);
            expected1.add(bs3);
            // Bin 2: the automobile (time3) has aged out of the window.
            final Set<BindingSet> expected2 = new HashSet<>();
            final QueryBindingSet bs4 = new QueryBindingSet();
            bs4.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(1)));
            bs4.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
            bs4.addBinding("type", vf.createLiteral("airplane"));
            final QueryBindingSet bs5 = new QueryBindingSet();
            bs5.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(1)));
            bs5.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
            bs5.addBinding("type", vf.createLiteral("ship"));
            expected2.add(bs4);
            expected2.add(bs5);
            // Bin 3: only the time1 ship and airplane remain, one of each.
            final Set<BindingSet> expected3 = new HashSet<>();
            final QueryBindingSet bs6 = new QueryBindingSet();
            bs6.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(2)));
            bs6.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
            bs6.addBinding("type", vf.createLiteral("ship"));
            final QueryBindingSet bs7 = new QueryBindingSet();
            bs7.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(2)));
            bs7.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
            bs7.addBinding("type", vf.createLiteral("airplane"));
            expected3.add(bs6);
            expected3.add(bs7);
            expected.put(ids.get(0), expected1);
            expected.put(ids.get(1), expected2);
            expected.put(ids.get(2), expected3);
            Assert.assertEquals(3, actual.asMap().size());
            for (final Long ident : ids) {
                Assert.assertEquals(expected.get(ident), actual.get(ident));
            }
        }
        // After notification the results should have been purged from storage.
        final Set<BindingSet> expectedResults = new HashSet<>();
        try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
            results.forEachRemaining(x -> expectedResults.add(x));
            Assert.assertEquals(0, expectedResults.size());
        }
    }
}
Aggregations