Use of org.openrdf.query.impl.MapBindingSet in project incubator-rya by apache.
The class QueryIT, method withCustomFilters.
@Test
public void withCustomFilters() throws Exception {
    final String sparql =
            "prefix ryafunc: <tag:rya.apache.org,2017:function#> " +
            "SELECT ?name ?age " +
            "{ " +
            "FILTER( ryafunc:isTeen(?age) ) . " +
            "?name <http://hasAge> ?age . " +
            "?name <http://playsSport> \"Soccer\" . " +
            "}";

    // Register a custom Filter.
    final Function fooFunction = new Function() {

        @Override
        public String getURI() {
            return "tag:rya.apache.org,2017:function#isTeen";
        }

        static final int TEEN_THRESHOLD = 20;

        @Override
        public Value evaluate(final ValueFactory valueFactory, final Value... args) throws ValueExprEvaluationException {
            if (args.length != 1) {
                throw new ValueExprEvaluationException("isTeen() requires exactly 1 argument, got " + args.length);
            }
            if (args[0] instanceof Literal) {
                final Literal literal = (Literal) args[0];
                final URI datatype = literal.getDatatype();
                // The isTeen() function accepts only numeric literals.
                if (datatype != null && XMLDatatypeUtil.isNumericDatatype(datatype)) {
                    if (XMLDatatypeUtil.isDecimalDatatype(datatype)) {
                        final BigDecimal bigValue = literal.decimalValue();
                        return BooleanLiteralImpl.valueOf(bigValue.compareTo(new BigDecimal(TEEN_THRESHOLD)) < 0);
                    } else if (XMLDatatypeUtil.isFloatingPointDatatype(datatype)) {
                        final double doubleValue = literal.doubleValue();
                        return BooleanLiteralImpl.valueOf(doubleValue < TEEN_THRESHOLD);
                    } else {
                        throw new ValueExprEvaluationException("unexpected datatype (expect decimal/int or floating) for function operand: " + args[0]);
                    }
                } else {
                    throw new ValueExprEvaluationException("unexpected input value (expect non-null and numeric) for function: " + args[0]);
                }
            } else {
                throw new ValueExprEvaluationException("unexpected input value (expect literal) for function: " + args[0]);
            }
        }
    };

    // Add our new function to the registry.
    FunctionRegistry.getInstance().add(fooFunction);

    // Create the Statements that will be loaded into Rya.
    final ValueFactory vf = new ValueFactoryImpl();
    final Collection<Statement> statements = Sets.newHashSet(
            vf.createStatement(vf.createURI("http://Alice"), vf.createURI("http://hasAge"), vf.createLiteral(18)),
            vf.createStatement(vf.createURI("http://Bob"), vf.createURI("http://hasAge"), vf.createLiteral(30)),
            vf.createStatement(vf.createURI("http://Charlie"), vf.createURI("http://hasAge"), vf.createLiteral(14)),
            vf.createStatement(vf.createURI("http://David"), vf.createURI("http://hasAge"), vf.createLiteral(16)),
            vf.createStatement(vf.createURI("http://Eve"), vf.createURI("http://hasAge"), vf.createLiteral(35)),
            vf.createStatement(vf.createURI("http://Alice"), vf.createURI("http://playsSport"), vf.createLiteral("Soccer")),
            vf.createStatement(vf.createURI("http://Bob"), vf.createURI("http://playsSport"), vf.createLiteral("Soccer")),
            vf.createStatement(vf.createURI("http://Charlie"), vf.createURI("http://playsSport"), vf.createLiteral("Basketball")),
            vf.createStatement(vf.createURI("http://Charlie"), vf.createURI("http://playsSport"), vf.createLiteral("Soccer")),
            vf.createStatement(vf.createURI("http://David"), vf.createURI("http://playsSport"), vf.createLiteral("Basketball")));

    // Create the expected results of the SPARQL query once the PCJ has been computed.
    final Set<BindingSet> expectedResults = new HashSet<>();

    MapBindingSet bs = new MapBindingSet();
    bs.addBinding("name", vf.createURI("http://Alice"));
    bs.addBinding("age", vf.createLiteral("18", XMLSchema.INTEGER));
    expectedResults.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("name", vf.createURI("http://Charlie"));
    bs.addBinding("age", vf.createLiteral("14", XMLSchema.INTEGER));
    expectedResults.add(bs);

    // Verify the end results of the query match the expected results.
    runTest(sparql, statements, expectedResults, ExportStrategy.RYA);
}
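A minimal standalone sketch (hypothetical class, not part of QueryIT) of how a function registered this way can be pulled back out of the Sesame FunctionRegistry and evaluated directly, which is a quick way to sanity-check a custom FILTER before wiring it into a PCJ; the registry lookup assumes Sesame's ServiceRegistry get(key) behavior:

import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.algebra.evaluation.function.Function;
import org.openrdf.query.algebra.evaluation.function.FunctionRegistry;

// Hypothetical smoke test; assumes the isTeen function above was already registered.
public class IsTeenSmokeTest {
    public static void main(final String[] args) throws Exception {
        final Function isTeen = FunctionRegistry.getInstance().get("tag:rya.apache.org,2017:function#isTeen");
        final ValueFactory vf = new ValueFactoryImpl();

        // 18 is below TEEN_THRESHOLD (20), so this prints "true".
        final Value young = isTeen.evaluate(vf, vf.createLiteral(18));
        System.out.println(young.stringValue());

        // 30 is not, so this prints "false".
        final Value notTeen = isTeen.evaluate(vf, vf.createLiteral(30));
        System.out.println(notTeen.stringValue());
    }
}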
Use of org.openrdf.query.impl.MapBindingSet in project incubator-rya by apache.
The class QueryIT, method dateTimeWithinNow.
@Test
public void dateTimeWithinNow() throws Exception {
    final ValueFactory vf = new ValueFactoryImpl();
    final DatatypeFactory dtf = DatatypeFactory.newInstance();
    FunctionRegistry.getInstance().add(new DateTimeWithinPeriod());

    final String sparql =
            "PREFIX fn: <" + FN.NAMESPACE + ">" +
            "SELECT ?event ?startTime WHERE { ?event <uri:startTime> ?startTime. " +
            "FILTER(fn:dateTimeWithin(?startTime, NOW(), 30, <" + OWLTime.SECONDS_URI + "> ))}";

    final ZonedDateTime zTime = ZonedDateTime.now();
    final String time = zTime.format(DateTimeFormatter.ISO_INSTANT);

    final ZonedDateTime zTime1 = zTime.minusSeconds(30);
    final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);

    final Literal lit = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
    final Literal lit1 = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));

    // Create the Statements that will be loaded into Rya.
    final Collection<Statement> statements = Sets.newHashSet(
            vf.createStatement(vf.createURI("uri:event1"), vf.createURI("uri:startTime"), lit),
            vf.createStatement(vf.createURI("uri:event2"), vf.createURI("uri:startTime"), lit1));

    // Create the expected results of the SPARQL query once the PCJ has been computed.
    final Set<BindingSet> expectedResults = new HashSet<>();

    final MapBindingSet bs = new MapBindingSet();
    bs.addBinding("event", vf.createURI("uri:event1"));
    bs.addBinding("startTime", lit);
    expectedResults.add(bs);

    // Verify the end results of the query match the expected results.
    runTest(sparql, statements, expectedResults, ExportStrategy.RYA);
}
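The expected results above rely on a boundary detail: uri:event1 starts exactly at NOW() and matches, while uri:event2 starts 30 seconds earlier and is excluded. A rough standalone illustration of that check using plain java.time (a sketch of the behavior the test expects, not Rya's DateTimeWithinPeriod implementation; the strict 30-second boundary is inferred from the expected results):

import java.time.Duration;
import java.time.ZonedDateTime;

// Hypothetical sketch; the real check lives in Rya's DateTimeWithinPeriod function.
public class WithinPeriodSketch {

    // True when 'start' falls strictly within the last 'seconds' seconds before 'now'.
    static boolean within(final ZonedDateTime start, final ZonedDateTime now, final long seconds) {
        final Duration elapsed = Duration.between(start, now);
        return !elapsed.isNegative() && elapsed.compareTo(Duration.ofSeconds(seconds)) < 0;
    }

    public static void main(final String[] args) {
        final ZonedDateTime now = ZonedDateTime.now();
        System.out.println(within(now, now, 30));                  // true, like uri:event1
        System.out.println(within(now.minusSeconds(30), now, 30)); // false, like uri:event2
    }
}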
Use of org.openrdf.query.impl.MapBindingSet in project incubator-rya by apache.
The class RyaInputIncrementalUpdateIT, method historicAndStreamMultiVariables.
@Test
public void historicAndStreamMultiVariables() throws Exception {
    // A query that finds people who talk to other people and work at Chipotle.
    final String sparql =
            "SELECT ?x ?y " +
            "WHERE { " +
            "?x <http://talksTo> ?y. " +
            "?x <http://worksAt> <http://Chipotle>." +
            "}";

    // Triples that are loaded into Rya before the PCJ is created.
    final ValueFactory vf = new ValueFactoryImpl();
    final Set<Statement> historicTriples = Sets.newHashSet(
            vf.createStatement(vf.createURI("http://Alice"), vf.createURI("http://talksTo"), vf.createURI("http://Eve")),
            vf.createStatement(vf.createURI("http://Alice"), vf.createURI("http://worksAt"), vf.createURI("http://Chipotle")),
            vf.createStatement(vf.createURI("http://Joe"), vf.createURI("http://worksAt"), vf.createURI("http://Chipotle")));
    // Triples that will be streamed into Fluo after the PCJ has been created.
    final Set<Statement> streamedTriples = Sets.newHashSet(
            vf.createStatement(vf.createURI("http://Frank"), vf.createURI("http://talksTo"), vf.createURI("http://Betty")),
            vf.createStatement(vf.createURI("http://Joe"), vf.createURI("http://talksTo"), vf.createURI("http://Alice")),
            vf.createStatement(vf.createURI("http://Frank"), vf.createURI("http://worksAt"), vf.createURI("http://Chipotle")));
    // Load the historic data into Rya.
    final SailRepositoryConnection ryaConn = super.getRyaSailRepository().getConnection();
    for (final Statement triple : historicTriples) {
        ryaConn.add(triple);
    }

    // Create the PCJ table.
    final Connector accumuloConn = super.getAccumuloConnector();
    final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(accumuloConn, getRyaInstanceName());
    final String pcjId = pcjStorage.createPcj(sparql);

    try (FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
        // Tell the Fluo app to maintain the PCJ.
        new CreateFluoPcj().withRyaIntegration(pcjId, pcjStorage, fluoClient, accumuloConn, getRyaInstanceName());
        super.getMiniFluo().waitForObservers();

        // Load the streaming data into Rya.
        for (final Statement triple : streamedTriples) {
            ryaConn.add(triple);
        }

        // Ensure Alice is a match.
        super.getMiniFluo().waitForObservers();

        final Set<BindingSet> expected = new HashSet<>();

        MapBindingSet bs = new MapBindingSet();
        bs.addBinding("x", vf.createURI("http://Alice"));
        bs.addBinding("y", vf.createURI("http://Eve"));
        expected.add(bs);

        bs = new MapBindingSet();
        bs.addBinding("x", vf.createURI("http://Frank"));
        bs.addBinding("y", vf.createURI("http://Betty"));
        expected.add(bs);

        bs = new MapBindingSet();
        bs.addBinding("x", vf.createURI("http://Joe"));
        bs.addBinding("y", vf.createURI("http://Alice"));
        expected.add(bs);

        final Set<BindingSet> results = new HashSet<>();
        try (CloseableIterator<BindingSet> resultIt = pcjStorage.listResults(pcjId)) {
            while (resultIt.hasNext()) {
                results.add(resultIt.next());
            }
        }

        assertEquals(expected, results);
    }
}
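Building the expected MapBindingSet rows by hand gets repetitive. A small hypothetical helper (not part of the test) that captures the pattern used above for the ?x/?y projection:

import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.BindingSet;
import org.openrdf.query.impl.MapBindingSet;

// Hypothetical helper; the test builds each MapBindingSet inline instead.
public final class BindingSets {

    private static final ValueFactory VF = new ValueFactoryImpl();

    private BindingSets() {
    }

    // Builds one expected row, e.g. xy("http://Joe", "http://Alice").
    public static BindingSet xy(final String x, final String y) {
        final MapBindingSet bs = new MapBindingSet();
        bs.addBinding("x", VF.createURI(x));
        bs.addBinding("y", VF.createURI(y));
        return bs;
    }
}

With a helper like this the three expected rows collapse to expected.add(BindingSets.xy("http://Alice", "http://Eve")) and so on; the assertEquals still holds because BindingSet equality is defined over the bindings themselves, not the instances.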
Use of org.openrdf.query.impl.MapBindingSet in project incubator-rya by apache.
The class BindingSetUtil, method keepBindings.
/**
* Create a new {@link BindingSet} that only includes the bindings whose names appear within the {@code variableOrder}.
* If no binding is found for a variable, that variable is simply omitted from the resulting object.
*
* @param variableOrder - Defines which bindings will be kept. (not null)
* @param bindingSet - Contains the source {@link Binding}s. (not null)
* @return A new {@link BindingSet} containing only the specified bindings.
*/
public static BindingSet keepBindings(final VariableOrder variableOrder, final BindingSet bindingSet) {
    requireNonNull(variableOrder);
    requireNonNull(bindingSet);

    final MapBindingSet result = new MapBindingSet();
    for (final String bindingName : variableOrder) {
        if (bindingSet.hasBinding(bindingName)) {
            final Binding binding = bindingSet.getBinding(bindingName);
            result.addBinding(binding);
        }
    }
    return result;
}
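A short usage sketch (hypothetical; assumes Rya's VariableOrder and this BindingSetUtil class are on the classpath, and the VariableOrder import path shown is a guess):

import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder; // assumed package
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.BindingSet;
import org.openrdf.query.impl.MapBindingSet;

public class KeepBindingsExample {
    public static void main(final String[] args) {
        final ValueFactory vf = new ValueFactoryImpl();

        final MapBindingSet original = new MapBindingSet();
        original.addBinding("name", vf.createURI("http://Alice"));
        original.addBinding("age", vf.createLiteral(18));
        original.addBinding("sport", vf.createLiteral("Soccer"));

        // Keep "name" and "age"; "sport" is dropped, and "height" (never bound)
        // is silently omitted rather than causing an error.
        final BindingSet projected = BindingSetUtil.keepBindings(new VariableOrder("name", "age", "height"), original);

        // Prints the two kept names: name and age.
        System.out.println(projected.getBindingNames());
    }
}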
Use of org.openrdf.query.impl.MapBindingSet in project incubator-rya by apache.
The class PeriodicNotificationBinPrunerIT, method periodicPrunerTest.
@Test
public void periodicPrunerTest() throws Exception {
    String sparql =
            "prefix function: <http://org.apache.rya/function#> " +
            "prefix time: <http://www.w3.org/2006/time#> " +
            "select ?id (count(?obs) as ?total) where {" +
            "Filter(function:periodic(?time, 2, .5, time:hours)) " +
            "?obs <uri:hasTime> ?time. " +
            "?obs <uri:hasId> ?id } group by ?id";

    FluoClient fluo = new FluoClientImpl(super.getFluoConfiguration());

    // initialize resources and create pcj
    PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(super.getAccumuloConnector(), getRyaInstanceName());
    CreatePeriodicQuery createPeriodicQuery = new CreatePeriodicQuery(fluo, periodicStorage);
    String queryId = FluoQueryUtils.convertFluoQueryIdToPcjId(createPeriodicQuery.createPeriodicQuery(sparql).getQueryId());

    // create statements to ingest into Fluo
    final ValueFactory vf = new ValueFactoryImpl();
    final DatatypeFactory dtf = DatatypeFactory.newInstance();
    ZonedDateTime time = ZonedDateTime.now();
    long currentTime = time.toInstant().toEpochMilli();
    ZonedDateTime zTime1 = time.minusMinutes(30);
    String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
    ZonedDateTime zTime2 = zTime1.minusMinutes(30);
    String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
    ZonedDateTime zTime3 = zTime2.minusMinutes(30);
    String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
    ZonedDateTime zTime4 = zTime3.minusMinutes(30);
    String time4 = zTime4.format(DateTimeFormatter.ISO_INSTANT);

    final Collection<Statement> statements = Sets.newHashSet(
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")),
            vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
            vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")),
            vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time4))),
            vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasId"), vf.createLiteral("id_4")),
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time4))),
            vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
            vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")));

    // add statements to Fluo
    InsertTriples inserter = new InsertTriples();
    statements.forEach(x -> inserter.insert(fluo, RdfToRyaConversions.convertStatement(x)));
    super.getMiniFluo().waitForObservers();
    // FluoITHelper.printFluoTable(fluo);

    // Create the expected results of the SPARQL query once the PCJ has been computed.
    final Set<BindingSet> expected1 = new HashSet<>();
    final Set<BindingSet> expected2 = new HashSet<>();
    final Set<BindingSet> expected3 = new HashSet<>();
    final Set<BindingSet> expected4 = new HashSet<>();

    long period = 1800000;
    long binId = (currentTime / period) * period;
    long bin1 = binId;
    long bin2 = binId + period;
    long bin3 = binId + 2 * period;
    long bin4 = binId + 3 * period;

    MapBindingSet bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin1));
    expected1.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin1));
    expected1.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_3", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin1));
    expected1.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_4", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin1));
    expected1.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin2));
    expected2.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin2));
    expected2.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_3", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin2));
    expected2.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin3));
    expected3.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin3));
    expected3.add(bs);

    bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin4));
    expected4.add(bs);

    // make sure that expected and actual results align after ingest
    compareResults(periodicStorage, queryId, bin1, expected1);
    compareResults(periodicStorage, queryId, bin2, expected2);
    compareResults(periodicStorage, queryId, bin3, expected3);
    compareResults(periodicStorage, queryId, bin4, expected4);

    BlockingQueue<NodeBin> bins = new LinkedBlockingQueue<>();
    PeriodicQueryPrunerExecutor pruner = new PeriodicQueryPrunerExecutor(periodicStorage, fluo, 1, bins);
    pruner.start();

    bins.add(new NodeBin(queryId, bin1));
    bins.add(new NodeBin(queryId, bin2));
    bins.add(new NodeBin(queryId, bin3));
    bins.add(new NodeBin(queryId, bin4));

    Thread.sleep(10000);

    compareResults(periodicStorage, queryId, bin1, new HashSet<>());
    compareResults(periodicStorage, queryId, bin2, new HashSet<>());
    compareResults(periodicStorage, queryId, bin3, new HashSet<>());
    compareResults(periodicStorage, queryId, bin4, new HashSet<>());

    compareFluoCounts(fluo, queryId, bin1);
    compareFluoCounts(fluo, queryId, bin2);
    compareFluoCounts(fluo, queryId, bin3);
    compareFluoCounts(fluo, queryId, bin4);

    pruner.stop();
}
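The bin ids above come from simple flooring arithmetic: with a 30-minute period (1,800,000 ms), bin1 is the start of the period that contains currentTime, and bin2 through bin4 are the next three periods. A tiny standalone sketch (illustration only, not test code) of that calculation:

// Hypothetical illustration of the bin-id arithmetic used in the test.
public class BinIdSketch {
    public static void main(final String[] args) {
        final long period = 1800000L; // 30 minutes in milliseconds
        final long currentTime = System.currentTimeMillis();

        // Floor currentTime to the start of its period, exactly as the test does.
        final long bin1 = (currentTime / period) * period;

        // The remaining bins are the next consecutive periods after bin1.
        for (int i = 0; i < 4; i++) {
            System.out.printf("bin%d = %d%n", i + 1, bin1 + i * period);
        }
    }
}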