Use of org.openrdf.model.ValueFactory in project incubator-rya by apache.
The class KafkaExportIT, method max().
@Test
public void max() throws Exception {
// A query that finds the maximum price for an item within the inventory.
final String sparql = "SELECT (max(?price) as ?maxPrice) { " + "?item <urn:price> ?price . " + "}";
// Create the Statements that will be loaded into Rya.
final ValueFactory vf = new ValueFactoryImpl();
final Collection<Statement> statements = Sets.newHashSet(
    vf.createStatement(vf.createURI("urn:apple"), vf.createURI("urn:price"), vf.createLiteral(2.50)),
    vf.createStatement(vf.createURI("urn:gum"), vf.createURI("urn:price"), vf.createLiteral(0.99)),
    vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(4.99)));
// Create the PCJ in Fluo and load the statements into Rya.
final String pcjId = loadDataAndCreateQuery(sparql, statements);
// Create the expected results of the SPARQL query once the PCJ has been computed.
final MapBindingSet expectedResult = new MapBindingSet();
expectedResult.addBinding("maxPrice", vf.createLiteral(4.99));
// Ensure the last result matches the expected result.
final VisibilityBindingSet result = readLastResult(pcjId);
assertEquals(expectedResult, result);
}
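The readLastResult(pcjId) helper belongs to the test class and is not shown in this snippet. Below is a minimal sketch of how such a helper could drain the PCJ's Kafka topic and keep only the final exported VisibilityBindingSet; the broker address, the topic-per-PCJ-id naming, and the VisibilityBindingSetDeserializer are assumptions made for illustration and are not taken from KafkaExportIT.
import java.util.Collections;
import java.util.Properties;
import java.util.UUID;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
// Sketch only. VisibilityBindingSet is the same class used by the test above;
// VisibilityBindingSetDeserializer is a hypothetical value deserializer for it.
private VisibilityBindingSet readLastResultSketch(final String pcjId) {
    final Properties props = new Properties();
    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker
    props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "pcj-reader-" + UUID.randomUUID());
    props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    VisibilityBindingSet last = null;
    try (final KafkaConsumer<String, VisibilityBindingSet> consumer =
            new KafkaConsumer<>(props, new StringDeserializer(), new VisibilityBindingSetDeserializer())) {
        consumer.subscribe(Collections.singletonList(pcjId)); // assumes the topic is named after the PCJ id
        // Read whatever the exporter has published so far and keep the newest record.
        final ConsumerRecords<String, VisibilityBindingSet> records = consumer.poll(5000);
        for (final ConsumerRecord<String, VisibilityBindingSet> record : records) {
            last = record.value();
        }
    }
    return last;
}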
Use of org.openrdf.model.ValueFactory in project incubator-rya by apache.
The class KafkaExportIT, method groupBySingleBinding().
@Test
public void groupBySingleBinding() throws Exception {
// A query that groups the aggregated prices by one of the query's bindings (?item).
final String sparql = "SELECT ?item (avg(?price) as ?averagePrice) {" + "?item <urn:price> ?price . " + "} " + "GROUP BY ?item";
// Create the Statements that will be loaded into Rya.
final ValueFactory vf = new ValueFactoryImpl();
final Collection<Statement> statements = Sets.newHashSet(
    vf.createStatement(vf.createURI("urn:apple"), vf.createURI("urn:price"), vf.createLiteral(5.25)),
    vf.createStatement(vf.createURI("urn:apple"), vf.createURI("urn:price"), vf.createLiteral(7)),
    vf.createStatement(vf.createURI("urn:apple"), vf.createURI("urn:price"), vf.createLiteral(2.75)),
    vf.createStatement(vf.createURI("urn:banana"), vf.createURI("urn:price"), vf.createLiteral(2.75)),
    vf.createStatement(vf.createURI("urn:banana"), vf.createURI("urn:price"), vf.createLiteral(1.99)));
// Create the PCJ in Fluo and load the statements into Rya.
final String pcjId = loadDataAndCreateQuery(sparql, statements);
// Create the expected results of the SPARQL query once the PCJ has been computed.
final Set<VisibilityBindingSet> expectedResults = new HashSet<>();
MapBindingSet bs = new MapBindingSet();
bs.addBinding("item", vf.createURI("urn:apple"));
bs.addBinding("averagePrice", vf.createLiteral("5.0", XMLSchema.DECIMAL));
expectedResults.add(new VisibilityBindingSet(bs));
bs = new MapBindingSet();
bs.addBinding("item", vf.createURI("urn:banana"));
bs.addBinding("averagePrice", vf.createLiteral("2.37", XMLSchema.DECIMAL));
expectedResults.add(new VisibilityBindingSet(bs));
// Verify the end results of the query match the expected results.
final Set<VisibilityBindingSet> results = readGroupedResults(pcjId, new VariableOrder("item"));
assertEquals(expectedResults, results);
}
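readGroupedResults(pcjId, new VariableOrder("item")) is another helper of the test class. Because the incremental exporter re-publishes a group's aggregation every time a new statement changes it, only the newest binding set per group key represents the end result. A minimal sketch of that collapsing step, written against an already-consumed list of results (the Kafka-reading part is omitted and the helper name is illustrative):
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.openrdf.model.Value;
// Keeps only the last VisibilityBindingSet seen for each value of the group-by variable,
// e.g. one result for urn:apple and one for urn:banana.
private static Set<VisibilityBindingSet> latestPerGroup(
        final List<VisibilityBindingSet> streamedResults, final String groupByVariable) {
    final Map<Value, VisibilityBindingSet> latest = new LinkedHashMap<>();
    for (final VisibilityBindingSet result : streamedResults) {
        latest.put(result.getValue(groupByVariable), result); // later results overwrite earlier ones
    }
    return new HashSet<>(latest.values());
}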
Use of org.openrdf.model.ValueFactory in project incubator-rya by apache.
The class KafkaExportIT, method average().
@Test
public void average() throws Exception {
// A query that finds the average price for an item that is in the inventory.
final String sparql = "SELECT (avg(?price) as ?averagePrice) { " + "?item <urn:price> ?price . " + "}";
// Create the Statements that will be loaded into Rya.
final ValueFactory vf = new ValueFactoryImpl();
final Collection<Statement> statements = Sets.newHashSet(
    vf.createStatement(vf.createURI("urn:apple"), vf.createURI("urn:price"), vf.createLiteral(3)),
    vf.createStatement(vf.createURI("urn:gum"), vf.createURI("urn:price"), vf.createLiteral(4)),
    vf.createStatement(vf.createURI("urn:sandwich"), vf.createURI("urn:price"), vf.createLiteral(8)));
// Create the PCJ in Fluo and load the statements into Rya.
final String pcjId = loadDataAndCreateQuery(sparql, statements);
// Create the expected results of the SPARQL query once the PCJ has been computed.
final MapBindingSet expectedResult = new MapBindingSet();
expectedResult.addBinding("averagePrice", vf.createLiteral("5", XMLSchema.DECIMAL));
// Ensure the last result matches the expected result.
final VisibilityBindingSet result = readLastResult(pcjId);
assertEquals(expectedResult, result);
}
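Note that the expected literal is typed as XMLSchema.DECIMAL even though the prices were loaded as plain integers: SPARQL's avg divides an integer sum by an integer count, which produces an xsd:decimal. The same arithmetic in plain Java, for comparison:
import java.math.BigDecimal;
import org.openrdf.model.Literal;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.XMLSchema;
// (3 + 4 + 8) / 3 = 5, carried as a decimal rather than a double.
final BigDecimal average = new BigDecimal(3 + 4 + 8).divide(new BigDecimal(3));
final Literal expected = new ValueFactoryImpl().createLiteral(average.toPlainString(), XMLSchema.DECIMAL);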
Use of org.openrdf.model.ValueFactory in project incubator-rya by apache.
The class QueryIT, method nestedJoinPeriodicQueryWithAggregationAndGroupBy().
@Test
public void nestedJoinPeriodicQueryWithAggregationAndGroupBy() throws Exception {
final String query =
    "prefix function: <http://org.apache.rya/function#> " +
    "prefix time: <http://www.w3.org/2006/time#> " +
    "select ?location ?total ?population " +
    "where { Filter(?total > 1)" +
    "?location <uri:hasPopulation> ?population . {" +
    "select ?location (count(?obs) as ?total) where {" +
    "Filter(function:periodic(?time, 2, .5, time:hours)) " +
    "?obs <uri:hasTime> ?time. " +
    "?obs <uri:hasLoc> ?location } group by ?location }}";
// Create the Statements that will be loaded into Rya.
final ValueFactory vf = new ValueFactoryImpl();
final DatatypeFactory dtf = DatatypeFactory.newInstance();
final ZonedDateTime time = ZonedDateTime.now();
final long currentTime = time.toInstant().toEpochMilli();
final ZonedDateTime zTime1 = time.minusMinutes(30);
final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime2 = zTime1.minusMinutes(30);
final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime3 = zTime2.minusMinutes(30);
final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime4 = zTime3.minusMinutes(30);
final String time4 = zTime4.format(DateTimeFormatter.ISO_INSTANT);
final Collection<Statement> statements = Sets.newHashSet(
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasLoc"), vf.createURI("uri:loc_1")),
    vf.createStatement(vf.createURI("uri:loc_1"), vf.createURI("uri:hasPopulation"), vf.createLiteral(3500)),
    vf.createStatement(vf.createURI("uri:loc_2"), vf.createURI("uri:hasPopulation"), vf.createLiteral(8000)),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasLoc"), vf.createURI("uri:loc_2")),
    vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
    vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasLoc"), vf.createURI("uri:loc_3")),
    vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time4))),
    vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasLoc"), vf.createURI("uri:loc_4")),
    vf.createStatement(vf.createURI("urn:obs_5"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time4))),
    vf.createStatement(vf.createURI("urn:obs_5"), vf.createURI("uri:hasLoc"), vf.createURI("uri:loc_1")),
    vf.createStatement(vf.createURI("urn:obs_6"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
    vf.createStatement(vf.createURI("urn:obs_6"), vf.createURI("uri:hasLoc"), vf.createURI("uri:loc_2")));
// Create the expected results of the SPARQL query once the PCJ has been computed.
final Set<BindingSet> expectedResults = new HashSet<>();
final long period = 1800000;
final long binId = currentTime / period * period;
MapBindingSet bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("location", vf.createURI("uri:loc_1"));
bs.addBinding("population", vf.createLiteral("3500", XMLSchema.INTEGER));
bs.addBinding("periodicBinId", vf.createLiteral(binId));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("location", vf.createURI("uri:loc_2"));
bs.addBinding("population", vf.createLiteral("8000", XMLSchema.INTEGER));
bs.addBinding("periodicBinId", vf.createLiteral(binId));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("location", vf.createURI("uri:loc_2"));
bs.addBinding("population", vf.createLiteral("8000", XMLSchema.INTEGER));
bs.addBinding("periodicBinId", vf.createLiteral(binId + period));
expectedResults.add(bs);
// Verify the end results of the query match the expected results.
runTest(query, statements, expectedResults, ExportStrategy.PERIODIC);
}
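Every observation time above is produced by the same ZonedDateTime to xsd:dateTime conversion. A small helper sketch that captures the pattern; the helper name is illustrative and not part of QueryIT:
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import javax.xml.datatype.DatatypeFactory;
import org.openrdf.model.Literal;
import org.openrdf.model.ValueFactory;
// Formats the instant as ISO-8601 and parses it into an xsd:dateTime literal.
private static Literal timeLiteral(final ValueFactory vf, final DatatypeFactory dtf,
        final ZonedDateTime base, final int minutesBeforeBase) {
    final String lexical = base.minusMinutes(minutesBeforeBase).format(DateTimeFormatter.ISO_INSTANT);
    return vf.createLiteral(dtf.newXMLGregorianCalendar(lexical));
}
With it, the first statement in the set above would read vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), timeLiteral(vf, dtf, time, 30)).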
Use of org.openrdf.model.ValueFactory in project incubator-rya by apache.
The class QueryIT, method periodicQueryTestWithAggregationAndGroupBy().
@Test
public void periodicQueryTestWithAggregationAndGroupBy() throws Exception {
final String query =
    "prefix function: <http://org.apache.rya/function#> " +
    "prefix time: <http://www.w3.org/2006/time#> " +
    "select ?id (count(?obs) as ?total) where {" +
    "Filter(function:periodic(?time, 2, .5, time:hours)) " +
    "?obs <uri:hasTime> ?time. " +
    "?obs <uri:hasId> ?id } group by ?id";
// Create the Statements that will be loaded into Rya.
final ValueFactory vf = new ValueFactoryImpl();
final DatatypeFactory dtf = DatatypeFactory.newInstance();
final ZonedDateTime time = ZonedDateTime.now();
final long currentTime = time.toInstant().toEpochMilli();
final ZonedDateTime zTime1 = time.minusMinutes(30);
final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime2 = zTime1.minusMinutes(30);
final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime3 = zTime2.minusMinutes(30);
final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
final ZonedDateTime zTime4 = zTime3.minusMinutes(30);
final String time4 = zTime4.format(DateTimeFormatter.ISO_INSTANT);
final Collection<Statement> statements = Sets.newHashSet(
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")),
    vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
    vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")),
    vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time4))),
    vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasId"), vf.createLiteral("id_4")),
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time4))),
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")));
// Create the expected results of the SPARQL query once the PCJ has been computed.
final Set<BindingSet> expectedResults = new HashSet<>();
final long period = 1800000;
final long binId = currentTime / period * period;
MapBindingSet bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_3", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_4", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId + period));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId + period));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_3", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId + period));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId + 2 * period));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId + 2 * period));
expectedResults.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(binId + 3 * period));
expectedResults.add(bs);
// Verify the end results of the query match the expected results.
runTest(query, statements, expectedResults, ExportStrategy.PERIODIC);
}
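The ten expected binding sets above differ only in the count, the grouping id, and the periodic bin they fall into. A minimal builder sketch that would remove the repetition; the method name is illustrative and not part of QueryIT:
import org.openrdf.model.ValueFactory;
import org.openrdf.model.vocabulary.XMLSchema;
import org.openrdf.query.BindingSet;
import org.openrdf.query.impl.MapBindingSet;
// Builds one expected periodic result: a count, a grouping id, and the bin it belongs to.
private static BindingSet periodicResult(final ValueFactory vf, final int total,
        final String id, final long bin) {
    final MapBindingSet bs = new MapBindingSet();
    bs.addBinding("total", vf.createLiteral(Integer.toString(total), XMLSchema.INTEGER));
    bs.addBinding("id", vf.createLiteral(id, XMLSchema.STRING));
    bs.addBinding("periodicBinId", vf.createLiteral(bin));
    return bs;
}
For example, expectedResults.add(periodicResult(vf, 2, "id_1", binId)); replaces the five-line block that builds the first expected result.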