Use of org.apache.fluo.core.client.FluoClientImpl in project incubator-rya by apache.
From the class PeriodicNotificationBinPrunerIT, method periodicPrunerTest.
@Test
public void periodicPrunerTest() throws Exception {
String sparql = "prefix function: <http://org.apache.rya/function#> " +
    "prefix time: <http://www.w3.org/2006/time#> " +
    "select ?id (count(?obs) as ?total) where {" +
    "Filter(function:periodic(?time, 2, .5, time:hours)) " +
    "?obs <uri:hasTime> ?time. " +
    "?obs <uri:hasId> ?id } group by ?id";
FluoClient fluo = new FluoClientImpl(super.getFluoConfiguration());
// initialize resources and create pcj
PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(super.getAccumuloConnector(), getRyaInstanceName());
CreatePeriodicQuery createPeriodicQuery = new CreatePeriodicQuery(fluo, periodicStorage);
String queryId = FluoQueryUtils.convertFluoQueryIdToPcjId(createPeriodicQuery.createPeriodicQuery(sparql).getQueryId());
// create statements to ingest into Fluo
final ValueFactory vf = new ValueFactoryImpl();
final DatatypeFactory dtf = DatatypeFactory.newInstance();
ZonedDateTime time = ZonedDateTime.now();
long currentTime = time.toInstant().toEpochMilli();
ZonedDateTime zTime1 = time.minusMinutes(30);
String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
ZonedDateTime zTime2 = zTime1.minusMinutes(30);
String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
ZonedDateTime zTime3 = zTime2.minusMinutes(30);
String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
ZonedDateTime zTime4 = zTime3.minusMinutes(30);
String time4 = zTime4.format(DateTimeFormatter.ISO_INSTANT);
final Collection<Statement> statements = Sets.newHashSet(
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time1))),
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time2))),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")),
    vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
    vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")),
    vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time4))),
    vf.createStatement(vf.createURI("urn:obs_4"), vf.createURI("uri:hasId"), vf.createLiteral("id_4")),
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time4))),
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1")),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
    vf.createStatement(vf.createURI("urn:obs_2"), vf.createURI("uri:hasId"), vf.createLiteral("id_2")));
// add statements to Fluo
InsertTriples inserter = new InsertTriples();
statements.forEach(x -> inserter.insert(fluo, RdfToRyaConversions.convertStatement(x)));
super.getMiniFluo().waitForObservers();
// FluoITHelper.printFluoTable(fluo);
// Create the expected results of the SPARQL query once the PCJ has been
// computed.
final Set<BindingSet> expected1 = new HashSet<>();
final Set<BindingSet> expected2 = new HashSet<>();
final Set<BindingSet> expected3 = new HashSet<>();
final Set<BindingSet> expected4 = new HashSet<>();
// the query's .5 hour reporting period expressed in milliseconds
long period = 1800000;
// id of the periodic bin containing the current time, floored to a period boundary
long binId = (currentTime / period) * period;
long bin1 = binId;
long bin2 = binId + period;
long bin3 = binId + 2 * period;
long bin4 = binId + 3 * period;
MapBindingSet bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin1));
expected1.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin1));
expected1.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_3", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin1));
expected1.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_4", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin1));
expected1.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin2));
expected2.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("2", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin2));
expected2.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_3", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin2));
expected2.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin3));
expected3.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_2", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin3));
expected3.add(bs);
bs = new MapBindingSet();
bs.addBinding("total", vf.createLiteral("1", XMLSchema.INTEGER));
bs.addBinding("id", vf.createLiteral("id_1", XMLSchema.STRING));
bs.addBinding("periodicBinId", vf.createLiteral(bin4));
expected4.add(bs);
// make sure that expected and actual results align after ingest
compareResults(periodicStorage, queryId, bin1, expected1);
compareResults(periodicStorage, queryId, bin2, expected2);
compareResults(periodicStorage, queryId, bin3, expected3);
compareResults(periodicStorage, queryId, bin4, expected4);
BlockingQueue<NodeBin> bins = new LinkedBlockingQueue<>();
PeriodicQueryPrunerExecutor pruner = new PeriodicQueryPrunerExecutor(periodicStorage, fluo, 1, bins);
pruner.start();
bins.add(new NodeBin(queryId, bin1));
bins.add(new NodeBin(queryId, bin2));
bins.add(new NodeBin(queryId, bin3));
bins.add(new NodeBin(queryId, bin4));
// give the pruner time to process the queued bins
Thread.sleep(10000);
compareResults(periodicStorage, queryId, bin1, new HashSet<>());
compareResults(periodicStorage, queryId, bin2, new HashSet<>());
compareResults(periodicStorage, queryId, bin3, new HashSet<>());
compareResults(periodicStorage, queryId, bin4, new HashSet<>());
compareFluoCounts(fluo, queryId, bin1);
compareFluoCounts(fluo, queryId, bin2);
compareFluoCounts(fluo, queryId, bin3);
compareFluoCounts(fluo, queryId, bin4);
pruner.stop();
}
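The compareResults helper referenced above is defined elsewhere in PeriodicNotificationBinPrunerIT and is not part of this snippet. A hypothetical sketch of such a check, assuming the Optional argument of PeriodicQueryResultStorage.listResults selects a single periodic bin (as suggested by the QueryIT example further down), is:
private void compareResults(PeriodicQueryResultStorage storage, String queryId, long bin, Set<BindingSet> expected) throws Exception {
    // Hypothetical reimplementation for illustration; the real helper in the test class may differ.
    Set<BindingSet> actual = new HashSet<>();
    // Assumption: Optional.of(bin) restricts the listing to the given periodic bin.
    try (CloseableIterator<BindingSet> iter = storage.listResults(queryId, Optional.of(bin))) {
        while (iter.hasNext()) {
            actual.add(iter.next());
        }
    }
    Assert.assertEquals(expected, actual);
}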
Use of org.apache.fluo.core.client.FluoClientImpl in project incubator-rya by apache.
From the class PeriodicNotificationApplicationIT, method addData.
private void addData(final Collection<Statement> statements) throws DatatypeConfigurationException {
// add statements to Fluo
try (FluoClient fluo = new FluoClientImpl(getFluoConfiguration())) {
final InsertTriples inserter = new InsertTriples();
statements.forEach(x -> inserter.insert(fluo, RdfToRyaConversions.convertStatement(x)));
getMiniFluo().waitForObservers();
}
}
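A brief, illustrative call site for the addData helper above; the statement values are made up for this sketch and simply reuse the ValueFactoryImpl / DatatypeFactory pattern from the pruner test.
final ValueFactory vf = new ValueFactoryImpl();
final DatatypeFactory dtf = DatatypeFactory.newInstance();
// Illustrative data only; "urn:obs_1", "uri:hasTime", and "id_1" mirror the identifiers used in the pruner test.
final String now = ZonedDateTime.now().format(DateTimeFormatter.ISO_INSTANT);
addData(Sets.newHashSet(
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"), vf.createLiteral(dtf.newXMLGregorianCalendar(now))),
    vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasId"), vf.createLiteral("id_1"))));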
Use of org.apache.fluo.core.client.FluoClientImpl in project incubator-rya by apache.
From the class PeriodicNotificationProviderIT, method testProvider.
@Test
public void testProvider() throws MalformedQueryException, InterruptedException, UnsupportedQueryException {
String sparql = "prefix function: <http://org.apache.rya/function#> " +
    "prefix time: <http://www.w3.org/2006/time#> " +
    "select ?id (count(?obs) as ?total) where {" +
    "Filter(function:periodic(?time, 1, .25, time:minutes)) " +
    "?obs <uri:hasTime> ?time. " +
    "?obs <uri:hasId> ?id } group by ?id";
BlockingQueue<TimestampedNotification> notifications = new LinkedBlockingQueue<>();
PeriodicNotificationCoordinatorExecutor coord = new PeriodicNotificationCoordinatorExecutor(2, notifications);
PeriodicNotificationProvider provider = new PeriodicNotificationProvider();
CreateFluoPcj pcj = new CreateFluoPcj();
String id = null;
try (FluoClient fluo = new FluoClientImpl(getFluoConfiguration())) {
id = pcj.createPcj(FluoQueryUtils.createNewPcjId(), sparql, Sets.newHashSet(), fluo).getQueryId();
provider.processRegisteredNotifications(coord, fluo.newSnapshot());
}
TimestampedNotification notification = notifications.take();
Assert.assertEquals(5000, notification.getInitialDelay());
Assert.assertEquals(15000, notification.getPeriod());
Assert.assertEquals(TimeUnit.MILLISECONDS, notification.getTimeUnit());
Assert.assertEquals(FluoQueryUtils.convertFluoQueryIdToPcjId(id), notification.getId());
}
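The asserted 15000 ms period follows from the filter arguments: a reporting period of .25 time:minutes converted to milliseconds. A minimal sketch of that conversion (the 5000 ms initial delay is whatever the provider computes from the registration time and is not derived here):
// .25 of a minute expressed in milliseconds, matching the asserted notification period.
double periodMinutes = .25;
long periodMillis = (long) (periodMinutes * TimeUnit.MINUTES.toMillis(1)); // 15000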
Use of org.apache.fluo.core.client.FluoClientImpl in project incubator-rya by apache.
From the class BatchIT, method multiJoinBatchIntegrationTest.
@Test
public void multiJoinBatchIntegrationTest() throws Exception {
final String sparql = "SELECT ?subject1 ?subject2 ?object1 ?object2 WHERE { ?subject1 <urn:predicate_1> ?object1; " + " <urn:predicate_2> ?object2 ." + " ?subject2 <urn:predicate_3> ?object2 } ";
try (FluoClient fluoClient = new FluoClientImpl(getFluoConfiguration())) {
RyaURI subj1 = new RyaURI("urn:subject_1");
RyaStatement statement1 = new RyaStatement(subj1, new RyaURI("urn:predicate_1"), null);
RyaStatement statement2 = new RyaStatement(subj1, new RyaURI("urn:predicate_2"), null);
Set<RyaStatement> statements1 = getRyaStatements(statement1, 10);
Set<RyaStatement> statements2 = getRyaStatements(statement2, 10);
RyaURI subj2 = new RyaURI("urn:subject_2");
RyaStatement statement3 = new RyaStatement(subj2, new RyaURI("urn:predicate_3"), null);
Set<RyaStatement> statements3 = getRyaStatements(statement3, 10);
// Create the PCJ table.
final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(getAccumuloConnector(), getRyaInstanceName());
final String pcjId = pcjStorage.createPcj(sparql);
// Tell the Fluo app to maintain the PCJ and sets batch scan size for StatementPatterns to 5 and
// batch size of joins to 5.
String queryId = new CreateFluoPcj(5, 5).withRyaIntegration(pcjId, pcjStorage, fluoClient, getAccumuloConnector(), getRyaInstanceName()).getQueryId();
List<String> ids = getNodeIdStrings(fluoClient, queryId);
// Stream the data into Fluo.
InsertTriples inserter = new InsertTriples();
inserter.insert(fluoClient, statements1, Optional.absent());
inserter.insert(fluoClient, statements2, Optional.absent());
inserter.insert(fluoClient, statements3, Optional.absent());
getMiniFluo().waitForObservers();
verifyCounts(fluoClient, ids, Arrays.asList(100, 100, 100, 100, 10, 10, 10));
}
}
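The getRyaStatements helper used above is defined elsewhere in BatchIT. A hypothetical sketch of a generator with the same shape, filling the null object of each template statement with distinct values (the real helper may differ):
private Set<RyaStatement> getRyaStatements(RyaStatement template, int count) {
    // Hypothetical generator: reuses the template's subject and predicate and supplies 'count' distinct objects.
    Set<RyaStatement> statements = new HashSet<>();
    for (int i = 0; i < count; i++) {
        statements.add(new RyaStatement(template.getSubject(), template.getPredicate(), new RyaType("object_" + i)));
    }
    return statements;
}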
Use of org.apache.fluo.core.client.FluoClientImpl in project incubator-rya by apache.
From the class QueryIT, method runTest.
public void runTest(final String sparql, final Collection<Statement> statements, final Collection<BindingSet> expectedResults, final ExportStrategy strategy) throws Exception {
requireNonNull(sparql);
requireNonNull(statements);
requireNonNull(expectedResults);
// Register the PCJ with Rya.
final Connector accumuloConn = super.getAccumuloConnector();
final RyaClient ryaClient = AccumuloRyaClientFactory.build(createConnectionDetails(), accumuloConn);
switch(strategy) {
case RYA:
ryaClient.getCreatePCJ().createPCJ(getRyaInstanceName(), sparql);
addStatementsAndWait(statements);
// Fetch the value that is stored within the PCJ table.
try (final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(accumuloConn, getRyaInstanceName())) {
final String pcjId = pcjStorage.listPcjs().get(0);
final Set<BindingSet> results = Sets.newHashSet(pcjStorage.listResults(pcjId));
// Ensure the result of the query matches the expected result.
assertEquals(expectedResults, results);
}
break;
case PERIODIC:
final PeriodicQueryResultStorage periodicStorage = new AccumuloPeriodicQueryResultStorage(accumuloConn, getRyaInstanceName());
final String periodicId = periodicStorage.createPeriodicQuery(sparql);
try (FluoClient fluo = new FluoClientImpl(super.getFluoConfiguration())) {
new CreateFluoPcj().createPcj(periodicId, sparql, Sets.newHashSet(ExportStrategy.PERIODIC), fluo);
}
addStatementsAndWait(statements);
final Set<BindingSet> results = Sets.newHashSet();
try (CloseableIterator<BindingSet> resultIter = periodicStorage.listResults(periodicId, Optional.empty())) {
while (resultIter.hasNext()) {
results.add(resultIter.next());
}
}
assertEquals(expectedResults, results);
break;
default:
throw new RuntimeException("Invalid export option");
}
}
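An illustrative invocation of runTest with the RYA export strategy; the query, statements, and expected binding below are invented for this sketch and do not come from QueryIT itself.
final String sparql = "SELECT ?person WHERE { ?person <urn:worksAt> <urn:CoffeeShop> }";
final ValueFactory vf = new ValueFactoryImpl();
final Collection<Statement> statements = Sets.newHashSet(
    vf.createStatement(vf.createURI("urn:Alice"), vf.createURI("urn:worksAt"), vf.createURI("urn:CoffeeShop")));
final Set<BindingSet> expectedResults = new HashSet<>();
final MapBindingSet bs = new MapBindingSet();
bs.addBinding("person", vf.createURI("urn:Alice"));
expectedResults.add(bs);
runTest(sparql, statements, expectedResults, ExportStrategy.RYA);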