use of org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException in project incubator-rya by apache.
the class AccumuloCreatePCJ method createPCJ.
@Override
public String createPCJ(final String instanceName, final String sparql, Set<ExportStrategy> strategies) throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(instanceName);
    requireNonNull(sparql);

    final Optional<RyaDetails> ryaDetailsHolder = getInstanceDetails.getDetails(instanceName);
    final boolean ryaInstanceExists = ryaDetailsHolder.isPresent();
    if (!ryaInstanceExists) {
        throw new InstanceDoesNotExistException(String.format("The '%s' instance of Rya does not exist.", instanceName));
    }

    final PCJIndexDetails pcjIndexDetails = ryaDetailsHolder.get().getPCJIndexDetails();
    final boolean pcjIndexingEnabled = pcjIndexDetails.isEnabled();
    if (!pcjIndexingEnabled) {
        throw new RyaClientException(String.format("The '%s' instance of Rya does not have PCJ Indexing enabled.", instanceName));
    }

    // Create the PCJ table that will receive the index results.
    final String pcjId;
    try (final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(getConnector(), instanceName)) {
        pcjId = pcjStorage.createPcj(sparql);

        // If a Fluo application is being used, task it with updating the PCJ.
        final Optional<FluoDetails> fluoDetailsHolder = pcjIndexDetails.getFluoDetails();
        if (fluoDetailsHolder.isPresent()) {
            final String fluoAppName = fluoDetailsHolder.get().getUpdateAppName();
            try {
                updateFluoApp(instanceName, fluoAppName, pcjId, sparql, strategies);
            } catch (RepositoryException | MalformedQueryException | SailException | QueryEvaluationException | PcjException | RyaDAOException e) {
                throw new RyaClientException("Problem while initializing the Fluo application with the new PCJ.", e);
            } catch (UnsupportedQueryException e) {
                throw new RyaClientException("The new PCJ could not be initialized because it either contains an unsupported query node "
                        + "or an invalid ExportStrategy for the given QueryType. Projection queries can be exported to either Rya or Kafka, "
                        + "unless they contain an aggregation, in which case they can only be exported to Kafka. Construct queries can be exported "
                        + "to Rya and Kafka, and Periodic queries can only be exported to Rya.", e);
            }

            // Update the Rya Details to indicate the PCJ is being updated incrementally.
            final RyaDetailsRepository detailsRepo = new AccumuloRyaInstanceDetailsRepository(getConnector(), instanceName);
            try {
                new RyaDetailsUpdater(detailsRepo).update(new RyaDetailsMutator() {
                    @Override
                    public RyaDetails mutate(final RyaDetails originalDetails) throws CouldNotApplyMutationException {
                        // Update the original PCJ Details to indicate they are incrementally updated.
                        final PCJDetails originalPCJDetails = originalDetails.getPCJIndexDetails().getPCJDetails().get(pcjId);
                        final PCJDetails.Builder mutatedPCJDetails = PCJDetails.builder(originalPCJDetails)
                                .setUpdateStrategy(PCJUpdateStrategy.INCREMENTAL);

                        // Replace the old PCJ Details with the updated ones.
                        final RyaDetails.Builder builder = RyaDetails.builder(originalDetails);
                        builder.getPCJIndexDetails().addPCJDetails(mutatedPCJDetails);
                        return builder.build();
                    }
                });
            } catch (RyaDetailsRepositoryException | CouldNotApplyMutationException e) {
                throw new RyaClientException("Problem while updating the Rya instance's Details to indicate the PCJ is being incrementally updated.", e);
            }
        }

        // Return the ID that was assigned to the PCJ.
        return pcjId;
    } catch (final PCJStorageException e) {
        throw new RyaClientException("Problem while initializing the PCJ table.", e);
    }
}
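For orientation, here is a minimal calling sketch for this method. It is not taken from the Rya sources: the instance name is made up, the SPARQL is borrowed from the demo further down this page, and the ExportStrategy constants RYA and KAFKA are assumed names inferred from the error message above; only the createPCJ(...) signature mirrors the method shown here.

// Hedged usage sketch, not from the Rya codebase. The instance name is illustrative and
// ExportStrategy.RYA / ExportStrategy.KAFKA are assumed constant names.
public String createCoffeeShopPcj(final AccumuloCreatePCJ createPCJ) throws InstanceDoesNotExistException, RyaClientException {
    final String sparql =
            "SELECT ?patron ?employee " +
            "WHERE { " +
            "?patron <http://talksTo> ?employee. " +
            "?employee <http://worksAt> <http://CoffeeShop>. " +
            "}";
    // Ask the "demo_" Rya instance to maintain the PCJ and export its results to both Rya and Kafka,
    // which the error message above says is valid for a projection query without aggregation.
    return createPCJ.createPCJ("demo_", sparql, Sets.newHashSet(ExportStrategy.RYA, ExportStrategy.KAFKA));
}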
use of org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException in project incubator-rya by apache.
the class CreatePeriodicQuery method createPeriodicQuery.
/**
* Creates a Periodic Query by adding the query to Fluo and using the resulting
* Fluo id to create a {@link PeriodicQueryResultStorage} table. Additionally,
* the associated PeriodicNotification is registered with the Periodic Query Service.
*
* @param sparql - SPARQL query registered with Fluo whose results are stored in the PeriodicQueryResultStorage table
* @param notificationClient - {@link PeriodicNotificationClient} for registering new PeriodicNotifications
* @return FluoQuery indicating the metadata of the registered SPARQL query
*/
public FluoQuery createPeriodicQuery(String sparql, PeriodicNotificationClient notificationClient) throws PeriodicQueryCreationException {
    try {
        Optional<PeriodicQueryNode> optNode = PeriodicQueryUtil.getPeriodicNode(sparql);
        if (optNode.isPresent()) {
            PeriodicQueryNode periodicNode = optNode.get();
            String pcjId = FluoQueryUtils.createNewPcjId();

            // register query with Fluo
            CreateFluoPcj createPcj = new CreateFluoPcj();
            FluoQuery fluoQuery = createPcj.createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.PERIODIC), fluoClient);

            // register query with PeriodicResultStorage table
            periodicStorage.createPeriodicQuery(pcjId, sparql);

            // create notification
            PeriodicNotification notification = PeriodicNotification.builder()
                    .id(pcjId)
                    .period(periodicNode.getPeriod())
                    .timeUnit(periodicNode.getUnit())
                    .build();

            // register notification with periodic notification app
            notificationClient.addNotification(notification);

            return fluoQuery;
        } else {
            throw new RuntimeException("Invalid PeriodicQuery. Query must possess a PeriodicQuery Filter.");
        }
    } catch (MalformedQueryException | PeriodicQueryStorageException | UnsupportedQueryException e) {
        throw new PeriodicQueryCreationException(e);
    }
}
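As a usage illustration, the sketch below registers a daily aggregation query through this method. It is assumption-laden: the CreatePeriodicQuery instance and PeriodicNotificationClient are presumed to already be constructed elsewhere, the predicates are hypothetical, and the periodic FILTER function IRI and its argument order are assumptions that must match whatever PeriodicQueryUtil.getPeriodicNode(...) actually recognizes.

// Hedged sketch: the periodic filter IRI, its arguments, and the predicates are assumptions,
// not taken from the snippet above.
public FluoQuery registerDailyQuery(final CreatePeriodicQuery createPeriodicQuery, final PeriodicNotificationClient notificationClient)
        throws PeriodicQueryCreationException {
    final String sparql =
            "PREFIX function: <http://org.apache.rya/function#> " +
            "PREFIX time: <http://www.w3.org/2006/time#> " +
            "SELECT ?employee (count(?patron) AS ?total) " +
            "WHERE { " +
            "FILTER(function:periodic(?time, 24, 1, time:hours)) " +
            "?patron <http://talksTo> ?employee. " +
            "?patron <http://observedAt> ?time. " +
            "} GROUP BY ?employee";
    // Registers the query with Fluo and the PeriodicQueryResultStorage table, then adds its notification.
    return createPeriodicQuery.createPeriodicQuery(sparql, notificationClient);
}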
use of org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException in project incubator-rya by apache.
the class CreatePeriodicQuery method createPeriodicQuery.
/**
* Creates a Periodic Query by adding the query to Fluo and using the resulting
* Fluo id to create a {@link PeriodicQueryResultStorage} table.
*
* @param sparql - SPARQL query registered with Fluo whose results are stored in the PeriodicQueryResultStorage table
* @return FluoQuery indicating the metadata of the registered SPARQL query
*/
public FluoQuery createPeriodicQuery(String sparql) throws PeriodicQueryCreationException {
    try {
        Optional<PeriodicQueryNode> optNode = PeriodicQueryUtil.getPeriodicNode(sparql);
        if (optNode.isPresent()) {
            String pcjId = FluoQueryUtils.createNewPcjId();

            // register query with Fluo
            CreateFluoPcj createPcj = new CreateFluoPcj();
            FluoQuery fluoQuery = createPcj.createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.PERIODIC), fluoClient);

            // register query with PeriodicResultStorage table
            periodicStorage.createPeriodicQuery(pcjId, sparql);

            return fluoQuery;
        } else {
            throw new RuntimeException("Invalid PeriodicQuery. Query must possess a PeriodicQuery Filter.");
        }
    } catch (MalformedQueryException | PeriodicQueryStorageException | UnsupportedQueryException e) {
        throw new PeriodicQueryCreationException(e);
    }
}
use of org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException in project incubator-rya by apache.
the class CreateFluoPcj method makeFluoQuery.
private FluoQuery makeFluoQuery(String sparql, String pcjId, Set<ExportStrategy> strategies) throws MalformedQueryException, UnsupportedQueryException {
    String queryId = NodeType.generateNewIdForType(NodeType.QUERY, pcjId);
    SparqlFluoQueryBuilder builder = new SparqlFluoQueryBuilder()
            .setExportStrategies(strategies)
            .setFluoQueryId(queryId)
            .setSparql(sparql)
            .setJoinBatchSize(joinBatchSize);
    FluoQuery query = builder.build();
    if (query.getQueryType() == QueryType.PERIODIC && !Sets.newHashSet(ExportStrategy.PERIODIC).containsAll(strategies)) {
        throw new UnsupportedQueryException("Periodic Queries must only utilize the PeriodicExport or the NoOpExport ExportStrategy.");
    }
    if (query.getQueryType() != QueryType.PERIODIC && strategies.contains(ExportStrategy.PERIODIC)) {
        throw new UnsupportedQueryException("Only Periodic Queries can utilize the PeriodicExport ExportStrategy.");
    }
    return query;
}
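These two checks are what surface as UnsupportedQueryException in the callers shown earlier on this page. The fragment below is a minimal sketch of the second failure mode, reusing only the public createPcj(...) call that appears in the CreatePeriodicQuery snippets; the fluoClient variable is assumed to be in scope.

// Hedged sketch: a plain projection query paired with the PERIODIC ExportStrategy
// trips the second check above and is rejected.
String sparql = "SELECT ?employee WHERE { ?employee <http://worksAt> <http://CoffeeShop>. }";
String pcjId = FluoQueryUtils.createNewPcjId();
try {
    new CreateFluoPcj().createPcj(pcjId, sparql, Sets.newHashSet(ExportStrategy.PERIODIC), fluoClient);
} catch (UnsupportedQueryException e) {
    // Expected here: "Only Periodic Queries can utilize the PeriodicExport ExportStrategy."
} catch (MalformedQueryException e) {
    // Would only occur if the SPARQL itself failed to parse.
}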
use of org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException in project incubator-rya by apache.
the class FluoAndHistoricPcjsDemo method execute.
@Override
public void execute(final MiniAccumuloCluster accumulo, final Connector accumuloConn, final String ryaTablePrefix, final RyaSailRepository ryaRepo, final RepositoryConnection ryaConn, final MiniFluo fluo, final FluoClient fluoClient) throws DemoExecutionException {
    log.setLevel(Level.INFO);

    // 1. Introduce some RDF Statements that we are going to start with and
    // pause so the presenter can introduce this information to the audience.
    final Set<RyaStatement> relevantHistoricStatements = Sets.newHashSet(
            new RyaStatement(eve, talksTo, charlie),
            new RyaStatement(david, talksTo, alice),
            new RyaStatement(alice, worksAt, coffeeShop),
            new RyaStatement(bob, worksAt, coffeeShop));
    log.info("We add some Statements that are relevant to the query we will compute:");
    prettyLogStatements(relevantHistoricStatements);
    waitForEnter();

    log.info("We also add some more Statements that aren't related to the query we will compute:");
    final Set<RyaStatement> otherHistoricStatements = Sets.newHashSet(
            new RyaStatement(henry, worksAt, burgerShop),
            new RyaStatement(irene, worksAt, burgerShop),
            new RyaStatement(justin, worksAt, burgerShop),
            new RyaStatement(kristi, worksAt, burgerShop),
            new RyaStatement(luke, worksAt, burgerShop),
            new RyaStatement(manny, worksAt, cupcakeShop),
            new RyaStatement(nate, worksAt, cupcakeShop),
            new RyaStatement(olivia, worksAt, cupcakeShop),
            new RyaStatement(paul, worksAt, cupcakeShop),
            new RyaStatement(ross, worksAt, cupcakeShop),
            new RyaStatement(henry, talksTo, irene),
            new RyaStatement(henry, talksTo, justin),
            new RyaStatement(kristi, talksTo, irene),
            new RyaStatement(luke, talksTo, irene),
            new RyaStatement(sally, talksTo, paul),
            new RyaStatement(sally, talksTo, ross),
            new RyaStatement(sally, talksTo, kristi),
            new RyaStatement(tim, talksTo, nate),
            new RyaStatement(tim, talksTo, paul),
            new RyaStatement(tim, talksTo, kristi));
    log.info("These statements will also be inserted into the core Rya tables:");
    prettyLogStatements(otherHistoricStatements);
    waitForEnter();

    // 2. Load the statements into the core Rya tables.
    log.info("Loading the historic statements into Rya...");
    loadDataIntoRya(ryaConn, relevantHistoricStatements);
    loadDataIntoRya(ryaConn, otherHistoricStatements);
    log.info("");

    // 3. Introduce the query that we're going to load into Fluo and pause so that the
    // presenter may show what they believe the expected output should be.
    final String sparql =
            "SELECT ?patron ?employee " +
            "WHERE { " +
            "?patron <http://talksTo> ?employee. " +
            "?employee <http://worksAt> <http://CoffeeShop>. " +
            "}";
    log.info("The following SPARQL query will be loaded into the Fluo application for incremental updates:");
    prettyLogSparql(sparql);
    waitForEnter();

    // 4. Write the query to Fluo and import the historic matches. Wait for the app to finish exporting results.
    log.info("Telling Fluo to maintain the query and import the historic Statement Pattern matches.");
    final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(accumuloConn, ryaTablePrefix);
    final String pcjId;
    try {
        // Create the PCJ Index in Rya.
        pcjId = pcjStorage.createPcj(sparql);

        // Tell the Fluo app to maintain it.
        new CreateFluoPcj().withRyaIntegration(pcjId, pcjStorage, fluoClient, accumuloConn, ryaTablePrefix);
    } catch (MalformedQueryException | PcjException | RyaDAOException | UnsupportedQueryException e) {
        throw new DemoExecutionException("Error while using Fluo to compute and export historic matches, so the demo cannot continue. Exiting.", e);
    }

    log.info("Waiting for the Fluo application to finish exporting the initial results...");
    fluo.waitForObservers();
    log.info("Historic result exporting finished.");
    log.info("");

    // 5. Show that the Fluo app exported the results to the PCJ table in Accumulo.
    log.info("The following Binding Sets were exported to the PCJ with ID '" + pcjId + "' in Rya:");
    try (CloseableIterator<BindingSet> resultsIt = pcjStorage.listResults(pcjId)) {
        while (resultsIt.hasNext()) {
            log.info(" " + resultsIt.next());
        }
    } catch (final Exception e) {
        throw new DemoExecutionException("Could not fetch the PCJ's results from Accumulo. Exiting.", e);
    }
    waitForEnter();

    // 6. Introduce some new Statements that we will stream into the Fluo app.
    final RyaStatement newLeft = new RyaStatement(george, talksTo, frank);
    final RyaStatement newRight = new RyaStatement(frank, worksAt, coffeeShop);
    final RyaStatement joinLeft = new RyaStatement(eve, talksTo, bob);
    final RyaStatement joinRight = new RyaStatement(charlie, worksAt, coffeeShop);
    final Set<RyaStatement> relevantStreamedStatements = Sets.newHashSet(newLeft, newRight, joinLeft, joinRight);
    log.info("We stream these relevant Statements into Fluo and the core Rya tables:");
    log.info(prettyFormat(newLeft) + " - Part of a new result");
    log.info(prettyFormat(newRight) + " - Other part of a new result");
    log.info(prettyFormat(joinLeft) + " - Joins with a historic <http://talksTo> statement");
    log.info(prettyFormat(joinRight) + " - Joins with a historic <http://worksAt> statement");
    waitForEnter();

    final Set<RyaStatement> otherStreamedStatements = Sets.newHashSet(
            new RyaStatement(alice, talksTo, tim),
            new RyaStatement(bob, talksTo, tim),
            new RyaStatement(charlie, talksTo, tim),
            new RyaStatement(frank, talksTo, tim),
            new RyaStatement(david, talksTo, tim),
            new RyaStatement(eve, talksTo, sally),
            new RyaStatement(george, talksTo, sally),
            new RyaStatement(henry, talksTo, sally),
            new RyaStatement(irene, talksTo, sally),
            new RyaStatement(justin, talksTo, sally),
            new RyaStatement(kristi, talksTo, manny),
            new RyaStatement(luke, talksTo, manny),
            new RyaStatement(manny, talksTo, paul),
            new RyaStatement(nate, talksTo, manny),
            new RyaStatement(olivia, talksTo, manny),
            new RyaStatement(paul, talksTo, kristi),
            new RyaStatement(ross, talksTo, kristi),
            new RyaStatement(sally, talksTo, kristi),
            new RyaStatement(olivia, talksTo, kristi));
    log.info("We also stream these irrelevant Statements into Fluo and the core Rya tables:");
    prettyLogStatements(otherStreamedStatements);
    waitForEnter();

    // 7. Insert the new triples into the core Rya tables and the Fluo app.
    loadDataIntoRya(ryaConn, relevantStreamedStatements);
    loadDataIntoFluo(fluoClient, relevantStreamedStatements);

    log.info("Waiting for the Fluo application to finish exporting the newly streamed results...");
    fluo.waitForObservers();
    log.info("Streamed result exporting finished.");
    log.info("");

    // 8. Show the new results have been exported to the PCJ table in Accumulo.
    log.info("The following Binding Sets were exported to the PCJ with ID '" + pcjId + "' in Rya:");
    try (CloseableIterator<BindingSet> resultsIt = pcjStorage.listResults(pcjId)) {
        while (resultsIt.hasNext()) {
            log.info(" " + resultsIt.next());
        }
    } catch (final Exception e) {
        throw new DemoExecutionException("Could not fetch the PCJ's results from Accumulo. Exiting.", e);
    }
    log.info("");
}