Use of org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery in the Apache project incubator-rya.
From the class CreateFluoPcj, method makeFluoQuery:
/**
 * Builds the {@link FluoQuery} metadata tree for the supplied SPARQL query and PCJ id,
 * rejecting combinations of query type and {@link ExportStrategy} that the Fluo
 * application cannot support.
 *
 * @param sparql - The SPARQL query to compile into Fluo metadata.
 * @param pcjId - Id of the PCJ; used to derive the Fluo query node id.
 * @param strategies - Export strategies requested for this query's results.
 * @return The compiled {@link FluoQuery}.
 * @throws MalformedQueryException The SPARQL could not be parsed.
 * @throws UnsupportedQueryException The query type and export strategies are incompatible.
 */
private FluoQuery makeFluoQuery(String sparql, String pcjId, Set<ExportStrategy> strategies) throws MalformedQueryException, UnsupportedQueryException {
    final String fluoQueryId = NodeType.generateNewIdForType(NodeType.QUERY, pcjId);
    final FluoQuery fluoQuery = new SparqlFluoQueryBuilder()
            .setExportStrategies(strategies)
            .setFluoQueryId(fluoQueryId)
            .setSparql(sparql)
            .setJoinBatchSize(joinBatchSize)
            .build();

    final boolean periodic = fluoQuery.getQueryType() == QueryType.PERIODIC;
    // NOTE(review): the message below mentions NoOpExport, but the check only admits
    // ExportStrategy.PERIODIC — confirm whether NO_OP_EXPORT should also be allowed here.
    if (periodic && !Sets.newHashSet(ExportStrategy.PERIODIC).containsAll(strategies)) {
        throw new UnsupportedQueryException("Periodic Queries must only utilize the PeriodicExport or the NoOpExport ExportStrategy.");
    }
    if (!periodic && strategies.contains(ExportStrategy.PERIODIC)) {
        throw new UnsupportedQueryException("Only Periodic Queries can utilize the PeriodicExport ExportStrategy.");
    }
    return fluoQuery;
}
Use of org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery in the Apache project incubator-rya.
From the class CreateFluoPcj, method withRyaIntegration (pcjId variant):
/**
 * Tells the Fluo PCJ Updater application to maintain a new PCJ.
 * <p>
 * This call scans Rya for Statement Pattern matches and inserts them into
 * the Fluo application. This method does not verify that a PcjTable with
 * the given pcjId actually exists, so one should verify that the table exists before
 * using the Rya ExportStrategy. Results will be exported according to the Set of
 * {@link ExportStrategy} enums.
 *
 * @param pcjId - Identifies the PCJ that will be updated by the Fluo app. (not null)
 * @param sparql - sparql query that will registered with Fluo. (not null)
 * @param strategies - ExportStrategies used to specify how final results will be handled. (not null)
 * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null)
 * @param accumulo - Accumulo connector for connecting with Accumulo. (not null)
 * @param ryaInstance - name of Rya instance to connect to. (not null)
 * @return FluoQuery containing the metadata for the newly registered SPARQL query
 * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed.
 * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}.
 * @throws RyaDAOException Historic PCJ results could not be loaded because of a problem with {@code rya}.
 * @throws UnsupportedQueryException The query or its export strategies are not supported by the Fluo app.
 */
public FluoQuery withRyaIntegration(final String pcjId, final String sparql, final Set<ExportStrategy> strategies, final FluoClient fluo, final Connector accumulo, final String ryaInstance) throws MalformedQueryException, PcjException, RyaDAOException, UnsupportedQueryException {
    requireNonNull(pcjId);
    requireNonNull(sparql);
    // Fix: 'strategies' was the only parameter missing a null check; fail fast like the rest.
    requireNonNull(strategies);
    requireNonNull(fluo);
    requireNonNull(accumulo);
    requireNonNull(ryaInstance);

    // Write the SPARQL query's structure to the Fluo Application.
    final FluoQuery fluoQuery = createPcj(pcjId, sparql, strategies, fluo);

    // Import results already ingested into Rya that match the query.
    importHistoricResultsIntoFluo(fluo, fluoQuery, accumulo, ryaInstance);

    // Return the query metadata to the caller for later monitoring of the export.
    return fluoQuery;
}
Use of org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery in the Apache project incubator-rya.
From the class CreateFluoPcj, method withRyaIntegration (sparql-only variant):
/**
 * Tells the Fluo PCJ Updater application to maintain a new PCJ.
 * <p>
 * This call scans Rya for Statement Pattern matches and inserts them into
 * the Fluo application. It is assumed that results for any query registered
 * using this method will be exported to Kafka according to the Kafka {@link ExportStrategy}.
 *
 * @param sparql - sparql query that will registered with Fluo. (not null)
 * @param fluo - A connection to the Fluo application that updates the PCJ index. (not null)
 * @param accumulo - Accumulo connector for connecting with Accumulo. (not null)
 * @param ryaInstance - Name of Rya instance to connect to. (not null)
 * @return The Fluo application's Query ID of the query that was created.
 * @throws MalformedQueryException The SPARQL query stored for the {@code pcjId} is malformed.
 * @throws PcjException The PCJ Metadata for {@code pcjId} could not be read from {@code pcjStorage}.
 * @throws RyaDAOException Historic PCJ results could not be loaded because of a problem with {@code rya}.
 * @throws UnsupportedQueryException The query is not supported by the Fluo app.
 */
public String withRyaIntegration(final String sparql, final FluoClient fluo, final Connector accumulo, final String ryaInstance) throws MalformedQueryException, PcjException, RyaDAOException, UnsupportedQueryException {
    requireNonNull(sparql);
    requireNonNull(fluo);
    requireNonNull(accumulo);
    requireNonNull(ryaInstance);

    // Register the SPARQL query's structure with the Fluo application.
    final FluoQuery registeredQuery = createPcj(sparql, fluo);

    // Backfill the Fluo app with matching statements already stored in Rya.
    importHistoricResultsIntoFluo(fluo, registeredQuery, accumulo, ryaInstance);

    // Hand the query node id back so the caller can monitor the export later.
    return registeredQuery.getQueryMetadata().getNodeId();
}
Use of org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery in the Apache project incubator-rya.
From the class DeleteFluoPcj, method getNodeIds:
/**
 * Retrieves all of the node ids that make up the query with the
 * specified pcjId.
 *
 * @param tx - Transaction of a given Fluo table. (not null)
 * @param pcjId - Id of query. (not null)
 * @return list of Node IDs associated with the query {@code pcjId}.
 * @throws UnsupportedQueryException - thrown when the Fluo app is unable to read the
 *   FluoQuery associated with the given pcjId.
 */
private List<String> getNodeIds(Transaction tx, String pcjId) throws UnsupportedQueryException {
    requireNonNull(tx);
    requireNonNull(pcjId);

    // Derive the query node id from the pcjId, then walk its metadata tree.
    final String queryNodeId = NodeType.generateNewIdForType(NodeType.QUERY, pcjId);
    final FluoQuery fluoQuery = dao.readFluoQuery(tx, queryNodeId);
    return FluoQueryUtils.collectNodeIds(fluoQuery);
}
Use of org.apache.rya.indexing.pcj.fluo.app.query.FluoQuery in the Apache project incubator-rya.
From the class QueryReportRenderer, method render:
/**
 * Pretty render a {@link QueryReport}.
 *
 * @param queryReport - The report that will be rendered. (not null)
 * @return A pretty render of the report.
 * @throws Exception Indicates the SPARQL could not be rendered for some reason.
 */
public String render(final QueryReport queryReport) throws Exception {
    checkNotNull(queryReport);

    final Report.Builder builder = Report.builder();
    final FluoQuery metadata = queryReport.getFluoQuery();

    // QUERY NODE — always present at the root of the metadata tree.
    final QueryMetadata queryMetadata = metadata.getQueryMetadata();
    appendSectionHeader(builder, "QUERY NODE");
    builder.appendItem(new ReportItem("Node ID", queryMetadata.getNodeId()));
    builder.appendItem(new ReportItem("Variable Order", queryMetadata.getVariableOrder().toString()));
    builder.appendItem(new ReportItem("SPARQL", queryMetadata.getSparql()));
    builder.appendItem(new ReportItem("Child Node ID", queryMetadata.getChildNodeId()));
    builder.appendItem(new ReportItem("Count", "" + queryReport.getCount(queryMetadata.getNodeId())));

    // CONSTRUCT node only exists for CONSTRUCT queries; .get() is safe after the type check.
    if (metadata.getQueryType() == QueryType.CONSTRUCT) {
        appendSectionHeader(builder, "CONSTRUCT QUERY NODE");
        final ConstructQueryMetadata constructMetadata = metadata.getConstructQueryMetadata().get();
        builder.appendItem(new ReportItem("Node ID", constructMetadata.getNodeId()));
        builder.appendItem(new ReportItem("Variable Order", constructMetadata.getVariableOrder().toString()));
        builder.appendItem(new ReportItem("Parent Node ID", constructMetadata.getParentNodeId()));
        builder.appendItem(new ReportItem("Child Node ID", constructMetadata.getChildNodeId()));
        builder.appendItem(new ReportItem("Construct Graph", constructMetadata.getConstructGraph().toString()));
        builder.appendItem(new ReportItem("Count", "" + queryReport.getCount(constructMetadata.getNodeId())));
    }

    // Fix: loop variables below are now consistently 'final' (projectionMetadata was not).
    for (final ProjectionMetadata projectionMetadata : metadata.getProjectionMetadata()) {
        appendSectionHeader(builder, "PROJECTION NODE");
        builder.appendItem(new ReportItem("Node ID", projectionMetadata.getNodeId()));
        builder.appendItem(new ReportItem("Variable Order", projectionMetadata.getVariableOrder().toString()));
        builder.appendItem(new ReportItem("Parent Node ID", projectionMetadata.getParentNodeId()));
        builder.appendItem(new ReportItem("Child Node ID", projectionMetadata.getChildNodeId()));
        builder.appendItem(new ReportItem("Count", "" + queryReport.getCount(projectionMetadata.getNodeId())));
    }

    for (final FilterMetadata filterMetadata : metadata.getFilterMetadata()) {
        appendSectionHeader(builder, "FILTER NODE");
        builder.appendItem(new ReportItem("Node ID", filterMetadata.getNodeId()));
        builder.appendItem(new ReportItem("Variable Order", filterMetadata.getVariableOrder().toString()));
        builder.appendItem(new ReportItem("Filter SPARQL", prettyFormatSparql(filterMetadata.getFilterSparql())));
        builder.appendItem(new ReportItem("Parent Node ID", filterMetadata.getParentNodeId()));
        builder.appendItem(new ReportItem("Child Node ID", filterMetadata.getChildNodeId()));
        builder.appendItem(new ReportItem("Count", "" + queryReport.getCount(filterMetadata.getNodeId())));
    }

    for (final JoinMetadata joinMetadata : metadata.getJoinMetadata()) {
        appendSectionHeader(builder, "JOIN NODE");
        builder.appendItem(new ReportItem("Node ID", joinMetadata.getNodeId()));
        builder.appendItem(new ReportItem("Variable Order", joinMetadata.getVariableOrder().toString()));
        builder.appendItem(new ReportItem("Parent Node ID", joinMetadata.getParentNodeId()));
        builder.appendItem(new ReportItem("Left Child Node ID", joinMetadata.getLeftChildNodeId()));
        builder.appendItem(new ReportItem("Right Child Node ID", joinMetadata.getRightChildNodeId()));
        builder.appendItem(new ReportItem("Count", "" + queryReport.getCount(joinMetadata.getNodeId())));
    }

    for (final StatementPatternMetadata spMetadata : metadata.getStatementPatternMetadata()) {
        appendSectionHeader(builder, "STATEMENT PATTERN NODE");
        builder.appendItem(new ReportItem("Node ID", spMetadata.getNodeId()));
        builder.appendItem(new ReportItem("Variable Order", spMetadata.getVariableOrder().toString()));
        builder.appendItem(new ReportItem("Statement Pattern", spMetadata.getStatementPattern()));
        builder.appendItem(new ReportItem("Parent Node ID", spMetadata.getParentNodeId()));
        builder.appendItem(new ReportItem("Count", "" + queryReport.getCount(spMetadata.getNodeId())));
    }

    return builder.build().toString();
}

/**
 * Appends the blank separator line and section title that precede every node section.
 */
private static void appendSectionHeader(final Report.Builder builder, final String title) {
    builder.appendItem(new ReportItem(""));
    builder.appendItem(new ReportItem(title));
}
Aggregations