Example usage of org.apache.rya.streams.api.queries.QueryRepository in the Apache incubator-rya project: the AddQueryCommand class, execute method.
/**
 * Adds a new query to Rya Streams using the supplied command line arguments.
 * <p>
 * When the query is flagged as an insert, only CONSTRUCT queries and
 * INSERT WHERE updates are accepted, since only those produce statements
 * that can be written back to the triple store.
 *
 * @param args - The command line arguments. (not null)
 * @throws ArgumentsException The arguments could not be parsed, or the query
 *   is not a valid insert query, or the SPARQL could not be parsed.
 * @throws ExecutionException The query could not be added to Rya Streams.
 */
@Override
public void execute(final String[] args) throws ArgumentsException, ExecutionException {
    requireNonNull(args);

    // Parse the command line arguments.
    final AddParameters params = new AddParameters();
    try {
        new JCommander(params, args);
    } catch (final ParameterException e) {
        throw new ArgumentsException("Could not add a new query because of invalid command line parameters.", e);
    }

    // Create the Kafka backed QueryChangeLog.
    final String bootstrapServers = params.kafkaIP + ":" + params.kafkaPort;
    final String topic = KafkaTopics.queryChangeLogTopic(params.ryaInstance);
    final QueryChangeLog queryChangeLog = KafkaQueryChangeLogFactory.make(bootstrapServers, topic);

    // The AddQuery command doesn't use the scheduled service feature.
    final Scheduler scheduler = Scheduler.newFixedRateSchedule(0L, 5, TimeUnit.SECONDS);
    // NOTE(review): neither the repository nor the change log is explicitly closed
    // here — confirm whether they hold resources that must be released once the
    // command completes.
    final QueryRepository queryRepo = new InMemoryQueryRepository(queryChangeLog, scheduler);

    // Execute the add query command.
    try {
        final AddQuery addQuery = new DefaultAddQuery(queryRepo);
        try {
            // Use primitive booleans; boxing the parsed flags serves no purpose.
            final boolean isActive = Boolean.parseBoolean(params.isActive);
            final boolean isInsert = Boolean.parseBoolean(params.isInsert);

            // If the query's results are meant to be written back to Rya, make sure it creates statements.
            if (isInsert) {
                final boolean isConstructQuery = QueryInvestigator.isConstruct(params.query);
                final boolean isInsertQuery = QueryInvestigator.isInsertWhere(params.query);

                if (isConstructQuery) {
                    System.out.println("WARNING: CONSTRUCT is part of the SPARQL Query API, so they do not normally\n" + "get written back to the triple store. Consider using an INSERT, which is\n" + "part of the SPARQL Update API, in the future.");
                }
                if (!(isConstructQuery || isInsertQuery)) {
                    throw new ArgumentsException("Only CONSTRUCT queries and INSERT updates may be inserted back to the triple store.");
                }
            }

            final StreamsQuery query = addQuery.addQuery(params.query, isActive, isInsert);
            System.out.println("Added query: " + query.getSparql());
        } catch (final RyaStreamsException e) {
            throw new ExecutionException("Unable to add the query to Rya Streams.", e);
        }
    } catch (final MalformedQueryException e) {
        throw new ArgumentsException("Could not parse the provided query.", e);
    }
}
Example usage of org.apache.rya.streams.api.queries.QueryRepository in the Apache incubator-rya project: the DefaultAddQueryTest class, addQuery_validSparql method.
@Test
public void addQuery_validSparql() throws Exception {
    // A well-formed SPARQL query.
    final String validQuery = "SELECT * WHERE { ?person <urn:worksAt> ?business }";

    // Build the interactor around a mocked repository.
    final QueryRepository queries = mock(QueryRepository.class);
    final AddQuery interactor = new DefaultAddQuery(queries);

    // Register the query.
    interactor.addQuery(validQuery, true, true);

    // The repository must have received exactly one matching add(...) invocation.
    verify(queries, times(1)).add(eq(validQuery), eq(true), eq(true));
}
Example usage of org.apache.rya.streams.api.queries.QueryRepository in the Apache incubator-rya project: the DefaultAddQueryTest class, addQuery_invalidSparql method.
// Verifies that registering a syntactically invalid query fails with a RyaStreamsException.
@Test(expected = RyaStreamsException.class)
public void addQuery_invalidSparql() throws Exception {
// Invalid SPARQL.
final String sparql = "This is not sparql.";
// Setup the interactor.
final QueryRepository repo = mock(QueryRepository.class);
final AddQuery addQuery = new DefaultAddQuery(repo);
// Add the query. This call is expected to throw; the @Test(expected = ...) annotation asserts it.
addQuery.addQuery(sparql, true, true);
}
Example usage of org.apache.rya.streams.api.queries.QueryRepository in the Apache incubator-rya project: the StreamResultsCommand class, execute method.
/**
 * Streams a query's results to stdout until the process is killed or the
 * result stream ends with an error.
 *
 * @param args - The command line arguments. (not null)
 * @throws ArgumentsException The arguments could not be parsed or the Query ID is malformed.
 * @throws ExecutionException The query could not be looked up, does not exist,
 *   or its SPARQL could not be parsed.
 */
@Override
public void execute(final String[] args) throws ArgumentsException, ExecutionException {
    requireNonNull(args);

    // Parse the command line arguments.
    final StreamResultsParameters params = new StreamResultsParameters();
    try {
        new JCommander(params, args);
    } catch (final ParameterException e) {
        throw new ArgumentsException("Could not stream the query's results because of invalid command line parameters.", e);
    }

    // Create the Kafka backed QueryChangeLog.
    final String bootstrapServers = params.kafkaIP + ":" + params.kafkaPort;
    final String topic = KafkaTopics.queryChangeLogTopic(params.ryaInstance);
    final QueryChangeLog queryChangeLog = KafkaQueryChangeLogFactory.make(bootstrapServers, topic);

    // Parse the Query ID from the command line parameters.
    final UUID queryId;
    try {
        queryId = UUID.fromString(params.queryId);
    } catch (final IllegalArgumentException e) {
        throw new ArgumentsException("Invalid Query ID " + params.queryId);
    }

    // The StreamResults command doesn't use the scheduled service feature.
    final Scheduler scheduler = Scheduler.newFixedRateSchedule(0L, 5, TimeUnit.SECONDS);
    final QueryRepository queryRepo = new InMemoryQueryRepository(queryChangeLog, scheduler);

    // Fetch the SPARQL of the query whose results will be streamed. Only the
    // repository lookup is inside the try block so that the "no such query"
    // ExecutionException below is not re-caught and double-wrapped by the
    // broad catch, and so the catch's message matches what actually failed.
    final Optional<StreamsQuery> sQuery;
    try {
        sQuery = queryRepo.get(queryId);
    } catch (final Exception e) {
        throw new ExecutionException("Problem encountered while fetching the query from the QueryRepository.", e);
    }
    if (!sQuery.isPresent()) {
        throw new ExecutionException("Could not read the results for query with ID " + queryId + " because no such query exists.");
    }
    final String sparql = sQuery.get().getSparql();

    // This command executes until the application is killed, so create a kill boolean.
    final AtomicBoolean finished = new AtomicBoolean(false);
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            finished.set(true);
        }
    });

    // Build the interactor based on the type of result the query produces.
    // A Reduced top-level node selects the statement deserializer; anything
    // else is treated as producing binding sets.
    final GetQueryResultStream<?> getQueryResultStream;
    try {
        final TupleExpr tupleExpr = new SPARQLParser().parseQuery(sparql, null).getTupleExpr();
        if (tupleExpr instanceof Reduced) {
            getQueryResultStream = new KafkaGetQueryResultStream<>(params.kafkaIP, params.kafkaPort, VisibilityStatementDeserializer.class);
        } else {
            getQueryResultStream = new KafkaGetQueryResultStream<>(params.kafkaIP, params.kafkaPort, VisibilityBindingSetDeserializer.class);
        }
    } catch (final MalformedQueryException e) {
        throw new ExecutionException("Could not parse the SPARQL for the query: " + sparql, e);
    }

    // Iterate through the results and print them to the console until the program or the stream ends.
    try (final QueryResultStream<?> stream = getQueryResultStream.fromStart(params.ryaInstance, queryId)) {
        while (!finished.get()) {
            for (final Object result : stream.poll(1000)) {
                System.out.println(result);
            }
        }
    } catch (final Exception e) {
        System.err.println("Error while reading the results from the stream.");
        e.printStackTrace();
        System.exit(1);
    }
}
Aggregations