Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by Apache.
The class MongoBatchUpdatePCJ, method updatePCJResults.
private void updatePCJResults(final String ryaInstanceName, final String pcjId, final MongoClient client) throws InstanceDoesNotExistException, PCJDoesNotExistException, RyaClientException {
    // Things that have to be closed before we exit.
    Sail sail = null;
    SailConnection sailConn = null;
    try (final PrecomputedJoinStorage pcjStorage = new MongoPcjStorage(client, ryaInstanceName)) {
        // Create an instance of Sail backed by the Rya instance.
        sail = connectToRya(ryaInstanceName);
        final SailRepository sailRepo = new SailRepository(sail);
        final SailRepositoryConnection sailRepoConn = sailRepo.getConnection();
        // Purge the old results from the PCJ.
        try {
            pcjStorage.purge(pcjId);
        } catch (final PCJStorageException e) {
            throw new RyaClientException("Could not batch update PCJ with ID '" + pcjId + "' because the old " + "results could not be purged from it.", e);
        }
        // Parse the PCJ's SPARQL query.
        final PcjMetadata metadata = pcjStorage.getPcjMetadata(pcjId);
        final String sparql = metadata.getSparql();
        sailConn = sail.getConnection();
        final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        // Execute the query.
        final List<VisibilityBindingSet> batch = new ArrayList<>(1000);
        tupleQuery.evaluate(new AbstractTupleQueryResultHandler() {
            @Override
            public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
                final VisibilityBindingSet result = new VisibilityBindingSet(bindingSet, "");
                log.warn("Visibility information on the binding set is lost during a batch update." + " This can create data leaks.");
                batch.add(result);
                if (batch.size() == 1000) {
                    try {
                        pcjStorage.addResults(pcjId, batch);
                    } catch (final PCJStorageException e) {
                        throw new TupleQueryResultHandlerException("Fail to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
                    }
                    batch.clear();
                }
            }
        });
        if (!batch.isEmpty()) {
            pcjStorage.addResults(pcjId, batch);
            batch.clear();
        }
    } catch (final MalformedQueryException | PCJStorageException | SailException | QueryEvaluationException | RepositoryException | TupleQueryResultHandlerException e) {
        throw new RyaClientException("Fail to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
    } finally {
        if (sailConn != null) {
            try {
                sailConn.close();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
        if (sail != null) {
            try {
                sail.shutDown();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
    }
}
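The batching pattern above, buffering solutions in a result handler and flushing every 1000, can be factored into a reusable handler. The following is a minimal sketch using only the RDF4J query API; the BatchingResultHandler class, the Consumer-based flush target, and the flush in endQueryResult are illustrative assumptions, not part of the Rya code (which flushes the final partial batch after evaluate returns).

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

import org.eclipse.rdf4j.query.AbstractTupleQueryResultHandler;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.TupleQueryResultHandlerException;

// Illustrative sketch: buffers query solutions and hands them to a consumer in
// fixed-size batches. The consumer is responsible for wrapping any storage failure.
public class BatchingResultHandler extends AbstractTupleQueryResultHandler {

    private final int batchSize;
    private final Consumer<List<BindingSet>> flush;
    private final List<BindingSet> batch;

    public BatchingResultHandler(final int batchSize, final Consumer<List<BindingSet>> flush) {
        this.batchSize = batchSize;
        this.flush = flush;
        this.batch = new ArrayList<>(batchSize);
    }

    @Override
    public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
        batch.add(bindingSet);
        if (batch.size() == batchSize) {
            flush.accept(new ArrayList<>(batch));
            batch.clear();
        }
    }

    @Override
    public void endQueryResult() throws TupleQueryResultHandlerException {
        // Flush whatever is left once the query has produced its last solution.
        if (!batch.isEmpty()) {
            flush.accept(new ArrayList<>(batch));
            batch.clear();
        }
    }
}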
Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by Apache.
The class MongoRyaSinkTask, method makeSail.
@Override
protected Sail makeSail(final Map<String, String> taskConfig) {
    requireNonNull(taskConfig);
    // Parse the configuration object.
    final MongoRyaSinkConfig config = new MongoRyaSinkConfig(taskConfig);
    // Move the configuration into a Rya Configuration object.
    final MongoDBRdfConfiguration ryaConfig = new MongoDBRdfConfiguration();
    ConfigUtils.setUseMongo(ryaConfig, true);
    ryaConfig.setMongoDBName(config.getRyaInstanceName());
    ryaConfig.setTablePrefix(config.getRyaInstanceName());
    ryaConfig.setMongoHostname(config.getHostname());
    ryaConfig.setMongoPort("" + config.getPort());
    if (!Strings.isNullOrEmpty(config.getUsername()) && !Strings.isNullOrEmpty(config.getPassword())) {
        ryaConfig.setMongoUser(config.getUsername());
        ryaConfig.setMongoPassword(config.getPassword());
    }
    // Create the Sail object.
    try {
        return RyaSailFactory.getInstance(ryaConfig);
    } catch (final SailException | AccumuloException | AccumuloSecurityException | RyaDAOException | InferenceEngineException e) {
        throw new ConnectException("Could not connect to the Rya Instance named " + config.getRyaInstanceName(), e);
    }
}
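For context, the Sail returned by makeSail is typically wrapped in an RDF4J repository before statements are written to it, mirroring the wrapping done in MongoBatchUpdatePCJ above. The following is a minimal sketch under that assumption; the SinkSailUsageSketch class and the statement values are made up for illustration.

import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection;
import org.eclipse.rdf4j.sail.Sail;

public class SinkSailUsageSketch {

    // Writes one hypothetical statement through the Repository API. A SailException
    // raised by the underlying store surfaces here as an RDF4J RepositoryException.
    public static void writeOne(final Sail sail) {
        // Wrap the already-initialized Sail, as MongoBatchUpdatePCJ does with connectToRya(...).
        final SailRepository repo = new SailRepository(sail);
        try (SailRepositoryConnection conn = repo.getConnection()) {
            final ValueFactory vf = SimpleValueFactory.getInstance();
            conn.add(vf.createIRI("urn:Alice"), vf.createIRI("urn:WorksAt"), vf.createIRI("urn:TacoShop"));
        } finally {
            // Shuts down the wrapped Sail as well.
            repo.shutDown();
        }
    }
}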
Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by Apache.
The class RyaSinkTaskTest, method singleRecord.
@Test
public void singleRecord() {
    // Create the Statements that will be put by the task.
    final ValueFactory vf = SimpleValueFactory.getInstance();
    final Set<Statement> statements = Sets.newHashSet(
            vf.createStatement(vf.createIRI("urn:Alice"), vf.createIRI("urn:WorksAt"), vf.createIRI("urn:Taco Shop"), vf.createIRI("urn:graph1")),
            vf.createStatement(vf.createIRI("urn:Bob"), vf.createIRI("urn:TalksTo"), vf.createIRI("urn:Charlie"), vf.createIRI("urn:graph2")),
            vf.createStatement(vf.createIRI("urn:Eve"), vf.createIRI("urn:ListensTo"), vf.createIRI("urn:Alice"), vf.createIRI("urn:graph1")));
    // Create the task that will be tested.
    final InMemoryRyaSinkTask task = new InMemoryRyaSinkTask();
    // Setup the properties that will be used to configure the task. We don't actually need to set anything
    // here since we're always returning true for ryaInstanceExists(...) and use an in memory RDF store.
    final Map<String, String> props = new HashMap<>();
    try {
        // Start the task.
        task.start(props);
        // Put the statements as a SinkRecord.
        task.put(Collections.singleton(new SinkRecord("topic", 1, null, "key", null, statements, 0)));
        // Flush the statements.
        task.flush(new HashMap<>());
        // Fetch the stored Statements to show they match the original set.
        final Set<Statement> fetched = new HashSet<>();
        final Sail sail = task.makeSail(props);
        try (SailConnection conn = sail.getConnection();
                CloseableIteration<? extends Statement, SailException> it = conn.getStatements(null, null, null, false)) {
            while (it.hasNext()) {
                fetched.add(it.next());
            }
        }
        assertEquals(statements, fetched);
    } finally {
        // Stop the task.
        task.stop();
    }
}
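The drain-and-close idiom in the test (try-with-resources around the SailConnection's CloseableIteration, then copy into a Set) can be factored into a small helper. The following is a minimal sketch using the same RDF4J calls as the test; the SailStatements class and the fetchAll method name are made up for illustration.

import java.util.HashSet;
import java.util.Set;

import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.sail.SailConnection;
import org.eclipse.rdf4j.sail.SailException;

public final class SailStatements {

    private SailStatements() {
    }

    // Reads every statement visible to the connection into a Set, closing the
    // iteration when done. Any SailException propagates to the caller.
    public static Set<Statement> fetchAll(final SailConnection conn) throws SailException {
        final Set<Statement> fetched = new HashSet<>();
        try (CloseableIteration<? extends Statement, SailException> it =
                conn.getStatements(null, null, null, false)) {
            while (it.hasNext()) {
                fetched.add(it.next());
            }
        }
        return fetched;
    }
}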
Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by Apache.
The class MergeDriverClient, method main.
public static void main(final String[] args) throws ParseException, MergeConfigurationException, UnknownHostException, MergerException, java.text.ParseException, SailException, AccumuloException, AccumuloSecurityException, InferenceEngineException, RepositoryException, MalformedQueryException, UpdateExecutionException {
    final String log4jConfiguration = System.getProperties().getProperty("log4j.configuration");
    if (StringUtils.isNotBlank(log4jConfiguration)) {
        final String parsedConfiguration = PathUtils.clean(StringUtils.removeStart(log4jConfiguration, "file:"));
        final File configFile = new File(parsedConfiguration);
        if (configFile.exists()) {
            DOMConfigurator.configure(parsedConfiguration);
        } else {
            BasicConfigurator.configure();
        }
    }
    final MergeConfigurationCLI config = new MergeConfigurationCLI(args);
    try {
        configuration = config.createConfiguration();
    } catch (final MergeConfigurationException e) {
        LOG.error("Configuration failed.", e);
    }
    final boolean useTimeSync = configuration.getUseNtpServer();
    Optional<Long> offset = Optional.absent();
    if (useTimeSync) {
        final String tomcat = configuration.getChildTomcatUrl();
        final String ntpHost = configuration.getNtpServerHost();
        try {
            offset = Optional.fromNullable(TimeUtils.getNtpServerAndMachineTimeDifference(ntpHost, tomcat));
        } catch (final IOException e) {
            LOG.error("Unable to get time difference between time server: " + ntpHost + " and the server: " + tomcat, e);
        }
    }
    final StatementStoreFactory storeFactory = new StatementStoreFactory(configuration);
    try {
        final RyaStatementStore parentStore = storeFactory.getParentStatementStore();
        final RyaStatementStore childStore = storeFactory.getChildStatementStore();
        LOG.info("Starting Merge Tool");
        if (configuration.getParentDBType() == ACCUMULO && configuration.getChildDBType() == ACCUMULO) {
            final AccumuloRyaStatementStore childAStore = (AccumuloRyaStatementStore) childStore;
            final AccumuloRyaStatementStore parentAStore = (AccumuloRyaStatementStore) parentStore;
            // do map reduce merging.
            // TODO: Run Merger
        } else {
            if (configuration.getMergePolicy() == TIMESTAMP) {
                final TimestampPolicyMergeConfiguration timeConfig = (TimestampPolicyMergeConfiguration) configuration;
                final Long timeOffset;
                if (offset.isPresent()) {
                    timeOffset = offset.get();
                } else {
                    timeOffset = 0L;
                }
                final MemoryTimeMerger merger = new MemoryTimeMerger(parentStore, childStore, new VisibilityStatementMerger(), timeConfig.getToolStartTime(), configuration.getParentRyaInstanceName(), timeOffset);
                merger.runJob();
            }
        }
    } catch (final Exception e) {
        LOG.error("Something went wrong creating a Rya Statement Store connection.", e);
    }
    Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread thread, final Throwable throwable) {
            LOG.error("Uncaught exception in " + thread.getName(), throwable);
        }
    });
    LOG.info("Finished running Merge Tool");
    System.exit(1);
}
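As a side note on the offset fallback above, Guava's Optional.or(...) expresses the same default in a single call. The following is a minimal sketch of that equivalence; the OffsetDefaults helper is made up for illustration.

import com.google.common.base.Optional;

public final class OffsetDefaults {

    private OffsetDefaults() {
    }

    // Same fallback as the if/else above: use the measured NTP offset when one
    // was obtained, otherwise assume the clocks are in sync (offset of 0 ms).
    public static Long offsetOrZero(final Optional<Long> offset) {
        return offset.or(0L);
    }
}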
Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by Apache.
The class RdfCloudTripleStoreConnection, method addStatementInternal.
@Override
protected void addStatementInternal(final Resource subject, final IRI predicate, final Value object, final Resource... contexts) throws SailException {
    try {
        // Fetch the column visibility that will be attached to the statements, if one is configured.
        final String cv_s = conf.getCv();
        final byte[] cv = cv_s == null ? null : cv_s.getBytes(StandardCharsets.UTF_8);
        // Convert the RDF4J values into RyaStatements, one per supplied context.
        final List<RyaStatement> ryaStatements = new ArrayList<>();
        if (contexts != null && contexts.length > 0) {
            for (final Resource context : contexts) {
                final RyaStatement statement = new RyaStatement(
                        RdfToRyaConversions.convertResource(subject),
                        RdfToRyaConversions.convertIRI(predicate),
                        RdfToRyaConversions.convertValue(object),
                        RdfToRyaConversions.convertResource(context),
                        null, new StatementMetadata(), cv);
                ryaStatements.add(statement);
            }
        } else {
            // No context was supplied, so store the statement without one.
            final RyaStatement statement = new RyaStatement(
                    RdfToRyaConversions.convertResource(subject),
                    RdfToRyaConversions.convertIRI(predicate),
                    RdfToRyaConversions.convertValue(object),
                    null, null, new StatementMetadata(), cv);
            ryaStatements.add(statement);
        }
        // Write the statements through the DAO; any DAO failure is rethrown as a SailException.
        ryaDAO.add(ryaStatements.iterator());
    } catch (final RyaDAOException e) {
        throw new SailException(e);
    }
}
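The following is a minimal sketch of the call path that reaches addStatementInternal, assuming the standard RDF4J Sail transaction protocol (begin/commit). The DirectSailAddSketch class and the statement values are made up for illustration; a RyaDAOException from the DAO arrives at the caller wrapped in the SailException thrown above.

import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.SailConnection;
import org.eclipse.rdf4j.sail.SailException;

public class DirectSailAddSketch {

    // Adds one hypothetical statement straight through the SailConnection, which is
    // the call path that ends in addStatementInternal(...) above.
    public static void addExample(final Sail ryaSail) throws SailException {
        final ValueFactory vf = SimpleValueFactory.getInstance();
        try (SailConnection conn = ryaSail.getConnection()) {
            conn.begin();
            conn.addStatement(vf.createIRI("urn:Alice"), vf.createIRI("urn:WorksAt"), vf.createIRI("urn:TacoShop"));
            conn.commit();
        }
    }
}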