Use of org.openrdf.sail.SailException in project incubator-rya by Apache.
From the class RdfCloudTripleStoreConnection, method addStatementInternal.
@Override
protected void addStatementInternal(final Resource subject, final URI predicate, final Value object,
        final Resource... contexts) throws SailException {
    try {
        final String cv_s = conf.getCv();
        final byte[] cv = cv_s == null ? null : cv_s.getBytes(StandardCharsets.UTF_8);
        final List<RyaStatement> ryaStatements = new ArrayList<>();
        if (contexts != null && contexts.length > 0) {
            // Create one RyaStatement per supplied context.
            for (final Resource context : contexts) {
                final RyaStatement statement = new RyaStatement(
                        RdfToRyaConversions.convertResource(subject),
                        RdfToRyaConversions.convertURI(predicate),
                        RdfToRyaConversions.convertValue(object),
                        RdfToRyaConversions.convertResource(context),
                        null, new StatementMetadata(), cv);
                ryaStatements.add(statement);
            }
        } else {
            // No context supplied; store the statement without one.
            final RyaStatement statement = new RyaStatement(
                    RdfToRyaConversions.convertResource(subject),
                    RdfToRyaConversions.convertURI(predicate),
                    RdfToRyaConversions.convertValue(object),
                    null, null, new StatementMetadata(), cv);
            ryaStatements.add(statement);
        }
        ryaDAO.add(ryaStatements.iterator());
    } catch (final RyaDAOException e) {
        throw new SailException(e);
    }
}
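A minimal caller-side sketch (not part of the Rya sources) of where the SailException thrown above surfaces: adding a statement through the standard OpenRDF SailConnection API. The sail parameter, the helper class name, and the example IRIs are hypothetical; a wrapped RyaDAOException reaches the caller as a SailException.

    import org.openrdf.model.Resource;
    import org.openrdf.model.URI;
    import org.openrdf.model.ValueFactory;
    import org.openrdf.sail.Sail;
    import org.openrdf.sail.SailConnection;
    import org.openrdf.sail.SailException;

    public class AddStatementSketch {
        // Sketch only: the Sail is assumed to be backed by the connection class above.
        public static void addExample(final Sail sail) {
            SailConnection conn = null;
            try {
                conn = sail.getConnection();
                final ValueFactory vf = sail.getValueFactory();
                final Resource subj = vf.createURI("urn:example:alice");
                final URI pred = vf.createURI("urn:example:knows");
                conn.begin();
                // addStatement() typically dispatches to addStatementInternal(); a storage
                // failure (RyaDAOException) would arrive here wrapped in a SailException.
                conn.addStatement(subj, pred, vf.createURI("urn:example:bob"));
                conn.commit();
            } catch (final SailException e) {
                // Handle or rethrow the wrapped storage failure.
                throw new RuntimeException("Failed to add the statement.", e);
            } finally {
                if (conn != null) {
                    try {
                        conn.close();
                    } catch (final SailException e) {
                        // The connection could not be closed cleanly; log and move on.
                    }
                }
            }
        }
    }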
Use of org.openrdf.sail.SailException in project incubator-rya by Apache.
From the class RdfCloudTripleStoreConnection, method getStatementsInternal.
@Override
protected CloseableIteration<? extends Statement, SailException> getStatementsInternal(final Resource subject,
        final URI predicate, final Value object, final boolean flag, final Resource... contexts) throws SailException {
    // try {
    // have to do this to get the inferred values
    // TODO: Will this method reduce performance?
    final Var subjVar = decorateValue(subject, "s");
    final Var predVar = decorateValue(predicate, "p");
    final Var objVar = decorateValue(object, "o");
    StatementPattern sp = null;
    final boolean hasContext = contexts != null && contexts.length > 0;
    final Resource context = (hasContext) ? contexts[0] : null;
    final Var cntxtVar = decorateValue(context, "c");
    // TODO: Only using one context here
    sp = new StatementPattern(subjVar, predVar, objVar, cntxtVar);
    // return new StoreTripleSource(store.getConf()).getStatements(resource, uri, value, contexts);
    final CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluate = evaluate(sp, null, null, false);
    // TODO: Use a util class to do this
    return new CloseableIteration<Statement, SailException>() {

        private boolean isClosed = false;

        @Override
        public void close() throws SailException {
            isClosed = true;
            try {
                evaluate.close();
            } catch (final QueryEvaluationException e) {
                throw new SailException(e);
            }
        }

        @Override
        public boolean hasNext() throws SailException {
            try {
                return evaluate.hasNext();
            } catch (final QueryEvaluationException e) {
                throw new SailException(e);
            }
        }

        @Override
        public Statement next() throws SailException {
            if (!hasNext() || isClosed) {
                throw new NoSuchElementException();
            }
            try {
                final BindingSet next = evaluate.next();
                final Resource bs_subj = (Resource) ((subjVar.hasValue()) ? subjVar.getValue() : next.getBinding(subjVar.getName()).getValue());
                final URI bs_pred = (URI) ((predVar.hasValue()) ? predVar.getValue() : next.getBinding(predVar.getName()).getValue());
                final Value bs_obj = (objVar.hasValue()) ? objVar.getValue() : (Value) next.getBinding(objVar.getName()).getValue();
                final Binding b_cntxt = next.getBinding(cntxtVar.getName());
                // convert BindingSet to Statement
                if (b_cntxt != null) {
                    return new ContextStatementImpl(bs_subj, bs_pred, bs_obj, (Resource) b_cntxt.getValue());
                } else {
                    return new StatementImpl(bs_subj, bs_pred, bs_obj);
                }
            } catch (final QueryEvaluationException e) {
                throw new SailException(e);
            }
        }

        @Override
        public void remove() throws SailException {
            try {
                evaluate.remove();
            } catch (final QueryEvaluationException e) {
                throw new SailException(e);
            }
        }
    };
    // } catch (QueryEvaluationException e) {
    // throw new SailException(e);
    // }
}
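A minimal consumer-side sketch (not part of the Rya sources) showing how the CloseableIteration built above is typically drained and then closed so the underlying query evaluation is released. The helper class name and the idea of counting statements are illustrative only.

    import info.aduna.iteration.CloseableIteration;
    import org.openrdf.model.Statement;
    import org.openrdf.sail.SailConnection;
    import org.openrdf.sail.SailException;

    public class GetStatementsSketch {
        public static long countStatements(final SailConnection conn) throws SailException {
            // includeInferred = true mirrors the inference-aware path taken by getStatementsInternal.
            final CloseableIteration<? extends Statement, SailException> iter =
                    conn.getStatements(null, null, null, true);
            long count = 0;
            try {
                while (iter.hasNext()) {
                    iter.next();
                    count++;
                }
            } finally {
                // Always close the iteration so the wrapped evaluation is released.
                iter.close();
            }
            return count;
        }
    }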
Use of org.openrdf.sail.SailException in project incubator-rya by Apache.
From the class MergeDriverClient, method main.
public static void main(final String[] args) throws ParseException, MergeConfigurationException, UnknownHostException,
        MergerException, java.text.ParseException, SailException, AccumuloException, AccumuloSecurityException,
        InferenceEngineException, RepositoryException, MalformedQueryException, UpdateExecutionException {
    final String log4jConfiguration = System.getProperties().getProperty("log4j.configuration");
    if (StringUtils.isNotBlank(log4jConfiguration)) {
        final String parsedConfiguration = PathUtils.clean(StringUtils.removeStart(log4jConfiguration, "file:"));
        final File configFile = new File(parsedConfiguration);
        if (configFile.exists()) {
            DOMConfigurator.configure(parsedConfiguration);
        } else {
            BasicConfigurator.configure();
        }
    }
    final MergeConfigurationCLI config = new MergeConfigurationCLI(args);
    try {
        configuration = config.createConfiguration();
    } catch (final MergeConfigurationException e) {
        // Note: execution continues with a null configuration if creation fails.
        LOG.error("Configuration failed.", e);
    }
    final boolean useTimeSync = configuration.getUseNtpServer();
    Optional<Long> offset = Optional.absent();
    if (useTimeSync) {
        final String tomcat = configuration.getChildTomcatUrl();
        final String ntpHost = configuration.getNtpServerHost();
        try {
            offset = Optional.<Long>fromNullable(TimeUtils.getNtpServerAndMachineTimeDifference(ntpHost, tomcat));
        } catch (final IOException e) {
            LOG.error("Unable to get time difference between time server: " + ntpHost + " and the server: " + tomcat, e);
        }
    }
    final StatementStoreFactory storeFactory = new StatementStoreFactory(configuration);
    try {
        final RyaStatementStore parentStore = storeFactory.getParentStatementStore();
        final RyaStatementStore childStore = storeFactory.getChildStatementStore();
        LOG.info("Starting Merge Tool");
        if (configuration.getParentDBType() == ACCUMULO && configuration.getChildDBType() == ACCUMULO) {
            final AccumuloRyaStatementStore childAStore = (AccumuloRyaStatementStore) childStore;
            final AccumuloRyaStatementStore parentAStore = (AccumuloRyaStatementStore) parentStore;
            // do map reduce merging.
            // TODO: Run Merger
        } else {
            if (configuration.getMergePolicy() == TIMESTAMP) {
                final TimestampPolicyMergeConfiguration timeConfig = (TimestampPolicyMergeConfiguration) configuration;
                final Long timeOffset;
                if (offset.isPresent()) {
                    timeOffset = offset.get();
                } else {
                    timeOffset = 0L;
                }
                final MemoryTimeMerger merger = new MemoryTimeMerger(parentStore, childStore,
                        new VisibilityStatementMerger(), timeConfig.getToolStartTime(),
                        configuration.getParentRyaInstanceName(), timeOffset);
                merger.runJob();
            }
        }
    } catch (final Exception e) {
        LOG.error("Something went wrong creating a Rya Statement Store connection.", e);
    }
    Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread thread, final Throwable throwable) {
            LOG.error("Uncaught exception in " + thread.getName(), throwable);
        }
    });
    LOG.info("Finished running Merge Tool");
    System.exit(1);
}
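The catch block around createConfiguration() only logs, so execution continues with a null configuration and the very next line would throw a NullPointerException. Below is a small, self-contained sketch (not part of the Rya sources) of the two generic bootstrap patterns main() relies on, with the uncaught-exception handler installed before any work starts rather than at the end; the class name and log messages are illustrative only.

    import org.apache.log4j.BasicConfigurator;
    import org.apache.log4j.Logger;
    import org.apache.log4j.xml.DOMConfigurator;

    public class DriverBootstrapSketch {
        private static final Logger LOG = Logger.getLogger(DriverBootstrapSketch.class);

        public static void main(final String[] args) {
            // Configure log4j from -Dlog4j.configuration if an XML file is given,
            // otherwise fall back to a basic console appender.
            final String log4jConfiguration = System.getProperty("log4j.configuration");
            if (log4jConfiguration != null && log4jConfiguration.endsWith(".xml")) {
                DOMConfigurator.configure(log4jConfiguration.replaceFirst("^file:", ""));
            } else {
                BasicConfigurator.configure();
            }
            // Install the handler up front so failures during the merge work are also captured.
            Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
                @Override
                public void uncaughtException(final Thread thread, final Throwable throwable) {
                    LOG.error("Uncaught exception in " + thread.getName(), throwable);
                }
            });
            LOG.info("Bootstrap complete; the merge work would start here.");
        }
    }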
Use of org.openrdf.sail.SailException in project incubator-rya by Apache.
From the class AccumuloLoadStatementsFile, method loadStatements.
@Override
public void loadStatements(final String ryaInstanceName, final Path statementsFile, final RDFFormat format)
        throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(ryaInstanceName);
    requireNonNull(statementsFile);
    requireNonNull(format);
    // Ensure the Rya Instance exists.
    if (!instanceExists.exists(ryaInstanceName)) {
        throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
    }
    Sail sail = null;
    SailRepository sailRepo = null;
    SailRepositoryConnection sailRepoConn = null;
    try {
        // Get a Sail object that is connected to the Rya instance.
        final AccumuloRdfConfiguration ryaConf = getAccumuloConnectionDetails().buildAccumuloRdfConfiguration(ryaInstanceName);
        // RYA-327 should address this hardcoded value.
        ryaConf.setFlush(false);
        sail = RyaSailFactory.getInstance(ryaConf);
        // Load the file.
        sailRepo = new SailRepository(sail);
        sailRepoConn = sailRepo.getConnection();
        sailRepoConn.add(statementsFile.toFile(), null, format);
    } catch (final SailException | AccumuloException | AccumuloSecurityException | RyaDAOException | InferenceEngineException e) {
        log.warn("Exception while loading:", e);
        throw new RyaClientException("A problem connecting to the Rya instance named '" + ryaInstanceName + "' has caused the load to fail.", e);
    } catch (final RepositoryException | RDFParseException | UnsupportedRDFormatException | IOException e) {
        log.warn("Exception while loading:", e);
        throw new RyaClientException("A problem processing the RDF file has caused the load into the Rya instance named '" + ryaInstanceName + "' to fail.", e);
    } finally {
        // Shut it all down.
        if (sailRepoConn != null) {
            try {
                sailRepoConn.close();
            } catch (final RepositoryException e) {
                log.warn("Couldn't close the SailRepoConnection that is attached to the Rya instance.", e);
            }
        }
        if (sailRepo != null) {
            try {
                sailRepo.shutDown();
            } catch (final RepositoryException e) {
                log.warn("Couldn't shut down the SailRepository that is attached to the Rya instance.", e);
            }
        }
        if (sail != null) {
            try {
                sail.shutDown();
            } catch (final SailException e) {
                log.warn("Couldn't shut down the Sail that is attached to the Rya instance.", e);
            }
        }
    }
}
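A hypothetical caller sketch (not part of the Rya sources) showing how the two RyaClientException paths above reach client code. The loader variable, the instance name, the file path, and the Rya package paths in the imports are assumptions.

    import java.nio.file.Paths;
    // Rya package paths below are assumed, not confirmed from the sources on this page.
    import org.apache.rya.api.client.InstanceDoesNotExistException;
    import org.apache.rya.api.client.RyaClientException;
    import org.apache.rya.api.client.accumulo.AccumuloLoadStatementsFile;
    import org.openrdf.rio.RDFFormat;

    public class LoadStatementsSketch {
        public static void load(final AccumuloLoadStatementsFile loader) {
            try {
                // Instance name and file path are illustrative only.
                loader.loadStatements("rya_example", Paths.get("statements.ttl"), RDFFormat.TURTLE);
            } catch (final InstanceDoesNotExistException e) {
                // The named Rya instance has not been installed yet.
                System.err.println("Install the Rya instance before loading: " + e.getMessage());
            } catch (final RyaClientException e) {
                // Connection problems and RDF parsing problems are both reported this way
                // (see the two catch blocks in loadStatements above).
                System.err.println("The load failed: " + e.getMessage());
            }
        }
    }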
Use of org.openrdf.sail.SailException in project incubator-rya by Apache.
From the class MongoBatchUpdatePCJ, method updatePCJResults.
private void updatePCJResults(final String ryaInstanceName, final String pcjId, final MongoClient client)
        throws InstanceDoesNotExistException, PCJDoesNotExistException, RyaClientException {
    // Things that have to be closed before we exit.
    Sail sail = null;
    SailConnection sailConn = null;
    try (final PrecomputedJoinStorage pcjStorage = new MongoPcjStorage(client, ryaInstanceName)) {
        // Create an instance of Sail backed by the Rya instance.
        sail = connectToRya(ryaInstanceName);
        final SailRepository sailRepo = new SailRepository(sail);
        final SailRepositoryConnection sailRepoConn = sailRepo.getConnection();
        // Purge the old results from the PCJ.
        try {
            pcjStorage.purge(pcjId);
        } catch (final PCJStorageException e) {
            throw new RyaClientException("Could not batch update PCJ with ID '" + pcjId + "' because the old " + "results could not be purged from it.", e);
        }
        // Parse the PCJ's SPARQL query.
        final PcjMetadata metadata = pcjStorage.getPcjMetadata(pcjId);
        final String sparql = metadata.getSparql();
        sailConn = sail.getConnection();
        final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        // Execute the query.
        final List<VisibilityBindingSet> batch = new ArrayList<>(1000);
        tupleQuery.evaluate(new TupleQueryResultHandlerBase() {

            @Override
            public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
                final VisibilityBindingSet result = new VisibilityBindingSet(bindingSet, "");
                log.warn("Visibility information on the binding set is lost during a batch update." + " This can create data leaks.");
                batch.add(result);
                if (batch.size() == 1000) {
                    try {
                        pcjStorage.addResults(pcjId, batch);
                    } catch (final PCJStorageException e) {
                        throw new TupleQueryResultHandlerException("Fail to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
                    }
                    batch.clear();
                }
            }
        });
        if (!batch.isEmpty()) {
            pcjStorage.addResults(pcjId, batch);
            batch.clear();
        }
    } catch (final MalformedQueryException | PCJStorageException | SailException | QueryEvaluationException | RepositoryException | TupleQueryResultHandlerException e) {
        throw new RyaClientException("Fail to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
    } finally {
        if (sailConn != null) {
            try {
                sailConn.close();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
        if (sail != null) {
            try {
                sail.shutDown();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
    }
}
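The method above closes sailConn and shuts down sail in its finally block, but the SailRepository and SailRepositoryConnection it opens are never released. A small, hypothetical helper in the spirit of the AccumuloLoadStatementsFile cleanup shown earlier might look like this; the class and method names are illustrative only.

    import org.openrdf.repository.RepositoryException;
    import org.openrdf.repository.sail.SailRepository;
    import org.openrdf.repository.sail.SailRepositoryConnection;

    public class RepositoryCleanupSketch {
        // Quietly release a repository connection and its repository,
        // reporting rather than propagating any failures.
        public static void closeQuietly(final SailRepositoryConnection conn, final SailRepository repo) {
            if (conn != null) {
                try {
                    conn.close();
                } catch (final RepositoryException e) {
                    System.err.println("Couldn't close the SailRepositoryConnection: " + e.getMessage());
                }
            }
            if (repo != null) {
                try {
                    repo.shutDown();
                } catch (final RepositoryException e) {
                    System.err.println("Couldn't shut down the SailRepository: " + e.getMessage());
                }
            }
        }
    }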