
Example 16 with SailException

Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.

The class AccumuloIndexSetProvider, method getIndices.

@Override
protected List<ExternalTupleSet> getIndices() throws PcjIndexSetException {
    requireNonNull(conf);
    try {
        final String tablePrefix = requireNonNull(conf.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX));
        final Connector conn = requireNonNull(ConfigUtils.getConnector(conf));
        List<String> tables = null;
        if (conf instanceof RdfCloudTripleStoreConfiguration) {
            tables = ((RdfCloudTripleStoreConfiguration) conf).getPcjTables();
        }
        // This map associates each PCJ table name with its PCJ SPARQL query.
        final Map<String, String> indexTables = Maps.newLinkedHashMap();
        try (final PrecomputedJoinStorage storage = new AccumuloPcjStorage(conn, tablePrefix)) {
            final PcjTableNameFactory pcjFactory = new PcjTableNameFactory();
            final boolean tablesProvided = tables != null && !tables.isEmpty();
            if (tablesProvided) {
                // if tables provided, associate table name with sparql
                for (final String table : tables) {
                    indexTables.put(table, storage.getPcjMetadata(pcjFactory.getPcjId(table)).getSparql());
                }
            } else if (hasRyaDetails(tablePrefix, conn)) {
                // If this is a newer install of Rya, and it has PCJ Details, then
                // use those.
                final List<String> ids = storage.listPcjs();
                for (final String id : ids) {
                    indexTables.put(pcjFactory.makeTableName(tablePrefix, id), storage.getPcjMetadata(id).getSparql());
                }
            } else {
                // Otherwise figure it out by scanning tables.
                final PcjTables pcjTables = new PcjTables();
                for (final String table : conn.tableOperations().list()) {
                    if (table.startsWith(tablePrefix + "INDEX")) {
                        indexTables.put(table, pcjTables.getPcjMetadata(conn, table).getSparql());
                    }
                }
            }
        }
        // Use the table-name-to-SPARQL map (indexTables) to create the {@link AccumuloIndexSet}s.
        final List<ExternalTupleSet> index = Lists.newArrayList();
        if (indexTables.isEmpty()) {
            log.info("No Index found");
        } else {
            for (final String table : indexTables.keySet()) {
                final String indexSparqlString = indexTables.get(table);
                index.add(new AccumuloIndexSet(indexSparqlString, conf, table));
            }
        }
        return index;
    } catch (final PCJStorageException | AccumuloException | AccumuloSecurityException | MalformedQueryException | SailException | QueryEvaluationException | TableNotFoundException e) {
        throw new PcjIndexSetException("Failed to retrieve the indices.", e);
    }
}
Also used : Connector(org.apache.accumulo.core.client.Connector) AccumuloException(org.apache.accumulo.core.client.AccumuloException) AccumuloPcjStorage(org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPcjStorage) AccumuloIndexSet(org.apache.rya.indexing.external.tupleSet.AccumuloIndexSet) PcjTableNameFactory(org.apache.rya.indexing.pcj.storage.accumulo.PcjTableNameFactory) SailException(org.eclipse.rdf4j.sail.SailException) ExternalTupleSet(org.apache.rya.indexing.external.tupleSet.ExternalTupleSet) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) QueryEvaluationException(org.eclipse.rdf4j.query.QueryEvaluationException) PrecomputedJoinStorage(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage) MalformedQueryException(org.eclipse.rdf4j.query.MalformedQueryException) List(java.util.List) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) PcjTables(org.apache.rya.indexing.pcj.storage.accumulo.PcjTables) PCJStorageException(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException) RdfCloudTripleStoreConfiguration(org.apache.rya.api.RdfCloudTripleStoreConfiguration)
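A small companion sketch, not part of the Rya sources, that reuses only the calls shown above (AccumuloPcjStorage, listPcjs, getPcjMetadata, PcjTableNameFactory) to report each PCJ table name together with its SPARQL query. The class name ListPcjSparql and the assumption that a live Connector and table prefix are already available are illustrative.

import java.util.Map;

import org.apache.accumulo.core.client.Connector;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPcjStorage;
import org.apache.rya.indexing.pcj.storage.accumulo.PcjTableNameFactory;

import com.google.common.collect.Maps;

public class ListPcjSparql {

    // Maps each PCJ table name to the SPARQL query it materializes, using the
    // same "PCJ Details" branch as getIndices() above.
    public static Map<String, String> listPcjSparql(final Connector conn, final String tablePrefix) throws Exception {
        final Map<String, String> indexTables = Maps.newLinkedHashMap();
        final PcjTableNameFactory pcjFactory = new PcjTableNameFactory();
        try (PrecomputedJoinStorage storage = new AccumuloPcjStorage(conn, tablePrefix)) {
            for (final String id : storage.listPcjs()) {
                indexTables.put(pcjFactory.makeTableName(tablePrefix, id), storage.getPcjMetadata(id).getSparql());
            }
        }
        return indexTables;
    }
}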

Example 17 with SailException

Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.

The class AccumuloRyaSinkTask, method makeSail.

@Override
protected Sail makeSail(final Map<String, String> taskConfig) throws ConnectException {
    requireNonNull(taskConfig);
    // Parse the configuration object.
    final AccumuloRyaSinkConfig config = new AccumuloRyaSinkConfig(taskConfig);
    // Move the configuration into a Rya Configuration object.
    final AccumuloRdfConfiguration ryaConfig = new AccumuloRdfConfiguration();
    ryaConfig.setTablePrefix(config.getRyaInstanceName());
    ryaConfig.setAccumuloZookeepers(config.getZookeepers());
    ryaConfig.setAccumuloInstance(config.getClusterName());
    ryaConfig.setAccumuloUser(config.getUsername());
    ryaConfig.setAccumuloPassword(config.getPassword());
    ryaConfig.setFlush(false);
    // Create the Sail object.
    try {
        return RyaSailFactory.getInstance(ryaConfig);
    } catch (SailException | AccumuloException | AccumuloSecurityException | RyaDAOException | InferenceEngineException e) {
        throw new ConnectException("Could not connect to the Rya Instance named " + config.getRyaInstanceName(), e);
    }
}
Also used : AccumuloException(org.apache.accumulo.core.client.AccumuloException) RyaDAOException(org.apache.rya.api.persist.RyaDAOException) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) InferenceEngineException(org.apache.rya.rdftriplestore.inference.InferenceEngineException) SailException(org.eclipse.rdf4j.sail.SailException) AccumuloRdfConfiguration(org.apache.rya.accumulo.AccumuloRdfConfiguration) ConnectException(org.apache.kafka.connect.errors.ConnectException)
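For comparison, a minimal sketch of the same construction outside Kafka Connect: fill in an AccumuloRdfConfiguration by hand and ask RyaSailFactory for a Sail. The connection values are placeholders, and the RyaSailFactory import path is assumed from the Rya sources rather than shown in this example.

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.api.persist.RyaDAOException;
import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
// Assumed import path; RyaSailFactory is not listed in the "Also used" section above.
import org.apache.rya.sail.config.RyaSailFactory;
import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.SailException;

public class MakeRyaSail {

    // Builds a Sail backed by an Accumulo Rya instance using placeholder
    // connection details; mirrors makeSail(...) above without the task config.
    public static Sail open() throws SailException, AccumuloException, AccumuloSecurityException,
            RyaDAOException, InferenceEngineException {
        final AccumuloRdfConfiguration ryaConfig = new AccumuloRdfConfiguration();
        ryaConfig.setTablePrefix("rya_");                  // Rya instance name / table prefix (placeholder)
        ryaConfig.setAccumuloZookeepers("localhost:2181"); // placeholder Zookeeper quorum
        ryaConfig.setAccumuloInstance("accumulo");         // placeholder cluster name
        ryaConfig.setAccumuloUser("root");                 // placeholder credentials
        ryaConfig.setAccumuloPassword("secret");
        ryaConfig.setFlush(false);
        return RyaSailFactory.getInstance(ryaConfig);
    }
}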

Example 18 with SailException

Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.

The class RyaSinkTaskTest, method flushBetweenPuts.

@Test
public void flushBetweenPuts() {
    // Create the Statements that will be put by the task.
    final ValueFactory vf = SimpleValueFactory.getInstance();
    final Set<Statement> batch1 = Sets.newHashSet(vf.createStatement(vf.createIRI("urn:Alice"), vf.createIRI("urn:WorksAt"), vf.createIRI("urn:Taco Shop"), vf.createIRI("urn:graph1")), vf.createStatement(vf.createIRI("urn:Bob"), vf.createIRI("urn:TalksTo"), vf.createIRI("urn:Charlie"), vf.createIRI("urn:graph2")));
    final Set<Statement> batch2 = Sets.newHashSet(vf.createStatement(vf.createIRI("urn:Eve"), vf.createIRI("urn:ListensTo"), vf.createIRI("urn:Alice"), vf.createIRI("urn:graph1")));
    // Create the task that will be tested.
    final InMemoryRyaSinkTask task = new InMemoryRyaSinkTask();
    // Set up the properties that will be used to configure the task. We don't actually need to set anything
    // here since ryaInstanceExists(...) always returns true and we use an in-memory RDF store.
    final Map<String, String> props = new HashMap<>();
    try {
        // Start the task.
        task.start(props);
        // Put the statements with flushes between them.
        task.put(Collections.singleton(new SinkRecord("topic", 1, null, "key", null, batch1, 0)));
        task.flush(new HashMap<>());
        task.put(Collections.singleton(new SinkRecord("topic", 1, null, "key", null, batch2, 1)));
        task.flush(new HashMap<>());
        // Fetch the stored Statements to show they match the original set.
        final Set<Statement> fetched = new HashSet<>();
        final Sail sail = task.makeSail(props);
        try (SailConnection conn = sail.getConnection();
            CloseableIteration<? extends Statement, SailException> it = conn.getStatements(null, null, null, false)) {
            while (it.hasNext()) {
                fetched.add(it.next());
            }
        }
        assertEquals(Sets.union(batch1, batch2), fetched);
    } finally {
        // Stop the task.
        task.stop();
    }
}
Also used : HashMap(java.util.HashMap) Statement(org.eclipse.rdf4j.model.Statement) ValueFactory(org.eclipse.rdf4j.model.ValueFactory) SimpleValueFactory(org.eclipse.rdf4j.model.impl.SimpleValueFactory) SailException(org.eclipse.rdf4j.sail.SailException) SinkRecord(org.apache.kafka.connect.sink.SinkRecord) SailConnection(org.eclipse.rdf4j.sail.SailConnection) Sail(org.eclipse.rdf4j.sail.Sail) HashSet(java.util.HashSet) Test(org.junit.Test)
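The read-back loop in the middle of this test is a reusable pattern on its own. Below is a small, self-contained helper (the class name DumpStatements is hypothetical) that collects every Statement visible through a Sail using the same CloseableIteration idiom.

import java.util.HashSet;
import java.util.Set;

import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.SailConnection;
import org.eclipse.rdf4j.sail.SailException;

public class DumpStatements {

    // Opens a connection, iterates every explicit statement, and returns them
    // as a Set. Both the connection and the iteration are closed automatically.
    public static Set<Statement> fetchAll(final Sail sail) throws SailException {
        final Set<Statement> fetched = new HashSet<>();
        try (SailConnection conn = sail.getConnection();
                CloseableIteration<? extends Statement, SailException> it = conn.getStatements(null, null, null, false)) {
            while (it.hasNext()) {
                fetched.add(it.next());
            }
        }
        return fetched;
    }
}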

Example 19 with SailException

Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.

The class RyaSinkTaskTest, method multipleRecords.

@Test
public void multipleRecords() {
    // Create the Statements that will be put by the task.
    final ValueFactory vf = SimpleValueFactory.getInstance();
    final Set<Statement> batch1 = Sets.newHashSet(vf.createStatement(vf.createIRI("urn:Alice"), vf.createIRI("urn:WorksAt"), vf.createIRI("urn:Taco Shop"), vf.createIRI("urn:graph1")), vf.createStatement(vf.createIRI("urn:Bob"), vf.createIRI("urn:TalksTo"), vf.createIRI("urn:Charlie"), vf.createIRI("urn:graph2")));
    final Set<Statement> batch2 = Sets.newHashSet(vf.createStatement(vf.createIRI("urn:Eve"), vf.createIRI("urn:ListensTo"), vf.createIRI("urn:Alice"), vf.createIRI("urn:graph1")));
    // Create the task that will be tested.
    final InMemoryRyaSinkTask task = new InMemoryRyaSinkTask();
    // Set up the properties that will be used to configure the task. We don't actually need to set anything
    // here since ryaInstanceExists(...) always returns true and we use an in-memory RDF store.
    final Map<String, String> props = new HashMap<>();
    try {
        // Start the task.
        task.start(props);
        // Put the statements as SinkRecords.
        final Collection<SinkRecord> records = Sets.newHashSet(new SinkRecord("topic", 1, null, "key", null, batch1, 0), new SinkRecord("topic", 1, null, "key", null, batch2, 1));
        task.put(records);
        // Flush the statements.
        task.flush(new HashMap<>());
        // Fetch the stored Statements to show they match the original set.
        final Set<Statement> fetched = new HashSet<>();
        final Sail sail = task.makeSail(props);
        try (SailConnection conn = sail.getConnection();
            CloseableIteration<? extends Statement, SailException> it = conn.getStatements(null, null, null, false)) {
            while (it.hasNext()) {
                fetched.add(it.next());
            }
        }
        assertEquals(Sets.union(batch1, batch2), fetched);
    } finally {
        // Stop the task.
        task.stop();
    }
}
Also used : HashMap(java.util.HashMap) Statement(org.eclipse.rdf4j.model.Statement) ValueFactory(org.eclipse.rdf4j.model.ValueFactory) SimpleValueFactory(org.eclipse.rdf4j.model.impl.SimpleValueFactory) SailException(org.eclipse.rdf4j.sail.SailException) SinkRecord(org.apache.kafka.connect.sink.SinkRecord) SailConnection(org.eclipse.rdf4j.sail.SailConnection) Sail(org.eclipse.rdf4j.sail.Sail) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 20 with SailException

Use of org.eclipse.rdf4j.sail.SailException in project com.inova8.intelligentgraph by peterjohnlawrence.

The class IntelligentGraphConnection, method getStatements.

/**
 * Gets the statements.
 *
 * @param subj the subject resource
 * @param pred the predicate IRI
 * @param obj the object value
 * @param includeInferred whether to include inferred statements
 * @param contexts the contexts to query
 * @return the matching statements
 * @throws SailException if the statements cannot be retrieved
 */
@Override
public CloseableIteration<? extends IntelligentStatement, SailException> getStatements(Resource subj, IRI pred, Value obj, boolean includeInferred, Resource... contexts) throws SailException {
    try {
        Resource[] extendedContexts = contexts;
        if (pred != null && !pred.stringValue().equals(SCRIPT.isprivate)) {
            extendedContexts = getContexts(contexts);
        }
        String[] predicateParts;
        if (pred != null) {
            predicateParts = decodePredicate(pred);
            switch(predicateParts[0]) {
                case PATHQL.getFact:
                case PATHQL.getFacts:
                    return getFacts(subj, decodePathQL(predicateParts, obj), obj, extendedContexts);
                case PATHQL.getPath:
                case PATHQL.getPaths:
                    return getPaths(subj, decodePathQL(predicateParts, obj), obj, extendedContexts);
                case PATHQL.traceFact:
                case PATHQL.traceFacts:
                    return traceFacts(subj, decodePathQL(predicateParts, obj), obj, extendedContexts);
                case PATHQL.clearCache:
                    return clearCache(subj, pred, obj, extendedContexts);
                case PATHQL.getScript:
                    return getScript(subj, decodePathQL(predicateParts, obj), obj, extendedContexts);
                default:
                    return new IntelligentGraphStatementsIterator(super.getStatements(subj, pred, obj, includeInferred, extendedContexts), intelligentGraphSail, this, extendedContexts);
            }
        } else {
            return new IntelligentGraphStatementsIterator(super.getStatements(subj, pred, obj, includeInferred, extendedContexts), intelligentGraphSail, this, extendedContexts);
        }
    } catch (Exception e) {
        throw new SailException(e.getMessage(), e);
    }
}
Also used : Resource(org.eclipse.rdf4j.model.Resource) SailException(org.eclipse.rdf4j.sail.SailException) QueryEvaluationException(org.eclipse.rdf4j.query.QueryEvaluationException) RepositoryException(org.eclipse.rdf4j.repository.RepositoryException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) PathPatternException(com.inova8.pathql.processor.PathPatternException) RecognitionException(org.antlr.v4.runtime.RecognitionException)
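A schematic sketch of the intercept-or-delegate idea behind getStatements, not the IntelligentGraph implementation itself: an RDF4J SailConnectionWrapper subclass that routes predicates in a made-up urn:pathql: namespace to a placeholder handler, delegates everything else to the wrapped connection, and rewraps any failure as a SailException, mirroring the catch block above.

import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.common.iteration.EmptyIteration;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.sail.SailConnection;
import org.eclipse.rdf4j.sail.SailException;
import org.eclipse.rdf4j.sail.helpers.SailConnectionWrapper;

public class PathDispatchingConnection extends SailConnectionWrapper {

    // Hypothetical predicate namespace used only to illustrate the dispatch.
    private static final String PATHQL_NS = "urn:pathql:";

    public PathDispatchingConnection(final SailConnection delegate) {
        super(delegate);
    }

    @Override
    public CloseableIteration<? extends Statement, SailException> getStatements(final Resource subj, final IRI pred,
            final Value obj, final boolean includeInferred, final Resource... contexts) throws SailException {
        try {
            if (pred != null && pred.stringValue().startsWith(PATHQL_NS)) {
                // A real implementation would evaluate the encoded path query here;
                // this placeholder simply returns no statements.
                return new EmptyIteration<Statement, SailException>();
            }
            // Anything else falls through to the wrapped connection unchanged.
            return super.getStatements(subj, pred, obj, includeInferred, contexts);
        } catch (final Exception e) {
            throw new SailException(e.getMessage(), e);
        }
    }
}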

Aggregations

SailException (org.eclipse.rdf4j.sail.SailException): 36
RyaDAOException (org.apache.rya.api.persist.RyaDAOException): 19
QueryEvaluationException (org.eclipse.rdf4j.query.QueryEvaluationException): 17
RepositoryException (org.eclipse.rdf4j.repository.RepositoryException): 17
AccumuloException (org.apache.accumulo.core.client.AccumuloException): 13
AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException): 13
RyaClientException (org.apache.rya.api.client.RyaClientException): 13
Sail (org.eclipse.rdf4j.sail.Sail): 13
InferenceEngineException (org.apache.rya.rdftriplestore.inference.InferenceEngineException): 12
MalformedQueryException (org.eclipse.rdf4j.query.MalformedQueryException): 11
InstanceDoesNotExistException (org.apache.rya.api.client.InstanceDoesNotExistException): 8
SailRepository (org.eclipse.rdf4j.repository.sail.SailRepository): 7
IOException (java.io.IOException): 6
AccumuloRdfConfiguration (org.apache.rya.accumulo.AccumuloRdfConfiguration): 6
PathPatternException (com.inova8.pathql.processor.PathPatternException): 5
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 5
RecognitionException (org.antlr.v4.runtime.RecognitionException): 5
Resource (org.eclipse.rdf4j.model.Resource): 5
SailRepositoryConnection (org.eclipse.rdf4j.repository.sail.SailRepositoryConnection): 5
SailConnection (org.eclipse.rdf4j.sail.SailConnection): 5