
Example 11 with RyaTableMutationsFactory

Use of org.apache.rya.accumulo.RyaTableMutationsFactory in project incubator-rya by apache.

From class AccumuloDocIndexerTest, method testNoContextUnCommonVarBs2.

@Test
public void testNoContextUnCommonVarBs2() throws Exception {
    BatchWriter bw = null;
    RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf));
    bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30);
    for (int i = 0; i < 30; i++) {
        RyaStatement rs1 = new RyaStatement(new RyaURI("uri:cq1"), new RyaURI("uri:cf1"), new RyaURI("uri:" + i));
        RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"));
        RyaStatement rs3 = null;
        if (i == 5 || i == 10 || i == 15 || i == 20 || i == 25) {
            rs3 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER, Integer.toString(i)));
        }
        // rtm.serialize demonstrates the factory's per-table mutation output for each statement;
        // the mutations actually written to the batch writer below come from EntityCentricIndex.
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize1 = rtm.serialize(rs1);
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize2 = rtm.serialize(rs2);
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize3 = null;
        if (rs3 != null) {
            serialize3 = rtm.serialize(rs3);
        }
        Collection<Mutation> m1 = EntityCentricIndex.createMutations(rs1);
        for (Mutation m : m1) {
            bw.addMutation(m);
        }
        Collection<Mutation> m2 = EntityCentricIndex.createMutations(rs2);
        for (Mutation m : m2) {
            bw.addMutation(m);
        }
        if (serialize3 != null) {
            Collection<Mutation> m3 = EntityCentricIndex.createMutations(rs3);
            for (Mutation m : m3) {
                bw.addMutation(m);
            }
        }
    }
    String q1 = "" //
            + "SELECT ?X ?Y1 ?Y2 ?Y3 " //
            + "{" //
            + "?Y1 <uri:cf1> ?X ." //
            + "?X <uri:cf2> ?Y2 ." //
            + "?X <uri:cf3> ?Y3 ." //
            + "}";
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq1 = parser.parseQuery(q1, null);
    TupleExpr te1 = pq1.getTupleExpr();
    List<StatementPattern> spList1 = StatementPatternCollector.process(te1);
    Assert.assertTrue(StarQuery.isValidStarQuery(spList1));
    StarQuery sq1 = new StarQuery(spList1);
    AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf);
    List<BindingSet> bsList = Lists.newArrayList();
    QueryBindingSet b1 = new QueryBindingSet();
    b1.addBinding("X", vf.createURI("uri:5"));
    QueryBindingSet b2 = new QueryBindingSet();
    b2.addBinding("X", vf.createURI("uri:15"));
    QueryBindingSet b3 = new QueryBindingSet();
    b3.addBinding("X", vf.createURI("uri:25"));
    bsList.add(b1);
    bsList.add(b2);
    bsList.add(b3);
    CloseableIteration<BindingSet, QueryEvaluationException> sol1 = adi.queryDocIndex(sq1, bsList);
    System.out.println("**********************TEST 6***********************");
    int results = 0;
    while (sol1.hasNext()) {
        System.out.println(sol1.next());
        results++;
    }
    Assert.assertEquals(3, results);
    adi.close();
}
Also used : QueryBindingSet(org.openrdf.query.algebra.evaluation.QueryBindingSet) BindingSet(org.openrdf.query.BindingSet) SPARQLParser(org.openrdf.query.parser.sparql.SPARQLParser) ParsedQuery(org.openrdf.query.parser.ParsedQuery) RyaStatement(org.apache.rya.api.domain.RyaStatement) RyaType(org.apache.rya.api.domain.RyaType) TupleExpr(org.openrdf.query.algebra.TupleExpr) QueryBindingSet(org.openrdf.query.algebra.evaluation.QueryBindingSet) RyaURI(org.apache.rya.api.domain.RyaURI) StatementPattern(org.openrdf.query.algebra.StatementPattern) QueryEvaluationException(org.openrdf.query.QueryEvaluationException) RyaTableMutationsFactory(org.apache.rya.accumulo.RyaTableMutationsFactory) Collection(java.util.Collection) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Test(org.junit.Test)
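
The write-side pattern shared by these document-indexer tests is the same in every example: build a RyaTableMutationsFactory from the RyaTripleContext, serialize each RyaStatement, and feed entity-centric mutations to a BatchWriter. The lines below are a minimal sketch of that pattern, assuming the conf, accCon, and tableName fields from the test fixture; the statement values are placeholders.

// Minimal sketch; conf, accCon, and tableName come from the surrounding test fixture.
RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf));
BatchWriter bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30);
RyaStatement stmt = new RyaStatement(new RyaURI("uri:s"), new RyaURI("uri:p"), new RyaType(XMLSchema.STRING, "o"));
// Per-layout mutations produced by the factory (SPO, PO, OSP).
Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> byLayout = rtm.serialize(stmt);
// The tests above write the entity-centric mutations for the same statement.
for (Mutation m : EntityCentricIndex.createMutations(stmt)) {
    bw.addMutation(m);
}
bw.close();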

Example 12 with RyaTableMutationsFactory

Use of org.apache.rya.accumulo.RyaTableMutationsFactory in project incubator-rya by apache.

From class AccumuloDocIndexerTest, method testNoContextCommonVarBs.

@Test
public void testNoContextCommonVarBs() throws Exception {
    BatchWriter bw = null;
    RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf));
    bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30);
    for (int i = 0; i < 30; i++) {
        RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf1"), new RyaType(XMLSchema.STRING, "cq1"));
        RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"));
        RyaStatement rs3 = null;
        if (i == 5 || i == 10 || i == 15 || i == 20 || i == 25) {
            rs3 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER, Integer.toString(i)));
        }
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize1 = rtm.serialize(rs1);
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize2 = rtm.serialize(rs2);
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize3 = null;
        if (rs3 != null) {
            serialize3 = rtm.serialize(rs3);
        }
        Collection<Mutation> m1 = EntityCentricIndex.createMutations(rs1);
        for (Mutation m : m1) {
            bw.addMutation(m);
        }
        Collection<Mutation> m2 = EntityCentricIndex.createMutations(rs2);
        for (Mutation m : m2) {
            bw.addMutation(m);
        }
        if (serialize3 != null) {
            Collection<Mutation> m3 = EntityCentricIndex.createMutations(rs3);
            for (Mutation m : m3) {
                bw.addMutation(m);
            }
        }
    }
    String q1 = "" //
            + "SELECT ?X ?Y1 ?Y2 " //
            + "{" //
            + "?X <uri:cf1> ?Y1 ." //
            + "?X <uri:cf2> ?Y2 ." //
            + "?X <uri:cf3> ?Y3 ." //
            + "}";
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq1 = parser.parseQuery(q1, null);
    TupleExpr te1 = pq1.getTupleExpr();
    List<StatementPattern> spList1 = StatementPatternCollector.process(te1);
    Assert.assertTrue(StarQuery.isValidStarQuery(spList1));
    StarQuery sq1 = new StarQuery(spList1);
    AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf);
    List<BindingSet> bsList = Lists.newArrayList();
    QueryBindingSet b1 = new QueryBindingSet();
    b1.addBinding("X", vf.createURI("uri:5"));
    QueryBindingSet b2 = new QueryBindingSet();
    b2.addBinding("X", vf.createURI("uri:15"));
    QueryBindingSet b3 = new QueryBindingSet();
    b3.addBinding("X", vf.createURI("uri:25"));
    bsList.add(b1);
    bsList.add(b2);
    bsList.add(b3);
    CloseableIteration<BindingSet, QueryEvaluationException> sol1 = adi.queryDocIndex(sq1, bsList);
    System.out.println("**********************TEST 3***********************");
    int results = 0;
    while (sol1.hasNext()) {
        System.out.println(sol1.next());
        results++;
    }
    Assert.assertEquals(3, results);
    adi.close();
}
Also used : QueryBindingSet(org.openrdf.query.algebra.evaluation.QueryBindingSet) BindingSet(org.openrdf.query.BindingSet) SPARQLParser(org.openrdf.query.parser.sparql.SPARQLParser) ParsedQuery(org.openrdf.query.parser.ParsedQuery) RyaStatement(org.apache.rya.api.domain.RyaStatement) RyaType(org.apache.rya.api.domain.RyaType) TupleExpr(org.openrdf.query.algebra.TupleExpr) QueryBindingSet(org.openrdf.query.algebra.evaluation.QueryBindingSet) RyaURI(org.apache.rya.api.domain.RyaURI) StatementPattern(org.openrdf.query.algebra.StatementPattern) QueryEvaluationException(org.openrdf.query.QueryEvaluationException) RyaTableMutationsFactory(org.apache.rya.accumulo.RyaTableMutationsFactory) Collection(java.util.Collection) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Test(org.junit.Test)
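
The query side of these tests also repeats a fixed sequence: parse the SPARQL string, collect its StatementPatterns, validate them as a star query, and evaluate against AccumuloDocIdIndexer with a list of pre-bound BindingSets. A condensed sketch follows, assuming the conf and vf fields from the test fixture.

// Condensed sketch of the query pattern; conf and vf come from the test fixture.
String query = "SELECT ?X ?Y1 ?Y2 { ?X <uri:cf1> ?Y1 . ?X <uri:cf2> ?Y2 . ?X <uri:cf3> ?Y3 . }";
TupleExpr te = new SPARQLParser().parseQuery(query, null).getTupleExpr();
List<StatementPattern> spList = StatementPatternCollector.process(te);
Assert.assertTrue(StarQuery.isValidStarQuery(spList));
StarQuery sq = new StarQuery(spList);
// Pre-bind the shared variable so only those documents are fetched.
List<BindingSet> bsList = Lists.newArrayList();
QueryBindingSet bs = new QueryBindingSet();
bs.addBinding("X", vf.createURI("uri:5"));
bsList.add(bs);
AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf);
CloseableIteration<BindingSet, QueryEvaluationException> sol = adi.queryDocIndex(sq, bsList);
while (sol.hasNext()) {
    System.out.println(sol.next());
}
adi.close();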

Example 13 with RyaTableMutationsFactory

Use of org.apache.rya.accumulo.RyaTableMutationsFactory in project incubator-rya by apache.

From class AccumuloDocIndexerTest, method testContextConstantCommonVar.

@Test
public void testContextConstantCommonVar() throws Exception {
    BatchWriter bw = null;
    RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(conf));
    bw = accCon.createBatchWriter(tableName, 500L * 1024L * 1024L, Long.MAX_VALUE, 30);
    for (int i = 0; i < 30; i++) {
        RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:joe"));
        RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:joe"));
        RyaStatement rs3 = null;
        RyaStatement rs4 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf1"), new RyaURI("uri:cq1"), new RyaURI("uri:hank"));
        RyaStatement rs5 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"), new RyaURI("uri:hank"));
        RyaStatement rs6 = null;
        if (i == 5 || i == 10 || i == 15 || i == 20 || i == 25) {
            rs3 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER, Integer.toString(i)), new RyaURI("uri:joe"));
            rs6 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER, Integer.toString(i)), new RyaURI("uri:hank"));
        }
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize1 = rtm.serialize(rs1);
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize2 = rtm.serialize(rs2);
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize3 = null;
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize4 = rtm.serialize(rs4);
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize5 = rtm.serialize(rs5);
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> serialize6 = null;
        if (rs3 != null) {
            serialize3 = rtm.serialize(rs3);
        }
        if (rs6 != null) {
            serialize6 = rtm.serialize(rs6);
        }
        Collection<Mutation> m1 = EntityCentricIndex.createMutations(rs1);
        for (Mutation m : m1) {
            bw.addMutation(m);
        }
        Collection<Mutation> m2 = EntityCentricIndex.createMutations(rs2);
        for (Mutation m : m2) {
            bw.addMutation(m);
        }
        if (serialize3 != null) {
            Collection<Mutation> m3 = EntityCentricIndex.createMutations(rs3);
            for (Mutation m : m3) {
                bw.addMutation(m);
            }
        }
        Collection<Mutation> m4 = EntityCentricIndex.createMutations(rs4);
        for (Mutation m : m4) {
            bw.addMutation(m);
        }
        Collection<Mutation> m5 = EntityCentricIndex.createMutations(rs5);
        for (Mutation m : m5) {
            bw.addMutation(m);
        }
        if (serialize6 != null) {
            Collection<Mutation> m6 = EntityCentricIndex.createMutations(rs6);
            for (Mutation m : m6) {
                bw.addMutation(m);
            }
        }
    }
    String q1 = "" //
            + "SELECT ?Y1 ?Y2 ?Y3 " //
            + "{" //
            + "<uri:5> <uri:cf1> ?Y1 ." //
            + "<uri:5> <uri:cf2> ?Y2 ." //
            + "<uri:5> <uri:cf3> ?Y3 ." //
            + "}";
    String q2 = "" //
            + "SELECT ?Y1 ?Y2 ?Y3 " //
            + "{" //
            + " GRAPH <uri:hank> { " //
            + "<uri:5> <uri:cf1> ?Y1 ." //
            + "<uri:5> <uri:cf2> ?Y2 ." //
            + "<uri:5> <uri:cf3> ?Y3 ." //
            + " } " //
            + "}";
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq1 = parser.parseQuery(q1, null);
    TupleExpr te1 = pq1.getTupleExpr();
    List<StatementPattern> spList1 = StatementPatternCollector.process(te1);
    String rowString = spList1.get(0).getSubjectVar().getValue().stringValue();
    Assert.assertTrue(StarQuery.isValidStarQuery(spList1));
    StarQuery sq1 = new StarQuery(spList1);
    AccumuloDocIdIndexer adi = new AccumuloDocIdIndexer(conf);
    // Value v1 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(5)));
    // Value v2 = RyaToRdfConversions.convertValue(new RyaType(XMLSchema.INTEGER,Integer.toString(25)));
    List<BindingSet> bsList = Lists.newArrayList();
    // QueryBindingSet b1 = (new QueryBindingSet());
    // b1.addBinding("X", vf.createURI("uri:5"));
    // QueryBindingSet b2 = (new QueryBindingSet());
    // b2.addBinding("X", vf.createURI("uri:15"));
    // QueryBindingSet b3 = (new QueryBindingSet());
    // b3.addBinding("X", vf.createURI("uri:25"));
    // bsList.add(b1);
    // bsList.add(b2);
    // bsList.add(b3);
    // BatchScanner bs = accCon.createBatchScanner(tablename + "doc_partitioned_index", new Authorizations("U"), 15);
    // bs.setRanges(Collections.singleton(new Range(rowString)));
    // Iterator<Entry<Key,org.apache.accumulo.core.data.Value>> bsIt = bs.iterator();
    // while(bsIt.hasNext()) {
    // String otherRowString = bsIt.next().getKey().getRow().toString();
    // if(rowString.equals(otherRowString)) {
    // System.out.println(otherRowString);
    // }
    // 
    // }
    CloseableIteration<BindingSet, QueryEvaluationException> sol1 = adi.queryDocIndex(sq1, bsList);
    System.out.println("**********************TEST 14***********************");
    int results = 0;
    while (sol1.hasNext()) {
        System.out.println(sol1.next());
        results++;
    }
    Assert.assertEquals(2, results);
    ParsedQuery pq2 = parser.parseQuery(q2, null);
    TupleExpr te2 = pq2.getTupleExpr();
    List<StatementPattern> spList2 = StatementPatternCollector.process(te2);
    Assert.assertTrue(StarQuery.isValidStarQuery(spList2));
    StarQuery sq2 = new StarQuery(spList2);
    CloseableIteration<BindingSet, QueryEvaluationException> sol2 = adi.queryDocIndex(sq2, bsList);
    System.out.println("**********************TEST 14***********************");
    results = 0;
    while (sol2.hasNext()) {
        System.out.println(sol2.next());
        results++;
    }
    Assert.assertEquals(1, results);
    adi.close();
}
Also used : QueryBindingSet(org.openrdf.query.algebra.evaluation.QueryBindingSet) BindingSet(org.openrdf.query.BindingSet) SPARQLParser(org.openrdf.query.parser.sparql.SPARQLParser) ParsedQuery(org.openrdf.query.parser.ParsedQuery) RyaStatement(org.apache.rya.api.domain.RyaStatement) RyaType(org.apache.rya.api.domain.RyaType) TupleExpr(org.openrdf.query.algebra.TupleExpr) RyaURI(org.apache.rya.api.domain.RyaURI) StatementPattern(org.openrdf.query.algebra.StatementPattern) QueryEvaluationException(org.openrdf.query.QueryEvaluationException) RyaTableMutationsFactory(org.apache.rya.accumulo.RyaTableMutationsFactory) Collection(java.util.Collection) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Test(org.junit.Test)
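
Example 13 adds a named-graph dimension. Two pieces make it work, both visible above: statements written with a fourth, context argument, and a GRAPH clause in the query that scopes the star query to that context. The lines below are a small sketch of each, assuming an rtm factory configured as in the tests above.

// A statement scoped to the named graph uri:hank (four-argument constructor used above).
RyaStatement inHank = new RyaStatement(new RyaURI("uri:5"), new RyaURI("uri:cf1"),
        new RyaURI("uri:cq1"), new RyaURI("uri:hank"));
Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> byLayout = rtm.serialize(inHank);
// The GRAPH clause surfaces as a constant context var on each parsed StatementPattern.
String scoped = "SELECT ?Y1 ?Y2 ?Y3 { GRAPH <uri:hank> { "
        + "<uri:5> <uri:cf1> ?Y1 . <uri:5> <uri:cf2> ?Y2 . <uri:5> <uri:cf3> ?Y3 . } }";
List<StatementPattern> scopedPatterns = StatementPatternCollector.process(
        new SPARQLParser().parseQuery(scoped, null).getTupleExpr());
System.out.println(scopedPatterns.get(0).getContextVar().getValue()); // the constant context, uri:hank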

Example 14 with RyaTableMutationsFactory

Use of org.apache.rya.accumulo.RyaTableMutationsFactory in project incubator-rya by apache.

From class DocumentIndexIntersectingIteratorTest, method testSerialization1.

@Test
public void testSerialization1() throws Exception {
    BatchWriter bw = null;
    AccumuloRdfConfiguration acc = new AccumuloRdfConfiguration();
    acc.set(AccumuloRdfConfiguration.CONF_ADDITIONAL_INDEXERS, EntityCentricIndex.class.getName());
    RyaTableMutationsFactory rtm = new RyaTableMutationsFactory(RyaTripleContext.getInstance(acc));
    bw = accCon.createBatchWriter(tablename, 500L * 1024L * 1024L, Long.MAX_VALUE, 30);
    for (int i = 0; i < 20; i++) {
        RyaStatement rs1 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf1"), new RyaType(XMLSchema.STRING, "cq1"));
        RyaStatement rs2 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf2"), new RyaType(XMLSchema.STRING, "cq2"));
        RyaStatement rs3 = null;
        RyaStatement rs4 = null;
        if (i == 5 || i == 15) {
            rs3 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf3"), new RyaType(XMLSchema.INTEGER, Integer.toString(i)));
            rs4 = new RyaStatement(new RyaURI("uri:" + i), new RyaURI("uri:cf3"), new RyaType(XMLSchema.STRING, Integer.toString(i)));
        }
        Collection<Mutation> m1 = EntityCentricIndex.createMutations(rs1);
        for (Mutation m : m1) {
            bw.addMutation(m);
        }
        Collection<Mutation> m2 = EntityCentricIndex.createMutations(rs2);
        for (Mutation m : m2) {
            bw.addMutation(m);
        }
        if (rs3 != null) {
            Collection<Mutation> m3 = EntityCentricIndex.createMutations(rs3);
            for (Mutation m : m3) {
                bw.addMutation(m);
            }
        }
        if (rs4 != null) {
            Collection<Mutation> m4 = EntityCentricIndex.createMutations(rs4);
            for (Mutation m : m4) {
                bw.addMutation(m);
            }
        }
    }
    String q1 = "" //
            + "SELECT ?X ?Y1 ?Y2 " //
            + "{" //
            + "?X <uri:cf1> ?Y1 ." //
            + "?X <uri:cf2> ?Y2 ." //
            + "?X <uri:cf3> 5 ." //
            + "}";
    String q2 = "" //
            + "SELECT ?X ?Y1 ?Y2 " //
            + "{" //
            + "?X <uri:cf1> ?Y1  ." //
            + "?X <uri:cf2> ?Y2 ." //
            + "?X <uri:cf3> \"15\" ." //
            + "}";
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq1 = parser.parseQuery(q1, null);
    ParsedQuery pq2 = parser.parseQuery(q2, null);
    TupleExpr te1 = pq1.getTupleExpr();
    TupleExpr te2 = pq2.getTupleExpr();
    List<StatementPattern> spList1 = StatementPatternCollector.process(te1);
    List<StatementPattern> spList2 = StatementPatternCollector.process(te2);
    System.out.println(spList1);
    System.out.println(spList2);
    RyaType rt1 = RdfToRyaConversions.convertValue(spList1.get(2).getObjectVar().getValue());
    RyaType rt2 = RdfToRyaConversions.convertValue(spList2.get(2).getObjectVar().getValue());
    RyaURI predURI1 = (RyaURI) RdfToRyaConversions.convertValue(spList1.get(0).getPredicateVar().getValue());
    RyaURI predURI2 = (RyaURI) RdfToRyaConversions.convertValue(spList1.get(1).getPredicateVar().getValue());
    RyaURI predURI3 = (RyaURI) RdfToRyaConversions.convertValue(spList1.get(2).getPredicateVar().getValue());
    // System.out.println("to string" + spList1.get(2).getObjectVar().getValue().stringValue());
    // System.out.println("converted obj" + rt1.getData());
    // System.out.println("equal: " + rt1.getData().equals(spList1.get(2).getObjectVar().getValue().stringValue()));
    System.out.println(rt1);
    System.out.println(rt2);
    RyaContext rc = RyaContext.getInstance();
    byte[][] b1 = rc.serializeType(rt1);
    byte[][] b2 = rc.serializeType(rt2);
    byte[] b3 = Bytes.concat("object".getBytes(), "\u0000".getBytes(), b1[0], b1[1]);
    byte[] b4 = Bytes.concat("object".getBytes(), "\u0000".getBytes(), b2[0], b2[1]);
    System.out.println(new String(b3));
    System.out.println(new String(b4));
    TextColumn tc1 = new TextColumn(new Text(predURI1.getData()), new Text("object"));
    TextColumn tc2 = new TextColumn(new Text(predURI2.getData()), new Text("object"));
    TextColumn tc3 = new TextColumn(new Text(predURI3.getData()), new Text(b3));
    tc1.setIsPrefix(true);
    tc2.setIsPrefix(true);
    TextColumn[] tc = new TextColumn[3];
    tc[0] = tc1;
    tc[1] = tc2;
    tc[2] = tc3;
    IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class);
    DocumentIndexIntersectingIterator.setColumnFamilies(is, tc);
    Scanner scan = accCon.createScanner(tablename, new Authorizations("auths"));
    scan.addScanIterator(is);
    int results = 0;
    System.out.println("************************Test 21****************************");
    Text t = null;
    for (Map.Entry<Key, Value> e : scan) {
        t = e.getKey().getColumnQualifier();
        System.out.println(e);
        results++;
    }
    Assert.assertEquals(1, results);
    String[] s = t.toString().split("\u001D" + "\u001E");
    String[] s1 = s[2].split("\u0000");
    RyaType rt = rc.deserialize(s1[2].getBytes());
    System.out.println("Rya type is " + rt);
    org.openrdf.model.Value v = RyaToRdfConversions.convertValue(rt);
    Assert.assertTrue(v.equals(spList1.get(2).getObjectVar().getValue()));
    tc1 = new TextColumn(new Text(predURI1.getData()), new Text("object"));
    tc2 = new TextColumn(new Text(predURI2.getData()), new Text("object"));
    tc3 = new TextColumn(new Text(predURI3.getData()), new Text(b4));
    tc1.setIsPrefix(true);
    tc2.setIsPrefix(true);
    tc = new TextColumn[3];
    tc[0] = tc1;
    tc[1] = tc2;
    tc[2] = tc3;
    is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class);
    DocumentIndexIntersectingIterator.setColumnFamilies(is, tc);
    scan = accCon.createScanner(tablename, new Authorizations("auths"));
    scan.addScanIterator(is);
    results = 0;
    System.out.println("************************Test 21****************************");
    for (Map.Entry<Key, Value> e : scan) {
        t = e.getKey().getColumnQualifier();
        System.out.println(e);
        results++;
    }
    Assert.assertEquals(1, results);
    s = t.toString().split("\u001D" + "\u001E");
    s1 = s[2].split("\u0000");
    rt = rc.deserialize(s1[2].getBytes());
    System.out.println("Rya type is " + rt);
    v = RyaToRdfConversions.convertValue(rt);
    Assert.assertTrue(v.equals(spList2.get(2).getObjectVar().getValue()));
}
Also used : Scanner(org.apache.accumulo.core.client.Scanner) ParsedQuery(org.openrdf.query.parser.ParsedQuery) EntityCentricIndex(org.apache.rya.indexing.accumulo.entity.EntityCentricIndex) RyaStatement(org.apache.rya.api.domain.RyaStatement) RyaType(org.apache.rya.api.domain.RyaType) StatementPattern(org.openrdf.query.algebra.StatementPattern) RyaContext(org.apache.rya.api.resolver.RyaContext) SPARQLParser(org.openrdf.query.parser.sparql.SPARQLParser) Authorizations(org.apache.accumulo.core.security.Authorizations) Text(org.apache.hadoop.io.Text) AccumuloRdfConfiguration(org.apache.rya.accumulo.AccumuloRdfConfiguration) TupleExpr(org.openrdf.query.algebra.TupleExpr) RyaURI(org.apache.rya.api.domain.RyaURI) IteratorSetting(org.apache.accumulo.core.client.IteratorSetting) RyaTableMutationsFactory(org.apache.rya.accumulo.RyaTableMutationsFactory) Value(org.apache.accumulo.core.data.Value) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Map(java.util.Map) Key(org.apache.accumulo.core.data.Key) Test(org.junit.Test)
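
The least obvious step in Example 14 is building the exact-match column for a typed object value, as opposed to the prefix columns that match any object under a predicate. The lines below are a condensed sketch of just that step, using the same serialization and iterator-setting calls as the test; the two-column array is an illustrative subset of the three used above.

// Build the column-qualifier bytes for an exact match on the typed object value 5 (xsd:integer).
RyaType target = new RyaType(XMLSchema.INTEGER, "5");
byte[][] typed = RyaContext.getInstance().serializeType(target);
byte[] objectBytes = Bytes.concat("object".getBytes(), "\u0000".getBytes(), typed[0], typed[1]);
// Prefix columns match any object value under their predicate (as for cf1 and cf2 above);
// the last column matches only the exact serialized value.
TextColumn anyCf1Object = new TextColumn(new Text("uri:cf1"), new Text("object"));
anyCf1Object.setIsPrefix(true);
TextColumn exactCf3Object = new TextColumn(new Text("uri:cf3"), new Text(objectBytes));
IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class);
DocumentIndexIntersectingIterator.setColumnFamilies(is, new TextColumn[] { anyCf1Object, exactCf3Object });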

Example 15 with RyaTableMutationsFactory

Use of org.apache.rya.accumulo.RyaTableMutationsFactory in project incubator-rya by apache.

From class MergeToolMapper, method setup.

@Override
protected void setup(final Context context) throws IOException, InterruptedException {
    super.setup(context);
    log.info("Setting up mapper");
    parentConfig = context.getConfiguration();
    childConfig = getChildConfig(parentConfig);
    startTimeString = parentConfig.get(MergeTool.START_TIME_PROP, null);
    if (startTimeString != null) {
        startTime = MergeTool.convertStartTimeStringToDate(startTimeString);
    }
    usesStartTime = startTime != null;
    useTimeSync = parentConfig.getBoolean(CopyTool.USE_NTP_SERVER_PROP, false);
    useMergeFileInput = parentConfig.getBoolean(MergeTool.USE_MERGE_FILE_INPUT, false);
    parentTableName = parentConfig.get(MergeTool.TABLE_NAME_PROP, null);
    parentTablePrefix = parentConfig.get(MRUtils.TABLE_PREFIX_PROPERTY, null);
    childTablePrefix = childConfig.get(MRUtils.TABLE_PREFIX_PROPERTY, null);
    if (useMergeFileInput) {
        childTableName = parentTableName.replaceFirst(parentTablePrefix, childTablePrefix) + MergeTool.TEMP_SUFFIX;
    } else {
        childTableName = parentTableName.replaceFirst(parentTablePrefix, childTablePrefix);
    }
    spoTable = new Text(parentTablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX);
    poTable = new Text(parentTablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX);
    ospTable = new Text(parentTablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX);
    childScanner = setupChildScanner(context);
    childIterator = childScanner.iterator();
    parentAccumuloRdfConfiguration = new AccumuloRdfConfiguration(parentConfig);
    parentAccumuloRdfConfiguration.setTablePrefix(parentTablePrefix);
    parentRyaContext = RyaTripleContext.getInstance(parentAccumuloRdfConfiguration);
    ryaTableMutationFactory = new RyaTableMutationsFactory(parentRyaContext);
    childAccumuloRdfConfiguration = new AccumuloRdfConfiguration(childConfig);
    childAccumuloRdfConfiguration.setTablePrefix(childTablePrefix);
    childRyaContext = RyaTripleContext.getInstance(childAccumuloRdfConfiguration);
    childConnector = AccumuloRyaUtils.setupConnector(childAccumuloRdfConfiguration);
    childDao = AccumuloRyaUtils.setupDao(childConnector, childAccumuloRdfConfiguration);
    if (startTime != null && useTimeSync) {
        try {
            copyToolInputTime = AccumuloRyaUtils.getCopyToolSplitDate(childDao);
            copyToolRunTime = AccumuloRyaUtils.getCopyToolRunDate(childDao);
            // Find the parent's time offset that was stored when the child was copied.
            parentTimeOffset = AccumuloRyaUtils.getTimeOffset(childDao);
            final String durationBreakdown = TimeUtils.getDurationBreakdown(parentTimeOffset);
            log.info("The table " + parentTableName + " has a time offset of: " + durationBreakdown);
            childTimeOffset = Long.valueOf(childConfig.get(CopyTool.CHILD_TIME_OFFSET_PROP, null));
            final Date adjustedParentStartTime = new Date(startTime.getTime() - parentTimeOffset);
            final Date adjustedChildStartTime = new Date(startTime.getTime() - childTimeOffset);
            log.info("Adjusted parent start time: " + adjustedParentStartTime);
            log.info("Adjusted child start time: " + adjustedChildStartTime);
        } catch (final RyaDAOException e) {
            log.error("Error getting time offset", e);
        }
    }
    log.info("Finished setting up mapper");
}
Also used : RyaTableMutationsFactory(org.apache.rya.accumulo.RyaTableMutationsFactory) RyaDAOException(org.apache.rya.api.persist.RyaDAOException) Text(org.apache.hadoop.io.Text) AccumuloRdfConfiguration(org.apache.rya.accumulo.AccumuloRdfConfiguration) Date(java.util.Date)
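
Example 15 only shows the mapper setup. The factory built there is what later converts each RyaStatement selected for merging into per-table mutations. The lines below are a hypothetical sketch of that use, based on the serialize signature seen in the earlier examples; statementToMerge and the write step are placeholders, not code from MergeToolMapper.

// Hypothetical follow-on to setup(); statementToMerge is a placeholder RyaStatement.
Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, Collection<Mutation>> mutationsByTable =
        ryaTableMutationFactory.serialize(statementToMerge);
Collection<Mutation> spoMutations = mutationsByTable.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO);
// The mapper would then emit these against spoTable (and likewise PO and OSP against poTable and ospTable).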

Aggregations

RyaTableMutationsFactory (org.apache.rya.accumulo.RyaTableMutationsFactory): 15
BatchWriter (org.apache.accumulo.core.client.BatchWriter): 14
Mutation (org.apache.accumulo.core.data.Mutation): 14
RyaStatement (org.apache.rya.api.domain.RyaStatement): 14
RyaType (org.apache.rya.api.domain.RyaType): 14
RyaURI (org.apache.rya.api.domain.RyaURI): 14
Test (org.junit.Test): 14
StatementPattern (org.openrdf.query.algebra.StatementPattern): 14
TupleExpr (org.openrdf.query.algebra.TupleExpr): 14
ParsedQuery (org.openrdf.query.parser.ParsedQuery): 14
SPARQLParser (org.openrdf.query.parser.sparql.SPARQLParser): 14
Collection (java.util.Collection): 13
BindingSet (org.openrdf.query.BindingSet): 13
QueryEvaluationException (org.openrdf.query.QueryEvaluationException): 13
QueryBindingSet (org.openrdf.query.algebra.evaluation.QueryBindingSet): 13
Value (org.openrdf.model.Value): 4
Text (org.apache.hadoop.io.Text): 2
AccumuloRdfConfiguration (org.apache.rya.accumulo.AccumuloRdfConfiguration): 2
Date (java.util.Date): 1
Map (java.util.Map): 1