
Example 31 with RyaURI

Use of org.apache.rya.api.domain.RyaURI in the project incubator-rya by apache.

From the class GraphXInputFormatTest, method testInputFormat:

@Test
public void testInputFormat() throws Exception {
    // Store one statement whose subject, predicate, and object are all RyaURIs.
    RyaStatement input = RyaStatement.builder()
            .setSubject(new RyaURI("http://www.google.com"))
            .setPredicate(new RyaURI("http://some_other_uri"))
            .setObject(new RyaURI("http://www.yahoo.com"))
            .setColumnVisibility(new byte[0])
            .setValue(new byte[0])
            .build();
    apiImpl.add(input);
    Job jobConf = Job.getInstance();
    GraphXInputFormat.setMockInstance(jobConf, instance.getInstanceName());
    GraphXInputFormat.setConnectorInfo(jobConf, username, password);
    GraphXInputFormat.setInputTableName(jobConf, table);
    GraphXInputFormat.setScanIsolation(jobConf, false);
    GraphXInputFormat.setLocalIterators(jobConf, false);
    GraphXInputFormat.setOfflineTableScan(jobConf, false);
    GraphXInputFormat inputFormat = new GraphXInputFormat();
    JobContext context = new JobContextImpl(jobConf.getConfiguration(), jobConf.getJobID());
    List<InputSplit> splits = inputFormat.getSplits(context);
    Assert.assertEquals(1, splits.size());
    TaskAttemptContext taskAttemptContext = new TaskAttemptContextImpl(context.getConfiguration(), new TaskAttemptID(new TaskID(), 1));
    RecordReader<Object, RyaTypeWritable> reader = inputFormat.createRecordReader(splits.get(0), taskAttemptContext);
    RyaStatementRecordReader ryaStatementRecordReader = (RyaStatementRecordReader) reader;
    ryaStatementRecordReader.initialize(splits.get(0), taskAttemptContext);
    List<RyaType> results = new ArrayList<RyaType>();
    System.out.println("before while");
    while (ryaStatementRecordReader.nextKeyValue()) {
        System.out.println("in while");
        RyaTypeWritable writable = ryaStatementRecordReader.getCurrentValue();
        RyaType value = writable.getRyaType();
        Object text = ryaStatementRecordReader.getCurrentKey();
        RyaType type = new RyaType();
        type.setData(value.getData());
        type.setDataType(value.getDataType());
        results.add(type);
        System.out.println(value.getData());
        System.out.println(value.getDataType());
        System.out.println(results);
        System.out.println(type);
        System.out.println(text);
        System.out.println(value);
    }
    System.out.println("after while");
    System.out.println(results.size());
    System.out.println(results);
// Assert.assertTrue(results.size() == 2);
// Assert.assertTrue(results.contains(input));
}
Also used: JobContextImpl (org.apache.hadoop.mapreduce.task.JobContextImpl), TaskID (org.apache.hadoop.mapreduce.TaskID), TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), ArrayList (java.util.ArrayList), RyaStatement (org.apache.rya.api.domain.RyaStatement), TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext), RyaStatementRecordReader (org.apache.rya.accumulo.mr.GraphXInputFormat.RyaStatementRecordReader), RyaType (org.apache.rya.api.domain.RyaType), RyaURI (org.apache.rya.api.domain.RyaURI), TaskAttemptContextImpl (org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl), JobContext (org.apache.hadoop.mapreduce.JobContext), Job (org.apache.hadoop.mapreduce.Job), InputSplit (org.apache.hadoop.mapreduce.InputSplit), Test (org.junit.Test)
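
The while loop above is the standard Hadoop idiom for draining a RecordReader: call nextKeyValue() until it returns false and copy each value out (record readers are free to reuse their writables). As a minimal sketch, that idiom could be factored into a reusable helper; the class and method names below are illustrative rather than part of the project, and the import location of RyaTypeWritable is an assumption.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.mapreduce.RecordReader;
// Assumption: RyaTypeWritable sits next to GraphXInputFormat; the test above needs no import for it.
import org.apache.rya.accumulo.mr.RyaTypeWritable;
import org.apache.rya.api.domain.RyaType;

// Hypothetical helper: drains an already-initialized reader and copies every
// RyaTypeWritable value into a standalone RyaType, mirroring the copy done in the test.
final class RecordReaderUtil {

    static List<RyaType> drain(final RecordReader<Object, RyaTypeWritable> reader)
            throws IOException, InterruptedException {
        final List<RyaType> results = new ArrayList<RyaType>();
        while (reader.nextKeyValue()) {
            final RyaType value = reader.getCurrentValue().getRyaType();
            final RyaType copy = new RyaType();
            copy.setData(value.getData());
            copy.setDataType(value.getDataType());
            results.add(copy);
        }
        return results;
    }
}

With such a helper, the body of the test would reduce to initializing the reader and asserting on the list returned by drain().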

Example 32 with RyaURI

Use of org.apache.rya.api.domain.RyaURI in the project incubator-rya by apache.

From the class RyaInputFormatTest, method testInputFormat:

@Test
public void testInputFormat() throws Exception {
    // Store one statement whose subject, predicate, and object are all RyaURIs.
    RyaStatement input = RyaStatement.builder()
            .setSubject(new RyaURI("http://www.google.com"))
            .setPredicate(new RyaURI("http://some_other_uri"))
            .setObject(new RyaURI("http://www.yahoo.com"))
            .setColumnVisibility(new byte[0])
            .setValue(new byte[0])
            .build();
    apiImpl.add(input);
    Job jobConf = Job.getInstance();
    RyaInputFormat.setMockInstance(jobConf, instance.getInstanceName());
    RyaInputFormat.setConnectorInfo(jobConf, username, password);
    RyaInputFormat.setTableLayout(jobConf, TABLE_LAYOUT.SPO);
    AccumuloInputFormat.setInputTableName(jobConf, table);
    AccumuloInputFormat.setScanIsolation(jobConf, false);
    AccumuloInputFormat.setLocalIterators(jobConf, false);
    AccumuloInputFormat.setOfflineTableScan(jobConf, false);
    RyaInputFormat inputFormat = new RyaInputFormat();
    JobContext context = new JobContextImpl(jobConf.getConfiguration(), jobConf.getJobID());
    List<InputSplit> splits = inputFormat.getSplits(context);
    Assert.assertEquals(1, splits.size());
    TaskAttemptContext taskAttemptContext = new TaskAttemptContextImpl(context.getConfiguration(), new TaskAttemptID(new TaskID(), 1));
    RecordReader<Text, RyaStatementWritable> reader = inputFormat.createRecordReader(splits.get(0), taskAttemptContext);
    RyaStatementRecordReader ryaStatementRecordReader = (RyaStatementRecordReader) reader;
    ryaStatementRecordReader.initialize(splits.get(0), taskAttemptContext);
    List<RyaStatement> results = new ArrayList<RyaStatement>();
    while (ryaStatementRecordReader.nextKeyValue()) {
        RyaStatementWritable writable = ryaStatementRecordReader.getCurrentValue();
        RyaStatement value = writable.getRyaStatement();
        Text text = ryaStatementRecordReader.getCurrentKey();
        RyaStatement stmt = RyaStatement.builder()
                .setSubject(value.getSubject())
                .setPredicate(value.getPredicate())
                .setObject(value.getObject())
                .setContext(value.getContext())
                .setQualifier(value.getQualifer())
                .setColumnVisibility(value.getColumnVisibility())
                .setValue(value.getValue())
                .build();
        results.add(stmt);
        System.out.println(text);
        System.out.println(value);
    }
    Assert.assertTrue(results.size() == 2);
    Assert.assertTrue(results.contains(input));
}
Also used: JobContextImpl (org.apache.hadoop.mapreduce.task.JobContextImpl), TaskID (org.apache.hadoop.mapreduce.TaskID), TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), ArrayList (java.util.ArrayList), RyaStatement (org.apache.rya.api.domain.RyaStatement), TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext), Text (org.apache.hadoop.io.Text), RyaStatementRecordReader (org.apache.rya.accumulo.mr.RyaInputFormat.RyaStatementRecordReader), RyaURI (org.apache.rya.api.domain.RyaURI), TaskAttemptContextImpl (org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl), JobContext (org.apache.hadoop.mapreduce.JobContext), Job (org.apache.hadoop.mapreduce.Job), InputSplit (org.apache.hadoop.mapreduce.InputSplit), Test (org.junit.Test)
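
Once RyaInputFormat is wired up this way, the same Text/RyaStatementWritable pairs can feed a MapReduce job directly. The mapper below is a hypothetical sketch, not code from the project: its name and the choice to count subjects are assumptions, and it relies only on accessors already used in the test (getRyaStatement(), getSubject(), getData()).

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
// Assumption: RyaStatementWritable lives alongside RyaInputFormat in org.apache.rya.accumulo.mr.
import org.apache.rya.accumulo.mr.RyaStatementWritable;
import org.apache.rya.api.domain.RyaStatement;

// Hypothetical mapper: emits (subject URI, 1) for every statement read through RyaInputFormat,
// whose record reader produces Text keys and RyaStatementWritable values.
public class SubjectCountMapper extends Mapper<Text, RyaStatementWritable, Text, LongWritable> {

    private static final LongWritable ONE = new LongWritable(1);
    private final Text subject = new Text();

    @Override
    protected void map(final Text key, final RyaStatementWritable value, final Context context)
            throws IOException, InterruptedException {
        final RyaStatement statement = value.getRyaStatement();
        // RyaURI#getData() returns the URI as a plain String.
        subject.set(statement.getSubject().getData());
        context.write(subject, ONE);
    }
}

Paired with a summing reducer, this would yield a per-subject statement count over the SPO layout configured above.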

Example 33 with RyaURI

Use of org.apache.rya.api.domain.RyaURI in the project incubator-rya by apache.

From the class AccumuloRdfCountToolTest, method testMR:

@Test
public void testMR() throws Exception {
    RyaURI test1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "test1"));
    RyaURI pred1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred1"));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(0))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(1))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(2))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(3))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(4))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(5))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(6))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(7))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(8))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(9))));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(10))));
    AccumuloRdfCountTool.main(new String[] {
            "-Dac.mock=true",
            "-Dac.instance=" + instance,
            "-Dac.username=" + user,
            "-Dac.pwd=" + pwd,
            "-Drdf.tablePrefix=" + tablePrefix });
    Map<String, Key> expectedValues = new HashMap<String, Key>();
    String row = test1.getData();
    expectedValues.put(row, new Key(new Text(row), RdfCloudTripleStoreConstants.SUBJECT_CF_TXT, RdfCloudTripleStoreConstants.EMPTY_TEXT));
    row = pred1.getData();
    expectedValues.put(row, new Key(new Text(row), RdfCloudTripleStoreConstants.PRED_CF_TXT, RdfCloudTripleStoreConstants.EMPTY_TEXT));
    Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths);
    scanner.setRange(new Range());
    int count = 0;
    for (Map.Entry<Key, Value> entry : scanner) {
        assertTrue(expectedValues.get(entry.getKey().getRow().toString()).equals(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL));
        assertEquals(11, Long.parseLong(entry.getValue().toString()));
        count++;
    }
    assertEquals(2, count);
}
Also used: Scanner (org.apache.accumulo.core.client.Scanner), HashMap (java.util.HashMap), RyaStatement (org.apache.rya.api.domain.RyaStatement), Text (org.apache.hadoop.io.Text), Range (org.apache.accumulo.core.data.Range), RyaURI (org.apache.rya.api.domain.RyaURI), Value (org.apache.accumulo.core.data.Value), Map (java.util.Map), Key (org.apache.accumulo.core.data.Key), PartialKey (org.apache.accumulo.core.data.PartialKey), Test (org.junit.Test)
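
The eleven dao.add calls in testMR differ only in the integer literal, so the setup lends itself to a loop. The helper below is an illustrative sketch (its name is made up) and assumes it sits inside the test class, where the dao and vf fields and the RdfToRyaConversions import are already in scope.

// Hypothetical helper: stores `count` statements that share one subject and predicate,
// using the integer literals 0..count-1 as objects.
private void addIntLiterals(final RyaURI subject, final RyaURI predicate, final int count)
        throws RyaDAOException {
    for (int i = 0; i < count; i++) {
        dao.add(new RyaStatement(subject, predicate,
                RdfToRyaConversions.convertLiteral(vf.createLiteral(i))));
    }
}

The data setup then becomes a single call, addIntLiterals(test1, pred1, 11), matching the count of 11 asserted against the eval table.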

Example 34 with RyaURI

Use of org.apache.rya.api.domain.RyaURI in the project incubator-rya by apache.

From the class AccumuloRdfCountToolTest, method testContext:

@Test
public void testContext() throws Exception {
    RyaURI test1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "test1"));
    RyaURI pred1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred1"));
    RyaURI cntxt = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cntxt"));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(0)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(1)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(2)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(3)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(4)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(5)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(6)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(7)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(8)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(9)), cntxt));
    dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(10)), cntxt));
    AccumuloRdfCountTool.main(new String[] {
            "-Dac.mock=true",
            "-Dac.instance=" + instance,
            "-Dac.username=" + user,
            "-Dac.pwd=" + pwd,
            "-Drdf.tablePrefix=" + tablePrefix });
    Map<String, Key> expectedValues = new HashMap<String, Key>();
    String row = test1.getData();
    expectedValues.put(row, new Key(new Text(row), RdfCloudTripleStoreConstants.SUBJECT_CF_TXT, new Text(cntxt.getData())));
    row = pred1.getData();
    expectedValues.put(row, new Key(new Text(row), RdfCloudTripleStoreConstants.PRED_CF_TXT, new Text(cntxt.getData())));
    Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths);
    scanner.setRange(new Range());
    int count = 0;
    for (Map.Entry<Key, Value> entry : scanner) {
        assertTrue(expectedValues.get(entry.getKey().getRow().toString()).equals(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL));
        assertEquals(11, Long.parseLong(entry.getValue().toString()));
        count++;
    }
    assertEquals(2, count);
}
Also used: Scanner (org.apache.accumulo.core.client.Scanner), HashMap (java.util.HashMap), RyaStatement (org.apache.rya.api.domain.RyaStatement), Text (org.apache.hadoop.io.Text), Range (org.apache.accumulo.core.data.Range), RyaURI (org.apache.rya.api.domain.RyaURI), Value (org.apache.accumulo.core.data.Value), Map (java.util.Map), Key (org.apache.accumulo.core.data.Key), PartialKey (org.apache.accumulo.core.data.PartialKey), Test (org.junit.Test)
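
Both testMR and testContext finish with the same scan-and-compare loop over the eval table. A shared helper is sketched below; its name and parameters are illustrative, and it assumes the connector, tablePrefix, and auths fields of the test class.

// Hypothetical helper: scans the eval table and checks that each entry matches the expected
// key on row, column family, and column qualifier, and that its value is the expected count.
private void verifyEvalTable(final Map<String, Key> expectedValues, final long expectedCount,
        final int expectedRows) throws Exception {
    // createScanner can throw TableNotFoundException, covered by the broad throws clause.
    final Scanner scanner = connector.createScanner(
            tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths);
    scanner.setRange(new Range());
    int rows = 0;
    for (final Map.Entry<Key, Value> entry : scanner) {
        final Key expected = expectedValues.get(entry.getKey().getRow().toString());
        assertTrue(expected.equals(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL));
        assertEquals(expectedCount, Long.parseLong(entry.getValue().toString()));
        rows++;
    }
    assertEquals(expectedRows, rows);
}

The two tests would then differ only in how they populate expectedValues: testMR uses RdfCloudTripleStoreConstants.EMPTY_TEXT as the column qualifier, while testContext uses the context URI.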

Example 35 with RyaURI

Use of org.apache.rya.api.domain.RyaURI in the project incubator-rya by apache.

From the class Upgrade322ToolTest, method testUpgrade:

public void testUpgrade() throws Exception {
    Upgrade322Tool.main(new String[] {
            "-Dac.mock=true",
            "-Dac.instance=" + instance,
            "-Dac.username=" + user,
            "-Dac.pwd=" + pwd,
            "-Drdf.tablePrefix=" + tablePrefix });
    final AccumuloRdfConfiguration configuration = new AccumuloRdfConfiguration();
    configuration.setTablePrefix(tablePrefix);
    final AccumuloRyaDAO ryaDAO = new AccumuloRyaDAO();
    ryaDAO.setConnector(connector);
    ryaDAO.setConf(configuration);
    ryaDAO.init();
    final AccumuloRyaQueryEngine queryEngine = ryaDAO.getQueryEngine();
    TestUtils.verify(new RyaStatement(new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), new RyaURI("http://here/2010/tracked-data-provenance/ns#booleanLit"), new RyaType(XMLSchema.BOOLEAN, "true")), queryEngine);
    TestUtils.verify(new RyaStatement(new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), new RyaURI("http://here/2010/tracked-data-provenance/ns#longLit"), new RyaType(XMLSchema.LONG, "10")), queryEngine);
    TestUtils.verify(new RyaStatement(new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), new RyaURI("http://here/2010/tracked-data-provenance/ns#intLit"), new RyaType(XMLSchema.INTEGER, "10")), queryEngine);
    TestUtils.verify(new RyaStatement(new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), new RyaURI("http://here/2010/tracked-data-provenance/ns#byteLit"), new RyaType(XMLSchema.BYTE, "10")), queryEngine);
    TestUtils.verify(new RyaStatement(new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), new RyaURI("http://here/2010/tracked-data-provenance/ns#doubleLit"), new RyaType(XMLSchema.DOUBLE, "10.0")), queryEngine);
    TestUtils.verify(new RyaStatement(new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), new RyaURI("http://here/2010/tracked-data-provenance/ns#dateLit"), new RyaType(XMLSchema.DATETIME, "2011-07-12T06:00:00.000Z")), queryEngine);
    TestUtils.verify(new RyaStatement(new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), new RyaURI("http://here/2010/tracked-data-provenance/ns#stringLit"), new RyaType("stringLit")), queryEngine);
    TestUtils.verify(new RyaStatement(new RyaURI("http://here/2010/tracked-data-provenance/ns#uuid10"), new RyaURI("http://here/2010/tracked-data-provenance/ns#uriLit"), new RyaURI("http://here/2010/tracked-data-provenance/ns" + "#objectuuid1")), queryEngine);
    TestUtils.verify(new RyaStatement(new RyaURI("urn:org.apache.rya/2012/05#rts"), new RyaURI("urn:org.apache.rya/2012/05#version"), new RyaType("3.0.0")), queryEngine);
}
Also used: AccumuloRyaDAO (org.apache.rya.accumulo.AccumuloRyaDAO), RyaURI (org.apache.rya.api.domain.RyaURI), AccumuloRyaQueryEngine (org.apache.rya.accumulo.query.AccumuloRyaQueryEngine), RyaStatement (org.apache.rya.api.domain.RyaStatement), RyaType (org.apache.rya.api.domain.RyaType), AccumuloRdfConfiguration (org.apache.rya.accumulo.AccumuloRdfConfiguration)
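
Every verification in testUpgrade builds RyaURIs under the same tracked-data-provenance namespace, and most share the uuid10 subject. The helpers below are a hedged sketch (ns and verifyUuid10 are made-up names) that would shorten those calls while still delegating to the project's TestUtils.verify:

// Hypothetical helpers inside the test class; queryEngine is the AccumuloRyaQueryEngine
// obtained from ryaDAO.getQueryEngine() above.
private static final String NS = "http://here/2010/tracked-data-provenance/ns#";

private static RyaURI ns(final String localName) {
    return new RyaURI(NS + localName);
}

private static void verifyUuid10(final String predicateLocalName, final RyaType object,
        final AccumuloRyaQueryEngine queryEngine) throws Exception {
    TestUtils.verify(new RyaStatement(ns("uuid10"), ns(predicateLocalName), object), queryEngine);
}

The boolean check, for example, becomes verifyUuid10("booleanLit", new RyaType(XMLSchema.BOOLEAN, "true"), queryEngine); the URI-valued check works the same way because RyaURI extends RyaType.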

Aggregations

RyaURI (org.apache.rya.api.domain.RyaURI): 287
Test (org.junit.Test): 190
RyaStatement (org.apache.rya.api.domain.RyaStatement): 183
RyaType (org.apache.rya.api.domain.RyaType): 146
BindingSet (org.openrdf.query.BindingSet): 56
ArrayList (java.util.ArrayList): 52
StatementPattern (org.openrdf.query.algebra.StatementPattern): 50
QueryBindingSet (org.openrdf.query.algebra.evaluation.QueryBindingSet): 49
HashSet (java.util.HashSet): 43
RyaDAOException (org.apache.rya.api.persist.RyaDAOException): 43
QueryEvaluationException (org.openrdf.query.QueryEvaluationException): 42
ParsedQuery (org.openrdf.query.parser.ParsedQuery): 42
SPARQLParser (org.openrdf.query.parser.sparql.SPARQLParser): 42
StatementMetadata (org.apache.rya.api.domain.StatementMetadata): 35
Entity (org.apache.rya.indexing.entity.model.Entity): 30
Property (org.apache.rya.indexing.entity.model.Property): 28
URIImpl (org.openrdf.model.impl.URIImpl): 25
EntityStorage (org.apache.rya.indexing.entity.storage.EntityStorage): 22
AccumuloRdfConfiguration (org.apache.rya.accumulo.AccumuloRdfConfiguration): 21
TripleRow (org.apache.rya.api.resolver.triple.TripleRow): 21