Example 6 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in the hive project by Apache.

From the class TestHiveAccumuloTableOutputFormat, method testWriteToMockInstanceWithVisibility:

@Test
public void testWriteToMockInstanceWithVisibility() throws Exception {
    Instance inst = new MockInstance(test.getMethodName());
    Connector conn = inst.getConnector("root", new PasswordToken(""));
    Authorizations auths = new Authorizations("foo");
    conn.securityOperations().changeUserAuthorizations("root", auths);
    HiveAccumuloTableOutputFormat outputFormat = new HiveAccumuloTableOutputFormat();
    String table = test.getMethodName();
    conn.tableOperations().create(table);
    JobConf conf = new JobConf();
    conf.set(AccumuloConnectionParameters.INSTANCE_NAME, inst.getInstanceName());
    conf.set(AccumuloConnectionParameters.USER_NAME, "root");
    conf.set(AccumuloConnectionParameters.USER_PASS, "");
    conf.setBoolean(AccumuloConnectionParameters.USE_MOCK_INSTANCE, true);
    conf.set(AccumuloConnectionParameters.TABLE_NAME, test.getMethodName());
    FileSystem local = FileSystem.getLocal(conf);
    outputFormat.checkOutputSpecs(local, conf);
    RecordWriter<Text, Mutation> recordWriter = outputFormat.getRecordWriter(local, conf, null, null);
    List<String> names = Arrays.asList("row", "col1", "col2");
    List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
    Properties tableProperties = new Properties();
    tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:cq1,cf:cq2");
    tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
    tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(names));
    tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
    AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(), tableProperties, AccumuloSerDe.class.getSimpleName());
    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
    AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams, accumuloSerDeParams.getColumnMappings(), new ColumnVisibility("foo"), accumuloSerDeParams.getRowIdFactory());
    LazySimpleStructObjectInspector structOI = (LazySimpleStructObjectInspector) LazyFactory.createLazyStructInspector(Arrays.asList("row", "cq1", "cq2"), Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo), serDeParams.getSeparators(), serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
    LazyStruct struct = (LazyStruct) LazyFactory.createLazyObject(structOI);
    ByteArrayRef bytes = new ByteArrayRef();
    bytes.setData("row value1 value2".getBytes());
    struct.init(bytes, 0, bytes.getData().length);
    // Serialize the struct into a mutation
    Mutation m = serializer.serialize(struct, structOI);
    // Write the mutation
    recordWriter.write(new Text(table), m);
    // Close the writer
    recordWriter.close(null);
    Iterator<Entry<Key, Value>> iter = conn.createScanner(table, auths).iterator();
    Assert.assertTrue("Iterator did not have an element as expected", iter.hasNext());
    Entry<Key, Value> entry = iter.next();
    Key k = entry.getKey();
    Value v = entry.getValue();
    Assert.assertEquals("row", k.getRow().toString());
    Assert.assertEquals("cf", k.getColumnFamily().toString());
    Assert.assertEquals("cq1", k.getColumnQualifier().toString());
    Assert.assertEquals("foo", k.getColumnVisibility().toString());
    Assert.assertEquals("value1", new String(v.get()));
    Assert.assertTrue("Iterator did not have an element as expected", iter.hasNext());
    entry = iter.next();
    k = entry.getKey();
    v = entry.getValue();
    Assert.assertEquals("row", k.getRow().toString());
    Assert.assertEquals("cf", k.getColumnFamily().toString());
    Assert.assertEquals("cq2", k.getColumnQualifier().toString());
    Assert.assertEquals("foo", k.getColumnVisibility().toString());
    Assert.assertEquals("value2", new String(v.get()));
    Assert.assertFalse("Iterator unexpectedly had more data", iter.hasNext());
}
Also used : Connector(org.apache.accumulo.core.client.Connector) Configuration(org.apache.hadoop.conf.Configuration) MockInstance(org.apache.accumulo.core.client.mock.MockInstance) Instance(org.apache.accumulo.core.client.Instance) LazySerDeParameters(org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters) Properties(java.util.Properties) PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) Entry(java.util.Map.Entry) MockInstance(org.apache.accumulo.core.client.mock.MockInstance) FileSystem(org.apache.hadoop.fs.FileSystem) ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility) JobConf(org.apache.hadoop.mapred.JobConf) AccumuloRowSerializer(org.apache.hadoop.hive.accumulo.serde.AccumuloRowSerializer) LazyStruct(org.apache.hadoop.hive.serde2.lazy.LazyStruct) Authorizations(org.apache.accumulo.core.security.Authorizations) LazySimpleStructObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector) Text(org.apache.hadoop.io.Text) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) AccumuloSerDe(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDe) AccumuloSerDeParameters(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters) ByteArrayRef(org.apache.hadoop.hive.serde2.lazy.ByteArrayRef) Value(org.apache.accumulo.core.data.Value) Mutation(org.apache.accumulo.core.data.Mutation) Key(org.apache.accumulo.core.data.Key) Test(org.junit.Test)
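
Stripped of the Hive serialization plumbing, the visibility behaviour exercised above comes straight from the Accumulo client API: a cell written with a ColumnVisibility label is only returned to scanners whose Authorizations satisfy that label. A minimal sketch of the same round trip, assuming an existing Connector conn and a table named "vis_example" (both illustrative, not taken from the test):

// Write one cell protected by the visibility label "foo"
Mutation m = new Mutation("row");
m.put("cf", "cq1", new ColumnVisibility("foo"), new Value("value1".getBytes()));
BatchWriter writer = conn.createBatchWriter("vis_example", new BatchWriterConfig());
writer.addMutation(m);
writer.close();

// Only a scanner created with a satisfying Authorizations set sees the cell;
// scanning with empty Authorizations would return nothing for this row.
Scanner scanner = conn.createScanner("vis_example", new Authorizations("foo"));
for (Entry<Key, Value> entry : scanner) {
    System.out.println(entry.getKey() + " -> " + new String(entry.getValue().get()));
}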

Example 7 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in the hive project by Apache.

From the class TestHiveAccumuloTableInputFormat, method testGetProtectedField:

@Test
public void testGetProtectedField() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
    Authorizations origAuths = con.securityOperations().getUserAuthorizations(USER);
    con.securityOperations().changeUserAuthorizations(USER, new Authorizations(origAuths.toString() + ",foo"));
    Mutation m = new Mutation("r4");
    m.put(COLUMN_FAMILY, NAME, new ColumnVisibility("foo"), new Value("frank".getBytes()));
    m.put(COLUMN_FAMILY, SID, new ColumnVisibility("foo"), new Value(parseIntBytes("4")));
    m.put(COLUMN_FAMILY, DEGREES, new ColumnVisibility("foo"), new Value(parseDoubleBytes("60.6")));
    m.put(COLUMN_FAMILY, MILLIS, new ColumnVisibility("foo"), new Value(parseLongBytes("777")));
    writer.addMutation(m);
    writer.close();
    conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo");
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "brian".getBytes());
    rowId = new Text("r2");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "mark".getBytes());
    rowId = new Text("r3");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "dennis".getBytes());
    rowId = new Text("r4");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "frank".getBytes());
    assertFalse(reader.next(rowId, row));
}
Also used : Path(org.apache.hadoop.fs.Path) Authorizations(org.apache.accumulo.core.security.Authorizations) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) Text(org.apache.hadoop.io.Text) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility) InputSplit(org.apache.hadoop.mapred.InputSplit) AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow) Test(org.junit.Test)
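
The label "foo" used here is the simplest case; ColumnVisibility also accepts boolean expressions over labels, and a cell is only returned when the scan's Authorizations satisfy the whole expression. A short sketch of the expression syntax (the labels themselves are illustrative):

// A single label: visible to anyone holding the "foo" authorization
ColumnVisibility simple = new ColumnVisibility("foo");

// Conjunction: requires both "foo" and "bar"
ColumnVisibility both = new ColumnVisibility("foo&bar");

// Disjunction with grouping: "admin" alone, or "foo" and "bar" together
ColumnVisibility either = new ColumnVisibility("admin|(foo&bar)");

// The expression travels with each cell it is written on
Mutation m = new Mutation("r5");
m.put(COLUMN_FAMILY, NAME, either, new Value("alice".getBytes()));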

Example 8 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in the presto project by prestodb.

From the class Indexer, method index:

/**
     * Index the given mutation, adding mutations to the index and metrics table
     * <p>
     * Like typical use of a BatchWriter, this method does not flush mutations to the underlying index table.
     * For higher throughput the modifications to the metrics table are tracked in memory and added to the metrics table when the indexer is flushed or closed.
     *
     * @param mutation Mutation to index
     */
public void index(Mutation mutation) {
    // Increment the cardinality for the number of rows in the table
    metrics.get(METRICS_TABLE_ROW_COUNT).incrementAndGet();
    // Set the first and last row values of the table based on existing row IDs
    if (firstRow == null || byteArrayComparator.compare(mutation.getRow(), firstRow) < 0) {
        firstRow = mutation.getRow();
    }
    if (lastRow == null || byteArrayComparator.compare(mutation.getRow(), lastRow) > 0) {
        lastRow = mutation.getRow();
    }
    // For each column update in this mutation
    for (ColumnUpdate columnUpdate : mutation.getUpdates()) {
        // Get the column qualifiers we want to index for this column family (if any)
        ByteBuffer family = wrap(columnUpdate.getColumnFamily());
        Collection<ByteBuffer> indexQualifiers = indexColumns.get(family);
        // If we have column qualifiers we want to index for this column family
        if (indexQualifiers != null) {
            // Check if we want to index this particular qualifier
            ByteBuffer qualifier = wrap(columnUpdate.getColumnQualifier());
            if (indexQualifiers.contains(qualifier)) {
                // If so, create a mutation using the following mapping:
                // Row ID = column value
                // Column Family = columnqualifier_columnfamily
                // Column Qualifier = row ID
                // Value = empty
                ByteBuffer indexFamily = getIndexColumnFamily(columnUpdate.getColumnFamily(), columnUpdate.getColumnQualifier());
                Type type = indexColumnTypes.get(family).get(qualifier);
                ColumnVisibility visibility = new ColumnVisibility(columnUpdate.getColumnVisibility());
                // If this is an array type, then index each individual element in the array
                if (Types.isArrayType(type)) {
                    Type elementType = Types.getElementType(type);
                    List<?> elements = serializer.decode(type, columnUpdate.getValue());
                    for (Object element : elements) {
                        addIndexMutation(wrap(serializer.encode(elementType, element)), indexFamily, visibility, mutation.getRow());
                    }
                } else {
                    addIndexMutation(wrap(columnUpdate.getValue()), indexFamily, visibility, mutation.getRow());
                }
            }
        }
    }
}
Also used : Type(com.facebook.presto.spi.type.Type) ColumnUpdate(org.apache.accumulo.core.data.ColumnUpdate) ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility) ByteBuffer(java.nio.ByteBuffer)
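
The row/family/qualifier mapping described in the comments can be made concrete with a small sketch of the index entry that a single column update would produce. addIndexMutation and the metrics bookkeeping are internal to Indexer, so the snippet below only mirrors the documented layout; the concrete values and the "qualifier_family" naming follow the comment rather than the implementation details:

// Data cell: row "r1", family "cf", qualifier "age", value "27", visibility "private".
// Derived index entry, per the mapping in the comments above:
//   Row ID           = column value      -> "27"
//   Column Family    = qualifier_family  -> "age_cf"
//   Column Qualifier = original row ID   -> "r1"
//   Value            = empty
Mutation indexMutation = new Mutation("27");
indexMutation.put("age_cf", "r1", new ColumnVisibility("private"), new Value(new byte[0]));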

Example 9 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in the presto project by prestodb.

From the class TestIndexer, method setupClass:

@BeforeClass
public void setupClass() throws Exception {
    AccumuloColumnHandle c1 = new AccumuloColumnHandle("id", Optional.empty(), Optional.empty(), VARCHAR, 0, "", false);
    AccumuloColumnHandle c2 = new AccumuloColumnHandle("age", Optional.of("cf"), Optional.of("age"), BIGINT, 1, "", true);
    AccumuloColumnHandle c3 = new AccumuloColumnHandle("firstname", Optional.of("cf"), Optional.of("firstname"), VARCHAR, 2, "", true);
    AccumuloColumnHandle c4 = new AccumuloColumnHandle("arr", Optional.of("cf"), Optional.of("arr"), new ArrayType(VARCHAR), 3, "", true);
    table = new AccumuloTable("default", "index_test_table", ImmutableList.of(c1, c2, c3, c4), "id", true, LexicoderRowSerializer.class.getCanonicalName(), null);
    m1 = new Mutation(M1_ROWID);
    m1.put(CF, AGE, AGE_VALUE);
    m1.put(CF, FIRSTNAME, M1_FNAME_VALUE);
    m1.put(CF, SENDERS, M1_ARR_VALUE);
    m2 = new Mutation(M2_ROWID);
    m2.put(CF, AGE, AGE_VALUE);
    m2.put(CF, FIRSTNAME, M2_FNAME_VALUE);
    m2.put(CF, SENDERS, M2_ARR_VALUE);
    ColumnVisibility visibility1 = new ColumnVisibility("private");
    ColumnVisibility visibility2 = new ColumnVisibility("moreprivate");
    m1v = new Mutation(M1_ROWID);
    m1v.put(CF, AGE, visibility1, AGE_VALUE);
    m1v.put(CF, FIRSTNAME, visibility1, M1_FNAME_VALUE);
    m1v.put(CF, SENDERS, visibility2, M1_ARR_VALUE);
    m2v = new Mutation(M2_ROWID);
    m2v.put(CF, AGE, visibility1, AGE_VALUE);
    m2v.put(CF, FIRSTNAME, visibility2, M2_FNAME_VALUE);
    m2v.put(CF, SENDERS, visibility2, M2_ARR_VALUE);
}
Also used : ArrayType(com.facebook.presto.type.ArrayType) AccumuloTable(com.facebook.presto.accumulo.metadata.AccumuloTable) AccumuloColumnHandle(com.facebook.presto.accumulo.model.AccumuloColumnHandle) Mutation(org.apache.accumulo.core.data.Mutation) ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility) BeforeClass(org.testng.annotations.BeforeClass)
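
The two labels give later tests a way to check that index entries inherit the visibility of the data cells they were derived from. Reading m1v back with different Authorizations makes the effect easy to see; the sketch below assumes an existing Connector conn, uses a placeholder table name, and treats M1_ROWID as the row ID from the test above:

// Placeholder; the real Accumulo table name is derived from the AccumuloTable definition above
String tableName = "index_test_table";

// With only "private", the SENDERS cell (labelled "moreprivate") is filtered out
Scanner partial = conn.createScanner(tableName, new Authorizations("private"));
partial.setRange(Range.exact(new Text(M1_ROWID)));

// With both labels, all three cells written to m1v come back
Scanner full = conn.createScanner(tableName, new Authorizations("private", "moreprivate"));
full.setRange(Range.exact(new Text(M1_ROWID)));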

Example 10 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in the Gaffer project by gchq.

From the class CoreKeyGroupByAggregatorIteratorTest, method shouldPartiallyAggregateColumnQualifierOverCQ1GroupBy:

public void shouldPartiallyAggregateColumnQualifierOverCQ1GroupBy(final AccumuloStore store, final AccumuloElementConverter elementConverter) throws StoreException, AccumuloElementConversionException {
    final String visibilityString = "public";
    try {
        // Create edge
        final Edge edge = new Edge(TestGroups.EDGE);
        edge.setSource("1");
        edge.setDestination("2");
        edge.setDirected(true);
        edge.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 1);
        edge.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 1);
        edge.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge.putProperty(AccumuloPropertyNames.COUNT, 1);
        final Edge edge2 = new Edge(TestGroups.EDGE);
        edge2.setSource("1");
        edge2.setDestination("2");
        edge2.setDirected(true);
        edge2.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 1);
        edge2.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 2);
        edge2.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge2.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge2.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge2.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge2.putProperty(AccumuloPropertyNames.COUNT, 1);
        final Edge edge3 = new Edge(TestGroups.EDGE);
        edge3.setSource("1");
        edge3.setDestination("2");
        edge3.setDirected(true);
        edge3.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 1);
        edge3.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 3);
        edge3.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge3.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge3.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge3.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge3.putProperty(AccumuloPropertyNames.COUNT, 1);
        // This edge will be reduced, meaning its CQ (columnQualifier) will only occur once because its key is equal.
        final Edge edge4 = new Edge(TestGroups.EDGE);
        edge4.setSource("1");
        edge4.setDestination("2");
        edge4.setDirected(true);
        edge4.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 2);
        edge4.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 4);
        edge4.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge4.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge4.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge4.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge4.putProperty(AccumuloPropertyNames.COUNT, 2);
        final Edge edge5 = new Edge(TestGroups.EDGE);
        edge5.setSource("1");
        edge5.setDestination("2");
        edge5.setDirected(true);
        edge5.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 3);
        edge5.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 5);
        edge5.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge5.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge5.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge5.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge5.putProperty(AccumuloPropertyNames.COUNT, 10);
        final Edge edge6 = new Edge(TestGroups.EDGE);
        edge6.setSource("1");
        edge6.setDestination("2");
        edge6.setDirected(true);
        edge6.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 4);
        edge6.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 6);
        edge6.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge6.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge6.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge6.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge6.putProperty(AccumuloPropertyNames.COUNT, 5);
        // Accumulo key
        final Key key = elementConverter.getKeysFromEdge(edge).getFirst();
        final Key key2 = elementConverter.getKeysFromEdge(edge2).getFirst();
        final Key key3 = elementConverter.getKeysFromEdge(edge3).getFirst();
        final Key key4 = elementConverter.getKeysFromEdge(edge4).getFirst();
        final Key key5 = elementConverter.getKeysFromEdge(edge5).getFirst();
        final Key key6 = elementConverter.getKeysFromEdge(edge6).getFirst();
        // Accumulo values
        final Value value1 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge.getProperties());
        final Value value2 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge2.getProperties());
        final Value value3 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge3.getProperties());
        final Value value4 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge4.getProperties());
        final Value value5 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge5.getProperties());
        final Value value6 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge6.getProperties());
        // Create mutation
        final Mutation m1 = new Mutation(key.getRow());
        m1.put(key.getColumnFamily(), key.getColumnQualifier(), new ColumnVisibility(key.getColumnVisibility()), key.getTimestamp(), value1);
        final Mutation m2 = new Mutation(key2.getRow());
        m2.put(key2.getColumnFamily(), key2.getColumnQualifier(), new ColumnVisibility(key2.getColumnVisibility()), key2.getTimestamp(), value2);
        final Mutation m3 = new Mutation(key.getRow());
        m3.put(key3.getColumnFamily(), key3.getColumnQualifier(), new ColumnVisibility(key3.getColumnVisibility()), key3.getTimestamp(), value3);
        final Mutation m4 = new Mutation(key.getRow());
        m4.put(key4.getColumnFamily(), key4.getColumnQualifier(), new ColumnVisibility(key4.getColumnVisibility()), key4.getTimestamp(), value4);
        final Mutation m5 = new Mutation(key.getRow());
        m5.put(key5.getColumnFamily(), key5.getColumnQualifier(), new ColumnVisibility(key5.getColumnVisibility()), key5.getTimestamp(), value5);
        final Mutation m6 = new Mutation(key.getRow());
        m6.put(key6.getColumnFamily(), key6.getColumnQualifier(), new ColumnVisibility(key6.getColumnVisibility()), key6.getTimestamp(), value6);
        // Write mutation
        final BatchWriterConfig writerConfig = new BatchWriterConfig();
        writerConfig.setMaxMemory(1000000L);
        writerConfig.setMaxLatency(1000L, TimeUnit.MILLISECONDS);
        writerConfig.setMaxWriteThreads(1);
        final BatchWriter writer = store.getConnection().createBatchWriter(store.getProperties().getTable(), writerConfig);
        writer.addMutation(m1);
        writer.addMutation(m2);
        writer.addMutation(m3);
        writer.addMutation(m4);
        writer.addMutation(m5);
        writer.addMutation(m6);
        writer.close();
        Edge expectedEdge1 = new Edge(TestGroups.EDGE);
        expectedEdge1.setSource("1");
        expectedEdge1.setDestination("2");
        expectedEdge1.setDirected(true);
        expectedEdge1.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 1);
        expectedEdge1.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 6);
        expectedEdge1.putProperty(AccumuloPropertyNames.COUNT, 3);
        expectedEdge1.putProperty(AccumuloPropertyNames.PROP_1, 0);
        expectedEdge1.putProperty(AccumuloPropertyNames.PROP_2, 0);
        expectedEdge1.putProperty(AccumuloPropertyNames.PROP_3, 0);
        expectedEdge1.putProperty(AccumuloPropertyNames.PROP_4, 0);
        Edge expectedEdge2 = new Edge(TestGroups.EDGE);
        expectedEdge2.setSource("1");
        expectedEdge2.setDestination("2");
        expectedEdge2.setDirected(true);
        expectedEdge2.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 2);
        expectedEdge2.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 4);
        expectedEdge2.putProperty(AccumuloPropertyNames.COUNT, 2);
        expectedEdge2.putProperty(AccumuloPropertyNames.PROP_1, 0);
        expectedEdge2.putProperty(AccumuloPropertyNames.PROP_2, 0);
        expectedEdge2.putProperty(AccumuloPropertyNames.PROP_3, 0);
        expectedEdge2.putProperty(AccumuloPropertyNames.PROP_4, 0);
        Edge expectedEdge3 = new Edge(TestGroups.EDGE);
        expectedEdge3.setSource("1");
        expectedEdge3.setDestination("2");
        expectedEdge3.setDirected(true);
        expectedEdge3.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 3);
        expectedEdge3.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 5);
        expectedEdge3.putProperty(AccumuloPropertyNames.COUNT, 10);
        expectedEdge3.putProperty(AccumuloPropertyNames.PROP_1, 0);
        expectedEdge3.putProperty(AccumuloPropertyNames.PROP_2, 0);
        expectedEdge3.putProperty(AccumuloPropertyNames.PROP_3, 0);
        expectedEdge3.putProperty(AccumuloPropertyNames.PROP_4, 0);
        Edge expectedEdge4 = new Edge(TestGroups.EDGE);
        expectedEdge4.setSource("1");
        expectedEdge4.setDestination("2");
        expectedEdge4.setDirected(true);
        expectedEdge4.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 4);
        expectedEdge4.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 6);
        expectedEdge4.putProperty(AccumuloPropertyNames.COUNT, 5);
        expectedEdge4.putProperty(AccumuloPropertyNames.PROP_1, 0);
        expectedEdge4.putProperty(AccumuloPropertyNames.PROP_2, 0);
        expectedEdge4.putProperty(AccumuloPropertyNames.PROP_3, 0);
        expectedEdge4.putProperty(AccumuloPropertyNames.PROP_4, 0);
        // Read data back and check we get one merged element
        final Authorizations authorizations = new Authorizations(visibilityString);
        final Scanner scanner = store.getConnection().createScanner(store.getProperties().getTable(), authorizations);
        final IteratorSetting iteratorSetting = new IteratorSettingBuilder(AccumuloStoreConstants.COLUMN_QUALIFIER_AGGREGATOR_ITERATOR_PRIORITY, "KeyCombiner", CoreKeyGroupByAggregatorIterator.class).all().view(new View.Builder().edge(TestGroups.EDGE, new ViewElementDefinition.Builder().groupBy(AccumuloPropertyNames.COLUMN_QUALIFIER).build()).build()).schema(store.getSchema()).keyConverter(store.getKeyPackage().getKeyConverter()).build();
        scanner.addScanIterator(iteratorSetting);
        final Iterator<Entry<Key, Value>> it = scanner.iterator();
        Entry<Key, Value> entry = it.next();
        Element readEdge = elementConverter.getFullElement(entry.getKey(), entry.getValue());
        assertEquals(expectedEdge1, readEdge);
        assertEquals(1, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER));
        assertEquals(6, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2));
        assertEquals(3, readEdge.getProperty(AccumuloPropertyNames.COUNT));
        entry = it.next();
        readEdge = elementConverter.getFullElement(entry.getKey(), entry.getValue());
        assertEquals(expectedEdge2, readEdge);
        assertEquals(2, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER));
        assertEquals(4, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2));
        assertEquals(2, readEdge.getProperty(AccumuloPropertyNames.COUNT));
        entry = it.next();
        readEdge = elementConverter.getFullElement(entry.getKey(), entry.getValue());
        assertEquals(expectedEdge3, readEdge);
        assertEquals(3, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER));
        assertEquals(5, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2));
        assertEquals(10, readEdge.getProperty(AccumuloPropertyNames.COUNT));
        entry = it.next();
        readEdge = elementConverter.getFullElement(entry.getKey(), entry.getValue());
        assertEquals(expectedEdge4, readEdge);
        assertEquals(4, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER));
        assertEquals(6, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2));
        assertEquals(5, readEdge.getProperty(AccumuloPropertyNames.COUNT));
        if (it.hasNext()) {
            fail("Additional row found.");
        }
    } catch (AccumuloException | TableNotFoundException e) {
        fail(this.getClass().getSimpleName() + " failed with exception: " + e);
    }
}
Also used : Scanner(org.apache.accumulo.core.client.Scanner) AccumuloException(org.apache.accumulo.core.client.AccumuloException) Authorizations(org.apache.accumulo.core.security.Authorizations) IteratorSettingBuilder(uk.gov.gchq.gaffer.accumulostore.utils.IteratorSettingBuilder) Element(uk.gov.gchq.gaffer.data.element.Element) IteratorSettingBuilder(uk.gov.gchq.gaffer.accumulostore.utils.IteratorSettingBuilder) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) Entry(java.util.Map.Entry) IteratorSetting(org.apache.accumulo.core.client.IteratorSetting) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) Mutation(org.apache.accumulo.core.data.Mutation) ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Edge(uk.gov.gchq.gaffer.data.element.Edge) Key(org.apache.accumulo.core.data.Key)
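
The recurring pattern here, rebuilding a Mutation from a Key produced by the element converter while preserving its visibility, works because Key.getColumnVisibility() returns the raw expression as a Text, which ColumnVisibility can be constructed from directly. Distilled to its essentials (key and value stand for any entry previously converted for or read from Accumulo):

// Copy an existing Key/Value pair into a new Mutation, keeping its visibility label
Mutation copy = new Mutation(key.getRow());
copy.put(key.getColumnFamily(), key.getColumnQualifier(),
        new ColumnVisibility(key.getColumnVisibility()),
        key.getTimestamp(), value);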

Aggregations

ColumnVisibility (org.apache.accumulo.core.security.ColumnVisibility) 21
Mutation (org.apache.accumulo.core.data.Mutation) 16
Value (org.apache.accumulo.core.data.Value) 11
Key (org.apache.accumulo.core.data.Key) 10
Test (org.junit.Test) 10
Entry (java.util.Map.Entry) 9
BatchWriter (org.apache.accumulo.core.client.BatchWriter) 9
Authorizations (org.apache.accumulo.core.security.Authorizations) 9
Properties (java.util.Properties) 8
BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig) 8
TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException) 8
Configuration (org.apache.hadoop.conf.Configuration) 8
Element (uk.gov.gchq.gaffer.data.element.Element) 8
AccumuloException (org.apache.accumulo.core.client.AccumuloException) 7
IteratorSetting (org.apache.accumulo.core.client.IteratorSetting) 7
Scanner (org.apache.accumulo.core.client.Scanner) 7
Edge (uk.gov.gchq.gaffer.data.element.Edge) 7
IteratorSettingBuilder (uk.gov.gchq.gaffer.accumulostore.utils.IteratorSettingBuilder) 6
ColumnUpdate (org.apache.accumulo.core.data.ColumnUpdate) 5
ByteArrayRef (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef) 5