Example 61 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in project hive by apache: the class TestAccumuloRowSerializer, method testMapSerialization.

@Test
public void testMapSerialization() throws IOException, SerDeException {
    List<String> columns = Arrays.asList("row", "col");
    List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo));
    List<String> typeNames = new ArrayList<String>(types.size());
    for (TypeInfo type : types) {
        typeNames.add(type.getTypeName());
    }
    Properties tableProperties = new Properties();
    tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:*");
    tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
    tableProperties.setProperty(serdeConstants.COLLECTION_DELIM, ",");
    tableProperties.setProperty(serdeConstants.MAPKEY_DELIM, ":");
    tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
    tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
    AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(), tableProperties, AccumuloSerDe.class.getSimpleName());
    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
    TypeInfo stringTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
    LazyStringObjectInspector stringOI = (LazyStringObjectInspector) LazyFactory.createLazyObjectInspector(stringTypeInfo, new byte[] { 0 }, 0, serDeParams.getNullSequence(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
    LazyMapObjectInspector mapOI = LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(stringOI, stringOI, (byte) ',', (byte) ':', serDeParams.getNullSequence(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
    LazySimpleStructObjectInspector structOI = (LazySimpleStructObjectInspector) LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(columns, Arrays.asList(stringOI, mapOI), (byte) ' ', serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
    AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams, accumuloSerDeParams.getColumnMappings(), new ColumnVisibility(), accumuloSerDeParams.getRowIdFactory());
    // Create the LazyStruct from the LazySimpleStructObjectInspector
    LazyStruct obj = (LazyStruct) LazyFactory.createLazyObject(structOI);
    ByteArrayRef byteRef = new ByteArrayRef();
    byteRef.setData("row1 cq1:10,cq2:20,cq3:value".getBytes());
    obj.init(byteRef, 0, byteRef.getData().length);
    Mutation m = (Mutation) serializer.serialize(obj, structOI);
    Assert.assertArrayEquals("row1".getBytes(), m.getRow());
    List<ColumnUpdate> updates = m.getUpdates();
    Assert.assertEquals(3, updates.size());
    ColumnUpdate update = updates.get(0);
    Assert.assertEquals("cf", new String(update.getColumnFamily()));
    Assert.assertEquals("cq1", new String(update.getColumnQualifier()));
    Assert.assertEquals("10", new String(update.getValue()));
    update = updates.get(1);
    Assert.assertEquals("cf", new String(update.getColumnFamily()));
    Assert.assertEquals("cq2", new String(update.getColumnQualifier()));
    Assert.assertEquals("20", new String(update.getValue()));
    update = updates.get(2);
    Assert.assertEquals("cf", new String(update.getColumnFamily()));
    Assert.assertEquals("cq3", new String(update.getColumnQualifier()));
    Assert.assertEquals("value", new String(update.getValue()));
}
Also used: LazyStringObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector), LazySimpleStructObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector), ColumnUpdate(org.apache.accumulo.core.data.ColumnUpdate), Configuration(org.apache.hadoop.conf.Configuration), LazySerDeParameters(org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters), ArrayList(java.util.ArrayList), LazyMapObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector), Properties(java.util.Properties), TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), ByteArrayRef(org.apache.hadoop.hive.serde2.lazy.ByteArrayRef), ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility), Mutation(org.apache.accumulo.core.data.Mutation), LazyStruct(org.apache.hadoop.hive.serde2.lazy.LazyStruct), Test(org.junit.Test)
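
The serializer above produces the same updates the plain Accumulo client API would. Below is a minimal hand-written sketch of the equivalent mutation (row and values taken from the test input, table setup omitted, helper name illustrative), using an empty ColumnVisibility just as the test does:

import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;

// Build the mutation that serializing "row1 cq1:10,cq2:20,cq3:value" with the
// ":rowid,cf:*" mapping yields: each map entry becomes one update under family
// "cf", with the map key as the qualifier and the map value as the cell value.
public static Mutation mapRowAsMutation() {
    Mutation m = new Mutation(new Text("row1"));
    ColumnVisibility emptyVisibility = new ColumnVisibility(); // no label: visible to all scans
    m.put(new Text("cf"), new Text("cq1"), emptyVisibility, new Value("10".getBytes()));
    m.put(new Text("cf"), new Text("cq2"), emptyVisibility, new Value("20".getBytes()));
    m.put(new Text("cf"), new Text("cq3"), emptyVisibility, new Value("value".getBytes()));
    return m;
}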

Example 62 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in project hive by apache: the class TestHiveAccumuloTableInputFormat, method testGetProtectedField.

@Test
public void testGetProtectedField() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
    Authorizations origAuths = con.securityOperations().getUserAuthorizations(USER);
    con.securityOperations().changeUserAuthorizations(USER, new Authorizations(origAuths.toString() + ",foo"));
    Mutation m = new Mutation("r4");
    m.put(COLUMN_FAMILY, NAME, new ColumnVisibility("foo"), new Value("frank".getBytes()));
    m.put(COLUMN_FAMILY, SID, new ColumnVisibility("foo"), new Value(parseIntBytes("4")));
    m.put(COLUMN_FAMILY, DEGREES, new ColumnVisibility("foo"), new Value(parseDoubleBytes("60.6")));
    m.put(COLUMN_FAMILY, MILLIS, new ColumnVisibility("foo"), new Value(parseLongBytes("777")));
    writer.addMutation(m);
    writer.close();
    conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo");
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "brian".getBytes());
    rowId = new Text("r2");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "mark".getBytes());
    rowId = new Text("r3");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "dennis".getBytes());
    rowId = new Text("r4");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "frank".getBytes());
    assertFalse(reader.next(rowId, row));
}
Also used: Path(org.apache.hadoop.fs.Path), Authorizations(org.apache.accumulo.core.security.Authorizations), Value(org.apache.accumulo.core.data.Value), BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig), Text(org.apache.hadoop.io.Text), BatchWriter(org.apache.accumulo.core.client.BatchWriter), Mutation(org.apache.accumulo.core.data.Mutation), ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility), InputSplit(org.apache.hadoop.mapred.InputSplit), AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow), Test(org.junit.Test)
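
The test works because a cell written under ColumnVisibility("foo") is only returned once the user's Authorizations include the "foo" label. A minimal sketch of that check in isolation, using Accumulo's VisibilityEvaluator (the helper name and labels are illustrative):

import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.core.security.VisibilityEvaluator;
import org.apache.accumulo.core.security.VisibilityParseException;

// Evaluate a cell's visibility expression against a user's authorizations,
// mirroring the filtering Accumulo applies server-side during a scan.
public static boolean canSee(String visibilityExpression, String... userLabels)
        throws VisibilityParseException {
    VisibilityEvaluator evaluator = new VisibilityEvaluator(new Authorizations(userLabels));
    return evaluator.evaluate(new ColumnVisibility(visibilityExpression));
}

// canSee("foo", "foo") -> true: row "r4" becomes readable, as asserted above
// canSee("foo")        -> false: without the label, the cell is silently filtered out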

Example 63 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in project hive by apache: the class TestHiveAccumuloTableOutputFormat, method testWriteToMockInstanceWithVisibility.

@Test
public void testWriteToMockInstanceWithVisibility() throws Exception {
    Instance inst = new MockInstance(test.getMethodName());
    Connector conn = inst.getConnector("root", new PasswordToken(""));
    Authorizations auths = new Authorizations("foo");
    conn.securityOperations().changeUserAuthorizations("root", auths);
    HiveAccumuloTableOutputFormat outputFormat = new HiveAccumuloTableOutputFormat();
    String table = test.getMethodName();
    conn.tableOperations().create(table);
    JobConf conf = new JobConf();
    conf.set(AccumuloConnectionParameters.INSTANCE_NAME, inst.getInstanceName());
    conf.set(AccumuloConnectionParameters.USER_NAME, "root");
    conf.set(AccumuloConnectionParameters.USER_PASS, "");
    conf.setBoolean(AccumuloConnectionParameters.USE_MOCK_INSTANCE, true);
    conf.set(AccumuloConnectionParameters.TABLE_NAME, test.getMethodName());
    FileSystem local = FileSystem.getLocal(conf);
    outputFormat.checkOutputSpecs(local, conf);
    RecordWriter<Text, Mutation> recordWriter = outputFormat.getRecordWriter(local, conf, null, null);
    List<String> names = Arrays.asList("row", "col1", "col2");
    List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
    Properties tableProperties = new Properties();
    tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:cq1,cf:cq2");
    tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
    tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(names));
    tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
    AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(), tableProperties, AccumuloSerDe.class.getSimpleName());
    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
    AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams, accumuloSerDeParams.getColumnMappings(), new ColumnVisibility("foo"), accumuloSerDeParams.getRowIdFactory());
    LazySimpleStructObjectInspector structOI = (LazySimpleStructObjectInspector) LazyFactory.createLazyStructInspector(Arrays.asList("row", "cq1", "cq2"), Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo), serDeParams.getSeparators(), serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
    LazyStruct struct = (LazyStruct) LazyFactory.createLazyObject(structOI);
    ByteArrayRef bytes = new ByteArrayRef();
    bytes.setData("row value1 value2".getBytes());
    struct.init(bytes, 0, bytes.getData().length);
    // Serialize the struct into a mutation
    Mutation m = serializer.serialize(struct, structOI);
    // Write the mutation
    recordWriter.write(new Text(table), m);
    // Close the writer
    recordWriter.close(null);
    Iterator<Entry<Key, Value>> iter = conn.createScanner(table, auths).iterator();
    Assert.assertTrue("Iterator did not have an element as expected", iter.hasNext());
    Entry<Key, Value> entry = iter.next();
    Key k = entry.getKey();
    Value v = entry.getValue();
    Assert.assertEquals("row", k.getRow().toString());
    Assert.assertEquals("cf", k.getColumnFamily().toString());
    Assert.assertEquals("cq1", k.getColumnQualifier().toString());
    Assert.assertEquals("foo", k.getColumnVisibility().toString());
    Assert.assertEquals("value1", new String(v.get()));
    Assert.assertTrue("Iterator did not have an element as expected", iter.hasNext());
    entry = iter.next();
    k = entry.getKey();
    v = entry.getValue();
    Assert.assertEquals("row", k.getRow().toString());
    Assert.assertEquals("cf", k.getColumnFamily().toString());
    Assert.assertEquals("cq2", k.getColumnQualifier().toString());
    Assert.assertEquals("foo", k.getColumnVisibility().toString());
    Assert.assertEquals("value2", new String(v.get()));
    Assert.assertFalse("Iterator unexpectedly had more data", iter.hasNext());
}
Also used: Connector(org.apache.accumulo.core.client.Connector), Configuration(org.apache.hadoop.conf.Configuration), MockInstance(org.apache.accumulo.core.client.mock.MockInstance), Instance(org.apache.accumulo.core.client.Instance), LazySerDeParameters(org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters), Properties(java.util.Properties), PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken), Entry(java.util.Map.Entry), FileSystem(org.apache.hadoop.fs.FileSystem), ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility), JobConf(org.apache.hadoop.mapred.JobConf), AccumuloRowSerializer(org.apache.hadoop.hive.accumulo.serde.AccumuloRowSerializer), LazyStruct(org.apache.hadoop.hive.serde2.lazy.LazyStruct), Authorizations(org.apache.accumulo.core.security.Authorizations), LazySimpleStructObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector), Text(org.apache.hadoop.io.Text), TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), AccumuloSerDe(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDe), AccumuloSerDeParameters(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters), ByteArrayRef(org.apache.hadoop.hive.serde2.lazy.ByteArrayRef), Value(org.apache.accumulo.core.data.Value), Mutation(org.apache.accumulo.core.data.Mutation), Key(org.apache.accumulo.core.data.Key), Test(org.junit.Test)
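
The visibility handed to AccumuloRowSerializer need not be a single label: ColumnVisibility parses boolean expressions over labels at construction time. A minimal sketch (the expressions themselves are illustrative):

import org.apache.accumulo.core.security.ColumnVisibility;

// ColumnVisibility validates the expression when constructed and throws an
// IllegalArgumentException subclass on malformed syntax.
ColumnVisibility single = new ColumnVisibility("foo");           // one label
ColumnVisibility both = new ColumnVisibility("foo&bar");         // requires both labels
ColumnVisibility either = new ColumnVisibility("foo|bar");       // requires either label
ColumnVisibility nested = new ColumnVisibility("(foo|bar)&baz"); // parentheses for grouping

// The raw expression is retained and can be read back:
assert "(foo|bar)&baz".equals(new String(nested.getExpression()));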

Example 64 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in project Gaffer by gchq: the class AccumuloStore, method insertGraphElements.

protected void insertGraphElements(final Iterable<? extends Element> elements) throws StoreException {
    // Create the BatchWriter via TableUtils, which configures limits such as
    // maximum memory and latency for the underlying writer.
    final BatchWriter writer = TableUtils.createBatchWriter(this);
    if (null != elements) {
        for (final Element element : elements) {
            final Pair<Key, Key> keys;
            try {
                keys = keyPackage.getKeyConverter().getKeysFromElement(element);
            } catch (final AccumuloElementConversionException e) {
                LOGGER.error(FAILED_TO_CREATE_AN_ACCUMULO_FROM_ELEMENT_OF_TYPE_WHEN_TRYING_TO_INSERT_ELEMENTS, "key", element.getGroup());
                continue;
            }
            final Value value;
            try {
                value = keyPackage.getKeyConverter().getValueFromElement(element);
            } catch (final AccumuloElementConversionException e) {
                LOGGER.error(FAILED_TO_CREATE_AN_ACCUMULO_FROM_ELEMENT_OF_TYPE_WHEN_TRYING_TO_INSERT_ELEMENTS, "value", element.getGroup());
                continue;
            }
            final Mutation m = new Mutation(keys.getFirst().getRow());
            m.put(keys.getFirst().getColumnFamily(), keys.getFirst().getColumnQualifier(), new ColumnVisibility(keys.getFirst().getColumnVisibility()), keys.getFirst().getTimestamp(), value);
            try {
                writer.addMutation(m);
            } catch (final MutationsRejectedException e) {
                LOGGER.error("Failed to create an accumulo key mutation");
                continue;
            }
            // If the GraphElement is an Edge then there will be 2 keys.
            if (null != keys.getSecond()) {
                final Mutation m2 = new Mutation(keys.getSecond().getRow());
                m2.put(keys.getSecond().getColumnFamily(), keys.getSecond().getColumnQualifier(), new ColumnVisibility(keys.getSecond().getColumnVisibility()), keys.getSecond().getTimestamp(), value);
                try {
                    writer.addMutation(m2);
                } catch (final MutationsRejectedException e) {
                    LOGGER.error("Failed to create an accumulo key mutation");
                }
            }
        }
    } else {
        throw new GafferRuntimeException("Could not find any elements to add to graph.", Status.BAD_REQUEST);
    }
    try {
        writer.close();
    } catch (final MutationsRejectedException e) {
        LOGGER.warn("Accumulo batch writer failed to close", e);
    }
}
Also used: Element(uk.gov.gchq.gaffer.data.element.Element), Value(org.apache.accumulo.core.data.Value), BatchWriter(org.apache.accumulo.core.client.BatchWriter), Mutation(org.apache.accumulo.core.data.Mutation), ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility), Key(org.apache.accumulo.core.data.Key), AccumuloElementConversionException(uk.gov.gchq.gaffer.accumulostore.key.exception.AccumuloElementConversionException), MutationsRejectedException(org.apache.accumulo.core.client.MutationsRejectedException), GafferRuntimeException(uk.gov.gchq.gaffer.core.exception.GafferRuntimeException)
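
The key-to-mutation pattern repeated for both keys above can be factored into a small helper. A minimal sketch (the helper name is illustrative); note that ColumnVisibility accepts the Text returned by Key.getColumnVisibility() directly:

import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.ColumnVisibility;

// Re-attach the visibility already encoded in a Key when writing its value out.
static Mutation toMutation(final Key key, final Value value) {
    final Mutation m = new Mutation(key.getRow());
    m.put(key.getColumnFamily(), key.getColumnQualifier(),
            new ColumnVisibility(key.getColumnVisibility()),
            key.getTimestamp(), value);
    return m;
}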

Example 65 with ColumnVisibility

Use of org.apache.accumulo.core.security.ColumnVisibility in project Gaffer by gchq: the class CoreKeyGroupByAggregatorIteratorTest, method shouldAggregateEverythingWhenGroupByIsSetToBlank.

public void shouldAggregateEverythingWhenGroupByIsSetToBlank(final AccumuloStore store, final AccumuloElementConverter elementConverter) throws StoreException {
    final String visibilityString = "public";
    try {
        // Create edge
        final Edge edge = new Edge.Builder().group(TestGroups.EDGE).source("1").dest("2").directed(true).property(AccumuloPropertyNames.COLUMN_QUALIFIER, 1).property(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 1).property(AccumuloPropertyNames.PROP_1, 0).property(AccumuloPropertyNames.PROP_2, 0).property(AccumuloPropertyNames.PROP_3, 0).property(AccumuloPropertyNames.PROP_4, 0).property(AccumuloPropertyNames.COUNT, 1).build();
        final Edge edge2 = new Edge.Builder().group(TestGroups.EDGE).source("1").dest("2").directed(true).property(AccumuloPropertyNames.COLUMN_QUALIFIER, 1).property(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 1).property(AccumuloPropertyNames.PROP_1, 0).property(AccumuloPropertyNames.PROP_2, 0).property(AccumuloPropertyNames.PROP_3, 0).property(AccumuloPropertyNames.PROP_4, 0).property(AccumuloPropertyNames.COUNT, 1).build();
        final Edge edge3 = new Edge.Builder().group(TestGroups.EDGE).source("1").dest("2").directed(true).property(AccumuloPropertyNames.COLUMN_QUALIFIER, 1).property(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 1).property(AccumuloPropertyNames.PROP_1, 0).property(AccumuloPropertyNames.PROP_2, 0).property(AccumuloPropertyNames.PROP_3, 0).property(AccumuloPropertyNames.PROP_4, 0).property(AccumuloPropertyNames.COUNT, 1).build();
        // This edge will be reduced, meaning its column qualifier (CQ) will occur only once, because its key is equal to the keys of the edges above.
        final Edge edge4 = new Edge.Builder().group(TestGroups.EDGE).source("1").dest("2").directed(true).property(AccumuloPropertyNames.COLUMN_QUALIFIER, 1).property(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 4).property(AccumuloPropertyNames.PROP_1, 0).property(AccumuloPropertyNames.PROP_2, 0).property(AccumuloPropertyNames.PROP_3, 0).property(AccumuloPropertyNames.PROP_4, 0).property(AccumuloPropertyNames.COUNT, 2).build();
        final Edge edge5 = new Edge.Builder().group(TestGroups.EDGE).source("1").dest("2").directed(true).property(AccumuloPropertyNames.COLUMN_QUALIFIER, 3).property(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 5).property(AccumuloPropertyNames.PROP_1, 0).property(AccumuloPropertyNames.PROP_2, 0).property(AccumuloPropertyNames.PROP_3, 0).property(AccumuloPropertyNames.PROP_4, 0).property(AccumuloPropertyNames.COUNT, 10).build();
        final Edge edge6 = new Edge.Builder().group(TestGroups.EDGE).source("1").dest("2").directed(true).property(AccumuloPropertyNames.COLUMN_QUALIFIER, 3).property(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 5).property(AccumuloPropertyNames.PROP_1, 0).property(AccumuloPropertyNames.PROP_2, 0).property(AccumuloPropertyNames.PROP_3, 0).property(AccumuloPropertyNames.PROP_4, 0).property(AccumuloPropertyNames.COUNT, 5).build();
        // Accumulo key
        final Key key = elementConverter.getKeysFromEdge(edge).getFirst();
        final Key key2 = elementConverter.getKeysFromEdge(edge2).getFirst();
        final Key key3 = elementConverter.getKeysFromEdge(edge3).getFirst();
        final Key key4 = elementConverter.getKeysFromEdge(edge4).getFirst();
        final Key key5 = elementConverter.getKeysFromEdge(edge5).getFirst();
        final Key key6 = elementConverter.getKeysFromEdge(edge6).getFirst();
        // Accumulo values
        final Value value1 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge.getProperties());
        final Value value2 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge2.getProperties());
        final Value value3 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge3.getProperties());
        final Value value4 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge4.getProperties());
        final Value value5 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge5.getProperties());
        final Value value6 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge6.getProperties());
        // Create mutation
        final Mutation m1 = new Mutation(key.getRow());
        m1.put(key.getColumnFamily(), key.getColumnQualifier(), new ColumnVisibility(key.getColumnVisibility()), key.getTimestamp(), value1);
        final Mutation m2 = new Mutation(key2.getRow());
        m2.put(key2.getColumnFamily(), key2.getColumnQualifier(), new ColumnVisibility(key2.getColumnVisibility()), key2.getTimestamp(), value2);
        final Mutation m3 = new Mutation(key.getRow());
        m3.put(key3.getColumnFamily(), key3.getColumnQualifier(), new ColumnVisibility(key3.getColumnVisibility()), key3.getTimestamp(), value3);
        final Mutation m4 = new Mutation(key.getRow());
        m4.put(key4.getColumnFamily(), key4.getColumnQualifier(), new ColumnVisibility(key4.getColumnVisibility()), key4.getTimestamp(), value4);
        final Mutation m5 = new Mutation(key.getRow());
        m5.put(key5.getColumnFamily(), key5.getColumnQualifier(), new ColumnVisibility(key5.getColumnVisibility()), key5.getTimestamp(), value5);
        final Mutation m6 = new Mutation(key.getRow());
        m6.put(key6.getColumnFamily(), key6.getColumnQualifier(), new ColumnVisibility(key6.getColumnVisibility()), key6.getTimestamp(), value6);
        // Write mutation
        final BatchWriterConfig writerConfig = new BatchWriterConfig();
        writerConfig.setMaxMemory(1000000L);
        writerConfig.setMaxLatency(1000L, TimeUnit.MILLISECONDS);
        writerConfig.setMaxWriteThreads(1);
        final BatchWriter writer = store.getConnection().createBatchWriter(store.getTableName(), writerConfig);
        writer.addMutation(m1);
        writer.addMutation(m2);
        writer.addMutation(m3);
        writer.addMutation(m4);
        writer.addMutation(m5);
        writer.addMutation(m6);
        writer.close();
        Edge expectedEdge1 = new Edge.Builder().group(TestGroups.EDGE).source("1").dest("2").directed(true).property(AccumuloPropertyNames.COLUMN_QUALIFIER, 5).property(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 10).property(AccumuloPropertyNames.COUNT, 20).property(AccumuloPropertyNames.PROP_1, 0).property(AccumuloPropertyNames.PROP_2, 0).property(AccumuloPropertyNames.PROP_3, 0).property(AccumuloPropertyNames.PROP_4, 0).build();
        // Read data back and check we get one merged element
        final Authorizations authorizations = new Authorizations(visibilityString);
        final Scanner scanner = store.getConnection().createScanner(store.getTableName(), authorizations);
        final IteratorSetting iteratorSetting = new IteratorSettingBuilder(AccumuloStoreConstants.COLUMN_QUALIFIER_AGGREGATOR_ITERATOR_PRIORITY, "KeyCombiner", CoreKeyGroupByAggregatorIterator.class).all().view(new View.Builder().edge(TestGroups.EDGE, new ViewElementDefinition.Builder().groupBy().build()).build()).schema(store.getSchema()).keyConverter(store.getKeyPackage().getKeyConverter()).build();
        scanner.addScanIterator(iteratorSetting);
        final Iterator<Entry<Key, Value>> it = scanner.iterator();
        Entry<Key, Value> entry = it.next();
        Element readEdge = elementConverter.getFullElement(entry.getKey(), entry.getValue(), false);
        assertEquals(expectedEdge1, readEdge);
        assertEquals(5, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER));
        assertEquals(10, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2));
        assertEquals(20, readEdge.getProperty(AccumuloPropertyNames.COUNT));
        if (it.hasNext()) {
            fail("Additional row found.");
        }
    } catch (final AccumuloException | TableNotFoundException e) {
        fail(this.getClass().getSimpleName() + " failed with exception: " + e);
    }
}
Also used: Scanner(org.apache.accumulo.core.client.Scanner), AccumuloException(org.apache.accumulo.core.client.AccumuloException), Authorizations(org.apache.accumulo.core.security.Authorizations), IteratorSettingBuilder(uk.gov.gchq.gaffer.accumulostore.utils.IteratorSettingBuilder), Element(uk.gov.gchq.gaffer.data.element.Element), TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException), Entry(java.util.Map.Entry), IteratorSetting(org.apache.accumulo.core.client.IteratorSetting), Value(org.apache.accumulo.core.data.Value), BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig), Mutation(org.apache.accumulo.core.data.Mutation), ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility), BatchWriter(org.apache.accumulo.core.client.BatchWriter), Edge(uk.gov.gchq.gaffer.data.element.Edge), Key(org.apache.accumulo.core.data.Key)
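
CoreKeyGroupByAggregatorIterator and IteratorSettingBuilder are Gaffer-specific, but attaching a scan-time combiner follows the standard Accumulo pattern. A minimal sketch using the built-in SummingCombiner (priority, iterator name, and column family are illustrative):

import java.util.Collections;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.iterators.LongCombiner;
import org.apache.accumulo.core.iterators.user.SummingCombiner;

// Merge duplicate keys at scan time, analogous to the aggregation the test
// verifies: entries sharing a key are combined into a single summed entry.
static void attachSummingCombiner(final Scanner scanner) {
    IteratorSetting setting = new IteratorSetting(10, "sum", SummingCombiner.class);
    LongCombiner.setEncodingType(setting, LongCombiner.Type.STRING); // values encoded as string longs
    SummingCombiner.setColumns(setting,
            Collections.singletonList(new IteratorSetting.Column("cf")));
    scanner.addScanIterator(setting);
}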

Aggregations

ColumnVisibility (org.apache.accumulo.core.security.ColumnVisibility): 131
Mutation (org.apache.accumulo.core.data.Mutation): 57
Text (org.apache.hadoop.io.Text): 57
Value (org.apache.accumulo.core.data.Value): 52
Key (org.apache.accumulo.core.data.Key): 39
Test (org.junit.Test): 37
BatchWriter (org.apache.accumulo.core.client.BatchWriter): 28
BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig): 20
Authorizations (org.apache.accumulo.core.security.Authorizations): 17
ArrayList (java.util.ArrayList): 16
Entry (java.util.Map.Entry): 16
TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException): 16
Scanner (org.apache.accumulo.core.client.Scanner): 14
AccumuloException (org.apache.accumulo.core.client.AccumuloException): 12
MutationsRejectedException (org.apache.accumulo.core.client.MutationsRejectedException): 12
Configuration (org.apache.hadoop.conf.Configuration): 12
Connector (org.apache.accumulo.core.client.Connector): 10
IteratorSetting (org.apache.accumulo.core.client.IteratorSetting): 10
TMutation (org.apache.accumulo.core.data.thrift.TMutation): 10
Element (uk.gov.gchq.gaffer.data.element.Element): 9