Search in sources:

Example 16 with Authorizations

use of org.apache.accumulo.core.security.Authorizations in project Gaffer by gchq.

In class CoreKeyGroupByAggregatorIteratorTest, the method shouldAggregateEverythingWhenGroupByIsSetToBlank:

/**
 * Writes six edges that share the same source ("1"), destination ("2") and direction,
 * then scans them back through a {@code CoreKeyGroupByAggregatorIterator} configured
 * with an empty (blank) groupBy. With no group-by columns, every edge collapses into
 * a single aggregated element, so the scan must return exactly one edge whose
 * COLUMN_QUALIFIER, COLUMN_QUALIFIER_2 and COUNT properties are the sums of the
 * corresponding inputs (1+1+1+1+3+3=... see expected values below).
 *
 * @param store            the Accumulo store under test; supplies the connection, table name and schema
 * @param elementConverter converts Gaffer elements to/from Accumulo keys and values
 * @throws StoreException                        if the store connection cannot be obtained
 * @throws AccumuloElementConversionException    if element/key conversion fails
 */
public void shouldAggregateEverythingWhenGroupByIsSetToBlank(final AccumuloStore store, final AccumuloElementConverter elementConverter) throws StoreException, AccumuloElementConversionException {
    final String visibilityString = "public";
    try {
        // Create six edges. All share the same vertices/direction, so with a blank
        // groupBy they all aggregate into one element.
        final Edge edge = new Edge(TestGroups.EDGE);
        edge.setSource("1");
        edge.setDestination("2");
        edge.setDirected(true);
        edge.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 1);
        edge.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 1);
        edge.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge.putProperty(AccumuloPropertyNames.COUNT, 1);
        final Edge edge2 = new Edge(TestGroups.EDGE);
        edge2.setSource("1");
        edge2.setDestination("2");
        edge2.setDirected(true);
        edge2.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 1);
        edge2.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 1);
        edge2.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge2.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge2.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge2.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge2.putProperty(AccumuloPropertyNames.COUNT, 1);
        final Edge edge3 = new Edge(TestGroups.EDGE);
        edge3.setSource("1");
        edge3.setDestination("2");
        edge3.setDirected(true);
        edge3.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 1);
        edge3.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 1);
        edge3.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge3.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge3.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge3.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge3.putProperty(AccumuloPropertyNames.COUNT, 1);
        // THIS EDGE WILL BE REDUCED MEANING ITS CQ (columnQualifier) will only occur once because its key is equal.
        final Edge edge4 = new Edge(TestGroups.EDGE);
        edge4.setSource("1");
        edge4.setDestination("2");
        edge4.setDirected(true);
        edge4.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 1);
        edge4.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 4);
        edge4.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge4.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge4.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge4.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge4.putProperty(AccumuloPropertyNames.COUNT, 2);
        final Edge edge5 = new Edge(TestGroups.EDGE);
        edge5.setSource("1");
        edge5.setDestination("2");
        edge5.setDirected(true);
        edge5.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 3);
        edge5.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 5);
        edge5.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge5.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge5.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge5.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge5.putProperty(AccumuloPropertyNames.COUNT, 10);
        final Edge edge6 = new Edge(TestGroups.EDGE);
        edge6.setSource("1");
        edge6.setDestination("2");
        edge6.setDirected(true);
        edge6.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 3);
        edge6.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 5);
        edge6.putProperty(AccumuloPropertyNames.PROP_1, 0);
        edge6.putProperty(AccumuloPropertyNames.PROP_2, 0);
        edge6.putProperty(AccumuloPropertyNames.PROP_3, 0);
        edge6.putProperty(AccumuloPropertyNames.PROP_4, 0);
        edge6.putProperty(AccumuloPropertyNames.COUNT, 5);
        // Convert each edge to its Accumulo key (getFirst(): Gaffer stores edges under two keys;
        // only the first is needed here).
        final Key key = elementConverter.getKeysFromEdge(edge).getFirst();
        final Key key2 = elementConverter.getKeysFromEdge(edge2).getFirst();
        final Key key3 = elementConverter.getKeysFromEdge(edge3).getFirst();
        final Key key4 = elementConverter.getKeysFromEdge(edge4).getFirst();
        final Key key5 = elementConverter.getKeysFromEdge(edge5).getFirst();
        final Key key6 = elementConverter.getKeysFromEdge(edge6).getFirst();
        // Accumulo values holding the serialised properties for each edge.
        final Value value1 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge.getProperties());
        final Value value2 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge2.getProperties());
        final Value value3 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge3.getProperties());
        final Value value4 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge4.getProperties());
        final Value value5 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge5.getProperties());
        final Value value6 = elementConverter.getValueFromProperties(TestGroups.EDGE, edge6.getProperties());
        // Build one mutation per key/value pair. NOTE: each mutation now consistently uses its
        // own key's row (the original used key.getRow() for m3-m6, which only worked because all
        // six edges share the same source/destination and therefore the same row).
        final Mutation m1 = new Mutation(key.getRow());
        m1.put(key.getColumnFamily(), key.getColumnQualifier(), new ColumnVisibility(key.getColumnVisibility()), key.getTimestamp(), value1);
        final Mutation m2 = new Mutation(key2.getRow());
        m2.put(key2.getColumnFamily(), key2.getColumnQualifier(), new ColumnVisibility(key2.getColumnVisibility()), key2.getTimestamp(), value2);
        final Mutation m3 = new Mutation(key3.getRow());
        m3.put(key3.getColumnFamily(), key3.getColumnQualifier(), new ColumnVisibility(key3.getColumnVisibility()), key3.getTimestamp(), value3);
        final Mutation m4 = new Mutation(key4.getRow());
        m4.put(key4.getColumnFamily(), key4.getColumnQualifier(), new ColumnVisibility(key4.getColumnVisibility()), key4.getTimestamp(), value4);
        final Mutation m5 = new Mutation(key5.getRow());
        m5.put(key5.getColumnFamily(), key5.getColumnQualifier(), new ColumnVisibility(key5.getColumnVisibility()), key5.getTimestamp(), value5);
        final Mutation m6 = new Mutation(key6.getRow());
        m6.put(key6.getColumnFamily(), key6.getColumnQualifier(), new ColumnVisibility(key6.getColumnVisibility()), key6.getTimestamp(), value6);
        // Write all mutations to the store's table.
        final BatchWriterConfig writerConfig = new BatchWriterConfig();
        writerConfig.setMaxMemory(1000000L);
        writerConfig.setMaxLatency(1000L, TimeUnit.MILLISECONDS);
        writerConfig.setMaxWriteThreads(1);
        final BatchWriter writer = store.getConnection().createBatchWriter(store.getProperties().getTable(), writerConfig);
        writer.addMutation(m1);
        writer.addMutation(m2);
        writer.addMutation(m3);
        writer.addMutation(m4);
        writer.addMutation(m5);
        writer.addMutation(m6);
        writer.close();
        // Expected single merged edge: property values are the sums across all six inputs.
        final Edge expectedEdge1 = new Edge(TestGroups.EDGE);
        expectedEdge1.setSource("1");
        expectedEdge1.setDestination("2");
        expectedEdge1.setDirected(true);
        expectedEdge1.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER, 5);
        expectedEdge1.putProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2, 10);
        expectedEdge1.putProperty(AccumuloPropertyNames.COUNT, 20);
        expectedEdge1.putProperty(AccumuloPropertyNames.PROP_1, 0);
        expectedEdge1.putProperty(AccumuloPropertyNames.PROP_2, 0);
        expectedEdge1.putProperty(AccumuloPropertyNames.PROP_3, 0);
        expectedEdge1.putProperty(AccumuloPropertyNames.PROP_4, 0);
        // Read data back through the aggregator iterator (blank groupBy) and check we
        // get exactly one merged element.
        final Authorizations authorizations = new Authorizations(visibilityString);
        final Scanner scanner = store.getConnection().createScanner(store.getProperties().getTable(), authorizations);
        final IteratorSetting iteratorSetting = new IteratorSettingBuilder(AccumuloStoreConstants.COLUMN_QUALIFIER_AGGREGATOR_ITERATOR_PRIORITY, "KeyCombiner", CoreKeyGroupByAggregatorIterator.class).all().view(new View.Builder().edge(TestGroups.EDGE, new ViewElementDefinition.Builder().groupBy().build()).build()).schema(store.getSchema()).keyConverter(store.getKeyPackage().getKeyConverter()).build();
        scanner.addScanIterator(iteratorSetting);
        final Iterator<Entry<Key, Value>> it = scanner.iterator();
        final Entry<Key, Value> entry = it.next();
        final Element readEdge = elementConverter.getFullElement(entry.getKey(), entry.getValue());
        assertEquals(expectedEdge1, readEdge);
        assertEquals(5, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER));
        assertEquals(10, readEdge.getProperty(AccumuloPropertyNames.COLUMN_QUALIFIER_2));
        assertEquals(20, readEdge.getProperty(AccumuloPropertyNames.COUNT));
        // Any further row means aggregation failed to merge everything.
        if (it.hasNext()) {
            fail("Additional row found.");
        }
    } catch (AccumuloException | TableNotFoundException e) {
        fail(this.getClass().getSimpleName() + " failed with exception: " + e);
    }
}
Also used: Scanner (org.apache.accumulo.core.client.Scanner), AccumuloException (org.apache.accumulo.core.client.AccumuloException), Authorizations (org.apache.accumulo.core.security.Authorizations), IteratorSettingBuilder (uk.gov.gchq.gaffer.accumulostore.utils.IteratorSettingBuilder), Element (uk.gov.gchq.gaffer.data.element.Element), TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException), Entry (java.util.Map.Entry), IteratorSetting (org.apache.accumulo.core.client.IteratorSetting), Value (org.apache.accumulo.core.data.Value), BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig), Mutation (org.apache.accumulo.core.data.Mutation), ColumnVisibility (org.apache.accumulo.core.security.ColumnVisibility), BatchWriter (org.apache.accumulo.core.client.BatchWriter), Edge (uk.gov.gchq.gaffer.data.element.Edge), Key (org.apache.accumulo.core.data.Key)

Example 17 with Authorizations

use of org.apache.accumulo.core.security.Authorizations in project hive by apache.

In class TestHiveAccumuloTableInputFormat, the method testConfigureAccumuloInputFormatWithAuthorizations:

@Test
public void testConfigureAccumuloInputFormatWithAuthorizations() throws Exception {
    // Request scan-time authorizations "foo,bar" via the serde configuration key.
    final AccumuloConnectionParameters accumuloParams = new AccumuloConnectionParameters(conf);
    conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo,bar");
    final ColumnMapper columnMapper = new ColumnMapper(conf.get(AccumuloSerDeParameters.COLUMN_MAPPINGS), conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE), columnNames, columnTypes);
    // Compute the cf:cq pairs up front so the fetchColumns verification below can
    // assert the exact argument.
    final Set<Pair<Text, Text>> expectedColumnPairs = inputformat.getPairCollection(columnMapper.getColumnMappings());
    final List<IteratorSetting> noIterators = Collections.emptyList();
    final Set<Range> scanRanges = Collections.singleton(new Range());
    final String zkInstanceName = "realInstance";
    final String zkQuorum = "host1:2181,host2:2181,host3:2181";
    final ZooKeeperInstance mockZkInstance = Mockito.mock(ZooKeeperInstance.class);
    final HiveAccumuloTableInputFormat mockFormat = Mockito.mock(HiveAccumuloTableInputFormat.class);
    // Stub the ZooKeeperInstance mock with a realistic instance name and quorum.
    Mockito.when(mockZkInstance.getInstanceName()).thenReturn(zkInstanceName);
    Mockito.when(mockZkInstance.getZooKeepers()).thenReturn(zkQuorum);
    // Let configure() and getPairCollection() run for real on the otherwise-mocked format.
    Mockito.doCallRealMethod().when(mockFormat).configure(conf, mockZkInstance, con, accumuloParams, columnMapper, noIterators, scanRanges);
    Mockito.doCallRealMethod().when(mockFormat).getPairCollection(columnMapper.getColumnMappings());
    mockFormat.configure(conf, mockZkInstance, con, accumuloParams, columnMapper, noIterators, scanRanges);
    // configure() must delegate to the AccumuloInputFormat setters with the expected arguments,
    // including the authorizations read from the conf.
    Mockito.verify(mockFormat).setZooKeeperInstance(conf, zkInstanceName, zkQuorum, false);
    Mockito.verify(mockFormat).setConnectorInfo(conf, USER, new PasswordToken(PASS));
    Mockito.verify(mockFormat).setInputTableName(conf, TEST_TABLE);
    Mockito.verify(mockFormat).setScanAuthorizations(conf, new Authorizations("foo,bar"));
    Mockito.verify(mockFormat).addIterators(conf, noIterators);
    Mockito.verify(mockFormat).setRanges(conf, scanRanges);
    Mockito.verify(mockFormat).fetchColumns(conf, expectedColumnPairs);
}
Also used : Authorizations(org.apache.accumulo.core.security.Authorizations) Range(org.apache.accumulo.core.data.Range) ZooKeeperInstance(org.apache.accumulo.core.client.ZooKeeperInstance) PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) IteratorSetting(org.apache.accumulo.core.client.IteratorSetting) AccumuloConnectionParameters(org.apache.hadoop.hive.accumulo.AccumuloConnectionParameters) ColumnMapper(org.apache.hadoop.hive.accumulo.columns.ColumnMapper) Pair(org.apache.accumulo.core.util.Pair) Test(org.junit.Test)

Example 18 with Authorizations

use of org.apache.accumulo.core.security.Authorizations in project hive by apache.

In class TestAccumuloSerDeParameters, the method testNullAuthsFromProperties:

@Test
public void testNullAuthsFromProperties() throws SerDeException {
    // No AUTHORIZATIONS_KEY is set anywhere, so the parsed parameters
    // should report null authorizations.
    final Configuration hadoopConf = new Configuration();
    final Properties tableProps = new Properties();
    tableProps.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,cf:f2,cf:f3");
    tableProps.setProperty(serdeConstants.LIST_COLUMNS, "field1,field2,field3");
    tableProps.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,string,string");
    final AccumuloSerDeParameters serDeParams = new AccumuloSerDeParameters(hadoopConf, tableProps, AccumuloSerDe.class.getName());
    Assert.assertNull(serDeParams.getAuthorizations());
}
Also used : Authorizations(org.apache.accumulo.core.security.Authorizations) Configuration(org.apache.hadoop.conf.Configuration) Properties(java.util.Properties) Test(org.junit.Test)

Example 19 with Authorizations

use of org.apache.accumulo.core.security.Authorizations in project hive by apache.

In class TestAccumuloSerDeParameters, the method testParseAuthorizationsFromConf:

@Test
public void testParseAuthorizationsFromConf() throws SerDeException {
    // A comma-separated auth string in the conf should parse into an
    // Authorizations object equal to one built from the same string.
    final Configuration hadoopConf = new Configuration(false);
    hadoopConf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo,bar");
    final Authorizations parsedAuths = AccumuloSerDeParameters.getAuthorizationsFromConf(hadoopConf);
    Assert.assertEquals(new Authorizations("foo,bar"), parsedAuths);
}
Also used : Authorizations(org.apache.accumulo.core.security.Authorizations) Configuration(org.apache.hadoop.conf.Configuration) Test(org.junit.Test)

Example 20 with Authorizations

use of org.apache.accumulo.core.security.Authorizations in project hive by apache.

In class TestAccumuloSerDeParameters, the method testNullAuthsFromConf:

@Test
public void testNullAuthsFromConf() throws SerDeException {
    // With no authorizations configured at all, parsing from the conf yields null.
    final Configuration emptyConf = new Configuration(false);
    Assert.assertNull(AccumuloSerDeParameters.getAuthorizationsFromConf(emptyConf));
}
Also used : Authorizations(org.apache.accumulo.core.security.Authorizations) Configuration(org.apache.hadoop.conf.Configuration) Test(org.junit.Test)

Aggregations

Authorizations (org.apache.accumulo.core.security.Authorizations)30 Value (org.apache.accumulo.core.data.Value)18 Key (org.apache.accumulo.core.data.Key)16 Mutation (org.apache.accumulo.core.data.Mutation)15 Test (org.junit.Test)15 PasswordToken (org.apache.accumulo.core.client.security.tokens.PasswordToken)14 Entry (java.util.Map.Entry)13 BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig)13 IteratorSetting (org.apache.accumulo.core.client.IteratorSetting)13 Connector (org.apache.accumulo.core.client.Connector)12 BatchWriter (org.apache.accumulo.core.client.BatchWriter)11 Scanner (org.apache.accumulo.core.client.Scanner)11 MockInstance (org.apache.accumulo.core.client.mock.MockInstance)10 ColumnVisibility (org.apache.accumulo.core.security.ColumnVisibility)9 AccumuloException (org.apache.accumulo.core.client.AccumuloException)8 Configuration (org.apache.hadoop.conf.Configuration)8 Text (org.apache.hadoop.io.Text)8 TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException)7 JobConf (org.apache.hadoop.mapred.JobConf)7 Edge (uk.gov.gchq.gaffer.data.element.Edge)7