Search in sources :

Example 1 with PutColumn

use of org.apache.nifi.hbase.put.PutColumn in project nifi by apache.

From the class HBase_1_1_2_ClientMapCacheService, the method putIfAbsent:

@Override
public <K, V> boolean putIfAbsent(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer) throws IOException {
    // Convert the cache key and value into the byte form that HBase stores.
    final byte[] row = serialize(key, keySerializer);
    final byte[] payload = serialize(value, valueSerializer);
    // checkAndPut with a null expected value writes the cell only when it does not
    // already exist, which provides the atomic put-if-absent semantics of this cache.
    return hBaseClientService.checkAndPut(hBaseCacheTableName, row, hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, null,
            new PutColumn(hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, payload));
}
Also used : PutColumn(org.apache.nifi.hbase.put.PutColumn)

Example 2 with PutColumn

use of org.apache.nifi.hbase.put.PutColumn in project nifi by apache.

From the class HBase_1_1_2_ClientService, the method put:

@Override
public void put(final String tableName, final Collection<PutFlowFile> puts) throws IOException {
    try (final Table table = connection.getTable(TableName.valueOf(tableName))) {
        // Group all incoming columns into a single Put per row so the table receives
        // one batched mutation per row. The map key is the row rendered as a String
        // because byte[] uses identity equality and cannot serve as a HashMap key.
        final Map<String, Put> rowPuts = new HashMap<>();
        for (final PutFlowFile putFlowFile : puts) {
            final String rowKeyString = new String(putFlowFile.getRow(), StandardCharsets.UTF_8);
            // computeIfAbsent replaces the manual get / null-check / put pattern.
            final Put put = rowPuts.computeIfAbsent(rowKeyString, k -> new Put(putFlowFile.getRow()));
            for (final PutColumn column : putFlowFile.getColumns()) {
                if (column.getTimestamp() != null) {
                    // Honor an explicit cell timestamp when the flow file supplied one.
                    put.addColumn(column.getColumnFamily(), column.getColumnQualifier(), column.getTimestamp(), column.getBuffer());
                } else {
                    put.addColumn(column.getColumnFamily(), column.getColumnQualifier(), column.getBuffer());
                }
            }
        }
        // Issue a single batched put for all rows.
        table.put(new ArrayList<>(rowPuts.values()));
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) HashMap(java.util.HashMap) PutColumn(org.apache.nifi.hbase.put.PutColumn) Put(org.apache.hadoop.hbase.client.Put) PutFlowFile(org.apache.nifi.hbase.put.PutFlowFile)

Example 3 with PutColumn

use of org.apache.nifi.hbase.put.PutColumn in project nifi by apache.

From the class MockHBaseClientService, the method put:

@Override
public void put(final String tableName, final byte[] rowId, final Collection<PutColumn> columns) throws IOException {
    // Apply the columns to the mocked table and keep a String copy of each cell
    // so tests can assert on the recorded results via addResult(...).
    final Put put = new Put(rowId);
    final Map<String, String> map = new HashMap<String, String>();
    for (final PutColumn column : columns) {
        put.addColumn(column.getColumnFamily(), column.getColumnQualifier(), column.getBuffer());
        // Decode with an explicit charset: the no-arg String(byte[]) constructor uses
        // the platform default and would make test results environment-dependent.
        map.put(new String(column.getColumnQualifier(), java.nio.charset.StandardCharsets.UTF_8),
                new String(column.getBuffer(), java.nio.charset.StandardCharsets.UTF_8));
    }
    table.put(put);
    addResult(new String(rowId, java.nio.charset.StandardCharsets.UTF_8), map, 1);
}
Also used : HashMap(java.util.HashMap) PutColumn(org.apache.nifi.hbase.put.PutColumn) Put(org.apache.hadoop.hbase.client.Put)

Example 4 with PutColumn

use of org.apache.nifi.hbase.put.PutColumn in project nifi by apache.

From the class TestHBase_1_1_2_ClientService, the method testMultiplePutsSameRow:

@Test
public void testMultiplePutsSameRow() throws IOException, InitializationException {
    final String tableName = "nifi";
    final String row = "row1";
    final String columnFamily = "family1";
    final String columnQualifier = "qualifier1";
    final String content1 = "content1";
    final String content2 = "content2";

    // Two PutFlowFiles targeting the same row/family/qualifier with different content.
    final PutFlowFile putFlowFile1 = new PutFlowFile(tableName, row.getBytes(StandardCharsets.UTF_8),
            Collections.singletonList(new PutColumn(columnFamily.getBytes(StandardCharsets.UTF_8),
                    columnQualifier.getBytes(StandardCharsets.UTF_8), content1.getBytes(StandardCharsets.UTF_8))), null);
    final PutFlowFile putFlowFile2 = new PutFlowFile(tableName, row.getBytes(StandardCharsets.UTF_8),
            Collections.singletonList(new PutColumn(columnFamily.getBytes(StandardCharsets.UTF_8),
                    columnQualifier.getBytes(StandardCharsets.UTF_8), content2.getBytes(StandardCharsets.UTF_8))), null);

    final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);

    // Mock the HBase Table so the put operations can be inspected afterwards.
    final Table mockTable = Mockito.mock(Table.class);
    when(mockTable.getName()).thenReturn(TableName.valueOf(tableName));

    // Create the controller service and link it to the test processor.
    final HBaseClientService service = configureHBaseClientService(runner, mockTable);
    runner.assertValid(service);

    // Write multiple cells for the same row through the controller service.
    final HBaseClientService hBaseClientService = runner.getProcessContext()
            .getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
            .asControllerService(HBaseClientService.class);
    hBaseClientService.put(tableName, Arrays.asList(putFlowFile1, putFlowFile2));

    // The service should have batched everything into a single table.put(...) call...
    final ArgumentCaptor<List> putCaptor = ArgumentCaptor.forClass(List.class);
    verify(mockTable, times(1)).put(putCaptor.capture());

    // ...containing exactly one Put for the shared row...
    final List<Put> puts = putCaptor.getValue();
    assertEquals(1, puts.size());

    // ...and that single Put must carry both cells.
    final Map.Entry<byte[], List<Cell>> entry = puts.get(0).getFamilyCellMap().firstEntry();
    assertEquals(2, entry.getValue().size());
}
Also used : Table(org.apache.hadoop.hbase.client.Table) TestRunner(org.apache.nifi.util.TestRunner) PutColumn(org.apache.nifi.hbase.put.PutColumn) Put(org.apache.hadoop.hbase.client.Put) PutFlowFile(org.apache.nifi.hbase.put.PutFlowFile) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) NavigableMap(java.util.NavigableMap) Test(org.junit.Test)

Example 5 with PutColumn

use of org.apache.nifi.hbase.put.PutColumn in project nifi by apache.

From the class TestHBase_1_1_2_ClientService, the method testSinglePut:

@Test
public void testSinglePut() throws InitializationException, IOException {
    final String tableName = "nifi";
    final String row = "row1";
    final String columnFamily = "family1";
    final String columnQualifier = "qualifier1";
    final String content = "content1";

    // A single-cell PutFlowFile for the row under test.
    final PutColumn column = new PutColumn(columnFamily.getBytes(StandardCharsets.UTF_8),
            columnQualifier.getBytes(StandardCharsets.UTF_8), content.getBytes(StandardCharsets.UTF_8));
    final PutFlowFile putFlowFile = new PutFlowFile(tableName, row.getBytes(StandardCharsets.UTF_8),
            Collections.singletonList(column), null);

    final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);

    // Mock the HBase Table so the put operation can be inspected afterwards.
    final Table mockTable = Mockito.mock(Table.class);
    when(mockTable.getName()).thenReturn(TableName.valueOf(tableName));

    // Create the controller service and link it to the test processor.
    final HBaseClientService service = configureHBaseClientService(runner, mockTable);
    runner.assertValid(service);

    // Put a single cell through the controller service.
    final HBaseClientService hBaseClientService = runner.getProcessContext()
            .getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
            .asControllerService(HBaseClientService.class);
    hBaseClientService.put(tableName, Arrays.asList(putFlowFile));

    // Exactly one table.put(...) call, carrying exactly one Put.
    final ArgumentCaptor<List> putCaptor = ArgumentCaptor.forClass(List.class);
    verify(mockTable, times(1)).put(putCaptor.capture());
    final List<Put> puts = putCaptor.getValue();
    assertEquals(1, puts.size());

    // Verify the row, family, qualifier, and content of the captured Put.
    verifyPut(row, columnFamily, columnQualifier, content, puts.get(0));
}
Also used : Table(org.apache.hadoop.hbase.client.Table) TestRunner(org.apache.nifi.util.TestRunner) PutColumn(org.apache.nifi.hbase.put.PutColumn) ArrayList(java.util.ArrayList) List(java.util.List) Put(org.apache.hadoop.hbase.client.Put) PutFlowFile(org.apache.nifi.hbase.put.PutFlowFile) Test(org.junit.Test)

Aggregations

PutColumn (org.apache.nifi.hbase.put.PutColumn)17 PutFlowFile (org.apache.nifi.hbase.put.PutFlowFile)9 ArrayList (java.util.ArrayList)7 Put (org.apache.hadoop.hbase.client.Put)6 Table (org.apache.hadoop.hbase.client.Table)5 TestRunner (org.apache.nifi.util.TestRunner)4 Test (org.junit.Test)4 HashMap (java.util.HashMap)3 List (java.util.List)3 IOException (java.io.IOException)2 InputStream (java.io.InputStream)2 Map (java.util.Map)2 InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback)2 JsonNode (com.fasterxml.jackson.databind.JsonNode)1 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)1 BufferedInputStream (java.io.BufferedInputStream)1 LinkedHashMap (java.util.LinkedHashMap)1 NavigableMap (java.util.NavigableMap)1 AtomicReference (java.util.concurrent.atomic.AtomicReference)1 Cell (org.apache.hadoop.hbase.Cell)1