Search in sources:

Example 61 with Cell

Use of org.apache.hadoop.hbase.Cell in the Apache HBase project.

From the class TestGroupingTableMap, the method shouldCreateNewKeyAlthoughExtraKey.

/**
 * Verifies that GroupingTableMap emits exactly one grouped key when the row
 * contains all configured group columns plus an extra (non-grouped) column.
 */
@Test
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldCreateNewKeyAlthoughExtraKey() throws Exception {
    GroupingTableMap gTableMap = null;
    try {
        Result result = mock(Result.class);
        Reporter reporter = mock(Reporter.class);
        gTableMap = new GroupingTableMap();
        Configuration cfg = new Configuration();
        // Group on familyA:qualifierA and familyB:qualifierB; familyC:qualifierC below is extra.
        cfg.set(GroupingTableMap.GROUP_COLUMNS, "familyA:qualifierA familyB:qualifierB");
        JobConf jobConf = new JobConf(cfg);
        gTableMap.configure(jobConf);
        byte[] row = {};
        // Bytes.toBytes instead of String.getBytes(): getBytes() uses the
        // platform-default charset; Bytes.toBytes is the HBase convention and
        // matches the encoding already used for the cell values.
        List<Cell> keyValues = ImmutableList.<Cell>of(
            new KeyValue(row, Bytes.toBytes("familyA"), Bytes.toBytes("qualifierA"), Bytes.toBytes("1111")),
            new KeyValue(row, Bytes.toBytes("familyB"), Bytes.toBytes("qualifierB"), Bytes.toBytes("2222")),
            new KeyValue(row, Bytes.toBytes("familyC"), Bytes.toBytes("qualifierC"), Bytes.toBytes("3333")));
        when(result.listCells()).thenReturn(keyValues);
        OutputCollector<ImmutableBytesWritable, Result> outputCollectorMock = mock(OutputCollector.class);
        gTableMap.map(null, result, outputCollectorMock, reporter);
        verify(result).listCells();
        // Exactly one output record despite the extra familyC cell.
        verify(outputCollectorMock, times(1)).collect(any(ImmutableBytesWritable.class), any(Result.class));
        verifyNoMoreInteractions(outputCollectorMock);
    } finally {
        if (gTableMap != null) {
            gTableMap.close();
        }
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Configuration(org.apache.hadoop.conf.Configuration) Reporter(org.apache.hadoop.mapred.Reporter) JobConf(org.apache.hadoop.mapred.JobConf) Cell(org.apache.hadoop.hbase.Cell) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)

Example 62 with Cell

Use of org.apache.hadoop.hbase.Cell in the Apache HBase project.

From the class TestWALRecordReader, the method testSplit.

/**
 * Create a new reader from the split, and match the edits against the passed columns.
 *
 * @param split the input split to read back
 * @param columns expected qualifiers, in the order the reader should return them
 */
private void testSplit(InputSplit split, byte[]... columns) throws Exception {
    final WALRecordReader reader = getReader();
    reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));
    // try/finally: the original leaked the reader when an assertion failed mid-loop.
    try {
        for (byte[] column : columns) {
            assertTrue(reader.nextKeyValue());
            Cell cell = reader.getCurrentValue().getCells().get(0);
            // assertTrue(message, condition) replaces the original
            // "if (!match) assertTrue(msg, false)" construct — same failure
            // message, but idiomatic JUnit.
            assertTrue(
                "expected [" + Bytes.toString(column) + "], actual ["
                    + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
                        cell.getQualifierLength()) + "]",
                Bytes.equals(column, 0, column.length, cell.getQualifierArray(),
                    cell.getQualifierOffset(), cell.getQualifierLength()));
        }
        // No edits beyond the requested columns.
        assertFalse(reader.nextKeyValue());
    } finally {
        reader.close();
    }
}
Also used : WALRecordReader(org.apache.hadoop.hbase.mapreduce.WALInputFormat.WALRecordReader) Cell(org.apache.hadoop.hbase.Cell)

Example 63 with Cell

Use of org.apache.hadoop.hbase.Cell in the Apache HBase project.

From the class TestTimeRangeMapRed, the method verify.

/**
 * Scans FAMILY_NAME:COLUMN_NAME (latest version only) and asserts that each
 * cell's boolean value matches the expectation recorded in TIMESTAMP for that
 * cell's timestamp.
 *
 * @param table the table to scan
 */
private void verify(final Table table) throws IOException {
    Scan scan = new Scan();
    scan.addColumn(FAMILY_NAME, COLUMN_NAME);
    scan.setMaxVersions(1);
    // try-with-resources: the original leaked the scanner whenever the
    // assertion below threw before reaching scanner.close().
    try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result r : scanner) {
            for (Cell kv : r.listCells()) {
                log.debug(Bytes.toString(r.getRow()) + "\t" + Bytes.toString(CellUtil.cloneFamily(kv)) + "\t" + Bytes.toString(CellUtil.cloneQualifier(kv)) + "\t" + kv.getTimestamp() + "\t" + Bytes.toBoolean(CellUtil.cloneValue(kv)));
                org.junit.Assert.assertEquals(TIMESTAMP.get(kv.getTimestamp()), Bytes.toBoolean(CellUtil.cloneValue(kv)));
            }
        }
    }
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Result(org.apache.hadoop.hbase.client.Result)

Example 64 with Cell

Use of org.apache.hadoop.hbase.Cell in the Apache HBase project.

From the class TestWideScanner, the method testWideScanBatching.

/**
 * Scans a wide row with a batch limit and checks that (a) no batch exceeds the
 * limit, (b) every batch's cells come from the same row, and (c) the total cell
 * count equals the number inserted — while forcing reader churn between batches.
 */
@Test
public void testWideScanBatching() throws IOException {
    final int batch = 256;
    try {
        this.r = createNewHRegion(TESTTABLEDESC, null, null);
        int inserted = addWideContent(this.r);
        List<Cell> results = new ArrayList<>();
        Scan scan = new Scan();
        scan.addFamily(A);
        scan.addFamily(B);
        scan.addFamily(C);
        scan.setMaxVersions(100);
        scan.setBatch(batch);
        InternalScanner s = r.getScanner(scan);
        // try/finally: the original skipped s.close() whenever an assertion failed.
        try {
            int total = 0;
            int i = 0;
            boolean more;
            do {
                more = s.next(results);
                i++;
                LOG.info("iteration #" + i + ", results.size=" + results.size());
                // assert that the result set is no larger than the batch limit
                assertTrue(results.size() <= batch);
                total += results.size();
                if (results.size() > 0) {
                    // assert that all results are from the same row
                    byte[] row = CellUtil.cloneRow(results.get(0));
                    for (Cell kv : results) {
                        assertTrue(Bytes.equals(row, CellUtil.cloneRow(kv)));
                    }
                }
                results.clear();
                // trigger ChangedReadersObservers so the next batch re-opens readers
                Iterator<KeyValueScanner> scanners = ((HRegion.RegionScannerImpl) s).storeHeap.getHeap().iterator();
                while (scanners.hasNext()) {
                    StoreScanner ss = (StoreScanner) scanners.next();
                    ss.updateReaders(new ArrayList<>());
                }
            } while (more);
            // assert that the scanner returned all inserted values
            LOG.info("inserted " + inserted + ", scanned " + total);
            assertEquals(total, inserted);
        } finally {
            s.close();
        }
    } finally {
        HBaseTestingUtility.closeRegionAndWAL(this.r);
    }
}
Also used : ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 65 with Cell

Use of org.apache.hadoop.hbase.Cell in the Apache HBase project.

From the class TestStore, the method testMultipleTimestamps.

/**
 * Test to ensure correctness when using Stores with multiple timestamps:
 * gets with a time range must find cells both in the flushed store file
 * (timestamps 1-20) and in the memstore (timestamps 30, 80), and return
 * nothing for a range covering no timestamps.
 * @throws IOException
 */
@Test
public void testMultipleTimestamps() throws IOException {
    int numRows = 1;
    long[] timestamps1 = new long[] { 1, 5, 10, 20 };
    long[] timestamps2 = new long[] { 30, 80 };
    init(this.name.getMethodName());
    // First batch of cells is flushed to a store file...
    List<Cell> kvList1 = getKeyValueSet(timestamps1, numRows, qf1, family);
    for (Cell kv : kvList1) {
        this.store.add(kv, null);
    }
    this.store.snapshot();
    flushStore(store, id++);
    // ...second batch stays in the memstore, so gets span both locations.
    List<Cell> kvList2 = getKeyValueSet(timestamps2, numRows, qf1, family);
    for (Cell kv : kvList2) {
        this.store.add(kv, null);
    }
    Get get = new Get(Bytes.toBytes(1));
    get.addColumn(family, qf1);
    // Each of these ranges overlaps at least one written timestamp.
    assertTimeRangeHasResults(get, 0, 15, true);
    assertTimeRangeHasResults(get, 40, 90, true);
    assertTimeRangeHasResults(get, 10, 45, true);
    assertTimeRangeHasResults(get, 80, 145, true);
    assertTimeRangeHasResults(get, 1, 2, true);
    // [90, 200) covers no written timestamp, so it must come back empty.
    assertTimeRangeHasResults(get, 90, 200, false);
}

/**
 * Sets [minStamp, maxStamp) on {@code get}, reads from the store, and asserts
 * that results are present (or absent) as expected.
 */
private void assertTimeRangeHasResults(Get get, long minStamp, long maxStamp,
        boolean expectResults) throws IOException {
    get.setTimeRange(minStamp, maxStamp);
    List<Cell> result = HBaseTestingUtility.getFromStoreFile(store, get);
    if (expectResults) {
        Assert.assertTrue(result.size() > 0);
    } else {
        Assert.assertTrue(result.size() == 0);
    }
}
Also used : Get(org.apache.hadoop.hbase.client.Get) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Aggregations

Cell (org.apache.hadoop.hbase.Cell)862 Test (org.junit.Test)326 ArrayList (java.util.ArrayList)323 Scan (org.apache.hadoop.hbase.client.Scan)258 KeyValue (org.apache.hadoop.hbase.KeyValue)220 Result (org.apache.hadoop.hbase.client.Result)203 Put (org.apache.hadoop.hbase.client.Put)159 IOException (java.io.IOException)123 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)106 Get (org.apache.hadoop.hbase.client.Get)85 Table (org.apache.hadoop.hbase.client.Table)85 List (java.util.List)80 TableName (org.apache.hadoop.hbase.TableName)77 Delete (org.apache.hadoop.hbase.client.Delete)75 CellScanner (org.apache.hadoop.hbase.CellScanner)69 Configuration (org.apache.hadoop.conf.Configuration)62 InterruptedIOException (java.io.InterruptedIOException)48 Map (java.util.Map)45 Path (org.apache.hadoop.fs.Path)45 RegionScanner (org.apache.hadoop.hbase.regionserver.RegionScanner)45