
Example 26 with Get

Use of org.apache.hadoop.hbase.client.Get in project hbase by apache.

From the class TestEndToEndSplitTransaction, method blockUntilRegionIsOpened:

public static void blockUntilRegionIsOpened(Configuration conf, long timeout, HRegionInfo hri) throws IOException, InterruptedException {
    log("blocking until region is opened for reading:" + hri.getRegionNameAsString());
    long start = System.currentTimeMillis();
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Table table = conn.getTable(hri.getTable())) {
        byte[] row = hri.getStartKey();
        // Check for null/empty row. If we find one, use a key that is likely to be in first region.
        if (row == null || row.length <= 0)
            row = new byte[] { '0' };
        Get get = new Get(row);
        while (System.currentTimeMillis() - start < timeout) {
            try {
                table.get(get);
                break;
            } catch (IOException ex) {
                // Region not open for reads yet; wait a little longer and retry.
            }
            Threads.sleep(10);
        }
    }
}
Also used: Table(org.apache.hadoop.hbase.client.Table) Get(org.apache.hadoop.hbase.client.Get) Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException)
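The same probe-until-readable pattern can be factored into a standalone helper. The sketch below is not from the HBase source; the class and method names are illustrative assumptions, and it relies only on the public client API already shown above.

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;

public final class ReadProbe {
    /** Returns true once a Get on the given row succeeds, false if the timeout elapses first. */
    public static boolean waitUntilReadable(Connection conn, TableName tableName, byte[] row,
            long timeoutMs) throws IOException, InterruptedException {
        long start = System.currentTimeMillis();
        try (Table table = conn.getTable(tableName)) {
            Get get = new Get(row);
            while (System.currentTimeMillis() - start < timeoutMs) {
                try {
                    // The region is serving reads as soon as a Get stops throwing.
                    table.get(get);
                    return true;
                } catch (IOException ex) {
                    // Region not open yet; back off briefly and retry.
                    Thread.sleep(10);
                }
            }
        }
        return false;
    }
}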

Example 27 with Get

Use of org.apache.hadoop.hbase.client.Get in project hbase by apache.

From the class TestMobStoreScanner, method testReadFromCorruptMobFilesWithReadEmptyValueOnMobCellMiss:

@Test
public void testReadFromCorruptMobFilesWithReadEmptyValueOnMobCellMiss() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    setUp(0, tableName);
    createRecordAndCorruptMobFile(tableName, row1, family, qf1, Bytes.toBytes("value1"));
    Get get = new Get(row1);
    get.setAttribute(MobConstants.EMPTY_VALUE_ON_MOBCELL_MISS, Bytes.toBytes(true));
    Result result = table.get(get);
    Cell cell = result.getColumnLatestCell(family, qf1);
    Assert.assertEquals(0, cell.getValueLength());
}
Also used: TableName(org.apache.hadoop.hbase.TableName) Get(org.apache.hadoop.hbase.client.Get) Cell(org.apache.hadoop.hbase.Cell) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
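The MobConstants.EMPTY_VALUE_ON_MOBCELL_MISS attribute asks the server to substitute a zero-length value when the MOB file backing a cell cannot be read, instead of failing the read. A minimal client-side sketch of the same idea follows; it assumes an existing Table handle named table, and the row, family, and qualifier names are placeholders, not taken from the test above.

byte[] family = Bytes.toBytes("f");        // placeholder column family
byte[] qualifier = Bytes.toBytes("q");     // placeholder qualifier
Get get = new Get(Bytes.toBytes("row-1"));
// Tolerate a missing or corrupt MOB file for this read.
get.setAttribute(MobConstants.EMPTY_VALUE_ON_MOBCELL_MISS, Bytes.toBytes(true));
Result result = table.get(get);
Cell cell = result.getColumnLatestCell(family, qualifier);
// A zero-length value signals that the MOB cell's backing data was unavailable.
boolean mobValueMissing = cell != null && cell.getValueLength() == 0;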

Example 28 with Get

Use of org.apache.hadoop.hbase.client.Get in project hbase by apache.

From the class TestMobStoreScanner, method testGetMassive:

@Test(timeout = 60000)
public void testGetMassive() throws Exception {
    setUp(defaultThreshold, TableName.valueOf(name.getMethodName()));
    // Put some data. Values of 5, 10, 15 and 20 MB are OK (this would be right below
    // the protobuf default max size of 64 MB); values of 25, 30 and 40 MB fail, as
    // these are above the protobuf max size of 64 MB.
    byte[] bigValue = new byte[25 * 1024 * 1024];
    Put put = new Put(row1);
    put.addColumn(family, qf1, bigValue);
    put.addColumn(family, qf2, bigValue);
    put.addColumn(family, qf3, bigValue);
    table.put(put);
    Get g = new Get(row1);
    table.get(g);
    // should not have blown up.
}
Also used: Get(org.apache.hadoop.hbase.client.Get) Put(org.apache.hadoop.hbase.client.Put) Test(org.junit.Test)
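If the test also needed to verify the payload that comes back, a hedged extension of the same Get could look like the sketch below. It reuses the table, row1, family, qf1..qf3, and bigValue fields from the snippet above and is not part of the original test.

    Get g = new Get(row1);
    Result r = table.get(g);
    // Each column should hold the full 25 MB payload that was written.
    Assert.assertArrayEquals(bigValue, r.getValue(family, qf1));
    Assert.assertArrayEquals(bigValue, r.getValue(family, qf2));
    Assert.assertArrayEquals(bigValue, r.getValue(family, qf3));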

Example 29 with Get

Use of org.apache.hadoop.hbase.client.Get in project hbase by apache.

From the class TestRegionReplicas, method assertGetRpc:

// Build a Get request by hand and send it directly through the region server's RPC services.
private void assertGetRpc(HRegionInfo info, int value, boolean expect) throws IOException, org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException {
    byte[] row = Bytes.toBytes(String.valueOf(value));
    Get get = new Get(row);
    ClientProtos.GetRequest getReq = RequestConverter.buildGetRequest(info.getRegionName(), get);
    ClientProtos.GetResponse getResp = getRS().getRSRpcServices().get(null, getReq);
    Result result = ProtobufUtil.toResult(getResp.getResult());
    if (expect) {
        Assert.assertArrayEquals(row, result.getValue(f, null));
    } else {
        Assert.assertTrue(result.isEmpty());
    }
}
Also used: Get(org.apache.hadoop.hbase.client.Get) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) Result(org.apache.hadoop.hbase.client.Result)
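For comparison, the same read through the regular client API, without hand-building the protobuf request, would look roughly like the sketch below. It assumes an open Connection named conn (not present in the test) and reuses the f family field; the point of the test above is precisely to bypass this layer and talk to the region server's RPC services directly.

    try (Table table = conn.getTable(info.getTable())) {
        Result result = table.get(new Get(row));
        if (expect) {
            Assert.assertArrayEquals(row, result.getValue(f, null));
        } else {
            Assert.assertTrue(result.isEmpty());
        }
    }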

Example 30 with Get

Use of org.apache.hadoop.hbase.client.Get in project hbase by apache.

From the class TestReplicationSmallTests, method testSimplePutDelete:

/**
   * Add a row, check it's replicated, delete it, check it's gone
   * @throws Exception
   */
@Test(timeout = 300000)
public void testSimplePutDelete() throws Exception {
    LOG.info("testSimplePutDelete");
    Put put = new Put(row);
    put.addColumn(famName, row, row);
    htable1 = utility1.getConnection().getTable(tableName);
    htable1.put(put);
    Get get = new Get(row);
    for (int i = 0; i < NB_RETRIES; i++) {
        if (i == NB_RETRIES - 1) {
            fail("Waited too much time for put replication");
        }
        Result res = htable2.get(get);
        if (res.isEmpty()) {
            LOG.info("Row not available");
            Thread.sleep(SLEEP_TIME);
        } else {
            assertArrayEquals(res.value(), row);
            break;
        }
    }
    Delete del = new Delete(row);
    htable1.delete(del);
    get = new Get(row);
    for (int i = 0; i < NB_RETRIES; i++) {
        if (i == NB_RETRIES - 1) {
            fail("Waited too much time for del replication");
        }
        Result res = htable2.get(get);
        if (res.size() >= 1) {
            LOG.info("Row not deleted");
            Thread.sleep(SLEEP_TIME);
        } else {
            break;
        }
    }
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete) Get(org.apache.hadoop.hbase.client.Get) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
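The two retry loops above share the same shape and could be factored into a small helper. The sketch below is an illustration, not part of the HBase test: waitFor and its parameters are assumed names, and fail comes from org.junit.Assert.

    // Polls the destination table with the given Get until the predicate on the
    // Result holds, or fails the test after maxRetries attempts.
    private static void waitFor(Table table, Get get, java.util.function.Predicate<Result> done,
            int maxRetries, long sleepMs, String failureMessage) throws Exception {
        for (int i = 0; i < maxRetries; i++) {
            if (done.test(table.get(get))) {
                return;
            }
            Thread.sleep(sleepMs);
        }
        fail(failureMessage);
    }

With it, the put check becomes waitFor(htable2, new Get(row), r -> !r.isEmpty(), NB_RETRIES, SLEEP_TIME, "Waited too much time for put replication") and the delete check becomes waitFor(htable2, new Get(row), Result::isEmpty, NB_RETRIES, SLEEP_TIME, "Waited too much time for del replication").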

Aggregations

Get (org.apache.hadoop.hbase.client.Get): 629
Result (org.apache.hadoop.hbase.client.Result): 444
Test (org.junit.Test): 282
Table (org.apache.hadoop.hbase.client.Table): 226
Put (org.apache.hadoop.hbase.client.Put): 201
IOException (java.io.IOException): 134
Cell (org.apache.hadoop.hbase.Cell): 121
TableName (org.apache.hadoop.hbase.TableName): 98
Connection (org.apache.hadoop.hbase.client.Connection): 91
Delete (org.apache.hadoop.hbase.client.Delete): 76
ArrayList (java.util.ArrayList): 75
Configuration (org.apache.hadoop.conf.Configuration): 72
Scan (org.apache.hadoop.hbase.client.Scan): 57
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 52
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 38
CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult): 38
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 36
Map (java.util.Map): 34
Path (org.apache.hadoop.fs.Path): 34
Admin (org.apache.hadoop.hbase.client.Admin): 33
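The counts above reflect the typical shape of a client-side read: a Connection and Table, a Get, and a Result to inspect. A minimal, self-contained sketch with placeholder table and column names (not drawn from any of the examples above):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class GetExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = conn.getTable(TableName.valueOf("example_table"))) {
            Get get = new Get(Bytes.toBytes("row-1"));
            // Restrict the read to a single column instead of the whole row.
            get.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"));
            Result result = table.get(get);
            byte[] value = result.getValue(Bytes.toBytes("f"), Bytes.toBytes("q"));
            System.out.println(value == null ? "no value" : Bytes.toString(value));
        }
    }
}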