
Example 11 with Table

Use of org.apache.hadoop.hbase.client.Table in project hbase by apache.

From the class TestBatchCoprocessorEndpoint, method setupBeforeClass.

@BeforeClass
public static void setupBeforeClass() throws Exception {
    // set configuration to indicate which coprocessors should be loaded
    Configuration conf = util.getConfiguration();
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName(), ProtobufCoprocessorService.class.getName(), ColumnAggregationEndpointWithErrors.class.getName(), ColumnAggregationEndpointNullResponse.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, ProtobufCoprocessorService.class.getName());
    util.startMiniCluster(2);
    Admin admin = util.getAdmin();
    HTableDescriptor desc = new HTableDescriptor(TEST_TABLE);
    desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
    admin.createTable(desc, new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] });
    util.waitUntilAllRegionsAssigned(TEST_TABLE);
    admin.close();
    Table table = util.getConnection().getTable(TEST_TABLE);
    for (int i = 0; i < ROWSIZE; i++) {
        Put put = new Put(ROWS[i]);
        put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
        table.put(put);
    }
    table.close();
}
Also used: Table (org.apache.hadoop.hbase.client.Table), Configuration (org.apache.hadoop.conf.Configuration), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), Admin (org.apache.hadoop.hbase.client.Admin), Put (org.apache.hadoop.hbase.client.Put), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), BeforeClass (org.junit.BeforeClass)
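
The endpoints here are registered cluster-wide through REGION_COPROCESSOR_CONF_KEY before the mini cluster starts. As an alternative, a coprocessor can also be attached to a single table through its descriptor. The following is a minimal sketch, not taken from the HBase sources; the helper name is ours, and it assumes the same admin, TEST_TABLE, TEST_FAMILY, and ROWS constants as the setup above.

public static void createTestTableWithEndpoint(Admin admin) throws Exception {
    HTableDescriptor desc = new HTableDescriptor(TEST_TABLE);
    desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
    // register the endpoint for this table's regions only, instead of cluster-wide
    // via hbase.coprocessor.region.classes
    desc.addCoprocessor(org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName());
    admin.createTable(desc, new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] });
}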

Example 12 with Table

Use of org.apache.hadoop.hbase.client.Table in project hbase by apache.

From the class TestBatchCoprocessorEndpoint, method testAggregationNullResponse.

@Test
public void testAggregationNullResponse() throws Throwable {
    Table table = util.getConnection().getTable(TEST_TABLE);
    ColumnAggregationNullResponseSumRequest.Builder builder = ColumnAggregationNullResponseSumRequest.newBuilder();
    builder.setFamily(ByteString.copyFrom(TEST_FAMILY));
    if (TEST_QUALIFIER != null && TEST_QUALIFIER.length > 0) {
        builder.setQualifier(ByteString.copyFrom(TEST_QUALIFIER));
    }
    Map<byte[], ColumnAggregationNullResponseSumResponse> results = table.batchCoprocessorService(ColumnAggregationServiceNullResponse.getDescriptor().findMethodByName("sum"), builder.build(), ROWS[0], ROWS[ROWS.length - 1], ColumnAggregationNullResponseSumResponse.getDefaultInstance());
    int sumResult = 0;
    int expectedResult = 0;
    for (Map.Entry<byte[], ColumnAggregationNullResponseSumResponse> e : results.entrySet()) {
        LOG.info("Got value " + e.getValue().getSum() + " for region " + Bytes.toStringBinary(e.getKey()));
        sumResult += e.getValue().getSum();
    }
    for (int i = 0; i < rowSeperator2; i++) {
        expectedResult += i;
    }
    assertEquals("Invalid result", expectedResult, sumResult);
    table.close();
}
Also used: ColumnAggregationNullResponseSumResponse (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse), Table (org.apache.hadoop.hbase.client.Table), ColumnAggregationNullResponseSumRequest (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest), Map (java.util.Map), TreeMap (java.util.TreeMap), Test (org.junit.Test)
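
Table also offers a callback-based overload of batchCoprocessorService that hands each region's response to a Batch.Callback as it arrives, instead of collecting everything into a Map first. The following is a minimal sketch of the same sum, assuming the same table, builder, and ROWS constants as the test above; the extra imports are java.util.concurrent.atomic.AtomicInteger and org.apache.hadoop.hbase.client.coprocessor.Batch.

final AtomicInteger sumFromCallback = new AtomicInteger(0);
table.batchCoprocessorService(
    ColumnAggregationServiceNullResponse.getDescriptor().findMethodByName("sum"),
    builder.build(), ROWS[0], ROWS[ROWS.length - 1],
    ColumnAggregationNullResponseSumResponse.getDefaultInstance(),
    new Batch.Callback<ColumnAggregationNullResponseSumResponse>() {
        @Override
        public void update(byte[] region, byte[] row, ColumnAggregationNullResponseSumResponse response) {
            // regions that produced no response are skipped rather than summed
            if (response != null) {
                sumFromCallback.addAndGet(response.getSum());
            }
        }
    });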

Example 13 with Table

Use of org.apache.hadoop.hbase.client.Table in project hbase by apache.

From the class TestCoprocessorEndpoint, method testCoprocessorServiceNullResponse.

@Test
public void testCoprocessorServiceNullResponse() throws Throwable {
    Table table = util.getConnection().getTable(TEST_TABLE);
    List<HRegionLocation> regions;
    try (RegionLocator rl = util.getConnection().getRegionLocator(TEST_TABLE)) {
        regions = rl.getAllRegionLocations();
    }
    final TestProtos.EchoRequestProto request = TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
    try {
        // scan: for all regions
        final RpcController controller = new ServerRpcController();
        // test that null results are supported
        Map<byte[], String> results = table.coprocessorService(TestRpcServiceProtos.TestProtobufRpcProto.class, ROWS[0], ROWS[ROWS.length - 1], new Batch.Call<TestRpcServiceProtos.TestProtobufRpcProto, String>() {

            public String call(TestRpcServiceProtos.TestProtobufRpcProto instance) throws IOException {
                CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
                instance.echo(controller, request, callback);
                TestProtos.EchoResponseProto response = callback.get();
                LOG.debug("Batch.Call got result " + response);
                return null;
            }
        });
        for (Map.Entry<byte[], String> e : results.entrySet()) {
            LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
        }
        assertEquals(3, results.size());
        for (HRegionLocation region : regions) {
            HRegionInfo info = region.getRegionInfo();
            LOG.info("Region info is " + info.getRegionNameAsString());
            assertTrue(results.containsKey(info.getRegionName()));
            assertNull(results.get(info.getRegionName()));
        }
    } finally {
        table.close();
    }
}
Also used: TestRpcServiceProtos (org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos), RegionLocator (org.apache.hadoop.hbase.client.RegionLocator), Table (org.apache.hadoop.hbase.client.Table), IOException (java.io.IOException), TestProtos (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos), ServerRpcController (org.apache.hadoop.hbase.ipc.ServerRpcController), RpcController (com.google.protobuf.RpcController), HRegionInfo (org.apache.hadoop.hbase.HRegionInfo), HRegionLocation (org.apache.hadoop.hbase.HRegionLocation), CoprocessorRpcUtils (org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils), Batch (org.apache.hadoop.hbase.client.coprocessor.Batch), Map (java.util.Map), TreeMap (java.util.TreeMap), Test (org.junit.Test)
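
The Batch.Call above always returns null and never inspects the ServerRpcController, so a server-side exception would be silently dropped. A small hypothetical helper, not part of the test, shows how the controller's captured error could be rethrown after callback.get().

// Hypothetical helper: surfaces any exception the coprocessor call recorded on the
// ServerRpcController instead of letting it pass unnoticed.
private static void rethrowIfFailed(ServerRpcController controller) throws IOException {
    if (controller.failedOnException()) {
        throw controller.getFailedOn();
    }
}

Calling rethrowIfFailed(controller) right after callback.get() in the call(...) body would turn a region-side failure into a test failure instead of a null result.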

Example 14 with Table

Use of org.apache.hadoop.hbase.client.Table in project hbase by apache.

From the class TestServerCustomProtocol, method before.

@Before
public void before() throws Exception {
    final byte[][] SPLIT_KEYS = new byte[][] { ROW_B, ROW_C };
    Table table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
    Put puta = new Put(ROW_A);
    puta.addColumn(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
    table.put(puta);
    Put putb = new Put(ROW_B);
    putb.addColumn(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
    table.put(putb);
    Put putc = new Put(ROW_C);
    putc.addColumn(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
    table.put(putc);
}
Also used: Table (org.apache.hadoop.hbase.client.Table), Put (org.apache.hadoop.hbase.client.Put), Before (org.junit.Before)
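
The three rows are written one Put at a time; Table.put(List&lt;Put&gt;) accepts them in a single batched call. The following is a minimal sketch of the same setup, assuming the same ROW_A, ROW_B, ROW_C, and TEST_FAMILY constants as above; java.util.ArrayList and java.util.List are the extra imports.

@Before
public void before() throws Exception {
    final byte[][] SPLIT_KEYS = new byte[][] { ROW_B, ROW_C };
    Table table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
    List<Put> puts = new ArrayList<>();
    for (byte[] row : new byte[][] { ROW_A, ROW_B, ROW_C }) {
        Put put = new Put(row);
        put.addColumn(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
        puts.add(put);
    }
    // Table.put(List<Put>) submits all three mutations in one client call
    table.put(puts);
}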

Example 15 with Table

Use of org.apache.hadoop.hbase.client.Table in project hbase by apache.

From the class TestServerCustomProtocol, method testSingleMethod.

@Test
public void testSingleMethod() throws Throwable {
    try (Table table = util.getConnection().getTable(TEST_TABLE);
        RegionLocator locator = util.getConnection().getRegionLocator(TEST_TABLE)) {
        Map<byte[], String> results = table.coprocessorService(PingProtos.PingService.class, null, ROW_A, new Batch.Call<PingProtos.PingService, String>() {

            @Override
            public String call(PingProtos.PingService instance) throws IOException {
                CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.PingResponse> rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
                instance.ping(null, PingProtos.PingRequest.newBuilder().build(), rpcCallback);
                return rpcCallback.get().getPong();
            }
        });
        // Should have gotten results for 1 of the three regions only since we specified
        // rows from 1 region
        assertEquals(1, results.size());
        verifyRegionResults(locator, results, ROW_A);
        final String name = "NAME";
        results = hello(table, name, null, ROW_A);
        // Should have gotten results for 1 of the three regions only since we specified
        // rows from 1 region
        assertEquals(1, results.size());
        verifyRegionResults(locator, results, "Hello, NAME", ROW_A);
    }
}
Also used: RegionLocator (org.apache.hadoop.hbase.client.RegionLocator), Table (org.apache.hadoop.hbase.client.Table), IOException (java.io.IOException), CoprocessorRpcUtils (org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils), Batch (org.apache.hadoop.hbase.client.coprocessor.Batch), PingProtos (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos), Test (org.junit.Test)
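
The hello(table, name, null, ROW_A) call relies on a helper that is not shown in this snippet. A plausible sketch follows, assuming the generated PingProtos also defines HelloRequest (with a name field) and HelloResponse, and that PingService exposes a hello RPC; the real helper in the test class may differ in detail.

// A sketch of the hello(...) helper used above (assumed: PingProtos.HelloRequest,
// PingProtos.HelloResponse, and PingService.hello exist as generated test protobufs).
private Map<byte[], String> hello(final Table table, final String send, byte[] start, byte[] end) throws Throwable {
    return table.coprocessorService(PingProtos.PingService.class, start, end,
        new Batch.Call<PingProtos.PingService, String>() {
            @Override
            public String call(PingProtos.PingService instance) throws IOException {
                CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.HelloResponse> rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
                PingProtos.HelloRequest.Builder builder = PingProtos.HelloRequest.newBuilder();
                if (send != null) {
                    builder.setName(send);
                }
                instance.hello(null, builder.build(), rpcCallback);
                PingProtos.HelloResponse response = rpcCallback.get();
                return response != null && response.hasResponse() ? response.getResponse() : null;
            }
        });
}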

Aggregations

Table (org.apache.hadoop.hbase.client.Table): 660
Test (org.junit.Test): 421
Put (org.apache.hadoop.hbase.client.Put): 237
TableName (org.apache.hadoop.hbase.TableName): 227
Result (org.apache.hadoop.hbase.client.Result): 224
Connection (org.apache.hadoop.hbase.client.Connection): 191
Scan (org.apache.hadoop.hbase.client.Scan): 174
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 160
IOException (java.io.IOException): 157
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 134
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 119
Get (org.apache.hadoop.hbase.client.Get): 107
Delete (org.apache.hadoop.hbase.client.Delete): 99
Admin (org.apache.hadoop.hbase.client.Admin): 95
ArrayList (java.util.ArrayList): 85
Cell (org.apache.hadoop.hbase.Cell): 83
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 73
Configuration (org.apache.hadoop.conf.Configuration): 71
Path (org.apache.hadoop.fs.Path): 60
RegionLocator (org.apache.hadoop.hbase.client.RegionLocator): 59