
Example 21 with TGet

Use of org.apache.hadoop.hbase.thrift2.generated.TGet in project hbase by apache.

In the class TestThriftHBaseServiceHandler, the method testPutTTL:

@Test
public void testPutTTL() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    byte[] rowName = "testPutTTL".getBytes();
    ByteBuffer table = wrap(tableAname);
    List<TColumnValue> columnValues = new ArrayList<>(1);
    // Add some dummy data
    columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(Bytes.toBytes(1L))));
    TPut put = new TPut(wrap(rowName), columnValues);
    put.setColumnValues(columnValues);
    Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
    // Time in ms for the kv's to live.
    long ttlTimeMs = 2000L;
    // the _ttl attribute is a number of ms ttl for key values in this put.
    attributes.put(wrap(Bytes.toBytes("_ttl")), wrap(Bytes.toBytes(ttlTimeMs)));
    // Attach the attributes
    put.setAttributes(attributes);
    // Send it.
    handler.put(table, put);
    // Now get the data back
    TGet getOne = new TGet(wrap(rowName));
    TResult resultOne = handler.get(table, getOne);
    // It's there.
    assertArrayEquals(rowName, resultOne.getRow());
    assertEquals(1, resultOne.getColumnValuesSize());
    // Sleep 30 seconds just to make 100% sure that the key value should be expired.
    Thread.sleep(ttlTimeMs * 15);
    TGet getTwo = new TGet(wrap(rowName));
    TResult resultTwo = handler.get(table, getTwo);
    // Nothing should be there since it's ttl'd out.
    assertNull(resultTwo.getRow());
    assertEquals(0, resultTwo.getColumnValuesSize());
}
Also used : HashMap(java.util.HashMap) TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) ArrayList(java.util.ArrayList) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) Test(org.junit.Test)
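The test above calls the handler directly. As a complementary illustration, here is a minimal sketch of issuing the same TTL-attributed put from a standalone Thrift2 client. It assumes an HBase Thrift2 server reachable on localhost:9090 and a hypothetical table/column layout (example_table, family f, qualifier q); transport framing and security wrappers depend on how the server was started.

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.thrift2.generated.TColumnValue;
import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.hadoop.hbase.thrift2.generated.TPut;
import org.apache.hadoop.hbase.thrift2.generated.TResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class TtlPutClientSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection details; adjust the host/port (and add a framed or
        // compact wrapper) to match how your Thrift2 server was started.
        TTransport transport = new TSocket("localhost", 9090);
        transport.open();
        THBaseService.Client client = new THBaseService.Client(new TBinaryProtocol(transport));

        ByteBuffer table = ByteBuffer.wrap(Bytes.toBytes("example_table"));
        byte[] row = Bytes.toBytes("row1");

        List<TColumnValue> columnValues = new ArrayList<>(1);
        columnValues.add(new TColumnValue(ByteBuffer.wrap(Bytes.toBytes("f")),
            ByteBuffer.wrap(Bytes.toBytes("q")), ByteBuffer.wrap(Bytes.toBytes(1L))));
        TPut put = new TPut(ByteBuffer.wrap(row), columnValues);

        // As in the test, the "_ttl" attribute is the per-put TTL in milliseconds.
        Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
        attributes.put(ByteBuffer.wrap(Bytes.toBytes("_ttl")),
            ByteBuffer.wrap(Bytes.toBytes(2000L)));
        put.setAttributes(attributes);

        client.put(table, put);

        // Read the row back before the TTL expires.
        TResult result = client.get(table, new TGet(ByteBuffer.wrap(row)));
        System.out.println("columns returned: " + result.getColumnValuesSize());

        transport.close();
    }
}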

Example 22 with TGet

Use of org.apache.hadoop.hbase.thrift2.generated.TGet in project hbase by apache.

In the class TestThriftHBaseServiceHandler, the method testPutGetMultiple:

@Test
public void testPutGetMultiple() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    ByteBuffer table = wrap(tableAname);
    byte[] rowName1 = "testPutGetMultiple1".getBytes();
    byte[] rowName2 = "testPutGetMultiple2".getBytes();
    List<TColumnValue> columnValues = new ArrayList<>(2);
    columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
    columnValues.add(new TColumnValue(wrap(familyBname), wrap(qualifierBname), wrap(valueBname)));
    List<TPut> puts = new ArrayList<>(2);
    puts.add(new TPut(wrap(rowName1), columnValues));
    puts.add(new TPut(wrap(rowName2), columnValues));
    handler.putMultiple(table, puts);
    List<TGet> gets = new ArrayList<>(2);
    gets.add(new TGet(wrap(rowName1)));
    gets.add(new TGet(wrap(rowName2)));
    List<TResult> results = handler.getMultiple(table, gets);
    assertEquals(2, results.size());
    assertArrayEquals(rowName1, results.get(0).getRow());
    assertTColumnValuesEqual(columnValues, results.get(0).getColumnValues());
    assertArrayEquals(rowName2, results.get(1).getRow());
    assertTColumnValuesEqual(columnValues, results.get(1).getColumnValues());
}
Also used : TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) ArrayList(java.util.ArrayList) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) Test(org.junit.Test)
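getMultiple accepts any TGet, so the same batch round trip can be restricted to specific columns. Below is a minimal sketch under the assumption that a THBaseService.Iface (the handler, or a remote client stub) is already available; the helper class and method names are illustrative only.

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.thrift2.generated.TColumn;
import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.hadoop.hbase.thrift2.generated.TResult;

public class MultiGetSketch {
    // Fetches a single column for each of the given rows in one round trip.
    static List<TResult> getColumnForRows(THBaseService.Iface service, ByteBuffer table,
            List<byte[]> rows, byte[] family, byte[] qualifier) throws Exception {
        List<TGet> gets = new ArrayList<>(rows.size());
        for (byte[] row : rows) {
            TGet get = new TGet(ByteBuffer.wrap(row));
            // Restrict the get to one family/qualifier instead of the whole row.
            TColumn column = new TColumn(ByteBuffer.wrap(family));
            column.setQualifier(qualifier);
            List<TColumn> columns = new ArrayList<>(1);
            columns.add(column);
            get.setColumns(columns);
            gets.add(get);
        }
        // Results correspond positionally to the list of gets, as in the test above.
        return service.getMultiple(table, gets);
    }
}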

Example 23 with TGet

Use of org.apache.hadoop.hbase.thrift2.generated.TGet in project hbase by apache.

In the class TestThriftHBaseServiceHandler, the method testAttribute:

@Test
public void testAttribute() throws Exception {
    byte[] rowName = "testAttribute".getBytes();
    byte[] attributeKey = "attribute1".getBytes();
    byte[] attributeValue = "value1".getBytes();
    Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
    attributes.put(wrap(attributeKey), wrap(attributeValue));
    TGet tGet = new TGet(wrap(rowName));
    tGet.setAttributes(attributes);
    Get get = getFromThrift(tGet);
    assertArrayEquals(get.getAttribute("attribute1"), attributeValue);
    List<TColumnValue> columnValues = new ArrayList<>(1);
    columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
    TPut tPut = new TPut(wrap(rowName), columnValues);
    tPut.setAttributes(attributes);
    Put put = putFromThrift(tPut);
    assertArrayEquals(put.getAttribute("attribute1"), attributeValue);
    TScan tScan = new TScan();
    tScan.setAttributes(attributes);
    Scan scan = scanFromThrift(tScan);
    assertArrayEquals(scan.getAttribute("attribute1"), attributeValue);
    List<TColumnIncrement> incrementColumns = new ArrayList<>(1);
    incrementColumns.add(new TColumnIncrement(wrap(familyAname), wrap(qualifierAname)));
    TIncrement tIncrement = new TIncrement(wrap(rowName), incrementColumns);
    tIncrement.setAttributes(attributes);
    Increment increment = incrementFromThrift(tIncrement);
    assertArrayEquals(increment.getAttribute("attribute1"), attributeValue);
    TDelete tDelete = new TDelete(wrap(rowName));
    tDelete.setAttributes(attributes);
    Delete delete = deleteFromThrift(tDelete);
    assertArrayEquals(delete.getAttribute("attribute1"), attributeValue);
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) TDelete(org.apache.hadoop.hbase.thrift2.generated.TDelete) HashMap(java.util.HashMap) TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) ArrayList(java.util.ArrayList) TIncrement(org.apache.hadoop.hbase.thrift2.generated.TIncrement) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) ByteBuffer(java.nio.ByteBuffer) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) Put(org.apache.hadoop.hbase.client.Put) Get(org.apache.hadoop.hbase.client.Get) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) TColumnIncrement(org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement) Increment(org.apache.hadoop.hbase.client.Increment) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)
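The conversion utilities carry operation attributes through unchanged, so a Thrift client can attach arbitrary attributes and server-side code (for example a coprocessor) can read them from the converted Get/Put/Scan/Increment/Delete via getAttribute. A small sketch of building such a TGet on the client side; the helper class and attribute name are hypothetical.

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.util.Bytes;

public class AttributeSketch {
    // Attaches a custom operation attribute to a TGet. The Thrift2 handler copies
    // attributes onto the server-side Get, where they can be read back with
    // get.getAttribute(name), as the test above verifies.
    static TGet getWithAttribute(byte[] row, String name, byte[] value) {
        TGet get = new TGet(ByteBuffer.wrap(row));
        Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
        attributes.put(ByteBuffer.wrap(Bytes.toBytes(name)), ByteBuffer.wrap(value));
        get.setAttributes(attributes);
        return get;
    }
}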

Example 24 with TGet

Use of org.apache.hadoop.hbase.thrift2.generated.TGet in project hbase by apache.

In the class TestThriftHBaseServiceHandler, the method testDeleteMultiple:

@Test
public void testDeleteMultiple() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    ByteBuffer table = wrap(tableAname);
    byte[] rowName1 = "testDeleteMultiple1".getBytes();
    byte[] rowName2 = "testDeleteMultiple2".getBytes();
    List<TColumnValue> columnValues = new ArrayList<>(2);
    columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
    columnValues.add(new TColumnValue(wrap(familyBname), wrap(qualifierBname), wrap(valueBname)));
    List<TPut> puts = new ArrayList<>(2);
    puts.add(new TPut(wrap(rowName1), columnValues));
    puts.add(new TPut(wrap(rowName2), columnValues));
    handler.putMultiple(table, puts);
    List<TDelete> deletes = new ArrayList<>(2);
    deletes.add(new TDelete(wrap(rowName1)));
    deletes.add(new TDelete(wrap(rowName2)));
    List<TDelete> deleteResults = handler.deleteMultiple(table, deletes);
    // 0 means they were all successfully applied
    assertEquals(0, deleteResults.size());
    assertFalse(handler.exists(table, new TGet(wrap(rowName1))));
    assertFalse(handler.exists(table, new TGet(wrap(rowName2))));
}
Also used : TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) ArrayList(java.util.ArrayList) TDelete(org.apache.hadoop.hbase.thrift2.generated.TDelete) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)
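deleteMultiple returns the TDelete objects that could not be applied, which is why the test asserts an empty result list. Here is a minimal sketch of the same pattern as a reusable helper, assuming a THBaseService.Iface is available; the class and method names are illustrative only.

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.thrift2.generated.TDelete;
import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.thrift2.generated.THBaseService;

public class DeleteRowsSketch {
    // Deletes whole rows in one batch, then verifies each row is gone.
    // An empty list from deleteMultiple means every delete was applied.
    static boolean deleteRows(THBaseService.Iface service, ByteBuffer table, List<byte[]> rows)
            throws Exception {
        List<TDelete> deletes = new ArrayList<>(rows.size());
        for (byte[] row : rows) {
            deletes.add(new TDelete(ByteBuffer.wrap(row)));
        }
        boolean allApplied = service.deleteMultiple(table, deletes).isEmpty();
        for (byte[] row : rows) {
            if (service.exists(table, new TGet(ByteBuffer.wrap(row)))) {
                return false;   // row still present
            }
        }
        return allApplied;
    }
}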

Example 25 with TGet

Use of org.apache.hadoop.hbase.thrift2.generated.TGet in project hbase by apache.

In the class TestThriftHBaseServiceHandler, the method testExceptionType:

private void testExceptionType(THBaseService.Iface handler, ThriftMetrics metrics, ByteBuffer tTableName, byte[] rowkey, ErrorThrowingGetObserver.ErrorType errorType) {
    long preGetCounter = metricsHelper.getCounter("get_num_ops", metrics.getSource());
    String exceptionKey = errorType.getMetricName();
    long preExceptionCounter = metricsHelper.checkCounterExists(exceptionKey, metrics.getSource()) ? metricsHelper.getCounter(exceptionKey, metrics.getSource()) : 0;
    TGet tGet = new TGet(wrap(rowkey));
    Map<ByteBuffer, ByteBuffer> attributes = new HashMap<>();
    attributes.put(wrap(Bytes.toBytes(ErrorThrowingGetObserver.SHOULD_ERROR_ATTRIBUTE)), wrap(Bytes.toBytes(errorType.name())));
    tGet.setAttributes(attributes);
    try {
        TResult tResult = handler.get(tTableName, tGet);
        fail("Get with error attribute should have thrown an exception");
    } catch (TException e) {
        LOG.info("Received exception: ", e);
        metricsHelper.assertCounter("get_num_ops", preGetCounter + 1, metrics.getSource());
        metricsHelper.assertCounter(exceptionKey, preExceptionCounter + 1, metrics.getSource());
    }
}
Also used : TException(org.apache.thrift.TException) TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) HashMap(java.util.HashMap) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult)
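On the wire, a server-side failure like the one injected by ErrorThrowingGetObserver reaches the client as a TIOError (a generated TException subclass carrying the server's error message), while transport or protocol problems surface as other TExceptions. A minimal sketch of client-side handling, assuming a THBaseService.Iface stub; the class and method names are illustrative only.

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.hadoop.hbase.thrift2.generated.TIOError;
import org.apache.hadoop.hbase.thrift2.generated.TResult;
import org.apache.thrift.TException;

public class GetErrorHandlingSketch {
    // Returns the row's TResult, or null if the get failed.
    static TResult tryGet(THBaseService.Iface service, ByteBuffer table, byte[] row) {
        try {
            return service.get(table, new TGet(ByteBuffer.wrap(row)));
        } catch (TIOError e) {
            // HBase-side error, e.g. an exception thrown by a coprocessor.
            System.err.println("HBase-side error: " + e.getMessage());
            return null;
        } catch (TException e) {
            // Thrift transport/protocol error.
            System.err.println("Thrift error: " + e.getMessage());
            return null;
        }
    }
}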

Aggregations

TGet (org.apache.hadoop.hbase.thrift2.generated.TGet): 26
ByteBuffer (java.nio.ByteBuffer): 25
TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue): 24
TPut (org.apache.hadoop.hbase.thrift2.generated.TPut): 24
Test (org.junit.Test): 23
ArrayList (java.util.ArrayList): 22
TResult (org.apache.hadoop.hbase.thrift2.generated.TResult): 20
TDelete (org.apache.hadoop.hbase.thrift2.generated.TDelete): 7
TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn): 5
HashMap (java.util.HashMap): 4
TAuthorization (org.apache.hadoop.hbase.thrift2.generated.TAuthorization): 4
TCellVisibility (org.apache.hadoop.hbase.thrift2.generated.TCellVisibility): 4
TColumnIncrement (org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement): 4
THBaseService (org.apache.hadoop.hbase.thrift2.generated.THBaseService): 4
TIncrement (org.apache.hadoop.hbase.thrift2.generated.TIncrement): 4
Put (org.apache.hadoop.hbase.client.Put): 3
ThriftMetrics (org.apache.hadoop.hbase.thrift.ThriftMetrics): 3
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 2
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 2
TableName (org.apache.hadoop.hbase.TableName): 2