Example 16 with Put

Use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From class TestMapReduceExamples, method testSampleUploader:

/**
   * Test SampleUploader from examples
   */
@SuppressWarnings("unchecked")
@Test
public void testSampleUploader() throws Exception {
    Configuration configuration = new Configuration();
    Uploader uploader = new Uploader();
    Mapper<LongWritable, Text, ImmutableBytesWritable, Put>.Context ctx = mock(Context.class);
    doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
            Put put = (Put) invocation.getArguments()[1];
            assertEquals("row", Bytes.toString(writer.get()));
            assertEquals("row", Bytes.toString(put.getRow()));
            return null;
        }
    }).when(ctx).write(any(ImmutableBytesWritable.class), any(Put.class));
    uploader.map(null, new Text("row,family,qualifier,value"), ctx);
    Path dir = util.getDataTestDirOnTestFS("testSampleUploader");
    String[] args = { dir.toString(), "simpleTable" };
    Job job = SampleUploader.configureJob(configuration, args);
    assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());
}
Also used: Path(org.apache.hadoop.fs.Path) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Configuration(org.apache.hadoop.conf.Configuration) Text(org.apache.hadoop.io.Text) Put(org.apache.hadoop.hbase.client.Put) Mapper(org.apache.hadoop.mapreduce.Mapper) InvocationOnMock(org.mockito.invocation.InvocationOnMock) LongWritable(org.apache.hadoop.io.LongWritable) Job(org.apache.hadoop.mapreduce.Job) Uploader(org.apache.hadoop.hbase.mapreduce.SampleUploader.Uploader) Test(org.junit.Test)
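
The Uploader mapper under test turns each text line into a row key and a Put, which is why the mock Context asserts that both the emitted key and the Put's row decode to "row". For context, a minimal sketch of a mapper in that spirit, assuming the fixed row,family,qualifier,value line layout exercised above; the class name CsvToPutMapper and the malformed-input handling are ours, not HBase's:

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper mirroring the shape of SampleUploader.Uploader above.
public class CsvToPutMapper
        extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {

    @Override
    protected void map(LongWritable offset, Text line, Context context)
            throws IOException, InterruptedException {
        // Expect exactly four comma-separated fields: row, family, qualifier, value.
        String[] fields = line.toString().split(",");
        if (fields.length != 4) {
            // Skip malformed input rather than failing the task.
            return;
        }
        byte[] row = Bytes.toBytes(fields[0]);
        Put put = new Put(row);
        put.addColumn(Bytes.toBytes(fields[1]), Bytes.toBytes(fields[2]),
            Bytes.toBytes(fields[3]));
        // The output key is the row; TableOutputFormat applies the Put.
        context.write(new ImmutableBytesWritable(row), put);
    }
}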

Example 17 with Put

Use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From class RSGroupInfoManagerImpl, method flushConfigTable:

private synchronized Map<TableName, String> flushConfigTable(Map<String, RSGroupInfo> groupMap) throws IOException {
    Map<TableName, String> newTableMap = Maps.newHashMap();
    List<Mutation> mutations = Lists.newArrayList();
    // populate deletes
    for (String groupName : prevRSGroups) {
        if (!groupMap.containsKey(groupName)) {
            Delete d = new Delete(Bytes.toBytes(groupName));
            mutations.add(d);
        }
    }
    // populate puts
    for (RSGroupInfo rsGroupInfo : groupMap.values()) {
        RSGroupProtos.RSGroupInfo proto = RSGroupProtobufUtil.toProtoGroupInfo(rsGroupInfo);
        Put p = new Put(Bytes.toBytes(rsGroupInfo.getName()));
        p.addColumn(META_FAMILY_BYTES, META_QUALIFIER_BYTES, proto.toByteArray());
        mutations.add(p);
        for (TableName entry : rsGroupInfo.getTables()) {
            newTableMap.put(entry, rsGroupInfo.getName());
        }
    }
    if (mutations.size() > 0) {
        multiMutate(mutations);
    }
    return newTableMap;
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete) TableName(org.apache.hadoop.hbase.TableName) RSGroupProtos(org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
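
flushConfigTable collects Deletes for removed groups and Puts for current ones into a single list, then hands the list to multiMutate (Example 18) so the whole update commits together. For contrast, a minimal sketch of the ordinary, non-atomic way to apply a mixed batch through the client API, assuming an existing Connection and a hypothetical table name "rsgroup_demo":

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class MixedBatchSketch {
    // Apply a Put and a Delete in one round trip. Unlike multiMutate,
    // Table.batch gives no atomicity across rows: one action can fail
    // while the other succeeds.
    static void applyBatch(Connection connection) throws Exception {
        // Hypothetical table name, for illustration only.
        try (Table table = connection.getTable(TableName.valueOf("rsgroup_demo"))) {
            List<Row> actions = new ArrayList<>();
            Put put = new Put(Bytes.toBytes("group1"));
            put.addColumn(Bytes.toBytes("m"), Bytes.toBytes("i"), Bytes.toBytes("payload"));
            actions.add(put);
            actions.add(new Delete(Bytes.toBytes("removed-group")));
            Object[] results = new Object[actions.size()];
            table.batch(actions, results);
        }
    }
}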

Example 18 with Put

Use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From class RSGroupInfoManagerImpl, method multiMutate:

private void multiMutate(List<Mutation> mutations) throws IOException {
    // RPC channel to the region hosting the group table's fixed row key.
    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);
    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder = MultiRowMutationProtos.MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException("multiMutate doesn't support " + mutation.getClass().getName());
        }
    }
    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service = MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);
    try {
        service.mutateRows(null, mmrBuilder.build());
    } catch (ServiceException ex) {
        // Convert back to an IOException and rethrow; without the throw,
        // the failure would be silently swallowed.
        throw ProtobufUtil.toIOException(ex);
    }
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete) ServiceException(com.google.protobuf.ServiceException) CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
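
multiMutate exists because the mutations span multiple rows and must commit together, which plain client calls cannot guarantee; the MultiRowMutationService endpoint executes them atomically inside the single region hosting ROW_KEY. When every mutation targets one row, the stock API already provides the same guarantee. A minimal sketch, with placeholder row, family, and qualifier names:

import java.io.IOException;

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class SingleRowAtomicUpdate {
    // Atomically add one column and remove another on the same row.
    static void swapColumns(Table table) throws IOException {
        // Placeholder row/family/qualifier bytes.
        byte[] row = Bytes.toBytes("row1");
        RowMutations mutations = new RowMutations(row);
        Put put = new Put(row);
        put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("new"), Bytes.toBytes("v"));
        mutations.add(put);
        Delete delete = new Delete(row);
        delete.addColumn(Bytes.toBytes("f"), Bytes.toBytes("old"));
        mutations.add(delete);
        // All-or-nothing because every mutation targets the same row.
        table.mutateRow(mutations);
    }
}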

Example 19 with Put

Use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From class TestRemoteHTableRetries, method testCheckAndPut:

@Test
public void testCheckAndPut() throws Exception {
    testTimedOutCall(new CallExecutor() {

        @Override
        public void run() throws Exception {
            Put put = new Put(ROW_1);
            put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
            remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, put);
        }
    });
    verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class));
}
Also used: IOException(java.io.IOException) Put(org.apache.hadoop.hbase.client.Put) Test(org.junit.Test)
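
The call under test is the REST client's checkAndPut, which the test forces to time out so it can verify the retry count. As a reminder of the semantics being proxied, a minimal sketch against a plain Table, with placeholder row, family, qualifier, and value bytes:

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndPutSketch {
    // Apply the Put only if the cell still holds the expected value.
    static boolean updateIfUnchanged(Table table) throws IOException {
        // Placeholder row/family/qualifier/value bytes.
        byte[] row = Bytes.toBytes("row1");
        byte[] family = Bytes.toBytes("f");
        byte[] qualifier = Bytes.toBytes("q");
        Put put = new Put(row);
        put.addColumn(family, qualifier, Bytes.toBytes("new-value"));
        // Returns false, without writing, if another writer got there first.
        return table.checkAndPut(row, family, qualifier, Bytes.toBytes("expected"), put);
    }
}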

Example 20 with Put

Use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From class TestRemoteTable, method testDelete:

@Test
public void testDelete() throws IOException {
    Put put = new Put(ROW_3);
    put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
    put.addColumn(COLUMN_2, QUALIFIER_2, VALUE_2);
    remoteTable.put(put);
    Get get = new Get(ROW_3);
    get.addFamily(COLUMN_1);
    get.addFamily(COLUMN_2);
    Result result = remoteTable.get(get);
    byte[] value1 = result.getValue(COLUMN_1, QUALIFIER_1);
    byte[] value2 = result.getValue(COLUMN_2, QUALIFIER_2);
    assertNotNull(value1);
    assertTrue(Bytes.equals(VALUE_1, value1));
    assertNotNull(value2);
    assertTrue(Bytes.equals(VALUE_2, value2));
    // Delete only one column; the other column's value should survive.
    Delete delete = new Delete(ROW_3);
    delete.addColumn(COLUMN_2, QUALIFIER_2);
    remoteTable.delete(delete);
    get = new Get(ROW_3);
    get.addFamily(COLUMN_1);
    get.addFamily(COLUMN_2);
    result = remoteTable.get(get);
    value1 = result.getValue(COLUMN_1, QUALIFIER_1);
    value2 = result.getValue(COLUMN_2, QUALIFIER_2);
    assertNotNull(value1);
    assertTrue(Bytes.equals(VALUE_1, value1));
    assertNull(value2);
    // Row delete scoped to timestamp 1; cells written later survive.
    delete = new Delete(ROW_3);
    delete.setTimestamp(1L);
    remoteTable.delete(delete);
    get = new Get(ROW_3);
    get.addFamily(COLUMN_1);
    get.addFamily(COLUMN_2);
    result = remoteTable.get(get);
    value1 = result.getValue(COLUMN_1, QUALIFIER_1);
    value2 = result.getValue(COLUMN_2, QUALIFIER_2);
    assertNotNull(value1);
    assertTrue(Bytes.equals(VALUE_1, value1));
    assertNull(value2);
    // Unscoped row delete removes everything.
    delete = new Delete(ROW_3);
    remoteTable.delete(delete);
    get = new Get(ROW_3);
    get.addFamily(COLUMN_1);
    get.addFamily(COLUMN_2);
    result = remoteTable.get(get);
    value1 = result.getValue(COLUMN_1, QUALIFIER_1);
    value2 = result.getValue(COLUMN_2, QUALIFIER_2);
    assertNull(value1);
    assertNull(value2);
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete) Get(org.apache.hadoop.hbase.client.Get) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
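
testDelete walks through three progressively wider delete scopes: a single column, the row up to a timestamp, then the whole row. A minimal sketch isolating just those three constructions, with placeholder row, family, and qualifier names:

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteScopes {
    public static void main(String[] args) {
        byte[] row = Bytes.toBytes("row3");

        // 1. Remove only the newest version of a single cell.
        Delete oneCell = new Delete(row);
        oneCell.addColumn(Bytes.toBytes("f2"), Bytes.toBytes("q2"));

        // 2. Remove everything in the row written at or before timestamp 1;
        //    cells with later timestamps survive, as the test verifies.
        Delete upToTimestamp = new Delete(row);
        upToTimestamp.setTimestamp(1L);

        // 3. Remove the entire row: all families, columns, and versions.
        Delete wholeRow = new Delete(row);
    }
}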

Aggregations

Put (org.apache.hadoop.hbase.client.Put): 849
Test (org.junit.Test): 414
Table (org.apache.hadoop.hbase.client.Table): 237
ArrayList (java.util.ArrayList): 216
Result (org.apache.hadoop.hbase.client.Result): 183
Scan (org.apache.hadoop.hbase.client.Scan): 164
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 149
Delete (org.apache.hadoop.hbase.client.Delete): 146
Cell (org.apache.hadoop.hbase.Cell): 141
IOException (java.io.IOException): 134
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 134
TableName (org.apache.hadoop.hbase.TableName): 118
Get (org.apache.hadoop.hbase.client.Get): 114
KeyValue (org.apache.hadoop.hbase.KeyValue): 98
Configuration (org.apache.hadoop.conf.Configuration): 79
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 75
Connection (org.apache.hadoop.hbase.client.Connection): 68
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 60
Admin (org.apache.hadoop.hbase.client.Admin): 54
Mutation (org.apache.hadoop.hbase.client.Mutation): 53