Search in sources :

Example 31 with Put

use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From the class TestRowProcessorEndpoint, the method prepareTestData:

/**
 * Drops any pre-existing copy of the test table, recreates it, and loads the
 * two fixture rows (ROW and ROW2) used by the row-processor tests. The cell
 * count of each row is recorded in rowSize / row2Size for later assertions.
 */
public void prepareTestData() throws Exception {
    try {
        util.getAdmin().disableTable(TABLE);
        util.getAdmin().deleteTable(TABLE);
    } catch (Exception ignored) {
        // The table may simply not exist yet on a fresh cluster; that is fine.
    }
    table = util.createTable(TABLE, FAM);

    // First fixture row: a small friendship adjacency list.
    Put firstRow = new Put(ROW);
    // B, C are friends of A
    firstRow.addColumn(FAM, A, Bytes.add(B, C));
    // D, E, F are friends of B
    firstRow.addColumn(FAM, B, Bytes.add(D, E, F));
    // G is a friend of C
    firstRow.addColumn(FAM, C, G);
    table.put(firstRow);
    rowSize = firstRow.size();

    // Second fixture row.
    Put secondRow = new Put(ROW2);
    secondRow.addColumn(FAM, D, E);
    secondRow.addColumn(FAM, F, G);
    table.put(secondRow);
    row2Size = secondRow.size();
}
Also used : IOException(java.io.IOException) Put(org.apache.hadoop.hbase.client.Put)

Example 32 with Put

use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From the class TestServerCustomProtocol, the method before:

@Before
public void before() throws Exception {
    // Pre-split the table so that ROW_A, ROW_B and ROW_C each land in a
    // distinct region (splits at ROW_B and ROW_C).
    final byte[][] SPLIT_KEYS = new byte[][] { ROW_B, ROW_C };
    Table table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
    // Write one identical cell into each region's anchor row, in order.
    for (byte[] row : new byte[][] { ROW_A, ROW_B, ROW_C }) {
        Put put = new Put(row);
        put.addColumn(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
        table.put(put);
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) Put(org.apache.hadoop.hbase.client.Put) Before(org.junit.Before)

Example 33 with Put

use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From the class TestCoprocessorEndpoint, the method setupBeforeClass:

/**
 * Starts a two-server mini cluster with the column-aggregation and protobuf
 * coprocessors loaded, creates the pre-split test table, and seeds it with
 * ROWSIZE rows (one int cell per row).
 */
@BeforeClass
public static void setupBeforeClass() throws Exception {
    // Set configuration to indicate which coprocessors should be loaded.
    Configuration conf = util.getConfiguration();
    conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 5000);
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
        org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName(),
        ProtobufCoprocessorService.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
        ProtobufCoprocessorService.class.getName());
    util.startMiniCluster(2);
    Admin admin = util.getAdmin();
    HTableDescriptor desc = new HTableDescriptor(TEST_TABLE);
    desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
    admin.createTable(desc, new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] });
    util.waitUntilAllRegionsAssigned(TEST_TABLE);
    // try-with-resources: the original leaked the Table reference if any
    // table.put() threw before reaching the explicit table.close().
    try (Table table = util.getConnection().getTable(TEST_TABLE)) {
        for (int i = 0; i < ROWSIZE; i++) {
            Put put = new Put(ROWS[i]);
            put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
            table.put(put);
        }
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) BeforeClass(org.junit.BeforeClass)

Example 34 with Put

use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From the class TestMapReduceExamples, the method testSampleUploader:

/**
 * Test SampleUploader from examples: verifies that Uploader.map parses a
 * "row,family,qualifier,value" line into a Put keyed by the row, and that
 * SampleUploader.configureJob selects SequenceFileInputFormat as input.
 */
@SuppressWarnings("unchecked")
@Test
public void testSampleUploader() throws Exception {
    Configuration configuration = new Configuration();
    Uploader uploader = new Uploader();
    // Mapper.Context is a non-generic inner class; the original declaration
    // repeated the type arguments after ".Context", which is invalid Java
    // (most likely an artifact of the page this snippet was scraped from).
    Mapper<LongWritable, Text, ImmutableBytesWritable, Put>.Context ctx = mock(Context.class);
    doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
            Put put = (Put) invocation.getArguments()[1];
            // The mapper must emit the parsed row both as the map output key
            // and as the row of the emitted Put.
            assertEquals("row", Bytes.toString(writer.get()));
            assertEquals("row", Bytes.toString(put.getRow()));
            return null;
        }
    }).when(ctx).write(any(ImmutableBytesWritable.class), any(Put.class));
    uploader.map(null, new Text("row,family,qualifier,value"), ctx);
    Path dir = util.getDataTestDirOnTestFS("testSampleUploader");
    String[] args = { dir.toString(), "simpleTable" };
    Job job = SampleUploader.configureJob(configuration, args);
    assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());
}
Also used : Path(org.apache.hadoop.fs.Path) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Configuration(org.apache.hadoop.conf.Configuration) Text(org.apache.hadoop.io.Text) Put(org.apache.hadoop.hbase.client.Put) Mapper(org.apache.hadoop.mapreduce.Mapper) InvocationOnMock(org.mockito.invocation.InvocationOnMock) LongWritable(org.apache.hadoop.io.LongWritable) Job(org.apache.hadoop.mapreduce.Job) Uploader(org.apache.hadoop.hbase.mapreduce.SampleUploader.Uploader) Test(org.junit.Test)

Example 35 with Put

use of org.apache.hadoop.hbase.client.Put in project hbase by apache.

From the class RSGroupInfoManagerImpl, the method flushConfigTable:

/**
 * Serializes the given group map into the rsgroup system table in a single
 * multi-mutate: deletes rows for groups that existed at the previous flush
 * but are gone now, and (re)writes a protobuf-encoded row for each current
 * group.
 *
 * @param groupMap current group name to RSGroupInfo mapping
 * @return the table-name to group-name mapping implied by {@code groupMap}
 * @throws IOException if the mutations cannot be applied
 */
private synchronized Map<TableName, String> flushConfigTable(Map<String, RSGroupInfo> groupMap) throws IOException {
    Map<TableName, String> newTableMap = Maps.newHashMap();
    List<Mutation> mutations = Lists.newArrayList();
    // Populate deletes: groups present at the last flush but absent now.
    for (String groupName : prevRSGroups) {
        if (!groupMap.containsKey(groupName)) {
            mutations.add(new Delete(Bytes.toBytes(groupName)));
        }
    }
    // Populate puts. NOTE: the loop variable was previously named
    // "RSGroupInfo", shadowing the type of the same name; renamed to the
    // conventional lowerCamelCase.
    for (RSGroupInfo rsGroupInfo : groupMap.values()) {
        RSGroupProtos.RSGroupInfo proto = RSGroupProtobufUtil.toProtoGroupInfo(rsGroupInfo);
        Put p = new Put(Bytes.toBytes(rsGroupInfo.getName()));
        p.addColumn(META_FAMILY_BYTES, META_QUALIFIER_BYTES, proto.toByteArray());
        mutations.add(p);
        for (TableName entry : rsGroupInfo.getTables()) {
            newTableMap.put(entry, rsGroupInfo.getName());
        }
    }
    if (!mutations.isEmpty()) {
        multiMutate(mutations);
    }
    return newTableMap;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) TableName(org.apache.hadoop.hbase.TableName) RSGroupProtos(org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)

Aggregations

Put (org.apache.hadoop.hbase.client.Put)880 Test (org.junit.Test)418 Table (org.apache.hadoop.hbase.client.Table)240 ArrayList (java.util.ArrayList)221 Result (org.apache.hadoop.hbase.client.Result)183 Scan (org.apache.hadoop.hbase.client.Scan)164 Cell (org.apache.hadoop.hbase.Cell)159 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)149 Delete (org.apache.hadoop.hbase.client.Delete)149 IOException (java.io.IOException)141 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)134 TableName (org.apache.hadoop.hbase.TableName)118 Get (org.apache.hadoop.hbase.client.Get)114 KeyValue (org.apache.hadoop.hbase.KeyValue)98 Configuration (org.apache.hadoop.conf.Configuration)81 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)75 Connection (org.apache.hadoop.hbase.client.Connection)68 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)60 Admin (org.apache.hadoop.hbase.client.Admin)55 Mutation (org.apache.hadoop.hbase.client.Mutation)54