
Example 81 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hive by apache.

From the class PutWritable, the method write:

@Override
public void write(final DataOutput out) throws IOException {
    // Write the Put's metadata as a length-delimited protobuf, without the cell data.
    ProtobufUtil.toMutationNoData(MutationType.PUT, put).writeDelimitedTo(DataOutputOutputStream.from(out));
    // Follow with the cell count, then each cell serialized as a KeyValue.
    out.writeInt(put.size());
    CellScanner scanner = put.cellScanner();
    while (scanner.advance()) {
        KeyValue kv = KeyValueUtil.ensureKeyValue(scanner.current());
        KeyValue.write(kv, out);
    }
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), CellScanner (org.apache.hadoop.hbase.CellScanner)
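
For context, the read side must consume exactly this layout: one delimited MutationProto with no cell data, a cell count, then that many serialized KeyValues. Below is a minimal sketch of a matching readFields, assuming HBase 1.x-era helpers (KeyValueUtil.create(DataInput), ProtobufUtil.toPut(MutationProto, CellScanner)) and a DataInputInputStream adapter mirroring the DataOutputOutputStream above; it is illustrative, not necessarily Hive's exact PutWritable.readFields.

@Override
public void readFields(final DataInput in) throws IOException {
    // Parse the delimited protobuf header written by write() above.
    // DataInputInputStream is assumed here as the inverse of DataOutputOutputStream.
    MutationProto proto = MutationProto.parseDelimitedFrom(DataInputInputStream.from(in));
    // Read the cell count, then each serialized KeyValue.
    int size = in.readInt();
    List<Cell> cells = new ArrayList<Cell>(size);
    for (int i = 0; i < size; i++) {
        cells.add(KeyValueUtil.create(in));
    }
    // Rebuild the Put from its metadata plus the recovered cells.
    put = ProtobufUtil.toPut(proto, CellUtil.createCellScanner(cells));
}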

Example 82 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hive by apache.

From the class ResultWritable, the method write:

@Override
public void write(final DataOutput out) throws IOException {
    ProtobufUtil.toResultNoData(result).writeDelimitedTo(DataOutputOutputStream.from(out));
    out.writeInt(result.size());
    for (KeyValue kv : result.list()) {
        KeyValue.write(kv, out);
    }
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue)
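
A quick way to exercise such a Writable is a round-trip through Hadoop's in-memory buffers (org.apache.hadoop.io.DataOutputBuffer / DataInputBuffer). A minimal sketch, assuming a ResultWritable(Result) constructor, a no-arg constructor, and a getResult() accessor; the test itself is purely illustrative:

// Serialize a Result through the Writable interface and read it back.
DataOutputBuffer out = new DataOutputBuffer();
new ResultWritable(result).write(out);

DataInputBuffer in = new DataInputBuffer();
in.reset(out.getData(), out.getLength());
ResultWritable copy = new ResultWritable();
copy.readFields(in);
// The copy should hold the same number of cells as the original.
assertEquals(result.size(), copy.getResult().size());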

Example 83 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hive by apache.

From the class TestHBaseSerDe, the method testHBaseSerDeWithForwardEvolvedSchema:

public void testHBaseSerDeWithForwardEvolvedSchema() throws SerDeException, IOException {
    byte[] cfa = "cola".getBytes();
    byte[] qualAvro = "avro".getBytes();
    byte[] rowKey = Bytes.toBytes("test-row1");
    // Data
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    byte[] avroData = getTestAvroBytesFromSchema(RECORD_SCHEMA);
    kvs.add(new KeyValue(rowKey, cfa, qualAvro, avroData));
    Result r = new Result(kvs);
    Put p = new Put(rowKey);
    // After serialization, separators are automatically inserted between the fields of the
    // struct. Currently there is no way to disable that, so the workaround here is to pad the
    // data with the separator bytes before creating the "Put" object.
    p.add(new KeyValue(rowKey, cfa, qualAvro, avroData));
    Object[] expectedFieldsData = { "test-row1", "[[42, test, true, 42432234234]]" };
    // Create, initialize, and test the SerDe
    HBaseSerDe serDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesForHiveAvroForwardEvolvedSchema();
    serDe.initialize(conf, tbl);
    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData, EXPECTED_DESERIALIZED_AVRO_STRING_3);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), AvroTableProperties (org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties), Properties (java.util.Properties), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result)
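
The helper called on the last line is what actually drives the SerDe. A condensed sketch of the kind of round-trip such a helper performs, assuming HBaseSerDe consumes a ResultWritable and produces a PutWritable with a getPut() accessor, and using SerDeUtils.getJSONString for the string comparison; the assertions are illustrative, not Hive's exact test code:

private void deserializeAndSerializeHiveAvro(HBaseSerDe serDe, Result r, Put p,
        Object[] expectedFieldsData, String expectedDeserializedAvroString) throws SerDeException {
    StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
    // Deserialize the HBase Result into a Hive row object.
    Object row = serDe.deserialize(new ResultWritable(r));
    // Compare the row's JSON rendering against the expected string.
    assertEquals(expectedDeserializedAvroString, SerDeUtils.getJSONString(row, soi));
    // Check each top-level field against the expected data.
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    for (int i = 0; i < expectedFieldsData.length; i++) {
        Object fieldData = soi.getStructFieldData(row, fields.get(i));
        assertEquals(expectedFieldsData[i].toString(), fieldData.toString());
    }
    // Serialize the row back; it should round-trip to a Put for the same row key.
    PutWritable serialized = (PutWritable) serDe.serialize(row, soi);
    assertTrue(Arrays.equals(p.getRow(), serialized.getPut().getRow()));
}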

Example 84 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hive by apache.

From the class TestHBaseSerDe, the method testHBaseSerDeWithHiveMapToHBaseColumnFamilyII:

public void testHBaseSerDeWithHiveMapToHBaseColumnFamilyII() throws SerDeException {
    byte[] cfbyte = "cf-byte".getBytes();
    byte[] cfshort = "cf-short".getBytes();
    byte[] cfint = "cf-int".getBytes();
    byte[] cflong = "cf-long".getBytes();
    byte[] cffloat = "cf-float".getBytes();
    byte[] cfdouble = "cf-double".getBytes();
    byte[] cfstring = "cf-string".getBytes();
    byte[] cfbool = "cf-bool".getBytes();
    byte[][] columnFamilies = new byte[][] { cfbyte, cfshort, cfint, cflong, cffloat, cfdouble, cfstring, cfbool };
    byte[] rowKey = Bytes.toBytes("row-key");
    byte[][] columnQualifiersAndValues = new byte[][] { Bytes.toBytes("123"), Bytes.toBytes("456"), Bytes.toBytes("789"), Bytes.toBytes("1000"), Bytes.toBytes("-0.01"), Bytes.toBytes("5.3"), Bytes.toBytes("Hive"), Bytes.toBytes("true") };
    Put p = new Put(rowKey);
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    for (int j = 0; j < columnQualifiersAndValues.length; j++) {
        kvs.add(new KeyValue(rowKey, columnFamilies[j], columnQualifiersAndValues[j], columnQualifiersAndValues[j]));
        p.add(columnFamilies[j], columnQualifiersAndValues[j], columnQualifiersAndValues[j]);
    }
    Result r = new Result(kvs);
    Object[] expectedData = { new Text("row-key"), new ByteWritable((byte) 123), new ShortWritable((short) 456), new IntWritable(789), new LongWritable(1000), new FloatWritable(-0.01F), new DoubleWritable(5.3), new Text("Hive"), new BooleanWritable(true) };
    HBaseSerDe hbaseSerDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesForHiveMapHBaseColumnFamilyII_I();
    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
    deserializeAndSerializeHiveMapHBaseColumnFamilyII(hbaseSerDe, r, p, expectedData, columnFamilies, columnQualifiersAndValues);
    // Re-run the same round-trip with the second variant of the table properties.
    hbaseSerDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesForHiveMapHBaseColumnFamilyII_II();
    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
    deserializeAndSerializeHiveMapHBaseColumnFamilyII(hbaseSerDe, r, p, expectedData, columnFamilies, columnQualifiersAndValues);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), Text (org.apache.hadoop.io.Text), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), AvroTableProperties (org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties), Properties (java.util.Properties), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable)
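
Both property helpers express the same idea: each Hive map column is bound to an entire HBase column family via the hbase.columns.mapping syntax, where a family name followed by a bare colon (e.g. cf-byte:) maps the whole family and its qualifiers become the map keys. A sketch of what such a helper might set, using the real serdeConstants and HBaseSerDe.HBASE_COLUMNS_MAPPING keys but illustrative values (Hive's actual helpers may differ, e.g. in storage-type suffixes):

private Properties createPropertiesForHiveMapHBaseColumnFamilyII_I() {
    Properties tbl = new Properties();
    // One Hive map column per HBase column family.
    tbl.setProperty(serdeConstants.LIST_COLUMNS,
        "key,valbyte,valshort,valint,vallong,valfloat,valdouble,valstring,valbool");
    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES,
        "string:map<tinyint,tinyint>:map<smallint,smallint>:map<int,int>:map<bigint,bigint>"
            + ":map<float,float>:map<double,double>:map<string,string>:map<boolean,boolean>");
    // A trailing colon with no qualifier maps the entire column family.
    tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING,
        ":key,cf-byte:,cf-short:,cf-int:,cf-long:,cf-float:,cf-double:,cf-string:,cf-bool:");
    return tbl;
}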

Example 85 with KeyValue

Use of org.apache.hadoop.hbase.KeyValue in project hive by apache.

From the class TestHBaseSerDe, the method testHBaseSerDeCompositeKeyWithoutSeparator:

public void testHBaseSerDeCompositeKeyWithoutSeparator() throws SerDeException, TException, IOException {
    byte[] cfa = "cola".getBytes();
    byte[] qualStruct = "struct".getBytes();
    TestStruct testStruct = new TestStruct("A", "B", "C", false, (byte) 0);
    byte[] rowKey = testStruct.getBytes();
    // Data
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    byte[] testData = "This is a test data".getBytes();
    kvs.add(new KeyValue(rowKey, cfa, qualStruct, testData));
    Result r = new Result(kvs);
    byte[] putRowKey = testStruct.getBytesWithDelimiters();
    Put p = new Put(putRowKey);
    // After serialization, separators are automatically inserted between the fields of the
    // struct. Currently there is no way to disable that, so the workaround here is to pad the
    // data with the separator bytes before creating the "Put" object.
    p.add(new KeyValue(putRowKey, cfa, qualStruct, testData));
    // Create, initialize, and test the SerDe
    HBaseSerDe serDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesForCompositeKeyWithoutSeparator();
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    deserializeAndSerializeHBaseCompositeKey(serDe, r, p);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), AvroTableProperties (org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties), Properties (java.util.Properties), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result)
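
TestStruct itself is not shown on this page. A plausible sketch of such a helper, where getBytes() concatenates the raw field bytes and getBytesWithDelimiters() inserts the separator byte between fields; the field names and constructor semantics below are assumptions for illustration only:

// Illustrative only: a composite row key with and without field separators.
static class TestStruct {
    private final String f1, f2, f3;
    private final boolean escaped;   // assumption: whether fields need escaping
    private final byte separator;

    TestStruct(String f1, String f2, String f3, boolean escaped, byte separator) {
        this.f1 = f1; this.f2 = f2; this.f3 = f3;
        this.escaped = escaped;
        this.separator = separator;
    }

    // Raw concatenation: the key parser must know the field boundaries itself.
    byte[] getBytes() throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(f1.getBytes());
        out.write(f2.getBytes());
        out.write(f3.getBytes());
        return out.toByteArray();
    }

    // Delimited form: fields separated by the configured separator byte.
    byte[] getBytesWithDelimiters() throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(f1.getBytes());
        out.write(separator);
        out.write(f2.getBytes());
        out.write(separator);
        out.write(f3.getBytes());
        return out.toByteArray();
    }
}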

Aggregations

KeyValue (org.apache.hadoop.hbase.KeyValue): 552
Test (org.junit.Test): 289
Cell (org.apache.hadoop.hbase.Cell): 193
ArrayList (java.util.ArrayList): 172
Put (org.apache.hadoop.hbase.client.Put): 98
Scan (org.apache.hadoop.hbase.client.Scan): 85
Result (org.apache.hadoop.hbase.client.Result): 70
Configuration (org.apache.hadoop.conf.Configuration): 64
Path (org.apache.hadoop.fs.Path): 55
ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag): 36
Tag (org.apache.hadoop.hbase.Tag): 35
ByteBuffer (java.nio.ByteBuffer): 34
List (java.util.List): 34
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 34
IOException (java.io.IOException): 32
TableName (org.apache.hadoop.hbase.TableName): 32
TreeMap (java.util.TreeMap): 29
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 28
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 28
WALEdit (org.apache.hadoop.hbase.regionserver.wal.WALEdit): 27