Use of org.apache.hadoop.hbase.KeyValue in the Apache Hive project.
Class PutWritable, method write().
/**
 * Serializes this Put to {@code out}: first the Put metadata as a
 * length-delimited protobuf (no cell data), then the cell count, then each
 * cell written as a KeyValue.
 *
 * @param out destination stream
 * @throws IOException if writing to {@code out} fails
 */
@Override
public void write(final DataOutput out) throws IOException {
  // Mutation metadata only; the cells follow explicitly below.
  ProtobufUtil.toMutationNoData(MutationType.PUT, put).writeDelimitedTo(DataOutputOutputStream.from(out));
  // Cell count lets the reader know how many KeyValues to expect.
  out.writeInt(put.size());
  // Walk the cells, materializing each as a KeyValue before serializing it.
  for (CellScanner cells = put.cellScanner(); cells.advance(); ) {
    KeyValue.write(KeyValueUtil.ensureKeyValue(cells.current()), out);
  }
}
Use of org.apache.hadoop.hbase.KeyValue in the Apache Hive project.
Class ResultWritable, method write().
/**
 * Serializes this Result to {@code out}: first the Result metadata as a
 * length-delimited protobuf (no cell data), then the cell count, then each
 * cell written as a KeyValue.
 *
 * @param out destination stream
 * @throws IOException if writing to {@code out} fails
 */
@Override
public void write(final DataOutput out) throws IOException {
  // Result metadata only; the cells follow explicitly below.
  ProtobufUtil.toResultNoData(result).writeDelimitedTo(DataOutputOutputStream.from(out));
  // Cell count lets the reader know how many KeyValues to expect.
  out.writeInt(result.size());
  // Result.list() returns null rather than an empty list when the Result
  // holds no cells, so an unguarded for-each would NPE after a size of 0
  // was already written. Skip the loop entirely in that case.
  if (result.size() > 0) {
    for (KeyValue kv : result.list()) {
      KeyValue.write(kv, out);
    }
  }
}
Use of org.apache.hadoop.hbase.KeyValue in the Apache Hive project.
Class TestHBaseSerDe, method testHBaseSerDeWithForwardEvolvedSchema().
/**
 * Verifies that HBaseSerDe can deserialize Avro data written with an older
 * schema when the table is configured with a forward-evolved schema, then
 * serialize it back.
 */
public void testHBaseSerDeWithForwardEvolvedSchema() throws SerDeException, IOException {
byte[] cfa = "cola".getBytes();
byte[] qualAvro = "avro".getBytes();
byte[] rowKey = Bytes.toBytes("test-row1");
// Data
List<KeyValue> kvs = new ArrayList<KeyValue>();
byte[] avroData = getTestAvroBytesFromSchema(RECORD_SCHEMA);
kvs.add(new KeyValue(rowKey, cfa, qualAvro, avroData));
Result r = new Result(kvs);
Put p = new Put(rowKey);
// Post serialization, separators are automatically inserted between different fields in the
// struct. Currently there is no way to disable that. So the workaround here is to pad the
// data with the separator bytes before creating a "Put" object
p.add(new KeyValue(rowKey, cfa, qualAvro, avroData));
// Plain literals: wrapping them in new String(...) only created redundant copies.
Object[] expectedFieldsData = { "test-row1", "[[42, test, true, 42432234234]]" };
// Create, initialize, and test the SerDe. Initialization goes through
// SerDeUtils.initializeSerDe for consistency with the other tests in this class.
HBaseSerDe serDe = new HBaseSerDe();
Configuration conf = new Configuration();
Properties tbl = createPropertiesForHiveAvroForwardEvolvedSchema();
SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData, EXPECTED_DESERIALIZED_AVRO_STRING_3);
}
Use of org.apache.hadoop.hbase.KeyValue in the Apache Hive project.
Class TestHBaseSerDe, method testHBaseSerDeWithHiveMapToHBaseColumnFamilyII().
/**
 * Round-trips one cell per primitive-typed column family through HBaseSerDe,
 * exercising both table-property variants against the same Result/Put pair.
 */
public void testHBaseSerDeWithHiveMapToHBaseColumnFamilyII() throws SerDeException {
// One HBase column family per primitive Hive type under test.
byte[][] families = new byte[][] { "cf-byte".getBytes(), "cf-short".getBytes(),
    "cf-int".getBytes(), "cf-long".getBytes(), "cf-float".getBytes(), "cf-double".getBytes(),
    "cf-string".getBytes(), "cf-bool".getBytes() };
byte[] rowKey = Bytes.toBytes("row-key");
// The qualifier doubles as the cell value in each family.
byte[][] qualsAndValues = new byte[][] { Bytes.toBytes("123"), Bytes.toBytes("456"),
    Bytes.toBytes("789"), Bytes.toBytes("1000"), Bytes.toBytes("-0.01"), Bytes.toBytes("5.3"),
    Bytes.toBytes("Hive"), Bytes.toBytes("true") };
// Build a Result and a Put carrying the identical cells.
Put put = new Put(rowKey);
List<KeyValue> cells = new ArrayList<KeyValue>();
for (int i = 0; i < qualsAndValues.length; i++) {
  cells.add(new KeyValue(rowKey, families[i], qualsAndValues[i], qualsAndValues[i]));
  put.add(families[i], qualsAndValues[i], qualsAndValues[i]);
}
Result result = new Result(cells);
// Expected deserialized values, one Writable per family (row key first).
Object[] expectedData = { new Text("row-key"), new ByteWritable((byte) 123),
    new ShortWritable((short) 456), new IntWritable(789), new LongWritable(1000),
    new FloatWritable(-0.01F), new DoubleWritable(5.3), new Text("Hive"),
    new BooleanWritable(true) };
// First table-property variant.
HBaseSerDe serDe = new HBaseSerDe();
SerDeUtils.initializeSerDe(serDe, new Configuration(), createPropertiesForHiveMapHBaseColumnFamilyII_I(), null);
deserializeAndSerializeHiveMapHBaseColumnFamilyII(serDe, result, put, expectedData, families, qualsAndValues);
// Second table-property variant against the same data.
serDe = new HBaseSerDe();
SerDeUtils.initializeSerDe(serDe, new Configuration(), createPropertiesForHiveMapHBaseColumnFamilyII_II(), null);
deserializeAndSerializeHiveMapHBaseColumnFamilyII(serDe, result, put, expectedData, families, qualsAndValues);
}
Use of org.apache.hadoop.hbase.KeyValue in the Apache Hive project.
Class TestHBaseSerDe, method testHBaseSerDeCompositeKeyWithoutSeparator().
/**
 * Round-trips a row whose key is a composite struct (no separator configured)
 * through HBaseSerDe.
 */
public void testHBaseSerDeCompositeKeyWithoutSeparator() throws SerDeException, TException, IOException {
byte[] family = "cola".getBytes();
byte[] qualifier = "struct".getBytes();
TestStruct compositeKey = new TestStruct("A", "B", "C", false, (byte) 0);
byte[] payload = "This is a test data".getBytes();
// Result side: the row key is the raw composite struct bytes.
byte[] resultRowKey = compositeKey.getBytes();
List<KeyValue> cells = new ArrayList<KeyValue>();
cells.add(new KeyValue(resultRowKey, family, qualifier, payload));
Result result = new Result(cells);
// Put side: post serialization, separators are automatically inserted between
// different fields in the struct and there is currently no way to disable that,
// so the workaround is to pad the key with the separator bytes before creating
// the "Put" object.
byte[] putRowKey = compositeKey.getBytesWithDelimiters();
Put put = new Put(putRowKey);
put.add(new KeyValue(putRowKey, family, qualifier, payload));
// Create, initialize, and round-trip through the SerDe.
HBaseSerDe serDe = new HBaseSerDe();
SerDeUtils.initializeSerDe(serDe, new Configuration(), createPropertiesForCompositeKeyWithoutSeparator(), null);
deserializeAndSerializeHBaseCompositeKey(serDe, result, put);
}
Aggregations