Example 21 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class VerifyFast, method verifyDeserializeRead.

public static void verifyDeserializeRead(DeserializeRead deserializeRead, PrimitiveTypeInfo primitiveTypeInfo, Writable writable) throws IOException {
    boolean isNull = !deserializeRead.readNextField();
    if (isNull) {
        if (writable != null) {
            TestCase.fail("Field reports null but object is not null (class " + writable.getClass().getName() + ", " + writable.toString() + ")");
        }
        return;
    } else if (writable == null) {
        TestCase.fail("Field report not null but object is null");
    }
    switch(primitiveTypeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
            {
                boolean value = deserializeRead.currentBoolean;
                if (!(writable instanceof BooleanWritable)) {
                    TestCase.fail("Boolean expected writable not Boolean");
                }
                boolean expected = ((BooleanWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case BYTE:
            {
                byte value = deserializeRead.currentByte;
                if (!(writable instanceof ByteWritable)) {
                    TestCase.fail("Byte expected writable not Byte");
                }
                byte expected = ((ByteWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                }
            }
            break;
        case SHORT:
            {
                short value = deserializeRead.currentShort;
                if (!(writable instanceof ShortWritable)) {
                    TestCase.fail("Short expected writable not Short");
                }
                short expected = ((ShortWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case INT:
            {
                int value = deserializeRead.currentInt;
                if (!(writable instanceof IntWritable)) {
                    TestCase.fail("Integer expected writable not Integer");
                }
                int expected = ((IntWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case LONG:
            {
                long value = deserializeRead.currentLong;
                if (!(writable instanceof LongWritable)) {
                    TestCase.fail("Long expected writable not Long");
                }
                long expected = ((LongWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case FLOAT:
            {
                float value = deserializeRead.currentFloat;
                if (!(writable instanceof FloatWritable)) {
                    TestCase.fail("Float expected writable not Float");
                }
                float expected = ((FloatWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case DOUBLE:
            {
                double value = deserializeRead.currentDouble;
                if (!(writable instanceof DoubleWritable)) {
                    TestCase.fail("Double expected writable not Double");
                }
                double expected = ((DoubleWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case STRING:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                String expected = ((Text) writable).toString();
                if (!string.equals(expected)) {
                    TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
                }
            }
            break;
        case CHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                HiveChar expected = ((HiveCharWritable) writable).getHiveChar();
                if (!hiveChar.equals(expected)) {
                    TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
                }
            }
            break;
        case VARCHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                HiveVarchar expected = ((HiveVarcharWritable) writable).getHiveVarchar();
                if (!hiveVarchar.equals(expected)) {
                    TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
                }
            }
            break;
        case DECIMAL:
            {
                HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                if (value == null) {
                    TestCase.fail("Decimal field evaluated to NULL");
                }
                HiveDecimal expected = ((HiveDecimalWritable) writable).getHiveDecimal();
                if (!value.equals(expected)) {
                    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                    int precision = decimalTypeInfo.getPrecision();
                    int scale = decimalTypeInfo.getScale();
                    TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                }
            }
            break;
        case DATE:
            {
                Date value = deserializeRead.currentDateWritable.get();
                Date expected = ((DateWritable) writable).get();
                if (!value.equals(expected)) {
                    TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case TIMESTAMP:
            {
                Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                Timestamp expected = ((TimestampWritable) writable).getTimestamp();
                if (!value.equals(expected)) {
                    TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_YEAR_MONTH:
            {
                HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) writable).getHiveIntervalYearMonth();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_DAY_TIME:
            {
                HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) writable).getHiveIntervalDayTime();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case BINARY:
            {
                byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                BytesWritable bytesWritable = (BytesWritable) writable;
                byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
                if (byteArray.length != expected.length) {
                    TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                }
                for (int b = 0; b < byteArray.length; b++) {
                    if (byteArray[b] != expected[b]) {
                        TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                    }
                }
            }
            break;
        default:
            throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
    }
}
Also used: VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), Text (org.apache.hadoop.io.Text), BytesWritable (org.apache.hadoop.io.BytesWritable), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), Timestamp (java.sql.Timestamp), Date (java.sql.Date), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), FloatWritable (org.apache.hadoop.io.FloatWritable), HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth), BooleanWritable (org.apache.hadoop.io.BooleanWritable), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
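Each primitive branch of verifyDeserializeRead is a write/read round trip followed by an expected-vs-found check. A minimal, self-contained sketch of that pattern for the BOOLEAN branch, using plain Hadoop I/O buffers rather than Hive's DeserializeRead (class and variable names here are illustrative, not from the source above):

import java.io.IOException;

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class BooleanWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        // Write a BooleanWritable into an in-memory buffer.
        BooleanWritable written = new BooleanWritable(true);
        DataOutputBuffer out = new DataOutputBuffer();
        written.write(out);

        // Read it back and compare, mirroring the expected-vs-found check
        // in the BOOLEAN branch of verifyDeserializeRead above.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        BooleanWritable read = new BooleanWritable();
        read.readFields(in);

        if (read.get() != written.get()) {
            throw new AssertionError("Boolean field mismatch (expected "
                + written.get() + " found " + read.get() + ")");
        }
        System.out.println("round trip ok: " + read.get());
    }
}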

Example 22 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class TestHBaseSerDe, method testHBaseSerDeWithHiveMapToHBaseColumnFamilyII.

public void testHBaseSerDeWithHiveMapToHBaseColumnFamilyII() throws SerDeException {
    byte[] cfbyte = "cf-byte".getBytes();
    byte[] cfshort = "cf-short".getBytes();
    byte[] cfint = "cf-int".getBytes();
    byte[] cflong = "cf-long".getBytes();
    byte[] cffloat = "cf-float".getBytes();
    byte[] cfdouble = "cf-double".getBytes();
    byte[] cfstring = "cf-string".getBytes();
    byte[] cfbool = "cf-bool".getBytes();
    byte[][] columnFamilies = new byte[][] { cfbyte, cfshort, cfint, cflong, cffloat, cfdouble, cfstring, cfbool };
    byte[] rowKey = Bytes.toBytes("row-key");
    byte[][] columnQualifiersAndValues = new byte[][] {
        Bytes.toBytes("123"), Bytes.toBytes("456"), Bytes.toBytes("789"), Bytes.toBytes("1000"),
        Bytes.toBytes("-0.01"), Bytes.toBytes("5.3"), Bytes.toBytes("Hive"), Bytes.toBytes("true") };
    Put p = new Put(rowKey);
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    for (int j = 0; j < columnQualifiersAndValues.length; j++) {
        kvs.add(new KeyValue(rowKey, columnFamilies[j], columnQualifiersAndValues[j], columnQualifiersAndValues[j]));
        p.add(columnFamilies[j], columnQualifiersAndValues[j], columnQualifiersAndValues[j]);
    }
    Result r = new Result(kvs);
    Object[] expectedData = {
        new Text("row-key"), new ByteWritable((byte) 123), new ShortWritable((short) 456),
        new IntWritable(789), new LongWritable(1000), new FloatWritable(-0.01F),
        new DoubleWritable(5.3), new Text("Hive"), new BooleanWritable(true) };
    HBaseSerDe hbaseSerDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesForHiveMapHBaseColumnFamilyII_I();
    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
    deserializeAndSerializeHiveMapHBaseColumnFamilyII(hbaseSerDe, r, p, expectedData, columnFamilies, columnQualifiersAndValues);
    hbaseSerDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesForHiveMapHBaseColumnFamilyII_II();
    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
    deserializeAndSerializeHiveMapHBaseColumnFamilyII(hbaseSerDe, r, p, expectedData, columnFamilies, columnQualifiersAndValues);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), Text (org.apache.hadoop.io.Text), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), AvroTableProperties (org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties), Properties (java.util.Properties), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable)
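The createPropertiesForHiveMapHBaseColumnFamilyII_* helpers are not shown in this excerpt. As a hedged sketch, a table-properties factory for this kind of test typically sets the standard serde keys "columns", "columns.types", and "hbase.columns.mapping"; the Hive column names below are hypothetical and the real helpers' values may differ:

import java.util.Properties;

// Hypothetical stand-in for the createPropertiesForHiveMapHBaseColumnFamilyII_*
// helpers, which are not shown in this excerpt; key names follow the standard
// Hive serde properties, but the exact values the real helpers use may differ.
public class HiveMapColumnFamilyProperties {
    public static Properties create() {
        Properties tbl = new Properties();
        // One Hive map column per HBase column family, plus the row key.
        tbl.setProperty("columns",
            "key,m_byte,m_short,m_int,m_long,m_float,m_double,m_string,m_bool");
        tbl.setProperty("columns.types",
            "string:map<string,tinyint>:map<string,smallint>:map<string,int>"
            + ":map<string,bigint>:map<string,float>:map<string,double>"
            + ":map<string,string>:map<string,boolean>");
        // A family name with a trailing ':' and no qualifier maps the whole
        // column family onto a Hive map; ':key' binds the HBase row key.
        tbl.setProperty("hbase.columns.mapping",
            ":key,cf-byte:,cf-short:,cf-int:,cf-long:,cf-float:,cf-double:,cf-string:,cf-bool:");
        return tbl;
    }
}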

Example 23 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class TestHBaseSerDe, method testHBaseSerDeWithHiveMapToHBaseColumnFamily.

public void testHBaseSerDeWithHiveMapToHBaseColumnFamily() throws SerDeException {
    byte[] cfint = "cf-int".getBytes();
    byte[] cfbyte = "cf-byte".getBytes();
    byte[] cfshort = "cf-short".getBytes();
    byte[] cflong = "cf-long".getBytes();
    byte[] cffloat = "cf-float".getBytes();
    byte[] cfdouble = "cf-double".getBytes();
    byte[] cfbool = "cf-bool".getBytes();
    byte[][] columnFamilies = new byte[][] { cfint, cfbyte, cfshort, cflong, cffloat, cfdouble, cfbool };
    byte[][] rowKeys = new byte[][] {
        Integer.toString(1).getBytes(),
        Integer.toString(Integer.MIN_VALUE).getBytes(),
        Integer.toString(Integer.MAX_VALUE).getBytes() };
    byte[][][] columnQualifiersAndValues = new byte[][][] {
        { Bytes.toBytes(1), new byte[] { 1 }, Bytes.toBytes((short) 1), Bytes.toBytes((long) 1),
            Bytes.toBytes(1.0F), Bytes.toBytes(1.0), Bytes.toBytes(true) },
        { Bytes.toBytes(Integer.MIN_VALUE), new byte[] { Byte.MIN_VALUE }, Bytes.toBytes(Short.MIN_VALUE),
            Bytes.toBytes(Long.MIN_VALUE), Bytes.toBytes(Float.MIN_VALUE), Bytes.toBytes(Double.MIN_VALUE),
            Bytes.toBytes(false) },
        { Bytes.toBytes(Integer.MAX_VALUE), new byte[] { Byte.MAX_VALUE }, Bytes.toBytes(Short.MAX_VALUE),
            Bytes.toBytes(Long.MAX_VALUE), Bytes.toBytes(Float.MAX_VALUE), Bytes.toBytes(Double.MAX_VALUE),
            Bytes.toBytes(true) } };
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    Result[] r = new Result[] { null, null, null };
    Put[] p = new Put[] { null, null, null };
    for (int i = 0; i < r.length; i++) {
        kvs.clear();
        p[i] = new Put(rowKeys[i]);
        for (int j = 0; j < columnQualifiersAndValues[i].length; j++) {
            kvs.add(new KeyValue(rowKeys[i], columnFamilies[j], columnQualifiersAndValues[i][j], columnQualifiersAndValues[i][j]));
            p[i].add(columnFamilies[j], columnQualifiersAndValues[i][j], columnQualifiersAndValues[i][j]);
        }
        r[i] = new Result(kvs);
    }
    Object[][] expectedData = {
        { new Text(Integer.toString(1)), new IntWritable(1), new ByteWritable((byte) 1),
            new ShortWritable((short) 1), new LongWritable(1), new FloatWritable(1.0F),
            new DoubleWritable(1.0), new BooleanWritable(true) },
        { new Text(Integer.toString(Integer.MIN_VALUE)), new IntWritable(Integer.MIN_VALUE),
            new ByteWritable(Byte.MIN_VALUE), new ShortWritable(Short.MIN_VALUE),
            new LongWritable(Long.MIN_VALUE), new FloatWritable(Float.MIN_VALUE),
            new DoubleWritable(Double.MIN_VALUE), new BooleanWritable(false) },
        { new Text(Integer.toString(Integer.MAX_VALUE)), new IntWritable(Integer.MAX_VALUE),
            new ByteWritable(Byte.MAX_VALUE), new ShortWritable(Short.MAX_VALUE),
            new LongWritable(Long.MAX_VALUE), new FloatWritable(Float.MAX_VALUE),
            new DoubleWritable(Double.MAX_VALUE), new BooleanWritable(true) } };
    HBaseSerDe hbaseSerDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesForHiveMapHBaseColumnFamily();
    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
    deserializeAndSerializeHiveMapHBaseColumnFamily(hbaseSerDe, r, p, expectedData, rowKeys, columnFamilies, columnQualifiersAndValues);
    hbaseSerDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesForHiveMapHBaseColumnFamilyII();
    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
    deserializeAndSerializeHiveMapHBaseColumnFamily(hbaseSerDe, r, p, expectedData, rowKeys, columnFamilies, columnQualifiersAndValues);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), Text (org.apache.hadoop.io.Text), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), AvroTableProperties (org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties), Properties (java.util.Properties), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable)
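Unlike Example 22, which stored string renderings of each value, this test stores the native binary encodings produced by HBase's Bytes utility. A small sketch of the difference (the '#str'/'#bin' storage-specifier remark refers to the column mapping syntax visible in Example 25 below):

import org.apache.hadoop.hbase.util.Bytes;

// Contrasts the two cell encodings used by these tests: Example 22 stores the
// string rendering of each value, while this test stores the native binary form.
public class CellEncodingSketch {
    public static void main(String[] args) {
        byte[] asString = Bytes.toBytes("1"); // one UTF-8 byte: 0x31
        byte[] asBinary = Bytes.toBytes(1);   // four big-endian bytes: 00 00 00 01
        System.out.println("string-encoded: " + Bytes.toStringBinary(asString));
        System.out.println("binary-encoded: " + Bytes.toStringBinary(asBinary));
        // The reader must know which encoding was used; in the column mapping
        // this is what the '#str'/'#bin' storage specifiers select.
        System.out.println(Bytes.toInt(asBinary)); // 1
    }
}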

Example 24 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class TestHBaseSerDe, method testHBaseSerDeI.

/**
   * Test the default behavior of the Lazy family of objects and object inspectors.
   */
public void testHBaseSerDeI() throws SerDeException {
    byte[] cfa = "cola".getBytes();
    byte[] cfb = "colb".getBytes();
    byte[] cfc = "colc".getBytes();
    byte[] qualByte = "byte".getBytes();
    byte[] qualShort = "short".getBytes();
    byte[] qualInt = "int".getBytes();
    byte[] qualLong = "long".getBytes();
    byte[] qualFloat = "float".getBytes();
    byte[] qualDouble = "double".getBytes();
    byte[] qualString = "string".getBytes();
    byte[] qualBool = "boolean".getBytes();
    byte[] rowKey = Bytes.toBytes("test-row1");
    // Data
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    kvs.add(new KeyValue(rowKey, cfa, qualByte, Bytes.toBytes("123")));
    kvs.add(new KeyValue(rowKey, cfb, qualShort, Bytes.toBytes("456")));
    kvs.add(new KeyValue(rowKey, cfc, qualInt, Bytes.toBytes("789")));
    kvs.add(new KeyValue(rowKey, cfa, qualLong, Bytes.toBytes("1000")));
    kvs.add(new KeyValue(rowKey, cfb, qualFloat, Bytes.toBytes("-0.01")));
    kvs.add(new KeyValue(rowKey, cfc, qualDouble, Bytes.toBytes("5.3")));
    kvs.add(new KeyValue(rowKey, cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive")));
    kvs.add(new KeyValue(rowKey, cfb, qualBool, Bytes.toBytes("true")));
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result r = new Result(kvs);
    Put p = new Put(rowKey);
    p.add(cfa, qualByte, Bytes.toBytes("123"));
    p.add(cfb, qualShort, Bytes.toBytes("456"));
    p.add(cfc, qualInt, Bytes.toBytes("789"));
    p.add(cfa, qualLong, Bytes.toBytes("1000"));
    p.add(cfb, qualFloat, Bytes.toBytes("-0.01"));
    p.add(cfc, qualDouble, Bytes.toBytes("5.3"));
    p.add(cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive"));
    p.add(cfb, qualBool, Bytes.toBytes("true"));
    Object[] expectedFieldsData = {
        new Text("test-row1"), new ByteWritable((byte) 123), new ShortWritable((short) 456),
        new IntWritable(789), new LongWritable(1000), new FloatWritable(-0.01F),
        new DoubleWritable(5.3), new Text("Hadoop, HBase, and Hive"), new BooleanWritable(true) };
    // Create, initialize, and test the SerDe
    HBaseSerDe serDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesI_I();
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
    serDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesI_II();
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
    serDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesI_III();
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
    serDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesI_IV();
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), Text (org.apache.hadoop.io.Text), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), AvroTableProperties (org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties), Properties (java.util.Properties), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable)
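The deserializeAndSerialize helper is not shown in this excerpt. A hedged sketch of the shape such a check typically takes, relying only on the generic SerDe contract (deserialize a Writable, then walk the row through the SerDe's struct object inspector); the class name and the textual comparison are illustrative, not the real helper's code:

import java.util.List;

import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.Writable;

// Hedged sketch of the shape of a deserializeAndSerialize-style helper (the
// real one is not shown here). It relies only on the generic SerDe contract:
// deserialize(Writable) yields a row whose fields are read through the
// SerDe's StructObjectInspector.
public class DeserializeCheckSketch {
    public static void check(AbstractSerDe serDe, Writable input,
            Object[] expectedFieldsData) throws SerDeException {
        Object row = serDe.deserialize(input);
        StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
        List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
        for (int i = 0; i < fieldRefs.size(); i++) {
            Object fieldData = soi.getStructFieldData(row, fieldRefs.get(i));
            // Compare textually to stay independent of the lazy wrapper types;
            // the real test compares Writables directly.
            if (!String.valueOf(fieldData).equals(String.valueOf(expectedFieldsData[i]))) {
                throw new AssertionError("Field " + i + " mismatch: expected "
                    + expectedFieldsData[i] + " found " + fieldData);
            }
        }
    }
}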

Example 25 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

The class TestLazyHBaseObject, method testLazyHBaseRow3.

/**
   * Test the LazyHBaseRow class with a one-to-one/onto mapping between Hive columns and
   * HBase column family/column qualifier pairs. The column types are primitive and fields
   * are stored in binary format in HBase.
   * @throws SerDeException
   */
public void testLazyHBaseRow3() throws SerDeException {
    List<TypeInfo> fieldTypeInfos = TypeInfoUtils.getTypeInfosFromTypeString("string,int,tinyint,smallint,bigint,float,double,string,boolean");
    List<String> fieldNames = Arrays.asList("key", "c_int", "c_byte", "c_short", "c_long", "c_float", "c_double", "c_string", "c_bool");
    Text nullSequence = new Text("\\N");
    String hbaseColumnsMapping = ":key#str,cf-int:cq-int#bin,cf-byte:cq-byte#bin," + "cf-short:cq-short#bin,cf-long:cq-long#bin,cf-float:cq-float#bin,cf-double:cq-double#bin," + "cf-string:cq-string#str,cf-bool:cq-bool#bin";
    ColumnMappings columnMappings = null;
    try {
        columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
    } catch (SerDeException e) {
        fail(e.toString());
    }
    ColumnMapping[] columnsMapping = columnMappings.getColumnsMapping();
    for (int i = 0; i < columnsMapping.length; i++) {
        ColumnMapping colMap = columnsMapping[i];
        if (i == 0 || i == 7) {
            colMap.binaryStorage.add(false);
        } else {
            colMap.binaryStorage.add(true);
        }
    }
    ObjectInspector oi = LazyFactory.createLazyStructInspector(fieldNames, fieldTypeInfos, new byte[] { ' ', ':', '=' }, nullSequence, false, false, (byte) 0);
    LazyHBaseRow o = new LazyHBaseRow((LazySimpleStructObjectInspector) oi, columnMappings);
    byte[] rowKey = "row-key".getBytes();
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    byte[] value;
    for (int i = 1; i < columnsMapping.length; i++) {
        switch(i) {
            case 1:
                value = Bytes.toBytes(1);
                break;
            case 2:
                value = new byte[] { (byte) 1 };
                break;
            case 3:
                value = Bytes.toBytes((short) 1);
                break;
            case 4:
                value = Bytes.toBytes((long) 1);
                break;
            case 5:
                value = Bytes.toBytes(1.0F);
                break;
            case 6:
                value = Bytes.toBytes(1.0);
                break;
            case 7:
                value = "Hadoop, Hive, with HBase storage handler.".getBytes();
                break;
            case 8:
                value = Bytes.toBytes(true);
                break;
            default:
                throw new RuntimeException("Not expected: " + i);
        }
        ColumnMapping colMap = columnsMapping[i];
        kvs.add(new KeyValue(rowKey, colMap.familyNameBytes, colMap.qualifierNameBytes, value));
    }
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result result = new Result(kvs);
    o.init(result);
    List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
    for (int i = 0; i < fieldRefs.size(); i++) {
        Object fieldData = ((StructObjectInspector) oi).getStructFieldData(o, fieldRefs.get(i));
        assertNotNull(fieldData);
        assertTrue(fieldData instanceof LazyPrimitive<?, ?>);
        Writable writable = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
        switch(i) {
            case 0:
                Text text = new Text("row-key");
                assertEquals(text, writable);
                break;
            case 1:
                IntWritable iw = new IntWritable(1);
                assertEquals(iw, writable);
                break;
            case 2:
                ByteWritable bw = new ByteWritable((byte) 1);
                assertEquals(bw, writable);
                break;
            case 3:
                ShortWritable sw = new ShortWritable((short) 1);
                assertEquals(sw, writable);
                break;
            case 4:
                LongWritable lw = new LongWritable(1);
                assertEquals(lw, writable);
                break;
            case 5:
                FloatWritable fw = new FloatWritable(1.0F);
                assertEquals(fw, writable);
                break;
            case 6:
                DoubleWritable dw = new DoubleWritable(1.0);
                assertEquals(dw, writable);
                break;
            case 7:
                Text t = new Text("Hadoop, Hive, with HBase storage handler.");
                assertEquals(t, writable);
                break;
            case 8:
                BooleanWritable boolWritable = new BooleanWritable(true);
                assertEquals(boolWritable, writable);
                break;
            default:
                fail("Error: Unanticipated value in deserializing fields for HBaseSerDe.");
                break;
        }
    }
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), Writable (org.apache.hadoop.io.Writable), LongWritable (org.apache.hadoop.io.LongWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), IntWritable (org.apache.hadoop.io.IntWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), FloatWritable (org.apache.hadoop.io.FloatWritable), LazyString (org.apache.hadoop.hive.serde2.lazy.LazyString), LazyPrimitive (org.apache.hadoop.hive.serde2.lazy.LazyPrimitive), Result (org.apache.hadoop.hbase.client.Result), SerDeException (org.apache.hadoop.hive.serde2.SerDeException), ColumnMapping (org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping), LazySimpleStructObjectInspector (org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), LazyMapObjectInspector (org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), Text (org.apache.hadoop.io.Text), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
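The c_bool column above is mapped with '#bin', so its cell holds HBase's one-byte boolean encoding, and the final assertEquals succeeds because BooleanWritable compares by value. A small sketch of both points (class name illustrative):

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.BooleanWritable;

// The '#bin'-mapped boolean cell holds HBase's single-byte encoding; the
// lazy layer surfaces it as a BooleanWritable with value-based equals().
public class BooleanBytesSketch {
    public static void main(String[] args) {
        byte[] encoded = Bytes.toBytes(true);       // single byte
        boolean decoded = Bytes.toBoolean(encoded); // back to a primitive
        System.out.println(encoded.length + " byte(s), value " + decoded);
        // Value-based equality, as exercised by case 8 above.
        System.out.println(new BooleanWritable(decoded)
            .equals(new BooleanWritable(true))); // true
    }
}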

Aggregations

BooleanWritable (org.apache.hadoop.io.BooleanWritable): 63
IntWritable (org.apache.hadoop.io.IntWritable): 41
LongWritable (org.apache.hadoop.io.LongWritable): 40
FloatWritable (org.apache.hadoop.io.FloatWritable): 37
Text (org.apache.hadoop.io.Text): 31
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 27
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 26
BytesWritable (org.apache.hadoop.io.BytesWritable): 26
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 25
Writable (org.apache.hadoop.io.Writable): 17
Test (org.junit.Test): 17
ArrayList (java.util.ArrayList): 15
Configuration (org.apache.hadoop.conf.Configuration): 12
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 12
Random (java.util.Random): 11
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 10
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 9
KeyValue (org.apache.hadoop.hbase.KeyValue): 7
Result (org.apache.hadoop.hbase.client.Result): 7
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 7