
Example 46 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

From the class TypedBytesRecordReader, the method write:

private void write(int pos, Writable inpw) throws IOException {
    // Look up the declared Hive type of this column, convert the incoming
    // Writable with the column's registered converter, then dispatch on the
    // type name below.
    String typ = columnTypes.get(pos);
    Writable w = (Writable) converters.get(pos).convert(inpw);
    if (typ.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)) {
        tbOut.writeBoolean((BooleanWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME)) {
        tbOut.writeByte((ByteWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME)) {
        tbOut.writeShort((ShortWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME)) {
        tbOut.writeInt((IntWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME)) {
        tbOut.writeLong((LongWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME)) {
        tbOut.writeFloat((FloatWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME)) {
        tbOut.writeDouble((DoubleWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)) {
        tbOut.writeText((Text) w);
    } else {
        // only the primitive types handled above are supported here
        assert false;
    }
}
Also used : FloatWritable(org.apache.hadoop.io.FloatWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) Text(org.apache.hadoop.io.Text)
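The method above depends on Hive-internal state (columnTypes, converters, and the typed-bytes writer tbOut), so it cannot run on its own. As a minimal, self-contained sketch of the underlying Writable round trip, assuming only stock Hadoop I/O classes (the class name BooleanWritableRoundTrip is invented for illustration):

import java.io.IOException;

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class BooleanWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        BooleanWritable original = new BooleanWritable(true);

        // Serialize: a BooleanWritable writes a single byte to any DataOutput.
        DataOutputBuffer out = new DataOutputBuffer();
        original.write(out);

        // Deserialize the same bytes into a fresh instance.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        BooleanWritable copy = new BooleanWritable();
        copy.readFields(in);

        System.out.println(copy.get());  // prints: true
    }
}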

Example 47 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

From the class TestLazyHBaseObject, the method testLazyHBaseCellMap3:

/**
   * Test the LazyHBaseCellMap class for the case where both the key and the value in the family
   * map are stored in binary format using the appropriate LazyPrimitive objects.
   * @throws SerDeException
   */
public void testLazyHBaseCellMap3() throws SerDeException {
    Text nullSequence = new Text("\\N");
    TypeInfo mapBinaryIntKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<int,int>");
    ObjectInspector oi = LazyFactory.createLazyObjectInspector(mapBinaryIntKeyValue, new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    LazyHBaseCellMap hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    byte[] rowKey = "row-key".getBytes();
    byte[] cfInt = "cf-int".getBytes();
    kvs.add(new KeyValue(rowKey, cfInt, Bytes.toBytes(1), Bytes.toBytes(1)));
    Result result = new Result(kvs);
    List<Boolean> mapBinaryStorage = new ArrayList<Boolean>();
    mapBinaryStorage.add(true);
    mapBinaryStorage.add(true);
    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
    IntWritable expectedIntValue = new IntWritable(1);
    LazyPrimitive<?, ?> lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfInt, Bytes.toBytes(Integer.MIN_VALUE), Bytes.toBytes(Integer.MIN_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
    expectedIntValue = new IntWritable(Integer.MIN_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfInt, Bytes.toBytes(Integer.MAX_VALUE), Bytes.toBytes(Integer.MAX_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfInt, mapBinaryStorage);
    expectedIntValue = new IntWritable(Integer.MAX_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
    assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
    TypeInfo mapBinaryByteKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<tinyint,tinyint>");
    oi = LazyFactory.createLazyObjectInspector(mapBinaryByteKeyValue, new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
    byte[] cfByte = "cf-byte".getBytes();
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfByte, new byte[] { (byte) 1 }, new byte[] { (byte) 1 }));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
    ByteWritable expectedByteValue = new ByteWritable((byte) 1);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfByte, new byte[] { Byte.MIN_VALUE }, new byte[] { Byte.MIN_VALUE }));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
    expectedByteValue = new ByteWritable(Byte.MIN_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfByte, new byte[] { Byte.MAX_VALUE }, new byte[] { Byte.MAX_VALUE }));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfByte, mapBinaryStorage);
    expectedByteValue = new ByteWritable(Byte.MAX_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
    assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
    TypeInfo mapBinaryShortKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<smallint,smallint>");
    oi = LazyFactory.createLazyObjectInspector(mapBinaryShortKeyValue, new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
    byte[] cfShort = "cf-short".getBytes();
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes((short) 1), Bytes.toBytes((short) 1)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
    ShortWritable expectedShortValue = new ShortWritable((short) 1);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes(Short.MIN_VALUE), Bytes.toBytes(Short.MIN_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
    expectedShortValue = new ShortWritable(Short.MIN_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes(Short.MAX_VALUE), Bytes.toBytes(Short.MAX_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfShort, mapBinaryStorage);
    expectedShortValue = new ShortWritable(Short.MAX_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
    assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
    TypeInfo mapBinaryLongKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<bigint,bigint>");
    oi = LazyFactory.createLazyObjectInspector(mapBinaryLongKeyValue, new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
    byte[] cfLong = "cf-long".getBytes();
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfLong, Bytes.toBytes((long) 1), Bytes.toBytes((long) 1)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfLong, mapBinaryStorage);
    LongWritable expectedLongValue = new LongWritable(1);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedLongValue);
    assertEquals(expectedLongValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfLong, Bytes.toBytes(Long.MIN_VALUE), Bytes.toBytes(Long.MIN_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfLong, mapBinaryStorage);
    expectedLongValue = new LongWritable(Long.MIN_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedLongValue);
    assertEquals(expectedLongValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfLong, Bytes.toBytes(Long.MAX_VALUE), Bytes.toBytes(Long.MAX_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfLong, mapBinaryStorage);
    expectedLongValue = new LongWritable(Long.MAX_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedLongValue);
    assertEquals(expectedLongValue, lazyPrimitive.getWritableObject());
    TypeInfo mapBinaryFloatKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<float,float>");
    oi = LazyFactory.createLazyObjectInspector(mapBinaryFloatKeyValue, new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
    byte[] cfFloat = "cf-float".getBytes();
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) 1.0F), Bytes.toBytes((float) 1.0F)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
    FloatWritable expectedFloatValue = new FloatWritable(1.0F);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MIN_VALUE), Bytes.toBytes((float) Float.MIN_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
    expectedFloatValue = new FloatWritable(Float.MIN_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MAX_VALUE), Bytes.toBytes((float) Float.MAX_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
    expectedFloatValue = new FloatWritable(Float.MAX_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
    assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
    TypeInfo mapBinaryDoubleKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<double,double>");
    oi = LazyFactory.createLazyObjectInspector(mapBinaryDoubleKeyValue, new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
    byte[] cfDouble = "cf-double".getBytes();
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(1.0), Bytes.toBytes(1.0)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
    DoubleWritable expectedDoubleValue = new DoubleWritable(1.0);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(Double.MIN_VALUE), Bytes.toBytes(Double.MIN_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
    expectedDoubleValue = new DoubleWritable(Double.MIN_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(Double.MAX_VALUE), Bytes.toBytes(Double.MAX_VALUE)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
    expectedDoubleValue = new DoubleWritable(Double.MAX_VALUE);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
    assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
    TypeInfo mapBinaryBooleanKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<boolean,boolean>");
    oi = LazyFactory.createLazyObjectInspector(mapBinaryBooleanKeyValue, new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
    byte[] cfBoolean = "cf-boolean".getBytes();
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfBoolean, Bytes.toBytes(false), Bytes.toBytes(false)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfBoolean, mapBinaryStorage);
    BooleanWritable expectedBooleanValue = new BooleanWritable(false);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedBooleanValue);
    assertEquals(expectedBooleanValue, lazyPrimitive.getWritableObject());
    kvs.clear();
    kvs.add(new KeyValue(rowKey, cfBoolean, Bytes.toBytes(true), Bytes.toBytes(true)));
    result = new Result(kvs);
    hbaseCellMap.init(result, cfBoolean, mapBinaryStorage);
    expectedBooleanValue = new BooleanWritable(true);
    lazyPrimitive = (LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedBooleanValue);
    assertEquals(expectedBooleanValue, lazyPrimitive.getWritableObject());
}
Also used : LazySimpleStructObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) LazyMapObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) LazyPrimitive(org.apache.hadoop.hive.serde2.lazy.LazyPrimitive) Result(org.apache.hadoop.hbase.client.Result) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)
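The test passes because the map keys and values are stored in HBase's fixed-width binary encodings, which the LazyPrimitive objects decode. A minimal sketch of that encoding layer, assuming only the org.apache.hadoop.hbase.util.Bytes utility (the class name BinaryEncodingSketch is invented):

import org.apache.hadoop.hbase.util.Bytes;

public class BinaryEncodingSketch {
    public static void main(String[] args) {
        // Each primitive gets a fixed-width, big-endian binary form:
        // 4 bytes for an int, 1 byte for a boolean, and so on.
        byte[] intBytes = Bytes.toBytes(Integer.MIN_VALUE);
        byte[] boolBytes = Bytes.toBytes(true);

        System.out.println(intBytes.length);             // 4
        System.out.println(Bytes.toInt(intBytes));       // -2147483648
        System.out.println(Bytes.toBoolean(boolBytes));  // true
    }
}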

Example 48 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

From the class TestHBaseSerDe, the method testHBaseSerDeWithTimestamp:

public void testHBaseSerDeWithTimestamp() throws SerDeException {
    // Create the SerDe
    HBaseSerDe serDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesI_I();
    long putTimestamp = 1;
    tbl.setProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP, Long.toString(putTimestamp));
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    byte[] cfa = "cola".getBytes();
    byte[] cfb = "colb".getBytes();
    byte[] cfc = "colc".getBytes();
    byte[] qualByte = "byte".getBytes();
    byte[] qualShort = "short".getBytes();
    byte[] qualInt = "int".getBytes();
    byte[] qualLong = "long".getBytes();
    byte[] qualFloat = "float".getBytes();
    byte[] qualDouble = "double".getBytes();
    byte[] qualString = "string".getBytes();
    byte[] qualBool = "boolean".getBytes();
    byte[] rowKey = Bytes.toBytes("test-row1");
    // Data
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    kvs.add(new KeyValue(rowKey, cfa, qualByte, Bytes.toBytes("123")));
    kvs.add(new KeyValue(rowKey, cfb, qualShort, Bytes.toBytes("456")));
    kvs.add(new KeyValue(rowKey, cfc, qualInt, Bytes.toBytes("789")));
    kvs.add(new KeyValue(rowKey, cfa, qualLong, Bytes.toBytes("1000")));
    kvs.add(new KeyValue(rowKey, cfb, qualFloat, Bytes.toBytes("-0.01")));
    kvs.add(new KeyValue(rowKey, cfc, qualDouble, Bytes.toBytes("5.3")));
    kvs.add(new KeyValue(rowKey, cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive")));
    kvs.add(new KeyValue(rowKey, cfb, qualBool, Bytes.toBytes("true")));
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result r = new Result(kvs);
    Put p = new Put(rowKey, putTimestamp);
    p.add(cfa, qualByte, Bytes.toBytes("123"));
    p.add(cfb, qualShort, Bytes.toBytes("456"));
    p.add(cfc, qualInt, Bytes.toBytes("789"));
    p.add(cfa, qualLong, Bytes.toBytes("1000"));
    p.add(cfb, qualFloat, Bytes.toBytes("-0.01"));
    p.add(cfc, qualDouble, Bytes.toBytes("5.3"));
    p.add(cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive"));
    p.add(cfb, qualBool, Bytes.toBytes("true"));
    Object[] expectedFieldsData = { new Text("test-row1"), new ByteWritable((byte) 123), new ShortWritable((short) 456), new IntWritable(789), new LongWritable(1000), new FloatWritable(-0.01F), new DoubleWritable(5.3), new Text("Hadoop, HBase, and Hive"), new BooleanWritable(true) };
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) AvroTableProperties(org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties) Properties(java.util.Properties) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)
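The step that distinguishes this test is constructing the Put with an explicit timestamp, so every cell added to it carries that timestamp instead of a server-assigned current time. A minimal sketch of just that step, using the same pre-1.0 HBase client API (Put.add) as the test; the class name and values are illustrative:

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class TimestampedPutSketch {
    public static void main(String[] args) {
        byte[] rowKey = Bytes.toBytes("test-row1");

        // All cells added below inherit the explicit timestamp (1L).
        Put p = new Put(rowKey, 1L);
        p.add(Bytes.toBytes("cola"), Bytes.toBytes("byte"), Bytes.toBytes("123"));
        p.add(Bytes.toBytes("colb"), Bytes.toBytes("boolean"), Bytes.toBytes("true"));
    }
}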

Example 49 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

From the class TestHBaseSerDe, the method testHBaseSerDeII:

public void testHBaseSerDeII() throws SerDeException {
    byte[] cfa = "cfa".getBytes();
    byte[] cfb = "cfb".getBytes();
    byte[] cfc = "cfc".getBytes();
    byte[] qualByte = "byte".getBytes();
    byte[] qualShort = "short".getBytes();
    byte[] qualInt = "int".getBytes();
    byte[] qualLong = "long".getBytes();
    byte[] qualFloat = "float".getBytes();
    byte[] qualDouble = "double".getBytes();
    byte[] qualString = "string".getBytes();
    byte[] qualBool = "boolean".getBytes();
    byte[] rowKey = Bytes.toBytes("test-row-2");
    // Data
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    kvs.add(new KeyValue(rowKey, cfa, qualByte, new byte[] { Byte.MIN_VALUE }));
    kvs.add(new KeyValue(rowKey, cfb, qualShort, Bytes.toBytes(Short.MIN_VALUE)));
    kvs.add(new KeyValue(rowKey, cfc, qualInt, Bytes.toBytes(Integer.MIN_VALUE)));
    kvs.add(new KeyValue(rowKey, cfa, qualLong, Bytes.toBytes(Long.MIN_VALUE)));
    kvs.add(new KeyValue(rowKey, cfb, qualFloat, Bytes.toBytes(Float.MIN_VALUE)));
    kvs.add(new KeyValue(rowKey, cfc, qualDouble, Bytes.toBytes(Double.MAX_VALUE)));
    kvs.add(new KeyValue(rowKey, cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive Again!")));
    kvs.add(new KeyValue(rowKey, cfb, qualBool, Bytes.toBytes(false)));
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result r = new Result(kvs);
    Put p = new Put(rowKey);
    p.add(cfa, qualByte, new byte[] { Byte.MIN_VALUE });
    p.add(cfb, qualShort, Bytes.toBytes(Short.MIN_VALUE));
    p.add(cfc, qualInt, Bytes.toBytes(Integer.MIN_VALUE));
    p.add(cfa, qualLong, Bytes.toBytes(Long.MIN_VALUE));
    p.add(cfb, qualFloat, Bytes.toBytes(Float.MIN_VALUE));
    p.add(cfc, qualDouble, Bytes.toBytes(Double.MAX_VALUE));
    p.add(cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive Again!"));
    p.add(cfb, qualBool, Bytes.toBytes(false));
    Object[] expectedFieldsData = { new Text("test-row-2"), new ByteWritable(Byte.MIN_VALUE), new ShortWritable(Short.MIN_VALUE), new IntWritable(Integer.MIN_VALUE), new LongWritable(Long.MIN_VALUE), new FloatWritable(Float.MIN_VALUE), new DoubleWritable(Double.MAX_VALUE), new Text("Hadoop, HBase, and Hive Again!"), new BooleanWritable(false) };
    // Create, initialize, and test the SerDe
    HBaseSerDe serDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesII_I();
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
    serDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesII_II();
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
    serDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesII_III();
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) AvroTableProperties(org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties) Properties(java.util.Properties) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)
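Unlike the timestamp test, which stored every value as a UTF-8 string ("123", "true"), this test stores native binary encodings (Bytes.toBytes(Short.MIN_VALUE), and so on), which is why it can exercise MIN_VALUE and MAX_VALUE exactly. A minimal sketch of the two storage formats side by side (class name invented):

import org.apache.hadoop.hbase.util.Bytes;

public class StringVsBinarySketch {
    public static void main(String[] args) {
        // String storage: the decimal digits themselves become UTF-8 bytes,
        // and the SerDe has to parse them back.
        byte[] asText = Bytes.toBytes("789");  // 3 bytes: '7' '8' '9'
        int parsedBack = Integer.parseInt(Bytes.toString(asText));

        // Binary storage: a fixed-width, big-endian encoding of the value.
        byte[] asBinary = Bytes.toBytes(789);  // always 4 bytes for an int

        System.out.println(parsedBack);             // 789
        System.out.println(Bytes.toInt(asBinary));  // 789
    }
}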

Example 50 with BooleanWritable

Use of org.apache.hadoop.io.BooleanWritable in project hive by apache.

From the class VerifyFastRow, the method verifyDeserializeRead:

public static void verifyDeserializeRead(DeserializeRead deserializeRead, PrimitiveTypeInfo primitiveTypeInfo, Writable writable) throws IOException {
    boolean isNull;
    isNull = !deserializeRead.readNextField();
    if (isNull) {
        if (writable != null) {
            TestCase.fail(deserializeRead.getClass().getName() + " field reports null but object is not null " + "(class " + writable.getClass().getName() + ", " + writable.toString() + ")");
        }
        return;
    } else if (writable == null) {
        TestCase.fail("Field report not null but object is null");
    }
    switch(primitiveTypeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
            {
                boolean value = deserializeRead.currentBoolean;
                if (!(writable instanceof BooleanWritable)) {
                    TestCase.fail("Boolean expected writable not Boolean");
                }
                boolean expected = ((BooleanWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case BYTE:
            {
                byte value = deserializeRead.currentByte;
                if (!(writable instanceof ByteWritable)) {
                    TestCase.fail("Byte expected writable not Byte");
                }
                byte expected = ((ByteWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                }
            }
            break;
        case SHORT:
            {
                short value = deserializeRead.currentShort;
                if (!(writable instanceof ShortWritable)) {
                    TestCase.fail("Short expected writable not Short");
                }
                short expected = ((ShortWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case INT:
            {
                int value = deserializeRead.currentInt;
                if (!(writable instanceof IntWritable)) {
                    TestCase.fail("Integer expected writable not Integer");
                }
                int expected = ((IntWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case LONG:
            {
                long value = deserializeRead.currentLong;
                if (!(writable instanceof LongWritable)) {
                    TestCase.fail("Long expected writable not Long");
                }
                long expected = ((LongWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case FLOAT:
            {
                float value = deserializeRead.currentFloat;
                if (!(writable instanceof FloatWritable)) {
                    TestCase.fail("Float expected writable not Float");
                }
                float expected = ((FloatWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case DOUBLE:
            {
                double value = deserializeRead.currentDouble;
                if (!(writable instanceof DoubleWritable)) {
                    TestCase.fail("Double expected writable not Double");
                }
                double expected = ((DoubleWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case STRING:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                String expected = ((Text) writable).toString();
                if (!string.equals(expected)) {
                    TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
                }
            }
            break;
        case CHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                HiveChar expected = ((HiveCharWritable) writable).getHiveChar();
                if (!hiveChar.equals(expected)) {
                    TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
                }
            }
            break;
        case VARCHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                HiveVarchar expected = ((HiveVarcharWritable) writable).getHiveVarchar();
                if (!hiveVarchar.equals(expected)) {
                    TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
                }
            }
            break;
        case DECIMAL:
            {
                HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                if (value == null) {
                    TestCase.fail("Decimal field evaluated to NULL");
                }
                HiveDecimal expected = ((HiveDecimalWritable) writable).getHiveDecimal();
                if (!value.equals(expected)) {
                    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                    int precision = decimalTypeInfo.getPrecision();
                    int scale = decimalTypeInfo.getScale();
                    TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                }
            }
            break;
        case DATE:
            {
                Date value = deserializeRead.currentDateWritable.get();
                Date expected = ((DateWritable) writable).get();
                if (!value.equals(expected)) {
                    TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case TIMESTAMP:
            {
                Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                Timestamp expected = ((TimestampWritable) writable).getTimestamp();
                if (!value.equals(expected)) {
                    TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_YEAR_MONTH:
            {
                HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) writable).getHiveIntervalYearMonth();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_DAY_TIME:
            {
                HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) writable).getHiveIntervalDayTime();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case BINARY:
            {
                byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                BytesWritable bytesWritable = (BytesWritable) writable;
                byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
                if (byteArray.length != expected.length) {
                    TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                }
                for (int b = 0; b < byteArray.length; b++) {
                    if (byteArray[b] != expected[b]) {
                        TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                    }
                }
            }
            break;
        default:
            throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) Date(java.sql.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
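Every case in the switch above follows the same three-step pattern: check the Writable's concrete type, unwrap the expected value, and compare it with what the reader produced. A minimal sketch of that pattern in isolation, with a hypothetical checkBoolean helper (not part of VerifyFastRow):

import junit.framework.TestCase;

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.Writable;

public class WritableCheckSketch {
    // Fail fast if the Writable is not the expected concrete type,
    // then compare the unwrapped value against the value read back.
    static void checkBoolean(Writable writable, boolean readValue) {
        if (!(writable instanceof BooleanWritable)) {
            TestCase.fail("Boolean expected, got " + writable.getClass().getName());
        }
        boolean expected = ((BooleanWritable) writable).get();
        if (readValue != expected) {
            TestCase.fail("Boolean field mismatch (expected " + expected
                + " found " + readValue + ")");
        }
    }

    public static void main(String[] args) {
        checkBoolean(new BooleanWritable(true), true);  // passes silently
    }
}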

Aggregations

BooleanWritable (org.apache.hadoop.io.BooleanWritable): 63
IntWritable (org.apache.hadoop.io.IntWritable): 41
LongWritable (org.apache.hadoop.io.LongWritable): 40
FloatWritable (org.apache.hadoop.io.FloatWritable): 37
Text (org.apache.hadoop.io.Text): 31
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 27
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 26
BytesWritable (org.apache.hadoop.io.BytesWritable): 26
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 25
Writable (org.apache.hadoop.io.Writable): 17
Test (org.junit.Test): 17
ArrayList (java.util.ArrayList): 15
Configuration (org.apache.hadoop.conf.Configuration): 12
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 12
Random (java.util.Random): 11
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 10
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 9
KeyValue (org.apache.hadoop.hbase.KeyValue): 7
Result (org.apache.hadoop.hbase.client.Result): 7
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 7