Example 86 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class TestHBaseSerDe method testHBaseSerDeWithTimestamp.

@Test
public void testHBaseSerDeWithTimestamp() throws SerDeException {
    // Create the SerDe
    HBaseSerDe serDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesI_I();
    long putTimestamp = 1;
    tbl.setProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP, Long.toString(putTimestamp));
    serDe.initialize(conf, tbl, null);
    byte[] cfa = "cola".getBytes();
    byte[] cfb = "colb".getBytes();
    byte[] cfc = "colc".getBytes();
    byte[] qualByte = "byte".getBytes();
    byte[] qualShort = "short".getBytes();
    byte[] qualInt = "int".getBytes();
    byte[] qualLong = "long".getBytes();
    byte[] qualFloat = "float".getBytes();
    byte[] qualDouble = "double".getBytes();
    byte[] qualString = "string".getBytes();
    byte[] qualBool = "boolean".getBytes();
    byte[] rowKey = Bytes.toBytes("test-row1");
    // Data
    List<Cell> kvs = new ArrayList<Cell>();
    kvs.add(new KeyValue(rowKey, cfa, qualByte, Bytes.toBytes("123")));
    kvs.add(new KeyValue(rowKey, cfb, qualShort, Bytes.toBytes("456")));
    kvs.add(new KeyValue(rowKey, cfc, qualInt, Bytes.toBytes("789")));
    kvs.add(new KeyValue(rowKey, cfa, qualLong, Bytes.toBytes("1000")));
    kvs.add(new KeyValue(rowKey, cfb, qualFloat, Bytes.toBytes("-0.01")));
    kvs.add(new KeyValue(rowKey, cfc, qualDouble, Bytes.toBytes("5.3")));
    kvs.add(new KeyValue(rowKey, cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive")));
    kvs.add(new KeyValue(rowKey, cfb, qualBool, Bytes.toBytes("true")));
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result r = Result.create(kvs);
    Put p = new Put(rowKey, putTimestamp);
    p.addColumn(cfa, qualByte, Bytes.toBytes("123"));
    p.addColumn(cfb, qualShort, Bytes.toBytes("456"));
    p.addColumn(cfc, qualInt, Bytes.toBytes("789"));
    p.addColumn(cfa, qualLong, Bytes.toBytes("1000"));
    p.addColumn(cfb, qualFloat, Bytes.toBytes("-0.01"));
    p.addColumn(cfc, qualDouble, Bytes.toBytes("5.3"));
    p.addColumn(cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive"));
    p.addColumn(cfb, qualBool, Bytes.toBytes("true"));
    Object[] expectedFieldsData = {
        new Text("test-row1"),
        new ByteWritable((byte) 123),
        new ShortWritable((short) 456),
        new IntWritable(789),
        new LongWritable(1000),
        new FloatWritable(-0.01F),
        new DoubleWritable(5.3),
        new Text("Hadoop, HBase, and Hive"),
        new BooleanWritable(true)
    };
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) AvroTableProperties(org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties) Properties(java.util.Properties) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) LongWritable(org.apache.hadoop.io.LongWritable) Cell(org.apache.hadoop.hbase.Cell) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
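
All of these examples revolve around ShortWritable, Hive's mutable Writable wrapper for Java short values. A minimal standalone sketch of its core API (constructor, get, and set), kept separate from the test above; the demo class name is illustrative:

import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class ShortWritableDemo {
    public static void main(String[] args) {
        // Construct with an initial value and read it back.
        ShortWritable sw = new ShortWritable((short) 456);
        System.out.println(sw.get()); // 456
        // Writable instances are typically reused across records,
        // so the value can be mutated in place.
        sw.set((short) -1);
        System.out.println(sw.get()); // -1
    }
}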

Example 87 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class HiveHBaseInputFormatUtil method getConstantVal.

static byte[] getConstantVal(Object writable, PrimitiveObjectInspector poi, boolean isKeyBinary) throws IOException {
    if (!isKeyBinary) {
        // The key is stored in text format, so get the bytes representation of
        // the constant in text format as well.
        byte[] startRow;
        ByteStream.Output serializeStream = new ByteStream.Output();
        LazyUtils.writePrimitiveUTF8(serializeStream, writable, poi, false, (byte) 0, null);
        startRow = new byte[serializeStream.getLength()];
        System.arraycopy(serializeStream.getData(), 0, startRow, 0, serializeStream.getLength());
        return startRow;
    }
    PrimitiveCategory pc = poi.getPrimitiveCategory();
    switch (pc) {
        case INT:
            return Bytes.toBytes(((IntWritable) writable).get());
        case BOOLEAN:
            return Bytes.toBytes(((BooleanWritable) writable).get());
        case LONG:
            return Bytes.toBytes(((LongWritable) writable).get());
        case FLOAT:
            return Bytes.toBytes(((FloatWritable) writable).get());
        case DOUBLE:
            return Bytes.toBytes(((DoubleWritable) writable).get());
        case SHORT:
            return Bytes.toBytes(((ShortWritable) writable).get());
        case STRING:
            return Bytes.toBytes(((Text) writable).toString());
        case BYTE:
            return Bytes.toBytes(((ByteWritable) writable).get());
        default:
            throw new IOException("Type not supported " + pc);
    }
}
Also used : ByteStream(org.apache.hadoop.hive.serde2.ByteStream) IOException(java.io.IOException) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)
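
The two branches above produce very different byte arrays for the same constant. A minimal sketch contrasting them for a short key, using only the HBase Bytes utility (the demo class is illustrative; the text branch in getConstantVal itself goes through LazyUtils.writePrimitiveUTF8 rather than plain string conversion):

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class KeyEncodingDemo {
    public static void main(String[] args) {
        ShortWritable key = new ShortWritable((short) 456);
        // Binary branch: a fixed-width 2-byte big-endian encoding.
        byte[] binary = Bytes.toBytes(key.get());
        // Text branch: the decimal string rendering of the same value.
        byte[] text = Bytes.toBytes(Short.toString(key.get()));
        System.out.println(binary.length + " bytes vs " + text.length + " bytes"); // 2 bytes vs 3 bytes
    }
}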

Example 88 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class TestDruidSerDe method testDruidObjectSerializerwithNullTimestamp.

@Test
public void testDruidObjectSerializerwithNullTimestamp() throws Exception {
    // Create, initialize, and test the SerDe
    DruidSerDe serDe = new DruidSerDe();
    Configuration conf = new Configuration();
    Properties tbl;
    // Mixed source (all types)
    tbl = createPropertiesSource(COLUMN_NAMES, COLUMN_TYPES);
    serDe.initialize(conf, tbl, null);
    Object[] row = new Object[] {
        null, // timestamp column: the null that should trigger the failure below
        new Text("dim1_val"),
        new HiveCharWritable(new HiveChar("dim2_v", 6)),
        new HiveVarcharWritable(new HiveVarchar("dim3_val", 8)),
        new DoubleWritable(10669.3D),
        new FloatWritable(10669.45F),
        new LongWritable(1113939),
        new IntWritable(1112123),
        new ShortWritable((short) 12),
        new ByteWritable((byte) 0),
        null // granularity
    };
    expectedEx.expect(NullPointerException.class);
    expectedEx.expectMessage("Timestamp column cannot have null value");
    // should fail as timestamp is null
    serializeObject(tbl, serDe, row, DRUID_WRITABLE);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) QTestDruidSerDe(org.apache.hadoop.hive.druid.QTestDruidSerDe) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) Properties(java.util.Properties) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) ArgumentMatchers.anyObject(org.mockito.ArgumentMatchers.anyObject) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
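
The expectedEx field used above is a JUnit 4 ExpectedException rule declared elsewhere in the test class. A minimal self-contained sketch of that pattern (the demo class and test body are illustrative):

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class ExpectedExceptionDemo {
    // The rule is evaluated around each test method and intercepts the
    // exception the test declares it expects.
    @Rule
    public ExpectedException expectedEx = ExpectedException.none();

    @Test
    public void failsWithNullTimestamp() {
        expectedEx.expect(NullPointerException.class);
        expectedEx.expectMessage("Timestamp column cannot have null value");
        // Code after the expectations must throw a matching exception,
        // otherwise the test fails.
        throw new NullPointerException("Timestamp column cannot have null value");
    }
}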

Example 89 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class TypedBytesRecordReader method write.

private void write(int pos, Writable inpw) throws IOException {
    String typ = columnTypes.get(pos);
    Writable w = (Writable) converters.get(pos).convert(inpw);
    if (typ.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)) {
        tbOut.writeBoolean((BooleanWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME)) {
        tbOut.writeByte((ByteWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME)) {
        tbOut.writeShort((ShortWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME)) {
        tbOut.writeInt((IntWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME)) {
        tbOut.writeLong((LongWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME)) {
        tbOut.writeFloat((FloatWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME)) {
        tbOut.writeDouble((DoubleWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)) {
        tbOut.writeText((Text) w);
    } else {
        assert false;
    }
}
Also used : FloatWritable(org.apache.hadoop.io.FloatWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) Text(org.apache.hadoop.io.Text) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)
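
The dispatch in write() keys on Hive's canonical type-name strings from serdeConstants. A minimal sketch printing a few of them (the demo class is illustrative):

import org.apache.hadoop.hive.serde.serdeConstants;

public class TypeNameDemo {
    public static void main(String[] args) {
        // These are the lowercase SQL type names write() compares against.
        System.out.println(serdeConstants.SMALLINT_TYPE_NAME); // smallint
        System.out.println(serdeConstants.TINYINT_TYPE_NAME);  // tinyint
        System.out.println(serdeConstants.BIGINT_TYPE_NAME);   // bigint
    }
}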

Example 90 with ShortWritable

use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.

the class TypedBytesRecordReader method next.

public int next(Writable data) throws IOException {
    int pos = 0;
    barrStr.reset();
    while (true) {
        Type type = tbIn.readTypeCode();
        // the stream was empty
        if (type == null) {
            return -1;
        }
        if (type == Type.ENDOFRECORD) {
            tbOut.writeEndOfRecord();
            if (barrStr.getLength() > 0) {
                ((BytesWritable) data).set(barrStr.getData(), 0, barrStr.getLength());
            }
            return barrStr.getLength();
        }
        if (pos >= row.size()) {
            Writable wrt = allocateWritable(type);
            assert pos == row.size();
            assert pos == rowTypeName.size();
            row.add(wrt);
            rowTypeName.add(type.name());
            String typeName = typedBytesToTypeName.get(type);
            PrimitiveTypeInfo srcTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
            srcOIns.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(srcTypeInfo));
            converters.add(ObjectInspectorConverters.getConverter(srcOIns.get(pos), dstOIns.get(pos)));
        } else {
            if (!rowTypeName.get(pos).equals(type.name())) {
                throw new RuntimeException("datatype of row changed from " + rowTypeName.get(pos) + " to " + type.name());
            }
        }
        Writable w = row.get(pos);
        switch (type) {
            case BYTE:
                tbIn.readByte((ByteWritable) w);
                break;
            case BOOL:
                tbIn.readBoolean((BooleanWritable) w);
                break;
            case INT:
                tbIn.readInt((IntWritable) w);
                break;
            case SHORT:
                tbIn.readShort((ShortWritable) w);
                break;
            case LONG:
                tbIn.readLong((LongWritable) w);
                break;
            case FLOAT:
                tbIn.readFloat((FloatWritable) w);
                break;
            case DOUBLE:
                tbIn.readDouble((DoubleWritable) w);
                break;
            case STRING:
                tbIn.readText((Text) w);
                break;
            default:
                // should never get here
                assert false;
        }
        write(pos, w);
        pos++;
    }
}
Also used : ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
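
next() builds one converter per column with ObjectInspectorConverters.getConverter, pairing a source object inspector derived from the typed-bytes type code with the destination inspector for the table column. A minimal sketch of such a conversion from Text to ShortWritable (the demo class and the concrete inspector pairing are illustrative):

import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class ConverterDemo {
    public static void main(String[] args) {
        // Source: writable string OI; destination: writable short OI,
        // mirroring the srcOIns/dstOIns pairing in next().
        Converter conv = ObjectInspectorConverters.getConverter(
                PrimitiveObjectInspectorFactory.writableStringObjectInspector,
                PrimitiveObjectInspectorFactory.writableShortObjectInspector);
        ShortWritable out = (ShortWritable) conv.convert(new Text("456"));
        System.out.println(out.get()); // 456
    }
}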

Aggregations

ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 94
IntWritable (org.apache.hadoop.io.IntWritable): 62
LongWritable (org.apache.hadoop.io.LongWritable): 54
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 53
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 50
Test (org.junit.Test): 48
Text (org.apache.hadoop.io.Text): 43
FloatWritable (org.apache.hadoop.io.FloatWritable): 41
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 38
BytesWritable (org.apache.hadoop.io.BytesWritable): 30
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 29
ArrayList (java.util.ArrayList): 25
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 23
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 18
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 17
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 17
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 17
HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 17
Writable (org.apache.hadoop.io.Writable): 17
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 15