
Example 86 with FloatWritable

Use of org.apache.hadoop.io.FloatWritable in project hive by apache.

The class TypedBytesRecordReader, method write.

private void write(int pos, Writable inpw) throws IOException {
    String typ = columnTypes.get(pos);
    Writable w = (Writable) converters.get(pos).convert(inpw);
    if (typ.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)) {
        tbOut.writeBoolean((BooleanWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME)) {
        tbOut.writeByte((ByteWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME)) {
        tbOut.writeShort((ShortWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME)) {
        tbOut.writeInt((IntWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME)) {
        tbOut.writeLong((LongWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME)) {
        tbOut.writeFloat((FloatWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME)) {
        tbOut.writeDouble((DoubleWritable) w);
    } else if (typ.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)) {
        tbOut.writeText((Text) w);
    } else {
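        // unreachable: every column type supported by typed-bytes output is handled above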
        assert false;
    }
}
Also used: FloatWritable (org.apache.hadoop.io.FloatWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), Writable (org.apache.hadoop.io.Writable), LongWritable (org.apache.hadoop.io.LongWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), IntWritable (org.apache.hadoop.io.IntWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), Text (org.apache.hadoop.io.Text)
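
For reference, FloatWritable is a thin mutable wrapper around a Java float that implements Hadoop's Writable contract, which is what lets tbOut.writeFloat serialize it. Below is a minimal sketch of that round trip using only hadoop-common; the class name and stream setup are illustrative, not taken from the Hive source above.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.FloatWritable;

public class FloatWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        FloatWritable in = new FloatWritable(10669.45F);
        // write(DataOutput) emits the raw 4-byte IEEE 754 representation
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        in.write(new DataOutputStream(bos));
        // readFields(DataInput) refills a reusable instance, the allocation-avoiding
        // idiom the row-buffer code in these examples also relies on
        FloatWritable out = new FloatWritable();
        out.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(out.get()); // 10669.45
    }
}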

Example 87 with FloatWritable

Use of org.apache.hadoop.io.FloatWritable in project hive by apache.

The class TypedBytesRecordReader, method next.

public int next(Writable data) throws IOException {
    int pos = 0;
    barrStr.reset();
    while (true) {
        Type type = tbIn.readTypeCode();
        // it was an empty stream
        if (type == null) {
            return -1;
        }
        if (type == Type.ENDOFRECORD) {
            tbOut.writeEndOfRecord();
            if (barrStr.getLength() > 0) {
                ((BytesWritable) data).set(barrStr.getData(), 0, barrStr.getLength());
            }
            return barrStr.getLength();
        }
        if (pos >= row.size()) {
            Writable wrt = allocateWritable(type);
            assert pos == row.size();
            assert pos == rowTypeName.size();
            row.add(wrt);
            rowTypeName.add(type.name());
            String typeName = typedBytesToTypeName.get(type);
            PrimitiveTypeInfo srcTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
            srcOIns.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(srcTypeInfo));
            converters.add(ObjectInspectorConverters.getConverter(srcOIns.get(pos), dstOIns.get(pos)));
        } else {
            if (!rowTypeName.get(pos).equals(type.name())) {
                throw new RuntimeException("datatype of row changed from " + rowTypeName.get(pos) + " to " + type.name());
            }
        }
        Writable w = row.get(pos);
        switch(type) {
            case BYTE:
                tbIn.readByte((ByteWritable) w);
                break;
            case BOOL:
                tbIn.readBoolean((BooleanWritable) w);
                break;
            case INT:
                tbIn.readInt((IntWritable) w);
                break;
            case SHORT:
                tbIn.readShort((ShortWritable) w);
                break;
            case LONG:
                tbIn.readLong((LongWritable) w);
                break;
            case FLOAT:
                tbIn.readFloat((FloatWritable) w);
                break;
            case DOUBLE:
                tbIn.readDouble((DoubleWritable) w);
                break;
            case STRING:
                tbIn.readText((Text) w);
                break;
            default:
                // should never come here
                assert false;
        }
        write(pos, w);
        pos++;
    }
}
Also used: ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), Writable (org.apache.hadoop.io.Writable), LongWritable (org.apache.hadoop.io.LongWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), IntWritable (org.apache.hadoop.io.IntWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), FloatWritable (org.apache.hadoop.io.FloatWritable), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
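
The converters list consulted in next is built with ObjectInspectorConverters.getConverter, pairing the source column's writable object inspector with the destination's. Below is a minimal sketch of that mechanism, assuming Hive's serde2 API; the float-to-double pairing is an illustrative choice, not the exact one the reader constructs.

import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.FloatWritable;

public class ConverterSketch {
    public static void main(String[] args) {
        // source value arrives as a writable float; destination schema expects double
        Converter c = ObjectInspectorConverters.getConverter(
                PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
                PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
        DoubleWritable d = (DoubleWritable) c.convert(new FloatWritable(1.5F));
        System.out.println(d.get()); // 1.5
    }
}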

Example 88 with FloatWritable

Use of org.apache.hadoop.io.FloatWritable in project hive by apache.

The class TestDruidSerDe, method testDruidObjectSerializerwithNullTimestamp.

@Test
public void testDruidObjectSerializerwithNullTimestamp() throws Exception {
    // Create, initialize, and test the SerDe
    DruidSerDe serDe = new DruidSerDe();
    Configuration conf = new Configuration();
    Properties tbl;
    // Mixed source (all types)
    tbl = createPropertiesSource(COLUMN_NAMES, COLUMN_TYPES);
    serDe.initialize(conf, tbl, null);
    Object[] row = new Object[] {
        // timestamp column is deliberately null; serialization below must reject it
        null,
        new Text("dim1_val"),
        new HiveCharWritable(new HiveChar("dim2_v", 6)),
        new HiveVarcharWritable(new HiveVarchar("dim3_val", 8)),
        new DoubleWritable(10669.3D),
        new FloatWritable(10669.45F),
        new LongWritable(1113939),
        new IntWritable(1112123),
        new ShortWritable((short) 12),
        new ByteWritable((byte) 0),
        // granularity
        null
    };
    expectedEx.expect(NullPointerException.class);
    expectedEx.expectMessage("Timestamp column cannot have null value");
    // should fail as timestamp is null
    serializeObject(tbl, serDe, row, DRUID_WRITABLE);
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), QTestDruidSerDe (org.apache.hadoop.hive.druid.QTestDruidSerDe), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), Text (org.apache.hadoop.io.Text), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), Properties (java.util.Properties), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), FloatWritable (org.apache.hadoop.io.FloatWritable), ArgumentMatchers.anyObject (org.mockito.ArgumentMatchers.anyObject), LongWritable (org.apache.hadoop.io.LongWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), Test (org.junit.Test)
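
The expectedEx field used above is a JUnit 4 ExpectedException rule; its declaration sits outside the snippet, but conventionally looks like the following sketch (the surrounding class name is hypothetical; the field name matches the test).

import org.junit.Rule;
import org.junit.rules.ExpectedException;

public class SomeSerDeTest {
    // arming the rule inside a test makes that test pass only if a matching
    // exception (type and message) is thrown afterwards
    @Rule
    public ExpectedException expectedEx = ExpectedException.none();
}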

Example 89 with FloatWritable

Use of org.apache.hadoop.io.FloatWritable in project hive by apache.

The class TestHBaseSerDe, method testHBaseSerDeWithTimestamp.

@Test
public void testHBaseSerDeWithTimestamp() throws SerDeException {
    // Create the SerDe
    HBaseSerDe serDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesI_I();
    long putTimestamp = 1;
    tbl.setProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP, Long.toString(putTimestamp));
    serDe.initialize(conf, tbl, null);
    byte[] cfa = "cola".getBytes();
    byte[] cfb = "colb".getBytes();
    byte[] cfc = "colc".getBytes();
    byte[] qualByte = "byte".getBytes();
    byte[] qualShort = "short".getBytes();
    byte[] qualInt = "int".getBytes();
    byte[] qualLong = "long".getBytes();
    byte[] qualFloat = "float".getBytes();
    byte[] qualDouble = "double".getBytes();
    byte[] qualString = "string".getBytes();
    byte[] qualBool = "boolean".getBytes();
    byte[] rowKey = Bytes.toBytes("test-row1");
    // Data
    List<Cell> kvs = new ArrayList<Cell>();
    kvs.add(new KeyValue(rowKey, cfa, qualByte, Bytes.toBytes("123")));
    kvs.add(new KeyValue(rowKey, cfb, qualShort, Bytes.toBytes("456")));
    kvs.add(new KeyValue(rowKey, cfc, qualInt, Bytes.toBytes("789")));
    kvs.add(new KeyValue(rowKey, cfa, qualLong, Bytes.toBytes("1000")));
    kvs.add(new KeyValue(rowKey, cfb, qualFloat, Bytes.toBytes("-0.01")));
    kvs.add(new KeyValue(rowKey, cfc, qualDouble, Bytes.toBytes("5.3")));
    kvs.add(new KeyValue(rowKey, cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive")));
    kvs.add(new KeyValue(rowKey, cfb, qualBool, Bytes.toBytes("true")));
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result r = Result.create(kvs);
    Put p = new Put(rowKey, putTimestamp);
    p.addColumn(cfa, qualByte, Bytes.toBytes("123"));
    p.addColumn(cfb, qualShort, Bytes.toBytes("456"));
    p.addColumn(cfc, qualInt, Bytes.toBytes("789"));
    p.addColumn(cfa, qualLong, Bytes.toBytes("1000"));
    p.addColumn(cfb, qualFloat, Bytes.toBytes("-0.01"));
    p.addColumn(cfc, qualDouble, Bytes.toBytes("5.3"));
    p.addColumn(cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive"));
    p.addColumn(cfb, qualBool, Bytes.toBytes("true"));
    Object[] expectedFieldsData = {
        new Text("test-row1"),
        new ByteWritable((byte) 123),
        new ShortWritable((short) 456),
        new IntWritable(789),
        new LongWritable(1000),
        new FloatWritable(-0.01F),
        new DoubleWritable(5.3),
        new Text("Hadoop, HBase, and Hive"),
        new BooleanWritable(true)
    };
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), Text (org.apache.hadoop.io.Text), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), AvroTableProperties (org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties), Properties (java.util.Properties), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), LongWritable (org.apache.hadoop.io.LongWritable), Cell (org.apache.hadoop.hbase.Cell), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), Test (org.junit.Test)
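
Note the encoding choice: this test stores every cell value as a UTF-8 string ("-0.01" for the float column), whereas Example 90 below stores raw binary values. A short sketch of the difference using HBase's Bytes utility (values illustrative):

import org.apache.hadoop.hbase.util.Bytes;

public class EncodingSketch {
    public static void main(String[] args) {
        byte[] asString = Bytes.toBytes("-0.01");  // 5 bytes: the characters '-','0','.','0','1'
        byte[] asBinary = Bytes.toBytes(-0.01F);   // 4 bytes: the IEEE 754 bit pattern
        System.out.println(Bytes.toString(asString)); // -0.01
        System.out.println(Bytes.toFloat(asBinary));  // -0.01
    }
}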

Example 90 with FloatWritable

Use of org.apache.hadoop.io.FloatWritable in project hive by apache.

The class TestHBaseSerDe, method testHBaseSerDeII.

@Test
public void testHBaseSerDeII() throws SerDeException {
    byte[] cfa = "cfa".getBytes();
    byte[] cfb = "cfb".getBytes();
    byte[] cfc = "cfc".getBytes();
    byte[] qualByte = "byte".getBytes();
    byte[] qualShort = "short".getBytes();
    byte[] qualInt = "int".getBytes();
    byte[] qualLong = "long".getBytes();
    byte[] qualFloat = "float".getBytes();
    byte[] qualDouble = "double".getBytes();
    byte[] qualString = "string".getBytes();
    byte[] qualBool = "boolean".getBytes();
    byte[] rowKey = Bytes.toBytes("test-row-2");
    // Data
    List<Cell> kvs = new ArrayList<Cell>();
    kvs.add(new KeyValue(rowKey, cfa, qualByte, new byte[] { Byte.MIN_VALUE }));
    kvs.add(new KeyValue(rowKey, cfb, qualShort, Bytes.toBytes(Short.MIN_VALUE)));
    kvs.add(new KeyValue(rowKey, cfc, qualInt, Bytes.toBytes(Integer.MIN_VALUE)));
    kvs.add(new KeyValue(rowKey, cfa, qualLong, Bytes.toBytes(Long.MIN_VALUE)));
    kvs.add(new KeyValue(rowKey, cfb, qualFloat, Bytes.toBytes(Float.MIN_VALUE)));
    kvs.add(new KeyValue(rowKey, cfc, qualDouble, Bytes.toBytes(Double.MAX_VALUE)));
    kvs.add(new KeyValue(rowKey, cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive Again!")));
    kvs.add(new KeyValue(rowKey, cfb, qualBool, Bytes.toBytes(false)));
    // When using only HBase2, then we could change to this
    // Collections.sort(kvs, CellComparator.COMPARATOR);
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result r = Result.create(kvs);
    Put p = new Put(rowKey);
    p.addColumn(cfa, qualByte, new byte[] { Byte.MIN_VALUE });
    p.addColumn(cfb, qualShort, Bytes.toBytes(Short.MIN_VALUE));
    p.addColumn(cfc, qualInt, Bytes.toBytes(Integer.MIN_VALUE));
    p.addColumn(cfa, qualLong, Bytes.toBytes(Long.MIN_VALUE));
    p.addColumn(cfb, qualFloat, Bytes.toBytes(Float.MIN_VALUE));
    p.addColumn(cfc, qualDouble, Bytes.toBytes(Double.MAX_VALUE));
    p.addColumn(cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive Again!"));
    p.addColumn(cfb, qualBool, Bytes.toBytes(false));
    Object[] expectedFieldsData = {
        new Text("test-row-2"),
        new ByteWritable(Byte.MIN_VALUE),
        new ShortWritable(Short.MIN_VALUE),
        new IntWritable(Integer.MIN_VALUE),
        new LongWritable(Long.MIN_VALUE),
        new FloatWritable(Float.MIN_VALUE),
        new DoubleWritable(Double.MAX_VALUE),
        new Text("Hadoop, HBase, and Hive Again!"),
        new BooleanWritable(false)
    };
    // Create, initialize, and test the SerDe
    HBaseSerDe serDe = new HBaseSerDe();
    Configuration conf = new Configuration();
    Properties tbl = createPropertiesII_I();
    serDe.initialize(conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
    serDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesII_II();
    serDe.initialize(conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
    serDe = new HBaseSerDe();
    conf = new Configuration();
    tbl = createPropertiesII_III();
    serDe.initialize(conf, tbl, null);
    deserializeAndSerialize(serDe, r, p, expectedFieldsData);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) AvroTableProperties(org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties) Properties(java.util.Properties) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) LongWritable(org.apache.hadoop.io.LongWritable) Cell(org.apache.hadoop.hbase.Cell) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)

Aggregations

FloatWritable (org.apache.hadoop.io.FloatWritable): 111
IntWritable (org.apache.hadoop.io.IntWritable): 68
LongWritable (org.apache.hadoop.io.LongWritable): 65
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 54
Text (org.apache.hadoop.io.Text): 51
Test (org.junit.Test): 49
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 44
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 40
BytesWritable (org.apache.hadoop.io.BytesWritable): 40
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 37
Writable (org.apache.hadoop.io.Writable): 28
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 27
ArrayList (java.util.ArrayList): 24
Configuration (org.apache.hadoop.conf.Configuration): 18
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 18
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 18
Path (org.apache.hadoop.fs.Path): 17
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 17
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 17
HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 17