Example 66 with Writable

Use of org.apache.hadoop.io.Writable in project hive by apache.

The class HCatSplit, method readFields.

/* (non-Javadoc)
   * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
   */
@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput input) throws IOException {
    String partitionInfoString = WritableUtils.readString(input);
    partitionInfo = (PartInfo) HCatUtil.deserialize(partitionInfoString);
    String baseSplitClassName = WritableUtils.readString(input);
    org.apache.hadoop.mapred.InputSplit split;
    try {
        Class<? extends org.apache.hadoop.mapred.InputSplit> splitClass = (Class<? extends org.apache.hadoop.mapred.InputSplit>) JavaUtils.loadClass(baseSplitClassName);
        // Class.forName().newInstance() does not work if the underlying
        // InputSplit has package visibility, so fetch the declared
        // constructor and lift the access check below.
        Constructor<? extends org.apache.hadoop.mapred.InputSplit> constructor = splitClass.getDeclaredConstructor();
        constructor.setAccessible(true);
        split = constructor.newInstance();
        // read baseSplit from input
        ((Writable) split).readFields(input);
        this.baseMapRedSplit = split;
    } catch (Exception e) {
        throw new IOException("Exception from " + baseSplitClassName, e);
    }
}
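The comment in readFields is worth spelling out: plain Class.newInstance() enforces Java access checks, so a split class whose no-arg constructor is not public must be instantiated through getDeclaredConstructor() plus setAccessible(true). A minimal, self-contained sketch of that pattern (the Hidden class here is hypothetical, purely for illustration):

import java.lang.reflect.Constructor;

public class ReflectiveInstantiation {

    // Hypothetical stand-in for an InputSplit with a non-public constructor.
    static class Hidden {
        private Hidden() {
        }
    }

    public static void main(String[] args) throws Exception {
        Constructor<Hidden> ctor = Hidden.class.getDeclaredConstructor();
        // Lift the access check; without this, a caller outside the class's
        // nest or package would get an IllegalAccessException.
        ctor.setAccessible(true);
        Hidden h = ctor.newInstance();
        System.out.println("Instantiated: " + h);
    }
}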

Example 67 with Writable

Use of org.apache.hadoop.io.Writable in project hive by apache.

The class HCatSplit, method write.

/* (non-Javadoc)
   * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
   */
@Override
public void write(DataOutput output) throws IOException {
    String partitionInfoString = HCatUtil.serialize(partitionInfo);
    // write partitionInfo into output
    WritableUtils.writeString(output, partitionInfoString);
    WritableUtils.writeString(output, baseMapRedSplit.getClass().getName());
    Writable baseSplitWritable = (Writable) baseMapRedSplit;
    // write baseSplit into output
    baseSplitWritable.write(output);
}
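Taken together, write and readFields form the usual Writable round trip: write serializes the object's fields to a DataOutput, and readFields repopulates a fresh instance from the matching DataInput. A minimal sketch of that contract, using org.apache.hadoop.io.Text as a stand-in for the base split (the byte-array plumbing is illustrative, not HCatSplit code):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class WritableRoundTrip {

    public static void main(String[] args) throws Exception {
        Writable original = new Text("hello, writable");

        // Serialize via Writable.write(DataOutput).
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance via Writable.readFields(DataInput).
        Writable copy = new Text();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        // Prints: hello, writable
        System.out.println(copy);
    }
}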

Example 68 with Writable

Use of org.apache.hadoop.io.Writable in project hive by apache.

The class TestLazyHBaseObject, method testLazyHBaseRow3.

/**
   * Test the LazyHBaseRow class with a one-to-one/onto mapping between Hive columns and
   * HBase column family/column qualifier pairs. The column types are primitive and fields
   * are stored in binary format in HBase.
   * @throws SerDeException
   */
public void testLazyHBaseRow3() throws SerDeException {
    List<TypeInfo> fieldTypeInfos = TypeInfoUtils.getTypeInfosFromTypeString("string,int,tinyint,smallint,bigint,float,double,string,boolean");
    List<String> fieldNames = Arrays.asList(new String[] { "key", "c_int", "c_byte", "c_short", "c_long", "c_float", "c_double", "c_string", "c_bool" });
    Text nullSequence = new Text("\\N");
    String hbaseColumnsMapping = ":key#str,cf-int:cq-int#bin,cf-byte:cq-byte#bin," + "cf-short:cq-short#bin,cf-long:cq-long#bin,cf-float:cq-float#bin,cf-double:cq-double#bin," + "cf-string:cq-string#str,cf-bool:cq-bool#bin";
    ColumnMappings columnMappings = null;
    try {
        columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
    } catch (SerDeException e) {
        fail(e.toString());
    }
    ColumnMapping[] columnsMapping = columnMappings.getColumnsMapping();
    for (int i = 0; i < columnsMapping.length; i++) {
        ColumnMapping colMap = columnsMapping[i];
        if (i == 0 || i == 7) {
            colMap.binaryStorage.add(false);
        } else {
            colMap.binaryStorage.add(true);
        }
    }
    ObjectInspector oi = LazyFactory.createLazyStructInspector(fieldNames, fieldTypeInfos, new byte[] { ' ', ':', '=' }, nullSequence, false, false, (byte) 0);
    LazyHBaseRow o = new LazyHBaseRow((LazySimpleStructObjectInspector) oi, columnMappings);
    byte[] rowKey = "row-key".getBytes();
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    byte[] value;
    for (int i = 1; i < columnsMapping.length; i++) {
        switch(i) {
            case 1:
                value = Bytes.toBytes(1);
                break;
            case 2:
                value = new byte[] { (byte) 1 };
                break;
            case 3:
                value = Bytes.toBytes((short) 1);
                break;
            case 4:
                value = Bytes.toBytes(1L);
                break;
            case 5:
                value = Bytes.toBytes(1.0F);
                break;
            case 6:
                value = Bytes.toBytes(1.0);
                break;
            case 7:
                value = "Hadoop, Hive, with HBase storage handler.".getBytes();
                break;
            case 8:
                value = Bytes.toBytes(true);
                break;
            default:
                throw new RuntimeException("Not expected: " + i);
        }
        ColumnMapping colMap = columnsMapping[i];
        kvs.add(new KeyValue(rowKey, colMap.familyNameBytes, colMap.qualifierNameBytes, value));
    }
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result result = new Result(kvs);
    o.init(result);
    List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
    for (int i = 0; i < fieldRefs.size(); i++) {
        Object fieldData = ((StructObjectInspector) oi).getStructFieldData(o, fieldRefs.get(i));
        assertNotNull(fieldData);
        assertTrue(fieldData instanceof LazyPrimitive<?, ?>);
        Writable writable = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
        switch(i) {
            case 0:
                Text text = new Text("row-key");
                assertEquals(text, writable);
                break;
            case 1:
                IntWritable iw = new IntWritable(1);
                assertEquals(iw, writable);
                break;
            case 2:
                ByteWritable bw = new ByteWritable((byte) 1);
                assertEquals(bw, writable);
                break;
            case 3:
                ShortWritable sw = new ShortWritable((short) 1);
                assertEquals(sw, writable);
                break;
            case 4:
                LongWritable lw = new LongWritable(1);
                assertEquals(lw, writable);
                break;
            case 5:
                FloatWritable fw = new FloatWritable(1.0F);
                assertEquals(fw, writable);
                break;
            case 6:
                DoubleWritable dw = new DoubleWritable(1.0);
                assertEquals(dw, writable);
                break;
            case 7:
                Text t = new Text("Hadoop, Hive, with HBase storage handler.");
                assertEquals(t, writable);
                break;
            case 8:
                BooleanWritable boolWritable = new BooleanWritable(true);
                assertEquals(boolWritable, writable);
                break;
            default:
                fail("Error: Unanticipated value in deserializing fields for HBaseSerDe.");
                break;
        }
    }
}
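Most of the column mappings in this test carry the #bin suffix, meaning values are stored in HBase's fixed-width binary encoding rather than as text; only the row key and the string column use #str. A quick sketch of the difference for an int, assuming nothing beyond org.apache.hadoop.hbase.util.Bytes (which the test itself uses):

import org.apache.hadoop.hbase.util.Bytes;

public class BinaryVsStringStorage {

    public static void main(String[] args) {
        // "#bin" storage: the 4-byte big-endian encoding of the int.
        byte[] binary = Bytes.toBytes(1);               // {0, 0, 0, 1}
        // "#str" storage: the UTF-8 text "1".
        byte[] text = Bytes.toBytes(String.valueOf(1)); // {49}

        System.out.println(binary.length + " vs " + text.length); // 4 vs 1
        System.out.println(Bytes.toInt(binary));                  // 1
    }
}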

Example 69 with Writable

Use of org.apache.hadoop.io.Writable in project hive by apache.

The class TypedBytesWritableInput, method readMap.

public MapWritable readMap(MapWritable mw) throws IOException {
    if (mw == null) {
        mw = new MapWritable();
    }
    int length = in.readMapHeader();
    // the header gives the number of key/value pairs that follow
    for (int i = 0; i < length; i++) {
        Writable key = read();
        Writable value = read();
        mw.put(key, value);
    }
    return mw;
}
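readMap follows the standard length-prefixed layout: a header gives the number of pairs, then each key and value is read in turn. Hadoop's own MapWritable serializes the same way, so a round trip through it illustrates the contract (a standalone sketch, not TypedBytesWritableInput code):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class MapWritableRoundTrip {

    public static void main(String[] args) throws Exception {
        MapWritable mw = new MapWritable();
        mw.put(new Text("answer"), new IntWritable(42));

        // Serialize, then deserialize into a fresh map.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        mw.write(new DataOutputStream(out));
        MapWritable copy = new MapWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(out.toByteArray())));

        // Prints: 42
        System.out.println(copy.get(new Text("answer")));
    }
}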

Example 70 with Writable

Use of org.apache.hadoop.io.Writable in project hive by apache.

The class TypedBytesWritableInput, method readArray.

public ArrayWritable readArray(ArrayWritable aw) throws IOException {
    if (aw == null) {
        aw = new ArrayWritable(TypedBytesWritable.class);
    } else if (!aw.getValueClass().equals(TypedBytesWritable.class)) {
        throw new RuntimeException("value class has to be TypedBytesWritable");
    }
    int length = in.readVectorHeader();
    // each element is read as one raw typed-bytes chunk
    Writable[] writables = new Writable[length];
    for (int i = 0; i < length; i++) {
        writables[i] = new TypedBytesWritable(in.readRaw());
    }
    aw.set(writables);
    return aw;
}
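The guard on aw.getValueClass() reflects a property of ArrayWritable itself: the container is homogeneous, with a single value class fixed at construction. A short standalone usage sketch (not Hive code):

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class ArrayWritableDemo {

    public static void main(String[] args) {
        // Every element must be an instance of the declared value class.
        ArrayWritable aw = new ArrayWritable(Text.class);
        aw.set(new Writable[] { new Text("a"), new Text("b") });

        for (Writable w : aw.get()) {
            System.out.println(w);
        }
        // Prints: org.apache.hadoop.io.Text
        System.out.println(aw.getValueClass().getName());
    }
}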
