
Example 71 with Writable

Use of org.apache.hadoop.io.Writable in project hive by apache.

The class TypedBytesWritableOutput, method writeArray.

public void writeArray(ArrayWritable aw) throws IOException {
    Writable[] writables = aw.get();
    // Write a vector header carrying the element count, then each element.
    out.writeVectorHeader(writables.length);
    for (Writable writable : writables) {
        write(writable);
    }
}
Also used: ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), NullWritable (org.apache.hadoop.io.NullWritable), VLongWritable (org.apache.hadoop.io.VLongWritable), Writable (org.apache.hadoop.io.Writable), MapWritable (org.apache.hadoop.io.MapWritable), LongWritable (org.apache.hadoop.io.LongWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), ArrayWritable (org.apache.hadoop.io.ArrayWritable), IntWritable (org.apache.hadoop.io.IntWritable), SortedMapWritable (org.apache.hadoop.io.SortedMapWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), VIntWritable (org.apache.hadoop.io.VIntWritable), FloatWritable (org.apache.hadoop.io.FloatWritable)
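
For context, a minimal driver for the method above might look like the following sketch. The class name WriteArrayExample is hypothetical, and the TypedBytesWritableOutput import below is the Hadoop streaming flavor of the typed bytes classes; Hive's contrib module ships its own copy, so adjust the import and construction to match your tree.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.typedbytes.TypedBytesWritableOutput;

public class WriteArrayExample {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);

        // writeArray() emits a vector header carrying the element count,
        // then recursively writes each element.
        TypedBytesWritableOutput out = new TypedBytesWritableOutput(dos);
        ArrayWritable aw = new ArrayWritable(IntWritable.class,
                new IntWritable[] { new IntWritable(1), new IntWritable(2) });
        out.writeArray(aw);
        dos.flush();

        System.out.println("typed bytes written: " + baos.size());
    }
}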

Example 72 with Writable

Use of org.apache.hadoop.io.Writable in project hbase by apache.

The class HbaseObjectWritableFor96Migration, method readObject.

/**
   * Read a {@link Writable}, {@link String}, primitive type, or an array of
   * the preceding.
   * @param in the stream to read from
   * @param objectWritable if non-null, receives the declared class and the
   *   deserialized instance
   * @param conf the configuration used to resolve class names
   * @return the deserialized object
   * @throws IOException if the stream is corrupt or a class cannot be resolved
   */
@SuppressWarnings("unchecked")
static Object readObject(DataInput in, HbaseObjectWritableFor96Migration objectWritable, Configuration conf) throws IOException {
    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) {
        // primitive types
        if (declaredClass == Boolean.TYPE) {
            // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) {
            // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) {
            // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) {
            // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) {
            // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) {
            // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) {
            // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) {
            // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) {
            // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) {
        // array
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) {
        //an array not declared in CLASS_TO_CODE
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) {
        // List
        int length = in.readInt();
        instance = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            ((ArrayList) instance).add(readObject(in, conf));
        }
    } else if (declaredClass == String.class) {
        // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) {
        // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else if (Scan.class.isAssignableFrom(declaredClass)) {
        int length = in.readInt();
        byte[] scanBytes = new byte[length];
        in.readFully(scanBytes);
        ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
        ProtobufUtil.mergeFrom(scanProto, scanBytes);
        instance = ProtobufUtil.toScan(scanProto.build());
    } else {
        // Writable or Serializable
        Class instanceClass = null;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            if ("org.apache.hadoop.hbase.regionserver.wal.HLog$Entry".equals(className)) {
                className = Entry.class.getName();
            }
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance(instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) {
                // null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            ByteArrayInputStream bis = null;
            ObjectInputStream ois = null;
            try {
                bis = new ByteArrayInputStream(objectBytes);
                ois = new ObjectInputStream(bis);
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            } finally {
                if (bis != null)
                    bis.close();
                if (ois != null)
                    ois.close();
            }
        }
    }
    if (objectWritable != null) {
        // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}
Also used: ArrayList (java.util.ArrayList), Writable (org.apache.hadoop.io.Writable), MapWritable (org.apache.hadoop.io.MapWritable), ObjectWritable (org.apache.hadoop.io.ObjectWritable), IOException (java.io.IOException), InvocationTargetException (java.lang.reflect.InvocationTargetException), ByteArrayInputStream (java.io.ByteArrayInputStream), List (java.util.List), Scan (org.apache.hadoop.hbase.client.Scan), ClientProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos), ObjectInputStream (java.io.ObjectInputStream)
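
The class-code dispatch at the top of readObject is the heart of the format: a varint code read from the stream selects the declared class, which then determines how the payload bytes are decoded. The following self-contained sketch reduces that pattern to three invented codes; the real CODE_TO_CLASS table and its varint encoding are specific to HBase.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class ClassCodeDispatchSketch {
    // Invented class codes for illustration; the real table in
    // HbaseObjectWritableFor96Migration is much larger.
    static final Map<Integer, Class<?>> CODE_TO_CLASS = new HashMap<>();
    static {
        CODE_TO_CLASS.put(1, Boolean.TYPE);
        CODE_TO_CLASS.put(2, Integer.TYPE);
        CODE_TO_CLASS.put(3, String.class);
    }

    static Object readObject(DataInput in) throws IOException {
        int code = in.readByte(); // the real code path reads a varint
        Class<?> declared = CODE_TO_CLASS.get(code);
        if (declared == Boolean.TYPE) {
            return in.readBoolean();
        } else if (declared == Integer.TYPE) {
            return in.readInt();
        } else if (declared == String.class) {
            return in.readUTF();
        }
        throw new IOException("Unknown class code: " + code);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeByte(2); // class code for int
        out.writeInt(42); // payload
        DataInput in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
        System.out.println(readObject(in)); // prints 42
    }
}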

Example 73 with Writable

Use of org.apache.hadoop.io.Writable in project hbase by apache.

The class HbaseObjectWritableFor96Migration, method writeObject.

/**
   * Write a {@link Writable}, {@link String}, primitive type, or an array of
   * the preceding.
   * @param out the stream to write to
   * @param instance the object to serialize
   * @param declaredClass the declared class of the object
   * @param conf the configuration used to resolve class codes
   * @throws IOException if the object cannot be serialized
   */
@SuppressWarnings("unchecked")
static void writeObject(DataOutput out, Object instance, Class declaredClass, Configuration conf) throws IOException {
    Object instanceObj = instance;
    Class declClass = declaredClass;
    if (instanceObj == null) {
        // null
        instanceObj = new NullInstance(declClass, conf);
        declClass = Writable.class;
    }
    writeClassCode(out, declClass);
    if (declClass.isArray()) {
        // If it is a byte array, dump it out directly, avoiding the recursion
        // and byte-at-a-time writing we would otherwise be doing.
        if (declClass.equals(byte[].class)) {
            Bytes.writeByteArray(out, (byte[]) instanceObj);
        } else {
            //if it is a Generic array, write the element's type
            if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) {
                Class<?> componentType = declaredClass.getComponentType();
                writeClass(out, componentType);
            }
            int length = Array.getLength(instanceObj);
            out.writeInt(length);
            for (int i = 0; i < length; i++) {
                Object item = Array.get(instanceObj, i);
                writeObject(out, item, item.getClass(), conf);
            }
        }
    } else if (List.class.isAssignableFrom(declClass)) {
        List list = (List) instanceObj;
        int length = list.size();
        out.writeInt(length);
        for (int i = 0; i < length; i++) {
            Object elem = list.get(i);
            writeObject(out, elem, elem == null ? Writable.class : elem.getClass(), conf);
        }
    } else if (declClass == String.class) {
        // String
        Text.writeString(out, (String) instanceObj);
    } else if (declClass.isPrimitive()) {
        // primitive type
        if (declClass == Boolean.TYPE) {
            // boolean
            out.writeBoolean(((Boolean) instanceObj).booleanValue());
        } else if (declClass == Character.TYPE) {
            // char
            out.writeChar(((Character) instanceObj).charValue());
        } else if (declClass == Byte.TYPE) {
            // byte
            out.writeByte(((Byte) instanceObj).byteValue());
        } else if (declClass == Short.TYPE) {
            // short
            out.writeShort(((Short) instanceObj).shortValue());
        } else if (declClass == Integer.TYPE) {
            // int
            out.writeInt(((Integer) instanceObj).intValue());
        } else if (declClass == Long.TYPE) {
            // long
            out.writeLong(((Long) instanceObj).longValue());
        } else if (declClass == Float.TYPE) {
            // float
            out.writeFloat(((Float) instanceObj).floatValue());
        } else if (declClass == Double.TYPE) {
            // double
            out.writeDouble(((Double) instanceObj).doubleValue());
        } else if (declClass == Void.TYPE) {
        // void
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declClass);
        }
    } else if (declClass.isEnum()) {
        // enum
        Text.writeString(out, ((Enum) instanceObj).name());
    } else if (Message.class.isAssignableFrom(declaredClass)) {
        Text.writeString(out, instanceObj.getClass().getName());
        ((Message) instance).writeDelimitedTo(DataOutputOutputStream.constructOutputStream(out));
    } else if (Writable.class.isAssignableFrom(declClass)) {
        // Writable
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ((Writable) instanceObj).write(out);
    } else if (Serializable.class.isAssignableFrom(declClass)) {
        Class<?> c = instanceObj.getClass();
        Integer code = CLASS_TO_CODE.get(c);
        if (code == null) {
            out.writeByte(NOT_ENCODED);
            Text.writeString(out, c.getName());
        } else {
            writeClassCode(out, c);
        }
        ByteArrayOutputStream bos = null;
        ObjectOutputStream oos = null;
        try {
            bos = new ByteArrayOutputStream();
            oos = new ObjectOutputStream(bos);
            oos.writeObject(instanceObj);
            byte[] value = bos.toByteArray();
            out.writeInt(value.length);
            out.write(value);
        } finally {
            if (bos != null)
                bos.close();
            if (oos != null)
                oos.close();
        }
    } else if (Scan.class.isAssignableFrom(declClass)) {
        Scan scan = (Scan) instanceObj;
        byte[] scanBytes = ProtobufUtil.toScan(scan).toByteArray();
        out.writeInt(scanBytes.length);
        out.write(scanBytes);
    } else {
        throw new IOException("Can't write: " + instanceObj + " as " + declClass);
    }
}
Also used: Serializable (java.io.Serializable), Message (com.google.protobuf.Message), Writable (org.apache.hadoop.io.Writable), MapWritable (org.apache.hadoop.io.MapWritable), ObjectWritable (org.apache.hadoop.io.ObjectWritable), ByteArrayOutputStream (java.io.ByteArrayOutputStream), IOException (java.io.IOException), ObjectOutputStream (java.io.ObjectOutputStream), List (java.util.List), ArrayList (java.util.ArrayList), Scan (org.apache.hadoop.hbase.client.Scan)
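
HbaseObjectWritableFor96Migration is a migration-era, package-private copy of this pattern, so it is awkward to call directly. Hadoop's public org.apache.hadoop.io.ObjectWritable exposes the same writeObject/readObject pair, which makes for an easy round-trip sketch of the scheme (declared class first, then the instance payload); the class name below is hypothetical.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;

public class ObjectWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        // The declared class is written first, then the instance payload,
        // mirroring the writeClassCode()/write() split above.
        ObjectWritable.writeObject(out, new Text("hello"), Text.class, conf);

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
        Object roundTripped = ObjectWritable.readObject(in, conf);
        System.out.println(roundTripped); // hello
    }
}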

Example 74 with Writable

Use of org.apache.hadoop.io.Writable in project hive by apache.

The class TestJsonSerDe, method testRobustRead.

public void testRobustRead() throws Exception {
    /**
     *  This test has been added to account for HCATALOG-436
     *  We write out columns with "internal column names" such
     *  as "_col0", but try to read with regular column names.
     */
    Configuration conf = new Configuration();
    for (Pair<Properties, HCatRecord> e : getData()) {
        Properties tblProps = e.first;
        HCatRecord r = e.second;
        Properties internalTblProps = new Properties();
        for (Map.Entry pe : tblProps.entrySet()) {
            if (!pe.getKey().equals(serdeConstants.LIST_COLUMNS)) {
                internalTblProps.put(pe.getKey(), pe.getValue());
            } else {
                internalTblProps.put(pe.getKey(), getInternalNames((String) pe.getValue()));
            }
        }
        LOG.info("orig tbl props:{}", tblProps);
        LOG.info("modif tbl props:{}", internalTblProps);
        JsonSerDe wjsd = new JsonSerDe();
        SerDeUtils.initializeSerDe(wjsd, conf, internalTblProps, null);
        JsonSerDe rjsd = new JsonSerDe();
        SerDeUtils.initializeSerDe(rjsd, conf, tblProps, null);
        LOG.info("ORIG:{}", r);
        Writable s = wjsd.serialize(r, wjsd.getObjectInspector());
        LOG.info("ONE:{}", s);
        Object o1 = wjsd.deserialize(s);
        LOG.info("deserialized ONE : {} ", o1);
        Object o2 = rjsd.deserialize(s);
        LOG.info("deserialized TWO : {} ", o2);
        StringBuilder msg = new StringBuilder();
        boolean isEqual = HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o2, msg);
        assertTrue(msg.toString(), isEqual);
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), Writable (org.apache.hadoop.io.Writable), Properties (java.util.Properties), HashMap (java.util.HashMap), Map (java.util.Map)
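
The getInternalNames() helper is not shown in this excerpt. A hypothetical reconstruction consistent with the test's comment (columns are written with "_col0"-style positional names but read back with the real names) could look like this:

public class InternalNames {
    // Rewrite a comma-separated column list ("id,name,ts") into Hive's
    // positional internal names ("_col0,_col1,_col2").
    static String getInternalNames(String columnNames) {
        String[] cols = columnNames.split(",");
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < cols.length; i++) {
            if (i > 0) {
                sb.append(',');
            }
            sb.append("_col").append(i);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(getInternalNames("id,name,ts")); // _col0,_col1,_col2
    }
}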

Example 75 with Writable

Use of org.apache.hadoop.io.Writable in project hive by apache.

The class TestJsonSerDe, method testRW.

public void testRW() throws Exception {
    Configuration conf = new Configuration();
    for (Pair<Properties, HCatRecord> e : getData()) {
        Properties tblProps = e.first;
        HCatRecord r = e.second;
        HCatRecordSerDe hrsd = new HCatRecordSerDe();
        SerDeUtils.initializeSerDe(hrsd, conf, tblProps, null);
        JsonSerDe jsde = new JsonSerDe();
        SerDeUtils.initializeSerDe(jsde, conf, tblProps, null);
        LOG.info("ORIG:{}", r);
        Writable s = hrsd.serialize(r, hrsd.getObjectInspector());
        LOG.info("ONE:{}", s);
        Object o1 = hrsd.deserialize(s);
        StringBuilder msg = new StringBuilder();
        boolean isEqual = HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o1);
        assertTrue(msg.toString(), isEqual);
        Writable s2 = jsde.serialize(o1, hrsd.getObjectInspector());
        LOG.info("TWO:{}", s2);
        Object o2 = jsde.deserialize(s2);
        LOG.info("deserialized TWO : {} ", o2);
        msg.setLength(0);
        isEqual = HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o2, msg);
        assertTrue(msg.toString(), isEqual);
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), Writable (org.apache.hadoop.io.Writable), Properties (java.util.Properties)
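
As a standalone illustration of the same serialize/deserialize round trip without the test harness and its getData() fixture, the following sketch initializes a JsonSerDe directly. The column names and types are invented, and the hcatalog package paths and the DefaultHCatRecord constructor are assumptions based on where this test lives.

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.JsonSerDe;

public class JsonSerDeRoundTrip {
    public static void main(String[] args) throws Exception {
        // Table properties drive the serde's schema.
        Properties props = new Properties();
        props.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
        props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");

        JsonSerDe serde = new JsonSerDe();
        SerDeUtils.initializeSerDe(serde, new Configuration(), props, null);

        List<Object> fields = new ArrayList<>();
        fields.add(1);
        fields.add("x");
        HCatRecord row = new DefaultHCatRecord(fields);

        Writable json = serde.serialize(row, serde.getObjectInspector());
        Object back = serde.deserialize(json);
        System.out.println(json + " -> " + back);
    }
}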

Aggregations

Writable (org.apache.hadoop.io.Writable): 221 usages
IntWritable (org.apache.hadoop.io.IntWritable): 103 usages
LongWritable (org.apache.hadoop.io.LongWritable): 91 usages
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 75 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 74 usages
FloatWritable (org.apache.hadoop.io.FloatWritable): 73 usages
Test (org.junit.Test): 68 usages
IOException (java.io.IOException): 43 usages
Path (org.apache.hadoop.fs.Path): 43 usages
Text (org.apache.hadoop.io.Text): 40 usages
ArrayWritable (org.apache.hadoop.io.ArrayWritable): 37 usages
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 34 usages
SequenceFile (org.apache.hadoop.io.SequenceFile): 32 usages
Configuration (org.apache.hadoop.conf.Configuration): 31 usages
DoubleWritable (org.apache.hadoop.io.DoubleWritable): 30 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 29 usages
ByteWritable (org.apache.hadoop.io.ByteWritable): 28 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 25 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 24 usages
ArrayList (java.util.ArrayList): 23 usages