Search in sources:

Example 1 with ObjectWritable

use of org.apache.hadoop.io.ObjectWritable in project hbase by apache.

Source: class HbaseObjectWritableFor96Migration, method readObject.

/**
 * Reads a single encoded value from the stream: a {@link Writable}, a {@link String},
 * a primitive, an enum, a protobuf {@code Message}, a {@code Scan}, a {@code List},
 * an array of any of the preceding, or a Java-serialized object.
 *
 * <p>The wire format begins with a vint type code (looked up in {@code CODE_TO_CLASS});
 * the remaining bytes depend on that type. Read order below must mirror the
 * corresponding writer exactly.
 *
 * @param in the stream to read from
 * @param objectWritable if non-null, receives the decoded instance and its declared
 *        class as a side effect
 * @param conf configuration used for class loading and Writable instantiation
 * @return the decoded object; may be null (void type or an encoded NullInstance)
 * @throws IOException if the stream is corrupt, a referenced class cannot be loaded,
 *         or deserialization fails
 */
@SuppressWarnings("unchecked")
static Object readObject(DataInput in, HbaseObjectWritableFor96Migration objectWritable, Configuration conf) throws IOException {
    int typeCode = WritableUtils.readVInt(in);
    Class<?> declaredClass = CODE_TO_CLASS.get(typeCode);
    if (declaredClass == null) {
        // Corrupt or incompatible stream: fail with context rather than a bare NPE below.
        throw new IOException("Unknown type code " + typeCode + " in stream");
    }
    Object instance;
    if (declaredClass.isPrimitive()) {
        // primitive types
        if (declaredClass == Boolean.TYPE) {
            // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) {
            // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) {
            // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) {
            // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) {
            // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) {
            // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) {
            // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) {
            // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) {
            // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }
    } else if (declaredClass.isArray()) {
        // array; byte[] has a dedicated compact encoding
        if (declaredClass.equals(byte[].class)) {
            instance = Bytes.readByteArray(in);
        } else {
            int length = in.readInt();
            instance = Array.newInstance(declaredClass.getComponentType(), length);
            for (int i = 0; i < length; i++) {
                Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass.equals(Array.class)) {
        // an array whose component type is not registered in CLASS_TO_CODE;
        // the component class is written explicitly before the elements
        Class<?> componentType = readClass(conf, in);
        int length = in.readInt();
        instance = Array.newInstance(componentType, length);
        for (int i = 0; i < length; i++) {
            Array.set(instance, i, readObject(in, conf));
        }
    } else if (List.class.isAssignableFrom(declaredClass)) {
        // List: element count followed by recursively-encoded elements
        int length = in.readInt();
        List<Object> list = new ArrayList<>(length);
        for (int i = 0; i < length; i++) {
            list.add(readObject(in, conf));
        }
        instance = list;
    } else if (declaredClass == String.class) {
        // String
        instance = Text.readString(in);
    } else if (declaredClass.isEnum()) {
        // enum, encoded by constant name
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
    } else if (declaredClass == Message.class) {
        // protobuf Message: concrete class name precedes the message bytes
        String className = Text.readString(in);
        try {
            declaredClass = getClassByName(conf, className);
            instance = tryInstantiateProtobuf(declaredClass, in);
        } catch (ClassNotFoundException e) {
            LOG.error("Can't find class " + className, e);
            throw new IOException("Can't find class " + className, e);
        }
    } else if (Scan.class.isAssignableFrom(declaredClass)) {
        // Scan is carried as a length-prefixed protobuf payload
        int length = in.readInt();
        byte[] scanBytes = new byte[length];
        in.readFully(scanBytes);
        ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
        ProtobufUtil.mergeFrom(scanProto, scanBytes);
        instance = ProtobufUtil.toScan(scanProto.build());
    } else {
        // Writable or Java-Serializable; a second vint selects the concrete class
        Class<?> instanceClass;
        int b = (byte) WritableUtils.readVInt(in);
        if (b == NOT_ENCODED) {
            String className = Text.readString(in);
            // Renamed in the 0.96 migration; map the legacy name forward.
            if ("org.apache.hadoop.hbase.regionserver.wal.HLog$Entry".equals(className)) {
                className = Entry.class.getName();
            }
            try {
                instanceClass = getClassByName(conf, className);
            } catch (ClassNotFoundException e) {
                LOG.error("Can't find class " + className, e);
                throw new IOException("Can't find class " + className, e);
            }
        } else {
            instanceClass = CODE_TO_CLASS.get(b);
            if (instanceClass == null) {
                throw new IOException("Unknown class code " + b + " in stream");
            }
        }
        if (Writable.class.isAssignableFrom(instanceClass)) {
            Writable writable = WritableFactories.newInstance((Class<? extends Writable>) instanceClass, conf);
            try {
                writable.readFields(in);
            } catch (Exception e) {
                LOG.error("Error in readFields", e);
                throw new IOException("Error in readFields", e);
            }
            instance = writable;
            if (instanceClass == NullInstance.class) {
                // NullInstance encodes a typed null: surface the declared class, return null
                declaredClass = ((NullInstance) instance).declaredClass;
                instance = null;
            }
        } else {
            // Length-prefixed Java native serialization.
            // SECURITY NOTE(review): ObjectInputStream on stream-supplied bytes is unsafe
            // for untrusted input; acceptable only because this is a one-off 0.94->0.96
            // migration path. Consider an ObjectInputFilter if the input is not trusted.
            int length = in.readInt();
            byte[] objectBytes = new byte[length];
            in.readFully(objectBytes);
            try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(objectBytes))) {
                instance = ois.readObject();
            } catch (ClassNotFoundException e) {
                LOG.error("Class not found when attempting to deserialize object", e);
                throw new IOException("Class not found when attempting to " + "deserialize object", e);
            }
        }
    }
    if (objectWritable != null) {
        // store values for the caller-supplied holder
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }
    return instance;
}
Also used : ArrayList(java.util.ArrayList) Writable(org.apache.hadoop.io.Writable) MapWritable(org.apache.hadoop.io.MapWritable) ObjectWritable(org.apache.hadoop.io.ObjectWritable) IOException(java.io.IOException) InvocationTargetException(java.lang.reflect.InvocationTargetException) IOException(java.io.IOException) ByteArrayInputStream(java.io.ByteArrayInputStream) List(java.util.List) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) ObjectInputStream(java.io.ObjectInputStream)

Example 2 with ObjectWritable

use of org.apache.hadoop.io.ObjectWritable in project cdap by caskdata.

Source: class DatasetSerDe, method deserialize.

/**
 * Unwraps the record carried by the {@link ObjectWritable} and hands it to the
 * configured deserializer.
 *
 * @param writable expected to be an {@link ObjectWritable} wrapping the record
 * @return the deserialized row object
 * @throws SerDeException if the underlying deserializer fails for any reason
 */
@Override
public Object deserialize(Writable writable) throws SerDeException {
    Object record = ((ObjectWritable) writable).get();
    try {
        return deserializer.deserialize(record);
    } catch (Throwable t) {
        LOG.error("Unable to deserialize object {}.", record, t);
        throw new SerDeException("Unable to deserialize an object.", t);
    }
}
Also used : ObjectWritable(org.apache.hadoop.io.ObjectWritable) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 3 with ObjectWritable

use of org.apache.hadoop.io.ObjectWritable in project hadoop-pcap by RIPE-NCC.

Source: class PcapDeserializer, method deserialize.

/**
 * Extracts the decoded {@code Packet} from the wrapper and populates one row
 * value per configured column, looked up by column name.
 *
 * @param w expected to be an {@link ObjectWritable} wrapping a {@code Packet}
 * @return the reusable row object with all column slots filled
 */
@Override
public Object deserialize(Writable w) throws SerDeException {
    Packet decoded = (Packet) ((ObjectWritable) w).get();
    for (int col = 0; col < numColumns; col++) {
        row.set(col, decoded.get(columnNames.get(col)));
    }
    return row;
}
Also used : Packet(net.ripe.hadoop.pcap.packet.Packet) ObjectWritable(org.apache.hadoop.io.ObjectWritable)

Example 4 with ObjectWritable

use of org.apache.hadoop.io.ObjectWritable in project cdap by caskdata.

Source: class StreamSerDe, method deserialize.

/**
 * Turns the wrapped {@code StreamEvent} into a row: timestamp first, then the
 * event headers, then the body fields produced by the stream format.
 *
 * @param writable expected to be an {@link ObjectWritable} wrapping a
 *        {@code StreamEvent}, as provided by the StreamRecordReader
 * @return the row as a list of column values
 * @throws SerDeException if the stream body cannot be formatted into a record
 */
@Override
public Object deserialize(Writable writable) throws SerDeException {
    StreamEvent event = (StreamEvent) ((ObjectWritable) writable).get();
    // Timestamp and headers are always guaranteed to be the first two columns.
    List<Object> columns = Lists.newArrayList();
    columns.add(event.getTimestamp());
    columns.add(event.getHeaders());
    try {
        // The format should always turn the stream event into a record.
        columns.addAll(deserializer.translateRecord(streamFormat.read(event)));
        return columns;
    } catch (Throwable t) {
        LOG.info("Unable to format the stream body.", t);
        throw new SerDeException("Unable to format the stream body.", t);
    }
}
Also used : StreamEvent(co.cask.cdap.api.flow.flowlet.StreamEvent) ObjectWritable(org.apache.hadoop.io.ObjectWritable) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 5 with ObjectWritable

use of org.apache.hadoop.io.ObjectWritable in project cdap by caskdata.

Source: class TaggedWritable, method setConf.

/**
 * Stores the configuration and builds the inner {@link ObjectWritable},
 * pushing the configuration into it up front.
 *
 * @param conf configuration to retain and propagate to the wrapped writable
 */
@Override
public void setConf(Configuration conf) {
    this.conf = conf;
    ObjectWritable wrapped = new ObjectWritable();
    // ObjectWritable does not pick up a conf while reading fields, so set it now.
    wrapped.setConf(conf);
    recordWritable = wrapped;
}
Also used : ObjectWritable(org.apache.hadoop.io.ObjectWritable)

Aggregations

ObjectWritable (org.apache.hadoop.io.ObjectWritable)8 SerDeException (org.apache.hadoop.hive.serde2.SerDeException)3 IOException (java.io.IOException)2 MapWritable (org.apache.hadoop.io.MapWritable)2 Text (org.apache.hadoop.io.Text)2 Writable (org.apache.hadoop.io.Writable)2 StreamEvent (co.cask.cdap.api.flow.flowlet.StreamEvent)1 ByteArrayInputStream (java.io.ByteArrayInputStream)1 ObjectInputStream (java.io.ObjectInputStream)1 InvocationTargetException (java.lang.reflect.InvocationTargetException)1 LocalDate (java.time.LocalDate)1 LocalDateTime (java.time.LocalDateTime)1 ArrayList (java.util.ArrayList)1 List (java.util.List)1 Packet (net.ripe.hadoop.pcap.packet.Packet)1 Scan (org.apache.hadoop.hbase.client.Scan)1 ClientProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos)1 Date (org.apache.hadoop.hive.common.type.Date)1 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)1 Timestamp (org.apache.hadoop.hive.common.type.Timestamp)1