Use of org.apache.hadoop.io.ObjectWritable in project hbase by apache.
The class HbaseObjectWritableFor96Migration, method readObject.
/**
 * Read a {@link Writable}, {@link String}, primitive type, or an array of
 * the preceding.
 * @param in the stream to read from
 * @param objectWritable if non-null, receives the declared class and instance that were read
 * @param conf configuration used to resolve classes by name
 * @return the object read from the stream
 * @throws IOException if the value cannot be read or its class cannot be found
 */
@SuppressWarnings("unchecked")
static Object readObject(DataInput in, HbaseObjectWritableFor96Migration objectWritable,
    Configuration conf) throws IOException {
  Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
  Object instance;
  if (declaredClass.isPrimitive()) {
    // primitive types
    if (declaredClass == Boolean.TYPE) {
      // boolean
      instance = Boolean.valueOf(in.readBoolean());
    } else if (declaredClass == Character.TYPE) {
      // char
      instance = Character.valueOf(in.readChar());
    } else if (declaredClass == Byte.TYPE) {
      // byte
      instance = Byte.valueOf(in.readByte());
    } else if (declaredClass == Short.TYPE) {
      // short
      instance = Short.valueOf(in.readShort());
    } else if (declaredClass == Integer.TYPE) {
      // int
      instance = Integer.valueOf(in.readInt());
    } else if (declaredClass == Long.TYPE) {
      // long
      instance = Long.valueOf(in.readLong());
    } else if (declaredClass == Float.TYPE) {
      // float
      instance = Float.valueOf(in.readFloat());
    } else if (declaredClass == Double.TYPE) {
      // double
      instance = Double.valueOf(in.readDouble());
    } else if (declaredClass == Void.TYPE) {
      // void
      instance = null;
    } else {
      throw new IllegalArgumentException("Not a primitive: " + declaredClass);
    }
  } else if (declaredClass.isArray()) {
    // array
    if (declaredClass.equals(byte[].class)) {
      instance = Bytes.readByteArray(in);
    } else {
      int length = in.readInt();
      instance = Array.newInstance(declaredClass.getComponentType(), length);
      for (int i = 0; i < length; i++) {
        Array.set(instance, i, readObject(in, conf));
      }
    }
  } else if (declaredClass.equals(Array.class)) {
    // an array not declared in CLASS_TO_CODE
    Class<?> componentType = readClass(conf, in);
    int length = in.readInt();
    instance = Array.newInstance(componentType, length);
    for (int i = 0; i < length; i++) {
      Array.set(instance, i, readObject(in, conf));
    }
  } else if (List.class.isAssignableFrom(declaredClass)) {
    // List
    int length = in.readInt();
    instance = new ArrayList(length);
    for (int i = 0; i < length; i++) {
      ((ArrayList) instance).add(readObject(in, conf));
    }
  } else if (declaredClass == String.class) {
    // String
    instance = Text.readString(in);
  } else if (declaredClass.isEnum()) {
    // enum
    instance = Enum.valueOf((Class<? extends Enum>) declaredClass, Text.readString(in));
  } else if (declaredClass == Message.class) {
    String className = Text.readString(in);
    try {
      declaredClass = getClassByName(conf, className);
      instance = tryInstantiateProtobuf(declaredClass, in);
    } catch (ClassNotFoundException e) {
      LOG.error("Can't find class " + className, e);
      throw new IOException("Can't find class " + className, e);
    }
  } else if (Scan.class.isAssignableFrom(declaredClass)) {
    int length = in.readInt();
    byte[] scanBytes = new byte[length];
    in.readFully(scanBytes);
    ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
    ProtobufUtil.mergeFrom(scanProto, scanBytes);
    instance = ProtobufUtil.toScan(scanProto.build());
  } else {
    // Writable or Serializable
    Class instanceClass = null;
    int b = (byte) WritableUtils.readVInt(in);
    if (b == NOT_ENCODED) {
      String className = Text.readString(in);
      if ("org.apache.hadoop.hbase.regionserver.wal.HLog$Entry".equals(className)) {
        className = Entry.class.getName();
      }
      try {
        instanceClass = getClassByName(conf, className);
      } catch (ClassNotFoundException e) {
        LOG.error("Can't find class " + className, e);
        throw new IOException("Can't find class " + className, e);
      }
    } else {
      instanceClass = CODE_TO_CLASS.get(b);
    }
    if (Writable.class.isAssignableFrom(instanceClass)) {
      Writable writable = WritableFactories.newInstance(instanceClass, conf);
      try {
        writable.readFields(in);
      } catch (Exception e) {
        LOG.error("Error in readFields", e);
        throw new IOException("Error in readFields", e);
      }
      instance = writable;
      if (instanceClass == NullInstance.class) {
        // null
        declaredClass = ((NullInstance) instance).declaredClass;
        instance = null;
      }
    } else {
      int length = in.readInt();
      byte[] objectBytes = new byte[length];
      in.readFully(objectBytes);
      ByteArrayInputStream bis = null;
      ObjectInputStream ois = null;
      try {
        bis = new ByteArrayInputStream(objectBytes);
        ois = new ObjectInputStream(bis);
        instance = ois.readObject();
      } catch (ClassNotFoundException e) {
        LOG.error("Class not found when attempting to deserialize object", e);
        throw new IOException("Class not found when attempting to deserialize object", e);
      } finally {
        if (bis != null) bis.close();
        if (ois != null) ois.close();
      }
    }
  }
  if (objectWritable != null) {
    // store values
    objectWritable.declaredClass = declaredClass;
    objectWritable.instance = instance;
  }
  return instance;
}
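The dispatch above is driven entirely by the type code read with WritableUtils.readVInt, so a caller only needs a positioned DataInput and a Configuration for class resolution. A minimal caller sketch, assuming a file holding one value in the legacy pre-0.96 encoding; the file name is illustrative, standard java.io and HBase imports are assumed, and the method's default visibility means a real caller sits in the same package.

// Hedged sketch, not HBase code: read back one value written in the legacy format.
Configuration conf = HBaseConfiguration.create();
try (DataInputStream in = new DataInputStream(new FileInputStream("legacy-value.bin"))) {
  // Passing null for objectWritable: only the instance is wanted, not the declared class.
  Object value = HbaseObjectWritableFor96Migration.readObject(in, null, conf);
  System.out.println("read " + (value == null ? "null" : value.getClass().getName()));
}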
Use of org.apache.hadoop.io.ObjectWritable in project cdap by caskdata.
The class DatasetSerDe, method deserialize.
@Override
public Object deserialize(Writable writable) throws SerDeException {
  ObjectWritable objectWritable = (ObjectWritable) writable;
  Object obj = objectWritable.get();
  try {
    return deserializer.deserialize(obj);
  } catch (Throwable t) {
    LOG.error("Unable to deserialize object {}.", obj, t);
    throw new SerDeException("Unable to deserialize an object.", t);
  }
}
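Each SerDe in this listing leans on the same contract: the record reader hands over an ObjectWritable and the SerDe unwraps it with get(). A self-contained round-trip sketch of that contract using only stock Hadoop classes; the class name and the Text payload are illustrative, not CDAP code.

// Illustrative round trip: wrap a value, serialize it, read it back, unwrap it.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;

public class ObjectWritableRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Wrap: ObjectWritable records the declared class and the instance.
    ObjectWritable wrapped = new ObjectWritable(Text.class, new Text("hello"));
    wrapped.setConf(conf);

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    wrapped.write(new DataOutputStream(bytes));

    // Unwrap: readFields resolves the wrapped class through the Configuration, so set it first.
    ObjectWritable unwrapped = new ObjectWritable();
    unwrapped.setConf(conf);
    unwrapped.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

    Text restored = (Text) unwrapped.get();
    System.out.println(restored); // prints "hello"
  }
}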
Use of org.apache.hadoop.io.ObjectWritable in project hadoop-pcap by RIPE-NCC.
The class PcapDeserializer, method deserialize.
@Override
public Object deserialize(Writable w) throws SerDeException {
  ObjectWritable obj = (ObjectWritable) w;
  Packet packet = (Packet) obj.get();
  for (int i = 0; i < numColumns; i++) {
    String columnName = columnNames.get(i);
    Object value = packet.get(columnName);
    row.set(i, value);
  }
  return row;
}
Use of org.apache.hadoop.io.ObjectWritable in project cdap by caskdata.
The class StreamSerDe, method deserialize.
@Override
public Object deserialize(Writable writable) throws SerDeException {
  // The writable should always contain a StreamEvent object provided by the StreamRecordReader
  ObjectWritable objectWritable = (ObjectWritable) writable;
  StreamEvent streamEvent = (StreamEvent) objectWritable.get();
  // timestamp and headers are always guaranteed to be first.
  List<Object> event = Lists.newArrayList();
  event.add(streamEvent.getTimestamp());
  event.add(streamEvent.getHeaders());
  try {
    // The format should always format the stream event into a record.
    event.addAll(deserializer.translateRecord(streamFormat.read(streamEvent)));
    return event;
  } catch (Throwable t) {
    LOG.info("Unable to format the stream body.", t);
    throw new SerDeException("Unable to format the stream body.", t);
  }
}
Use of org.apache.hadoop.io.ObjectWritable in project cdap by caskdata.
The class TaggedWritable, method setConf.
@Override
public void setConf(Configuration conf) {
  this.conf = conf;
  recordWritable = new ObjectWritable();
  // ObjectWritable does not set conf while reading fields
  recordWritable.setConf(conf);
}
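Hadoop typically instantiates such a Writable reflectively and, because it is Configurable, calls setConf before readFields; that ordering is what lets the wrapped ObjectWritable resolve class names during deserialization. Below is an illustrative delegating Writable built on the same pattern; it is a sketch, not the actual CDAP TaggedWritable, and the class and method names are assumptions.

// Illustrative sketch: a Writable that wraps an arbitrary record in an ObjectWritable
// and injects the Configuration before any fields are read.
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Writable;

public class ConfiguredRecordWritable implements Writable, Configurable {
  private Configuration conf;
  private ObjectWritable recordWritable;

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
    recordWritable = new ObjectWritable();
    // Same move as setConf above: hand the conf to ObjectWritable up front,
    // because readFields resolves the wrapped class through it.
    recordWritable.setConf(conf);
  }

  @Override
  public Configuration getConf() {
    return conf;
  }

  public void setRecord(Object record) {
    // ObjectWritable.set stores the instance and its class for serialization.
    recordWritable.set(record);
  }

  public Object getRecord() {
    return recordWritable.get();
  }

  @Override
  public void write(DataOutput out) throws IOException {
    recordWritable.write(out);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    recordWritable.readFields(in);
  }
}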