Use of org.apache.hadoop.io.serializer.SerializationFactory in project crunch by cloudera:
the class CrunchInputSplit, method readFields.
/**
 * Reconstructs this split from the given stream: node index, extra
 * configuration key/value pairs, the input format class, and finally the
 * wrapped InputSplit via Hadoop's pluggable serialization framework.
 *
 * @param in stream positioned at data previously produced by {@code write}
 * @throws IOException if reading fails or no Hadoop serialization accepts
 *         the recorded split class
 */
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
nodeIndex = in.readInt();
int extraConfSize = in.readInt();
// A zero count simply skips the loop; no separate > 0 guard is needed.
for (int i = 0; i < extraConfSize; i++) {
conf.set(in.readUTF(), in.readUTF());
}
inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
Class<? extends InputSplit> inputSplitClass = (Class<? extends InputSplit>) readClass(in);
// Instantiate an empty split of the recorded class, then let the
// framework-resolved deserializer populate it from the stream.
inputSplit = (InputSplit) ReflectionUtils.newInstance(inputSplitClass, conf);
SerializationFactory factory = new SerializationFactory(conf);
Deserializer deserializer = factory.getDeserializer(inputSplitClass);
if (deserializer == null) {
// getDeserializer returns null when no registered serialization
// accepts the class; fail with a clear message instead of an NPE.
throw new IOException("No Hadoop deserializer found for " + inputSplitClass.getName());
}
// NOTE(review): assumes the framework hands us a DataInputStream, as the
// MapReduce split-reading path does — the cast fails otherwise.
deserializer.open((DataInputStream) in);
inputSplit = (InputSplit) deserializer.deserialize(inputSplit);
}
Use of org.apache.hadoop.io.serializer.SerializationFactory in project crunch by cloudera:
the class CrunchInputSplit, method write.
/**
 * Serializes this split: node index, extra configuration entries, the input
 * format and split class names, then the wrapped split itself through
 * Hadoop's serialization framework.
 *
 * @param out stream to write the split into
 * @throws IOException if writing to the stream fails
 */
public void write(DataOutput out) throws IOException {
out.writeInt(nodeIndex);
out.writeInt(extraConf.size());
for (Map.Entry<String, String> entry : extraConf.entrySet()) {
out.writeUTF(entry.getKey());
out.writeUTF(entry.getValue());
}
Class<?> splitClass = inputSplit.getClass();
Text.writeString(out, inputFormatClass.getName());
Text.writeString(out, splitClass.getName());
SerializationFactory serializationFactory = new SerializationFactory(conf);
Serializer serializer = serializationFactory.getSerializer(splitClass);
serializer.open((DataOutputStream) out);
serializer.serialize(inputSplit);
}
Use of org.apache.hadoop.io.serializer.SerializationFactory in project gora by apache:
the class IOUtils, method deserialize.
/**
 * Deserializes the object in the given data input using
 * available Hadoop serializations.
 *
 * <p>The serialized form is expected to be a vint length followed by that
 * many bytes of payload, matching what the companion serialize produces.
 *
 * @param conf Hadoop conf.
 * @param in data input stream where serialized content is read.
 * @param <T> object class type.
 * @param obj data object to reuse, or null to have a fresh one created.
 * @param objClass object class type.
 * @throws IOException occurred while deserializing the byte content, or if
 *         no registered Hadoop serialization accepts {@code objClass}.
 * @return deserialized object.
 */
public static <T> T deserialize(Configuration conf, DataInput in, T obj, Class<T> objClass) throws IOException {
SerializationFactory serializationFactory = new SerializationFactory(getOrCreateConf(conf));
Deserializer<T> deserializer = serializationFactory.getDeserializer(objClass);
if (deserializer == null) {
// getDeserializer returns null when no serialization accepts the class;
// fail with a clear message instead of an NPE at open() below.
throw new IOException("No Hadoop deserializer found for " + objClass.getName());
}
// Length-prefixed payload: read the vint length, then the raw bytes.
int length = WritableUtils.readVInt(in);
byte[] arr = new byte[length];
in.readFully(arr);
List<ByteBuffer> list = new ArrayList<>();
list.add(ByteBuffer.wrap(arr));
try (ByteBufferInputStream is = new ByteBufferInputStream(list)) {
deserializer.open(is);
return deserializer.deserialize(obj);
} finally {
// deserializer is known non-null here, so close it unconditionally.
deserializer.close();
}
}
Use of org.apache.hadoop.io.serializer.SerializationFactory in project cdap by caskdata:
the class TaggedInputSplit, method readFields.
/**
 * Reads the delegate split's class name and any subclass-specific fields,
 * then reconstructs the delegate split through Hadoop's serialization
 * framework.
 *
 * @param in stream positioned at data written by the matching write.
 * @throws IOException if reading from the stream fails.
 */
@SuppressWarnings("unchecked")
@Override
public final void readFields(DataInput in) throws IOException {
Class<? extends InputSplit> splitClass = (Class<? extends InputSplit>) readClass(in);
readAdditionalFields(in);
inputSplit = ReflectionUtils.newInstance(splitClass, conf);
SerializationFactory serializationFactory = new SerializationFactory(conf);
Deserializer deserializer = serializationFactory.getDeserializer(splitClass);
deserializer.open((DataInputStream) in);
inputSplit = (InputSplit) deserializer.deserialize(inputSplit);
}
Use of org.apache.hadoop.io.serializer.SerializationFactory in project cdap by caskdata:
the class TaggedInputSplit, method write.
/**
 * Writes the delegate split's class name, subclass-specific fields, and the
 * delegate split itself via Hadoop's serialization framework.
 *
 * @param out stream to write into.
 * @throws IOException if writing to the stream fails.
 */
@SuppressWarnings("unchecked")
@Override
public final void write(DataOutput out) throws IOException {
Class<? extends InputSplit> splitClass = inputSplit.getClass();
Text.writeString(out, splitClass.getName());
writeAdditionalFields(out);
SerializationFactory serializationFactory = new SerializationFactory(conf);
Serializer serializer = serializationFactory.getSerializer(splitClass);
serializer.open((DataOutputStream) out);
serializer.serialize(inputSplit);
}
Aggregations