Use of org.apache.hadoop.io.serializer.SerializationFactory in project hadoop by apache.
The class MapTask, method getSplitDetails.
@SuppressWarnings("unchecked")
private <T> T getSplitDetails(Path file, long offset) throws IOException {
  FileSystem fs = file.getFileSystem(conf);
  FSDataInputStream inFile = fs.open(file);
  inFile.seek(offset);
  // The split file stores the split's class name first, as a Text string.
  String className = StringInterner.weakIntern(Text.readString(inFile));
  Class<T> cls;
  try {
    cls = (Class<T>) conf.getClassByName(className);
  } catch (ClassNotFoundException ce) {
    IOException wrap = new IOException("Split class " + className + " not found");
    wrap.initCause(ce);
    throw wrap;
  }
  // Resolve a deserializer for the split class and read the split itself.
  SerializationFactory factory = new SerializationFactory(conf);
  Deserializer<T> deserializer = (Deserializer<T>) factory.getDeserializer(cls);
  deserializer.open(inFile);
  T split = deserializer.deserialize(null);
  // Account for the raw bytes consumed by this split record.
  long pos = inFile.getPos();
  getCounters().findCounter(TaskCounter.SPLIT_RAW_BYTES).increment(pos - offset);
  inFile.close();
  return split;
}
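getSplitDetails expects each split record to start with a Text-encoded class name, followed by the bytes produced by the matching Serializer. A minimal writer-side sketch of that layout (writeSplitDetails is a hypothetical helper for illustration; the real write path lives in Hadoop's job-submission code):

@SuppressWarnings("unchecked")
private <T> void writeSplitDetails(FSDataOutputStream out, T split, Configuration conf)
    throws IOException {
  // Record the concrete split class so the reader can resolve a deserializer.
  Text.writeString(out, split.getClass().getName());
  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<T> serializer = factory.getSerializer((Class<T>) split.getClass());
  serializer.open(out);
  serializer.serialize(split);
}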
Use of org.apache.hadoop.io.serializer.SerializationFactory in project hadoop by apache.
The class TaggedInputSplit, method readFields.
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
  // Recover the three class names in the same order write() emitted them.
  inputSplitClass = (Class<? extends InputSplit>) readClass(in);
  inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
  mapperClass = (Class<? extends Mapper<?, ?, ?, ?>>) readClass(in);
  inputSplit = (InputSplit) ReflectionUtils.newInstance(inputSplitClass, conf);
  SerializationFactory factory = new SerializationFactory(conf);
  Deserializer deserializer = factory.getDeserializer(inputSplitClass);
  // The incoming DataInput is a DataInputStream in practice, hence the cast.
  deserializer.open((DataInputStream) in);
  inputSplit = (InputSplit) deserializer.deserialize(inputSplit);
}
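The readClass helper is not shown in this snippet; in the Hadoop source it reads a Text-encoded class name and resolves it through the task Configuration. A sketch along those lines:

private Class<?> readClass(DataInput in) throws IOException {
  String className = StringInterner.weakIntern(Text.readString(in));
  try {
    return conf.getClassByName(className);
  } catch (ClassNotFoundException e) {
    throw new RuntimeException("readObject can't find class", e);
  }
}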
Use of org.apache.hadoop.io.serializer.SerializationFactory in project hadoop by apache.
The class TaggedInputSplit, method write.
@SuppressWarnings("unchecked")
public void write(DataOutput out) throws IOException {
  // Write the three class names, then the wrapped split's own bytes.
  Text.writeString(out, inputSplitClass.getName());
  Text.writeString(out, inputFormatClass.getName());
  Text.writeString(out, mapperClass.getName());
  SerializationFactory factory = new SerializationFactory(conf);
  Serializer serializer = factory.getSerializer(inputSplitClass);
  serializer.open((DataOutputStream) out);
  serializer.serialize(inputSplit);
}
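write and readFields are mirror images: the class names written with Text.writeString are read back by readClass, and the serializer and deserializer the factory returns for inputSplitClass share a wire format. Because write casts its DataOutput to a DataOutputStream, a caller outside the framework must supply a stream-backed sink; a hypothetical driver (taggedSplit is an illustrative variable):

// DataOutputBuffer extends DataOutputStream, so the cast inside write() succeeds.
DataOutputBuffer buffer = new DataOutputBuffer();
taggedSplit.write(buffer);
byte[] raw = java.util.Arrays.copyOf(buffer.getData(), buffer.getLength());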
Use of org.apache.hadoop.io.serializer.SerializationFactory in project hadoop by apache.
The class TestWritableJobConf, method serDeser.
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer = factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer = factory.getDeserializer(GenericsUtil.getClass(conf));
  // Serialize into an in-memory buffer...
  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();
  // ...then deserialize a fresh copy from the same bytes.
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
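The test exercises this helper with a JobConf, which is Writable, so the default WritableSerialization handles the round trip. A hypothetical call site (the key and value are illustrative, not from the actual test):

JobConf original = new JobConf();
original.set("example.key", "example.value");  // hypothetical entry
JobConf restored = serDeser(original);
assertEquals("example.value", restored.get("example.key"));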
Use of org.apache.hadoop.io.serializer.SerializationFactory in project hadoop by apache.
The class Chain, method configure.
/**
 * Configures all the chain elements for the task.
 *
 * @param jobConf chain job's JobConf.
 */
public void configure(JobConf jobConf) {
  String prefix = getPrefix(isMap);
  chainJobConf = jobConf;
  SerializationFactory serializationFactory = new SerializationFactory(chainJobConf);
  int index = jobConf.getInt(prefix + CHAIN_MAPPER_SIZE, 0);
  for (int i = 0; i < index; i++) {
    Class<? extends Mapper> klass =
        jobConf.getClass(prefix + CHAIN_MAPPER_CLASS + i, null, Mapper.class);
    JobConf mConf =
        new JobConf(getChainElementConf(jobConf, prefix + CHAIN_MAPPER_CONFIG + i));
    Mapper mapper = ReflectionUtils.newInstance(klass, mConf);
    mappers.add(mapper);
    // When records pass by value, each mapper needs a Serialization for its
    // output key and value classes so the objects can be deep-copied.
    if (mConf.getBoolean(MAPPER_BY_VALUE, true)) {
      mappersKeySerialization.add(
          serializationFactory.getSerialization(mConf.getClass(MAPPER_OUTPUT_KEY_CLASS, null)));
      mappersValueSerialization.add(
          serializationFactory.getSerialization(mConf.getClass(MAPPER_OUTPUT_VALUE_CLASS, null)));
    } else {
      mappersKeySerialization.add(null);
      mappersValueSerialization.add(null);
    }
  }
  Class<? extends Reducer> klass =
      jobConf.getClass(prefix + CHAIN_REDUCER_CLASS, null, Reducer.class);
  if (klass != null) {
    JobConf rConf = new JobConf(getChainElementConf(jobConf, prefix + CHAIN_REDUCER_CONFIG));
    reducer = ReflectionUtils.newInstance(klass, rConf);
    if (rConf.getBoolean(REDUCER_BY_VALUE, true)) {
      reducerKeySerialization =
          serializationFactory.getSerialization(rConf.getClass(REDUCER_OUTPUT_KEY_CLASS, null));
      reducerValueSerialization =
          serializationFactory.getSerialization(rConf.getClass(REDUCER_OUTPUT_VALUE_CLASS, null));
    } else {
      reducerKeySerialization = null;
      reducerValueSerialization = null;
    }
  }
}
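Unlike the other examples, Chain asks the factory for a Serialization rather than a single Serializer or Deserializer, because passing records by value needs both directions of the pair. A minimal sketch of such a deep copy, using the same org.apache.hadoop.io classes as above (copyByValue and its parameters are illustrative names, not the actual Chain internals):

private <E> E copyByValue(Serialization<E> serialization, Class<E> cls, E obj)
    throws IOException {
  // Serialize the object into an in-memory buffer...
  DataOutputBuffer out = new DataOutputBuffer();
  Serializer<E> ser = serialization.getSerializer(cls);
  ser.open(out);
  ser.serialize(obj);
  ser.close();
  // ...and deserialize an independent copy from those bytes.
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  Deserializer<E> deser = serialization.getDeserializer(cls);
  deser.open(in);
  E copy = deser.deserialize(null);
  deser.close();
  return copy;
}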