Use of org.apache.hadoop.io.serializer.Deserializer in the Apache Hadoop project — class MapTask, method getSplitDetails:
/**
 * Reads the serialized split record stored in {@code file} at {@code offset}
 * and deserializes it into an instance of the class named in the stream.
 *
 * @param file   split file to read from
 * @param offset byte offset of this split's record within the file
 * @return the deserialized split object
 * @throws IOException if the split class cannot be loaded or the stream
 *                     cannot be read
 */
@SuppressWarnings("unchecked")
private <T> T getSplitDetails(Path file, long offset) throws IOException {
  FileSystem fs = file.getFileSystem(conf);
  // try-with-resources guarantees the stream is closed even when class
  // lookup or deserialization throws (the original leaked it on that path).
  try (FSDataInputStream inFile = fs.open(file)) {
    inFile.seek(offset);
    // The record starts with the split's fully-qualified class name.
    String className = StringInterner.weakIntern(Text.readString(inFile));
    Class<T> cls;
    try {
      cls = (Class<T>) conf.getClassByName(className);
    } catch (ClassNotFoundException ce) {
      // IOException(String, Throwable) replaces the verbose initCause idiom.
      throw new IOException("Split class " + className + " not found", ce);
    }
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer<T> deserializer = (Deserializer<T>) factory.getDeserializer(cls);
    deserializer.open(inFile);
    T split = deserializer.deserialize(null);
    // Charge the bytes consumed by this record to the SPLIT_RAW_BYTES counter.
    long pos = inFile.getPos();
    getCounters().findCounter(TaskCounter.SPLIT_RAW_BYTES).increment(pos - offset);
    return split;
  }
}
Use of org.apache.hadoop.io.serializer.Deserializer in the Apache Hadoop project — class TaggedInputSplit, method readFields:
/**
 * Deserializes this tagged split: reads the split, input-format, and mapper
 * classes, then uses the configured serialization framework to reconstruct
 * the wrapped {@link InputSplit}.
 *
 * @param in stream positioned at a record written by the matching write method
 * @throws IOException if a class cannot be resolved or the wrapped split
 *                     cannot be deserialized
 */
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
  inputSplitClass = (Class<? extends InputSplit>) readClass(in);
  inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
  mapperClass = (Class<? extends Mapper<?, ?, ?, ?>>) readClass(in);
  inputSplit = (InputSplit) ReflectionUtils.newInstance(inputSplitClass, conf);
  SerializationFactory factory = new SerializationFactory(conf);
  // Parameterized instead of the raw type; the unchecked cast is covered by
  // the method-level @SuppressWarnings already required for readClass casts.
  Deserializer<InputSplit> deserializer =
      (Deserializer<InputSplit>) factory.getDeserializer(inputSplitClass);
  // NOTE(review): assumes the DataInput is actually a DataInputStream — the
  // cast below throws ClassCastException otherwise; confirm against callers.
  deserializer.open((DataInputStream) in);
  inputSplit = deserializer.deserialize(inputSplit);
}
Use of org.apache.hadoop.io.serializer.Deserializer in the Cloudera Crunch project — class CrunchInputSplit, method readFields:
/**
 * Deserializes this split: reads the node index, any extra configuration
 * entries, the input-format and split classes, and finally the wrapped
 * {@link InputSplit} via the configured serialization framework.
 *
 * @param in stream positioned at a record written by the matching write method
 * @throws IOException if a class cannot be resolved or the wrapped split
 *                     cannot be deserialized
 */
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
  nodeIndex = in.readInt();
  // Extra configuration entries are stored as alternating key/value UTF
  // strings. The loop condition already handles size <= 0, so the original
  // "if (extraConfSize > 0)" guard was redundant and has been removed.
  int extraConfSize = in.readInt();
  for (int i = 0; i < extraConfSize; i++) {
    conf.set(in.readUTF(), in.readUTF());
  }
  inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
  Class<? extends InputSplit> inputSplitClass = (Class<? extends InputSplit>) readClass(in);
  inputSplit = (InputSplit) ReflectionUtils.newInstance(inputSplitClass, conf);
  SerializationFactory factory = new SerializationFactory(conf);
  // Parameterized instead of the raw type.
  Deserializer<InputSplit> deserializer =
      (Deserializer<InputSplit>) factory.getDeserializer(inputSplitClass);
  // NOTE(review): assumes the DataInput is actually a DataInputStream — the
  // cast below throws ClassCastException otherwise; confirm against callers.
  deserializer.open((DataInputStream) in);
  inputSplit = deserializer.deserialize(inputSplit);
}
Use of org.apache.hadoop.io.serializer.Deserializer in the CDAP project (by CaskData) — class TaggedInputSplit, method readFields:
/**
 * Deserializes this tagged split: reads the split class, any subclass
 * fields, and then the wrapped {@link InputSplit} via the configured
 * serialization framework.
 *
 * @param in stream positioned at a record written by the matching write method
 * @throws IOException if the split class cannot be resolved or the wrapped
 *                     split cannot be deserialized
 */
@SuppressWarnings("unchecked")
@Override
public final void readFields(DataInput in) throws IOException {
  Class<? extends InputSplit> inputSplitClass = (Class<? extends InputSplit>) readClass(in);
  // Hook for subclasses to read their own serialized state.
  readAdditionalFields(in);
  inputSplit = ReflectionUtils.newInstance(inputSplitClass, conf);
  SerializationFactory factory = new SerializationFactory(conf);
  // Parameterized instead of the raw type.
  Deserializer<InputSplit> deserializer =
      (Deserializer<InputSplit>) factory.getDeserializer(inputSplitClass);
  // NOTE(review): assumes the DataInput is actually a DataInputStream — the
  // cast below throws ClassCastException otherwise; confirm against callers.
  deserializer.open((DataInputStream) in);
  inputSplit = deserializer.deserialize(inputSplit);
}
Use of org.apache.hadoop.io.serializer.Deserializer in the Apache Ignite project — class HadoopV2TaskContext, method readExternalSplit:
/**
 * Reads a native Hadoop input split from the job's split file at the offset
 * recorded in the external split descriptor.
 *
 * @param split External split descriptor (carries the file offset).
 * @return Native input split object, never {@code null}.
 * @throws IgniteCheckedException If the file system cannot be obtained, the
 *         split file cannot be read, or the split class cannot be loaded.
 */
@SuppressWarnings("unchecked")
private Object readExternalSplit(HadoopExternalSplit split) throws IgniteCheckedException {
    Path jobDir = new Path(jobConf().get(MRJobConfig.MAPREDUCE_JOB_DIR));

    FileSystem fs;

    try {
        fs = fileSystemForMrUserWithCaching(jobDir.toUri(), jobConf(), fsMap);
    }
    catch (IOException e) {
        throw new IgniteCheckedException(e);
    }

    try (FSDataInputStream in = fs.open(JobSubmissionFiles.getJobSplitFile(jobDir))) {
        in.seek(split.offset());

        // The record starts with the split's fully-qualified class name.
        String clsName = Text.readString(in);

        Class<?> cls = jobConf().getClassByName(clsName);

        assert cls != null;

        Serialization serialization = new SerializationFactory(jobConf()).getSerialization(cls);

        Deserializer deserializer = serialization.getDeserializer(cls);

        deserializer.open(in);

        try {
            Object res = deserializer.deserialize(null);

            assert res != null;

            return res;
        }
        finally {
            // Close the deserializer even when deserialize() throws
            // (the original skipped close() on the exception path).
            deserializer.close();
        }
    }
    catch (IOException | ClassNotFoundException e) {
        throw new IgniteCheckedException(e);
    }
}
Aggregations