Example use of org.apache.ignite.internal.processors.hadoop.HadoopSerialization in the Apache Ignite project: class HadoopSerializationWrapperSelfTest, method testIntWritableSerialization.
/**
 * Tests read/write of {@link IntWritable} via the native {@code WritableSerialization}.
 *
 * @throws Exception If fails.
 */
public void testIntWritableSerialization() throws Exception {
    HadoopSerialization ser = new HadoopSerializationWrapper(new WritableSerialization(), IntWritable.class);

    ByteArrayOutputStream buf = new ByteArrayOutputStream();

    DataOutput out = new DataOutputStream(buf);

    ser.write(out, new IntWritable(3));
    ser.write(out, new IntWritable(-5));

    // IntWritable serializes each value as 4 big-endian bytes.
    assertEquals("[0, 0, 0, 3, -1, -1, -1, -5]", Arrays.toString(buf.toByteArray()));

    DataInput in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));

    assertEquals(3, ((IntWritable)ser.read(in, null)).get());
    assertEquals(-5, ((IntWritable)ser.read(in, null)).get());

    // Release serializer resources (consistent with testIntJavaSerialization, which closes its wrapper).
    ser.close();
}
Example use of org.apache.ignite.internal.processors.hadoop.HadoopSerialization in the Apache Ignite project: class HadoopV2JobSelfTest, method testCustomSerializationApplying.
/**
 * Tests that {@link HadoopJobEx} provides wrapped serializer if it's set in configuration.
 *
 * @throws IgniteCheckedException If fails.
 */
public void testCustomSerializationApplying() throws IgniteCheckedException {
    JobConf jobConf = new JobConf();

    jobConf.setMapOutputKeyClass(IntWritable.class);
    jobConf.setMapOutputValueClass(Text.class);
    jobConf.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, CustomSerialization.class.getName());

    HadoopDefaultJobInfo jobInfo = createJobInfo(jobConf, null);

    HadoopJobId jobId = new HadoopJobId(UUID.randomUUID(), 1);

    HadoopJobEx hadoopJob = jobInfo.createJob(HadoopV2Job.class, jobId, log, null, new HadoopHelperImpl());

    HadoopTaskContext ctx = hadoopJob.getTaskContext(new HadoopTaskInfo(HadoopTaskType.MAP, null, 0, 0, null));

    DataInput emptyIn = new DataInputStream(new ByteArrayInputStream(new byte[0]));

    // Key serializer must be the wrapper and must produce the custom test value.
    HadoopSerialization keySer = ctx.keySerialization();

    assertEquals(HadoopSerializationWrapper.class.getName(), keySer.getClass().getName());
    assertEquals(TEST_SERIALIZED_VALUE, keySer.read(emptyIn, null).toString());

    // Value serializer must behave identically.
    HadoopSerialization valSer = ctx.valueSerialization();

    assertEquals(HadoopSerializationWrapper.class.getName(), valSer.getClass().getName());
    assertEquals(TEST_SERIALIZED_VALUE, valSer.read(emptyIn, null).toString());
}
Example use of org.apache.ignite.internal.processors.hadoop.HadoopSerialization in the Apache Ignite project: class HadoopSerializationWrapperSelfTest, method testIntJavaSerialization.
/**
 * Tests read/write of Integer via native JavaSerialization.
 *
 * <p>NOTE(review): {@code ser.close()} is invoked before the reads below; this relies on
 * the wrapper remaining readable after close — confirm against HadoopSerializationWrapper.
 *
 * @throws Exception If fails.
 */
public void testIntJavaSerialization() throws Exception {
// Wrap Hadoop's JavaSerialization so plain Integer values round-trip through the wrapper.
HadoopSerialization ser = new HadoopSerializationWrapper(new JavaSerialization(), Integer.class);
ByteArrayOutputStream buf = new ByteArrayOutputStream();
DataOutput out = new DataOutputStream(buf);
// Write two values, then close the serializer before reading them back.
ser.write(out, 3);
ser.write(out, -5);
ser.close();
DataInput in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
// Passing null as the reuse object forces the wrapper to allocate fresh instances.
assertEquals(3, ((Integer) ser.read(in, null)).intValue());
assertEquals(-5, ((Integer) ser.read(in, null)).intValue());
}
Example use of org.apache.ignite.internal.processors.hadoop.HadoopSerialization in the Apache Ignite project: class HadoopShuffleJob, method onDirectShuffleMessage.
/**
 * Process shuffle message: decodes the key/value pairs carried by a direct shuffle
 * message and adds them to the local multimap for the target reducer.
 *
 * @param src Source node the message arrived from.
 * @param msg Direct shuffle message carrying serialized key/value pairs.
 * @throws IgniteCheckedException Exception.
 */
public void onDirectShuffleMessage(T src, HadoopDirectShuffleMessage msg) throws IgniteCheckedException {
byte[] buf = extractBuffer(msg);
assert buf != null;
int rdc = msg.reducer();
// Task context for the local reducer this message targets.
HadoopTaskContext taskCtx = locReducersCtx.get(rdc).get();
// Record receipt of the shuffle message in the performance counters.
HadoopPerformanceCounter perfCntr = HadoopPerformanceCounter.getCounter(taskCtx.counters(), null);
perfCntr.onShuffleMessage(rdc, U.currentTimeMillis());
HadoopMultimap map = getOrCreateMap(locMaps, rdc);
// NOTE(review): these serializations are never explicitly closed here — confirm the
// task context owns their lifecycle.
HadoopSerialization keySer = taskCtx.keySerialization();
HadoopSerialization valSer = taskCtx.valueSerialization();
// Add data from message to the map. The adder is closed by try-with-resources.
try (HadoopMultimap.Adder adder = map.startAdding(taskCtx)) {
HadoopDirectDataInput in = new HadoopDirectDataInput(buf);
Object key = null;
Object val = null;
// Pairs are laid out sequentially as (key, value) * msg.count(); passing the
// previous key/val back to read() lets the serialization reuse the objects.
for (int i = 0; i < msg.count(); i++) {
key = keySer.read(in, key);
val = valSer.read(in, val);
adder.write(key, val);
}
}
// Acknowledge once the local shuffle state says all expected messages arrived.
if (localShuffleState(src).onShuffleMessage())
sendFinishResponse(src, msg.jobId());
}
Aggregations