Use of org.apache.ignite.internal.processors.hadoop.HadoopSplitWrapper in project ignite by apache: class HadoopUtils, method wrapSplit.
/**
 * Wraps a native Hadoop split by serializing it through its {@link Writable} contract.
 *
 * @param id Split ID.
 * @param split Split; must implement {@link Writable}.
 * @param hosts Hosts on which the split data resides.
 * @return Wrapper carrying the split's class name and its serialized bytes.
 * @throws IOException If serialization of the split failed.
 */
public static HadoopSplitWrapper wrapSplit(int id, Object split, String[] hosts) throws IOException {
    assert split instanceof Writable : split;

    ByteArrayOutputStream arr = new ByteArrayOutputStream();

    // try-with-resources guarantees the stream is flushed and closed even if write() throws.
    try (ObjectOutputStream out = new ObjectOutputStream(arr)) {
        ((Writable)split).write(out);
    }

    return new HadoopSplitWrapper(id, split.getClass().getName(), arr.toByteArray(), hosts);
}
Use of org.apache.ignite.internal.processors.hadoop.HadoopSplitWrapper in project ignite by apache: class HadoopSplitWrapperSelfTest, method testSerialization.
/**
 * Verifies that the wrapper round-trips through Java serialization together with the
 * wrapped native split, and that the hosts array is not carried across the wire.
 * @throws Exception If fails.
 */
public void testSerialization() throws Exception {
    FileSplit fileSplit = new FileSplit(new Path("/path/to/file"), 100, 500, new String[] { "host1", "host2" });

    assertEquals("/path/to/file:100+500", fileSplit.toString());

    HadoopSplitWrapper wrapper = HadoopUtils.wrapSplit(10, fileSplit, fileSplit.getLocations());

    assertEquals("[host1, host2]", Arrays.toString(wrapper.hosts()));

    // Serialize the wrapper into a byte buffer.
    ByteArrayOutputStream byteSink = new ByteArrayOutputStream();

    ObjectOutput objOut = new ObjectOutputStream(byteSink);

    objOut.writeObject(wrapper);

    // Deserialize it back and check the native split survived the round trip.
    ObjectInput objIn = new ObjectInputStream(new ByteArrayInputStream(byteSink.toByteArray()));

    final HadoopSplitWrapper restored = (HadoopSplitWrapper)objIn.readObject();

    assertEquals("/path/to/file:100+500", HadoopUtils.unwrapSplit(restored).toString());

    // Accessing hosts on the deserialized wrapper must fail: they are not serialized.
    GridTestUtils.assertThrows(log, new Callable<Object>() {
        @Override
        public Object call() throws Exception {
            restored.hosts();

            return null;
        }
    }, AssertionError.class, null);
}
Aggregations