Use of org.apache.ignite.internal.processors.hadoop.HadoopClassLoader in the Apache Ignite project.
From the class IgniteHadoopIgfsSecondaryFileSystem, method start().
/** {@inheritDoc} */
@Override
public void start() throws IgniteException {
    // Resolve the shared Hadoop class loader and install it as the thread
    // context class loader while the delegate is created and started, so
    // Hadoop dependencies are resolved in isolation from Ignite core classes.
    HadoopClassLoader hadoopLdr = ctx.hadoopHelper().commonClassLoader();

    ClassLoader prevCtxLdr = HadoopCommonUtils.setContextClassLoader(hadoopLdr);

    try {
        target = HadoopDelegateUtils.secondaryFileSystemDelegate(hadoopLdr, this);

        target.start();
    }
    finally {
        // Always put the previous context class loader back, even on failure.
        HadoopCommonUtils.restoreContextClassLoader(prevCtxLdr);
    }
}
Use of org.apache.ignite.internal.processors.hadoop.HadoopClassLoader in the Apache Ignite project.
From the class HadoopSnappyTest, method testSnappy().
/**
 * Verifies Snappy codec usage both in the default class loader and in
 * freshly created Hadoop class loaders.
 *
 * @throws Throwable On error.
 */
public void testSnappy() throws Throwable {
    // Sanity-check Snappy in the test's own (default) class loader first.
    checkSnappy();

    // Repeat the same check from isolated class loaders, mimicking how
    // individual jobs and tasks each get a loader of their own.
    for (int idx = 0; idx < 2; idx++) {
        ClassLoader ldr = new HadoopClassLoader(null, "cl-" + idx, null, new HadoopHelperImpl());

        Class<?> reloaded = (Class)Class.forName(HadoopSnappyTest.class.getName(), true, ldr);

        // The test class must have been re-loaded by the new loader, not delegated.
        assertEquals(ldr, reloaded.getClassLoader());

        U.invoke(reloaded, null, "checkSnappy");
    }
}
Use of org.apache.ignite.internal.processors.hadoop.HadoopClassLoader in the Apache Ignite project.
From the class HadoopJobTracker, method start().
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
public void start(final HadoopContext ctx) throws IgniteCheckedException {
    super.start(ctx);

    busyLock = new GridSpinReadWriteLock();

    // Single-threaded executor: events are processed strictly in order.
    evtProcSvc = Executors.newFixedThreadPool(1);

    assert jobCls == null;

    // The job class is loaded through the common Hadoop class loader so its
    // Hadoop dependencies stay isolated from the Ignite core classpath.
    HadoopClassLoader ldr = ctx.kernalContext().hadoopHelper().commonClassLoader();

    try {
        jobCls = (Class<HadoopJobEx>)ldr.loadClass(HadoopCommonUtils.JOB_CLS_NAME);
    }
    catch (Exception e) {
        throw new IgniteCheckedException("Failed to load job class [class=" + HadoopCommonUtils.JOB_CLS_NAME + ']', e);
    }
}
Use of org.apache.ignite.internal.processors.hadoop.HadoopClassLoader in the Apache Ignite project.
From the class HadoopV2Job, method getTaskContext().
/** {@inheritDoc} */
@SuppressWarnings({ "unchecked", "MismatchedQueryAndUpdateOfCollection" })
@Override
public HadoopTaskContext getTaskContext(HadoopTaskInfo info) throws IgniteCheckedException {
// Task contexts are cached in 'ctxs' keyed by (type, task number). Futures are
// stored so concurrent callers for the same task wait on one initialization
// instead of each building their own context.
T2<HadoopTaskType, Integer> locTaskId = new T2<>(info.type(), info.taskNumber());
GridFutureAdapter<HadoopTaskContext> fut = ctxs.get(locTaskId);
if (fut != null)
return fut.get();
// Publish our future atomically; if another thread won the race, wait on its result.
GridFutureAdapter<HadoopTaskContext> old = ctxs.putIfAbsent(locTaskId, fut = new GridFutureAdapter<>());
if (old != null)
return old.get();
// Prefer a pooled, previously loaded context class to avoid creating a new class loader.
Class<? extends HadoopTaskContext> cls = taskCtxClsPool.poll();
try {
if (cls == null) {
// If there is no pooled class, then load new one.
// Note that the classloader identified by the task it was initially created for,
// but later it may be reused for other tasks.
HadoopClassLoader ldr = sharedClsLdr != null ? sharedClsLdr : createClassLoader(HadoopClassLoader.nameForTask(info, false));
cls = (Class<? extends HadoopTaskContext>) ldr.loadClass(HadoopV2TaskContext.class.getName());
fullCtxClsQueue.add(cls);
}
// The class comes from a separate class loader, so it must be instantiated reflectively.
Constructor<?> ctr = cls.getConstructor(HadoopTaskInfo.class, HadoopJobEx.class, HadoopJobId.class, UUID.class, DataInput.class);
// Lazily serialize the job configuration once (double-checked under the 'jobConf'
// monitor) so each task context can deserialize its own private copy.
// NOTE(review): this double-checked pattern is only safe if 'jobConfData' is
// volatile — field declaration is not visible here, confirm at the class.
if (jobConfData == null)
synchronized (jobConf) {
if (jobConfData == null) {
ByteArrayOutputStream buf = new ByteArrayOutputStream();
jobConf.write(new DataOutputStream(buf));
jobConfData = buf.toByteArray();
}
}
HadoopTaskContext res = (HadoopTaskContext) ctr.newInstance(info, this, jobId, locNodeId, new DataInputStream(new ByteArrayInputStream(jobConfData)));
// Complete the future so any threads waiting on it observe the created context.
fut.onDone(res);
return res;
} catch (Throwable e) {
// Propagate the failure to waiters through the future, then rethrow:
// Errors are rethrown as-is, everything else wrapped as IgniteCheckedException.
IgniteCheckedException te = transformException(e);
fut.onDone(te);
if (e instanceof Error)
throw (Error) e;
throw te;
}
}
Aggregations