Use of org.objectweb.asm.ClassVisitor in project drill by axbaretto.
The class MergeAdapter, method getMergedClass:
public static MergedClassResult getMergedClass(final ClassSet set, final byte[] precompiledClass, ClassNode generatedClass, final boolean scalarReplace) {
  if (verifyBytecode) {
    if (!AsmUtil.isClassBytesOk(logger, "precompiledClass", precompiledClass)) {
      throw new IllegalStateException("Problem found in precompiledClass");
    }
    if ((generatedClass != null) && !AsmUtil.isClassOk(logger, "generatedClass", generatedClass)) {
      throw new IllegalStateException("Problem found in generatedClass");
    }
  }
  /*
   * Set up adapters for merging, remapping class names and class writing. This is done in
   * reverse order of how they will be evaluated.
   */
  final RemapClasses re = new RemapClasses(set);
  try {
    if (scalarReplace && generatedClass != null) {
      if (logger.isDebugEnabled()) {
        AsmUtil.logClass(logger, "generated " + set.generated.dot, generatedClass);
      }
      final ClassNode generatedMerged = new ClassNode();
      ClassVisitor mergeGenerator = generatedMerged;
      if (verifyBytecode) {
        mergeGenerator = new DrillCheckClassAdapter(CompilationConfig.ASM_API_VERSION, new CheckClassVisitorFsm(CompilationConfig.ASM_API_VERSION, generatedMerged), true);
      }
      /*
       * Even though we're effectively transforming-creating a new class in mergeGenerator,
       * there's no way to pass in ClassWriter.COMPUTE_MAXS, which would save us from having
       * to figure out stack size increases on our own. That gets handled by the
       * InstructionModifier (from inside ValueHolderReplacement > ScalarReplacementNode).
       */
      generatedClass.accept(new ValueHolderReplacementVisitor(mergeGenerator, verifyBytecode));
      if (verifyBytecode) {
        if (!AsmUtil.isClassOk(logger, "generatedMerged", generatedMerged)) {
          throw new IllegalStateException("Problem found with generatedMerged");
        }
      }
      generatedClass = generatedMerged;
    }
    final ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
    ClassVisitor writerVisitor = writer;
    if (verifyBytecode) {
      writerVisitor = new DrillCheckClassAdapter(CompilationConfig.ASM_API_VERSION, new CheckClassVisitorFsm(CompilationConfig.ASM_API_VERSION, writerVisitor), true);
    }
    ClassVisitor remappingAdapter = new RemappingClassAdapter(writerVisitor, re);
    if (verifyBytecode) {
      remappingAdapter = new DrillCheckClassAdapter(CompilationConfig.ASM_API_VERSION, new CheckClassVisitorFsm(CompilationConfig.ASM_API_VERSION, remappingAdapter), true);
    }
    ClassVisitor visitor = remappingAdapter;
    if (generatedClass != null) {
      visitor = new MergeAdapter(set, remappingAdapter, generatedClass);
    }
    ClassReader tReader = new ClassReader(precompiledClass);
    tReader.accept(visitor, ClassReader.SKIP_FRAMES);
    byte[] outputClass = writer.toByteArray();
    if (logger.isDebugEnabled()) {
      AsmUtil.logClassFromBytes(logger, "merged " + set.generated.dot, outputClass);
    }
    return new MergedClassResult(outputClass, re.getInnerClasses());
  } catch (Error | RuntimeException e) {
    logger.error("Failure while merging classes.", e);
    AsmUtil.logClass(logger, "generatedClass", generatedClass);
    throw e;
  }
}
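Stripped of the merging, remapping and verification adapters, the pipeline above is the standard ASM pattern: a ClassReader feeds a chain of ClassVisitors that ends in a ClassWriter. Below is a minimal, self-contained sketch of that pattern (class and method names are mine, not Drill's); COMPUTE_FRAMES on the writer is what lets the reader safely use SKIP_FRAMES.

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Opcodes;

public class VisitorChainSketch {
  public static byte[] passThrough(byte[] classBytes) {
    // The writer sits at the end of the chain; COMPUTE_FRAMES recomputes
    // stack map frames, so the reader can skip the original ones.
    ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
    // A no-op visitor that forwards every event to the writer; Drill slots
    // MergeAdapter / RemappingClassAdapter / check adapters into this position.
    ClassVisitor chain = new ClassVisitor(Opcodes.ASM5, writer) { };
    new ClassReader(classBytes).accept(chain, ClassReader.SKIP_FRAMES);
    return writer.toByteArray();
  }
}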
Use of org.objectweb.asm.ClassVisitor in project cdap by caskdata.
The class SparkClassRewriter, method rewriteSetProperties:
/**
 * Rewrites a class by rewriting all calls to {@link System#setProperty(String, String)} to
 * {@link SparkRuntimeEnv#setProperty(String, String)}.
 *
 * @param byteCodeStream {@link InputStream} for reading in the original bytecode.
 * @return the rewritten bytecode
 */
private byte[] rewriteSetProperties(InputStream byteCodeStream) throws IOException {
  final Type systemType = Type.getType(System.class);
  ClassReader cr = new ClassReader(byteCodeStream);
  ClassWriter cw = new ClassWriter(0);
  cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
    @Override
    public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
      MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
      return new MethodVisitor(Opcodes.ASM5, mv) {
        @Override
        public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
          // If we see a call to System.setProperty, change it to SparkRuntimeEnv.setProperty
          if (opcode == Opcodes.INVOKESTATIC && name.equals("setProperty") && owner.equals(systemType.getInternalName())) {
            super.visitMethodInsn(opcode, SPARK_RUNTIME_ENV_TYPE.getInternalName(), name, desc, false);
          } else {
            super.visitMethodInsn(opcode, owner, name, desc, itf);
          }
        }
      };
    }
  }, ClassReader.EXPAND_FRAMES);
  return cw.toByteArray();
}
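The snippet does not show how the rewritten bytes get used. As a rough, hypothetical sketch (the loader below and its rewrite() hook are assumptions for illustration, not CDAP's actual class loader), bytes produced by a rewriter like rewriteSetProperties are typically installed by defining the class from a custom ClassLoader:

import java.io.IOException;
import java.io.InputStream;

public class RewritingClassLoader extends ClassLoader {

  public RewritingClassLoader(ClassLoader parent) {
    super(parent);
  }

  // Note: with default parent-first delegation, findClass() is only reached for classes
  // the parent cannot load; a real rewriting loader usually overrides loadClass() or
  // filters its parent so the target classes actually get here.
  @Override
  protected Class<?> findClass(String name) throws ClassNotFoundException {
    String resource = name.replace('.', '/') + ".class";
    try (InputStream in = getParent().getResourceAsStream(resource)) {
      if (in == null) {
        throw new ClassNotFoundException(name);
      }
      byte[] rewritten = rewrite(in); // e.g. delegate to an ASM rewriter such as the one above
      return defineClass(name, rewritten, 0, rewritten.length);
    } catch (IOException e) {
      throw new ClassNotFoundException(name, e);
    }
  }

  // Placeholder for the actual ASM-based rewrite.
  protected byte[] rewrite(InputStream in) throws IOException {
    return in.readAllBytes();
  }
}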
Use of org.objectweb.asm.ClassVisitor in project cdap by caskdata.
The class SparkClassRewriter, method rewriteConstructor:
/**
 * Rewrites the constructors that don't delegate to another constructor with the given {@link ConstructorRewriter}.
 *
 * @param classType type of the class to be defined
 * @param byteCodeStream {@link InputStream} for reading the original bytecode of the class
 * @param rewriter a {@link ConstructorRewriter} for rewriting the constructor
 * @return the rewritten bytecode
 */
private byte[] rewriteConstructor(final Type classType, InputStream byteCodeStream, final ConstructorRewriter rewriter) throws IOException {
  ClassReader cr = new ClassReader(byteCodeStream);
  ClassWriter cw = new ClassWriter(0);
  cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
    @Override
    public MethodVisitor visitMethod(int access, final String name, final String desc, String signature, String[] exceptions) {
      // Call super so that the method signature is registered with the ClassWriter (parent)
      MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
      // We only attempt to rewrite constructors
      if (!"<init>".equals(name)) {
        return mv;
      }
      return new AdviceAdapter(Opcodes.ASM5, mv, access, name, desc) {
        boolean calledThis;

        @Override
        public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
          // See if this constructor delegates to another constructor of the same class (this(...)).
          calledThis = calledThis || (opcode == Opcodes.INVOKESPECIAL && Type.getObjectType(owner).equals(classType) && name.equals("<init>") && Type.getReturnType(desc).equals(Type.VOID_TYPE));
          super.visitMethodInsn(opcode, owner, name, desc, itf);
        }

        @Override
        protected void onMethodEnter() {
          if (calledThis) {
            // Constructors that delegate via this() don't need rewriting
            return;
          }
          rewriter.onMethodEnter(name, desc, this);
        }

        @Override
        protected void onMethodExit(int opcode) {
          if (calledThis) {
            // Constructors that delegate via this() don't need rewriting
            return;
          }
          // Invoke the rewriter hook on the normal method return path
          if (opcode == RETURN) {
            rewriter.onMethodExit(name, desc, this);
          }
        }
      };
    }
  }, ClassReader.EXPAND_FRAMES);
  return cw.toByteArray();
}
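The ConstructorRewriter itself is not shown in this snippet. The sketch below infers its shape from the two call sites above (the constructor name, descriptor, and the AdviceAdapter, which is a GeneratorAdapter, are passed in); the interface name, the method signatures, and the Hook target class are assumptions and may not match CDAP's actual ConstructorRewriter.

import org.objectweb.asm.Type;
import org.objectweb.asm.commons.GeneratorAdapter;
import org.objectweb.asm.commons.Method;

// Inferred shape; CDAP's real ConstructorRewriter may differ.
interface ConstructorRewriterSketch {
  void onMethodEnter(String name, String desc, GeneratorAdapter generator);
  void onMethodExit(String name, String desc, GeneratorAdapter generator);
}

class HookInjectingRewriter implements ConstructorRewriterSketch {
  // Hypothetical static hook class; substitute the real target type.
  private static final Type HOOK_TYPE = Type.getObjectType("com/example/Hook");

  @Override
  public void onMethodEnter(String name, String desc, GeneratorAdapter generator) {
    // Inject Hook.onEnter() at the start of every non-delegating constructor.
    generator.invokeStatic(HOOK_TYPE, new Method("onEnter", Type.VOID_TYPE, new Type[0]));
  }

  @Override
  public void onMethodExit(String name, String desc, GeneratorAdapter generator) {
    // Inject Hook.onExit() just before each normal RETURN.
    generator.invokeStatic(HOOK_TYPE, new Method("onExit", Type.VOID_TYPE, new Type[0]));
  }
}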
Use of org.objectweb.asm.ClassVisitor in project cdap by caskdata.
The class SparkClassRewriter, method rewritePythonRunner:
/**
 * Rewrites the PythonRunner.main() method to wrap it with a call to SparkRuntimeUtils.initSparkMain()
 * on entry and a cancel of the returned Cancellable on exit. Also wraps the main() body with a try block
 * that turns the {@code SparkUserAppException} into a {@link RuntimeException}, to avoid Spark calling
 * System.exit in {@link SparkSubmit}.
 */
private byte[] rewritePythonRunner(InputStream byteCodeStream) throws IOException {
  ClassReader cr = new ClassReader(byteCodeStream);
  ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
  // Intercept the static void main(String[] args) method.
  final Method mainMethod = new Method("main", Type.VOID_TYPE, new Type[] { Type.getType(String[].class) });
  cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
    @Override
    public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
      MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
      if (!mainMethod.equals(new Method(name, desc)) || !Modifier.isStatic(access)) {
        return mv;
      }
      return new AdviceAdapter(Opcodes.ASM5, mv, access, name, desc) {
        final Type sparkUserAppExceptionType = Type.getObjectType("org/apache/spark/SparkUserAppException");
        final Type cancellableType = Type.getObjectType("org/apache/twill/common/Cancellable");
        final Label tryLabel = newLabel();
        final Label tryEndLabel = newLabel();
        final Label catchLabel = newLabel();
        final Label finallyLabel = newLabel();
        int cancellable;

        @Override
        protected void onMethodEnter() {
          cancellable = newLocal(cancellableType);
          invokeStatic(SPARK_RUNTIME_UTILS_TYPE, new Method("initSparkMain", cancellableType, EMPTY_ARGS));
          storeLocal(cancellable);
          // try {
          visitTryCatchBlock(tryLabel, tryEndLabel, catchLabel, sparkUserAppExceptionType.getInternalName());
          visitLabel(tryLabel);
        }

        @Override
        protected void onMethodExit(int opcode) {
          // } catch (SparkUserAppException e) {
          //   throw new RuntimeException(e);
          visitLabel(tryEndLabel);
          goTo(finallyLabel);
          visitLabel(catchLabel);
          int exception = newLocal(sparkUserAppExceptionType);
          storeLocal(exception);
          newInstance(Type.getType(RuntimeException.class));
          dup();
          loadLocal(exception);
          invokeConstructor(Type.getType(RuntimeException.class), Methods.getMethod(void.class, "<init>", Throwable.class));
          throwException();
          // } finally {
          //   cancellable.cancel()
          // }
          visitLabel(finallyLabel);
          loadLocal(cancellable);
          invokeInterface(cancellableType, new Method("cancel", Type.VOID_TYPE, EMPTY_ARGS));
        }
      };
    }
  }, ClassReader.EXPAND_FRAMES);
  return cw.toByteArray();
}
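Read back as source, the advice woven into main() above yields roughly the structure below. This is an approximation reconstructed from the labels and inline comments in the snippet (the original PythonRunner body is elided); note that the generated cancel() call sits on the normal return path reached via goTo(finallyLabel), not in a true finally block.

public static void main(String[] args) {
  Cancellable cancellable = SparkRuntimeUtils.initSparkMain();
  try {
    // ... original PythonRunner.main() body ...
  } catch (SparkUserAppException e) {
    // Rethrow so that Spark does not call System.exit() from SparkSubmit.
    throw new RuntimeException(e);
  }
  cancellable.cancel(); // reached on the normal return path
}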
Use of org.objectweb.asm.ClassVisitor in project cdap by caskdata.
The class SparkClassRewriter, method determineAkkaDispatcherReturnType:
/**
 * Finds the return type of the ActorSystem.dispatcher() method. It is ExecutionContextExecutor in
 * Akka 2.3 (Spark 1.2+) and ExecutionContext in Akka 2.2 (Spark < 1.2, which CDAP doesn't support;
 * however, the Spark 1.5 shipped with CDH 5.6 still has Akka 2.2 instead of 2.3).
 *
 * @return the return type of the ActorSystem.dispatcher() method, or {@code null} if no such method exists
 */
@Nullable
private Type determineAkkaDispatcherReturnType() {
  URL resource = resourceLookup.apply("akka/actor/ActorSystem.class");
  if (resource == null) {
    return null;
  }
  try (InputStream is = resource.openStream()) {
    final AtomicReference<Type> result = new AtomicReference<>();
    ClassReader cr = new ClassReader(is);
    cr.accept(new ClassVisitor(Opcodes.ASM5) {
      @Override
      public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
        if (name.equals("dispatcher") && Type.getArgumentTypes(desc).length == 0) {
          // Expected to be either ExecutionContext (akka 2.2, only in CDH spark)
          // or ExecutionContextExecutor (akka 2.3, for open source, HDP spark).
          Type returnType = Type.getReturnType(desc);
          if (returnType.equals(EXECUTION_CONTEXT_TYPE) || returnType.equals(EXECUTION_CONTEXT_EXECUTOR_TYPE)) {
            result.set(returnType);
          } else {
            LOG.warn("Unsupported return type of ActorSystem.dispatcher(): {}", returnType.getClassName());
          }
        }
        return super.visitMethod(access, name, desc, signature, exceptions);
      }
    }, ClassReader.SKIP_DEBUG | ClassReader.SKIP_CODE | ClassReader.SKIP_FRAMES);
    return result.get();
  } catch (IOException e) {
    LOG.warn("Failed to determine ActorSystem dispatcher() return type.", e);
    return null;
  }
}
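The SKIP_DEBUG | SKIP_CODE | SKIP_FRAMES combination works for any metadata-only scan, since method bodies never need to be decoded. Below is a minimal, self-contained sketch of the same pattern (class and method names here are mine, not CDAP's) that simply lists the method descriptors a class declares:

import java.io.IOException;
import java.io.InputStream;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;

public class SignatureScanSketch {
  /** Prints the name and descriptor of every method declared by the given class (internal name). */
  public static void printMethods(String internalName) throws IOException {
    String resource = internalName + ".class";
    try (InputStream in = SignatureScanSketch.class.getClassLoader().getResourceAsStream(resource)) {
      if (in == null) {
        return; // class not found on the classpath
      }
      new ClassReader(in).accept(new ClassVisitor(Opcodes.ASM5) {
        @Override
        public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
          System.out.println(name + desc);
          return null; // no need to visit the (skipped) method body
        }
      }, ClassReader.SKIP_DEBUG | ClassReader.SKIP_CODE | ClassReader.SKIP_FRAMES);
    }
  }
}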