Use of org.objectweb.asm.ClassWriter in project cdap by caskdata: class SparkClassRewriter, method rewriteAkkaRemoting.
/**
 * Rewrites the akka.remote.Remoting by rewriting usages of scala.concurrent.ExecutionContext.Implicits.global
 * to Remoting.system().dispatcher() in the shutdown() method for fixing the Akka thread/permgen leak bug in
 * https://github.com/akka/akka/issues/17729.
 *
 * @param byteCodeStream stream containing the original bytecode of the akka.remote.Remoting class
 * @return the rewritten bytes or {@code null} if no rewriting is needed
 * @throws IOException if the bytecode cannot be read from the stream
 */
@Nullable
private byte[] rewriteAkkaRemoting(InputStream byteCodeStream) throws IOException {
  final Type dispatcherReturnType = determineAkkaDispatcherReturnType();
  if (dispatcherReturnType == null) {
    // Without the exact dispatcher() return type we cannot emit a correct INVOKEVIRTUAL, so skip rewriting.
    LOG.warn("Failed to determine ActorSystem.dispatcher() return type. " + "No rewriting of akka.remote.Remoting class. ClassLoader leakage might happen in SDK.");
    return null;
  }
  ClassReader cr = new ClassReader(byteCodeStream);
  // NOTE(review): flags == 0 (no COMPUTE_MAXS/COMPUTE_FRAMES) assumes the replacement sequence below
  // never exceeds the original max stack depth — POP/ALOAD appear to balance out, but confirm.
  ClassWriter cw = new ClassWriter(0);
  cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
    @Override
    public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
      // Call super so that the method signature is registered with the ClassWriter (parent)
      MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
      // Only rewrite the shutdown() method
      if (!"shutdown".equals(name)) {
        return mv;
      }
      return new MethodVisitor(Opcodes.ASM5, mv) {
        @Override
        public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
          // Match the call to ExecutionContext.Implicits.global(), whose descriptor is
          // ()Lscala/concurrent/ExecutionContextExecutor;
          if (opcode == Opcodes.INVOKEVIRTUAL && "global".equals(name) && "scala/concurrent/ExecutionContext$Implicits$".equals(owner) && Type.getMethodDescriptor(EXECUTION_CONTEXT_EXECUTOR_TYPE).equals(desc)) {
            // Discard the GETSTATIC result from the stack by popping it
            super.visitInsn(Opcodes.POP);
            // Make the call "import system.dispatch", which translate to Java code as
            // this.system().dispatcher()
            // hence as bytecode
            // ALOAD 0 (load this)
            // INVOKEVIRTUAL akka/remote/Remoting.system ()Lakka/actor/ExtendedActorSystem;
            // INVOKEVIRTUAL akka/actor/ExtendedActorSystem.dispatcher ()Lscala/concurrent/ExecutionContextExecutor;
            Type extendedActorSystemType = Type.getObjectType("akka/actor/ExtendedActorSystem");
            super.visitVarInsn(Opcodes.ALOAD, 0);
            super.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "akka/remote/Remoting", "system", Type.getMethodDescriptor(extendedActorSystemType), false);
            // Use the dispatcher() return type determined above, since it can differ between Akka versions.
            super.visitMethodInsn(Opcodes.INVOKEVIRTUAL, extendedActorSystemType.getInternalName(), "dispatcher", Type.getMethodDescriptor(dispatcherReturnType), false);
          } else {
            // For other instructions, just call parent to deal with it
            super.visitMethodInsn(opcode, owner, name, desc, itf);
          }
        }
      };
    }
  }, ClassReader.EXPAND_FRAMES);
  return cw.toByteArray();
}
Use of org.objectweb.asm.ClassWriter in project cdap by caskdata: class SparkClassRewriter, method rewriteClient.
/**
 * Defines the org.apache.spark.deploy.yarn.Client class with rewriting of the createConfArchive method to
 * workaround the SPARK-13441 bug.
 *
 * @param byteCodeStream stream containing the original bytecode of the Client class
 * @return the rewritten bytes or {@code null} if no rewriting is needed
 * @throws IOException if the bytecode cannot be read from the stream
 */
@Nullable
private byte[] rewriteClient(InputStream byteCodeStream) throws IOException {
  // We only need to rewrite if listing either HADOOP_CONF_DIR or YARN_CONF_DIR return null.
  boolean needRewrite = false;
  for (String env : ImmutableList.of("HADOOP_CONF_DIR", "YARN_CONF_DIR")) {
    String value = System.getenv(env);
    if (value != null) {
      File path = new File(value);
      // listFiles() returning null for an existing directory means it cannot be listed,
      // which is the condition that triggers SPARK-13441.
      if (path.isDirectory() && path.listFiles() == null) {
        needRewrite = true;
        break;
      }
    }
  }
  // If rewrite is not needed
  if (!needRewrite) {
    return null;
  }
  ClassReader cr = new ClassReader(byteCodeStream);
  ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
  cr.accept(new ClassVisitor(Opcodes.ASM5, cw) {
    @Override
    public MethodVisitor visitMethod(final int access, final String name, final String desc, String signature, String[] exceptions) {
      MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
      // Only rewrite the createConfArchive method
      if (!"createConfArchive".equals(name)) {
        return mv;
      }
      Type fileType = Type.getType(File.class);
      Type stringType = Type.getType(String.class);
      // Check if it's a recognizable return type.
      // Spark 1.5+ return type is File
      boolean isReturnFile = Type.getReturnType(desc).equals(fileType);
      Type optionType = Type.getObjectType("scala/Option");
      if (!isReturnFile) {
        // Spark 1.4 return type is Option<File>
        if (!Type.getReturnType(desc).equals(optionType)) {
          // Unknown type. Not going to modify the code.
          return mv;
        }
      }
      // Generate this for Spark 1.5+
      // return SparkRuntimeUtils.createConfArchive(this.sparkConf, SPARK_CONF_FILE, LOCALIZED_CONF_DIR,
      // File.createTempFile(LOCALIZED_CONF_DIR, ".zip"));
      // Generate this for Spark 1.4
      // return Option.apply(SparkRuntimeUtils.createConfArchive(this.sparkConf, SPARK_CONF_FILE, LOCALIZED_CONF_DIR,
      // File.createTempFile(LOCALIZED_CONF_DIR, ".zip")));
      GeneratorAdapter mg = new GeneratorAdapter(mv, access, name, desc);
      // Push the four parameters for the SparkRuntimeUtils.createConfArchive method
      // load this.sparkConf to the stack
      mg.loadThis();
      mg.getField(Type.getObjectType("org/apache/spark/deploy/yarn/Client"), "sparkConf", SPARK_CONF_TYPE);
      mg.visitLdcInsn(SPARK_CONF_FILE);
      mg.visitLdcInsn(LOCALIZED_CONF_DIR);
      // Call the File.createTempFile(LOCALIZED_CONF_DIR, ".zip") to generate the fourth parameter
      mg.visitLdcInsn(LOCALIZED_CONF_DIR);
      mg.visitLdcInsn(".zip");
      mg.invokeStatic(fileType, new Method("createTempFile", fileType, new Type[] { stringType, stringType }));
      // call SparkRuntimeUtils.createConfArchive, return a File and leave it in stack
      mg.invokeStatic(SPARK_RUNTIME_UTILS_TYPE, new Method("createConfArchive", fileType, new Type[] { SPARK_CONF_TYPE, stringType, stringType, fileType }));
      if (isReturnFile) {
        // Spark 1.5+ return type is File, hence just return the File from the stack
        mg.returnValue();
        mg.endMethod();
      } else {
        // Spark 1.4 return type is Option<File>
        // return Option.apply(<file from stack>);
        // where the file is actually just popped from the stack
        mg.invokeStatic(optionType, new Method("apply", optionType, new Type[] { Type.getType(Object.class) }));
        mg.checkCast(optionType);
        mg.returnValue();
        mg.endMethod();
      }
      // Returning null tells ASM to drop the original method body; the code generated above replaces it.
      return null;
    }
  }, ClassReader.EXPAND_FRAMES);
  return cw.toByteArray();
}
Use of org.objectweb.asm.ClassWriter in project cdap by caskdata: class DatumWriterGenerator, method generate.
/**
 * Generates a {@link DatumWriter} class for encoding data of the given output type with the given schema.
 *
 * @param outputType Type information of the output data type.
 * @param schema Schema of the output data type.
 * @return A {@link co.cask.cdap.internal.asm.ClassDefinition} that contains generated class information.
 */
ClassDefinition generate(TypeToken<?> outputType, Schema schema) {
  // COMPUTE_FRAMES: let ASM compute stack map frames for the generated V1_8 class.
  classWriter = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
  preservedClasses = Lists.newArrayList();
  TypeToken<?> interfaceType = getInterfaceType(outputType);

  // Generate the class. Access flags are bit flags, so combine them with '|' rather than '+'.
  String className = getClassName(interfaceType, schema);
  classType = Type.getObjectType(className);
  classWriter.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL, className,
                    Signatures.getClassSignature(interfaceType), Type.getInternalName(Object.class),
                    new String[] { Type.getInternalName(interfaceType.getRawType()) });

  // Static schema hash field, for verification
  classWriter.visitField(Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL, "SCHEMA_HASH",
                         Type.getDescriptor(String.class), null, schema.getSchemaHash().toString()).visitEnd();
  // Schema field
  classWriter.visitField(Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL, "schema",
                         Type.getDescriptor(Schema.class), null, null).visitEnd();
  // Encode method
  generateEncode(outputType, schema);
  // Constructor
  generateConstructor();
  // Close the class, matching the ClassVisitor contract (consistent with FieldAccessorGenerator.generate).
  classWriter.visitEnd();
  return new ClassDefinition(classWriter.toByteArray(), className, preservedClasses);
}
Use of org.objectweb.asm.ClassWriter in project cdap by caskdata: class FieldAccessorGenerator, method generate.
/**
 * Generates a field-accessor class for reading and writing the given field of the given class.
 *
 * @param classType the class declaring (or inheriting) the field to access
 * @param field the field to generate the accessor for
 * @param publicOnly {@code true} to restrict direct access to public fields only
 * @return A ClassDefinition that contains the generated accessor class information.
 */
ClassDefinition generate(Class<?> classType, Field field, boolean publicOnly) {
  String name = String.format("%s$GeneratedAccessor%s", classType.getName(), field.getName());
  if (name.startsWith("java.") || name.startsWith("javax.")) {
    // Classes cannot be defined inside java./javax. packages by user class loaders; relocate the
    // generated class and force public-only access for the original field.
    name = "co.cask.cdap." + name;
    publicOnly = true;
  }
  this.className = name.replace('.', '/');

  // Decide whether the field must be accessed reflectively (isPrivate == true) instead of directly.
  if (publicOnly) {
    isPrivate = !Modifier.isPublic(field.getModifiers()) || !Modifier.isPublic(field.getDeclaringClass().getModifiers());
  } else {
    isPrivate = Modifier.isPrivate(field.getModifiers()) || Modifier.isPrivate(field.getDeclaringClass().getModifiers());
  }

  // Generate the class. Access flags are bit flags, so combine them with '|' rather than '+'.
  classWriter = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
  classWriter.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL, className, null,
                    Type.getInternalName(AbstractFieldAccessor.class), new String[0]);
  generateConstructor(field);
  generateGetter(field);
  generateSetter(field);
  classWriter.visitEnd();
  return new ClassDefinition(classWriter.toByteArray(), className);
}
Use of org.objectweb.asm.ClassWriter in project cdap by caskdata: class DatasetClassRewriter, method rewriteClass.
/**
 * Rewrites the given dataset class bytecode by running it through a {@code DatasetClassVisitor}.
 *
 * @param className name of the class being rewritten
 * @param input stream containing the original class bytecode
 * @return the rewritten class bytecode
 * @throws IOException if the bytecode cannot be read
 */
@Override
public byte[] rewriteClass(String className, InputStream input) throws IOException {
  // COMPUTE_FRAMES: recompute stack map frames, since the visitor may alter method bodies.
  ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
  ClassReader reader = new ClassReader(input);
  reader.accept(new DatasetClassVisitor(className, writer), ClassReader.EXPAND_FRAMES);
  return writer.toByteArray();
}
Aggregations