Search in sources:

Example 1 with UncaughtExceptionHandler

use of co.cask.cdap.common.logging.common.UncaughtExceptionHandler in project cdap by caskdata.

The class AbstractProgramTwillRunnable, method initialize.

/**
 * Initializes this runnable from the Twill context: parses the command line,
 * loads the Hadoop and CDAP configurations, creates the Guice injector and
 * core services, initializes the log appender, and creates the program plus
 * its {@code ProgramRunner}.
 *
 * <p>Any failure is logged and rethrown so the container terminates.
 *
 * @param context the Twill runtime context for this runnable
 */
@Override
public void initialize(TwillContext context) {
    name = context.getSpecification().getName();
    LOG.info("Initializing runnable: " + name);
    // Route uncaught exceptions from any thread through the CDAP handler
    Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler());
    System.setSecurityManager(new RunnableSecurityManager(System.getSecurityManager()));
    // Install the JUL to SLF4J Bridge
    SLF4JBridgeHandler.install();
    runLatch = new CountDownLatch(1);
    coreServices = new ArrayList<>();
    try {
        CommandLine cmdLine = parseArgs(context.getApplicationArguments());
        // Loads configurations
        hConf = new Configuration();
        hConf.clear();
        hConf.addResource(new File(cmdLine.getOptionValue(RunnableOptions.HADOOP_CONF_FILE)).toURI().toURL());
        UserGroupInformation.setConfiguration(hConf);
        cConf = CConfiguration.create(new File(cmdLine.getOptionValue(RunnableOptions.CDAP_CONF_FILE)));
        programOpts = createProgramOptions(cmdLine, context, context.getSpecification().getConfigs());
        // This impersonation info is added in PropertiesResolver#getSystemProperties
        // if kerberos is enabled we expect the principal to be provided in the program options as we
        // need it to be used later in ExploreClient to make request. If kerberos is disabled this will be null
        String principal = programOpts.getArguments().getOption(ProgramOptionConstants.PRINCIPAL);
        ProgramId programId = GSON.fromJson(cmdLine.getOptionValue(RunnableOptions.PROGRAM_ID), ProgramId.class);
        String instanceId = programOpts.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID);
        String runId = programOpts.getArguments().getOption(ProgramOptionConstants.RUN_ID);
        Injector injector = Guice.createInjector(createModule(context, programId, runId, instanceId, principal));
        coreServices.add(injector.getInstance(ZKClientService.class));
        coreServices.add(injector.getInstance(KafkaClientService.class));
        coreServices.add(injector.getInstance(BrokerService.class));
        coreServices.add(injector.getInstance(MetricsCollectionService.class));
        coreServices.add(injector.getInstance(StreamCoordinatorClient.class));
        // Initialize log appender
        logAppenderInitializer = injector.getInstance(LogAppenderInitializer.class);
        logAppenderInitializer.initialize();
        // Create the ProgramRunner
        programRunner = createProgramRunner(injector);
        // Note: the former inner try/catch that wrapped IOException with
        // Throwables.propagate was redundant — the enclosing catch (Throwable)
        // below already logs and propagates, so IOException flows to it directly.
        Location programJarLocation = Locations.toLocation(new File(cmdLine.getOptionValue(RunnableOptions.JAR)));
        ApplicationSpecification appSpec = readAppSpec(new File(cmdLine.getOptionValue(RunnableOptions.APP_SPEC_FILE)));
        program = Programs.create(cConf, programRunner, new ProgramDescriptor(programId, appSpec), programJarLocation, new File(cmdLine.getOptionValue(RunnableOptions.EXPANDED_JAR)));
        coreServices.add(new ProgramRunnableResourceReporter(program.getId(), injector.getInstance(MetricsCollectionService.class), context));
        LOG.info("Runnable initialized: {}", name);
    } catch (Throwable t) {
        // Log here so the failure appears in this container's log before the
        // exception propagates and kills the runnable.
        LOG.error(t.getMessage(), t);
        throw Throwables.propagate(t);
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) KafkaClientService(org.apache.twill.kafka.client.KafkaClientService) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) StreamCoordinatorClient(co.cask.cdap.data.stream.StreamCoordinatorClient) IOException(java.io.IOException) CountDownLatch(java.util.concurrent.CountDownLatch) ProgramId(co.cask.cdap.proto.id.ProgramId) CommandLine(org.apache.commons.cli.CommandLine) LogAppenderInitializer(co.cask.cdap.logging.appender.LogAppenderInitializer) ZKClientService(org.apache.twill.zookeeper.ZKClientService) Injector(com.google.inject.Injector) ProgramDescriptor(co.cask.cdap.app.program.ProgramDescriptor) UncaughtExceptionHandler(co.cask.cdap.common.logging.common.UncaughtExceptionHandler) File(java.io.File) BrokerService(org.apache.twill.kafka.client.BrokerService) Location(org.apache.twill.filesystem.Location)

Example 2 with UncaughtExceptionHandler

use of co.cask.cdap.common.logging.common.UncaughtExceptionHandler in project cdap by caskdata.

The class AbstractMasterTwillRunnable, method initialize.

/**
 * Initializes this master runnable: loads the Hadoop and CDAP configurations
 * from the Twill specification configs, builds the Guice injector via
 * {@code doInit}, and collects the common base services plus any services
 * contributed by {@code addServices}.
 *
 * @param context the Twill runtime context for this runnable
 */
@Override
public final void initialize(TwillContext context) {
    // Route uncaught exceptions from any thread through the CDAP handler
    Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler());
    super.initialize(context);
    name = context.getSpecification().getName();
    LOG.info("Initializing runnable {}", name);
    Map<String, String> configs = context.getSpecification().getConfigs();
    try {
        // Load configuration
        hConf = new Configuration();
        hConf.clear();
        hConf.addResource(new File(configs.get("hConf")).toURI().toURL());
        UserGroupInformation.setConfiguration(hConf);
        cConf = CConfiguration.create(new File(configs.get("cConf")));
        LOG.debug("{} cConf {}", name, cConf);
        LOG.debug("{} HBase conf {}", name, hConf);
        Injector injector = doInit(context);
        services = Lists.newArrayList();
        // Add common base services
        services.add(injector.getInstance(ZKClientService.class));
        services.add(injector.getInstance(KafkaClientService.class));
        services.add(injector.getInstance(BrokerService.class));
        services.add(injector.getInstance(MetricsCollectionService.class));
        addServices(services);
        Preconditions.checkArgument(!services.isEmpty(), "Should have at least one service");
        LOG.info("Runnable initialized {}", name);
    } catch (Throwable t) {
        // Log before propagating so initialization failures are visible in this
        // container's log; previously the throwable was rethrown silently.
        LOG.error(t.getMessage(), t);
        throw Throwables.propagate(t);
    }
}
Also used : KafkaClientService(org.apache.twill.kafka.client.KafkaClientService) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) ZKClientService(org.apache.twill.zookeeper.ZKClientService) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) Injector(com.google.inject.Injector) UncaughtExceptionHandler(co.cask.cdap.common.logging.common.UncaughtExceptionHandler) File(java.io.File) BrokerService(org.apache.twill.kafka.client.BrokerService)

Example 3 with UncaughtExceptionHandler

use of co.cask.cdap.common.logging.common.UncaughtExceptionHandler in project cdap by caskdata.

The class DaemonMain, method doMain.

/**
   * The main method. It simply calls methods in the same sequence
   * as if the program were started by jsvc, then blocks until the
   * JVM begins shutting down.
   *
   * @param args command line arguments, passed through to {@code init}
   * @throws Exception if initialization or startup fails
   */
protected void doMain(final String[] args) throws Exception {
    init(args);
    // Counted down by the shutdown hook; keeps the main thread alive until then.
    final CountDownLatch shutdownLatch = new CountDownLatch(1);
    Runtime.getRuntime().addShutdownHook(new Thread() {

        @Override
        public void run() {
            // The nested try/finally chain is deliberate:
            // destroy() must run even if stop() throws, and the latch must
            // count down even if destroy() throws, so doMain always returns.
            try {
                try {
                    DaemonMain.this.stop();
                } finally {
                    try {
                        DaemonMain.this.destroy();
                    } finally {
                        shutdownLatch.countDown();
                    }
                }
            } catch (Throwable t) {
                LOG.error("Exception when shutting down: " + t.getMessage(), t);
            }
        }
    });
    start();
    // Set uncaught exception handler after startup, this is so that if startup throws exception then we
    // want it to be logged as error (the handler logs it as debug)
    Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler());
    shutdownLatch.await();
}
Also used : CountDownLatch(java.util.concurrent.CountDownLatch) UncaughtExceptionHandler(co.cask.cdap.common.logging.common.UncaughtExceptionHandler)

Example 4 with UncaughtExceptionHandler

use of co.cask.cdap.common.logging.common.UncaughtExceptionHandler in project cdap by caskdata.

The class MapReduceContainerLauncher, method launch.

/**
   * Launches the given main class through a freshly constructed
   * {@link MapReduceClassLoader}, after removing the generated stub class
   * from the classpath and wiring up logging redirection.
   *
   * @param mainClassName the main class to launch
   * @param args          arguments for the main class
   */
@SuppressWarnings("unused")
public static void launch(String mainClassName, String[] args) throws Exception {
    Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler());
    ClassLoader sysLoader = ClassLoader.getSystemClassLoader();
    List<URL> classpath = ClassLoaders.getClassLoaderURLs(sysLoader, new ArrayList<URL>());
    // Drop the URL that holds the given main class to avoid infinite recursion:
    // a class with the same name is generated in order to intercept the main()
    // call coming from the container launch script.
    URL mainClassResource = sysLoader.getResource(mainClassName.replace('.', '/') + ".class");
    if (mainClassResource == null) {
        throw new IllegalStateException("Failed to find resource for main class " + mainClassName);
    }
    if (!classpath.remove(ClassLoaders.getClassPathURL(mainClassName, mainClassResource))) {
        throw new IllegalStateException("Failed to remove main class resource " + mainClassResource);
    }
    // Create a MainClassLoader for dataset rewrite
    ClassLoader mainClassLoader = new MainClassLoader(classpath.toArray(new URL[0]), sysLoader.getParent());
    // Install the JUL to SLF4J Bridge
    try {
        mainClassLoader.loadClass(SLF4JBridgeHandler.class.getName()).getDeclaredMethod("install").invoke(null);
    } catch (Exception e) {
        // Log the error and continue
        LOG.warn("Failed to invoke SLF4JBridgeHandler.install() required for jul-to-slf4j bridge", e);
    }
    ClassLoaders.setContextClassLoader(mainClassLoader);
    // The MapReduceClassLoader itself has to be loaded through the MainClassLoader.
    try {
        final ClassLoader mrClassLoader = (ClassLoader) mainClassLoader.loadClass(MapReduceClassLoader.class.getName()).newInstance();
        // Close the classloader on JVM shutdown if it supports closing.
        Runtime.getRuntime().addShutdownHook(new Thread() {

            @Override
            public void run() {
                if (mrClassLoader instanceof AutoCloseable) {
                    try {
                        ((AutoCloseable) mrClassLoader).close();
                    } catch (Exception e) {
                        System.err.println("Failed to close ClassLoader " + mrClassLoader);
                        e.printStackTrace();
                    }
                }
            }
        });
        Thread.currentThread().setContextClassLoader(mrClassLoader);
        // Setup logging and stdout/stderr redirect
        // Invoke MapReduceClassLoader.getTaskContextProvider()
        mrClassLoader.getClass().getDeclaredMethod("getTaskContextProvider").invoke(mrClassLoader);
        // Invoke StandardOutErrorRedirector.redirectToLogger()
        mrClassLoader.loadClass("co.cask.cdap.common.logging.StandardOutErrorRedirector").getDeclaredMethod("redirectToLogger", String.class).invoke(null, mainClassName);
        Method mainMethod = mrClassLoader.loadClass(mainClassName).getMethod("main", String[].class);
        mainMethod.setAccessible(true);
        LOG.info("Launch main class {}.main({})", mainClassName, Arrays.toString(args));
        mainMethod.invoke(null, new Object[] { args });
        LOG.info("Main method returned {}", mainClassName);
    } catch (Throwable t) {
        // LOG the exception since this exception will be propagated back to JVM
        // and kill the main thread (hence the JVM process).
        // If we don't log it here as ERROR, it will be logged by UncaughtExceptionHandler as DEBUG level
        LOG.error("Exception raised when calling {}.main(String[]) method", mainClassName, t);
        throw t;
    }
}
Also used : MapReduceClassLoader(co.cask.cdap.internal.app.runtime.batch.MapReduceClassLoader) Method(java.lang.reflect.Method) URL(java.net.URL) SLF4JBridgeHandler(org.slf4j.bridge.SLF4JBridgeHandler) MapReduceClassLoader(co.cask.cdap.internal.app.runtime.batch.MapReduceClassLoader) MainClassLoader(co.cask.cdap.common.app.MainClassLoader) UncaughtExceptionHandler(co.cask.cdap.common.logging.common.UncaughtExceptionHandler) MainClassLoader(co.cask.cdap.common.app.MainClassLoader)

Example 5 with UncaughtExceptionHandler

use of co.cask.cdap.common.logging.common.UncaughtExceptionHandler in project cdap by caskdata.

The class SparkContainerLauncher, method launch.

/**
   * Launches the given main class. The main class will be loaded through the {@link SparkContainerClassLoader}.
   *
   * @param mainClassName the main class to launch
   * @param args arguments for the main class
   */
@SuppressWarnings("unused")
public static void launch(String mainClassName, String[] args) throws Exception {
    // Route uncaught exceptions from any thread through the CDAP handler
    Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler());
    ClassLoader systemClassLoader = ClassLoader.getSystemClassLoader();
    Set<URL> urls = ClassLoaders.getClassLoaderURLs(systemClassLoader, new LinkedHashSet<URL>());
    // Remove the URL that contains the given main classname to avoid infinite recursion.
    // This is needed because we generate a class with the same main classname in order to intercept the main()
    // method call from the container launch script.
    urls.remove(getURLByClass(systemClassLoader, mainClassName));
    // Remove the first scala from the classpath so that the one shipped with Spark is used
    URL scalaURL = getURLByClass(systemClassLoader, "scala.language");
    Enumeration<URL> resources = systemClassLoader.getResources("scala/language.class");
    // Only remove the scala if there are more than one in the classpath
    int count = 0;
    while (resources.hasMoreElements()) {
        resources.nextElement();
        count++;
    }
    if (count > 1) {
        urls.remove(scalaURL);
    }
    // First create a FilterClassLoader that only loads JVM and kafka classes from the system classloader
    // This is to isolate the scala library from children
    ClassLoader parentClassLoader = new FilterClassLoader(systemClassLoader, KAFKA_FILTER);
    // Creates the SparkRunnerClassLoader for class rewriting and it will be used for the rest of the execution.
    // Use the extension classloader as the parent instead of the system classloader because
    // Spark classes are in the system classloader which we want to rewrite.
    ClassLoader classLoader = new SparkContainerClassLoader(urls.toArray(new URL[urls.size()]), parentClassLoader);
    // Sets the context classloader and launch the actual Spark main class.
    Thread.currentThread().setContextClassLoader(classLoader);
    // Install the JUL to SLF4J Bridge
    try {
        classLoader.loadClass(SLF4JBridgeHandler.class.getName()).getDeclaredMethod("install").invoke(null);
    } catch (Exception e) {
        // Log the error and continue
        LOG.warn("Failed to invoke SLF4JBridgeHandler.install() required for jul-to-slf4j bridge", e);
    }
    try {
        // Get the SparkRuntimeContext to initialize all necessary services and logging context
        // Need to do it using the SparkRunnerClassLoader through reflection.
        classLoader.loadClass(SparkRuntimeContextProvider.class.getName()).getMethod("get").invoke(null);
        // Invoke StandardOutErrorRedirector.redirectToLogger()
        classLoader.loadClass(StandardOutErrorRedirector.class.getName()).getDeclaredMethod("redirectToLogger", String.class).invoke(null, mainClassName);
        // NOTE(review): the original comment here was truncated. Resetting this property to the
        // literal "<LOG_DIR>" placeholder presumably undoes an earlier expansion to a concrete path,
        // which causes executor logs attempt to write to driver log directory — confirm against callers.
        if (System.getProperty("spark.executorEnv.CDAP_LOG_DIR") != null) {
            System.setProperty("spark.executorEnv.CDAP_LOG_DIR", "<LOG_DIR>");
        }
        LOG.info("Launch main class {}.main({})", mainClassName, Arrays.toString(args));
        classLoader.loadClass(mainClassName).getMethod("main", String[].class).invoke(null, new Object[] { args });
        LOG.info("Main method returned {}", mainClassName);
    } catch (Throwable t) {
        // LOG the exception since this exception will be propagated back to JVM
        // and kill the main thread (hence the JVM process).
        // If we don't log it here as ERROR, it will be logged by UncaughtExceptionHandler as DEBUG level
        LOG.error("Exception raised when calling {}.main(String[]) method", mainClassName, t);
        throw t;
    }
}
Also used : FilterClassLoader(co.cask.cdap.common.lang.FilterClassLoader) URL(java.net.URL) SLF4JBridgeHandler(org.slf4j.bridge.SLF4JBridgeHandler) SparkRuntimeContextProvider(co.cask.cdap.app.runtime.spark.SparkRuntimeContextProvider) SparkContainerClassLoader(co.cask.cdap.app.runtime.spark.classloader.SparkContainerClassLoader) StandardOutErrorRedirector(co.cask.cdap.common.logging.StandardOutErrorRedirector) SparkContainerClassLoader(co.cask.cdap.app.runtime.spark.classloader.SparkContainerClassLoader) FilterClassLoader(co.cask.cdap.common.lang.FilterClassLoader) UncaughtExceptionHandler(co.cask.cdap.common.logging.common.UncaughtExceptionHandler)

Aggregations

UncaughtExceptionHandler (co.cask.cdap.common.logging.common.UncaughtExceptionHandler)5 MetricsCollectionService (co.cask.cdap.api.metrics.MetricsCollectionService)2 CConfiguration (co.cask.cdap.common.conf.CConfiguration)2 Injector (com.google.inject.Injector)2 File (java.io.File)2 URL (java.net.URL)2 CountDownLatch (java.util.concurrent.CountDownLatch)2 Configuration (org.apache.hadoop.conf.Configuration)2 BrokerService (org.apache.twill.kafka.client.BrokerService)2 KafkaClientService (org.apache.twill.kafka.client.KafkaClientService)2 ZKClientService (org.apache.twill.zookeeper.ZKClientService)2 SLF4JBridgeHandler (org.slf4j.bridge.SLF4JBridgeHandler)2 ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification)1 ProgramDescriptor (co.cask.cdap.app.program.ProgramDescriptor)1 SparkRuntimeContextProvider (co.cask.cdap.app.runtime.spark.SparkRuntimeContextProvider)1 SparkContainerClassLoader (co.cask.cdap.app.runtime.spark.classloader.SparkContainerClassLoader)1 MainClassLoader (co.cask.cdap.common.app.MainClassLoader)1 FilterClassLoader (co.cask.cdap.common.lang.FilterClassLoader)1 StandardOutErrorRedirector (co.cask.cdap.common.logging.StandardOutErrorRedirector)1 StreamCoordinatorClient (co.cask.cdap.data.stream.StreamCoordinatorClient)1