
Example 46 with CConfiguration

use of co.cask.cdap.common.conf.CConfiguration in project cdap by caskdata.

the class ExploreServiceTwillRunnable method doInit.

@Override
protected Injector doInit(TwillContext context) {
    setupHive();
    CConfiguration cConf = getCConfiguration();
    Configuration hConf = getConfiguration();
    addResource(hConf, "yarn-site.xml");
    addResource(hConf, "mapred-site.xml");
    addResource(hConf, "hive-site.xml");
    addResource(hConf, "tez-site.xml");
    // Set the host name to the one provided by Twill
    cConf.set(Constants.Explore.SERVER_ADDRESS, context.getHost().getHostName());
    String txClientId = String.format("cdap.service.%s.%d", Constants.Service.EXPLORE_HTTP_USER_SERVICE, context.getInstanceId());
    // NOTE: the Twill client will try to load all the classes present here, including Hive classes, but it
    // will silently ignore them since Hive classes are not on the master classpath
    injector = createInjector(cConf, hConf, txClientId);
    injector.getInstance(LogAppenderInitializer.class).initialize();
    LoggingContextAccessor.setLoggingContext(new ServiceLoggingContext(NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.EXPLORE_HTTP_USER_SERVICE));
    return injector;
}
Also used : LogAppenderInitializer(co.cask.cdap.logging.appender.LogAppenderInitializer) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) TezConfiguration(org.apache.tez.dag.api.TezConfiguration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) ServiceLoggingContext(co.cask.cdap.common.logging.ServiceLoggingContext)
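
The snippet above only sets one CConfiguration key before building the injector. Below is a minimal, hypothetical sketch of that pattern in isolation, assuming the CConfiguration create/set/get calls shown in these examples; the hostname literal is a placeholder, since the real runnable takes it from the TwillContext.

import co.cask.cdap.common.conf.CConfiguration;
import co.cask.cdap.common.conf.Constants;

public final class ExploreAddressSketch {

  public static void main(String[] args) {
    // Load a CConfiguration from the default cdap-default.xml / cdap-site.xml resources.
    CConfiguration cConf = CConfiguration.create();

    // Override the Explore server address, as doInit() does with the host provided by Twill.
    // "explore-host.example.com" is a placeholder; the runnable uses context.getHost().getHostName().
    cConf.set(Constants.Explore.SERVER_ADDRESS, "explore-host.example.com");

    // Downstream code resolves the Explore endpoint from this same key.
    System.out.println(cConf.get(Constants.Explore.SERVER_ADDRESS));
  }
}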

Example 47 with CConfiguration

use of co.cask.cdap.common.conf.CConfiguration in project cdap by caskdata.

the class MasterTwillApplication method prepareLocalizeResource.

/**
   * Prepares the resources that need to be localized to service containers.
   *
   * @param tempDir a temporary directory for creating files to be localized
   * @param hConf the hadoop configuration
   * @return a list of extra classpath entries that need to be added to each container.
   * @throws IOException if preparing the resources to localize fails
   */
List<String> prepareLocalizeResource(Path tempDir, Configuration hConf) throws IOException {
    CConfiguration containerCConf = CConfiguration.copy(cConf);
    containerCConf.set(Constants.CFG_LOCAL_DATA_DIR, "data");
    List<String> extraClassPath = new ArrayList<>();
    prepareLogSaverResources(tempDir, containerCConf, runnableLocalizeResources.get(Constants.Service.LOGSAVER), extraClassPath);
    if (cConf.getBoolean(Constants.Explore.EXPLORE_ENABLED)) {
        prepareExploreResources(tempDir, hConf, runnableLocalizeResources.get(Constants.Service.EXPLORE_HTTP_USER_SERVICE), extraClassPath);
    }
    Path cConfPath = saveCConf(containerCConf, Files.createTempFile(tempDir, "cConf", ".xml"));
    Path hConfPath = saveHConf(hConf, Files.createTempFile(tempDir, "hConf", ".xml"));
    for (String service : ALL_SERVICES) {
        Map<String, LocalizeResource> localizeResources = runnableLocalizeResources.get(service);
        localizeResources.put(CCONF_NAME, new LocalizeResource(cConfPath.toFile(), false));
        localizeResources.put(HCONF_NAME, new LocalizeResource(hConfPath.toFile(), false));
    }
    return extraClassPath;
}
Also used : Path(java.nio.file.Path) LocalizeResource(co.cask.cdap.internal.app.runtime.distributed.LocalizeResource) ArrayList(java.util.ArrayList) CConfiguration(co.cask.cdap.common.conf.CConfiguration)
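
The copy-before-mutate step matters because the master keeps using its own cConf after localization. A small sketch, assuming only the CConfiguration.copy/set/get calls visible above, shows that the container override does not leak back into the master's configuration.

import co.cask.cdap.common.conf.CConfiguration;
import co.cask.cdap.common.conf.Constants;

public final class ContainerConfSketch {

  public static void main(String[] args) {
    CConfiguration cConf = CConfiguration.create();

    // Copy before mutating, as prepareLocalizeResource() does, so container-specific
    // overrides do not affect the master's own configuration.
    CConfiguration containerCConf = CConfiguration.copy(cConf);
    containerCConf.set(Constants.CFG_LOCAL_DATA_DIR, "data");

    System.out.println("master    : " + cConf.get(Constants.CFG_LOCAL_DATA_DIR));
    System.out.println("container : " + containerCConf.get(Constants.CFG_LOCAL_DATA_DIR));
  }
}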

Example 48 with CConfiguration

use of co.cask.cdap.common.conf.CConfiguration in project cdap by caskdata.

the class KafkaServerMain method init.

@Override
public void init(String[] args) {
    CConfiguration cConf = CConfiguration.create();
    String zkConnectStr = cConf.get(Constants.Zookeeper.QUORUM);
    String zkNamespace = cConf.get(KafkaConstants.ConfigKeys.ZOOKEEPER_NAMESPACE_CONFIG);
    if (zkNamespace != null) {
        ZKClientService client = ZKClientService.Builder.of(zkConnectStr).build();
        try {
            Services.startAndWait(client, cConf.getLong(Constants.Zookeeper.CLIENT_STARTUP_TIMEOUT_MILLIS), TimeUnit.MILLISECONDS, String.format("Connection timed out while trying to start ZooKeeper client. Please " + "verify that the ZooKeeper quorum settings are correct in " + "cdap-site.xml. Currently configured as: %s", cConf.get(Constants.Zookeeper.QUORUM)));
            String path = "/" + zkNamespace;
            LOG.info(String.format("Creating zookeeper namespace %s", path));
            ZKOperations.ignoreError(client.create(path, null, CreateMode.PERSISTENT), KeeperException.NodeExistsException.class, path).get();
            client.stopAndWait();
            zkConnectStr = String.format("%s/%s", zkConnectStr, zkNamespace);
        } catch (Exception e) {
            throw Throwables.propagate(e);
        } finally {
            client.stopAndWait();
        }
    }
    kafkaProperties = generateKafkaConfig(cConf);
    int partitions = Integer.parseInt(kafkaProperties.getProperty("num.partitions"), 10);
    Preconditions.checkState(partitions > 0, "Num partitions should be greater than zero.");
    int port = Integer.parseInt(kafkaProperties.getProperty("port"), 10);
    Preconditions.checkState(port > 0, "Port number is invalid.");
    String hostname = kafkaProperties.getProperty("host.name");
    InetAddress address = Networks.resolve(hostname, new InetSocketAddress("localhost", 0).getAddress());
    if (hostname != null) {
        if (address.isAnyLocalAddress()) {
            kafkaProperties.remove("host.name");
            try {
                address = InetAddress.getLocalHost();
            } catch (UnknownHostException e) {
                throw Throwables.propagate(e);
            }
        } else {
            hostname = address.getCanonicalHostName();
            kafkaProperties.setProperty("host.name", hostname);
        }
    }
    if (kafkaProperties.getProperty("broker.id") == null) {
        int brokerId = generateBrokerId(address);
        LOG.info(String.format("Initializing server with broker id %d", brokerId));
        kafkaProperties.setProperty("broker.id", Integer.toString(brokerId));
    }
    if (kafkaProperties.getProperty("zookeeper.connect") == null) {
        kafkaProperties.setProperty("zookeeper.connect", zkConnectStr);
    }
}
Also used : ZKClientService(org.apache.twill.zookeeper.ZKClientService) UnknownHostException(java.net.UnknownHostException) InetSocketAddress(java.net.InetSocketAddress) CConfiguration(co.cask.cdap.common.conf.CConfiguration) InetAddress(java.net.InetAddress) KeeperException(org.apache.zookeeper.KeeperException)
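
The host.name handling above is the subtle part: a wildcard address cannot be advertised to Kafka clients, so it is dropped in favour of the local host. The JDK-only sketch below reproduces that branch with plain java.net calls; the Properties setup and the 0.0.0.0 value are assumptions made purely for illustration.

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Properties;

public final class KafkaHostNameSketch {

  public static void main(String[] args) throws UnknownHostException {
    // Hypothetical broker properties, standing in for the output of generateKafkaConfig().
    Properties kafkaProperties = new Properties();
    kafkaProperties.setProperty("host.name", "0.0.0.0");

    String hostname = kafkaProperties.getProperty("host.name");
    InetAddress address = InetAddress.getByName(hostname);

    if (address.isAnyLocalAddress()) {
      // A wildcard address is not a usable advertised host name; drop it and fall back
      // to the local host, mirroring KafkaServerMain.init().
      kafkaProperties.remove("host.name");
      address = InetAddress.getLocalHost();
    } else {
      // Otherwise normalize to the canonical host name before handing it to Kafka.
      kafkaProperties.setProperty("host.name", address.getCanonicalHostName());
    }

    System.out.println("resolved address : " + address);
    System.out.println("host.name        : " + kafkaProperties.getProperty("host.name"));
  }
}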

Example 49 with CConfiguration

use of co.cask.cdap.common.conf.CConfiguration in project cdap by caskdata.

the class StreamHandlerRunnable method doInit.

@Override
protected Injector doInit(TwillContext context) {
    CConfiguration cConf = getCConfiguration();
    Configuration hConf = getConfiguration();
    // Set the host name to the one provided by Twill
    cConf.set(Constants.Stream.ADDRESS, context.getHost().getHostName());
    // Set the worker threads to number of cores * 2 available
    cConf.setInt(Constants.Stream.WORKER_THREADS, Runtime.getRuntime().availableProcessors() * 2);
    // Set the instance id
    cConf.setInt(Constants.Stream.CONTAINER_INSTANCE_ID, context.getInstanceId());
    String txClientId = String.format("cdap.service.%s.%d", Constants.Service.STREAMS, context.getInstanceId());
    injector = createInjector(cConf, hConf, txClientId);
    injector.getInstance(LogAppenderInitializer.class).initialize();
    LoggingContextAccessor.setLoggingContext(new ServiceLoggingContext(NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.STREAMS));
    return injector;
}
Also used : LogAppenderInitializer(co.cask.cdap.logging.appender.LogAppenderInitializer) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) ServiceLoggingContext(co.cask.cdap.common.logging.ServiceLoggingContext)
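
A short sketch of the worker-thread override in isolation, assuming the Hadoop-style setInt/getInt accessors (setInt appears in the snippet above; the getInt overload with a default value is an assumption).

import co.cask.cdap.common.conf.CConfiguration;
import co.cask.cdap.common.conf.Constants;

public final class StreamWorkerThreadsSketch {

  public static void main(String[] args) {
    CConfiguration cConf = CConfiguration.create();

    // Size the worker pool to twice the number of available cores, as doInit() does.
    cConf.setInt(Constants.Stream.WORKER_THREADS, Runtime.getRuntime().availableProcessors() * 2);

    // Later readers pull the value back out of the same configuration (1 is just a fallback).
    System.out.println("stream worker threads: " + cConf.getInt(Constants.Stream.WORKER_THREADS, 1));
  }
}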

Example 50 with CConfiguration

use of co.cask.cdap.common.conf.CConfiguration in project cdap by caskdata.

the class SparkRuntimeContextProvider method createIfNotExists.

/**
   * Creates a singleton {@link SparkRuntimeContext}.
   * It makes assumptions about the locations of files localized by the SparkRuntimeService.
   */
private static synchronized SparkRuntimeContext createIfNotExists() {
    if (sparkRuntimeContext != null) {
        return sparkRuntimeContext;
    }
    try {
        CConfiguration cConf = createCConf();
        Configuration hConf = createHConf();
        SparkRuntimeContextConfig contextConfig = new SparkRuntimeContextConfig(hConf);
        // Should only run in YARN mode and only on an executor node, never on the driver node.
        Preconditions.checkState(!contextConfig.isLocal() && Boolean.parseBoolean(System.getenv("SPARK_YARN_MODE")), "SparkContextProvider.getSparkContext should only be called in Spark executor process.");
        // Create the program
        Program program = createProgram(cConf, contextConfig);
        Injector injector = createInjector(cConf, hConf, contextConfig.getProgramId(), contextConfig.getProgramOptions());
        final Service logAppenderService = new LogAppenderService(injector.getInstance(LogAppenderInitializer.class), contextConfig.getProgramOptions());
        final ZKClientService zkClientService = injector.getInstance(ZKClientService.class);
        final KafkaClientService kafkaClientService = injector.getInstance(KafkaClientService.class);
        final MetricsCollectionService metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
        final StreamCoordinatorClient streamCoordinatorClient = injector.getInstance(StreamCoordinatorClient.class);
        // Use a shutdown hook to stop the services, since this class should only be loaded from the system
        // classloader of the Spark executor, hence there should be exactly one instance.
        // If the services are not shut down cleanly, some logs/metrics might be lost.
        Services.chainStart(logAppenderService, zkClientService, kafkaClientService, metricsCollectionService, streamCoordinatorClient);
        Runtime.getRuntime().addShutdownHook(new Thread() {

            @Override
            public void run() {
                // The logger may already have been shut down. Use System.out/err instead
                System.out.println("Shutting SparkClassLoader services");
                Future<List<ListenableFuture<Service.State>>> future = Services.chainStop(logAppenderService, streamCoordinatorClient, metricsCollectionService, kafkaClientService, zkClientService);
                try {
                    List<ListenableFuture<Service.State>> futures = future.get(5, TimeUnit.SECONDS);
                    System.out.println("SparkClassLoader services shutdown completed: " + futures);
                } catch (Exception e) {
                    System.err.println("Exception when shutting down services");
                    e.printStackTrace(System.err);
                }
            }
        });
        // Construct the DatasetFramework
        DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
        WorkflowProgramInfo workflowInfo = contextConfig.getWorkflowProgramInfo();
        DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, contextConfig.getApplicationSpecification());
        // Setup dataset framework context, if required
        if (programDatasetFramework instanceof ProgramContextAware) {
            ProgramRunId programRunId = program.getId().run(ProgramRunners.getRunId(contextConfig.getProgramOptions()));
            ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programRunId));
        }
        PluginInstantiator pluginInstantiator = createPluginInstantiator(cConf, contextConfig, program.getClassLoader());
        // Create the context object
        sparkRuntimeContext = new SparkRuntimeContext(contextConfig.getConfiguration(), program, contextConfig.getProgramOptions(), cConf, getHostname(), injector.getInstance(TransactionSystemClient.class), programDatasetFramework, injector.getInstance(DiscoveryServiceClient.class), metricsCollectionService, injector.getInstance(StreamAdmin.class), contextConfig.getWorkflowProgramInfo(), pluginInstantiator, injector.getInstance(SecureStore.class), injector.getInstance(SecureStoreManager.class), injector.getInstance(AuthorizationEnforcer.class), injector.getInstance(AuthenticationContext.class), injector.getInstance(MessagingService.class));
        LoggingContextAccessor.setLoggingContext(sparkRuntimeContext.getLoggingContext());
        return sparkRuntimeContext;
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
Also used : CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) NameMappedDatasetFramework(co.cask.cdap.internal.app.runtime.workflow.NameMappedDatasetFramework) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) LogAppenderInitializer(co.cask.cdap.logging.appender.LogAppenderInitializer) Injector(com.google.inject.Injector) List(java.util.List) Program(co.cask.cdap.app.program.Program) DefaultProgram(co.cask.cdap.app.program.DefaultProgram) KafkaClientService(org.apache.twill.kafka.client.KafkaClientService) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) MessagingService(co.cask.cdap.messaging.MessagingService) AbstractService(com.google.common.util.concurrent.AbstractService) ZKClientService(org.apache.twill.zookeeper.ZKClientService) Service(com.google.common.util.concurrent.Service) StreamCoordinatorClient(co.cask.cdap.data.stream.StreamCoordinatorClient) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) InvocationTargetException(java.lang.reflect.InvocationTargetException) MalformedURLException(java.net.MalformedURLException) IOException(java.io.IOException) UnknownHostException(java.net.UnknownHostException) WorkflowProgramInfo(co.cask.cdap.internal.app.runtime.workflow.WorkflowProgramInfo) Future(java.util.concurrent.Future) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) PluginInstantiator(co.cask.cdap.internal.app.runtime.plugin.PluginInstantiator) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
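
The shutdown-hook pattern above, stripped of the CDAP-specific services, can be sketched with plain Guava services. This is not the CDAP code: NamedService is a made-up stand-in, the start/stop ordering is the only point being illustrated, and startAsync/awaitRunning assume a reasonably recent Guava version.

import com.google.common.util.concurrent.AbstractIdleService;
import com.google.common.util.concurrent.Service;

public final class ShutdownHookSketch {

  /** A trivial stand-in for services like the log appender or ZK client. */
  private static final class NamedService extends AbstractIdleService {
    private final String name;

    NamedService(String name) {
      this.name = name;
    }

    @Override
    protected void startUp() {
      System.out.println("started " + name);
    }

    @Override
    protected void shutDown() {
      System.out.println("stopped " + name);
    }
  }

  public static void main(String[] args) {
    final Service zkLike = new NamedService("zk-client");
    final Service kafkaLike = new NamedService("kafka-client");

    // Start the services, then register a shutdown hook that stops them in reverse order,
    // mirroring the chainStart/chainStop pattern in createIfNotExists().
    zkLike.startAsync().awaitRunning();
    kafkaLike.startAsync().awaitRunning();

    Runtime.getRuntime().addShutdownHook(new Thread() {
      @Override
      public void run() {
        // The logging system may already be gone at this point, so rely on System.out only.
        kafkaLike.stopAsync().awaitTerminated();
        zkLike.stopAsync().awaitTerminated();
      }
    });
  }
}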

Aggregations

CConfiguration (co.cask.cdap.common.conf.CConfiguration) 180
Test (org.junit.Test) 52
BeforeClass (org.junit.BeforeClass) 46
ConfigModule (co.cask.cdap.common.guice.ConfigModule) 40
Injector (com.google.inject.Injector) 35
Configuration (org.apache.hadoop.conf.Configuration) 32
AbstractModule (com.google.inject.AbstractModule) 31
AuthorizationEnforcementModule (co.cask.cdap.security.authorization.AuthorizationEnforcementModule) 28
DataSetsModules (co.cask.cdap.data.runtime.DataSetsModules) 27
DiscoveryRuntimeModule (co.cask.cdap.common.guice.DiscoveryRuntimeModule) 26
AuthenticationContextModules (co.cask.cdap.security.auth.context.AuthenticationContextModules) 26
AuthorizationTestModule (co.cask.cdap.security.authorization.AuthorizationTestModule) 25
TransactionManager (org.apache.tephra.TransactionManager) 23
NonCustomLocationUnitTestModule (co.cask.cdap.common.guice.NonCustomLocationUnitTestModule) 22
UnsupportedUGIProvider (co.cask.cdap.security.impersonation.UnsupportedUGIProvider) 19
Location (org.apache.twill.filesystem.Location) 18
DefaultOwnerAdmin (co.cask.cdap.security.impersonation.DefaultOwnerAdmin) 17
SystemDatasetRuntimeModule (co.cask.cdap.data.runtime.SystemDatasetRuntimeModule) 16
File (java.io.File) 16
ZKClientModule (co.cask.cdap.common.guice.ZKClientModule) 14