Search in sources:

Example 1 with JarHelper

Use of org.apache.apex.common.util.JarHelper in project apex-core by apache.

From the class StramClient, method findJars:

public static LinkedHashSet<String> findJars(LogicalPlan dag, Class<?>[] defaultClasses) {
    List<Class<?>> jarClasses = new ArrayList<>();
    for (String className : dag.getClassNames()) {
        try {
            Class<?> clazz = Thread.currentThread().getContextClassLoader().loadClass(className);
            jarClasses.add(clazz);
        } catch (ClassNotFoundException e) {
            throw new IllegalArgumentException("Failed to load class " + className, e);
        }
    }
    for (Class<?> clazz : Lists.newArrayList(jarClasses)) {
        // process class and super classes (super does not require deploy annotation)
        for (Class<?> c = clazz; c != Object.class && c != null; c = c.getSuperclass()) {
            //LOG.debug("checking " + c);
            jarClasses.add(c);
            jarClasses.addAll(Arrays.asList(c.getInterfaces()));
        }
    }
    jarClasses.addAll(Arrays.asList(defaultClasses));
    if (dag.isDebug()) {
        LOG.debug("Deploy dependencies: {}", jarClasses);
    }
    // avoid duplicates
    LinkedHashSet<String> localJarFiles = new LinkedHashSet<>();
    JarHelper jarHelper = new JarHelper();
    for (Class<?> jarClass : jarClasses) {
        String jar = jarHelper.getJar(jarClass);
        if (jar != null) {
            localJarFiles.add(jar);
        }
    }
    String libJarsPath = dag.getValue(Context.DAGContext.LIBRARY_JARS);
    if (!StringUtils.isEmpty(libJarsPath)) {
        String[] libJars = StringUtils.splitByWholeSeparator(libJarsPath, LIB_JARS_SEP);
        localJarFiles.addAll(Arrays.asList(libJars));
    }
    LOG.info("Local jar file dependencies: " + localJarFiles);
    return localJarFiles;
}
Also used: LinkedHashSet (java.util.LinkedHashSet), JarHelper (org.apache.apex.common.util.JarHelper), ArrayList (java.util.ArrayList)
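
JarHelper's internals are not shown in this snippet; findJars only relies on getJar(Class<?>) returning the path of the artifact that contains a class (or null when it cannot be resolved). As a rough standalone sketch of that idea using only the JDK's ProtectionDomain/CodeSource API (an assumption about the mechanism, not apex-common's actual implementation, which may handle additional cases such as classes loaded from directories), one might write:

import java.io.File;
import java.net.URISyntaxException;
import java.security.CodeSource;

public class JarLocator {
    // Resolve the jar file or class-path directory that a class was loaded from.
    // Classes loaded by the bootstrap class loader have no CodeSource, so null is returned.
    public static String jarOf(Class<?> clazz) {
        CodeSource source = clazz.getProtectionDomain().getCodeSource();
        if (source == null || source.getLocation() == null) {
            return null;
        }
        try {
            return new File(source.getLocation().toURI()).getAbsolutePath();
        } catch (URISyntaxException e) {
            return source.getLocation().getPath();
        }
    }

    public static void main(String[] args) {
        // Prints the directory or jar that contains this class.
        System.out.println(jarOf(JarLocator.class));
    }
}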

Example 2 with JarHelper

Use of org.apache.apex.common.util.JarHelper in project apex-core by apache.

From the class StramMiniClusterTest, method testSetupShutdown:

@Test
public void testSetupShutdown() throws Exception {
    GetClusterNodesRequest request = Records.newRecord(GetClusterNodesRequest.class);
    ClientRMService clientRMService = yarnCluster.getResourceManager().getClientRMService();
    GetClusterNodesResponse response = clientRMService.getClusterNodes(request);
    List<NodeReport> nodeReports = response.getNodeReports();
    LOG.info("{}", nodeReports);
    for (NodeReport nr : nodeReports) {
        LOG.info("Node: {}", nr.getNodeId());
        LOG.info("Total memory: {}", nr.getCapability());
        LOG.info("Used memory: {}", nr.getUsed());
        LOG.info("Number containers: {}", nr.getNumContainers());
    }
    JarHelper jarHelper = new JarHelper();
    LOG.info("engine jar: {}", jarHelper.getJar(StreamingAppMaster.class));
    LOG.info("engine test jar: {}", jarHelper.getJar(StramMiniClusterTest.class));
    // create test application
    Properties dagProps = new Properties();
    // input module (ensure shutdown works while windows are generated)
    dagProps.put(StreamingApplication.APEX_PREFIX + "operator.numGen.classname", TestGeneratorInputOperator.class.getName());
    dagProps.put(StreamingApplication.APEX_PREFIX + "operator.numGen.maxTuples", "1");
    dagProps.put(StreamingApplication.APEX_PREFIX + "operator.module1.classname", GenericTestOperator.class.getName());
    dagProps.put(StreamingApplication.APEX_PREFIX + "operator.module2.classname", GenericTestOperator.class.getName());
    dagProps.put(StreamingApplication.APEX_PREFIX + "stream.fromNumGen.source", "numGen.outport");
    dagProps.put(StreamingApplication.APEX_PREFIX + "stream.fromNumGen.sinks", "module1.inport1");
    dagProps.put(StreamingApplication.APEX_PREFIX + "stream.n1n2.source", "module1.outport1");
    dagProps.put(StreamingApplication.APEX_PREFIX + "stream.n1n2.sinks", "module2.inport1");
    dagProps.setProperty(StreamingApplication.APEX_PREFIX + LogicalPlan.MASTER_MEMORY_MB.getName(), "128");
    dagProps.setProperty(StreamingApplication.APEX_PREFIX + LogicalPlan.CONTAINER_JVM_OPTIONS.getName(), "-Dlog4j.properties=custom_log4j.properties");
    dagProps.setProperty(StreamingApplication.APEX_PREFIX + "operator.*." + OperatorContext.MEMORY_MB.getName(), "64");
    dagProps.setProperty(StreamingApplication.APEX_PREFIX + "operator.*." + OperatorContext.VCORES.getName(), "1");
    dagProps.setProperty(StreamingApplication.APEX_PREFIX + "operator.*.port.*." + Context.PortContext.BUFFER_MEMORY_MB.getName(), "32");
    dagProps.setProperty(StreamingApplication.APEX_PREFIX + LogicalPlan.DEBUG.getName(), "true");
    LOG.info("dag properties: {}", dagProps);
    LOG.info("Initializing Client");
    LogicalPlanConfiguration tb = new LogicalPlanConfiguration(conf);
    tb.addFromProperties(dagProps, null);
    LogicalPlan dag = createDAG(tb);
    Configuration yarnConf = new Configuration(yarnCluster.getConfig());
    StramClient client = new StramClient(yarnConf, dag);
    try {
        client.start();
        if (StringUtils.isBlank(System.getenv("JAVA_HOME"))) {
            // JAVA_HOME not set in the yarn mini cluster
            client.javaCmd = "java";
        }
        LOG.info("Running client");
        client.startApplication();
        boolean result = client.monitorApplication();
        LOG.info("Client run completed. Result=" + result);
        Assert.assertTrue(result);
    } finally {
        client.stop();
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), LogicalPlanConfiguration (com.datatorrent.stram.plan.logical.LogicalPlanConfiguration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), GetClusterNodesResponse (org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse), TestGeneratorInputOperator (com.datatorrent.stram.engine.TestGeneratorInputOperator), Properties (java.util.Properties), ClientRMService (org.apache.hadoop.yarn.server.resourcemanager.ClientRMService), JarHelper (org.apache.apex.common.util.JarHelper), GenericTestOperator (com.datatorrent.stram.engine.GenericTestOperator), LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan), GetClusterNodesRequest (org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest), NodeReport (org.apache.hadoop.yarn.api.records.NodeReport), Test (org.junit.Test)
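
Both examples ultimately feed JarHelper-resolved paths into the client's jar dependency list. As a small, hedged sketch (not part of apex-core) of that pattern in isolation: collect the jars that contain a handful of classes and join them into a separator-delimited lib-jars string, similar to how findJars above merges LIBRARY_JARS. The "," separator below is an assumption for illustration; StramClient uses its own LIB_JARS_SEP constant, whose value is not shown here.

import java.util.LinkedHashSet;

import org.apache.apex.common.util.JarHelper;

public class LibJarsExample {
    // Collect the containing jar of each class, keeping insertion order and dropping duplicates.
    public static String libJarsFor(Class<?>... classes) {
        JarHelper jarHelper = new JarHelper();
        LinkedHashSet<String> jars = new LinkedHashSet<>();
        for (Class<?> clazz : classes) {
            String jar = jarHelper.getJar(clazz);
            if (jar != null) {
                jars.add(jar);
            }
        }
        // Join with "," purely for illustration; the real code uses a LIB_JARS_SEP constant.
        return String.join(",", jars);
    }
}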

Aggregations

JarHelper (org.apache.apex.common.util.JarHelper): 2 usages
GenericTestOperator (com.datatorrent.stram.engine.GenericTestOperator): 1 usage
TestGeneratorInputOperator (com.datatorrent.stram.engine.TestGeneratorInputOperator): 1 usage
LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan): 1 usage
LogicalPlanConfiguration (com.datatorrent.stram.plan.logical.LogicalPlanConfiguration): 1 usage
ArrayList (java.util.ArrayList): 1 usage
LinkedHashSet (java.util.LinkedHashSet): 1 usage
Properties (java.util.Properties): 1 usage
Configuration (org.apache.hadoop.conf.Configuration): 1 usage
GetClusterNodesRequest (org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest): 1 usage
GetClusterNodesResponse (org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse): 1 usage
NodeReport (org.apache.hadoop.yarn.api.records.NodeReport): 1 usage
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 1 usage
ClientRMService (org.apache.hadoop.yarn.server.resourcemanager.ClientRMService): 1 usage
Test (org.junit.Test): 1 usage