
Example 11 with PrintStream

Use of java.io.PrintStream in project flink by apache.

From the class AbstractYarnClusterDescriptor, method getClusterDescription.

@Override
public String getClusterDescription() {
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(baos);
        YarnClient yarnClient = getYarnClient();
        YarnClusterMetrics metrics = yarnClient.getYarnClusterMetrics();
        ps.append("NodeManagers in the ClusterClient " + metrics.getNumNodeManagers());
        List<NodeReport> nodes = yarnClient.getNodeReports(NodeState.RUNNING);
        final String format = "|%-16s |%-16s %n";
        ps.printf("|Property         |Value          %n");
        ps.println("+---------------------------------------+");
        int totalMemory = 0;
        int totalCores = 0;
        for (NodeReport rep : nodes) {
            final Resource res = rep.getCapability();
            totalMemory += res.getMemory();
            totalCores += res.getVirtualCores();
            ps.format(format, "NodeID", rep.getNodeId());
            ps.format(format, "Memory", res.getMemory() + " MB");
            ps.format(format, "vCores", res.getVirtualCores());
            ps.format(format, "HealthReport", rep.getHealthReport());
            ps.format(format, "Containers", rep.getNumContainers());
            ps.println("+---------------------------------------+");
        }
        ps.println("Summary: totalMemory " + totalMemory + " totalCores " + totalCores);
        List<QueueInfo> qInfo = yarnClient.getAllQueues();
        for (QueueInfo q : qInfo) {
            ps.println("Queue: " + q.getQueueName() + ", Current Capacity: " + q.getCurrentCapacity() + " Max Capacity: " + q.getMaximumCapacity() + " Applications: " + q.getApplications().size());
        }
        yarnClient.stop();
        return baos.toString();
    } catch (Exception e) {
        throw new RuntimeException("Couldn't get cluster description", e);
    }
}
Also used : QueueInfo(org.apache.hadoop.yarn.api.records.QueueInfo) PrintStream(java.io.PrintStream) YarnClusterMetrics(org.apache.hadoop.yarn.api.records.YarnClusterMetrics) Resource(org.apache.hadoop.yarn.api.records.Resource) LocalResource(org.apache.hadoop.yarn.api.records.LocalResource) ByteArrayOutputStream(java.io.ByteArrayOutputStream) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) URISyntaxException(java.net.URISyntaxException) InvocationTargetException(java.lang.reflect.InvocationTargetException) IllegalConfigurationException(org.apache.flink.configuration.IllegalConfigurationException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) IOException(java.io.IOException) NodeReport(org.apache.hadoop.yarn.api.records.NodeReport)
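A side note on the capture pattern above: both new PrintStream(baos) and baos.toString() use the platform default charset. The following is a minimal sketch (not part of the Flink code) of the same capture with an explicit charset, in case the description text must be encoding-stable:

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

// Illustrative only: capture printed text and decode it with a fixed charset.
static String captureWithCharset() throws UnsupportedEncodingException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // Pass the charset explicitly instead of relying on the platform default.
    PrintStream ps = new PrintStream(baos, true, "UTF-8");
    ps.println("NodeManagers in the ClusterClient 4");
    ps.close();
    // Decode with the same charset that was used for writing.
    return baos.toString("UTF-8");
}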

Example 12 with PrintStream

Use of java.io.PrintStream in project flink by apache.

From the class YarnTestBase, method startWithArgs.

/**
	 * This method returns once the "startedAfterString" has been seen.
	 */
protected Runner startWithArgs(String[] args, String startedAfterString, RunTypes type) {
    LOG.info("Running with args {}", Arrays.toString(args));
    outContent = new ByteArrayOutputStream();
    errContent = new ByteArrayOutputStream();
    System.setOut(new PrintStream(outContent));
    System.setErr(new PrintStream(errContent));
    final int START_TIMEOUT_SECONDS = 60;
    Runner runner = new Runner(args, type, 0);
    runner.setName("Frontend (CLI/YARN Client) runner thread (startWithArgs()).");
    runner.start();
    for (int second = 0; second < START_TIMEOUT_SECONDS; second++) {
        sleep(1000);
        // check output for correct TaskManager startup.
        if (outContent.toString().contains(startedAfterString) || errContent.toString().contains(startedAfterString)) {
            LOG.info("Found expected output in redirected streams");
            return runner;
        }
        // check if thread died
        if (!runner.isAlive()) {
            sendOutput();
            if (runner.getRunnerError() != null) {
                throw new RuntimeException("Runner failed with exception.", runner.getRunnerError());
            }
            Assert.fail("Runner thread died before the test was finished.");
        }
    }
    sendOutput();
    Assert.fail("During the timeout period of " + START_TIMEOUT_SECONDS + " seconds the " + "expected string did not show up");
    return null;
}
Also used : PrintStream(java.io.PrintStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream)
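A hypothetical caller of this helper (the arguments, the expected output line, and the RunTypes constant are placeholders, not taken from the Flink tests) might look like:

// Illustrative fragment inside a YarnTestBase subclass; all values are made up.
Runner runner = startWithArgs(
        new String[] { "-j", "example-job.jar" },
        "expected startup message",
        RunTypes.YARN_SESSION);
// ... interact with the started process, then stop the runner thread ...
runner.interrupt();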

Example 13 with PrintStream

Use of java.io.PrintStream in project flink by apache.

From the class CliFrontendYarnAddressConfigurationTest, method disableStdOutErr.

@BeforeClass
public static void disableStdOutErr() {
    class NullPrint extends OutputStream {

        @Override
        public void write(int b) {
        }
    }
    PrintStream nullPrinter = new PrintStream(new NullPrint());
    System.setOut(nullPrinter);
    System.setErr(nullPrinter);
    // Unset FLINK_CONF_DIR, as this is a precondition for this test to work properly
    Map<String, String> map = new HashMap<>(System.getenv());
    map.remove(ConfigConstants.ENV_FLINK_CONF_DIR);
    TestBaseUtils.setEnv(map);
}
Also used : PrintStream(java.io.PrintStream) HashMap(java.util.HashMap) OutputStream(java.io.OutputStream) BeforeClass(org.junit.BeforeClass)
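The test above silences System.out and System.err for the entire test class but never restores them. If the original streams are needed again, a common counterpart (a minimal sketch, not part of the Flink test shown here) is to remember them before redirecting and to put them back in an @AfterClass method:

import java.io.OutputStream;
import java.io.PrintStream;
import org.junit.AfterClass;
import org.junit.BeforeClass;

// Hypothetical companion to disableStdOutErr(); the original test does not restore the streams.
private static PrintStream originalOut;
private static PrintStream originalErr;

@BeforeClass
public static void silenceStdOutErr() {
    // Remember the real streams before replacing them.
    originalOut = System.out;
    originalErr = System.err;
    PrintStream nullPrinter = new PrintStream(new OutputStream() {
        @Override
        public void write(int b) {
        }
    });
    System.setOut(nullPrinter);
    System.setErr(nullPrinter);
}

@AfterClass
public static void restoreStdOutErr() {
    System.setOut(originalOut);
    System.setErr(originalErr);
}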

Example 14 with PrintStream

Use of java.io.PrintStream in project groovy by apache.

From the class InspectorTest, method testPrint.

public void testPrint() {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    PrintStream printStream = new PrintStream(bytes);
    String ls = System.getProperty("line.separator");
    String[] first = { "a", "b" };
    String[] second = { "x", "y" };
    Object[] memberInfo = { first, second };
    Inspector.print(printStream, memberInfo);
    assertEquals("0:\ta b " + ls + "1:\tx y " + ls, bytes.toString());
    // just for coverage, print to System.out (yuck)
    Inspector.print(memberInfo);
}
Also used : PrintStream(java.io.PrintStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream)

Example 15 with PrintStream

Use of java.io.PrintStream in project groovy by apache.

From the class GroovyShellTest, method testLaunchesJUnitTestSuite.

public void testLaunchesJUnitTestSuite() throws Exception {
    // create a valid (empty) test suite on disk
    String testName = "GroovyShellTestJUnit3Test" + System.currentTimeMillis();
    File testSuite = new File(System.getProperty("java.io.tmpdir"), testName);
    ResourceGroovyMethods.write(testSuite, "import junit.framework.*; \r\n" + "public class " + testName + " extends TestSuite { \r\n" + "    public static Test suite() { \r\n" + "        return new TestSuite(); \r\n" + "    } \r\n" + "} \r\n");
    testSuite.deleteOnExit();
    PrintStream out = System.out;
    System.setOut(new PrintStream(new ByteArrayOutputStream()));
    try {
        // makes this more of an integration test than a unit test...
        GroovyShell.main(new String[] { testSuite.getCanonicalPath() });
    } finally {
        System.setOut(out);
    }
}
Also used : PrintStream(java.io.PrintStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) File(java.io.File)

Aggregations

PrintStream (java.io.PrintStream): 1582
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 687
Test (org.junit.Test): 481
File (java.io.File): 276
IOException (java.io.IOException): 257
FileOutputStream (java.io.FileOutputStream): 177
ArrayList (java.util.ArrayList): 78
FileNotFoundException (java.io.FileNotFoundException): 75
OutputStream (java.io.OutputStream): 72
Before (org.junit.Before): 57
BufferedReader (java.io.BufferedReader): 50
Date (java.util.Date): 44
Map (java.util.Map): 44
BufferedOutputStream (java.io.BufferedOutputStream): 41
Path (org.apache.hadoop.fs.Path): 41
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 40
Matchers.anyString (org.mockito.Matchers.anyString): 37
InputStreamReader (java.io.InputStreamReader): 35
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 35
HashMap (java.util.HashMap): 32
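
Besides ByteArrayOutputStream, the aggregation shows PrintStream frequently paired with FileOutputStream and File. A minimal sketch of that combination (illustrative only, not taken from either project):

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintStream;

// Write a small text report to disk; try-with-resources closes the stream even if printing fails.
static void writeReport(File target) throws FileNotFoundException {
    try (PrintStream ps = new PrintStream(new FileOutputStream(target))) {
        ps.println("report generated");
        ps.printf("|%-16s |%-16s %n", "Property", "Value");
    }
}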