Example 1 with ClusterController

Use of org.apache.flink.tests.util.flink.ClusterController in project flink by apache.

From the class MetricsAvailabilityITCase, method testReporter.

@Test
public void testReporter() throws Exception {
    // Start a local cluster with a single TaskManager; the try-with-resources
    // block shuts it down when the test finishes.
    try (ClusterController ignored = dist.startCluster(1)) {
        final RestClient restClient = new RestClient(new Configuration(), scheduledExecutorService);
        // Check the JobManager metrics first, then those of every registered TaskManager.
        checkJobManagerMetricAvailability(restClient);
        final Collection<ResourceID> taskManagerIds = getTaskManagerIds(restClient);
        for (final ResourceID taskManagerId : taskManagerIds) {
            checkTaskManagerMetricAvailability(restClient, taskManagerId);
        }
    }
}
Also used : ClusterController(org.apache.flink.tests.util.flink.ClusterController) Configuration(org.apache.flink.configuration.Configuration) ResourceID(org.apache.flink.runtime.clusterframework.types.ResourceID) RestClient(org.apache.flink.runtime.rest.RestClient) Test(org.junit.Test)
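
The helpers checkJobManagerMetricAvailability, getTaskManagerIds, and checkTaskManagerMetricAvailability are defined elsewhere in MetricsAvailabilityITCase and are not shown here. As a rough sketch of the idea only: Flink's REST API lists metric names as a JSON array of {"id": "..."} objects under /jobmanager/metrics (port 8081 by default), so a plain-HTTP availability check could look like the following. The helper name assertJobManagerMetricReported is made up for illustration; the real test goes through Flink's RestClient instead.

import java.io.InputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;

// Hypothetical helper: fetch the JobManager metrics listing from the REST API
// and fail if the expected metric id is not present in the response.
static void assertJobManagerMetricReported(String metricId) throws Exception {
    URL url = new URL("http://localhost:8081/jobmanager/metrics");
    try (InputStream in = url.openStream()) {
        // The endpoint returns a JSON array of {"id": "<metric name>"} entries.
        String body = new String(in.readAllBytes(), StandardCharsets.UTF_8);
        if (!body.contains("\"" + metricId + "\"")) {
            throw new AssertionError("Metric not reported: " + metricId);
        }
    }
}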

Example 2 with ClusterController

Use of org.apache.flink.tests.util.flink.ClusterController in project flink by apache.

From the class SQLClientHBaseITCase, method testHBase.

@Test
public void testHBase() throws Exception {
    try (ClusterController clusterController = flink.startCluster(2)) {
        // Create the source and sink tables and put data into the source table
        hbase.createTable("source", "family1", "family2");
        hbase.createTable("sink", "family1", "family2");
        hbase.putData("source", "row1", "family1", "f1c1", "v1");
        hbase.putData("source", "row1", "family2", "f2c1", "v2");
        hbase.putData("source", "row1", "family2", "f2c2", "v3");
        hbase.putData("source", "row2", "family1", "f1c1", "v4");
        hbase.putData("source", "row2", "family2", "f2c1", "v5");
        hbase.putData("source", "row2", "family2", "f2c2", "v6");
        // Initialize the SQL statements from "hbase_e2e.sql" file
        Map<String, String> varsMap = new HashMap<>();
        varsMap.put("$HBASE_CONNECTOR", hbaseConnector);
        List<String> sqlLines = initializeSqlLines(varsMap);
        // Execute SQL statements in "hbase_e2e.sql" file
        executeSqlStatements(clusterController, sqlLines);
        LOG.info("Verify the sink table result.");
        // Wait until all the results flushed to the HBase sink table.
        checkHBaseSinkResult();
        LOG.info("The HBase SQL client test run successfully.");
    }
}
Also used : ClusterController(org.apache.flink.tests.util.flink.ClusterController) HashMap(java.util.HashMap) Matchers.containsString(org.hamcrest.Matchers.containsString) Test(org.junit.Test)
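
initializeSqlLines and executeSqlStatements are helpers of SQLClientHBaseITCase and are not shown here. The substitution step is simple string replacement over the script; a minimal sketch, in which the method name substituteVars and the way the script path is obtained are assumptions, could look like this:

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// Hypothetical sketch: read the SQL script line by line and replace each
// placeholder key (e.g. "$HBASE_CONNECTOR") with its concrete value.
static List<String> substituteVars(Path sqlFile, Map<String, String> vars) throws Exception {
    List<String> result = new ArrayList<>();
    for (String line : Files.readAllLines(sqlFile)) {
        for (Map.Entry<String, String> var : vars.entrySet()) {
            line = line.replace(var.getKey(), var.getValue());
        }
        result.add(line);
    }
    return result;
}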

Example 3 with ClusterController

Use of org.apache.flink.tests.util.flink.ClusterController in project flink by apache.

From the class PrometheusReporterEndToEndITCase, method testReporter.

@Test
public void testReporter() throws Exception {
    final Path tmpPrometheusDir = tmp.newFolder().toPath().resolve("prometheus");
    final Path prometheusBinDir = tmpPrometheusDir.resolve(PROMETHEUS_FILE_NAME);
    final Path prometheusConfig = prometheusBinDir.resolve("prometheus.yml");
    final Path prometheusBinary = prometheusBinDir.resolve("prometheus");
    Files.createDirectory(tmpPrometheusDir);
    final Path prometheusArchive = downloadCache.getOrDownload("https://github.com/prometheus/prometheus/releases/download/v" + PROMETHEUS_VERSION + '/' + PROMETHEUS_FILE_NAME + ".tar.gz", tmpPrometheusDir);
    LOG.info("Unpacking Prometheus.");
    runBlocking(CommandLineWrapper.tar(prometheusArchive).extract().zipped().targetDir(tmpPrometheusDir).build());
    LOG.info("Setting Prometheus scrape interval.");
    runBlocking(CommandLineWrapper.sed("s/\\(scrape_interval:\\).*/\\1 1s/", prometheusConfig).inPlace().build());
    try (ClusterController ignored = dist.startCluster(1)) {
        final List<Integer> ports = dist.searchAllLogs(LOG_REPORTER_PORT_PATTERN, matcher -> matcher.group(1)).map(Integer::valueOf).collect(Collectors.toList());
        final String scrapeTargets = ports.stream().map(port -> "'localhost:" + port + "'").collect(Collectors.joining(", "));
        LOG.info("Setting Prometheus scrape targets to {}.", scrapeTargets);
        runBlocking(CommandLineWrapper.sed("s/\\(targets:\\).*/\\1 [" + scrapeTargets + "]/", prometheusConfig).inPlace().build());
        LOG.info("Starting Prometheus server.");
        try (AutoClosableProcess prometheus = runNonBlocking(prometheusBinary.toAbsolutePath().toString(), "--config.file=" + prometheusConfig.toAbsolutePath(), "--storage.tsdb.path=" + prometheusBinDir.resolve("data").toAbsolutePath())) {
            final OkHttpClient client = new OkHttpClient();
            checkMetricAvailability(client, "flink_jobmanager_numRegisteredTaskManagers");
            checkMetricAvailability(client, "flink_taskmanager_Status_Network_TotalMemorySegments");
        }
    }
}
Also used : Path(java.nio.file.Path) LocalStandaloneFlinkResourceFactory(org.apache.flink.tests.util.flink.LocalStandaloneFlinkResourceFactory) REFLECTION(org.apache.flink.metrics.prometheus.tests.PrometheusReporterEndToEndITCase.TestParams.InstantiationType.REFLECTION) Arrays(java.util.Arrays) BeforeClass(org.junit.BeforeClass) ProcessorArchitecture(org.apache.flink.util.ProcessorArchitecture) RunWith(org.junit.runner.RunWith) LoggerFactory(org.slf4j.LoggerFactory) ExceptionUtils(org.apache.flink.util.ExceptionUtils) FlinkResourceSetup(org.apache.flink.tests.util.flink.FlinkResourceSetup) JarLocation(org.apache.flink.tests.util.flink.JarLocation) AutoClosableProcess.runNonBlocking(org.apache.flink.tests.util.AutoClosableProcess.runNonBlocking) AutoClosableProcess(org.apache.flink.tests.util.AutoClosableProcess) ObjectMapper(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper) ConfigConstants(org.apache.flink.configuration.ConfigConstants) TestLogger(org.apache.flink.util.TestLogger) Response(okhttp3.Response) Assume(org.junit.Assume) Parameterized(org.junit.runners.Parameterized) Request(okhttp3.Request) Logger(org.slf4j.Logger) Files(java.nio.file.Files) FlinkResource(org.apache.flink.tests.util.flink.FlinkResource) Collection(java.util.Collection) Configuration(org.apache.flink.configuration.Configuration) Test(org.junit.Test) OperatingSystem(org.apache.flink.util.OperatingSystem) Collectors(java.util.stream.Collectors) FACTORY(org.apache.flink.metrics.prometheus.tests.PrometheusReporterEndToEndITCase.TestParams.InstantiationType.FACTORY) Consumer(java.util.function.Consumer) List(java.util.List) OkHttpClient(okhttp3.OkHttpClient) Rule(org.junit.Rule) CommandLineWrapper(org.apache.flink.tests.util.CommandLineWrapper) DownloadCache(org.apache.flink.tests.util.cache.DownloadCache) ClusterController(org.apache.flink.tests.util.flink.ClusterController) PrometheusReporterFactory(org.apache.flink.metrics.prometheus.PrometheusReporterFactory) Pattern(java.util.regex.Pattern) PrometheusReporter(org.apache.flink.metrics.prometheus.PrometheusReporter) TemporaryFolder(org.junit.rules.TemporaryFolder) AutoClosableProcess.runBlocking(org.apache.flink.tests.util.AutoClosableProcess.runBlocking)
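
checkMetricAvailability is a helper of the test class and is not shown here. A plausible sketch of such a check, assuming Prometheus's standard query API on its default port 9090 (the helper name assertPrometheusHasMetric is made up for illustration):

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;

// Hypothetical helper: query the local Prometheus server for the metric and
// fail unless the query succeeds and returns a non-empty result vector.
static void assertPrometheusHasMetric(OkHttpClient client, String metricName) throws Exception {
    Request request = new Request.Builder()
            .url("http://localhost:9090/api/v1/query?query=" + metricName)
            .build();
    try (Response response = client.newCall(request).execute()) {
        String body = response.body().string();
        if (!response.isSuccessful() || !body.contains("\"result\":[{")) {
            throw new AssertionError("Metric not found in Prometheus: " + metricName);
        }
    }
}

In the real test such a check would also need to retry for a while, since Prometheus only collects the reporters' values once per scrape interval.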

Example 4 with ClusterController

Use of org.apache.flink.tests.util.flink.ClusterController in project flink by apache.

From the class SQLClientKafkaITCase, method testKafka.

@Test
public void testKafka() throws Exception {
    try (ClusterController clusterController = flink.startCluster(2)) {
        // Create the JSON topic and send test messages
        String testJsonTopic = "test-json-" + kafkaVersion + "-" + UUID.randomUUID().toString();
        String testAvroTopic = "test-avro-" + kafkaVersion + "-" + UUID.randomUUID().toString();
        kafka.createTopic(1, 1, testJsonTopic);
        String[] messages =
                new String[] {
                    "{\"rowtime\": \"2018-03-12 08:00:00\", \"user\": \"Alice\", \"event\": { \"type\": \"WARNING\", \"message\": \"This is a warning.\"}}",
                    "{\"rowtime\": \"2018-03-12 08:10:00\", \"user\": \"Alice\", \"event\": { \"type\": \"WARNING\", \"message\": \"This is a warning.\"}}",
                    "{\"rowtime\": \"2018-03-12 09:00:00\", \"user\": \"Bob\", \"event\": { \"type\": \"WARNING\", \"message\": \"This is another warning.\"}}",
                    "{\"rowtime\": \"2018-03-12 09:10:00\", \"user\": \"Alice\", \"event\": { \"type\": \"INFO\", \"message\": \"This is a info.\"}}",
                    "{\"rowtime\": \"2018-03-12 09:20:00\", \"user\": \"Steve\", \"event\": { \"type\": \"INFO\", \"message\": \"This is another info.\"}}",
                    "{\"rowtime\": \"2018-03-12 09:30:00\", \"user\": \"Steve\", \"event\": { \"type\": \"INFO\", \"message\": \"This is another info.\"}}",
                    "{\"rowtime\": \"2018-03-12 09:30:00\", \"user\": null, \"event\": { \"type\": \"WARNING\", \"message\": \"This is a bad message because the user is missing.\"}}",
                    "{\"rowtime\": \"2018-03-12 10:40:00\", \"user\": \"Bob\", \"event\": { \"type\": \"ERROR\", \"message\": \"This is an error.\"}}"
                };
        kafka.sendMessages(testJsonTopic, messages);
        // Create topic test-avro
        kafka.createTopic(1, 1, testAvroTopic);
        // Initialize the SQL statements from "kafka_e2e.sql" file
        Map<String, String> varsMap = new HashMap<>();
        varsMap.put("$KAFKA_IDENTIFIER", this.kafkaIdentifier);
        varsMap.put("$TOPIC_JSON_NAME", testJsonTopic);
        varsMap.put("$TOPIC_AVRO_NAME", testAvroTopic);
        varsMap.put("$RESULT", this.result.toAbsolutePath().toString());
        varsMap.put("$KAFKA_BOOTSTRAP_SERVERS", StringUtils.join(kafka.getBootstrapServerAddresses().toArray(), ","));
        List<String> sqlLines = initializeSqlLines(varsMap);
        // Execute SQL statements in "kafka_e2e.sql" file
        executeSqlStatements(clusterController, sqlLines);
        // Wait until all the results are flushed to the CSV file.
        LOG.info("Verify the CSV result.");
        checkCsvResultFile();
        LOG.info("The Kafka({}) SQL client test ran successfully.", this.kafkaSQLVersion);
    }
}
Also used : ClusterController(org.apache.flink.tests.util.flink.ClusterController) HashMap(java.util.HashMap) Test(org.junit.Test)
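
checkCsvResultFile is a helper of SQLClientKafkaITCase and is not shown here. Since the SQL job writes asynchronously, such a check typically polls the result file until the expected rows arrive or a timeout expires; a minimal sketch (method name, row count, and timeout are illustrative assumptions):

import java.nio.file.Files;
import java.nio.file.Path;

// Hypothetical sketch: poll the CSV result file until it holds the expected
// number of rows, failing once the deadline passes.
static void waitForCsvRows(Path resultFile, int expectedRows, long timeoutMs) throws Exception {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
        if (Files.exists(resultFile) && Files.readAllLines(resultFile).size() >= expectedRows) {
            return;
        }
        Thread.sleep(500);
    }
    throw new AssertionError("Timed out waiting for " + expectedRows + " rows in " + resultFile);
}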

Aggregations

Each entry lists a type used together with ClusterController in the examples above; the trailing count is the number of examples that use it.

ClusterController (org.apache.flink.tests.util.flink.ClusterController): 4
Test (org.junit.Test): 4
HashMap (java.util.HashMap): 2
Configuration (org.apache.flink.configuration.Configuration): 2
Files (java.nio.file.Files): 1
Path (java.nio.file.Path): 1
Arrays (java.util.Arrays): 1
Collection (java.util.Collection): 1
List (java.util.List): 1
Consumer (java.util.function.Consumer): 1
Pattern (java.util.regex.Pattern): 1
Collectors (java.util.stream.Collectors): 1
OkHttpClient (okhttp3.OkHttpClient): 1
Request (okhttp3.Request): 1
Response (okhttp3.Response): 1
ConfigConstants (org.apache.flink.configuration.ConfigConstants): 1
PrometheusReporter (org.apache.flink.metrics.prometheus.PrometheusReporter): 1
PrometheusReporterFactory (org.apache.flink.metrics.prometheus.PrometheusReporterFactory): 1
FACTORY (org.apache.flink.metrics.prometheus.tests.PrometheusReporterEndToEndITCase.TestParams.InstantiationType.FACTORY): 1
REFLECTION (org.apache.flink.metrics.prometheus.tests.PrometheusReporterEndToEndITCase.TestParams.InstantiationType.REFLECTION): 1