Use of org.apache.flink.tests.util.AutoClosableProcess in project flink by apache.
From the class PrometheusReporterEndToEndITCase, method testReporter. The test downloads a Prometheus release, rewrites its configuration to scrape a freshly started Flink cluster, launches the Prometheus server as a non-blocking process, and then asserts that JobManager and TaskManager metrics become queryable.
@Test
public void testReporter() throws Exception {
    final Path tmpPrometheusDir = tmp.newFolder().toPath().resolve("prometheus");
    final Path prometheusBinDir = tmpPrometheusDir.resolve(PROMETHEUS_FILE_NAME);
    final Path prometheusConfig = prometheusBinDir.resolve("prometheus.yml");
    final Path prometheusBinary = prometheusBinDir.resolve("prometheus");
    Files.createDirectory(tmpPrometheusDir);

    final Path prometheusArchive = downloadCache.getOrDownload(
        "https://github.com/prometheus/prometheus/releases/download/v"
            + PROMETHEUS_VERSION + '/' + PROMETHEUS_FILE_NAME + ".tar.gz",
        tmpPrometheusDir);

    LOG.info("Unpacking Prometheus.");
    runBlocking(
        CommandLineWrapper.tar(prometheusArchive)
            .extract()
            .zipped()
            .targetDir(tmpPrometheusDir)
            .build());

    LOG.info("Setting Prometheus scrape interval.");
    runBlocking(
        CommandLineWrapper.sed("s/\\(scrape_interval:\\).*/\\1 1s/", prometheusConfig)
            .inPlace()
            .build());

    try (ClusterController ignored = dist.startCluster(1)) {
        final List<Integer> ports = dist
            .searchAllLogs(LOG_REPORTER_PORT_PATTERN, matcher -> matcher.group(1))
            .map(Integer::valueOf)
            .collect(Collectors.toList());

        final String scrapeTargets = ports.stream()
            .map(port -> "'localhost:" + port + "'")
            .collect(Collectors.joining(", "));

        LOG.info("Setting Prometheus scrape targets to {}.", scrapeTargets);
        runBlocking(
            CommandLineWrapper.sed("s/\\(targets:\\).*/\\1 [" + scrapeTargets + "]/", prometheusConfig)
                .inPlace()
                .build());

        LOG.info("Starting Prometheus server.");
        try (AutoClosableProcess prometheus = runNonBlocking(
                prometheusBinary.toAbsolutePath().toString(),
                "--config.file=" + prometheusConfig.toAbsolutePath(),
                "--storage.tsdb.path=" + prometheusBinDir.resolve("data").toAbsolutePath())) {
            final OkHttpClient client = new OkHttpClient();
            checkMetricAvailability(client, "flink_jobmanager_numRegisteredTaskManagers");
            checkMetricAvailability(client, "flink_taskmanager_Status_Network_TotalMemorySegments");
        }
    }
}
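The checkMetricAvailability helper is not part of this excerpt. A minimal sketch of what such a check could look like, assuming Prometheus runs on its default port 9090 and exposes the standard /api/v1/query endpoint; the method body, retry budget, and response check here are illustrative choices for this sketch, not the actual Flink implementation:

// Hypothetical helper -- assumes okhttp3.Request, okhttp3.Response,
// org.apache.flink.api.common.time.Deadline and java.time.Duration are imported.
private static void checkMetricAvailability(final OkHttpClient client, final String metric)
        throws InterruptedException, IOException {
    // Prometheus's standard instant-query endpoint; 9090 is its default port.
    final Request request = new Request.Builder()
            .url("http://localhost:9090/api/v1/query?query=" + metric)
            .get()
            .build();

    // Poll until the metric appears in a query result; the 60-second budget
    // and 1-second interval are arbitrary values for this sketch.
    final Deadline deadline = Deadline.fromNow(Duration.ofSeconds(60));
    while (deadline.hasTimeLeft()) {
        try (Response response = client.newCall(request).execute()) {
            final String body = response.body().string();
            // A query that found the metric echoes its name in the result set.
            if (response.isSuccessful() && body.contains("\"" + metric + "\"")) {
                return;
            }
        }
        Thread.sleep(1000);
    }
    throw new IllegalStateException("Metric " + metric + " was not reported to Prometheus.");
}

Since the scrape interval was lowered to one second earlier in the test, a short polling interval is enough for the metrics to show up once both the cluster and Prometheus are running.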
Use of org.apache.flink.tests.util.AutoClosableProcess in project flink by apache.
From the class LocalStandaloneKafkaResource, method readMessage. The method launches Kafka's console consumer as a non-blocking process, collects its stdout lines, and polls until the expected number of messages has arrived or a 120-second deadline expires.
@Override
public List<String> readMessage(int expectedNumMessages, String groupId, String topic) throws IOException {
    final List<String> messages = Collections.synchronizedList(new ArrayList<>(expectedNumMessages));

    try (final AutoClosableProcess kafka = AutoClosableProcess
            .create(
                kafkaDir.resolve(Paths.get("bin", "kafka-console-consumer.sh")).toString(),
                "--bootstrap-server", KAFKA_ADDRESS,
                "--from-beginning",
                "--max-messages", String.valueOf(expectedNumMessages),
                "--topic", topic,
                "--consumer-property", "group.id=" + groupId)
            .setStdoutProcessor(messages::add)
            .runNonBlocking()) {

        final Deadline deadline = Deadline.fromNow(Duration.ofSeconds(120));
        while (deadline.hasTimeLeft() && messages.size() < expectedNumMessages) {
            try {
                LOG.info("Waiting for messages. Received {}/{}.", messages.size(), expectedNumMessages);
                Thread.sleep(500);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
        }
        if (messages.size() != expectedNumMessages) {
            throw new IOException("Could not read expected number of messages.");
        }
        return messages;
    }
}
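Note that the stdout processor appends from the console consumer's I/O thread while the test thread polls messages.size(), which is why the list is wrapped in Collections.synchronizedList. A minimal usage sketch, assuming an already-populated topic; the variable name, topic, group id, and message count below are illustrative, not taken from the Flink tests:

// Illustrative call site: kafkaResource, topic, group id, and count are assumptions.
final List<String> consumed = kafkaResource.readMessage(10, "test-group", "test-output");
for (final String message : consumed) {
    LOG.info("Consumed record: {}", message);
}

If fewer than the expected number of messages arrive within the 120-second deadline, the call fails with an IOException rather than returning a partial result.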