Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio: class KafkaLogProcessorPipelineTest, method testMetricsAppender.
@Test
public void testMetricsAppender() throws Exception {
  Injector injector = KAFKA_TESTER.getInjector();
  MetricsCollectionService collectionService = injector.getInstance(MetricsCollectionService.class);
  collectionService.startAndWait();
  LoggerContext loggerContext = new LocalAppenderContext(
      injector.getInstance(TransactionRunner.class),
      injector.getInstance(LocationFactory.class),
      injector.getInstance(MetricsCollectionService.class));
  final File logDir = TEMP_FOLDER.newFolder();
  loggerContext.putProperty("logDirectory", logDir.getAbsolutePath());
  LogPipelineConfigurator configurator = new LogPipelineConfigurator(CConfiguration.create());
  configurator.setContext(loggerContext);
  URL configURL = getClass().getClassLoader().getResource("pipeline-metric-appender.xml");
  Assert.assertNotNull(configURL);
  configurator.doConfigure(configURL);
  String topic = "metricsPipeline";
  TestCheckpointManager checkpointManager = new TestCheckpointManager();
  KafkaPipelineConfig config = new KafkaPipelineConfig(topic, Collections.singleton(0), 1024L, 100L, 1048576, 200L);
  KAFKA_TESTER.createTopic(topic, 1);
  loggerContext.start();
  KafkaLogProcessorPipeline pipeline = new KafkaLogProcessorPipeline(
      new LogProcessorPipelineContext(CConfiguration.create(), "testMetricAppender", loggerContext,
                                      NO_OP_METRICS_CONTEXT, 0),
      checkpointManager, KAFKA_TESTER.getBrokerService(), config);
  pipeline.startAndWait();
  // Publish some log messages to Kafka
  long now = System.currentTimeMillis();
  WorkerLoggingContext loggingContext =
      new WorkerLoggingContext("default", "app1", "worker1", "run1", "instance1");
  publishLog(topic, ImmutableList.of(
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "0", now - 1000),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "2", now - 700),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "3", now - 500),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.DEBUG, "hidden", now - 600),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "4", now - 100)),
    loggingContext);
  WorkflowProgramLoggingContext workflowProgramLoggingContext =
      new WorkflowProgramLoggingContext("default", "app1", "wflow1", "run1", ProgramType.MAPREDUCE, "mr1", "mrun1");
  publishLog(topic, ImmutableList.of(
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "0", now - 1000),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.WARN, "2", now - 700),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.TRACE, "3", now - 500)),
    workflowProgramLoggingContext);
  ServiceLoggingContext serviceLoggingContext = new ServiceLoggingContext(
      NamespaceId.SYSTEM.getNamespace(), Constants.Logging.COMPONENT_NAME, Constants.Service.TRANSACTION);
  publishLog(topic, ImmutableList.of(
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "0", now - 1000),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "2", now - 700),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.ERROR, "3", now - 500),
      LogPipelineTestUtil.createLoggingEvent("test.logger", Level.INFO, "1", now - 900)),
    serviceLoggingContext);
  final MetricStore metricStore = injector.getInstance(MetricStore.class);
  try {
    verifyMetricsWithRetry(metricStore,
        new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.info",
                            AggregationFunction.SUM,
                            LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()),
        5L);
    verifyMetricsWithRetry(metricStore,
        new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.debug",
                            AggregationFunction.SUM,
                            LoggingContextHelper.getMetricsTags(loggingContext), new ArrayList<>()),
        1L);
    // mapreduce metrics context
    verifyMetricsWithRetry(metricStore,
        new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.warn",
                            AggregationFunction.SUM,
                            ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default",
                                            Constants.Metrics.Tag.APP, "app1",
                                            Constants.Metrics.Tag.MAPREDUCE, "mr1",
                                            Constants.Metrics.Tag.RUN_ID, "mrun1"),
                            new ArrayList<>()),
        2L);
    // workflow metrics context
    verifyMetricsWithRetry(metricStore,
        new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.app.log.trace",
                            AggregationFunction.SUM,
                            ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, "default",
                                            Constants.Metrics.Tag.APP, "app1",
                                            Constants.Metrics.Tag.WORKFLOW, "wflow1",
                                            Constants.Metrics.Tag.RUN_ID, "run1"),
                            new ArrayList<>()),
        1L);
    verifyMetricsWithRetry(metricStore,
        new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, "system.services.log.error",
                            AggregationFunction.SUM,
                            LoggingContextHelper.getMetricsTags(serviceLoggingContext), new ArrayList<>()),
        3L);
  } finally {
    pipeline.stopAndWait();
    loggerContext.stop();
    collectionService.stopAndWait();
  }
}
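The verifyMetricsWithRetry helper used above is not shown on this page. A minimal sketch of what such a helper could look like, assuming it simply polls the MetricStore until the single aggregated data point reaches the expected count; the method body, the 10-second timeout, and the use of Tasks.waitFor here are assumptions, not the project's actual implementation:

private void verifyMetricsWithRetry(MetricStore metricStore, MetricDataQuery query,
                                    long expectedCount) throws Exception {
  // Metrics are processed asynchronously, so poll until the aggregated value appears.
  Tasks.waitFor(expectedCount, () -> {
    Collection<MetricTimeSeries> result = metricStore.query(query);
    if (result.isEmpty()) {
      // No data point yet; return a value that cannot match the expected count.
      return -1L;
    }
    List<TimeValue> timeValues = result.iterator().next().getTimeValues();
    return timeValues.isEmpty() ? -1L : timeValues.get(0).getValue();
  }, 10, TimeUnit.SECONDS);
}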
Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio: class MetricsProcessorServiceTest, method testMetricsProcessor.
@Test
public void testMetricsProcessor() throws Exception {
  injector.getInstance(TransactionManager.class).startAndWait();
  StoreDefinition.createAllTables(injector.getInstance(StructuredTableAdmin.class));
  injector.getInstance(DatasetOpExecutorService.class).startAndWait();
  injector.getInstance(DatasetService.class).startAndWait();
  final MetricStore metricStore = injector.getInstance(MetricStore.class);
  Set<Integer> partitions = new HashSet<>();
  for (int i = 0; i < cConf.getInt(Constants.Metrics.MESSAGING_TOPIC_NUM); i++) {
    partitions.add(i);
  }
  // Start the metrics processor before publishing metrics via the messaging service.
  // Intentionally set the queue size to a small value, so that MessagingMetricsProcessorManagerService
  // internally can persist metrics while more messages remain to be fetched
  MessagingMetricsProcessorManagerService messagingMetricsProcessorManagerService =
      new MessagingMetricsProcessorManagerService(
          cConf, injector.getInstance(MetricDatasetFactory.class), messagingService,
          injector.getInstance(SchemaGenerator.class), injector.getInstance(DatumReaderFactory.class),
          metricStore, injector.getInstance(MetricsWriterProvider.class), partitions,
          new NoopMetricsContext(), 50, 0);
  messagingMetricsProcessorManagerService.startAndWait();
  long startTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
  // Publish metrics with the messaging service and record expected metrics
  for (int i = 10; i < 20; i++) {
    publishMessagingMetrics(i, startTime, METRICS_CONTEXT, expected, SYSTEM_METRIC_PREFIX, MetricType.COUNTER);
  }
  Thread.sleep(500);
  // Stop and restart messagingMetricsProcessorManagerService
  messagingMetricsProcessorManagerService.stopAndWait();
  // Intentionally set the queue size to a large value, so that MessagingMetricsProcessorManagerService
  // internally only persists metrics when it terminates.
  messagingMetricsProcessorManagerService =
      new MessagingMetricsProcessorManagerService(
          cConf, injector.getInstance(MetricDatasetFactory.class), messagingService,
          injector.getInstance(SchemaGenerator.class), injector.getInstance(DatumReaderFactory.class),
          metricStore, injector.getInstance(MetricsWriterProvider.class), partitions,
          new NoopMetricsContext(), 50, 0);
  messagingMetricsProcessorManagerService.startAndWait();
  // Publish metrics after MessagingMetricsProcessorManagerService restarts and record expected metrics
  for (int i = 20; i < 30; i++) {
    publishMessagingMetrics(i, startTime, METRICS_CONTEXT, expected, SYSTEM_METRIC_PREFIX, MetricType.GAUGE);
  }
  final List<String> missingMetricNames = new ArrayList<>();
  // Wait until all expected metrics can be queried; if not all metrics
  // are retrieved when the timeout occurs, print out the missing metrics
  try {
    Tasks.waitFor(true, new Callable<Boolean>() {
      @Override
      public Boolean call() throws Exception {
        return canQueryAllMetrics(metricStore, METRICS_CONTEXT, expected, missingMetricNames);
      }
    }, 10000, TimeUnit.MILLISECONDS, "Failed to get all metrics");
  } catch (TimeoutException e) {
    Assert.fail(String.format("Metrics: [%s] cannot be found in the metrics store.",
                              Joiner.on(", ").join(missingMetricNames)));
  }
  // Query metrics from the metricStore and compare them with the expected ones
  assertMetricsResult(metricStore, METRICS_CONTEXT, expected);
  // Query for the 5 counter metrics published with messaging between time 5 - 14
  Collection<MetricTimeSeries> queryResult = metricStore.query(
      new MetricDataQuery(5, 14, 1, Integer.MAX_VALUE,
                          ImmutableMap.of(SYSTEM_METRIC_PREFIX + COUNTER_METRIC_NAME, AggregationFunction.SUM),
                          METRICS_CONTEXT, ImmutableList.<String>of(), null));
  MetricTimeSeries timeSeries = Iterables.getOnlyElement(queryResult);
  Assert.assertEquals(5, timeSeries.getTimeValues().size());
  for (TimeValue timeValue : timeSeries.getTimeValues()) {
    Assert.assertEquals(1L, timeValue.getValue());
  }
  // Stop services and servers
  messagingMetricsProcessorManagerService.stopAndWait();
  // Delete all metrics
  metricStore.deleteAll();
}
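Note that this test uses a different MetricDataQuery constructor than the previous one: instead of a single metric name and aggregation, it takes a data-point limit plus a map from metric name to aggregation function, followed by the slice-by tags, group-by tags, and an interpolator. A minimal sketch of that query shape; the metric names, and the startTs, endTs, and tags variables, are illustrative placeholders:

// Query two metrics at once between startTs and endTs (in seconds), at 1-second
// resolution, with at most 100 data points per series, no group-by, no interpolation.
Map<String, AggregationFunction> metrics = ImmutableMap.of(
    "system.metric.count", AggregationFunction.SUM,
    "system.metric.max", AggregationFunction.MAX);
MetricDataQuery query = new MetricDataQuery(startTs, endTs, 1, 100, metrics,
                                            tags, ImmutableList.<String>of(), null);
Collection<MetricTimeSeries> series = metricStore.query(query);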
Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio: class Spark2Test, method testSparkWithObjectStore.
@Test
public void testSparkWithObjectStore() throws Exception {
  ApplicationManager applicationManager = deploy(NamespaceId.DEFAULT, SparkAppUsingObjectStore.class);
  DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
  prepareInputData(keysManager);
  SparkManager sparkManager = applicationManager.getSparkManager(CharCountProgram.class.getSimpleName()).start();
  sparkManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
  sparkManager.waitForStopped(60, TimeUnit.SECONDS);
  DataSetManager<KeyValueTable> countManager = getDataset("count");
  checkOutputData(countManager);
  // validate that the table emitted metrics
  // one read + one write in beforeSubmit(), increment (= read + write) in main -> 4
  Tasks.waitFor(4L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      Collection<MetricTimeSeries> metrics = getMetricsManager().query(
          new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                              "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                              ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                                              Constants.Metrics.Tag.APP, SparkAppUsingObjectStore.class.getSimpleName(),
                                              Constants.Metrics.Tag.SPARK, CharCountProgram.class.getSimpleName(),
                                              Constants.Metrics.Tag.DATASET, "totals"),
                              Collections.<String>emptyList()));
      if (metrics.isEmpty()) {
        return 0L;
      }
      Assert.assertEquals(1, metrics.size());
      MetricTimeSeries ts = metrics.iterator().next();
      Assert.assertEquals(1, ts.getTimeValues().size());
      return ts.getTimeValues().get(0).getValue();
    }
  }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
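The six-argument Tasks.waitFor overload used above retries the check (here every 50 milliseconds, for up to 10 seconds) rather than asserting once, because dataset operation metrics are emitted asynchronously after the run completes. Under Java 8 the anonymous Callable can also be written as a lambda; a slightly simplified sketch, assuming the MetricDataQuery built above has been extracted into a local variable named query and dropping the intermediate asserts:

// Poll until the dataset op-count total reaches 4, checking every 50 ms.
Tasks.waitFor(4L, () -> {
  Collection<MetricTimeSeries> metrics = getMetricsManager().query(query);
  if (metrics.isEmpty()) {
    // Metrics not emitted yet; keep polling.
    return 0L;
  }
  // At total resolution, a single series with a single data point is expected.
  return metrics.iterator().next().getTimeValues().get(0).getValue();
}, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);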
Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio: class MapReduceProgramRunnerTest, method testMapreduceWithDynamicDatasets.
@Test
public void testMapreduceWithDynamicDatasets() throws Exception {
  DatasetId rtInput1 = DefaultId.NAMESPACE.dataset("rtInput1");
  DatasetId rtInput2 = DefaultId.NAMESPACE.dataset("rtInput2");
  DatasetId rtOutput1 = DefaultId.NAMESPACE.dataset("rtOutput1");
  // create the datasets here because they are not created by the app
  dsFramework.addInstance("fileSet", rtInput1, FileSetProperties.builder()
      .setBasePath("rtInput1")
      .setInputFormat(TextInputFormat.class)
      .setOutputFormat(TextOutputFormat.class)
      .setOutputProperty(TextOutputFormat.SEPERATOR, ":")
      .build());
  dsFramework.addInstance("fileSet", rtOutput1, FileSetProperties.builder()
      .setBasePath("rtOutput1")
      .setInputFormat(TextInputFormat.class)
      .setOutputFormat(TextOutputFormat.class)
      .setOutputProperty(TextOutputFormat.SEPERATOR, ":")
      .build());
  // build runtime args for app
  Map<String, String> runtimeArguments = Maps.newHashMap();
  // Make sure there is only one mapper running at a time, since this test has the Mapper writing
  // to a dataset using increment and the in-memory table doesn't really support concurrent increment
  runtimeArguments.put("mr.job.conf.mapreduce.local.map.tasks.maximum", "1");
  runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_NAME, "rtInput1");
  runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_PATHS, "abc, xyz");
  runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_NAME, "rtOutput1");
  runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_PATH, "a001");
  // test reading and writing distinct datasets, reading more than one path
  testMapreduceWithFile("rtInput1", "abc, xyz", "rtOutput1", "a001",
                        AppWithMapReduceUsingRuntimeDatasets.class,
                        AppWithMapReduceUsingRuntimeDatasets.ComputeSum.class,
                        runtimeArguments, AppWithMapReduceUsingRuntimeDatasets.COUNTERS, null);
  // validate that the table emitted metrics
  Collection<MetricTimeSeries> metrics = metricStore.query(
      new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                          "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                          ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                                          Constants.Metrics.Tag.APP, AppWithMapReduceUsingRuntimeDatasets.APP_NAME,
                                          Constants.Metrics.Tag.MAPREDUCE, AppWithMapReduceUsingRuntimeDatasets.MR_NAME,
                                          Constants.Metrics.Tag.DATASET, "rtt"),
                          Collections.<String>emptyList()));
  Assert.assertEquals(1, metrics.size());
  MetricTimeSeries ts = metrics.iterator().next();
  Assert.assertEquals(1, ts.getTimeValues().size());
  Assert.assertEquals(1, ts.getTimeValues().get(0).getValue());
  // test reading and writing the same dataset
  dsFramework.addInstance("fileSet", rtInput2, FileSetProperties.builder()
      .setBasePath("rtInput2")
      .setInputFormat(TextInputFormat.class)
      .setOutputFormat(TextOutputFormat.class)
      .setOutputProperty(TextOutputFormat.SEPERATOR, ":")
      .build());
  runtimeArguments = Maps.newHashMap();
  runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_NAME, "rtInput2");
  runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.INPUT_PATHS, "zzz");
  runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_NAME, "rtInput2");
  runtimeArguments.put(AppWithMapReduceUsingRuntimeDatasets.OUTPUT_PATH, "f123");
  testMapreduceWithFile("rtInput2", "zzz", "rtInput2", "f123",
                        AppWithMapReduceUsingRuntimeDatasets.class,
                        AppWithMapReduceUsingRuntimeDatasets.ComputeSum.class,
                        runtimeArguments, AppWithMapReduceUsingRuntimeDatasets.COUNTERS, null);
}
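Several of these tests end with the same unwrapping pattern: assert that exactly one time series with exactly one data point came back, then read its value. A small helper could factor that out; an illustrative sketch, not part of the project:

// Extracts the single aggregated value from a totals query, asserting that the
// result contains exactly one time series with exactly one data point.
private static long getSingleValue(Collection<MetricTimeSeries> metrics) {
  Assert.assertEquals(1, metrics.size());
  MetricTimeSeries ts = metrics.iterator().next();
  Assert.assertEquals(1, ts.getTimeValues().size());
  return ts.getTimeValues().get(0).getValue();
}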
Use of io.cdap.cdap.api.metrics.MetricDataQuery in project cdap by cdapio: class PreviewDataPipelineTest, method getTotalMetric.
private long getTotalMetric(Map<String, String> tags, String metricName, PreviewManager previewManager) {
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags, new ArrayList<>());
  Collection<MetricTimeSeries> result = previewManager.getMetricsQueryHelper().getMetricStore().query(query);
  if (result.isEmpty()) {
    return 0;
  }
  List<TimeValue> timeValues = result.iterator().next().getTimeValues();
  if (timeValues.isEmpty()) {
    return 0;
  }
  return timeValues.get(0).getValue();
}
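getTotalMetric queries with start and end timestamps of 0 and a resolution of Integer.MAX_VALUE, which in CDAP's metrics system addresses the pre-aggregated totals, so the query yields at most one data point holding the lifetime sum. A hypothetical call site; the tag values and metric name here are made up for illustration:

// Fetch the lifetime total of a hypothetical user metric emitted by a preview run.
Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, "default",
    Constants.Metrics.Tag.APP, "previewApp");
long totalRecords = getTotalMetric(tags, "user.records.out", previewManager);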