Example 16 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

The class PreviewDataPipelineTest, method getTotalMetric.

private long getTotalMetric(Map<String, String> tags, String metricName, PreviewRunner runner) {
    // A zero time range (start = end = 0) with Integer.MAX_VALUE as the resolution
    // asks the metric store for the total aggregate rather than a time series.
    MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM, tags, new ArrayList<String>());
    Collection<MetricTimeSeries> result = runner.getMetricsQueryHelper().getMetricStore().query(query);
    if (result.isEmpty()) {
        return 0;
    }
    // the aggregate comes back as a single series holding at most one point
    List<TimeValue> timeValues = result.iterator().next().getTimeValues();
    if (timeValues.isEmpty()) {
        return 0;
    }
    return timeValues.get(0).getValue();
}
Also used : MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
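
For reference, a TimeValue is just a (timestamp, value) pair. A minimal sketch of the two accessors the examples in this listing rely on (the literals are arbitrary):

// TimeValue pairs an epoch-seconds timestamp with a long value.
TimeValue point = new TimeValue(1234567890L, 42L);
long ts = point.getTimestamp(); // 1234567890
long value = point.getValue();  // 42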

Example 17 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

The class TestAppWithCube, method testApp.

@Category(SlowTests.class)
@Test
public void testApp() throws Exception {
    // Deploy the application
    ApplicationManager appManager = deployApplication(AppWithCube.class);
    ServiceManager serviceManager = appManager.getServiceManager(AppWithCube.SERVICE_NAME).start();
    try {
        serviceManager.waitForStatus(true);
        URL url = serviceManager.getServiceURL();
        long tsInSec = System.currentTimeMillis() / 1000;
        // round to a minute for testing minute resolution
        tsInSec = (tsInSec / 60) * 60;
        // add a couple of facts
        add(url, ImmutableList.of(new CubeFact(tsInSec).addDimensionValue("user", "alex").addDimensionValue("action", "click").addMeasurement("count", MeasureType.COUNTER, 1)));
        add(url, ImmutableList.of(new CubeFact(tsInSec).addDimensionValue("user", "alex").addDimensionValue("action", "click").addMeasurement("count", MeasureType.COUNTER, 1), new CubeFact(tsInSec + 1).addDimensionValue("user", "alex").addDimensionValue("action", "back").addMeasurement("count", MeasureType.COUNTER, 1), new CubeFact(tsInSec + 2).addDimensionValue("user", "alex").addDimensionValue("action", "click").addMeasurement("count", MeasureType.COUNTER, 1)));
        // search for tags
        Collection<DimensionValue> tags = searchDimensionValue(url, new CubeExploreQuery(tsInSec - 60, tsInSec + 60, 1, 100, new ArrayList<DimensionValue>()));
        Assert.assertEquals(1, tags.size());
        DimensionValue tv = tags.iterator().next();
        Assert.assertEquals("user", tv.getName());
        Assert.assertEquals("alex", tv.getValue());
        tags = searchDimensionValue(url, CubeExploreQuery.builder().from().resolution(1, TimeUnit.SECONDS).where().dimension("user", "alex").timeRange(tsInSec - 60, tsInSec + 60).limit(100).build());
        Assert.assertEquals(2, tags.size());
        Iterator<DimensionValue> iterator = tags.iterator();
        tv = iterator.next();
        Assert.assertEquals("action", tv.getName());
        Assert.assertEquals("back", tv.getValue());
        tv = iterator.next();
        Assert.assertEquals("action", tv.getName());
        Assert.assertEquals("click", tv.getValue());
        // search for measures
        Collection<String> measures = searchMeasure(url, new CubeExploreQuery(tsInSec - 60, tsInSec + 60, 1, 100, ImmutableList.of(new DimensionValue("user", "alex"))));
        Assert.assertEquals(1, measures.size());
        String measure = measures.iterator().next();
        Assert.assertEquals("count", measure);
        // query for data
        // 1-sec resolution
        Collection<TimeSeries> data = query(url, CubeQuery.builder().select().measurement("count", AggregationFunction.SUM).from(null).resolution(1, TimeUnit.SECONDS).where().dimension("action", "click").timeRange(tsInSec - 60, tsInSec + 60).limit(100).build());
        Assert.assertEquals(1, data.size());
        TimeSeries series = data.iterator().next();
        List<TimeValue> timeValues = series.getTimeValues();
        Assert.assertEquals(2, timeValues.size());
        TimeValue timeValue = timeValues.get(0);
        Assert.assertEquals(tsInSec, timeValue.getTimestamp());
        Assert.assertEquals(2, timeValue.getValue());
        timeValue = timeValues.get(1);
        Assert.assertEquals(tsInSec + 2, timeValue.getTimestamp());
        Assert.assertEquals(1, timeValue.getValue());
        // 60-sec resolution
        data = query(url, new CubeQuery(null, tsInSec - 60, tsInSec + 60, 60, 100, ImmutableMap.of("count", AggregationFunction.SUM), ImmutableMap.of("action", "click"), new ArrayList<String>(), null));
        Assert.assertEquals(1, data.size());
        series = data.iterator().next();
        timeValues = series.getTimeValues();
        Assert.assertEquals(1, timeValues.size());
        timeValue = timeValues.get(0);
        Assert.assertEquals(tsInSec, timeValue.getTimestamp());
        Assert.assertEquals(3, timeValue.getValue());
    } finally {
        serviceManager.stop();
        serviceManager.waitForStatus(false);
    }
}
Also used : ApplicationManager(co.cask.cdap.test.ApplicationManager) TimeSeries(co.cask.cdap.api.dataset.lib.cube.TimeSeries) ArrayList(java.util.ArrayList) CubeQuery(co.cask.cdap.api.dataset.lib.cube.CubeQuery) URL(java.net.URL) CubeFact(co.cask.cdap.api.dataset.lib.cube.CubeFact) DimensionValue(co.cask.cdap.api.dataset.lib.cube.DimensionValue) ServiceManager(co.cask.cdap.test.ServiceManager) CubeExploreQuery(co.cask.cdap.api.dataset.lib.cube.CubeExploreQuery) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue) Category(org.junit.experimental.categories.Category) Test(org.junit.Test)
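
The fluent CubeQuery builder chain is easy to miss inside the long test above. Isolated and reformatted (the same calls, reusing the test's tsInSec), the 1-second-resolution query over the "click" action reads:

CubeQuery clickQuery = CubeQuery.builder()
    .select().measurement("count", AggregationFunction.SUM)
    .from(null)                            // no named aggregation
    .resolution(1, TimeUnit.SECONDS)
    .where().dimension("action", "click")
    .timeRange(tsInSec - 60, tsInSec + 60)
    .limit(100)
    .build();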

Example 18 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

The class MetricStoreRequestExecutor, method queryTimeSeries.

private Iterator<TimeValue> queryTimeSeries(MetricDataQuery query) throws Exception {
    Collection<MetricTimeSeries> result = metricStore.query(query);
    if (result.isEmpty()) {
        return new ArrayList<TimeValue>().iterator();
    }
    // since there is no group-by condition, the query always returns a single time series
    MetricTimeSeries timeSeries = result.iterator().next();
    // defensively copy each point into a new TimeValue before handing it out
    return Iterables.transform(timeSeries.getTimeValues(), new Function<TimeValue, TimeValue>() {

        @Override
        public TimeValue apply(TimeValue input) {
            return new TimeValue(input.getTimestamp(), input.getValue());
        }
    }).iterator();
}
Also used : Function(com.google.common.base.Function) AggregationFunction(co.cask.cdap.api.dataset.lib.cube.AggregationFunction) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
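
A caller would typically just drain the returned iterator; a hypothetical consumer (the summing logic is illustrative, not from the source):

// Hypothetical consumer: sum every point produced by queryTimeSeries(query).
Iterator<TimeValue> points = queryTimeSeries(query);
long total = 0;
while (points.hasNext()) {
    total += points.next().getValue();
}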

Example 19 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

The class MetricsProcessorServiceTest, method testMetricsProcessor.

@Test
public void testMetricsProcessor() throws Exception {
    injector.getInstance(TransactionManager.class).startAndWait();
    injector.getInstance(DatasetOpExecutor.class).startAndWait();
    injector.getInstance(DatasetService.class).startAndWait();
    zkServer = InMemoryZKServer.builder().build();
    zkServer.startAndWait();
    Properties kafkaConfig = generateKafkaConfig(tmpFolder1);
    EmbeddedKafkaServer kafkaServer = new EmbeddedKafkaServer(kafkaConfig);
    kafkaServer.startAndWait();
    ZKClientService zkClient = ZKClientService.Builder.of(zkServer.getConnectionStr()).build();
    zkClient.startAndWait();
    KafkaClientService kafkaClient = new ZKKafkaClientService(zkClient);
    kafkaClient.startAndWait();
    final MetricStore metricStore = injector.getInstance(MetricStore.class);
    Set<Integer> partitions = new HashSet<>();
    for (int i = 0; i < PARTITION_SIZE; i++) {
        partitions.add(i);
    }
    KafkaPublisher publisher = kafkaClient.getPublisher(KafkaPublisher.Ack.FIRE_AND_FORGET, Compression.SNAPPY);
    final KafkaPublisher.Preparer preparer = publisher.prepare(TOPIC_PREFIX);
    // Wait for metrics to be successfully published to Kafka. Retry if publishing fails.
    Tasks.waitFor(true, new Callable<Boolean>() {

        @Override
        public Boolean call() throws Exception {
            return publishKafkaMetrics(METRICS_CONTEXT, expected, preparer);
        }
    }, 15, TimeUnit.SECONDS, "Failed to publish correct number of metrics to Kafka");
    // Start KafkaMetricsProcessorService after metrics are published to Kafka
    KafkaMetricsProcessorService kafkaMetricsProcessorService = new KafkaMetricsProcessorService(kafkaClient, injector.getInstance(MetricDatasetFactory.class), new MetricsMessageCallbackFactory(injector.getInstance(SchemaGenerator.class), injector.getInstance(DatumReaderFactory.class), metricStore, 4), TOPIC_PREFIX, partitions, new NoopMetricsContext());
    kafkaMetricsProcessorService.startAndWait();
    // Intentionally set queue size to a small value, so that MessagingMetricsProcessorService
    // persists metrics internally while more messages are still being fetched
    MessagingMetricsProcessorService messagingMetricsProcessorService = new MessagingMetricsProcessorService(injector.getInstance(MetricDatasetFactory.class), TOPIC_PREFIX, messagingService, injector.getInstance(SchemaGenerator.class), injector.getInstance(DatumReaderFactory.class), metricStore, 1000L, 5, partitions, new NoopMetricsContext(), 50, 0);
    messagingMetricsProcessorService.startAndWait();
    long startTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
    // Publish metrics with messaging service and record expected metrics
    for (int i = 10; i < 20; i++) {
        publishMessagingMetrics(i, startTime, METRICS_CONTEXT, expected, SYSTEM_METRIC_PREFIX, MetricType.COUNTER);
    }
    Thread.sleep(500);
    // Stop and restart messagingMetricsProcessorService
    messagingMetricsProcessorService.stopAndWait();
    // Intentionally set queue size to a large value, so that MessagingMetricsProcessorService
    // only persists metrics internally when it terminates
    messagingMetricsProcessorService = new MessagingMetricsProcessorService(injector.getInstance(MetricDatasetFactory.class), TOPIC_PREFIX, messagingService, injector.getInstance(SchemaGenerator.class), injector.getInstance(DatumReaderFactory.class), metricStore, 500L, 100, partitions, new NoopMetricsContext(), 50, 0);
    messagingMetricsProcessorService.startAndWait();
    // Publish metrics after MessagingMetricsProcessorService restarts and record expected metrics
    for (int i = 20; i < 30; i++) {
        publishMessagingMetrics(i, startTime, METRICS_CONTEXT, expected, SYSTEM_METRIC_PREFIX, MetricType.GAUGE);
    }
    final List<String> missingMetricNames = new ArrayList<>();
    // Wait until all expected metrics can be queried from the metric store; if not all
    // are retrieved when the timeout occurs, print out the missing metrics
    try {
        Tasks.waitFor(true, new Callable<Boolean>() {

            @Override
            public Boolean call() throws Exception {
                return canQueryAllMetrics(metricStore, METRICS_CONTEXT, expected, missingMetricNames);
            }
        }, 10000, TimeUnit.MILLISECONDS, "Failed to get all metrics");
    } catch (TimeoutException e) {
        Assert.fail(String.format("Metrics: [%s] cannot be found in the metrics store.", Joiner.on(", ").join(missingMetricNames)));
    }
    // Query metrics from the metricStore and compare them with the expected ones
    assertMetricsResult(metricStore, METRICS_CONTEXT, expected);
    // Query for the 5 counter metrics published with messaging between time 5 - 14
    Collection<MetricTimeSeries> queryResult = metricStore.query(new MetricDataQuery(5, 14, 1, Integer.MAX_VALUE, ImmutableMap.of(SYSTEM_METRIC_PREFIX + COUNTER_METRIC_NAME, AggregationFunction.SUM), METRICS_CONTEXT, ImmutableList.<String>of(), null));
    MetricTimeSeries timeSeries = Iterables.getOnlyElement(queryResult);
    Assert.assertEquals(5, timeSeries.getTimeValues().size());
    for (TimeValue timeValue : timeSeries.getTimeValues()) {
        Assert.assertEquals(1L, timeValue.getValue());
    }
    // Stop services and servers
    kafkaMetricsProcessorService.stopAndWait();
    messagingMetricsProcessorService.stopAndWait();
    kafkaServer.stopAndWait();
    zkServer.stopAndWait();
    // Delete all metrics
    metricStore.deleteAll();
}
Also used : MetricStore(co.cask.cdap.api.metrics.MetricStore) ZKKafkaClientService(org.apache.twill.internal.kafka.client.ZKKafkaClientService) DatumReaderFactory(co.cask.cdap.internal.io.DatumReaderFactory) ArrayList(java.util.ArrayList) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) DatasetService(co.cask.cdap.data2.datafabric.dataset.service.DatasetService) Properties(java.util.Properties) NoopMetricsContext(co.cask.cdap.api.metrics.NoopMetricsContext) MetricDatasetFactory(co.cask.cdap.metrics.store.MetricDatasetFactory) EmbeddedKafkaServer(org.apache.twill.internal.kafka.EmbeddedKafkaServer) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue) HashSet(java.util.HashSet) KafkaPublisher(org.apache.twill.kafka.client.KafkaPublisher) TimeoutException(java.util.concurrent.TimeoutException) ZKKafkaClientService(org.apache.twill.internal.kafka.client.ZKKafkaClientService) KafkaClientService(org.apache.twill.kafka.client.KafkaClientService) SchemaGenerator(co.cask.cdap.internal.io.SchemaGenerator) DatasetOpExecutor(co.cask.cdap.data2.datafabric.dataset.service.executor.DatasetOpExecutor) TopicNotFoundException(co.cask.cdap.api.messaging.TopicNotFoundException) TimeoutException(java.util.concurrent.TimeoutException) IOException(java.io.IOException) ZKClientService(org.apache.twill.zookeeper.ZKClientService) TransactionManager(org.apache.tephra.TransactionManager) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) Test(org.junit.Test)
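
Two MetricDataQuery constructor shapes appear across these examples. Shown side by side, with parameter meanings inferred from the snippets and metricName/tags standing in for real arguments:

// Single-metric form (Example 16): start = end = 0 with Integer.MAX_VALUE
// resolution requests the total aggregate of one metric.
MetricDataQuery totals = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                             AggregationFunction.SUM, tags, new ArrayList<String>());

// Multi-metric form (Example 19): explicit time range and resolution, a point
// limit, a map of metric name to aggregation, slice-by tags, group-by
// dimensions, and an optional interpolator (null here).
MetricDataQuery range = new MetricDataQuery(5, 14, 1, Integer.MAX_VALUE,
                                            ImmutableMap.of(metricName, AggregationFunction.SUM),
                                            tags, ImmutableList.<String>of(), null);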

Example 20 with TimeValue

Use of co.cask.cdap.api.dataset.lib.cube.TimeValue in project cdap by caskdata.

The class MetricsQueryHelper, method decorate.

// Convert the cube's TimeValue points into the MetricQueryResult.TimeValue
// array shape used in query responses.
private MetricQueryResult.TimeValue[] decorate(List<TimeValue> points) {
    MetricQueryResult.TimeValue[] timeValues = new MetricQueryResult.TimeValue[points.size()];
    int k = 0;
    for (TimeValue timeValue : points) {
        timeValues[k++] = new MetricQueryResult.TimeValue(timeValue.getTimestamp(), timeValue.getValue());
    }
    return timeValues;
}
Also used : MetricQueryResult(co.cask.cdap.proto.MetricQueryResult) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
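
On Java 8+ the same conversion can be written as a stream pipeline; an equivalent alternative sketch, not the project's code:

// Alternative sketch: stream-based equivalent of decorate(...).
private MetricQueryResult.TimeValue[] decorateWithStream(List<TimeValue> points) {
    return points.stream()
        .map(tv -> new MetricQueryResult.TimeValue(tv.getTimestamp(), tv.getValue()))
        .toArray(MetricQueryResult.TimeValue[]::new);
}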

Aggregations

TimeValue (co.cask.cdap.api.dataset.lib.cube.TimeValue): 20
MetricTimeSeries (co.cask.cdap.api.metrics.MetricTimeSeries): 12
MetricDataQuery (co.cask.cdap.api.metrics.MetricDataQuery): 11
Test (org.junit.Test): 5
AggregationFunction (co.cask.cdap.api.dataset.lib.cube.AggregationFunction): 4
DimensionValue (co.cask.cdap.api.dataset.lib.cube.DimensionValue): 4
Map (java.util.Map): 4
TimeSeries (co.cask.cdap.api.dataset.lib.cube.TimeSeries): 3
InMemoryMetricsTable (co.cask.cdap.data2.dataset2.lib.table.inmemory.InMemoryMetricsTable): 2
ImmutableList (com.google.common.collect.ImmutableList): 2
IOException (java.io.IOException): 2
ArrayList (java.util.ArrayList): 2
LinkedHashMap (java.util.LinkedHashMap): 2
List (java.util.List): 2
Cube (co.cask.cdap.api.dataset.lib.cube.Cube): 1
CubeDeleteQuery (co.cask.cdap.api.dataset.lib.cube.CubeDeleteQuery): 1
CubeExploreQuery (co.cask.cdap.api.dataset.lib.cube.CubeExploreQuery): 1
CubeFact (co.cask.cdap.api.dataset.lib.cube.CubeFact): 1
CubeQuery (co.cask.cdap.api.dataset.lib.cube.CubeQuery): 1
Interpolators (co.cask.cdap.api.dataset.lib.cube.Interpolators): 1