
Example 1 with MetricType

use of com.linkedin.thirdeye.api.MetricType in project pinot by linkedin.

the class testMetricTransfer method transfer.

@Test
public void transfer() {
    // create a mock MetricTimeSeries
    List<String> names = new ArrayList<>(1);
    String mName = "metric0";
    names.add(0, mName);
    List<MetricType> types = Collections.nCopies(names.size(), MetricType.DOUBLE);
    MetricSchema metricSchema = new MetricSchema(names, types);
    MetricTimeSeries metrics = new MetricTimeSeries(metricSchema);
    // the last three values are current values; the rest are baseline values
    double[] m0 = { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
    for (long i = 0L; i <= 5L; i++) {
        metrics.set(i, mName, 1.0);
    }
    // create a list of mock scaling factors
    ScalingFactor sf0 = new ScalingFactor(2L, 6L, 0.8);
    List<ScalingFactor> sfList0 = new ArrayList<>();
    sfList0.add(sf0);
    Properties properties = new Properties();
    properties.put(MetricTransfer.SEASONAL_SIZE, "3");
    properties.put(MetricTransfer.SEASONAL_UNIT, TimeUnit.MILLISECONDS.toString());
    // intentionally set to an incorrect value (2)
    properties.put(MetricTransfer.BASELINE_SEASONAL_PERIOD, "2");
    MetricTransfer.rescaleMetric(metrics, 3, sfList0, mName, properties);
    double[] m1_expected = { 0.8, 0.8, 0.0, 1.0, 1.0, 1.0 };
    double[] m_actual = new double[6];
    for (int i = 0; i <= 5; i++) {
        m_actual[i] = metrics.get(i, mName).doubleValue();
    }
    Assert.assertEquals(m_actual, m1_expected);
    //    // revert to the original cases
    //    ScalingFactor _sf0 = new ScalingFactor(2l, 4l, 1.25);
    //    // no points in time range and no change
    //    sfList0.remove(0);
    //    Assert.assertEquals(sfList0.size(), 0);
    //    sfList0.add(_sf0);
    //    MetricTransfer.rescaleMetric(metrics, , sfList0, mName);
    //    for (int i=0; i<=5; i++) {
    //      m_actual[i]= metrics.get(i, mName).doubleValue();
    //    }
    //    Assert.assertEquals(m_actual, m0);
    // a scaling factor whose window lies entirely outside the series should not affect the values
    sfList0.remove(0);
    ScalingFactor sf1 = new ScalingFactor(12L, 14L, 0.8);
    sfList0.add(sf1);
    MetricTransfer.rescaleMetric(metrics, 3, sfList0, mName, properties);
    for (int i = 0; i <= 5; i++) {
        m_actual[i] = metrics.get(i, mName).doubleValue();
    }
    Assert.assertEquals(m_actual, m1_expected);
}
Also used : MetricSchema(com.linkedin.thirdeye.api.MetricSchema) MetricType(com.linkedin.thirdeye.api.MetricType) ArrayList(java.util.ArrayList) MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries) Properties(java.util.Properties) Test(org.testng.annotations.Test)
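
To see the same flow outside of TestNG, here is a minimal standalone sketch that builds the same one-metric series and prints the rescaled values. It reuses only calls that appear in the test above (MetricSchema, MetricTimeSeries, ScalingFactor, MetricTransfer.rescaleMetric and its property keys); the package of MetricTransfer and ScalingFactor is assumed and may need adjusting to your ThirdEye build.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import com.linkedin.thirdeye.api.MetricSchema;
import com.linkedin.thirdeye.api.MetricTimeSeries;
import com.linkedin.thirdeye.api.MetricType;
// assumed package; adjust to wherever MetricTransfer and ScalingFactor live in your ThirdEye module
import com.linkedin.thirdeye.detector.metric.transfer.MetricTransfer;
import com.linkedin.thirdeye.detector.metric.transfer.ScalingFactor;

public class RescaleMetricSketch {
    public static void main(String[] args) {
        String mName = "metric0";
        List<String> names = Collections.singletonList(mName);
        List<MetricType> types = Collections.nCopies(names.size(), MetricType.DOUBLE);
        MetricTimeSeries metrics = new MetricTimeSeries(new MetricSchema(names, types));
        // six points, all 1.0: the first three are baseline values, the last three are current values
        for (long i = 0L; i <= 5L; i++) {
            metrics.set(i, mName, 1.0);
        }
        // rescale the window [2, 6) by a factor of 0.8
        List<ScalingFactor> scalingFactors = new ArrayList<>();
        scalingFactors.add(new ScalingFactor(2L, 6L, 0.8));
        Properties properties = new Properties();
        properties.put(MetricTransfer.SEASONAL_SIZE, "3");
        properties.put(MetricTransfer.SEASONAL_UNIT, TimeUnit.MILLISECONDS.toString());
        properties.put(MetricTransfer.BASELINE_SEASONAL_PERIOD, "2");
        MetricTransfer.rescaleMetric(metrics, 3, scalingFactors, mName, properties);
        // print the rescaled series
        for (long i = 0L; i <= 5L; i++) {
            System.out.println(i + " -> " + metrics.get(i, mName).doubleValue());
        }
    }
}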

Example 2 with MetricType

use of com.linkedin.thirdeye.api.MetricType in project pinot by linkedin.

the class TimeSeriesResponseConverter method toMap.

/**
   * Convert the response to a Map<DimensionKey, MetricTimeSeries>. The DimensionKey is generated
   * based on schemaDimensions, while the MetricTimeSeries objects are generated from the rows
   * within the response input. The metrics returned in the MetricTimeSeries instances correspond
   * to the raw metric names as opposed to the full metric function (e.g. __COUNT instead of
   * SUM(__COUNT)).
   */
public static Map<DimensionKey, MetricTimeSeries> toMap(TimeSeriesResponse response, List<String> schemaDimensions) {
    DimensionKeyGenerator dimensionKeyGenerator = new DimensionKeyGenerator(schemaDimensions);
    List<String> metrics = new ArrayList<>(response.getMetrics());
    Set<String> metricSet = new HashSet<>(metrics);
    List<MetricType> types = Collections.nCopies(metrics.size(), MetricType.DOUBLE);
    MetricSchema metricSchema = new MetricSchema(metrics, types);
    SetMultimap<DimensionKey, TimeSeriesRow> dimensionKeyToRows = HashMultimap.create();
    // group the rows by their dimension key
    for (int i = 0; i < response.getNumRows(); i++) {
        TimeSeriesRow row = response.getRow(i);
        DimensionKey dimensionKey = dimensionKeyGenerator.get(row.getDimensionNames(), row.getDimensionValues());
        dimensionKeyToRows.put(dimensionKey, row);
    }
    Map<DimensionKey, MetricTimeSeries> result = new HashMap<>();
    for (Entry<DimensionKey, Collection<TimeSeriesRow>> entry : dimensionKeyToRows.asMap().entrySet()) {
        DimensionKey key = entry.getKey();
        MetricTimeSeries metricTimeSeries = new MetricTimeSeries(metricSchema);
        result.put(key, metricTimeSeries);
        for (TimeSeriesRow timeSeriesRow : entry.getValue()) {
            long timestamp = timeSeriesRow.getStart();
            for (TimeSeriesMetric metric : timeSeriesRow.getMetrics()) {
                String metricName = metric.getMetricName();
                // a row may contain additional metrics, e.g. the raw metrics required for
                // calculating derived ones, so keep only the metrics that were requested
                if (metricSet.contains(metricName)) {
                    Double value = metric.getValue();
                    metricTimeSeries.increment(timestamp, metricName, value);
                }
            }
        }
    }
    return result;
}
Also used : MetricSchema(com.linkedin.thirdeye.api.MetricSchema) HashMap(java.util.HashMap) MetricType(com.linkedin.thirdeye.api.MetricType) ArrayList(java.util.ArrayList) MetricTimeSeries(com.linkedin.thirdeye.api.MetricTimeSeries) DimensionKey(com.linkedin.thirdeye.api.DimensionKey) TimeSeriesMetric(com.linkedin.thirdeye.client.timeseries.TimeSeriesRow.TimeSeriesMetric) Collection(java.util.Collection) HashSet(java.util.HashSet)
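
Because toMap is static, converting a fetched response into per-dimension time series is a single call. The sketch below shows one possible way to consume the result; it is only a sketch: the TimeSeriesResponse is assumed to have been fetched elsewhere, the dimension names and the __COUNT metric are placeholders, and MetricTimeSeries.getTimeWindowSet() is assumed to expose the stored timestamps.

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import com.linkedin.thirdeye.api.DimensionKey;
import com.linkedin.thirdeye.api.MetricTimeSeries;
// assumed package, matching the TimeSeriesRow import above; adjust if the client classes live elsewhere
import com.linkedin.thirdeye.client.timeseries.TimeSeriesResponse;
import com.linkedin.thirdeye.client.timeseries.TimeSeriesResponseConverter;

public class TimeSeriesResponseUsage {
    public static void printSeries(TimeSeriesResponse response) {
        // hypothetical dimension names; use the dimensions of your collection's schema
        List<String> schemaDimensions = Arrays.asList("country", "browser");
        Map<DimensionKey, MetricTimeSeries> byDimension =
                TimeSeriesResponseConverter.toMap(response, schemaDimensions);
        for (Map.Entry<DimensionKey, MetricTimeSeries> entry : byDimension.entrySet()) {
            DimensionKey key = entry.getKey();
            MetricTimeSeries series = entry.getValue();
            // the series is keyed by the raw metric name (e.g. __COUNT), not the aggregate function
            for (Long timestamp : series.getTimeWindowSet()) { // getTimeWindowSet() assumed
                System.out.println(key + " @ " + timestamp + " = "
                        + series.get(timestamp, "__COUNT").doubleValue());
            }
        }
    }
}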

Aggregations

MetricSchema (com.linkedin.thirdeye.api.MetricSchema) 2
MetricTimeSeries (com.linkedin.thirdeye.api.MetricTimeSeries) 2
MetricType (com.linkedin.thirdeye.api.MetricType) 2
ArrayList (java.util.ArrayList) 2
DimensionKey (com.linkedin.thirdeye.api.DimensionKey) 1
TimeSeriesMetric (com.linkedin.thirdeye.client.timeseries.TimeSeriesRow.TimeSeriesMetric) 1
Collection (java.util.Collection) 1
HashMap (java.util.HashMap) 1
HashSet (java.util.HashSet) 1
Properties (java.util.Properties) 1
Test (org.testng.annotations.Test) 1