Search in sources:

Example 1 with RrdDef

Use of org.rrd4j.core.RrdDef in project openhab1-addons by openhab.

The class RRD4jService, method getRrdDef.

private RrdDef getRrdDef(String itemName, File file) {
    RrdDef rrdDef = new RrdDef(file.getAbsolutePath());
    RrdDefConfig useRdc = getRrdDefConfig(itemName);
    rrdDef.setStep(useRdc.step);
    rrdDef.setStartTime(System.currentTimeMillis() / 1000 - 1);
    rrdDef.addDatasource(DATASOURCE_STATE, useRdc.dsType, useRdc.heartbeat, useRdc.min, useRdc.max);
    for (RrdArchiveDef rad : useRdc.archives) {
        rrdDef.addArchive(rad.fcn, rad.xff, rad.steps, rad.rows);
    }
    return rrdDef;
}
Also used : RrdDef(org.rrd4j.core.RrdDef)
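
A minimal usage sketch, not part of RRD4jService: the definition returned by getRrdDef(...) can be handed to the classic rrd4j RrdDb(RrdDef) constructor to create the backing .rrd file and store one value. The helper name storeOneValue and the stored value are assumptions for illustration only; it additionally uses org.rrd4j.core.RrdDb, org.rrd4j.core.Sample, and java.io.IOException.

// Illustrative only: persist a single value using the definition built above
private void storeOneValue(String itemName, File file, double value) throws IOException {
    RrdDef def = getRrdDef(itemName, file);
    // Constructing an RrdDb from an RrdDef writes the new .rrd file to disk
    RrdDb db = new RrdDb(def);
    try {
        Sample sample = db.createSample(); // sample timestamp defaults to "now"
        sample.setValue(DATASOURCE_STATE, value);
        sample.update();                   // write the value into the configured archives
    } finally {
        db.close();
    }
}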

Example 2 with RrdDef

Use of org.rrd4j.core.RrdDef in project ddf by codice.

The class RrdJmxCollector, method createRrdFile.

/**
     * Creates an RRD file based on the metric's name (path) in the DDF metrics sub-directory. If
     * the RRD file does not already exist, an RRD DB instance is created from the new file's
     * definition. (The file can already exist if it was created earlier and DDF was then
     * restarted, causing this method to be called again.) If the RRD file already exists, an RRD
     * DB instance is simply created based on the existing RRD file.
     *
     * @param metricName
     *            path where the RRD file is to be created. This is required.
     * @param dsName
     *            data source name for the RRD file. This is required.
     * @param dsType
     *            data source type, i.e., DERIVE, COUNTER, or GAUGE. This is required. (ABSOLUTE is
     *            not currently supported.)
     * @param minValue
     *            the minimum value that will be stored in the data source; any values smaller than
     *            this will be stored as NaN (aka Unknown)
     * @param maxValue
     *            the maximum value that will be stored in the data source; any values larger than
     *            this will be stored as NaN (aka Unknown)
     *
     * @throws IOException
     * @throws CollectorException
     */
private void createRrdFile(final String metricName, final String dsName, final DsType dsType, double minValue, double maxValue) throws IOException, CollectorException {
    LOGGER.trace("ENTERING: createRrdFile");
    if (StringUtils.isEmpty(metricName)) {
        throw new CollectorException("Path where RRD file is to be created must be specified.");
    } else {
        rrdPath = metricsDir + metricName + RRD_FILENAME_SUFFIX;
    }
    if (StringUtils.isEmpty(dsName)) {
        throw new CollectorException("The name of the data source used in the RRD file must be specified.");
    }
    if (!dsType.equals(DsType.COUNTER) && !dsType.equals(DsType.GAUGE) && !dsType.equals(DsType.DERIVE)) {
        throw new CollectorException("Data Source type for the RRD file must be either DERIVE, COUNTER or GAUGE.");
    }
    File file = new File(rrdPath);
    if (!file.exists()) {
        // Create necessary parent directories
        if (!file.getParentFile().exists()) {
            if (!file.getParentFile().mkdirs()) {
                LOGGER.debug("Could not create parent file: {}", file.getParentFile().getAbsolutePath());
            }
        }
        LOGGER.debug("Creating new RRD file {}", rrdPath);
        RrdDef def = new RrdDef(rrdPath, rrdStep);
        // NOTE: Currently restrict each RRD file to only have one data source
        // (even though RRD supports multiple data sources in a single RRD file)
        def.addDatasource(dsName, dsType, 90, minValue, maxValue);
    // COUNTER and DERIVE data sources hold continuously incrementing counts
    // (their values effectively reset when DDF is restarted, since all JMX MBeans are recreated).
    if (dsType == DsType.COUNTER || dsType == DsType.DERIVE) {
            // 1 minute resolution for last 60 minutes
            def.addArchive(ConsolFun.TOTAL, DEFAULT_XFF_FACTOR, 1, 60);
            // 15 minute resolution for the last year
            def.addArchive(ConsolFun.TOTAL, DEFAULT_XFF_FACTOR, 15, ONE_YEAR_IN_15_MINUTE_STEPS);
            // 1 minute resolution for last 60 minutes
            def.addArchive(ConsolFun.AVERAGE, DEFAULT_XFF_FACTOR, 1, 60);
            // 15 minute resolution for the last year
            def.addArchive(ConsolFun.AVERAGE, DEFAULT_XFF_FACTOR, 15, ONE_YEAR_IN_15_MINUTE_STEPS);
            // 1 minute resolution for last 60 minutes
            def.addArchive(ConsolFun.MAX, DEFAULT_XFF_FACTOR, 1, 60);
            // 15 minute resolution for the last year
            def.addArchive(ConsolFun.MAX, DEFAULT_XFF_FACTOR, 15, ONE_YEAR_IN_15_MINUTE_STEPS);
            // 1 minute resolution for last 60 minutes
            def.addArchive(ConsolFun.MIN, DEFAULT_XFF_FACTOR, 1, 60);
            // 15 minute resolution for the last year
            def.addArchive(ConsolFun.MIN, DEFAULT_XFF_FACTOR, 15, ONE_YEAR_IN_15_MINUTE_STEPS);
        // Use a GAUGE to store the values we measure directly as they are,
        // e.g., response time for an ingest or query
        } else if (dsType == DsType.GAUGE) {
            // If you want to know the amount, look at the averages.
            // If you want to know the rate, look at the maximum.
            // 1 minute resolution for last 60 minutes
            def.addArchive(ConsolFun.TOTAL, DEFAULT_XFF_FACTOR, 1, 60);
            // 15 minute resolution for the last year
            def.addArchive(ConsolFun.TOTAL, DEFAULT_XFF_FACTOR, 15, ONE_YEAR_IN_15_MINUTE_STEPS);
            // 1 minute resolution for last 60 minutes
            def.addArchive(ConsolFun.AVERAGE, DEFAULT_XFF_FACTOR, 1, 60);
            // 15 minute resolution for the last year
            def.addArchive(ConsolFun.AVERAGE, DEFAULT_XFF_FACTOR, 15, ONE_YEAR_IN_15_MINUTE_STEPS);
            // 1 minute resolution for last 60 minutes
            def.addArchive(ConsolFun.MAX, DEFAULT_XFF_FACTOR, 1, 60);
            // 15 minute resolution for the last year
            def.addArchive(ConsolFun.MAX, DEFAULT_XFF_FACTOR, 15, ONE_YEAR_IN_15_MINUTE_STEPS);
            // 1 minute resolution for last 60 minutes
            def.addArchive(ConsolFun.MIN, DEFAULT_XFF_FACTOR, 1, 60);
            // 15 minute resolution for the last year
            def.addArchive(ConsolFun.MIN, DEFAULT_XFF_FACTOR, 15, ONE_YEAR_IN_15_MINUTE_STEPS);
        }
        // Create RRD file based on the RRD file definition
        rrdDb = pool.requestRrdDb(def);
    } else {
        LOGGER.debug("rrd file {} already exists - absolute path = {}", rrdPath, file.getAbsolutePath());
        rrdDb = pool.requestRrdDb(rrdPath);
    }
    LOGGER.trace("EXITING: createRrdFile");
}
Also used : RrdDef(org.rrd4j.core.RrdDef) CollectorException(ddf.metrics.collector.CollectorException) File(java.io.File)
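
A quick sketch of the archive sizing implied by the comments above; the constant values shown are assumptions for illustration, not copied from the DDF source. With an rrdStep of 60 seconds, a 1-step archive row covers one minute (so 60 rows span the last hour), and a 15-step archive row covers 15 minutes (so a year needs 365 * 24 * 4 rows).

// Assumed values, shown only to make the archive arithmetic concrete
private static final int RRD_STEP = 60;                              // seconds per primary data point (assumed)
private static final double DEFAULT_XFF_FACTOR = 0.5;                // typical xff value (assumed)
private static final int ONE_YEAR_IN_15_MINUTE_STEPS = 365 * 24 * 4; // = 35,040 rows of 15 minutes each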

Example 3 with RrdDef

Use of org.rrd4j.core.RrdDef in project ddf by codice.

The class MetricsEndpointTest, method testGetMetricsList.

@SuppressWarnings("rawtypes")
@Test
// @Ignore
public void testGetMetricsList() throws Exception {
    // Delete all .rrd files in test directory to ensure starting with clean directory
    File testDir = new File(TEST_DIR);
    File[] fileList = testDir.listFiles();
    if (fileList != null) {
        for (File file : fileList) {
            if (file.isFile()) {
                file.delete();
            }
        }
    }
    // Create RRD file that Metrics Endpoint will detect
    rrdPath = TEST_DIR + "uptime.rrd";
    RrdDef def = new RrdDef(rrdPath, 1);
    def.addDatasource("uptime", DsType.COUNTER, 90, 0, Double.NaN);
    def.addArchive(ConsolFun.TOTAL, 0.5, 1, 60);
    rrdDb = RrdDbPool.getInstance().requestRrdDb(def);
    UriInfo uriInfo = createUriInfo();
    // Get the metrics list from the endpoint
    MetricsEndpoint endpoint = getEndpoint();
    endpoint.setMetricsDir(TEST_DIR);
    Response response = endpoint.getMetricsList(uriInfo);
    String metricsList = (String) response.getEntity();
    LOGGER.debug("metricsList = {}", metricsList);
    cleanupRrd();
    // ContainerFactory lets json-simple build our own collection types when parsing
    // (it is called internally by JSONParser.parse(...))
    ContainerFactory containerFactory = new ContainerFactory() {
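        // Note: "creatArrayContainer" (sic) is the exact method name declared by json-simple's ContainerFactory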

        public List creatArrayContainer() {
            return new LinkedList();
        }

        public Map createObjectContainer() {
            return new LinkedHashMap();
        }
    };
    // Parse the returned JSON text
    JSONParser parser = new JSONParser();
    Map json = (Map) parser.parse(metricsList, containerFactory);
    Set<String> metricNames = (Set<String>) json.keySet();
    assertThat(metricNames.size(), equalTo(1));
    assertThat(metricNames, hasItem("uptime"));
    Iterator metricsIter = json.entrySet().iterator();
    // Example hyperlink format: http://<host>:<port>/services/internal/metrics?dateOffset=3600
    while (metricsIter.hasNext()) {
        Map.Entry entry = (Map.Entry) metricsIter.next();
        Map metricTimeRangeLinks = (Map) entry.getValue();
        LOGGER.debug("metricTimeRangeLinks = {}", metricTimeRangeLinks);
        // Verify each metric name, e.g., "uptime", has all of the time ranges represented
        assertThat(metricTimeRangeLinks.containsKey("15m"), is(true));
        assertThat(metricTimeRangeLinks.containsKey("1h"), is(true));
        // assertThat(metricTimeRangeLinks.containsKey("4h"), is(true));
        // assertThat(metricTimeRangeLinks.containsKey("12h"), is(true));
        assertThat(metricTimeRangeLinks.containsKey("1d"), is(true));
        // assertThat(metricTimeRangeLinks.containsKey("3d"), is(true));
        assertThat(metricTimeRangeLinks.containsKey("1w"), is(true));
        assertThat(metricTimeRangeLinks.containsKey("1M"), is(true));
        assertThat(metricTimeRangeLinks.containsKey("3M"), is(true));
        assertThat(metricTimeRangeLinks.containsKey("6M"), is(true));
        assertThat(metricTimeRangeLinks.containsKey("1y"), is(true));
        Iterator timeRangeLinksIter = metricTimeRangeLinks.entrySet().iterator();
        // Verify that each time range provides hyperlinks for all of the supported formats and
        // that the correct dateOffset is specified in the hyperlinks
        while (timeRangeLinksIter.hasNext()) {
            Map.Entry timeRangeLinkEntry = (Map.Entry) timeRangeLinksIter.next();
            String timeRange = (String) timeRangeLinkEntry.getKey();
            Map<String, String> metricHyperlinks = (Map<String, String>) timeRangeLinkEntry.getValue();
            Long dateOffset = MetricsEndpoint.TIME_RANGES.get(timeRange);
            assertThat(metricHyperlinks.containsKey("PNG"), is(true));
            assertThat(metricHyperlinks.get("PNG"), endsWith("dateOffset=" + dateOffset));
            assertThat(metricHyperlinks.containsKey("CSV"), is(true));
            assertThat(metricHyperlinks.get("CSV"), endsWith("dateOffset=" + dateOffset));
            assertThat(metricHyperlinks.containsKey("XLS"), is(true));
            assertThat(metricHyperlinks.get("XLS"), endsWith("dateOffset=" + dateOffset));
        }
    }
}
Also used : RrdDef(org.rrd4j.core.RrdDef) Set(java.util.Set) Matchers.anyString(org.mockito.Matchers.anyString) LinkedList(java.util.LinkedList) LinkedHashMap(java.util.LinkedHashMap) Response(javax.ws.rs.core.Response) Iterator(java.util.Iterator) Matchers.anyLong(org.mockito.Matchers.anyLong) ContainerFactory(org.json.simple.parser.ContainerFactory) JSONParser(org.json.simple.parser.JSONParser) File(java.io.File) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) MultivaluedMap(javax.ws.rs.core.MultivaluedMap) UriInfo(javax.ws.rs.core.UriInfo) Test(org.junit.Test)
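
The assertions above rely on MetricsEndpoint.TIME_RANGES mapping each time-range label to a dateOffset in seconds. The sketch below only illustrates the kind of mapping being exercised: the keys come from the assertions, but the numeric values are plain second conversions assumed for illustration, not taken from the DDF source.

// Hypothetical shape of MetricsEndpoint.TIME_RANGES (illustrative values only)
private static final Map<String, Long> TIME_RANGES = new LinkedHashMap<>();
static {
    TIME_RANGES.put("15m", 900L);        // 15 minutes
    TIME_RANGES.put("1h", 3600L);        // 1 hour
    TIME_RANGES.put("1d", 86400L);       // 1 day
    TIME_RANGES.put("1w", 604800L);      // 1 week
    TIME_RANGES.put("1M", 2592000L);     // ~1 month (30 days)
    TIME_RANGES.put("3M", 7776000L);     // ~3 months
    TIME_RANGES.put("6M", 15552000L);    // ~6 months
    TIME_RANGES.put("1y", 31536000L);    // 1 year (365 days)
}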

Example 4 with RrdDef

Use of org.rrd4j.core.RrdDef in project ddf by codice.

The class MetricsEndpointTest, method createRrdFile.

private void createRrdFile(int dateOffset, String metricName) throws Exception {
    // Create RRD file that Metrics Endpoint will detect
    rrdPath = TEST_DIR + metricName + ".rrd";
    int rrdStep = 60;
    RrdDef def = new RrdDef(rrdPath, rrdStep);
    long startTime = System.currentTimeMillis() / 1000 - dateOffset;
    def.setStartTime(startTime - rrdStep);
    def.addDatasource("data", DsType.COUNTER, 90, 0, Double.NaN);
    def.addArchive(ConsolFun.TOTAL, 0.5, 1, 5);
    rrdDb = RrdDbPool.getInstance().requestRrdDb(def);
    // Add enough samples so that at least one consolidated sample is stored into the RRD file
    long endTime = startTime;
    Sample sample = rrdDb.createSample();
    sample.setTime(endTime);
    sample.setValue("data", 100);
    sample.update();
    endTime += rrdStep;
    sample.setTime(endTime);
    sample.setValue("data", 200);
    sample.update();
    endTime += rrdStep;
    sample.setTime(endTime);
    sample.setValue("data", 100);
    sample.update();
    endTime += rrdStep;
    LOGGER.debug(rrdDb.dump());
    FetchRequest fetchRequest = rrdDb.createFetchRequest(ConsolFun.TOTAL, startTime, endTime);
    FetchData fetchData = fetchRequest.fetchData();
    LOGGER.debug(fetchData.dump());
    long[] timestamps = fetchData.getTimestamps();
    double[] values = fetchData.getValues(0);
    for (int i = 0; i < timestamps.length; i++) {
        LOGGER.debug("{}:  {}", getCalendarTime(timestamps[i]), values[i]);
    }
    rrdDb.close();
}
Also used : RrdDef(org.rrd4j.core.RrdDef) Sample(org.rrd4j.core.Sample) FetchRequest(org.rrd4j.core.FetchRequest) FetchData(org.rrd4j.core.FetchData)
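
The loop above prints each fetched timestamp through a getCalendarTime(...) helper that is not shown in this example. A hypothetical sketch of such a helper, not the DDF implementation, converting rrd4j's epoch-second timestamps into readable strings:

// Hypothetical helper, for illustration only: rrd4j timestamps are in seconds,
// so multiply by 1000 before handing them to the java.util date/time classes.
private String getCalendarTime(long timestampSeconds) {
    java.util.Calendar calendar = java.util.Calendar.getInstance();
    calendar.setTimeInMillis(timestampSeconds * 1000L);
    return new java.text.SimpleDateFormat("MMM dd yyyy HH:mm:ss", java.util.Locale.US)
            .format(calendar.getTime());
}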

Aggregations

RrdDef (org.rrd4j.core.RrdDef): 4 uses
File (java.io.File): 2 uses
CollectorException (ddf.metrics.collector.CollectorException): 1 use
Iterator (java.util.Iterator): 1 use
LinkedHashMap (java.util.LinkedHashMap): 1 use
LinkedList (java.util.LinkedList): 1 use
Map (java.util.Map): 1 use
Set (java.util.Set): 1 use
MultivaluedMap (javax.ws.rs.core.MultivaluedMap): 1 use
Response (javax.ws.rs.core.Response): 1 use
UriInfo (javax.ws.rs.core.UriInfo): 1 use
ContainerFactory (org.json.simple.parser.ContainerFactory): 1 use
JSONParser (org.json.simple.parser.JSONParser): 1 use
Test (org.junit.Test): 1 use
Matchers.anyLong (org.mockito.Matchers.anyLong): 1 use
Matchers.anyString (org.mockito.Matchers.anyString): 1 use
FetchData (org.rrd4j.core.FetchData): 1 use
FetchRequest (org.rrd4j.core.FetchRequest): 1 use
Sample (org.rrd4j.core.Sample): 1 use