Use of org.apache.geode.internal.statistics.StatArchiveReader.StatValue in project geode by Apache.
The class StatArchiveWriterReaderIntegrationTest, method testLongGaugeOneSample.
@Test
public void testLongGaugeOneSample() throws Exception {
final TestStatisticsManager manager = new TestStatisticsManager(1, getUniqueName(), WRITER_INITIAL_DATE_MILLIS);
final TestStatisticsSampler sampler = new TestStatisticsSampler(manager);
final SampleCollector sampleCollector = new SampleCollector(sampler);
final StatArchiveDescriptor archiveDescriptor = new StatArchiveDescriptor.Builder()
    .setArchiveName(this.archiveFileName)
    .setSystemId(1)
    .setSystemStartTime(WRITER_INITIAL_DATE_MILLIS)
    .setSystemDirectoryPath(this.testName.getMethodName())
    .setProductDescription(getClass().getSimpleName())
    .build();
final StatArchiveWriter writer = new TestStatArchiveWriter(archiveDescriptor);
sampleCollector.addSampleHandler(writer);
final StatisticDescriptor[] statsST1 = new StatisticDescriptor[] { manager.createLongGauge("long_gauge_1", "d1", "u1") };
final StatisticsType ST1 = manager.createType("ST1", "ST1", statsST1);
final Statistics st1_1 = manager.createAtomicStatistics(ST1, "st1_1", 1);
incLong(st1_1, "long_gauge_1", 5);
final long sampleTimeNanos = WRITER_PREVIOUS_TIMESTAMP_NANOS + NANOS_PER_MILLI * 2;
sampleCollector.sample(sampleTimeNanos);
writer.close();
final StatisticDescriptor[] sds = ST1.getStatistics();
for (int i = 0; i < sds.length; i++) {
assertEquals(5L, st1_1.get(sds[i].getName()));
}
final StatArchiveReader reader = new StatArchiveReader(new File[] { new File(this.archiveFileName) }, null, false);
// compare all resourceInst values read back from the archive against the values recorded above
final List resources = reader.getResourceInstList();
assertNotNull(resources);
assertEquals(1, resources.size());
final StatArchiveReader.ResourceInst ri = (StatArchiveReader.ResourceInst) resources.get(0);
assertNotNull(ri);
final String statsName = ri.getName();
assertNotNull(statsName);
assertEquals("st1_1", statsName);
assertEquals("ST1", ri.getType().getName());
final StatValue[] statValues = ri.getStatValues();
assertNotNull(statValues);
assertEquals(1, statValues.length);
final String statName = ri.getType().getStats()[0].getName();
assertNotNull(statName);
assertEquals("long_gauge_1", statName);
assertEquals(statName, statValues[0].getDescriptor().getName());
assertEquals(1, statValues[0].getSnapshotsSize());
assertEquals(5.0, statValues[0].getSnapshotsMostRecent(), 0.01);
final long[] timeStampsMillis = statValues[0].getRawAbsoluteTimeStamps();
assertNotNull(timeStampsMillis);
assertEquals(1, timeStampsMillis.length);
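// The assertions below check that the absolute timestamp read back from the archive equals
// WRITER_INITIAL_DATE_MILLIS plus the elapsed millis between WRITER_PREVIOUS_TIMESTAMP_NANOS
// and the sample time (2 ms in this test).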
final long initialPreviousTimeStampMillis = NanoTimer.nanosToMillis(WRITER_PREVIOUS_TIMESTAMP_NANOS);
final long timeStampMillis = NanoTimer.nanosToMillis(sampleTimeNanos);
final long deltaMillis = timeStampMillis - initialPreviousTimeStampMillis;
assertEquals(2, deltaMillis);
final long expectedTimeStampMillis = deltaMillis + WRITER_INITIAL_DATE_MILLIS;
assertEquals(expectedTimeStampMillis, timeStampsMillis[0]);
final double[] snapshots = statValues[0].getRawSnapshots();
assertNotNull(snapshots);
assertEquals(1, snapshots.length);
assertEquals(5.0, snapshots[0], 0.01);
}
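As a quick reference, here is a minimal read-back sketch distilled from the tests in this listing. The archive file name "stats.gfs" is a placeholder, and the sketch relies only on the StatArchiveReader, ResourceInst and StatValue calls already exercised above.
import java.io.File;
import org.apache.geode.internal.statistics.StatArchiveReader;
import org.apache.geode.internal.statistics.StatArchiveReader.StatValue;

public class ReadMostRecentValues {
  public static void main(String[] args) throws Exception {
    // "stats.gfs" is a placeholder archive path
    StatArchiveReader reader = new StatArchiveReader(new File[] { new File("stats.gfs") }, null, false);
    for (Object obj : reader.getResourceInstList()) {
      StatArchiveReader.ResourceInst ri = (StatArchiveReader.ResourceInst) obj;
      StatValue[] statValues = ri.getStatValues();
      for (StatValue statValue : statValues) {
        // FILTER_NONE exposes the raw snapshots rather than per-second rates
        statValue.setFilter(StatValue.FILTER_NONE);
        System.out.println(ri.getType().getName() + " " + ri.getName() + " "
            + statValue.getDescriptor().getName() + " = " + statValue.getSnapshotsMostRecent());
      }
    }
  }
}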
Use of org.apache.geode.internal.statistics.StatArchiveReader.StatValue in project geode by Apache.
The class StatArchiveWriterReaderIntegrationTest, method testWriteWhenSamplingBegins.
/**
* Tests the stat archive file written by StatArchiveWriter.
*/
@Test
public void testWriteWhenSamplingBegins() throws Exception {
final TestStatisticsManager manager = new TestStatisticsManager(1, getUniqueName(), WRITER_INITIAL_DATE_MILLIS);
final TestStatisticsSampler sampler = new TestStatisticsSampler(manager);
final SampleCollector sampleCollector = new SampleCollector(sampler);
final StatArchiveDescriptor archiveDescriptor = new StatArchiveDescriptor.Builder()
    .setArchiveName(this.archiveFileName)
    .setSystemId(1)
    .setSystemStartTime(WRITER_INITIAL_DATE_MILLIS - 2000)
    .setSystemDirectoryPath(this.testName.getMethodName())
    .setProductDescription(getClass().getSimpleName())
    .build();
final StatArchiveWriter writer = new TestStatArchiveWriter(archiveDescriptor);
sampleCollector.addSampleHandler(writer);
long sampleTimeNanos = WRITER_PREVIOUS_TIMESTAMP_NANOS + NANOS_PER_MILLI * 1000;
// 1) create ST1 and st1_1
final StatisticDescriptor[] statsST1 = new StatisticDescriptor[] {
    manager.createDoubleCounter("double_counter_1", "d1", "u1"),
    manager.createDoubleCounter("double_counter_2", "d2", "u2", true),
    manager.createDoubleGauge("double_gauge_3", "d3", "u3"),
    manager.createDoubleGauge("double_gauge_4", "d4", "u4", false),
    manager.createIntCounter("int_counter_5", "d5", "u5"),
    manager.createIntCounter("int_counter_6", "d6", "u6", true),
    manager.createIntGauge("int_gauge_7", "d7", "u7"),
    manager.createIntGauge("int_gauge_8", "d8", "u8", false),
    manager.createLongCounter("long_counter_9", "d9", "u9"),
    manager.createLongCounter("long_counter_10", "d10", "u10", true),
    manager.createLongGauge("long_gauge_11", "d11", "u11"),
    manager.createLongGauge("long_gauge_12", "d12", "u12", false) };
final StatisticsType ST1 = manager.createType("ST1", "ST1", statsST1);
final Statistics st1_1 = manager.createAtomicStatistics(ST1, "st1_1", 1);
// 2) create st1_2
final Statistics st1_2 = manager.createAtomicStatistics(ST1, "st1_2", 2);
// 3) some new values
incDouble(st1_1, "double_counter_1", 18347.94880);
incDouble(st1_1, "double_gauge_4", 24885.02346);
incInt(st1_1, "int_counter_5", 3);
incInt(st1_1, "int_gauge_8", 4);
incLong(st1_1, "long_counter_9", 1073741824);
incLong(st1_1, "long_gauge_12", 154367897);
incDouble(st1_2, "double_counter_2", 346.95);
incDouble(st1_2, "double_gauge_3", 9865.23008);
incInt(st1_2, "int_counter_6", 4);
incInt(st1_2, "int_gauge_7", 3);
incLong(st1_2, "long_counter_10", 3497536);
incLong(st1_2, "long_gauge_11", 103909646);
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 4) all new values
incDouble(st1_1, "double_counter_1", 1.098367);
incDouble(st1_1, "double_counter_2", 50247.0983254);
incDouble(st1_1, "double_gauge_3", 987654.2344);
incDouble(st1_1, "double_gauge_4", 23.097);
incInt(st1_1, "int_counter_5", 3);
incInt(st1_1, "int_counter_6", 4);
incInt(st1_1, "int_gauge_7", 3);
incInt(st1_1, "int_gauge_8", 4);
incLong(st1_1, "long_counter_9", 5);
incLong(st1_1, "long_counter_10", 465793);
incLong(st1_1, "long_gauge_11", -203050);
incLong(st1_1, "long_gauge_12", 6);
incDouble(st1_2, "double_counter_1", 0.000846643);
incDouble(st1_2, "double_counter_2", 4.0);
incDouble(st1_2, "double_gauge_3", -4.0);
incDouble(st1_2, "double_gauge_4", 19276.0346624);
incInt(st1_2, "int_counter_5", 1);
incInt(st1_2, "int_counter_6", 2);
incInt(st1_2, "int_gauge_7", -1);
incInt(st1_2, "int_gauge_8", -2);
incLong(st1_2, "long_counter_9", 309876);
incLong(st1_2, "long_counter_10", 4);
incLong(st1_2, "long_gauge_11", -4);
incLong(st1_2, "long_gauge_12", 1098764);
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 5) no new values
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 6) some new values
incDouble(st1_1, "double_counter_2", 10.255);
incDouble(st1_1, "double_gauge_3", -4123.05);
incInt(st1_1, "int_counter_6", 2);
incInt(st1_1, "int_gauge_7", 3);
incLong(st1_1, "long_counter_10", 4);
incLong(st1_1, "long_gauge_11", -2);
incDouble(st1_2, "double_counter_1", 5.00007634);
incDouble(st1_2, "double_gauge_4", 16904.06524);
incInt(st1_2, "int_counter_5", 4);
incInt(st1_2, "int_gauge_8", 1);
incLong(st1_2, "long_counter_9", 8);
incLong(st1_2, "long_gauge_12", 10);
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 7) all new values
incDouble(st1_1, "double_counter_1", 4065.340);
incDouble(st1_1, "double_counter_2", 2.01342568);
incDouble(st1_1, "double_gauge_3", 1.367890);
incDouble(st1_1, "double_gauge_4", 8.0549003);
incInt(st1_1, "int_counter_5", 2);
incInt(st1_1, "int_counter_6", 9);
incInt(st1_1, "int_gauge_7", 1);
incInt(st1_1, "int_gauge_8", 2);
incLong(st1_1, "long_counter_9", 6);
incLong(st1_1, "long_counter_10", 2);
incLong(st1_1, "long_gauge_11", -10);
incLong(st1_1, "long_gauge_12", 8);
incDouble(st1_2, "double_counter_1", 128.2450);
incDouble(st1_2, "double_counter_2", 113.550);
incDouble(st1_2, "double_gauge_3", 21.0676);
incDouble(st1_2, "double_gauge_4", 2.01346);
incInt(st1_2, "int_counter_5", 3);
incInt(st1_2, "int_counter_6", 4);
incInt(st1_2, "int_gauge_7", 4);
incInt(st1_2, "int_gauge_8", 2);
incLong(st1_2, "long_counter_9", 1);
incLong(st1_2, "long_counter_10", 2);
incLong(st1_2, "long_gauge_11", 3);
incLong(st1_2, "long_gauge_12", -2);
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 8) create ST2 and ST3 and st2_1 and st3_1 and st3_2
final StatisticDescriptor[] statsST2 = new StatisticDescriptor[] {
    manager.createIntGauge("int_gauge_7", "d7", "u7"),
    manager.createIntGauge("int_gauge_8", "d8", "u8", false),
    manager.createLongCounter("long_counter_9", "d9", "u9"),
    manager.createLongCounter("long_counter_10", "d10", "u10", true),
    manager.createLongGauge("long_gauge_11", "d11", "u11"),
    manager.createLongGauge("long_gauge_12", "d12", "u12", false) };
final StatisticsType ST2 = manager.createType("ST2", "ST2", statsST2);
final Statistics st2_1 = manager.createAtomicStatistics(ST2, "st2_1", 1);
final StatisticDescriptor[] statsST3 = new StatisticDescriptor[] {
    manager.createDoubleCounter("double_counter_1", "d1", "u1"),
    manager.createDoubleCounter("double_counter_2", "d2", "u2", true),
    manager.createDoubleGauge("double_gauge_3", "d3", "u3"),
    manager.createDoubleGauge("double_gauge_4", "d4", "u4", false),
    manager.createIntCounter("int_counter_5", "d5", "u5"),
    manager.createIntCounter("int_counter_6", "d6", "u6", true) };
final StatisticsType ST3 = manager.createType("ST3", "ST3", statsST3);
final Statistics st3_1 = manager.createAtomicStatistics(ST3, "st3_1", 1);
final Statistics st3_2 = manager.createAtomicStatistics(ST3, "st3_2", 2);
// 9) all new values
incDouble(st1_1, "double_counter_1", 9499.10);
incDouble(st1_1, "double_counter_2", 83.0);
incDouble(st1_1, "double_gauge_3", -7.05678);
incDouble(st1_1, "double_gauge_4", 5111.031);
incInt(st1_1, "int_counter_5", 1);
incInt(st1_1, "int_counter_6", 3);
incInt(st1_1, "int_gauge_7", 9);
incInt(st1_1, "int_gauge_8", -3);
incLong(st1_1, "long_counter_9", 3);
incLong(st1_1, "long_counter_10", 8);
incLong(st1_1, "long_gauge_11", 5);
incLong(st1_1, "long_gauge_12", 4);
incDouble(st1_2, "double_counter_1", 2509.0235);
incDouble(st1_2, "double_counter_2", 409.10063);
incDouble(st1_2, "double_gauge_3", -42.66904);
incDouble(st1_2, "double_gauge_4", 21.0098);
incInt(st1_2, "int_counter_5", 8);
incInt(st1_2, "int_counter_6", 9);
incInt(st1_2, "int_gauge_7", -2);
incInt(st1_2, "int_gauge_8", 6);
incLong(st1_2, "long_counter_9", 4);
incLong(st1_2, "long_counter_10", 5);
incLong(st1_2, "long_gauge_11", 5);
incLong(st1_2, "long_gauge_12", -1);
incInt(st2_1, "int_gauge_7", 2);
incInt(st2_1, "int_gauge_8", -1);
incLong(st2_1, "long_counter_9", 1002948);
incLong(st2_1, "long_counter_10", 29038856);
incLong(st2_1, "long_gauge_11", -2947465);
incLong(st2_1, "long_gauge_12", 4934745);
incDouble(st3_1, "double_counter_1", 562.0458);
incDouble(st3_1, "double_counter_2", 14.0086);
incDouble(st3_1, "double_gauge_3", -2.0);
incDouble(st3_1, "double_gauge_4", 1.0);
incInt(st3_1, "int_counter_5", 2);
incInt(st3_1, "int_counter_6", 1);
incDouble(st3_2, "double_counter_1", 33.087);
incDouble(st3_2, "double_counter_2", 2.02);
incDouble(st3_2, "double_gauge_3", 1.06);
incDouble(st3_2, "double_gauge_4", 3.021);
incInt(st3_2, "int_counter_5", 1);
incInt(st3_2, "int_counter_6", 4);
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 10) some new values
incDouble(st1_1, "double_counter_1", 3.014);
incDouble(st1_1, "double_gauge_3", 57.003);
incInt(st1_1, "int_counter_5", 3);
incInt(st1_1, "int_gauge_7", 5);
incLong(st1_1, "long_counter_9", 1);
incLong(st1_1, "long_gauge_11", 1);
incDouble(st1_2, "double_counter_2", 20.107);
incDouble(st1_2, "double_gauge_4", 1.5078);
incInt(st1_2, "int_counter_6", 1);
incInt(st1_2, "int_gauge_8", -1);
incLong(st1_2, "long_counter_10", 1073741824);
incLong(st1_2, "long_gauge_12", 5);
incInt(st2_1, "int_gauge_7", 2);
incLong(st2_1, "long_counter_9", 2);
incLong(st2_1, "long_gauge_11", -2);
incDouble(st3_1, "double_counter_1", 24.80097);
incDouble(st3_1, "double_gauge_3", -22.09834);
incInt(st3_1, "int_counter_5", 2);
incDouble(st3_2, "double_counter_2", 21.0483);
incDouble(st3_2, "double_gauge_4", 36310.012);
incInt(st3_2, "int_counter_6", 4);
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 11) remove ST2 and st2_1
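// destroying st2_1 while sampling is still running exercises the writer's handling of removed resource instances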
manager.destroyStatistics(st2_1);
// 12) some new values
incDouble(st1_1, "double_counter_1", 339.0803);
incDouble(st1_1, "double_counter_2", 21.06);
incDouble(st1_1, "double_gauge_3", 12.056);
incDouble(st1_1, "double_gauge_4", 27.108);
incInt(st1_1, "int_counter_5", 2);
incInt(st1_1, "int_counter_6", 4);
incInt(st1_2, "int_gauge_7", 4);
incInt(st1_2, "int_gauge_8", 7);
incLong(st1_2, "long_counter_9", 8);
incLong(st1_2, "long_counter_10", 4);
incLong(st1_2, "long_gauge_11", 2);
incLong(st1_2, "long_gauge_12", 1);
incDouble(st3_1, "double_counter_1", 41.103);
incDouble(st3_1, "double_counter_2", 2.0333);
incDouble(st3_1, "double_gauge_3", -14.0);
incDouble(st3_2, "double_gauge_4", 26.01);
incInt(st3_2, "int_counter_5", 3);
incInt(st3_2, "int_counter_6", 1);
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 13) no new values
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// 14) remove st1_2
manager.destroyStatistics(st1_2);
// 15) all new values
incDouble(st1_1, "double_counter_1", 62.1350);
incDouble(st1_1, "double_counter_2", 33.306);
incDouble(st1_1, "double_gauge_3", 41.1340);
incDouble(st1_1, "double_gauge_4", -1.04321);
incInt(st1_1, "int_counter_5", 2);
incInt(st1_1, "int_counter_6", 2);
incInt(st1_1, "int_gauge_7", 1);
incInt(st1_1, "int_gauge_8", 9);
incLong(st1_1, "long_counter_9", 2);
incLong(st1_1, "long_counter_10", 5);
incLong(st1_1, "long_gauge_11", 3);
incLong(st1_1, "long_gauge_12", -2);
incDouble(st3_1, "double_counter_1", 3461.0153);
incDouble(st3_1, "double_counter_2", 5.03167);
incDouble(st3_1, "double_gauge_3", -1.31051);
incDouble(st3_1, "double_gauge_4", 71.031);
incInt(st3_1, "int_counter_5", 4);
incInt(st3_1, "int_counter_6", 2);
incDouble(st3_2, "double_counter_1", 531.5608);
incDouble(st3_2, "double_counter_2", 55.0532);
incDouble(st3_2, "double_gauge_3", 8.40956);
incDouble(st3_2, "double_gauge_4", 23230.0462);
incInt(st3_2, "int_counter_5", 9);
incInt(st3_2, "int_counter_6", 5);
sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
// close the writer
writer.close();
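// The disabled block below is debug-only; when enabled it logs the final in-memory stat values
// for manual comparison with the archive contents.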
if (false) {
StatisticDescriptor[] sds = ST1.getStatistics();
for (int i = 0; i < sds.length; i++) {
logger.info("testWriteAfterSamplingBegins#st1_1#" + sds[i].getName() + "=" + st1_1.get(sds[i].getName()));
}
for (int i = 0; i < sds.length; i++) {
logger.info("testWriteAfterSamplingBegins#st1_2#" + sds[i].getName() + "=" + st1_2.get(sds[i].getName()));
}
sds = ST2.getStatistics();
for (int i = 0; i < sds.length; i++) {
logger.info("testWriteAfterSamplingBegins#st2_1#" + sds[i].getName() + "=" + st2_1.get(sds[i].getName()));
}
sds = ST3.getStatistics();
for (int i = 0; i < sds.length; i++) {
logger.info("testWriteAfterSamplingBegins#st3_1#" + sds[i].getName() + "=" + st3_1.get(sds[i].getName()));
}
for (int i = 0; i < sds.length; i++) {
logger.info("testWriteAfterSamplingBegins#st3_2#" + sds[i].getName() + "=" + st3_2.get(sds[i].getName()));
}
}
// validate that stat archive file exists
final File actual = new File(this.archiveFileName);
assertTrue(actual.exists());
// validate content of stat archive file using StatArchiveReader
final StatArchiveReader reader = new StatArchiveReader(new File[] { actual }, null, false);
// compare all resourceInst values against what was printed above
final List resources = reader.getResourceInstList();
for (final Iterator iter = resources.iterator(); iter.hasNext(); ) {
final StatArchiveReader.ResourceInst ri = (StatArchiveReader.ResourceInst) iter.next();
final String resourceName = ri.getName();
assertNotNull(resourceName);
final String expectedStatsType = this.statisticTypes.get(resourceName);
assertNotNull(expectedStatsType);
assertEquals(expectedStatsType, ri.getType().getName());
final Map<String, Number> expectedStatValues = this.allStatistics.get(resourceName);
assertNotNull(expectedStatValues);
final StatValue[] statValues = ri.getStatValues();
for (int i = 0; i < statValues.length; i++) {
final String statName = ri.getType().getStats()[i].getName();
assertNotNull(statName);
assertNotNull(expectedStatValues.get(statName));
assertEquals(statName, statValues[i].getDescriptor().getName());
statValues[i].setFilter(StatValue.FILTER_NONE);
final double[] rawSnapshots = statValues[i].getRawSnapshots();
// for (int j = 0; j < rawSnapshots.length; j++) {
// logger.info("DEBUG " + ri.getName() + " " + statName + " rawSnapshots[" + j + "] = " +
// rawSnapshots[j]);
// }
assertEquals("Value " + i + " for " + statName + " is wrong: " + expectedStatValues, expectedStatValues.get(statName).doubleValue(), statValues[i].getSnapshotsMostRecent(), 0.01);
}
}
// validate byte content of stat archive file against saved expected file
final File expected = new File(TestUtil.getResourcePath(getClass(), "StatArchiveWriterReaderJUnitTest_" + this.testName.getMethodName() + "_expected.gfs"));
assertTrue(expected + " does not exist!", expected.exists());
assertEquals(expected.length(), actual.length());
assertTrue("Actual stat archive file bytes differ from expected stat archive file bytes!", Arrays.equals(readBytes(expected), readBytes(actual)));
}
Use of org.apache.geode.internal.statistics.StatArchiveReader.StatValue in project geode by Apache.
The class StatArchiveWriterReaderIntegrationTest, method testLongCounterOneSample.
@Test
public void testLongCounterOneSample() throws Exception {
final TestStatisticsManager manager = new TestStatisticsManager(1, getUniqueName(), WRITER_INITIAL_DATE_MILLIS);
final TestStatisticsSampler sampler = new TestStatisticsSampler(manager);
final SampleCollector sampleCollector = new SampleCollector(sampler);
final StatArchiveDescriptor archiveDescriptor = new StatArchiveDescriptor.Builder()
    .setArchiveName(this.archiveFileName)
    .setSystemId(1)
    .setSystemStartTime(WRITER_INITIAL_DATE_MILLIS)
    .setSystemDirectoryPath(this.testName.getMethodName())
    .setProductDescription(getClass().getSimpleName())
    .build();
final StatArchiveWriter writer = new TestStatArchiveWriter(archiveDescriptor);
sampleCollector.addSampleHandler(writer);
final StatisticDescriptor[] statsST1 = new StatisticDescriptor[] { manager.createLongCounter("long_counter_1", "d1", "u1") };
final StatisticsType ST1 = manager.createType("ST1", "ST1", statsST1);
final Statistics st1_1 = manager.createAtomicStatistics(ST1, "st1_1", 1);
final long value = 5;
incLong(st1_1, "long_counter_1", value);
final long sampleIncNanos = NANOS_PER_MILLI * 1000;
final long sampleTimeNanos = WRITER_PREVIOUS_TIMESTAMP_NANOS + sampleIncNanos;
sampleCollector.sample(sampleTimeNanos);
writer.close();
final StatisticDescriptor[] sds = ST1.getStatistics();
for (int i = 0; i < sds.length; i++) {
assertEquals(value, st1_1.get(sds[i].getName()));
}
final StatArchiveReader reader = new StatArchiveReader(new File[] { new File(this.archiveFileName) }, null, false);
// compare all resourceInst values read back from the archive against the values recorded above
final List resources = reader.getResourceInstList();
assertNotNull(resources);
assertEquals(1, resources.size());
final StatArchiveReader.ResourceInst ri = (StatArchiveReader.ResourceInst) resources.get(0);
assertNotNull(ri);
final String statsName = ri.getName();
assertNotNull(statsName);
assertEquals("st1_1", statsName);
assertEquals("ST1", ri.getType().getName());
final StatValue[] statValues = ri.getStatValues();
assertNotNull(statValues);
assertEquals(1, statValues.length);
final String statName = ri.getType().getStats()[0].getName();
assertNotNull(statName);
assertEquals("long_counter_1", statName);
assertEquals(statName, statValues[0].getDescriptor().getName());
assertEquals(1, statValues[0].getSnapshotsSize());
assertEquals((double) value, statValues[0].getSnapshotsMostRecent(), 0.01);
final long[] timeStampsMillis = statValues[0].getRawAbsoluteTimeStamps();
assertNotNull(timeStampsMillis);
assertEquals(1, timeStampsMillis.length);
final long initialPreviousTimeStampMillis = NanoTimer.nanosToMillis(WRITER_PREVIOUS_TIMESTAMP_NANOS);
final long timeStampMillis = NanoTimer.nanosToMillis(sampleTimeNanos);
final long deltaMillis = timeStampMillis - initialPreviousTimeStampMillis;
assertEquals(NanoTimer.nanosToMillis(sampleIncNanos), deltaMillis);
final long expectedTimeStampMillis = deltaMillis + WRITER_INITIAL_DATE_MILLIS;
assertEquals(expectedTimeStampMillis, timeStampsMillis[0]);
final double[] snapshots = statValues[0].getRawSnapshots();
assertNotNull(snapshots);
assertEquals(1, snapshots.length);
assertEquals((double) value, snapshots[0], 0.01);
}
Use of org.apache.geode.internal.statistics.StatArchiveReader.StatValue in project geode by Apache.
The class StatUtils, method addResourceInstsToSet.
private static void addResourceInstsToSet(final File archiveFile, final String specString, final Set<ResourceInst> resourceInsts) throws IOException {
StatSpec statSpec = new StatSpec(specString);
StatArchiveReader reader = new StatArchiveReader(new File[] { archiveFile }, new StatSpec[] { statSpec }, true);
StatValue[] statValues = reader.matchSpec(statSpec);
for (StatValue statValue : statValues) {
for (ResourceInst resourceInst : statValue.getResources()) {
resourceInsts.add(resourceInst);
}
}
}
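A hedged sketch of how this helper might be driven follows (IOException handling omitted). The archive file names and the spec string are placeholders, since the StatSpec grammar is not shown in this listing, and nothing beyond the calls used above is assumed.
// Hypothetical caller: collect every matching ResourceInst from a couple of archives.
// "member1.gfs", "member2.gfs" and "<spec-string>" are made-up placeholders.
Set<ResourceInst> resourceInsts = new HashSet<>();
for (File archiveFile : new File[] { new File("member1.gfs"), new File("member2.gfs") }) {
  addResourceInstsToSet(archiveFile, "<spec-string>", resourceInsts);
}
// resourceInsts now holds every resource instance whose stats matched the spec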
Use of org.apache.geode.internal.statistics.StatArchiveReader.StatValue in project geode by Apache.
The class StatisticsDistributedTest, method testPubAndSubCustomStats.
@Test
public void testPubAndSubCustomStats() throws Exception {
String regionName = "region_" + getName();
VM[] pubs = new VM[NUM_PUBS];
for (int pubVM = 0; pubVM < NUM_PUBS; pubVM++) {
pubs[pubVM] = getHost(0).getVM(pubVM);
}
VM sub = getHost(0).getVM(NUM_PUBS);
String subArchive = this.directory.getAbsolutePath() + File.separator + getName() + "_sub" + ".gfs";
String[] pubArchives = new String[NUM_PUBS];
for (int pubVM = 0; pubVM < NUM_PUBS; pubVM++) {
pubArchives[pubVM] = this.directory.getAbsolutePath() + File.separator + getName() + "_pub-" + pubVM + ".gfs";
}
for (int i = 0; i < NUM_PUBS; i++) {
final int pubVM = i;
pubs[pubVM].invoke("pub-connect-and-create-data-" + pubVM, () -> {
Properties props = new Properties();
props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");
props.setProperty(STATISTIC_SAMPLE_RATE, "1000");
props.setProperty(STATISTIC_ARCHIVE_FILE, pubArchives[pubVM]);
InternalDistributedSystem system = getSystem(props);
// assert that sampler is working as expected
GemFireStatSampler sampler = system.getStatSampler();
assertTrue(sampler.isSamplingEnabled());
assertTrue(sampler.isAlive());
assertEquals(new File(pubArchives[pubVM]), sampler.getArchiveFileName());
await("awaiting SampleCollector to exist").atMost(30, SECONDS).until(() -> sampler.getSampleCollector() != null);
SampleCollector sampleCollector = sampler.getSampleCollector();
assertNotNull(sampleCollector);
StatArchiveHandler archiveHandler = sampleCollector.getStatArchiveHandler();
assertNotNull(archiveHandler);
assertTrue(archiveHandler.isArchiving());
// create cache and region
Cache cache = getCache();
RegionFactory<String, Number> factory = cache.createRegionFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
RegionMembershipListener rml = new RegionMembershipListener();
rmlRef.set(rml);
factory.addCacheListener(rml);
Region<String, Number> region = factory.create(regionName);
// create the keys
if (region.getAttributes().getScope() == Scope.DISTRIBUTED_ACK) {
for (int key = 0; key < NUM_KEYS; key++) {
region.create("KEY-" + key, null);
}
}
});
}
DistributedMember subMember = sub.invoke("sub-connect-and-create-keys", () -> {
Properties props = new Properties();
props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");
props.setProperty(STATISTIC_SAMPLE_RATE, "1000");
props.setProperty(STATISTIC_ARCHIVE_FILE, subArchive);
InternalDistributedSystem system = getSystem(props);
PubSubStats statistics = new PubSubStats(system, "sub-1", 1);
subStatsRef.set(statistics);
// assert that sampler is working as expected
GemFireStatSampler sampler = system.getStatSampler();
assertTrue(sampler.isSamplingEnabled());
assertTrue(sampler.isAlive());
assertEquals(new File(subArchive), sampler.getArchiveFileName());
await("awaiting SampleCollector to exist").atMost(30, SECONDS).until(() -> sampler.getSampleCollector() != null);
SampleCollector sampleCollector = sampler.getSampleCollector();
assertNotNull(sampleCollector);
StatArchiveHandler archiveHandler = sampleCollector.getStatArchiveHandler();
assertNotNull(archiveHandler);
assertTrue(archiveHandler.isArchiving());
// create cache and region with UpdateListener
Cache cache = getCache();
RegionFactory<String, Number> factory = cache.createRegionFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
CacheListener<String, Number> cl = new UpdateListener(statistics);
factory.addCacheListener(cl);
Region<String, Number> region = factory.create(regionName);
// create the keys
if (region.getAttributes().getScope() == Scope.DISTRIBUTED_ACK) {
for (int key = 0; key < NUM_KEYS; key++) {
region.create("KEY-" + key, null);
}
}
assertEquals(0, statistics.getUpdateEvents());
return system.getDistributedMember();
});
for (int i = 0; i < NUM_PUBS; i++) {
final int pubVM = i;
AsyncInvocation[] publishers = new AsyncInvocation[NUM_PUB_THREADS];
for (int j = 0; j < NUM_PUB_THREADS; j++) {
final int pubThread = j;
publishers[pubThread] = pubs[pubVM].invokeAsync("pub-connect-and-put-data-" + pubVM + "-thread-" + pubThread, () -> {
PubSubStats statistics = new PubSubStats(basicGetSystem(), "pub-" + pubThread, pubVM);
pubStatsRef.set(pubThread, statistics);
RegionMembershipListener rml = rmlRef.get();
Region<String, Number> region = getCache().getRegion(regionName);
// assert that sub is in rml membership
assertNotNull(rml);
await("awaiting Membership to contain subMember").atMost(30, SECONDS).until(() -> rml.contains(subMember) && rml.size() == NUM_PUBS);
// publish lots of puts cycling through the NUM_KEYS
assertEquals(0, statistics.getPuts());
// cycle through the keys randomly
if (RANDOMIZE_PUTS) {
Random randomGenerator = new Random();
int key = 0;
for (int idx = 0; idx < MAX_PUTS; idx++) {
long start = statistics.startPut();
key = randomGenerator.nextInt(NUM_KEYS);
region.put("KEY-" + key, idx);
statistics.endPut(start);
}
// cycle through the keys in order, wrapping back around
} else {
int key = 0;
for (int idx = 0; idx < MAX_PUTS; idx++) {
long start = statistics.startPut();
region.put("KEY-" + key, idx);
// cycle through the keys...
key++;
if (key >= NUM_KEYS) {
key = 0;
}
statistics.endPut(start);
}
}
assertEquals(MAX_PUTS, statistics.getPuts());
// wait for 2 samples to ensure all stats have been archived
StatisticsType statSamplerType = getSystem().findType("StatSampler");
Statistics[] statsArray = getSystem().findStatisticsByType(statSamplerType);
assertEquals(1, statsArray.length);
Statistics statSamplerStats = statsArray[0];
int initialSampleCount = statSamplerStats.getInt(StatSamplerStats.SAMPLE_COUNT);
await("awaiting sampleCount >= 2").atMost(30, SECONDS).until(() -> statSamplerStats.getInt(StatSamplerStats.SAMPLE_COUNT) >= initialSampleCount + 2);
});
}
for (int pubThread = 0; pubThread < publishers.length; pubThread++) {
publishers[pubThread].join();
if (publishers[pubThread].exceptionOccurred()) {
fail("Test failed", publishers[pubThread].getException());
}
}
}
sub.invoke("sub-wait-for-samples", () -> {
// wait for 2 samples to ensure all stats have been archived
StatisticsType statSamplerType = getSystem().findType("StatSampler");
Statistics[] statsArray = getSystem().findStatisticsByType(statSamplerType);
assertEquals(1, statsArray.length);
Statistics statSamplerStats = statsArray[0];
int initialSampleCount = statSamplerStats.getInt(StatSamplerStats.SAMPLE_COUNT);
await("awaiting sampleCount >= 2").atMost(30, SECONDS).until(() -> statSamplerStats.getInt(StatSamplerStats.SAMPLE_COUNT) >= initialSampleCount + 2);
// now post total updateEvents to static
PubSubStats statistics = subStatsRef.get();
assertNotNull(statistics);
updateEvents.set(statistics.getUpdateEvents());
});
// validate pub values against sub values
int totalUpdateEvents = sub.invoke(() -> getUpdateEvents());
// validate each pub's in-memory statistics against its own archive file
for (int i = 0; i < NUM_PUBS; i++) {
final int pubIdx = i;
pubs[pubIdx].invoke("pub-validation", () -> {
// add up all the puts
assertEquals(NUM_PUB_THREADS, pubStatsRef.length());
int totalPuts = 0;
for (int pubThreadIdx = 0; pubThreadIdx < NUM_PUB_THREADS; pubThreadIdx++) {
PubSubStats statistics = pubStatsRef.get(pubThreadIdx);
assertNotNull(statistics);
totalPuts += statistics.getPuts();
}
// assert that total puts adds up to max puts times num threads
assertEquals(MAX_PUTS * NUM_PUB_THREADS, totalPuts);
// assert that archive file contains same values as statistics
File archive = new File(pubArchives[pubIdx]);
assertTrue(archive.exists());
StatArchiveReader reader = new StatArchiveReader(new File[] { archive }, null, false);
double combinedPuts = 0;
List resources = reader.getResourceInstList();
assertNotNull(resources);
assertFalse(resources.isEmpty());
for (Iterator<ResourceInst> iter = resources.iterator(); iter.hasNext(); ) {
ResourceInst ri = iter.next();
if (!ri.getType().getName().equals(PubSubStats.TYPE_NAME)) {
continue;
}
StatValue[] statValues = ri.getStatValues();
for (int idx = 0; idx < statValues.length; idx++) {
String statName = ri.getType().getStats()[idx].getName();
assertNotNull(statName);
if (statName.equals(PubSubStats.PUTS)) {
StatValue sv = statValues[idx];
sv.setFilter(StatValue.FILTER_NONE);
double mostRecent = sv.getSnapshotsMostRecent();
double min = sv.getSnapshotsMinimum();
double max = sv.getSnapshotsMaximum();
double maxMinusMin = sv.getSnapshotsMaximum() - sv.getSnapshotsMinimum();
double mean = sv.getSnapshotsAverage();
double stdDev = sv.getSnapshotsStandardDeviation();
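// puts only ever increases, so the most recent snapshot must equal the maximum,
// and the reported average must equal the arithmetic mean of the raw snapshots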
assertEquals(mostRecent, max, 0f);
double summation = 0;
double[] rawSnapshots = sv.getRawSnapshots();
for (int j = 0; j < rawSnapshots.length; j++) {
summation += rawSnapshots[j];
}
assertEquals(mean, summation / sv.getSnapshotsSize(), 0);
combinedPuts += mostRecent;
}
}
}
// assert that sum of mostRecent values for all puts equals totalPuts
assertEquals((double) totalPuts, combinedPuts, 0);
puts.getAndAdd(totalPuts);
});
}
// validate pub values against sub values
int totalCombinedPuts = 0;
for (int i = 0; i < NUM_PUBS; i++) {
int pubIdx = i;
int totalPuts = pubs[pubIdx].invoke(() -> getPuts());
assertEquals(MAX_PUTS * NUM_PUB_THREADS, totalPuts);
totalCombinedPuts += totalPuts;
}
assertEquals(totalCombinedPuts, totalUpdateEvents);
assertEquals(MAX_PUTS * NUM_PUB_THREADS * NUM_PUBS, totalCombinedPuts);
// validate sub values against sub statistics against sub archive
final int totalPuts = totalCombinedPuts;
sub.invoke("sub-validation", () -> {
PubSubStats statistics = subStatsRef.get();
assertNotNull(statistics);
int updateEvents = statistics.getUpdateEvents();
assertEquals(totalPuts, updateEvents);
assertEquals(totalUpdateEvents, updateEvents);
assertEquals(MAX_PUTS * NUM_PUB_THREADS * NUM_PUBS, updateEvents);
// assert that archive file contains same values as statistics
File archive = new File(subArchive);
assertTrue(archive.exists());
StatArchiveReader reader = new StatArchiveReader(new File[] { archive }, null, false);
double combinedUpdateEvents = 0;
List resources = reader.getResourceInstList();
for (Iterator<ResourceInst> iter = resources.iterator(); iter.hasNext(); ) {
ResourceInst ri = iter.next();
if (!ri.getType().getName().equals(PubSubStats.TYPE_NAME)) {
continue;
}
StatValue[] statValues = ri.getStatValues();
for (int i = 0; i < statValues.length; i++) {
String statName = ri.getType().getStats()[i].getName();
assertNotNull(statName);
if (statName.equals(PubSubStats.UPDATE_EVENTS)) {
StatValue sv = statValues[i];
sv.setFilter(StatValue.FILTER_NONE);
double mostRecent = sv.getSnapshotsMostRecent();
double min = sv.getSnapshotsMinimum();
double max = sv.getSnapshotsMaximum();
double maxMinusMin = sv.getSnapshotsMaximum() - sv.getSnapshotsMinimum();
double mean = sv.getSnapshotsAverage();
double stdDev = sv.getSnapshotsStandardDeviation();
assertEquals(mostRecent, max, 0);
double summation = 0;
double[] rawSnapshots = sv.getRawSnapshots();
for (int j = 0; j < rawSnapshots.length; j++) {
summation += rawSnapshots[j];
}
assertEquals(mean, summation / sv.getSnapshotsSize(), 0);
combinedUpdateEvents += mostRecent;
}
}
}
assertEquals((double) totalUpdateEvents, combinedUpdateEvents, 0);
});
int updateEvents = sub.invoke(() -> readIntStat(new File(subArchive), "PubSubStats", "updateEvents"));
assertTrue(updateEvents > 0);
assertEquals(MAX_PUTS * NUM_PUB_THREADS * NUM_PUBS, updateEvents);
int puts = 0;
for (int pubVM = 0; pubVM < NUM_PUBS; pubVM++) {
int currentPubVM = pubVM;
int vmPuts = pubs[pubVM].invoke(() -> readIntStat(new File(pubArchives[currentPubVM]), "PubSubStats", "puts"));
assertTrue(vmPuts > 0);
assertEquals(MAX_PUTS * NUM_PUB_THREADS, vmPuts);
puts += vmPuts;
}
assertTrue(puts > 0);
assertEquals(MAX_PUTS * NUM_PUB_THREADS * NUM_PUBS, puts);
// read all archives in the directory whose names match this test method name, e.g. ".*testPubAndSubCustomStats.*\.gfs"
MultipleArchiveReader reader = new MultipleArchiveReader(this.directory, ".*" + getTestMethodName() + ".*\\.gfs");
int combinedUpdateEvents = reader.readIntStat(PubSubStats.TYPE_NAME, PubSubStats.UPDATE_EVENTS);
assertTrue("Failed to read updateEvents stat values", combinedUpdateEvents > 0);
int combinedPuts = reader.readIntStat(PubSubStats.TYPE_NAME, PubSubStats.PUTS);
assertTrue("Failed to read puts stat values", combinedPuts > 0);
assertTrue("updateEvents is " + combinedUpdateEvents + " but puts is " + combinedPuts, combinedUpdateEvents == combinedPuts);
}