Search in sources:

Example 6 with MetricsRecordBuilder

Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.

From the class TestBatchIbr, method logIbrCounts.

/**
 * Logs the IncrementalBlockReportsNumOps counter for each supplied DataNode.
 * One INFO line is emitted per node, read from that node's metrics record.
 */
static void logIbrCounts(List<DataNode> datanodes) {
    final String metricName = "IncrementalBlockReportsNumOps";
    for (final DataNode node : datanodes) {
        final MetricsRecordBuilder record =
                MetricsAsserts.getMetrics(node.getMetrics().name());
        final long opCount = MetricsAsserts.getLongCounter(metricName, record);
        LOG.info(node.getDisplayName() + ": " + metricName + "=" + opCount);
    }
}
Also used : MetricsRecordBuilder(org.apache.hadoop.metrics2.MetricsRecordBuilder)

Example 7 with MetricsRecordBuilder

Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.

From the class TestFSNamesystemLock, method testDetailedHoldMetrics.

/**
 * Verifies that per-operation ("detailed") lock-hold metrics are recorded
 * against the operation name passed to readUnlock/writeUnlock, and that the
 * aggregated averages and op counts come out as expected.
 */
@Test
public void testDetailedHoldMetrics() throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_LOCK_DETAILED_METRICS_KEY, true);
    FakeTimer fakeTimer = new FakeTimer();
    MetricsRegistry metricsRegistry = new MetricsRegistry("Test");
    MutableRatesWithAggregation aggregatedRates =
            metricsRegistry.newRatesWithAggregation("Test");
    FSNamesystemLock lock = new FSNamesystemLock(conf, aggregatedRates, fakeTimer);

    // Two "foo" read holds of 1 and 2 time units: expect avg 1.5 over 2 ops.
    lock.readLock();
    fakeTimer.advance(1);
    lock.readUnlock("foo");
    lock.readLock();
    fakeTimer.advance(2);
    lock.readUnlock("foo");

    // Nested "bar" read holds: the overall hold spans 2 time units and is
    // expected to be reported as a single op with avg 2.0.
    lock.readLock();
    fakeTimer.advance(1);
    lock.readLock();
    fakeTimer.advance(1);
    lock.readUnlock("bar");
    lock.readUnlock("bar");

    // One "baz" write hold of 1 time unit: avg 1.0 over 1 op.
    lock.writeLock();
    fakeTimer.advance(1);
    lock.writeUnlock("baz");

    MetricsRecordBuilder snapshot = MetricsAsserts.mockMetricsRecordBuilder();
    aggregatedRates.snapshot(snapshot, true);
    assertGauge("FSNReadLockFooAvgTime", 1.5, snapshot);
    assertCounter("FSNReadLockFooNumOps", 2L, snapshot);
    assertGauge("FSNReadLockBarAvgTime", 2.0, snapshot);
    assertCounter("FSNReadLockBarNumOps", 1L, snapshot);
    assertGauge("FSNWriteLockBazAvgTime", 1.0, snapshot);
    assertCounter("FSNWriteLockBazNumOps", 1L, snapshot);
}
Also used : MutableRatesWithAggregation(org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation) MetricsRegistry(org.apache.hadoop.metrics2.lib.MetricsRegistry) Configuration(org.apache.hadoop.conf.Configuration) FakeTimer(org.apache.hadoop.util.FakeTimer) MetricsRecordBuilder(org.apache.hadoop.metrics2.MetricsRecordBuilder) Test(org.junit.Test)

Example 8 with MetricsRecordBuilder

Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.

From the class TestNameNodeMetrics, method testCapacityMetrics.

/**
 * Test that capacity metrics are exported and pass
 * basic sanity tests.
 */
@Test(timeout = 1800)
public void testCapacityMetrics() throws Exception {
    MetricsRecordBuilder rb = getMetrics(NS_METRICS);
    long capacityTotal = MetricsAsserts.getLongGauge("CapacityTotal", rb);
    // Use JUnit assertions instead of the Java 'assert' keyword: 'assert'
    // statements are silently skipped unless the JVM runs with -ea, so the
    // original checks never executed in a normal test run.
    org.junit.Assert.assertTrue("CapacityTotal should be non-zero",
        capacityTotal != 0);
    long capacityUsed = MetricsAsserts.getLongGauge("CapacityUsed", rb);
    long capacityRemaining = MetricsAsserts.getLongGauge("CapacityRemaining", rb);
    long capacityUsedNonDFS = MetricsAsserts.getLongGauge("CapacityUsedNonDFS", rb);
    // The sum may be strictly less than the total because reserved filesystem
    // space is not considered. (NOTE(review): the original comment here was
    // truncated to just "// considered." — wording reconstructed from the
    // assertion below; confirm against upstream TestNameNodeMetrics.)
    org.junit.Assert.assertTrue(
        capacityUsed + capacityRemaining + capacityUsedNonDFS <= capacityTotal);
}
Also used : MetricsRecordBuilder(org.apache.hadoop.metrics2.MetricsRecordBuilder) Test(org.junit.Test)

Example 9 with MetricsRecordBuilder

Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.

From the class TestNameNodeMetrics, method testFileAdd.

/** Test metrics associated with addition of a file */
@Test
public void testFileAdd() throws Exception {
    // Create a file large enough to span 32 blocks (3200 units, replication 3).
    final Path testFile = getTestPath("testFileAdd");
    createFile(testFile, 3200, (short) 3);
    final long expectedBlocks = 32;
    int blockCapacity = namesystem.getBlockCapacity();
    assertGauge("BlockCapacity", blockCapacity, getMetrics(NS_METRICS));

    MetricsRecordBuilder record = getMetrics(NN_METRICS);
    // Exactly one create operation; the number of files created equals the
    // depth of the test path (one inode per path component).
    assertCounter("CreateFileOps", 1L, record);
    assertCounter("FilesCreated", (long) testFile.depth(), record);

    // Total file count includes the root directory, hence the +1.
    long expectedFilesTotal = testFile.depth() + 1;
    record = getMetrics(NS_METRICS);
    assertGauge("FilesTotal", expectedFilesTotal, record);
    assertGauge("BlocksTotal", expectedBlocks, record);

    // Deleting the file drops the file count by one and releases every block.
    fs.delete(testFile, true);
    expectedFilesTotal--;
    record = waitForDnMetricValue(NS_METRICS, "FilesTotal", expectedFilesTotal);
    assertGauge("BlocksTotal", 0L, record);
    assertGauge("PendingDeletionBlocks", 0L, record);

    record = getMetrics(NN_METRICS);
    // Delete file operations and number of files deleted must be 1
    assertCounter("DeleteFileOps", 1L, record);
    assertCounter("FilesDeleted", 1L, record);
}
Also used : Path(org.apache.hadoop.fs.Path) MetricsRecordBuilder(org.apache.hadoop.metrics2.MetricsRecordBuilder) Test(org.junit.Test)

Example 10 with MetricsRecordBuilder

Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.

From the class TestNameNodeMetrics, method testRenameMetrics.

/**
 * Verifies rename metrics: renaming over an existing file with OVERWRITE
 * counts one rename and one implicit delete of the clobbered target.
 */
@Test
public void testRenameMetrics() throws Exception {
    Path source = getTestPath("src");
    createFile(source, 100, (short) 1);
    Path destination = getTestPath("target");
    createFile(destination, 100, (short) 1);
    // OVERWRITE replaces the existing destination, deleting its old inode.
    fs.rename(source, destination, Rename.OVERWRITE);
    MetricsRecordBuilder record = getMetrics(NN_METRICS);
    assertCounter("FilesRenamed", 1L, record);
    assertCounter("FilesDeleted", 1L, record);
}
Also used : Path(org.apache.hadoop.fs.Path) MetricsRecordBuilder(org.apache.hadoop.metrics2.MetricsRecordBuilder) Test(org.junit.Test)

Aggregations

MetricsRecordBuilder (org.apache.hadoop.metrics2.MetricsRecordBuilder)99 Test (org.junit.Test)47 Path (org.apache.hadoop.fs.Path)20 Configuration (org.apache.hadoop.conf.Configuration)14 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)12 HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)11 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)11 FileSystem (org.apache.hadoop.fs.FileSystem)8 MetricsInfo (org.apache.hadoop.metrics2.MetricsInfo)7 IOException (java.io.IOException)6 MetricsCollector (org.apache.hadoop.metrics2.MetricsCollector)6 MetricsSource (org.apache.hadoop.metrics2.MetricsSource)5 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)4 Quantile (org.apache.hadoop.metrics2.util.Quantile)4 ServiceException (com.google.protobuf.ServiceException)3 InterruptedIOException (java.io.InterruptedIOException)2 GarbageCollectorMXBean (java.lang.management.GarbageCollectorMXBean)2 Map (java.util.Map)2 CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo)2 CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo)2