Use of org.apache.hadoop.metrics2.annotation.Metrics in the Apache Hadoop project.
From the class TestStatsDMetrics, the method testPutMetrics2:
@Test(timeout = 3000)
public void testPutMetrics2() throws IOException {
  StatsDSink sink = new StatsDSink();
  List<MetricsTag> tags = new ArrayList<MetricsTag>();
  tags.add(new MetricsTag(MsInfo.Hostname, null));
  tags.add(new MetricsTag(MsInfo.Context, "jvm"));
  tags.add(new MetricsTag(MsInfo.ProcessName, "process"));
  Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
  metrics.add(makeMetric("foo1", 1, MetricType.COUNTER));
  metrics.add(makeMetric("foo2", 2, MetricType.GAUGE));
  MetricsRecord record =
      new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);

  try (DatagramSocket sock = new DatagramSocket()) {
    sock.setReceiveBufferSize(8192);
    final StatsDSink.StatsD mockStatsD =
        new StatsD(sock.getLocalAddress().getHostName(), sock.getLocalPort());
    Whitebox.setInternalState(sink, "statsd", mockStatsD);
    final DatagramPacket p = new DatagramPacket(new byte[8192], 8192);
    sink.putMetrics(record);
    sock.receive(p);

    String result =
        new String(p.getData(), 0, p.getLength(), Charset.forName("UTF-8"));
    assertTrue("Received data did not match data sent",
        result.equals("process.jvm.Context.foo1:1|c")
            || result.equals("process.jvm.Context.foo2:2|g"));
  } finally {
    sink.close();
  }
}
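The makeMetric helper used above is defined elsewhere in TestStatsDMetrics and is not shown on this page. A minimal sketch, assuming it simply stubs an AbstractMetric with Mockito (an assumption, not the verbatim Hadoop helper):

// Hypothetical helper: stubs an AbstractMetric so the sink sees the given
// name, value and metric type.
private AbstractMetric makeMetric(String name, Number value, MetricType type) {
  AbstractMetric metric = mock(AbstractMetric.class);
  when(metric.name()).thenReturn(name);
  when(metric.value()).thenReturn(value);
  when(metric.type()).thenReturn(type);
  return metric;
}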
From the class TestRollingFileSystemSinkWithLocal, the method testFailedWrite:
/**
 * Test that writing fails when the directory isn't writable.
 */
@Test
public void testFailedWrite() {
  String path = methodDir.getAbsolutePath();
  MetricsSystem ms = initMetricsSystem(path, false, false);

  new MyMetrics1().registerWith(ms);

  methodDir.setWritable(false);
  MockSink.errored = false;

  try {
    // publish the metrics
    ms.publishMetricsNow();

    assertTrue("No exception was generated while writing metrics "
        + "even though the target directory was not writable",
        MockSink.errored);

    ms.stop();
    ms.shutdown();
  } finally {
    // Make sure the dir is writable again so we can delete it at the end
    methodDir.setWritable(true);
  }
}
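The methodDir field and the initMetricsSystem, MyMetrics1, and MockSink helpers come from the surrounding test base class and are not reproduced here. As a rough sketch of the error-detection idea only (the class name and wiring below are assumptions, not the actual base class), a sink can record write failures like this:

// Hypothetical error-detecting wrapper in the spirit of MockSink above.
// Assumes RollingFileSystemSink is created via its no-arg constructor and
// configured through init(), as the metrics2 framework normally does.
public static class ErrorDetectingSink extends RollingFileSystemSink {
  public static volatile boolean errored = false;

  @Override
  public void putMetrics(MetricsRecord record) {
    try {
      super.putMetrics(record);
    } catch (MetricsException ex) {
      errored = true; // remember that the write failed
      throw ex;
    }
  }
}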
From the class TestGraphiteMetrics, the method testPutMetrics:
@Test
public void testPutMetrics() {
  GraphiteSink sink = new GraphiteSink();
  List<MetricsTag> tags = new ArrayList<MetricsTag>();
  tags.add(new MetricsTag(MsInfo.Context, "all"));
  tags.add(new MetricsTag(MsInfo.Hostname, "host"));
  Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
  metrics.add(makeMetric("foo1", 1.25));
  metrics.add(makeMetric("foo2", 2.25));
  MetricsRecord record =
      new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);

  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  final GraphiteSink.Graphite mockGraphite = makeGraphite();
  Whitebox.setInternalState(sink, "graphite", mockGraphite);
  sink.putMetrics(record);

  try {
    verify(mockGraphite).write(argument.capture());
  } catch (IOException e) {
    e.printStackTrace();
  }

  String result = argument.getValue();

  assertEquals(true,
      result.equals("null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"
          + "null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n")
      || result.equals("null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n"
          + "null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"));
}
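Both Graphite tests on this page rely on makeMetric and makeGraphite helpers defined elsewhere in TestGraphiteMetrics. A plausible sketch, assuming they are Mockito-based (an assumption, not the verbatim helpers):

// Hypothetical helpers returning Mockito mocks.
private AbstractMetric makeMetric(String name, Number value) {
  AbstractMetric metric = mock(AbstractMetric.class);
  when(metric.name()).thenReturn(name);
  when(metric.value()).thenReturn(value);
  return metric;
}

private GraphiteSink.Graphite makeGraphite() {
  GraphiteSink.Graphite mockGraphite = mock(GraphiteSink.Graphite.class);
  when(mockGraphite.isConnected()).thenReturn(true);
  return mockGraphite;
}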
From the class TestGraphiteMetrics, the method testFailureAndPutMetrics:
@Test
public void testFailureAndPutMetrics() throws IOException {
  GraphiteSink sink = new GraphiteSink();
  List<MetricsTag> tags = new ArrayList<MetricsTag>();
  tags.add(new MetricsTag(MsInfo.Context, "all"));
  tags.add(new MetricsTag(MsInfo.Hostname, "host"));
  Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
  metrics.add(makeMetric("foo1", 1.25));
  metrics.add(makeMetric("foo2", 2.25));
  MetricsRecord record =
      new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);

  final GraphiteSink.Graphite mockGraphite = makeGraphite();
  Whitebox.setInternalState(sink, "graphite", mockGraphite);

  // throw an exception on the first write attempt
  doThrow(new IOException("IO exception")).when(mockGraphite).write(anyString());

  sink.putMetrics(record);
  verify(mockGraphite).write(anyString());
  verify(mockGraphite).close();

  // reset the mock and try again
  reset(mockGraphite);
  when(mockGraphite.isConnected()).thenReturn(false);

  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  sink.putMetrics(record);
  verify(mockGraphite).write(argument.capture());

  String result = argument.getValue();

  assertEquals(true,
      result.equals("null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"
          + "null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n")
      || result.equals("null.all.Context.Context=all.Hostname=host.foo2 2.25 10\n"
          + "null.all.Context.Context=all.Hostname=host.foo1 1.25 10\n"));
}
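The test resets the mock between the failing and the successful putMetrics call. A generic Mockito alternative (shown only as a sketch, not what the Hadoop test does) is to chain stubbings so that only the first write fails:

// First call to write() throws, subsequent calls succeed.
doThrow(new IOException("IO exception"))
    .doNothing()
    .when(mockGraphite).write(anyString());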
From the class TestMetricsAnnotations, the method testHybrid:
@Test
public void testHybrid() {
  HybridMetrics metrics = new HybridMetrics();
  MetricsSource source = MetricsAnnotations.makeSource(metrics);

  assertSame(metrics, source);
  metrics.C0.incr();

  MetricsRecordBuilder rb = getMetrics(source);
  MetricsCollector collector = rb.parent();

  verify(collector).addRecord("foo");
  verify(collector).addRecord("bar");
  verify(collector).addRecord(info("HybridMetrics", "HybridMetrics"));
  verify(rb).setContext("foocontext");
  verify(rb).addCounter(info("C1", "C1 desc"), 1);
  verify(rb).setContext("barcontext");
  verify(rb).addGauge(info("G1", "G1 desc"), 1);
  verify(rb).add(tag(MsInfo.Context, "hybrid"));
  verify(rb).addCounter(info("C0", "C0 desc"), 1);
  verify(rb).addGauge(info("G0", "G0"), 0);
}
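The HybridMetrics class is defined elsewhere in TestMetricsAnnotations. For readers looking for the annotation this page is indexing, a minimal, hypothetical source class using org.apache.hadoop.metrics2.annotation.Metrics could look like this (the names below are illustrative, not the actual HybridMetrics):

// Hypothetical annotated metrics source; the mutable metric fields come from
// org.apache.hadoop.metrics2.lib and are populated by the framework.
@Metrics(name = "ExampleMetrics", about = "Example metrics source", context = "hybrid")
class ExampleMetrics {
  @Metric("C0 desc") MutableCounterInt C0;
  @Metric("G0 desc") MutableGaugeInt G0;
}

// Registering the annotated object turns it into a MetricsSource:
// MetricsSource source = MetricsAnnotations.makeSource(new ExampleMetrics());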