Example usage of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.
From class TestPatternFilter, method excludeOnlyShouldOnlyExcludeMatched.
/**
 * Filters should handle black-listing correctly
 */
@Test
public void excludeOnlyShouldOnlyExcludeMatched() {
  // Blacklist the name "foo" and the tag foo:f under prefix "p".
  SubsetConfiguration blacklist = new ConfigBuilder()
      .add("p.exclude", "foo")
      .add("p.exclude.tags", "foo:f")
      .subset("p");
  // Names and tags outside the blacklist pass through.
  shouldAccept(blacklist, "bar");
  shouldAccept(blacklist, Arrays.asList(tag("bar", "", "")));
  shouldAccept(blacklist, mockMetricsRecord("bar", Arrays.asList(tag("bar", "", ""))));
  // Anything matching an exclude pattern is rejected.
  shouldReject(blacklist, "foo");
  shouldReject(blacklist,
      Arrays.asList(tag("bar", "", ""), tag("foo", "", "f")),
      new boolean[] { true, false });
  shouldReject(blacklist, mockMetricsRecord("foo", Arrays.asList(tag("bar", "", ""))));
  shouldReject(blacklist,
      mockMetricsRecord("bar", Arrays.asList(tag("bar", "", ""), tag("foo", "", "f"))));
}
Example usage of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.
From class TestPatternFilter, method emptyConfigShouldAccept.
/**
 * A filter built from an empty configuration must default to accepting
 * everything.
 */
@Test
public void emptyConfigShouldAccept() {
  SubsetConfiguration emptyConf = new ConfigBuilder().subset("");
  // No include/exclude patterns are configured, so all inputs pass.
  shouldAccept(emptyConf, "anything");
  shouldAccept(emptyConf, Arrays.asList(tag("key", "desc", "value")));
  shouldAccept(emptyConf,
      mockMetricsRecord("anything", Arrays.asList(tag("key", "desc", "value"))));
}
Example usage of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.
From class RollingFileSystemSinkTestBase, method initMetricsSystem.
/**
 * Set up the metrics system, start it, and return it.
 *
 * @param path the base path for the sink
 * @param ignoreErrors whether the sink should ignore errors
 * @param allowAppend whether the sink is allowed to append to existing files
 * @param useSecureParams whether to set the principal and keytab properties
 * @return the started {@link MetricsSystem}
 */
protected MetricsSystem initMetricsSystem(String path, boolean ignoreErrors, boolean allowAppend, boolean useSecureParams) {
  // If the prefix is not lower case, the metrics system won't be able to
  // read any of the properties.
  final String prefix = methodName.getMethodName().toLowerCase();
  // All sink properties share this key prefix.
  final String sinkKey = prefix + ".sink.mysink0.";
  ConfigBuilder builder = new ConfigBuilder()
      .add("*.period", 10000)
      .add(sinkKey + "class", MockSink.class.getName())
      .add(sinkKey + "basepath", path)
      .add(sinkKey + "source", "testsrc")
      .add(sinkKey + "context", "test1")
      .add(sinkKey + "ignore-error", ignoreErrors)
      .add(sinkKey + "allow-append", allowAppend)
      .add(sinkKey + "roll-offset-interval-millis", 0)
      .add(sinkKey + "roll-interval", "1h");
  if (useSecureParams) {
    builder
        .add(sinkKey + "keytab-key", SINK_KEYTAB_FILE_KEY)
        .add(sinkKey + "principal-key", SINK_PRINCIPAL_KEY);
  }
  builder.save(TestMetricsConfig.getTestFilename("hadoop-metrics2-" + prefix));
  MetricsSystemImpl ms = new MetricsSystemImpl(prefix);
  ms.start();
  return ms;
}
Example usage of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.
From class TestFileSink, method testFileSink.
@Test(timeout = 6000)
public void testFileSink() throws IOException {
  outFile = getTestTempFile("test-file-sink-", ".out");
  final String outPath = outFile.getAbsolutePath();
  // NB: specify large period to avoid multiple metrics snapshotting:
  new ConfigBuilder().add("*.period", 10000)
      .add("test.sink.mysink0.class", FileSink.class.getName())
      .add("test.sink.mysink0.filename", outPath)
      .add("test.sink.mysink0.context", "test1")
      .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
  MetricsSystemImpl ms = new MetricsSystemImpl("test");
  ms.start();
  final MyMetrics1 mm1 = new MyMetrics1().registerWith(ms);
  new MyMetrics2().registerWith(ms);
  mm1.testMetric1.incr();
  mm1.testMetric2.incr(2);
  // publish the metrics
  ms.publishMetricsNow();
  ms.stop();
  ms.shutdown();
  // Read the sink's output file. try-with-resources closes both streams
  // even on failure, replacing the try/finally + IOUtils.cleanup idiom;
  // copyBytes gets close=false so the streams are not closed twice.
  String outFileContent;
  try (InputStream is = new FileInputStream(outFile);
       ByteArrayOutputStream baos =
           new ByteArrayOutputStream((int) outFile.length())) {
    IOUtils.copyBytes(is, baos, 1024, false);
    outFileContent = new String(baos.toByteArray(), "UTF-8");
  }
  // Check the out file content. Should be something like the following:
  //1360244820087 test1.testRecord1: Context=test1, testTag1=testTagValue1, testTag2=testTagValue2, Hostname=myhost, testMetric1=1, testMetric2=2
  //1360244820089 test1.testRecord2: Context=test1, testTag22=testTagValue22, Hostname=myhost
  // Note that in the below expression we allow tags and metrics to go in arbitrary order.
  // The dots in the record names are escaped so they match only a literal '.'
  // (previously the unescaped '.' matched any character).
  Pattern expectedContentPattern = Pattern.compile(// line #1:
      "^\\d+\\s+test1\\.testRecord1:\\s+Context=test1,\\s+"
      + "(testTag1=testTagValue1,\\s+testTag2=testTagValue2|testTag2=testTagValue2,\\s+testTag1=testTagValue1),"
      + "\\s+Hostname=.*,\\s+(testMetric1=1,\\s+testMetric2=2|testMetric2=2,\\s+testMetric1=1)"
      + // line #2:
      "$[\\n\\r]*^\\d+\\s+test1\\.testRecord2:\\s+Context=test1,"
      + "\\s+testTag22=testTagValue22,\\s+Hostname=.*$[\\n\\r]*", Pattern.MULTILINE);
  assertTrue(expectedContentPattern.matcher(outFileContent).matches());
}
Example usage of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.
From class TestRollingFileSystemSink, method doTestGetRollInterval.
/**
 * Check that the configured roll interval parses to the expected value for
 * each given unit name, trying every unit in lower case and upper case, both
 * with and without a space between the number and the unit.
 *
 * @param num the numeric part of the roll-interval setting
 * @param units the unit names to test
 * @param expected the expected value of {@code getRollInterval()}
 */
private void doTestGetRollInterval(int num, String[] units, long expected) {
  RollingFileSystemSink sink = new RollingFileSystemSink();
  ConfigBuilder builder = new ConfigBuilder();
  for (String unit : units) {
    // Four spellings of the same interval: case and spacing variations.
    String[] variants = {
        num + unit,
        num + unit.toUpperCase(),
        num + " " + unit,
        num + " " + unit.toUpperCase()
    };
    for (String setting : variants) {
      sink.init(builder.add("sink.roll-interval", setting).subset("sink"));
      assertEquals(expected, sink.getRollInterval());
    }
  }
}
Aggregations