use of com.codahale.metrics.MetricRegistry in project ambry by linkedin.
the class PublicAccessLogHandlerTest method createChannel.
// helpers
// general
/**
* Creates an {@link EmbeddedChannel} that incorporates an instance of {@link PublicAccessLogHandler}
* and {@link EchoMethodHandler}.
* @param useSSL {@code true} to add an {@link SslHandler} to the pipeline.
* @return an {@link EmbeddedChannel} that incorporates an instance of {@link PublicAccessLogHandler}
* and {@link EchoMethodHandler}, and an {@link SslHandler} if needed.
*/
private EmbeddedChannel createChannel(boolean useSSL) {
  EmbeddedChannel channel = new EmbeddedChannel();
  if (useSSL) {
    SSLEngine sslEngine = SSL_CONTEXT.newEngine(channel.alloc());
    // HttpRequests pass through the SslHandler without a handshake (it only operates on ByteBuffers) so we have
    // to mock certain methods of SSLEngine and SSLSession to ensure that we can test certificate logging.
    SSLEngine mockSSLEngine =
        new MockSSLEngine(sslEngine, new MockSSLSession(sslEngine.getSession(), new Certificate[]{PEER_CERT}));
    channel.pipeline().addLast(new SslHandler(mockSSLEngine));
  }
  channel.pipeline()
      .addLast(new PublicAccessLogHandler(publicAccessLogger, new NettyMetrics(new MetricRegistry())))
      .addLast(new EchoMethodHandler());
  return channel;
}
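A minimal usage sketch (not part of the Ambry test) of how a channel built this way is typically exercised with Netty's EmbeddedChannel API; what exactly EchoMethodHandler writes back is an assumption here.

// Hypothetical follow-up: drive a request through the pipeline built above.
EmbeddedChannel channel = createChannel(false);
channel.writeInbound(new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"));
// PublicAccessLogHandler logs the request/response pair; EchoMethodHandler is assumed to write a
// response on the outbound path, which the embedded channel exposes via readOutbound().
HttpResponse response = channel.readOutbound();
channel.finish();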
use of com.codahale.metrics.MetricRegistry in project ambry by linkedin.
the class AmbryUrlSigningServiceTest method factoryTest.
/**
* Tests for {@link AmbryUrlSigningServiceFactory}.
*/
@Test
public void factoryTest() {
  Properties properties = new Properties();
  properties.setProperty("frontend.url.signer.upload.endpoint", UPLOAD_ENDPOINT);
  properties.setProperty("frontend.url.signer.download.endpoint", DOWNLOAD_ENDPOINT);
  properties.setProperty("frontend.url.signer.default.url.ttl.secs", Long.toString(DEFAULT_URL_TTL_SECS));
  properties.setProperty("frontend.url.signer.default.max.upload.size.bytes", Long.toString(DEFAULT_MAX_UPLOAD_SIZE));
  properties.setProperty("frontend.url.signer.max.url.ttl.secs", Long.toString(MAX_URL_TTL_SECS));
  UrlSigningService signer =
      new AmbryUrlSigningServiceFactory(new VerifiableProperties(properties), new MetricRegistry()).getUrlSigningService();
  assertNotNull("UrlSigningService is null", signer);
  assertTrue("UrlSigningService should be an instance of AmbryUrlSigningService",
      signer instanceof AmbryUrlSigningService);
}
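A hedged sketch of how the signer returned by the factory might then be used; the getSignedUrl call reflects the UrlSigningService interface as understood here, and restRequest stands in for a RestRequest constructed elsewhere in the test.

// Sketch only (not in the original test): `restRequest` is a hypothetical RestRequest whose
// headers describe the URL to sign; getSignedUrl is assumed from the UrlSigningService interface.
String signedUrl = signer.getSignedUrl(restRequest);
assertNotNull("Signed URL should not be null", signedUrl);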
use of com.codahale.metrics.MetricRegistry in project ambry by linkedin.
the class TestMetrics method withDefaultsTest.
// commonCaseTest() helpers
/**
* Tests recording of metrics without setting a custom {@link RestRequestMetrics}.
* @param induceFailure if {@code true}, the request is marked as failed.
*/
private void withDefaultsTest(boolean induceFailure) throws InterruptedException {
  MetricRegistry metricRegistry = new MetricRegistry();
  RestRequestMetricsTracker.setDefaults(metricRegistry);
  RestRequestMetricsTracker requestMetrics = new RestRequestMetricsTracker();
  TestMetrics testMetrics = new TestMetrics(requestMetrics, induceFailure);
  requestMetrics.recordMetrics();
  String metricPrefix =
      RestRequestMetricsTracker.class.getCanonicalName() + "." + RestRequestMetricsTracker.DEFAULT_REQUEST_TYPE;
  testMetrics.compareMetrics(metricPrefix, metricRegistry);
}
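For reference, a hedged sketch of the kind of registry lookup compareMetrics is expected to perform; the metric suffix below is a placeholder, not an actual Ambry metric name.

// Sketch only: default request metrics are registered under the prefix computed above, so the
// comparison boils down to codahale lookups of this form ("someCounterName" is a placeholder).
long count = metricRegistry.getCounters().get(metricPrefix + "someCounterName").getCount();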
use of com.codahale.metrics.MetricRegistry in project ambry by linkedin.
the class HelixClusterManagerTest method badInstantiationTest.
/**
* Test bad instantiation.
* @throws Exception
*/
@Test
public void badInstantiationTest() throws Exception {
  // The successful instantiation already happened in the test constructor, so the failure gauge should read 0.
  assertEquals(0L,
      metricRegistry.getGauges().get(HelixClusterManager.class.getName() + ".instantiationFailed").getValue());
  // Bad instantiation: an invalid ZK layout should make instantiation fail.
  Set<com.github.ambry.utils.TestUtils.ZkInfo> zkInfos = new HashSet<>(dcsToZkInfo.values());
  zkInfos.iterator().next().setPort(0);
  JSONObject invalidZkJson = constructZkLayoutJSON(zkInfos);
  Properties props = new Properties();
  props.setProperty("clustermap.host.name", hostname);
  props.setProperty("clustermap.cluster.name", clusterNamePrefixInHelix + clusterNameStatic);
  props.setProperty("clustermap.datacenter.name", dcs[0]);
  props.setProperty("clustermap.dcs.zk.connect.strings", invalidZkJson.toString(2));
  ClusterMapConfig invalidClusterMapConfig = new ClusterMapConfig(new VerifiableProperties(props));
  metricRegistry = new MetricRegistry();
  try {
    new HelixClusterManager(invalidClusterMapConfig, hostname, new MockHelixManagerFactory(helixCluster, null),
        metricRegistry);
    fail("Instantiation should have failed with invalid zk addresses");
  } catch (IOException e) {
    assertEquals(1L,
        metricRegistry.getGauges().get(HelixClusterManager.class.getName() + ".instantiationFailed").getValue());
  }
  metricRegistry = new MetricRegistry();
  try {
    new HelixClusterManager(clusterMapConfig, hostname,
        new MockHelixManagerFactory(helixCluster, new Exception("beBad")), metricRegistry);
    fail("Instantiation should fail with a HelixManager factory that throws exception on listener registrations");
  } catch (Exception e) {
    assertEquals(1L,
        metricRegistry.getGauges().get(HelixClusterManager.class.getName() + ".instantiationFailed").getValue());
    assertEquals("beBad", e.getCause().getMessage());
  }
}
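The instantiationFailed gauge read above follows the standard codahale pattern. A minimal sketch of how such a gauge is typically registered; the AtomicLong wiring is an assumption about HelixClusterManager's metrics class, not a copy of it.

// Minimal sketch: register a gauge named "<fully qualified class>.instantiationFailed" that
// reports 0 until instantiation fails, at which point the owner flips it to 1.
final AtomicLong instantiationFailed = new AtomicLong(0);
metricRegistry.register(MetricRegistry.name(HelixClusterManager.class, "instantiationFailed"),
    (Gauge<Long>) instantiationFailed::get);
instantiationFailed.set(1); // what the constructor is assumed to do when setup fails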
use of com.codahale.metrics.MetricRegistry in project ambry by linkedin.
the class MessageFormatSendTest method sendWriteTestWithBadId.
@Test
public void sendWriteTestWithBadId() throws IOException, MessageFormatException {
  // add header, system metadata, user metadata and data to the buffers
  ByteBuffer buf1 = ByteBuffer.allocate(1010);
  // fill header
  // version
  buf1.putShort((short) 1);
  // total size
  buf1.putLong(950);
  // put relative offsets
  // blob property relative offset
  buf1.putInt(60);
  // delete relative offset
  buf1.putInt(-1);
  // user metadata relative offset
  buf1.putInt(81);
  // data relative offset
  buf1.putInt(191);
  Crc32 crc = new Crc32();
  crc.update(buf1.array(), 0, buf1.position());
  // crc
  buf1.putLong(crc.getValue());
  // blob id
  String id = "012345678910123456789012";
  buf1.putShort((short) id.length());
  buf1.put(id.getBytes());
  // blob property version
  buf1.putShort((short) 1);
  String attribute1 = "ttl";
  String attribute2 = "del";
  // ttl name
  buf1.put(attribute1.getBytes());
  // ttl value
  buf1.putLong(12345);
  // delete name
  buf1.put(attribute2.getBytes());
  byte b = 1;
  // delete flag
  buf1.put(b);
  // crc
  buf1.putInt(456);
  // user metadata version
  buf1.putShort((short) 1);
  buf1.putInt(100);
  byte[] usermetadata = new byte[100];
  new Random().nextBytes(usermetadata);
  buf1.put(usermetadata);
  buf1.putInt(123);
  // blob version
  buf1.putShort((short) 0);
  // blob size
  buf1.putLong(805);
  // blob
  byte[] data = new byte[805];
  new Random().nextBytes(data);
  buf1.put(data);
  // blob crc
  buf1.putInt(123);
  buf1.flip();
  ArrayList<ByteBuffer> listbuf = new ArrayList<>();
  listbuf.add(buf1);
  ArrayList<StoreKey> storeKeys = new ArrayList<>();
  storeKeys.add(new MockId("012345678910123223233456789012"));
  MessageReadSet readSet = new MockMessageReadSet(listbuf, storeKeys);
  MetricRegistry registry = new MetricRegistry();
  MessageFormatMetrics metrics = new MessageFormatMetrics(registry);
  // get all
  MessageFormatSend send = new MessageFormatSend(readSet, MessageFormatFlags.All, metrics, new MockIdFactory());
  Assert.assertEquals(send.sizeInBytes(), 1010);
  ByteBuffer bufresult = ByteBuffer.allocate(1010);
  WritableByteChannel channel1 = Channels.newChannel(new ByteBufferOutputStream(bufresult));
  while (!send.isSendComplete()) {
    send.writeTo(channel1);
  }
  Assert.assertArrayEquals(buf1.array(), bufresult.array());
  try {
    // get blob: the read set's key does not match the id serialized into the buffer, so this should fail
    new MessageFormatSend(readSet, MessageFormatFlags.Blob, metrics, new MockIdFactory());
    Assert.fail("Getting the blob should have failed because the read set's key does not match the serialized id");
  } catch (MessageFormatException e) {
    Assert.assertEquals(MessageFormatErrorCodes.Store_Key_Id_MisMatch, e.getErrorCode());
  }
}
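For readability, here is how the hard-coded offsets in the buffer above line up, derived from the puts in this test and shown as a small sketch rather than code from the project.

// How the magic numbers above line up (derived from the puts in this test):
int headerSize = 2 + 8 + 4 * 4 + 8;              // version + total size + four offsets + crc = 34
int blobIdSize = 2 + 24;                          // id length short + 24-char id = 26 -> properties start at 60
int blobPropertiesSize = 2 + 3 + 8 + 3 + 1 + 4;   // version + "ttl" + value + "del" + flag + crc = 21 -> 81
int userMetadataSize = 2 + 4 + 100 + 4;           // version + size + 100-byte payload + crc = 110 -> 191
int blobRecordSize = 2 + 8 + 805 + 4;             // version + size + 805-byte payload + crc = 819 -> 1010 total
// The "total size" field (950) appears to cover everything after the header and blob id: 1010 - 60 = 950.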