Use of org.apache.hadoop.metrics2.util.MetricsCache.Record in project hadoop by apache.
In class TestKafkaMetrics, method recordToJson:
StringBuilder recordToJson(MetricsRecord record) {
  // Create a JSON object from a metrics record.
  StringBuilder jsonLines = new StringBuilder();
  long timestamp = record.timestamp();
  Date currDate = new Date(timestamp);
  SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
  String date = dateFormat.format(currDate);
  SimpleDateFormat timeFormat = new SimpleDateFormat("hh:mm:ss");
  String time = timeFormat.format(currDate);
  String hostname = "null";
  try {
    hostname = InetAddress.getLocalHost().getHostName();
  } catch (Exception e) {
    LOG.warn("Error getting hostname, going to continue");
  }
  jsonLines.append("{\"hostname\": \"" + hostname);
  jsonLines.append("\", \"timestamp\": " + timestamp);
  jsonLines.append(", \"date\": \"" + date);
  jsonLines.append("\",\"time\": \"" + time);
  jsonLines.append("\",\"name\": \"" + record.name() + "\" ");
  for (MetricsTag tag : record.tags()) {
    // Strip control characters from tag names before emitting them as keys.
    jsonLines.append(", \"" + tag.name().replaceAll("[\\p{Cc}]", "") + "\": ");
    jsonLines.append(" \"" + tag.value() + "\"");
  }
  for (AbstractMetric m : record.metrics()) {
    jsonLines.append(", \"" + m.name().replaceAll("[\\p{Cc}]", "") + "\": ");
    jsonLines.append(" \"" + m.value().toString() + "\"");
  }
  jsonLines.append("}");
  return jsonLines;
}
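For the mocked record built in testPutMetrics below, this yields a single JSON line shaped roughly as follows. The hostname, timestamp, date, and time are illustrative, and the key names assume the enum constants' names ("KafkaTag", "KafkaCounter") are used as the tag and metric names:

{"hostname": "host1.example.com", "timestamp": 1498456921943, "date": "2017-06-26","time": "07:02:01","name": "Kafka record name" , "KafkaTag":  "test_tag", "KafkaCounter":  "123"}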
Use of org.apache.hadoop.metrics2.util.MetricsCache.Record in project hadoop by apache.
In class TestKafkaMetrics, method testPutMetrics:
@Test
@SuppressWarnings({ "unchecked", "rawtypes" })
public void testPutMetrics() throws Exception {
  // Create a record by mocking the MetricsRecord class.
  MetricsRecord record = mock(MetricsRecord.class);
  when(record.tags()).thenReturn(Lists.newArrayList(new MetricsTag(KafkaMetricsInfo.KafkaTag, "test_tag")));
  when(record.timestamp()).thenReturn(System.currentTimeMillis());
  // Create a metric using the AbstractMetric class.
  AbstractMetric metric = new AbstractMetric(KafkaMetricsInfo.KafkaCounter) {

    @Override
    public Number value() {
      return 123;
    }

    @Override
    public MetricType type() {
      return null;
    }

    @Override
    public void visit(MetricsVisitor visitor) {
    }
  };
  // Create a list of metrics.
  Iterable<AbstractMetric> metrics = Lists.newArrayList(metric);
  when(record.name()).thenReturn("Kafka record name");
  when(record.metrics()).thenReturn(metrics);
  SubsetConfiguration conf = mock(SubsetConfiguration.class);
  when(conf.getString(KafkaSink.BROKER_LIST)).thenReturn("localhost:9092");
  String topic = "myTestKafkaTopic";
  when(conf.getString(KafkaSink.TOPIC)).thenReturn(topic);
  // Create the KafkaSink object and initialize it.
  kafkaSink = new KafkaSink();
  kafkaSink.init(conf);
  // Create a mock KafkaProducer as the producer for KafkaSink.
  Producer<Integer, byte[]> mockProducer = mock(KafkaProducer.class);
  kafkaSink.setProducer(mockProducer);
  // Create the JSON object from the record.
  StringBuilder jsonLines = recordToJson(record);
  if (LOG.isDebugEnabled()) {
    LOG.debug("kafka message: " + jsonLines.toString());
  }
  // Stub send() to return a mock Future, then push the record through the sink.
  Future<RecordMetadata> f = mock(Future.class);
  when(mockProducer.send((ProducerRecord) anyObject())).thenReturn(f);
  kafkaSink.putMetrics(record);
  // Capture the argument and verify it.
  ArgumentCaptor<ProducerRecord> argument = ArgumentCaptor.forClass(ProducerRecord.class);
  verify(mockProducer).send(argument.capture());
  // Compare the received data with the original.
  ProducerRecord<Integer, byte[]> data = argument.getValue();
  String jsonResult = new String(data.value());
  if (LOG.isDebugEnabled()) {
    LOG.debug("kafka result: " + jsonResult);
  }
  assertEquals(jsonLines.toString(), jsonResult);
}
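In a real deployment, KafkaSink is wired in through the metrics2 properties file rather than a mocked SubsetConfiguration. A minimal sketch, assuming KafkaSink.BROKER_LIST and KafkaSink.TOPIC resolve to the property names broker_list and topic, and that the sink class lives in org.apache.hadoop.metrics2.sink:

# Hypothetical hadoop-metrics2.properties entries for KafkaSink
*.sink.kafka.class=org.apache.hadoop.metrics2.sink.KafkaSink
namenode.sink.kafka.broker_list=localhost:9092
namenode.sink.kafka.topic=myTestKafkaTopic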
Use of org.apache.hadoop.metrics2.util.MetricsCache.Record in project hadoop by apache.
In class MetricsCache, method update:
/**
 * Update the cache and return the current cached record
 * @param mr the update record
 * @param includingTags cache tag values (for later lookup by name) if true
 * @return the updated cache record
 */
public Record update(MetricsRecord mr, boolean includingTags) {
  String name = mr.name();
  RecordCache recordCache = map.get(name);
  if (recordCache == null) {
    recordCache = new RecordCache();
    map.put(name, recordCache);
  }
  Collection<MetricsTag> tags = mr.tags();
  Record record = recordCache.get(tags);
  if (record == null) {
    record = new Record();
    recordCache.put(tags, record);
  }
  for (AbstractMetric m : mr.metrics()) {
    record.metrics.put(m.name(), m);
  }
  if (includingTags) {
    // mostly for some sinks that include tags as part of a dense schema
    for (MetricsTag t : mr.tags()) {
      record.tags.put(t.name(), t.value());
    }
  }
  return record;
}
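A typical consumer is a sink that receives partial record updates and wants the latest value of every metric for a given record name and tag set. A minimal sketch, assuming Record exposes getMetric(String) and getTag(String) accessors and using hypothetical metric and tag names:

private final MetricsCache cache = new MetricsCache();

public void putMetrics(MetricsRecord record) {
  // Merge this record into the cache; passing true also caches tag
  // values so they can be looked up by name later.
  MetricsCache.Record cached = cache.update(record, true);
  // The cached record now holds the most recent value of every metric
  // seen so far for this record name and tag set, not just this update.
  Number callsTotal = cached.getMetric("CallsTotal"); // hypothetical metric name
  String host = cached.getTag("Hostname");            // hypothetical tag name
  emit(record.name(), host, callsTotal);              // emit() is a placeholder
}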
Use of org.apache.hadoop.metrics2.util.MetricsCache.Record in project hadoop by apache.
In class MetricsSourceAdapter, method updateAttrCache:
private int updateAttrCache(Iterable<MetricsRecordImpl> lastRecs) {
  Preconditions.checkNotNull(lastRecs, "LastRecs should not be null");
  LOG.debug("Updating attr cache...");
  int recNo = 0;
  int numMetrics = 0;
  for (MetricsRecordImpl record : lastRecs) {
    for (MetricsTag t : record.tags()) {
      setAttrCacheTag(t, recNo);
      ++numMetrics;
    }
    for (AbstractMetric m : record.metrics()) {
      setAttrCacheMetric(m, recNo);
      ++numMetrics;
    }
    ++recNo;
  }
  LOG.debug("Done. # tags & metrics=" + numMetrics);
  return numMetrics;
}
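Each cached entry ultimately surfaces as a JMX attribute, and recNo disambiguates attributes when a source emits more than one record per snapshot. A plausible sketch of setAttrCacheTag, assuming attrCache is a Map<String, Attribute>; the "tag." prefix and the ".recNo" suffix are assumptions about the adapter's attribute-naming scheme, not confirmed from the source shown here:

// Sketch only: the "tag." prefix and ".recNo" suffix for recNo > 0 are
// assumed conventions for keeping attributes from different records apart.
private void setAttrCacheTag(MetricsTag tag, int recNo) {
  String key = recNo > 0 ? "tag." + tag.name() + '.' + recNo
      : "tag." + tag.name();
  attrCache.put(key, new javax.management.Attribute(key, tag.value()));
}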
Use of org.apache.hadoop.metrics2.util.MetricsCache.Record in project hadoop by apache.
In class RollingFileSystemSink, method putMetrics:
@Override
public void putMetrics(MetricsRecord record) {
  synchronized (lock) {
    rollLogDirIfNeeded();
    if (currentOutStream != null) {
      currentOutStream.printf("%d %s.%s", record.timestamp(), record.context(), record.name());
      String separator = ": ";
      for (MetricsTag tag : record.tags()) {
        currentOutStream.printf("%s%s=%s", separator, tag.name(), tag.value());
        separator = ", ";
      }
      for (AbstractMetric metric : record.metrics()) {
        currentOutStream.printf("%s%s=%s", separator, metric.name(), metric.value());
      }
      currentOutStream.println();
      // Flush after every record so that the data written to disk
      // is complete at the end of the interval.
      try {
        currentFSOutStream.hflush();
      } catch (IOException ex) {
        throwMetricsException("Failed flushing the stream", ex);
      }
      checkForErrors("Unable to write to log file");
    } else if (!ignoreError) {
      throwMetricsException("Unable to write to log file");
    }
  }
}
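Each call therefore appends one line per record: the timestamp, then context.name, then a colon followed by comma-separated name=value pairs for the tags and metrics. An illustrative line (all names and values here are made up):

1498456921943 dfs.NameNodeActivity: Context=dfs, Hostname=host1.example.com, FilesCreated=42, FilesDeleted=3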