Use of org.apache.hadoop.metrics2.AbstractMetric in project phoenix by apache.
The class BaseTracingTestIT, method createRecord.
public static MetricsRecord createRecord(long traceid, long parentid, long spanid, String desc,
        long startTime, long endTime, String hostname, String... tags) {
    // counters carrying the span structure and timing
    List<AbstractMetric> metrics = new ArrayList<AbstractMetric>();
    AbstractMetric span = new ExposedMetricCounterLong(asInfo(MetricInfo.SPAN.traceName), spanid);
    metrics.add(span);
    AbstractMetric parent =
            new ExposedMetricCounterLong(asInfo(MetricInfo.PARENT.traceName), parentid);
    metrics.add(parent);
    AbstractMetric start =
            new ExposedMetricCounterLong(asInfo(MetricInfo.START.traceName), startTime);
    metrics.add(start);
    AbstractMetric end = new ExposedMetricCounterLong(asInfo(MetricInfo.END.traceName), endTime);
    metrics.add(end);

    // tags carry the annotations and the hostname
    List<MetricsTag> tagsList = new ArrayList<MetricsTag>();
    int tagCount = 0;
    for (String annotation : tags) {
        MetricsTag tag = new PhoenixTagImpl(MetricInfo.ANNOTATION.traceName,
                Integer.toString(tagCount++), annotation);
        tagsList.add(tag);
    }
    // use the supplied hostname so the parameter is actually honored
    MetricsTag hostnameTag = new PhoenixTagImpl(MetricInfo.HOSTNAME.traceName, "", hostname);
    tagsList.add(hostnameTag);

    MetricsRecord record = new ExposedMetricsRecordImpl(
            new ExposedMetricsInfoImpl(TracingUtils.getTraceMetricName(traceid), desc),
            System.currentTimeMillis(), tagsList, metrics);
    return record;
}
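A minimal usage sketch for the helper above, assuming it runs inside a BaseTracingTestIT subclass with a tracing sink already initialized; the ids, timestamps, hostname, and the sink variable are illustrative assumptions rather than values taken from the Phoenix tests:

// hypothetical values: trace 987654, root parent 0, span 1, running from t=10 to t=30
MetricsRecord record = createRecord(987654L, 0L, 1L, "test span",
        10L, 30L, "localhost", "test-annotation");

// hand the fake record to the sink under test and force it out
// (sink is assumed to be an already-configured PhoenixMetricsSink)
sink.putMetrics(record);
sink.flush();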
Use of org.apache.hadoop.metrics2.AbstractMetric in project phoenix by apache.
The class PhoenixMetricsSink, method putMetrics.
/**
 * Add a new metric record to be written to the Phoenix trace table.
 *
 * @param record tracing record to convert into an UPSERT; non-tracing records are ignored
 */
@Override
public void putMetrics(MetricsRecord record) {
    // if it's not a tracing record, we are done. This could also be handled by filters, but
    // it is safer to check here, in case the filters get misconfigured
    if (!record.name().startsWith(TracingUtils.METRIC_SOURCE_KEY)) {
        return;
    }
    // don't initialize until we actually have something to write
    lazyInitialize();

    String stmt = "UPSERT INTO " + table + " (";
    // collect the column names and values that should be written
    List<String> keys = new ArrayList<String>();
    List<Object> values = new ArrayList<Object>();
    // we need to keep variable values in a separate set since they may have spaces, which
    // causes the parser to barf. Instead, we need to add them after the statement is prepared
    List<String> variableValues = new ArrayList<String>(record.tags().size());
    keys.add(TRACE.columnName);
    values.add(Long.parseLong(record.name().substring(TracingUtils.METRIC_SOURCE_KEY.length())));

    keys.add(DESCRIPTION.columnName);
    values.add(VARIABLE_VALUE);
    variableValues.add(record.description());

    // add each of the metrics
    for (AbstractMetric metric : record.metrics()) {
        // name of the metric is also the column name to which we write
        keys.add(MetricInfo.getColumnName(metric.name()));
        values.add(metric.value());
    }

    // get the tags out so we can set them later (otherwise, they need to be a single value)
    int annotationCount = 0;
    int tagCount = 0;
    for (MetricsTag tag : record.tags()) {
        if (tag.name().equals(ANNOTATION.traceName)) {
            addDynamicEntry(keys, values, variableValues, ANNOTATION_FAMILY, tag, ANNOTATION,
                    annotationCount);
            annotationCount++;
        } else if (tag.name().equals(TAG.traceName)) {
            addDynamicEntry(keys, values, variableValues, TAG_FAMILY, tag, TAG, tagCount);
            tagCount++;
        } else if (tag.name().equals(HOSTNAME.traceName)) {
            keys.add(HOSTNAME.columnName);
            values.add(VARIABLE_VALUE);
            variableValues.add(tag.value());
        } else if (tag.name().equals("Context")) {
            // ignored
        } else {
            LOG.error("Got an unexpected tag: " + tag);
        }
    }

    // add the tag and annotation counts, now that we know them
    keys.add(TAG_COUNT);
    values.add(tagCount);
    keys.add(ANNOTATION_COUNT);
    values.add(annotationCount);

    // compile the statement together
    stmt += COMMAS.join(keys);
    stmt += ") VALUES (" + COMMAS.join(values) + ")";

    if (LOG.isTraceEnabled()) {
        LOG.trace("Logging metrics to phoenix table via: " + stmt);
        LOG.trace("With tags: " + variableValues);
    }
    try {
        PreparedStatement ps = conn.prepareStatement(stmt);
        // bind everything that wouldn't/may not parse as part of the statement text
        int index = 1;
        for (String tag : variableValues) {
            ps.setString(index++, tag);
        }

        // Not going through the standard route of using statement.execute() as that code path
        // is blocked if the metadata hasn't been upgraded to the new minor release.
        MutationPlan plan = ps.unwrap(PhoenixPreparedStatement.class).compileMutation(stmt);
        MutationState state = conn.unwrap(PhoenixConnection.class).getMutationState();
        MutationState newState = plan.execute();
        state.join(newState);
    } catch (SQLException e) {
        LOG.error("Could not write metric: \n" + record + " to prepared statement:\n" + stmt, e);
    }
}
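For context, putMetrics is only invoked once the sink has been registered with the Hadoop metrics2 system. Below is a minimal sketch of doing that programmatically; Phoenix normally wires this up through its own tracing configuration, so the prefix, sink name, description, and the assumption of a no-arg PhoenixMetricsSink constructor are all illustrative, not the project's actual bootstrap code:

import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;

// hypothetical wiring: once registered, the metrics system periodically delivers
// MetricsRecords to putMetrics() on this sink
MetricsSystem system = DefaultMetricsSystem.initialize("phoenix");
system.register("phoenix-trace-sink", "Writes Phoenix trace spans to the stats table",
        new PhoenixMetricsSink());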
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.
The class AzureBlobStorageTestAccount, method getLatestMetricValue.
public Number getLatestMetricValue(String metricName, Number defaultValue)
        throws IndexOutOfBoundsException {
    boolean found = false;
    Number ret = null;
    // scan every record (not just the first match) so that a later record overwrites an
    // earlier one; the value returned is therefore the latest one reported
    for (MetricsRecord currentRecord : allMetrics) {
        // First check if this record is coming from my file system.
        if (wasGeneratedByMe(currentRecord)) {
            for (AbstractMetric currentMetric : currentRecord.metrics()) {
                if (currentMetric.name().equalsIgnoreCase(metricName)) {
                    found = true;
                    ret = currentMetric.value();
                    break;
                }
            }
        }
    }
    if (!found) {
        if (defaultValue != null) {
            return defaultValue;
        }
        throw new IndexOutOfBoundsException(metricName);
    }
    return ret;
}
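A usage sketch, assuming a test holds the account in a variable named testAccount; the metric name is a hypothetical example rather than one guaranteed to be emitted by the WASB instrumentation:

// fall back to 0 if the metric has never been reported for this account
Number bytesWritten = testAccount.getLatestMetricValue("wasb_bytes_written", 0L);

// with a null default, a missing metric surfaces as an IndexOutOfBoundsException instead
Number mustExist = testAccount.getLatestMetricValue("wasb_bytes_written", null);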