Example usage of com.codahale.metrics.Timer.Context in the HubSpot Singularity project:
the uploadBatch method of the SingularityUploader class.
/**
 * Attempts to upload and then delete each file in {@code toUpload}, recording
 * per-file upload latency and success/error counts in {@code metrics}.
 * Files that appear to be open by another process are skipped and retried later.
 *
 * @param toUpload files to upload
 * @return the number of items that were considered (the input size), matching the
 *         original contract. NOTE(review): this is NOT the success count even though
 *         {@code success} is tracked — confirm callers really want the batch size.
 */
int uploadBatch(List<Path> toUpload) {
  final long start = System.currentTimeMillis();
  LOG.info("{} Uploading {} item(s)", logIdentifier, toUpload.size());
  int success = 0;
  for (int i = 0; i < toUpload.size(); i++) {
    final Path file = toUpload.get(i);
    if (configuration.isCheckForOpenFiles() && isFileOpen(file)) {
      LOG.info("{} is in use by another process, will retry upload later", file);
      continue;
    }
    // Start the timer only once we actually attempt the upload. Previously the
    // Context was created before the open-file check, so skipped files leaked a
    // started-but-never-stopped timer context.
    final Context context = metrics.getUploadTimer().time();
    try {
      uploadSingle(i, file);
      metrics.upload();
      success++;
      // Delete only after a successful upload; a failure above skips the delete.
      Files.delete(file);
    } catch (RetryException re) {
      metrics.error();
      LOG.warn("{} Couldn't upload or delete {}", logIdentifier, file, re);
      exceptionNotifier.notify(String.format("%s exception during upload", re.getCause().getClass()), re.getCause(), ImmutableMap.of("logIdentifier", logIdentifier, "file", file.toString(), "failedAttempts", Integer.toString(re.getNumberOfFailedAttempts())));
    } catch (Exception e) {
      metrics.error();
      LOG.warn("{} Couldn't upload or delete {}", logIdentifier, file, e);
      exceptionNotifier.notify(String.format("Error during upload (%s)", e.getMessage()), e, ImmutableMap.of("logIdentifier", logIdentifier, "file", file.toString()));
    } finally {
      context.stop();
    }
  }
  LOG.info("{} Uploaded {} out of {} item(s) in {}", logIdentifier, success, toUpload.size(), JavaUtils.duration(start));
  return toUpload.size();
}
Example usage of com.codahale.metrics.Timer.Context in the Srotya sidewinder project:
the query method of the GrafanaQueryApi class.
@Path("/query")
@POST
@Produces({ MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_JSON })
public List<Target> query(@PathParam(DatabaseOpsApi.DB_NAME) String dbName, String query) throws ParseException {
	grafanaQueryCounter.mark();
	Context time = grafanaQueryLatency.time();
	// Wrap the whole body in try/finally so the latency timer is stopped on every
	// exit path. Previously time.stop() ran only before the final return, leaking
	// the context on the early return (bad targets) and on thrown exceptions
	// (ParseException, InternalServerErrorException).
	try {
		Gson gson = new GsonBuilder().setPrettyPrinting().create();
		logger.log(Level.FINE, () -> "Grafana query:" + dbName + "\t" + gson.toJson(gson.fromJson(query, JsonObject.class)));
		JsonObject json = gson.fromJson(query, JsonObject.class);
		// Grafana sends ISO-8601 UTC timestamps; a fresh SimpleDateFormat per call
		// avoids its well-known thread-safety issues.
		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
		JsonObject range = json.get("range").getAsJsonObject();
		long startTs = sdf.parse(range.get("from").getAsString()).getTime();
		long endTs = sdf.parse(range.get("to").getAsString()).getTime();
		// Shift into the server's local zone offset before querying the engine.
		startTs = tz.getOffset(startTs) + startTs;
		endTs = tz.getOffset(endTs) + endTs;
		List<TargetSeries> targetSeries = new ArrayList<>();
		List<Target> output = new ArrayList<>();
		try {
			GrafanaUtils.extractTargetsFromJson(json, targetSeries);
		} catch (IllegalArgumentException e) {
			// No valid targets in the request — return an empty result set.
			return output;
		}
		logger.log(Level.FINE, "Extracted targets from query json, target count:" + targetSeries.size() + " " + new Date(startTs));
		for (TargetSeries targetSeriesEntry : targetSeries) {
			logger.log(Level.FINE, () -> "Running grafana query fetch for:" + targetSeriesEntry);
			try {
				GrafanaUtils.queryAndGetData(engine, dbName, startTs, endTs, output, targetSeriesEntry);
			} catch (IOException e) {
				throw new InternalServerErrorException(e);
			}
		}
		return output;
	} finally {
		time.stop();
	}
}
Example usage of com.codahale.metrics.Timer.Context in the Wikimedia wikidata-query-rdf project:
the fetchRecentChanges method of the RecentChangesPoller class.
/**
 * Fetch recent changes from Wikibase, timing the request.
 * If we're close to current time, we back off a bit from last timestamp,
 * and fetch by timestamp. If it's back in the past, we fetch by continuation.
 *
 * @param lastNextStartTime timestamp to resume from
 * @param lastBatch the previously fetched batch, used for continuation
 * @return the recent-change response from the API
 * @throws RetryableException on fetch failure
 */
private RecentChangeResponse fetchRecentChanges(Instant lastNextStartTime, Batch lastBatch) throws RetryableException {
    final Context fetchTimer = recentChangesTimer.time();
    try {
        final RecentChangeResponse response = doFetchRecentChanges(lastNextStartTime, lastBatch);
        recentChangesCounter.inc(response.getQuery().getRecentChanges().size());
        return response;
    } finally {
        // Context.close() just delegates to stop(), so an explicit finally is
        // equivalent to the try-with-resources form.
        fetchTimer.stop();
    }
}
Example usage of com.codahale.metrics.Timer.Context in the OpenNMS newts project:
the run method of the FileImport class.
@Override
public void run() {
    String line;
    try {
        // Throw away the first line (header).
        m_reader.readLine();
        while ((line = m_reader.readLine()) != null) {
            try {
                List<Sample> samples = m_lineParser.parseLine(line);
                // Time only the repository insert, not the parsing.
                Context timerCtx = m_writeTimer.time();
                try {
                    m_repository.insert(samples);
                } finally {
                    timerCtx.stop();
                }
                m_numRows.inc();
                // NOTE(review): hardcoded 10 — presumably each GSOD line yields 10
                // samples; confirm against the line parser, else use samples.size().
                m_numSamples.inc(10);
            } catch (ParseException e) {
                // Include the exception so the failure cause isn't lost.
                LOG.error("Unable to parse date from line '{}'", line, e);
            }
        }
    } catch (IOException e) {
        // Fixed malformed SLF4J placeholder "{]"; the trailing throwable is
        // logged with its stack trace.
        LOG.error("Error reading GSOD data file", e);
    }
}
Example usage of com.codahale.metrics.Timer.Context in the Dropwizard metrics project:
the removeZeroWeightsInSamplesToPreventNaNInMeanValues test of the ExponentiallyDecayingReservoirTest class.
@Test
public void removeZeroWeightsInSamplesToPreventNaNInMeanValues() {
    final ManualClock manualClock = new ManualClock();
    final Reservoir decayingReservoir = reservoirFactory.create(1028, 0.015, manualClock);
    final Timer timer = new Timer(decayingReservoir, manualClock);

    // Record a single 100ms timing.
    final Context timing = timer.time();
    manualClock.addMillis(100);
    timing.stop();

    // Advance the clock one hour at a time for 47 hours; the decayed mean must
    // stay a finite non-negative number (never NaN) as sample weights shrink.
    int hour = 1;
    while (hour < 48) {
        manualClock.addHours(1);
        assertThat(decayingReservoir.getSnapshot().getMean()).isBetween(0.0, Double.MAX_VALUE);
        hour++;
    }
}
Aggregations