Use of io.opencensus.common.Scope in project beam by Apache:
the class DataChangeRecordAction, method run.
/**
 * This is the main processing function for a {@link DataChangeRecord}. It returns an {@link
 * Optional} of {@link ProcessContinuation} to indicate if the calling function should stop or
 * not. If the {@link Optional} returned is empty, it means that the calling function can continue
 * with the processing. If an {@link Optional} of {@link ProcessContinuation#stop()} is returned,
 * it means that this function was unable to claim the timestamp of the {@link DataChangeRecord},
 * so the caller should stop.
 *
 * <p>When processing the {@link DataChangeRecord} the following procedure is applied:
 *
 * <ol>
 *   <li>We try to claim the data change record commit timestamp. If it is not possible, we stop
 *       here and return.
 *   <li>We emit the data change record through the {@link OutputReceiver}.
 *   <li>We update the watermark to the data change record commit timestamp.
 * </ol>
 *
 * @param partition the current partition being processed
 * @param record the change stream data record received
 * @param tracker the restriction tracker of the {@link
 *     org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.ReadChangeStreamPartitionDoFn} SDF
 * @param outputReceiver the output receiver of the {@link
 *     org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.ReadChangeStreamPartitionDoFn} SDF
 * @param watermarkEstimator the watermark estimator of the {@link
 *     org.apache.beam.sdk.io.gcp.spanner.changestreams.dofn.ReadChangeStreamPartitionDoFn} SDF
 * @return {@link Optional#empty()} if the caller can continue processing more records. A non
 *     empty {@link Optional} with {@link ProcessContinuation#stop()} if this function was unable
 *     to claim the {@link DataChangeRecord} timestamp
 */
@VisibleForTesting
public Optional<ProcessContinuation> run(
    PartitionMetadata partition,
    DataChangeRecord record,
    RestrictionTracker<OffsetRange, Long> tracker,
    OutputReceiver<DataChangeRecord> outputReceiver,
    ManualWatermarkEstimator<Instant> watermarkEstimator) {
  try (Scope scope =
      TRACER.spanBuilder("DataChangeRecordAction").setRecordEvents(true).startScopedSpan()) {
    TRACER
        .getCurrentSpan()
        .putAttribute(
            PARTITION_ID_ATTRIBUTE_LABEL,
            AttributeValue.stringAttributeValue(partition.getPartitionToken()));
    final String token = partition.getPartitionToken();
    // Parameterized logging avoids the string concatenation cost when debug is disabled.
    LOG.debug("[{}] Processing data record {}", token, record.getCommitTimestamp());
    final Timestamp commitTimestamp = record.getCommitTimestamp();
    final Instant commitInstant = new Instant(commitTimestamp.toSqlTimestamp().getTime());
    final long commitMicros = TimestampConverter.timestampToMicros(commitTimestamp);
    // The restriction is an offset range over commit timestamps in microseconds; if the claim
    // fails, the runner has decided this restriction is done and the caller must stop.
    if (!tracker.tryClaim(commitMicros)) {
      LOG.debug("[{}] Could not claim queryChangeStream({}), stopping", token, commitTimestamp);
      return Optional.of(ProcessContinuation.stop());
    }
    // Emit the record at its commit timestamp, then advance the watermark to the same instant.
    outputReceiver.outputWithTimestamp(record, commitInstant);
    watermarkEstimator.setWatermark(commitInstant);
    LOG.debug("[{}] Data record action completed successfully", token);
    return Optional.empty();
  }
}
Use of io.opencensus.common.Scope in project beam by Apache:
the class HeartbeatRecordAction, method run.
/**
 * This is the main processing function for a {@link HeartbeatRecord}. It returns an {@link
 * Optional} of {@link ProcessContinuation} to indicate if the calling function should stop or
 * not. If the {@link Optional} returned is empty, it means that the calling function can continue
 * with the processing. If an {@link Optional} of {@link ProcessContinuation#stop()} is returned,
 * it means that this function was unable to claim the timestamp of the {@link HeartbeatRecord},
 * so the caller should stop.
 *
 * <p>When processing the {@link HeartbeatRecord} the following procedure is applied:
 *
 * <ol>
 *   <li>We try to claim the heartbeat record timestamp. If it is not possible, we stop here and
 *       return.
 *   <li>We update the necessary metrics.
 *   <li>We update the watermark to the heartbeat record timestamp.
 * </ol>
 *
 * @param partition the current partition being processed
 * @param record the heartbeat record received
 * @param tracker the restriction tracker of the SDF processing this partition
 * @param watermarkEstimator the watermark estimator of the SDF processing this partition
 * @return {@link Optional#empty()} if the caller can continue processing more records. A non
 *     empty {@link Optional} with {@link ProcessContinuation#stop()} if this function was unable
 *     to claim the {@link HeartbeatRecord} timestamp
 */
@VisibleForTesting
public Optional<ProcessContinuation> run(
    PartitionMetadata partition,
    HeartbeatRecord record,
    RestrictionTracker<OffsetRange, Long> tracker,
    ManualWatermarkEstimator<Instant> watermarkEstimator) {
  try (Scope scope =
      TRACER.spanBuilder("HeartbeatRecordAction").setRecordEvents(true).startScopedSpan()) {
    TRACER
        .getCurrentSpan()
        .putAttribute(
            PARTITION_ID_ATTRIBUTE_LABEL,
            AttributeValue.stringAttributeValue(partition.getPartitionToken()));
    final String token = partition.getPartitionToken();
    // Parameterized logging avoids the string concatenation cost when debug is disabled.
    LOG.debug("[{}] Processing heartbeat record {}", token, record);
    final Timestamp timestamp = record.getTimestamp();
    final Instant timestampInstant = new Instant(timestamp.toSqlTimestamp().getTime());
    final long timestampMicros = TimestampConverter.timestampToMicros(timestamp);
    // The restriction is an offset range over timestamps in microseconds; if the claim fails,
    // the runner has decided this restriction is done and the caller must stop.
    if (!tracker.tryClaim(timestampMicros)) {
      LOG.debug("[{}] Could not claim queryChangeStream({}), stopping", token, timestamp);
      return Optional.of(ProcessContinuation.stop());
    }
    // Heartbeats carry no data: just count them and advance the watermark.
    metrics.incHeartbeatRecordCount();
    watermarkEstimator.setWatermark(timestampInstant);
    LOG.debug("[{}] Heartbeat record action completed successfully", token);
    return Optional.empty();
  }
}
Use of io.opencensus.common.Scope in project ignite by Apache:
the class OpenCensusMetricExporterSpi, method export.
/**
 * {@inheritDoc}
 *
 * <p>Walks every registered metric registry, converts each supported metric value into an
 * OpenCensus measure, and records all of them in a single {@link MeasureMap} published once at
 * the end. Negative values are skipped because OpenCensus measures reject them.
 */
@Override
public void export() {
StatsRecorder recorder = Stats.getStatsRecorder();
// NOTE(review): tagScope() presumably attaches exporter-wide tags to every measurement
// recorded inside this block — confirm in tagScope()'s implementation.
try (Scope globalScope = tagScope()) {
MeasureMap mmap = recorder.newMeasureMap();
mreg.forEach(mreg -> {
// Optional user-supplied filter: skip whole registries that do not match.
if (filter != null && !filter.test(mreg))
return;
mreg.forEach(metric -> {
// Integral-valued metrics (long/int/boolean, plus Date and OffsetDateTime object
// metrics, which are exported as epoch milliseconds) all map to a long measure.
if (metric instanceof LongMetric || metric instanceof IntMetric || metric instanceof BooleanMetric || (metric instanceof ObjectMetric && ((ObjectMetric) metric).type() == Date.class) || (metric instanceof ObjectMetric && ((ObjectMetric) metric).type() == OffsetDateTime.class)) {
long val;
if (metric instanceof LongMetric)
val = ((LongMetric) metric).value();
else if (metric instanceof IntMetric)
val = ((IntMetric) metric).value();
else if (metric instanceof BooleanMetric)
// Booleans are exported as 0/1.
val = ((BooleanMetric) metric).value() ? 1 : 0;
else if (metric instanceof ObjectMetric && ((ObjectMetric) metric).type() == Date.class)
val = ((ObjectMetric<Date>) metric).value().getTime();
else
val = ((ObjectMetric<OffsetDateTime>) metric).value().toInstant().toEpochMilli();
if (val < 0) {
if (log.isDebugEnabled())
log.debug("OpenCensus doesn't support negative values. Skip record of " + metric.name());
return;
}
// Measures are created lazily and cached by metric name.
MeasureLong msr = (MeasureLong) measures.computeIfAbsent(metric.name(), k -> createMeasure(metric, CREATE_LONG));
mmap.put(msr, val);
} else if (metric instanceof DoubleMetric) {
double val = ((DoubleMetric) metric).value();
if (val < 0) {
if (log.isDebugEnabled())
log.debug("OpenCensus doesn't support negative values. Skip record of " + metric.name());
return;
}
MeasureDouble msr = (MeasureDouble) measures.computeIfAbsent(metric.name(), k -> createMeasure(metric, CREATE_DOUBLE));
mmap.put(msr, val);
} else if (metric instanceof HistogramMetric) {
// Histograms are flattened: one long measure per bucket, named per bucket.
String[] names = histogramBucketNames((HistogramMetric) metric);
long[] vals = ((HistogramMetric) metric).value();
assert names.length == vals.length;
for (int i = 0; i < vals.length; i++) {
String name = names[i];
MeasureLong msr = (MeasureLong) measures.computeIfAbsent(name, k -> createMeasureLong(name, metric.description()));
mmap.put(msr, vals[i]);
}
} else if (log.isDebugEnabled()) {
// Any other metric type is silently unsupported (logged at debug only).
log.debug(metric.name() + "[" + metric.getClass() + "] not supported by Opencensus exporter");
}
});
});
// Publish all accumulated measurements in one shot while the tag scope is still open.
mmap.record();
}
}
Use of io.opencensus.common.Scope in project ignite by Apache:
the class TimeLimitedHandler, method export.
/**
 * {@inheritDoc}
 *
 * <p>Runs {@code timeLimitedExport} for the given spans under the configured {@code deadline}.
 * Any timeout, interruption, or other failure is reported through {@code handleException}
 * rather than propagated.
 */
@Override
public void export(final Collection<SpanData> spanDataList) {
    // The executor backing the time limiter must be shut down, otherwise every call to
    // export() leaks a live (non-daemon) thread.
    final java.util.concurrent.ExecutorService executor =
        Executors.newSingleThreadExecutor();
    // Scope is AutoCloseable: try-with-resources replaces the manual finally/close.
    try (Scope exportScope = newExportScope()) {
        TimeLimiter timeLimiter = SimpleTimeLimiter.create(executor);
        timeLimiter.callWithTimeout(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                timeLimitedExport(spanDataList);
                return null;
            }
        }, deadline.toMillis(), TimeUnit.MILLISECONDS);
    } catch (TimeoutException e) {
        handleException(e, "Timeout when exporting traces: " + e);
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers further up the stack can observe it.
        Thread.currentThread().interrupt();
        handleException(e, "Interrupted when exporting traces: " + e);
    } catch (Exception e) {
        handleException(e, "Failed to export traces: " + e);
    } finally {
        executor.shutdownNow();
    }
}
Use of io.opencensus.common.Scope in project instrumentation-java by census-instrumentation:
the class TagContextBenchmark, method scopeTagContext.
/**
 * Benchmarks the cost of opening and immediately closing a tag context scope.
 */
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Scope scopeTagContext(Data data) {
    final Scope tagScope = data.tagger.withTagContext(data.tagContext);
    tagScope.close();
    // Returned so JMH treats the scope as a live result and cannot dead-code-eliminate it.
    return tagScope;
}
Aggregations