Use of org.apache.flink.metrics.Counter in project flink by apache.
The class KafkaSourceReaderMetricsTest, method testCommitOffsetTracking.
@Test
public void testCommitOffsetTracking() {
    MetricListener metricListener = new MetricListener();
    final KafkaSourceReaderMetrics kafkaSourceReaderMetrics =
            new KafkaSourceReaderMetrics(
                    InternalSourceReaderMetricGroup.mock(metricListener.getMetricGroup()));
    kafkaSourceReaderMetrics.registerTopicPartition(FOO_0);
    kafkaSourceReaderMetrics.registerTopicPartition(FOO_1);
    kafkaSourceReaderMetrics.registerTopicPartition(BAR_0);
    kafkaSourceReaderMetrics.registerTopicPartition(BAR_1);
    kafkaSourceReaderMetrics.recordCommittedOffset(FOO_0, 15213L);
    kafkaSourceReaderMetrics.recordCommittedOffset(FOO_1, 18213L);
    kafkaSourceReaderMetrics.recordCommittedOffset(BAR_0, 18613L);
    kafkaSourceReaderMetrics.recordCommittedOffset(BAR_1, 15513L);
    assertCommittedOffset(FOO_0, 15213L, metricListener);
    assertCommittedOffset(FOO_1, 18213L, metricListener);
    assertCommittedOffset(BAR_0, 18613L, metricListener);
    assertCommittedOffset(BAR_1, 15513L, metricListener);
    final Optional<Counter> commitsSucceededCounter =
            metricListener.getCounter(
                    KafkaSourceReaderMetrics.KAFKA_SOURCE_READER_METRIC_GROUP,
                    KafkaSourceReaderMetrics.COMMITS_SUCCEEDED_METRIC_COUNTER);
    assertTrue(commitsSucceededCounter.isPresent());
    assertEquals(0L, commitsSucceededCounter.get().getCount());
    kafkaSourceReaderMetrics.recordSucceededCommit();
    assertEquals(1L, commitsSucceededCounter.get().getCount());
}
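The counter read back through MetricListener here is a plain org.apache.flink.metrics.Counter. A minimal sketch of the interface this test exercises, assuming only flink-metrics-core (SimpleCounter is Flink's default implementation; the standalone main method is purely illustrative):

import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.SimpleCounter;

public class CounterSketch {
    public static void main(String[] args) {
        // SimpleCounter is the default Counter implementation in flink-metrics-core.
        Counter commitsSucceeded = new SimpleCounter();

        // A freshly created counter starts at zero, matching the first assertion above.
        System.out.println(commitsSucceeded.getCount()); // 0

        // The test observes recordSucceededCommit() as a single increment of this counter.
        commitsSucceeded.inc();
        System.out.println(commitsSucceeded.getCount()); // 1
    }
}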
Use of org.apache.flink.metrics.Counter in project flink by apache.
The class FlinkMetricContainer, method updateCounterOrMeter.
private void updateCounterOrMeter(Iterable<MetricResult<Long>> counters) {
    for (MetricResult<Long> metricResult : counters) {
        if (!isUserMetric(metricResult)) {
            continue;
        }
        // get identifier
        String flinkMetricIdentifier = getFlinkMetricIdentifierString(metricResult.getKey());
        // get metric type
        ArrayList<String> scopeComponents = getNameSpaceArray(metricResult.getKey());
        if ((scopeComponents.size() % 2) != 0) {
            Meter meter = flinkMeterCache.get(flinkMetricIdentifier);
            if (null == meter) {
                int timeSpanInSeconds =
                        Integer.parseInt(scopeComponents.get(scopeComponents.size() - 1));
                MetricGroup metricGroup = registerMetricGroup(metricResult.getKey(), baseMetricGroup);
                meter = metricGroup.meter(
                        metricResult.getKey().metricName().getName(),
                        new MeterView(timeSpanInSeconds));
                flinkMeterCache.put(flinkMetricIdentifier, meter);
            }
            Long update = metricResult.getAttempted();
            meter.markEvent(update - meter.getCount());
        } else {
            Counter counter = flinkCounterCache.get(flinkMetricIdentifier);
            if (null == counter) {
                MetricGroup metricGroup = registerMetricGroup(metricResult.getKey(), baseMetricGroup);
                counter = metricGroup.counter(metricResult.getKey().metricName().getName());
                flinkCounterCache.put(flinkMetricIdentifier, counter);
            }
            Long update = metricResult.getAttempted();
            counter.inc(update - counter.getCount());
        }
    }
}
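Note the update pattern: the method applies only the difference between the externally reported absolute value and the metric's current count, so re-delivering the same value is a no-op. A self-contained sketch of that delta idiom, assuming nothing beyond flink-metrics-core (SimpleCounter and MeterView are real Flink classes; the reported value is made up):

import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.Meter;
import org.apache.flink.metrics.MeterView;
import org.apache.flink.metrics.SimpleCounter;

public class DeltaUpdateSketch {
    public static void main(String[] args) {
        Counter counter = new SimpleCounter();
        Meter meter = new MeterView(60); // rate computed over a 60-second window

        long reported = 42L; // absolute value reported by the external system

        // Apply only the delta; a repeated update with the same value changes nothing.
        counter.inc(reported - counter.getCount());
        meter.markEvent(reported - meter.getCount());

        System.out.println(counter.getCount()); // 42
        System.out.println(meter.getCount());   // 42
    }
}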
Use of org.apache.flink.metrics.Counter in project flink by apache.
The class AbstractCachedBuildSideJoinDriver, method run.
@Override
public void run() throws Exception {
    final Counter numRecordsOut =
            taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
    final FlatJoinFunction<IT1, IT2, OT> matchStub = this.taskContext.getStub();
    final Collector<OT> collector =
            new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
    while (this.running && matchIterator != null && matchIterator.callWithNextKey(matchStub, collector)) {
    }
}
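CountingCollector wraps the task's output collector so that every emitted record also bumps numRecordsOut. A hypothetical minimal version of such a wrapper, only to show where the Counter is touched (Flink ships its own CountingCollector; this is not its source):

import org.apache.flink.metrics.Counter;
import org.apache.flink.util.Collector;

// Hypothetical sketch of a record-counting collector.
public class CountingCollectorSketch<T> implements Collector<T> {

    private final Collector<T> delegate;
    private final Counter numRecordsOut;

    public CountingCollectorSketch(Collector<T> delegate, Counter numRecordsOut) {
        this.delegate = delegate;
        this.numRecordsOut = numRecordsOut;
    }

    @Override
    public void collect(T record) {
        numRecordsOut.inc();      // one count per emitted record
        delegate.collect(record);
    }

    @Override
    public void close() {
        delegate.close();
    }
}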
Use of org.apache.flink.metrics.Counter in project flink by apache.
The class CrossDriver, method runBlockedOuterFirst.
private void runBlockedOuterFirst() throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug(this.taskContext.formatLogString(
                "Running Cross with Block-Nested-Loops: "
                        + "First input is outer (blocking) side, second input is inner (spilling) side."));
    }
    final Counter numRecordsIn =
            taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
    final Counter numRecordsOut =
            taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
    final MutableObjectIterator<T1> in1 =
            new CountingMutableObjectIterator<>(this.taskContext.<T1>getInput(0), numRecordsIn);
    final MutableObjectIterator<T2> in2 =
            new CountingMutableObjectIterator<>(this.taskContext.<T2>getInput(1), numRecordsIn);
    final TypeSerializer<T1> serializer1 = this.taskContext.<T1>getInputSerializer(0).getSerializer();
    final TypeSerializer<T2> serializer2 = this.taskContext.<T2>getInputSerializer(1).getSerializer();
    final BlockResettableMutableObjectIterator<T1> blockVals =
            new BlockResettableMutableObjectIterator<T1>(
                    this.memManager, in1, serializer1, this.memPagesForBlockSide,
                    this.taskContext.getContainingTask());
    this.blockIter = blockVals;
    final SpillingResettableMutableObjectIterator<T2> spillVals =
            new SpillingResettableMutableObjectIterator<T2>(
                    in2, serializer2, this.memManager, this.taskContext.getIOManager(),
                    this.memPagesForSpillingSide, this.taskContext.getContainingTask());
    this.spillIter = spillVals;
    final CrossFunction<T1, T2, OT> crosser = this.taskContext.getStub();
    final Collector<OT> collector =
            new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
    if (objectReuseEnabled) {
        final T1 val1Reuse = serializer1.createInstance();
        final T2 val2Reuse = serializer2.createInstance();
        T1 val1;
        T2 val2;
        // for all blocks
        do {
            // for all values from the spilling side
            while (this.running && ((val2 = spillVals.next(val2Reuse)) != null)) {
                // for all values in the block
                while ((val1 = blockVals.next(val1Reuse)) != null) {
                    collector.collect(crosser.cross(val1, val2));
                }
                blockVals.reset();
            }
            spillVals.reset();
        } while (this.running && blockVals.nextBlock());
    } else {
        T1 val1;
        T2 val2;
        // for all blocks
        do {
            // for all values from the spilling side
            while (this.running && ((val2 = spillVals.next()) != null)) {
                // for all values in the block
                while ((val1 = blockVals.next()) != null) {
                    collector.collect(crosser.cross(val1, serializer2.copy(val2)));
                }
                blockVals.reset();
            }
            spillVals.reset();
        } while (this.running && blockVals.nextBlock());
    }
}
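On the input side, both iterators are wrapped in CountingMutableObjectIterator so that every record pulled from either input increments the shared numRecordsIn counter. A hypothetical sketch of such a wrapper (Flink has its own CountingMutableObjectIterator; this only illustrates the pattern):

import java.io.IOException;

import org.apache.flink.metrics.Counter;
import org.apache.flink.util.MutableObjectIterator;

// Hypothetical sketch: count every record read from the wrapped iterator.
public class CountingIteratorSketch<T> implements MutableObjectIterator<T> {

    private final MutableObjectIterator<T> delegate;
    private final Counter numRecordsIn;

    public CountingIteratorSketch(MutableObjectIterator<T> delegate, Counter numRecordsIn) {
        this.delegate = delegate;
        this.numRecordsIn = numRecordsIn;
    }

    @Override
    public T next(T reuse) throws IOException {
        T record = delegate.next(reuse);
        if (record != null) {
            numRecordsIn.inc();
        }
        return record;
    }

    @Override
    public T next() throws IOException {
        T record = delegate.next();
        if (record != null) {
            numRecordsIn.inc();
        }
        return record;
    }
}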
Use of org.apache.flink.metrics.Counter in project flink by apache.
The class CrossDriver, method runStreamedOuterSecond.
private void runStreamedOuterSecond() throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug(this.taskContext.formatLogString(
                "Running Cross with Nested-Loops: "
                        + "First input is inner (spilling) side, second input is outer side."));
    }
    final Counter numRecordsIn =
            taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
    final Counter numRecordsOut =
            taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
    final MutableObjectIterator<T1> in1 =
            new CountingMutableObjectIterator<>(this.taskContext.<T1>getInput(0), numRecordsIn);
    final MutableObjectIterator<T2> in2 =
            new CountingMutableObjectIterator<>(this.taskContext.<T2>getInput(1), numRecordsIn);
    final TypeSerializer<T1> serializer1 = this.taskContext.<T1>getInputSerializer(0).getSerializer();
    final TypeSerializer<T2> serializer2 = this.taskContext.<T2>getInputSerializer(1).getSerializer();
    final SpillingResettableMutableObjectIterator<T1> spillVals =
            new SpillingResettableMutableObjectIterator<T1>(
                    in1, serializer1, this.memManager, this.taskContext.getIOManager(),
                    this.memPagesForSpillingSide, this.taskContext.getContainingTask());
    this.spillIter = spillVals;
    final CrossFunction<T1, T2, OT> crosser = this.taskContext.getStub();
    final Collector<OT> collector =
            new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
    if (objectReuseEnabled) {
        final T1 val1Reuse = serializer1.createInstance();
        final T2 val2Reuse = serializer2.createInstance();
        T1 val1;
        T2 val2;
        // for all records on the outer (streamed) side
        while (this.running && (val2 = in2.next(val2Reuse)) != null) {
            // for all values from the spilling side
            while (this.running && (val1 = spillVals.next(val1Reuse)) != null) {
                collector.collect(crosser.cross(val1, val2));
            }
            spillVals.reset();
        }
    } else {
        T1 val1;
        T2 val2;
        // for all records on the outer (streamed) side
        while (this.running && (val2 = in2.next()) != null) {
            // for all values from the spilling side
            while (this.running && (val1 = spillVals.next()) != null) {
                collector.collect(crosser.cross(val1, serializer2.copy(val2)));
            }
            spillVals.reset();
        }
    }
}
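Stripped of spilling and serialization, the streamed-outer variant is an ordinary nested loop: the outer side is read once, the inner side is rewound for every outer record, and the counters track records in and out. A toy illustration over in-memory lists, with SimpleCounter standing in for the I/O metrics (everything besides the Flink metric classes is made up):

import java.util.List;

import org.apache.flink.metrics.Counter;
import org.apache.flink.metrics.SimpleCounter;

public class NestedLoopCrossSketch {
    public static void main(String[] args) {
        List<String> spillingSide = List.of("a", "b", "c"); // first input: inner, re-read per outer record
        List<Integer> streamedSide = List.of(1, 2);         // second input: outer, read once

        Counter numRecordsOut = new SimpleCounter();

        for (Integer outer : streamedSide) {
            for (String inner : spillingSide) {
                String crossed = inner + outer;  // stands in for crosser.cross(val1, val2)
                numRecordsOut.inc();             // what CountingCollector does per emitted record
                System.out.println(crossed);
            }
        }

        System.out.println(numRecordsOut.getCount()); // 3 x 2 = 6 emitted pairs
    }
}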