Use of org.apache.flink.metrics.Counter in project flink by apache.
From the class MetricQueryService, method onReceive():
@Override
public void onReceive(Object message) {
    try {
        if (message instanceof AddMetric) {
            AddMetric added = (AddMetric) message;
            String metricName = added.metricName;
            Metric metric = added.metric;
            AbstractMetricGroup group = added.group;
            QueryScopeInfo info = group.getQueryServiceMetricInfo(FILTER);
            if (metric instanceof Counter) {
                counters.put((Counter) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName)));
            } else if (metric instanceof Gauge) {
                gauges.put((Gauge<?>) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName)));
            } else if (metric instanceof Histogram) {
                histograms.put((Histogram) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName)));
            } else if (metric instanceof Meter) {
                meters.put((Meter) metric, new Tuple2<>(info, FILTER.filterCharacters(metricName)));
            }
        } else if (message instanceof RemoveMetric) {
            Metric metric = ((RemoveMetric) message).metric;
            if (metric instanceof Counter) {
                this.counters.remove(metric);
            } else if (metric instanceof Gauge) {
                this.gauges.remove(metric);
            } else if (metric instanceof Histogram) {
                this.histograms.remove(metric);
            } else if (metric instanceof Meter) {
                this.meters.remove(metric);
            }
        } else if (message instanceof CreateDump) {
            MetricDumpSerialization.MetricSerializationResult dump = serializer.serialize(counters, gauges, histograms, meters);
            getSender().tell(dump, getSelf());
        } else {
            LOG.warn("MetricQueryServiceActor received an invalid message. " + message.toString());
            getSender().tell(new Status.Failure(new IOException("MetricQueryServiceActor received an invalid message. " + message.toString())), getSelf());
        }
    } catch (Exception e) {
        LOG.warn("An exception occurred while processing a message.", e);
    }
}
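How does a Counter reach this service in the first place? A minimal sketch of the registration path, assuming a hypothetical user function named CountingMapper (the metric APIs themselves, RichMapFunction and MetricGroup.counter(), are standard Flink): registering the counter in open() causes the metric registry to deliver an AddMetric message like the one handled above.

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.Counter;

// Hypothetical user function; registering the counter in open() eventually
// leads to an AddMetric message arriving at the MetricQueryService.
public class CountingMapper extends RichMapFunction<String, String> {

    private transient Counter recordCounter;

    @Override
    public void open(Configuration parameters) {
        this.recordCounter = getRuntimeContext().getMetricGroup().counter("records");
    }

    @Override
    public String map(String value) {
        recordCounter.inc();
        return value;
    }
}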
Use of org.apache.flink.metrics.Counter in project flink by apache.
From the class AbstractCachedBuildSideJoinDriver, method initialize():
@Override
public void initialize() throws Exception {
    TaskConfig config = this.taskContext.getTaskConfig();
    final Counter numRecordsIn = taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
    TypeSerializer<IT1> serializer1 = this.taskContext.<IT1>getInputSerializer(0).getSerializer();
    TypeSerializer<IT2> serializer2 = this.taskContext.<IT2>getInputSerializer(1).getSerializer();
    TypeComparator<IT1> comparator1 = this.taskContext.getDriverComparator(0);
    TypeComparator<IT2> comparator2 = this.taskContext.getDriverComparator(1);
    MutableObjectIterator<IT1> input1 = new CountingMutableObjectIterator<>(this.taskContext.<IT1>getInput(0), numRecordsIn);
    MutableObjectIterator<IT2> input2 = new CountingMutableObjectIterator<>(this.taskContext.<IT2>getInput(1), numRecordsIn);
    TypePairComparatorFactory<IT1, IT2> pairComparatorFactory = this.taskContext.getTaskConfig().getPairComparatorFactory(this.taskContext.getUserCodeClassLoader());
    double availableMemory = config.getRelativeMemoryDriver();
    boolean hashJoinUseBitMaps = taskContext.getTaskManagerInfo().getConfiguration().getBoolean(ConfigConstants.RUNTIME_HASH_JOIN_BLOOM_FILTERS_KEY, ConfigConstants.DEFAULT_RUNTIME_HASH_JOIN_BLOOM_FILTERS);
    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    objectReuseEnabled = executionConfig.isObjectReuseEnabled();
    if (objectReuseEnabled) {
        if (buildSideIndex == 0 && probeSideIndex == 1) {
            matchIterator = new ReusingBuildFirstReOpenableHashJoinIterator<IT1, IT2, OT>(
                    input1, input2, serializer1, comparator1, serializer2, comparator2,
                    pairComparatorFactory.createComparator21(comparator1, comparator2),
                    this.taskContext.getMemoryManager(), this.taskContext.getIOManager(),
                    this.taskContext.getContainingTask(), availableMemory, false, false, hashJoinUseBitMaps);
        } else if (buildSideIndex == 1 && probeSideIndex == 0) {
            matchIterator = new ReusingBuildSecondReOpenableHashJoinIterator<IT1, IT2, OT>(
                    input1, input2, serializer1, comparator1, serializer2, comparator2,
                    pairComparatorFactory.createComparator12(comparator1, comparator2),
                    this.taskContext.getMemoryManager(), this.taskContext.getIOManager(),
                    this.taskContext.getContainingTask(), availableMemory, false, false, hashJoinUseBitMaps);
        } else {
            throw new Exception("Error: Inconsistent setup for repeatable hash join driver.");
        }
    } else {
        if (buildSideIndex == 0 && probeSideIndex == 1) {
            matchIterator = new NonReusingBuildFirstReOpenableHashJoinIterator<IT1, IT2, OT>(
                    input1, input2, serializer1, comparator1, serializer2, comparator2,
                    pairComparatorFactory.createComparator21(comparator1, comparator2),
                    this.taskContext.getMemoryManager(), this.taskContext.getIOManager(),
                    this.taskContext.getContainingTask(), availableMemory, false, false, hashJoinUseBitMaps);
        } else if (buildSideIndex == 1 && probeSideIndex == 0) {
            matchIterator = new NonReusingBuildSecondReOpenableHashJoinIterator<IT1, IT2, OT>(
                    input1, input2, serializer1, comparator1, serializer2, comparator2,
                    pairComparatorFactory.createComparator12(comparator1, comparator2),
                    this.taskContext.getMemoryManager(), this.taskContext.getIOManager(),
                    this.taskContext.getContainingTask(), availableMemory, false, false, hashJoinUseBitMaps);
        } else {
            throw new Exception("Error: Inconsistent setup for repeatable hash join driver.");
        }
    }
    this.matchIterator.open();
}
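Both inputs above are wrapped in a CountingMutableObjectIterator so that every record the hash join pulls is reflected in numRecordsIn. A simplified sketch of what such a wrapper does (not the actual Flink source, which lives in org.apache.flink.runtime.operators.util.metrics):

import java.io.IOException;

import org.apache.flink.metrics.Counter;
import org.apache.flink.util.MutableObjectIterator;

// Decorator that delegates to the underlying iterator and bumps the
// counter once per record actually returned.
public class CountingIteratorSketch<T> implements MutableObjectIterator<T> {

    private final MutableObjectIterator<T> iterator;
    private final Counter numRecordsIn;

    public CountingIteratorSketch(MutableObjectIterator<T> iterator, Counter numRecordsIn) {
        this.iterator = iterator;
        this.numRecordsIn = numRecordsIn;
    }

    @Override
    public T next(T reuse) throws IOException {
        T record = iterator.next(reuse);
        if (record != null) {
            numRecordsIn.inc();
        }
        return record;
    }

    @Override
    public T next() throws IOException {
        T record = iterator.next();
        if (record != null) {
            numRecordsIn.inc();
        }
        return record;
    }
}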
Use of org.apache.flink.metrics.Counter in project flink by apache.
From the class AbstractOuterJoinDriver, method run():
@Override
public void run() throws Exception {
    final Counter numRecordsOut = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
    final FlatJoinFunction<IT1, IT2, OT> joinStub = this.taskContext.getStub();
    final Collector<OT> collector = new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
    final JoinTaskIterator<IT1, IT2, OT> outerJoinIterator = this.outerJoinIterator;
    // The loop body is intentionally empty: callWithNextKey() advances the join
    // and emits results through the counting collector as a side effect.
    while (this.running && outerJoinIterator.callWithNextKey(joinStub, collector)) ;
}
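CountingCollector applies the same decorator idea on the output side: every emitted record bumps numRecordsOut before being forwarded downstream. A simplified sketch (again, not the actual Flink source):

import org.apache.flink.metrics.Counter;
import org.apache.flink.util.Collector;

// Decorator that counts each record before handing it to the wrapped collector.
public class CountingCollectorSketch<OUT> implements Collector<OUT> {

    private final Collector<OUT> collector;
    private final Counter numRecordsOut;

    public CountingCollectorSketch(Collector<OUT> collector, Counter numRecordsOut) {
        this.collector = collector;
        this.numRecordsOut = numRecordsOut;
    }

    @Override
    public void collect(OUT record) {
        numRecordsOut.inc();
        collector.collect(record);
    }

    @Override
    public void close() {
        collector.close();
    }
}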
Use of org.apache.flink.metrics.Counter in project flink by apache.
From the class AllGroupCombineDriver, method run():
@Override
public void run() throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("AllGroupCombine starting.");
    }
    final Counter numRecordsIn = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
    final Counter numRecordsOut = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
    final TypeSerializerFactory<IN> serializerFactory = this.taskContext.getInputSerializer(0);
    TypeSerializer<IN> serializer = serializerFactory.getSerializer();
    final MutableObjectIterator<IN> in = new CountingMutableObjectIterator<>(this.taskContext.<IN>getInput(0), numRecordsIn);
    final GroupCombineFunction<IN, OUT> reducer = this.taskContext.getStub();
    final Collector<OUT> output = new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
    if (objectReuseEnabled) {
        final ReusingMutableToRegularIteratorWrapper<IN> inIter = new ReusingMutableToRegularIteratorWrapper<IN>(in, serializer);
        if (inIter.hasNext()) {
            reducer.combine(inIter, output);
        }
    } else {
        final NonReusingMutableToRegularIteratorWrapper<IN> inIter = new NonReusingMutableToRegularIteratorWrapper<IN>(in, serializer);
        if (inIter.hasNext()) {
            reducer.combine(inIter, output);
        }
    }
}
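The driver hands the wrapped iterator straight to the user's GroupCombineFunction. A hypothetical combiner this driver could execute, pre-summing integers on the local partition before the final reduce:

import org.apache.flink.api.common.functions.GroupCombineFunction;
import org.apache.flink.util.Collector;

// Hypothetical combine function: locally pre-aggregates a sum so that fewer
// records have to be shipped to the downstream reducer.
public class SumCombiner implements GroupCombineFunction<Integer, Integer> {

    @Override
    public void combine(Iterable<Integer> values, Collector<Integer> out) {
        int sum = 0;
        for (int value : values) {
            sum += value;
        }
        out.collect(sum);
    }
}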
Use of org.apache.flink.metrics.Counter in project flink by apache.
From the class AllReduceDriver, method run():
@Override
public void run() throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug(this.taskContext.formatLogString("AllReduce preprocessing done. Running Reducer code."));
    }
    final Counter numRecordsIn = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
    final Counter numRecordsOut = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsOutCounter();
    final ReduceFunction<T> stub = this.taskContext.getStub();
    final MutableObjectIterator<T> input = this.input;
    final TypeSerializer<T> serializer = this.serializer;
    final Collector<T> collector = new CountingCollector<>(this.taskContext.getOutputCollector(), numRecordsOut);
    T val1;
    if ((val1 = input.next()) == null) {
        return;
    }
    numRecordsIn.inc();
    if (objectReuseEnabled) {
        // We only need two objects. The first reference stores results and is
        // eventually collected. New values are read into the second.
        T val2 = serializer.createInstance();
        T value = val1;
        while (running && (val2 = input.next(val2)) != null) {
            numRecordsIn.inc();
            value = stub.reduce(value, val2);
            // The result may be the read-into object (val2), returned as-is
            // by the user, so swap the reuse objects to keep the next read
            // from overwriting the accumulated result.
            if (value == val2) {
                T tmp = val1;
                val1 = val2;
                val2 = tmp;
            }
        }
        collector.collect(value);
    } else {
        T val2;
        while (running && (val2 = input.next()) != null) {
            numRecordsIn.inc();
            val1 = stub.reduce(val1, val2);
        }
        collector.collect(val1);
    }
}
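To see why the swap in the object-reuse branch matters, consider a hypothetical ReduceFunction over a mutable accumulator (the names SumReducer and Acc are illustrative). If reduce() returns its second argument, the result lives in the object the driver is about to read into; swapping val1 and val2 keeps the next input.next(val2) call from overwriting it.

import org.apache.flink.api.common.functions.ReduceFunction;

// Hypothetical mutable accumulator and reuse-friendly reducer.
public class SumReducer implements ReduceFunction<SumReducer.Acc> {

    public static class Acc {
        public long sum;
    }

    @Override
    public Acc reduce(Acc value1, Acc value2) throws Exception {
        // Folding into value1 and returning it avoids allocation; returning
        // value2 instead would trigger the driver's swap branch above.
        value1.sum += value2.sum;
        return value1;
    }
}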