Use of org.apache.beam.runners.dataflow.worker.TestOperationContext.TestDataflowExecutionState in project beam by Apache.
In the class GroupingShuffleEntryIteratorTest, the method setCurrentExecutionState:
private void setCurrentExecutionState(String mockOriginalName) {
DataflowExecutionState state =
    new TestDataflowExecutionState(
        NameContext.create(MOCK_STAGE_NAME, mockOriginalName, MOCK_SYSTEM_NAME, MOCK_USER_NAME),
        "activity");
tracker.enterState(state);
}
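The helper enters the "activity" state and never explicitly leaves it, which is fine here because the tracker is torn down with the test. When a state needs to be scoped, the usual pattern is try-with-resources; a minimal sketch, assuming enterState returns a Closeable scope in the same way context.enterStart() does in the examples further down this page (the name values are placeholders):

// Sketch only: scoped state entry, restored automatically when the block exits.
DataflowExecutionState state =
    new TestDataflowExecutionState(
        NameContext.create("stage", "originalStep", "systemStep", "userStep"), "activity");
try (Closeable scope = tracker.enterState(state)) {
  // Work done here is attributed to the "activity" state.
}
// Leaving the try block restores the previously active state.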
Use of org.apache.beam.runners.dataflow.worker.TestOperationContext.TestDataflowExecutionState in project beam by Apache.
In the class MapTaskExecutorTest, the method testPerElementProcessingTimeCounters:
/**
* Verify counts for the per-element-output-time counter are correct.
*/
@Test
public void testPerElementProcessingTimeCounters() throws Exception {
PipelineOptions options = PipelineOptionsFactory.create();
options.as(DataflowPipelineDebugOptions.class)
    .setExperiments(
        Lists.newArrayList(DataflowElementExecutionTracker.TIME_PER_ELEMENT_EXPERIMENT));
ExecutionStateSampler stateSampler = ExecutionStateSampler.newForTest();
DataflowExecutionStateTracker stateTracker =
    new DataflowExecutionStateTracker(
        stateSampler,
        new TestDataflowExecutionState(
            NameContext.forStage("test-stage"),
            "other",
            null /* requestingStepName */,
            null /* sideInputIndex */,
            null /* metricsContainer */,
            NoopProfileScope.NOOP),
        counterSet,
        options,
        "test-work-item-id");
NameContext parDoName = nameForStep("s1");
// Wire a read operation with 3 elements to a ParDoOperation and assert that we count
// the correct number of elements.
ReadOperation read = ReadOperation.forTest(new TestReader("a", "b", "c"), new OutputReceiver(),
    TestOperationContext.create(counterSet, nameForStep("s0"), null, stateTracker));
ParDoOperation parDo = new ParDoOperation(new NoopParDoFn(), new OutputReceiver[0],
    TestOperationContext.create(counterSet, parDoName, null, stateTracker));
parDo.attachInput(read, 0);
List<Operation> operations = Lists.newArrayList(read, parDo);
try (MapTaskExecutor executor = new MapTaskExecutor(operations, counterSet, stateTracker)) {
executor.execute();
}
stateSampler.doSampling(100L);
CounterName counterName =
    CounterName.named("per-element-processing-time").withOriginalName(parDoName);
Counter<Long, CounterDistribution> counter =
    (Counter<Long, CounterDistribution>) counterSet.getExistingCounter(counterName);
assertThat(counter.getAggregate().getCount(), equalTo(3L));
}
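The nameForStep helper used above is not shown on this page. A plausible sketch, assuming it simply derives the original, system, and user names of a NameContext from the step id (the exact naming scheme in the real test may differ):

// Hypothetical helper: builds a per-step NameContext for an id such as "s1".
private static NameContext nameForStep(String stepId) {
  return NameContext.create(
      "test-stage",           // stage name, matching the tracker above
      "original-" + stepId,   // original (system-assigned) step name
      "system-" + stepId,     // system name
      "user-" + stepId);      // user-visible step name
}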
Use of org.apache.beam.runners.dataflow.worker.TestOperationContext.TestDataflowExecutionState in project beam by Apache.
In the class IntrinsicMapTaskExecutorTest, the method testGetMetricContainers:
/**
 * This test makes sure that any metrics reported within an operation are part of the metric
 * containers returned by {@link getMetricContainers}.
 */
@Test
@SuppressWarnings("unchecked")
public void testGetMetricContainers() throws Exception {
ExecutionStateTracker stateTracker =
    new DataflowExecutionStateTracker(
        ExecutionStateSampler.newForTest(),
        new TestDataflowExecutionState(
            NameContext.forStage("testStage"),
            "other",
            null /* requestingStepName */,
            null /* sideInputIndex */,
            null /* metricsContainer */,
            NoopProfileScope.NOOP),
        new CounterSet(),
        PipelineOptionsFactory.create(),
        "test-work-item-id");
final String o1 = "o1";
TestOperationContext context1 = createContext(o1, stateTracker);
final String o2 = "o2";
TestOperationContext context2 = createContext(o2, stateTracker);
final String o3 = "o3";
TestOperationContext context3 = createContext(o3, stateTracker);
List<Operation> operations = Arrays.asList(new Operation(new OutputReceiver[] {}, context1) {
@Override
public void start() throws Exception {
super.start();
try (Closeable scope = context.enterStart()) {
Metrics.counter("TestMetric", "MetricCounter").inc(1L);
}
}
}, new Operation(new OutputReceiver[] {}, context2) {
@Override
public void start() throws Exception {
super.start();
try (Closeable scope = context.enterStart()) {
Metrics.counter("TestMetric", "MetricCounter").inc(2L);
}
}
}, new Operation(new OutputReceiver[] {}, context3) {
@Override
public void start() throws Exception {
super.start();
try (Closeable scope = context.enterStart()) {
Metrics.counter("TestMetric", "MetricCounter").inc(3L);
}
}
});
try (IntrinsicMapTaskExecutor executor = IntrinsicMapTaskExecutor.withSharedCounterSet(operations, counterSet, stateTracker)) {
// Call execute so that we run all the counters
executor.execute();
assertThat(
    context1.metricsContainer().getUpdates().counterUpdates(),
    contains(metricUpdate("TestMetric", "MetricCounter", o1, 1L)));
assertThat(
    context2.metricsContainer().getUpdates().counterUpdates(),
    contains(metricUpdate("TestMetric", "MetricCounter", o2, 2L)));
assertThat(
    context3.metricsContainer().getUpdates().counterUpdates(),
    contains(metricUpdate("TestMetric", "MetricCounter", o3, 3L)));
}
}
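The createContext helper is likewise test-local and not shown here. A hedged sketch, mirroring the four-argument TestOperationContext.create call used in the other examples on this page (the NameContext values are illustrative):

// Hypothetical helper: one operation context per step, sharing the counter set
// and state tracker across all operations of the executor.
private TestOperationContext createContext(String stepName, ExecutionStateTracker stateTracker) {
  return TestOperationContext.create(
      counterSet,
      NameContext.create("testStage", "original-" + stepName, stepName, "user-" + stepName),
      null, // third argument passed as null, as in the other examples on this page
      stateTracker);
}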
Use of org.apache.beam.runners.dataflow.worker.TestOperationContext.TestDataflowExecutionState in project beam by Apache.
In the class GroupingShuffleReaderTest, the method setCurrentExecutionState:
private void setCurrentExecutionState(String mockOriginalName) {
DataflowExecutionState state =
    new TestDataflowExecutionState(
        NameContext.create(MOCK_STAGE_NAME, mockOriginalName, MOCK_SYSTEM_NAME, MOCK_USER_NAME),
        "activity");
tracker.enterState(state);
}
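The two NameContext factories seen on this page differ in granularity: forStage carries only the stage name, while create also carries the per-step names. A small illustrative comparison (the literal values are placeholders):

// Stage-only context, as used when constructing the execution state trackers above.
NameContext stageOnly = NameContext.forStage("test-stage");

// Full context with stage, original (system-assigned), system, and user step names,
// as used when attributing shuffle work to a concrete step.
NameContext fullStep = NameContext.create("test-stage", "s1-original", "s1-system", "MyParDo");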
Use of org.apache.beam.runners.dataflow.worker.TestOperationContext.TestDataflowExecutionState in project beam by Apache.
In the class IntrinsicMapTaskExecutorTest, the method testPerElementProcessingTimeCounters:
/**
* Verify counts for the per-element-output-time counter are correct.
*/
@Test
public void testPerElementProcessingTimeCounters() throws Exception {
PipelineOptions options = PipelineOptionsFactory.create();
options.as(DataflowPipelineDebugOptions.class)
    .setExperiments(
        Lists.newArrayList(DataflowElementExecutionTracker.TIME_PER_ELEMENT_EXPERIMENT));
DataflowExecutionStateTracker stateTracker =
    new DataflowExecutionStateTracker(
        ExecutionStateSampler.newForTest(),
        new TestDataflowExecutionState(
            NameContext.forStage("test-stage"),
            "other",
            null /* requestingStepName */,
            null /* sideInputIndex */,
            null /* metricsContainer */,
            NoopProfileScope.NOOP),
        counterSet,
        options,
        "test-work-item-id");
NameContext parDoName = nameForStep("s1");
// Wire a read operation with 3 elements to a ParDoOperation and assert that we count
// the correct number of elements.
ReadOperation read = ReadOperation.forTest(new TestReader("a", "b", "c"), new OutputReceiver(),
    TestOperationContext.create(counterSet, nameForStep("s0"), null, stateTracker));
ParDoOperation parDo = new ParDoOperation(new NoopParDoFn(), new OutputReceiver[0],
    TestOperationContext.create(counterSet, parDoName, null, stateTracker));
parDo.attachInput(read, 0);
List<Operation> operations = Lists.newArrayList(read, parDo);
try (IntrinsicMapTaskExecutor executor = IntrinsicMapTaskExecutor.withSharedCounterSet(operations, counterSet, stateTracker)) {
executor.execute();
}
CounterName counterName =
    CounterName.named("per-element-processing-time").withOriginalName(parDoName);
Counter<Long, CounterDistribution> counter =
    (Counter<Long, CounterDistribution>) counterSet.getExistingCounter(counterName);
assertThat(counter.getAggregate().getCount(), equalTo(3L));
}
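CounterName.named(...).withOriginalName(...) is what ties the distribution to a specific step, so the same lookup pattern works for any operation wired into the executor. A hedged sketch reusing the names from the test above (whether the read step registers its own distribution is an assumption, so a missing counter should be treated as "not recorded"):

// Structured counter name for the read step "s0", built the same way as for the ParDo.
CounterName readTimeName =
    CounterName.named("per-element-processing-time").withOriginalName(nameForStep("s0"));

// getExistingCounter only returns counters that were actually registered.
Counter<?, ?> maybeReadTime = counterSet.getExistingCounter(readTimeName);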