Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class AbstractTwoInputStreamTask, method init().
@Override
public void init() throws Exception {
    StreamConfig configuration = getConfiguration();
    ClassLoader userClassLoader = getUserCodeClassLoader();

    int numberOfInputs = configuration.getNumberOfNetworkInputs();

    ArrayList<IndexedInputGate> inputList1 = new ArrayList<>();
    ArrayList<IndexedInputGate> inputList2 = new ArrayList<>();

    List<StreamEdge> inEdges = configuration.getInPhysicalEdges(userClassLoader);

    for (int i = 0; i < numberOfInputs; i++) {
        int inputType = inEdges.get(i).getTypeNumber();
        IndexedInputGate reader = getEnvironment().getInputGate(i);
        switch (inputType) {
            case 1:
                inputList1.add(reader);
                break;
            case 2:
                inputList2.add(reader);
                break;
            default:
                throw new RuntimeException("Invalid input type number: " + inputType);
        }
    }

    createInputProcessor(
            inputList1, inputList2, gateIndex -> inEdges.get(gateIndex).getPartitioner());

    mainOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, minInputWatermarkGauge);
    mainOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_1_WATERMARK, input1WatermarkGauge);
    mainOperator.getMetricGroup().gauge(MetricNames.IO_CURRENT_INPUT_2_WATERMARK, input2WatermarkGauge);

    // wrap watermark gauge since registered metrics must be unique
    getEnvironment()
            .getMetricGroup()
            .gauge(MetricNames.IO_CURRENT_INPUT_WATERMARK, minInputWatermarkGauge::getValue);
}
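The loop above simply buckets the task's input gates by the type number of the corresponding physical edge (1 = first input, 2 = second input). A framework-free sketch of that routing idea, with a hypothetical helper name (splitByTypeNumber is not a Flink API), might look like:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.IntUnaryOperator;

final class InputRouting {

    /**
     * Splits the readers at indices 0..n-1 into two buckets according to the
     * type number reported for each index (1 = first input, 2 = second input).
     */
    static <R> List<List<R>> splitByTypeNumber(List<R> readers, IntUnaryOperator typeNumberOf) {
        List<R> first = new ArrayList<>();
        List<R> second = new ArrayList<>();
        for (int i = 0; i < readers.size(); i++) {
            int typeNumber = typeNumberOf.applyAsInt(i);
            switch (typeNumber) {
                case 1:
                    first.add(readers.get(i));
                    break;
                case 2:
                    second.add(readers.get(i));
                    break;
                default:
                    throw new IllegalArgumentException("Invalid input type number: " + typeNumber);
            }
        }
        return Arrays.asList(first, second);
    }
}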
Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class StreamTaskTest, method testStateBackendClosingOnFailure().
@Test
public void testStateBackendClosingOnFailure() throws Exception {
    Configuration taskManagerConfig = new Configuration();
    taskManagerConfig.setString(STATE_BACKEND, TestMemoryStateBackendFactory.class.getName());

    StreamConfig cfg = new StreamConfig(new Configuration());
    cfg.setStateKeySerializer(mock(TypeSerializer.class));
    cfg.setOperatorID(new OperatorID(4711L, 42L));
    TestStreamSource<Long, MockSourceFunction> streamSource =
            new TestStreamSource<>(new MockSourceFunction());
    cfg.setStreamOperator(streamSource);
    cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    try (NettyShuffleEnvironment shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build()) {
        Task task = createTask(StateBackendTestSource.class, shuffleEnvironment, cfg, taskManagerConfig);

        StateBackendTestSource.fail = true;
        task.startTaskThread();

        // wait for clean termination
        task.getExecutingThread().join();

        // ensure that the state backends and stream iterables are closed ...
        verify(TestStreamSource.operatorStateBackend).close();
        verify(TestStreamSource.keyedStateBackend).close();
        verify(TestStreamSource.rawOperatorStateInputs).close();
        verify(TestStreamSource.rawKeyedStateInputs).close();
        // ... and disposed
        verify(TestStreamSource.operatorStateBackend).dispose();
        verify(TestStreamSource.keyedStateBackend).dispose();

        assertEquals(ExecutionState.FAILED, task.getExecutionState());
    }
}
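This test and the next one build an identical StreamConfig; a minimal sketch of a shared setup helper, assuming the operator to register is passed in (the helper name buildSourceTaskConfig is hypothetical, and only the setters already used above are relied on):

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.StreamOperator;

import static org.mockito.Mockito.mock;

// inside the test class
private static StreamConfig buildSourceTaskConfig(StreamOperator<?> operator) {
    StreamConfig cfg = new StreamConfig(new Configuration());
    cfg.setStateKeySerializer(mock(TypeSerializer.class)); // any serializer works for these tests
    cfg.setOperatorID(new OperatorID(4711L, 42L));
    cfg.setStreamOperator(operator);
    cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
    return cfg;
}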
Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class StreamTaskTest, method testStateBackendLoadingAndClosing().
@Test
public void testStateBackendLoadingAndClosing() throws Exception {
    Configuration taskManagerConfig = new Configuration();
    taskManagerConfig.setString(STATE_BACKEND, TestMemoryStateBackendFactory.class.getName());

    StreamConfig cfg = new StreamConfig(new Configuration());
    cfg.setStateKeySerializer(mock(TypeSerializer.class));
    cfg.setOperatorID(new OperatorID(4711L, 42L));
    TestStreamSource<Long, MockSourceFunction> streamSource =
            new TestStreamSource<>(new MockSourceFunction());
    cfg.setStreamOperator(streamSource);
    cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    try (ShuffleEnvironment shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build()) {
        Task task = createTask(StateBackendTestSource.class, shuffleEnvironment, cfg, taskManagerConfig);

        StateBackendTestSource.fail = false;
        task.startTaskThread();

        // wait for clean termination
        task.getExecutingThread().join();

        // ensure that the state backends and stream iterables are closed ...
        verify(TestStreamSource.operatorStateBackend).close();
        verify(TestStreamSource.keyedStateBackend).close();
        verify(TestStreamSource.rawOperatorStateInputs).close();
        verify(TestStreamSource.rawKeyedStateInputs).close();
        // ... and disposed
        verify(TestStreamSource.operatorStateBackend).dispose();
        verify(TestStreamSource.keyedStateBackend).dispose();

        assertEquals(ExecutionState.FINISHED, task.getExecutionState());
    }
}
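Both tests verify close() and dispose() independently; if the close-before-dispose ordering should also be pinned down, Mockito's InOrder could tighten the assertion. A sketch (not part of the original tests), reusing the same TestStreamSource mock:

import org.mockito.InOrder;
import static org.mockito.Mockito.inOrder;

// verify that the operator state backend is closed before it is disposed
InOrder order = inOrder(TestStreamSource.operatorStateBackend);
order.verify(TestStreamSource.operatorStateBackend).close();
order.verify(TestStreamSource.operatorStateBackend).dispose();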
Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class StreamTaskTest, method testEarlyCanceling().
/**
 * This test checks that cancel calls that are issued before the operator is instantiated still
 * lead to proper canceling.
 */
@Test
public void testEarlyCanceling() throws Exception {
    final StreamConfig cfg = new StreamConfig(new Configuration());
    cfg.setOperatorID(new OperatorID(4711L, 42L));
    cfg.setStreamOperator(new SlowlyDeserializingOperator());
    cfg.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    final TaskManagerActions taskManagerActions = spy(new NoOpTaskManagerActions());
    try (NettyShuffleEnvironment shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build()) {
        final Task task =
                new TestTaskBuilder(shuffleEnvironment)
                        .setInvokable(SourceStreamTask.class)
                        .setTaskConfig(cfg.getConfiguration())
                        .setTaskManagerActions(taskManagerActions)
                        .build();

        final TaskExecutionState state =
                new TaskExecutionState(task.getExecutionId(), ExecutionState.RUNNING);

        task.startTaskThread();

        verify(taskManagerActions, timeout(2000L)).updateTaskExecutionState(eq(state));

        // send a cancel. because the operator takes a long time to deserialize, this should
        // hit the task before the operator is deserialized
        task.cancelExecution();
        task.getExecutingThread().join();

        assertFalse("Task did not cancel", task.getExecutingThread().isAlive());
        assertEquals(ExecutionState.CANCELED, task.getExecutionState());
    }
}
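The test relies on SlowlyDeserializingOperator stalling while the task deserializes its operator, so the cancel call arrives before the operator exists. A simplified, hypothetical illustration of that trick using the standard Java serialization hook (this is not the actual Flink class):

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;

class SlowDeserializing implements Serializable {
    private static final long serialVersionUID = 1L;

    // Called by Java serialization when the object is read back; sleeping here
    // keeps the task busy deserializing so an early cancel hits it before the
    // operator is available.
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        try {
            Thread.sleep(5_000L);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}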
Use of org.apache.flink.streaming.api.graph.StreamConfig in project flink by apache.
The class StreamTaskTest, method outputEdgeConfiguration().
/**
 * Makes sure that there is some output edge in the config so that a RecordWriter is created.
 */
private void outputEdgeConfiguration(Configuration taskConfiguration) {
    StreamConfig streamConfig = new StreamConfig(taskConfiguration);
    streamConfig.setStreamOperatorFactory(new UnusedOperatorFactory());

    StreamConfigChainer cfg = new StreamConfigChainer(new OperatorID(42, 42), streamConfig, this, 1);
    // The OutputFlusher thread is started only if the buffer timeout is greater than 0
    // (the default is 0).
    cfg.setBufferTimeout(1);
    cfg.chain(
            new OperatorID(44, 44),
            new UnusedOperatorFactory(),
            StringSerializer.INSTANCE,
            StringSerializer.INSTANCE,
            false);
    cfg.finish();
}