Use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
The class OutputFormatVertex, method initializeOnMaster.
@Override
public void initializeOnMaster(ClassLoader loader) throws Exception {
    final TaskConfig cfg = new TaskConfig(getConfiguration());

    UserCodeWrapper<OutputFormat<?>> wrapper;
    try {
        wrapper = cfg.<OutputFormat<?>>getStubWrapper(loader);
    } catch (Throwable t) {
        throw new Exception("Deserializing the OutputFormat (" + formatDescription + ") failed: " + t.getMessage(), t);
    }
    if (wrapper == null) {
        throw new Exception("No output format present in the OutputFormatVertex's task configuration.");
    }

    OutputFormat<?> outputFormat;
    try {
        outputFormat = wrapper.getUserCodeObject(OutputFormat.class, loader);
    } catch (Throwable t) {
        throw new Exception("Instantiating the OutputFormat (" + formatDescription + ") failed: " + t.getMessage(), t);
    }

    try {
        outputFormat.configure(cfg.getStubParameters());
    } catch (Throwable t) {
        throw new Exception("Configuring the OutputFormat (" + formatDescription + ") failed: " + t.getMessage(), t);
    }

    if (outputFormat instanceof InitializeOnMaster) {
        ((InitializeOnMaster) outputFormat).initializeGlobal(getParallelism());
    }
}
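For context, this master-side hook only reads back what was written into the same TaskConfig when the vertex was built. A minimal sketch of that producer side, assuming a serializable user OutputFormat instance named myOutputFormat and an illustrative stub parameter key (both are assumptions, not part of the Flink sources shown here):

// Client-side counterpart (illustrative): wrap the OutputFormat and its parameters into the
// vertex configuration that initializeOnMaster() later reads via getStubWrapper()/getStubParameters().
OutputFormatVertex vertex = new OutputFormatVertex("sink");
TaskConfig cfg = new TaskConfig(vertex.getConfiguration());

cfg.setStubWrapper(new UserCodeObjectWrapper<OutputFormat<?>>(myOutputFormat));

Configuration stubParams = new Configuration();
stubParams.setString("output.path", "file:///tmp/out"); // hypothetical key consumed by configure()
cfg.setStubParameters(stubParams);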
Use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
The class BatchTask, method initOutputs.
/**
 * Creates a writer for each output. Creates an OutputCollector which forwards its input to all writers.
 * The output collector applies the configured shipping strategy.
 */
@SuppressWarnings("unchecked")
public static <T> Collector<T> initOutputs(
        AbstractInvokable containingTask,
        ClassLoader cl,
        TaskConfig config,
        List<ChainedDriver<?, ?>> chainedTasksTarget,
        List<RecordWriter<?>> eventualOutputs,
        ExecutionConfig executionConfig,
        Map<String, Accumulator<?, ?>> accumulatorMap) throws Exception {

    final int numOutputs = config.getNumOutputs();

    // check whether we got any chained tasks
    final int numChained = config.getNumberOfChainedStubs();
    if (numChained > 0) {
        // got chained stubs. that means that this one may only have a single forward connection
        if (numOutputs != 1 || config.getOutputShipStrategy(0) != ShipStrategyType.FORWARD) {
            throw new RuntimeException("Plan Generation Bug: Found a chained stub that is not connected via an only forward connection.");
        }

        // instantiate each task
        @SuppressWarnings("rawtypes")
        Collector previous = null;
        for (int i = numChained - 1; i >= 0; --i) {
            // get the task first
            final ChainedDriver<?, ?> ct;
            try {
                Class<? extends ChainedDriver<?, ?>> ctc = config.getChainedTask(i);
                ct = ctc.newInstance();
            } catch (Exception ex) {
                throw new RuntimeException("Could not instantiate chained task driver.", ex);
            }

            // get the configuration for the task
            final TaskConfig chainedStubConf = config.getChainedStubConfig(i);
            final String taskName = config.getChainedTaskName(i);

            if (i == numChained - 1) {
                // last in chain, instantiate the output collector for this task
                previous = getOutputCollector(containingTask, chainedStubConf, cl, eventualOutputs, 0, chainedStubConf.getNumOutputs());
            }

            ct.setup(chainedStubConf, taskName, previous, containingTask, cl, executionConfig, accumulatorMap);
            chainedTasksTarget.add(0, ct);

            if (i == numChained - 1) {
                ct.getIOMetrics().reuseOutputMetricsForTask();
            }

            previous = ct;
        }
        // the collector of the first in the chain is the collector for the task
        return (Collector<T>) previous;
    }

    // instantiate the output collector the default way from this configuration
    return getOutputCollector(containingTask, config, cl, eventualOutputs, 0, numOutputs);
}
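A hedged sketch of a call site for this helper; the variable names invokable, userCodeClassLoader, taskConfig, executionConfig, and accumulatorMap are placeholders standing in for whatever the surrounding task provides, not identifiers from the Flink sources above:

// Illustrative call: the two lists are filled by initOutputs() as side effects.
List<ChainedDriver<?, ?>> chainedTasks = new ArrayList<>();
List<RecordWriter<?>> eventualOutputs = new ArrayList<>();

Collector<Record> output = BatchTask.initOutputs(
        invokable,            // the AbstractInvokable that owns the outputs
        userCodeClassLoader,  // loader used to instantiate chained drivers and stubs
        taskConfig,           // TaskConfig describing ship strategies and chained stubs
        chainedTasks,         // receives the instantiated ChainedDrivers, head of the chain first
        eventualOutputs,      // receives the RecordWriters that were created
        executionConfig,
        accumulatorMap);      // Map<String, Accumulator<?, ?>> shared with the chained tasks

// Records emitted to 'output' pass through the chained drivers (if any) and are then
// shipped according to the configured ship strategy of the final writer(s).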
Use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
The class TaskTestBase, method registerFileOutputTask.
public void registerFileOutputTask(AbstractInvokable outTask, FileOutputFormat<Record> outputFormat, String outPath) {
    TaskConfig dsConfig = new TaskConfig(this.mockEnv.getTaskConfiguration());

    outputFormat.setOutputFilePath(new Path(outPath));
    outputFormat.setWriteMode(WriteMode.OVERWRITE);

    dsConfig.setStubWrapper(new UserCodeObjectWrapper<>(outputFormat));

    outTask.setEnvironment(this.mockEnv);
}
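In a test built on this base class, the helper is typically called once the mock environment exists; a hedged example in which sinkTask, MyRecordOutputFormat, and the output path are illustrative assumptions rather than names from the test base:

// Register a hypothetical FileOutputFormat<Record> subclass for the task under test.
registerFileOutputTask(sinkTask, new MyRecordOutputFormat(), "file:///tmp/test-output");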
Use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
The class JobTaskVertexTest, method testOutputFormatVertex.
@Test
public void testOutputFormatVertex() {
    try {
        final TestingOutputFormat outputFormat = new TestingOutputFormat();
        final OutputFormatVertex of = new OutputFormatVertex("Name");
        new TaskConfig(of.getConfiguration()).setStubWrapper(new UserCodeObjectWrapper<OutputFormat<?>>(outputFormat));
        final ClassLoader cl = getClass().getClassLoader();

        try {
            of.initializeOnMaster(cl);
            fail("Did not throw expected exception.");
        } catch (TestException e) {
            // all good
        }

        OutputFormatVertex copy = SerializationUtils.clone(of);
        try {
            copy.initializeOnMaster(cl);
            fail("Did not throw expected exception.");
        } catch (TestException e) {
            // all good
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
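The TestingOutputFormat used above is a helper defined elsewhere in the test class and deliberately fails during master-side initialization; its body is not shown here. To illustrate the hook that OutputFormatVertex.initializeOnMaster invokes, an OutputFormat opts into master-side initialization by implementing InitializeOnMaster, roughly as in this sketch (an assumption for illustration, not the actual test helper):

// Illustrative only: an OutputFormat that participates in master-side initialization.
public class GlobalInitOutputFormat implements OutputFormat<String>, InitializeOnMaster {

    @Override
    public void configure(Configuration parameters) {}

    @Override
    public void open(int taskNumber, int numTasks) {}

    @Override
    public void writeRecord(String record) {}

    @Override
    public void close() {}

    @Override
    public void initializeGlobal(int parallelism) throws IOException {
        // Invoked once on the master by OutputFormatVertex#initializeOnMaster before deployment,
        // e.g. to create the output directory, or to fail fast as the test format above does.
    }
}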
Use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
The class JobTaskVertexTest, method testInputFormatVertex.
@Test
public void testInputFormatVertex() {
    try {
        final TestInputFormat inputFormat = new TestInputFormat();
        final InputFormatVertex vertex = new InputFormatVertex("Name");
        new TaskConfig(vertex.getConfiguration()).setStubWrapper(new UserCodeObjectWrapper<InputFormat<?, ?>>(inputFormat));

        final ClassLoader cl = getClass().getClassLoader();
        vertex.initializeOnMaster(cl);
        InputSplit[] splits = vertex.getInputSplitSource().createInputSplits(77);

        assertNotNull(splits);
        assertEquals(1, splits.length);
        assertEquals(TestSplit.class, splits[0].getClass());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}