
Example 1 with TaskConfig

Use of org.apache.flink.runtime.operators.util.TaskConfig in the Apache Flink project.

From the class OutputFormatVertex, method initializeOnMaster.

@Override
public void initializeOnMaster(ClassLoader loader) throws Exception {
    final TaskConfig cfg = new TaskConfig(getConfiguration());
    UserCodeWrapper<OutputFormat<?>> wrapper;
    try {
        wrapper = cfg.<OutputFormat<?>>getStubWrapper(loader);
    } catch (Throwable t) {
        throw new Exception("Deserializing the OutputFormat (" + formatDescription + ") failed: " + t.getMessage(), t);
    }
    if (wrapper == null) {
        throw new Exception("No input format present in InputFormatVertex's task configuration.");
    }
    OutputFormat<?> outputFormat;
    try {
        outputFormat = wrapper.getUserCodeObject(OutputFormat.class, loader);
    } catch (Throwable t) {
        throw new Exception("Instantiating the OutputFormat (" + formatDescription + ") failed: " + t.getMessage(), t);
    }
    try {
        outputFormat.configure(cfg.getStubParameters());
    } catch (Throwable t) {
        throw new Exception("Configuring the OutputFormat (" + formatDescription + ") failed: " + t.getMessage(), t);
    }
    if (outputFormat instanceof InitializeOnMaster) {
        ((InitializeOnMaster) outputFormat).initializeGlobal(getParallelism());
    }
}
Also used: OutputFormat (org.apache.flink.api.common.io.OutputFormat), TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig), InitializeOnMaster (org.apache.flink.api.common.io.InitializeOnMaster)
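
As a counterpart to the read path above, here is a minimal sketch (not taken from the Flink sources) of how the stub wrapper and stub parameters that initializeOnMaster reads could be placed into the vertex's TaskConfig. The vertex name, the DiscardingOutputFormat stand-in, and the parameter key are placeholders; TaskConfig#setStubParameters is assumed to be the setter paired with the getStubParameters call seen above.

import org.apache.flink.api.common.io.OutputFormat;
import org.apache.flink.api.common.operators.util.UserCodeObjectWrapper;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.OutputFormatVertex;
import org.apache.flink.runtime.operators.util.TaskConfig;

// Wrap the vertex configuration and register the output format as a serialized stub.
OutputFormatVertex vertex = new OutputFormatVertex("example sink");
TaskConfig cfg = new TaskConfig(vertex.getConfiguration());
cfg.setStubWrapper(new UserCodeObjectWrapper<OutputFormat<?>>(new DiscardingOutputFormat<Object>()));

// Parameters later handed to OutputFormat#configure(...) on the master (placeholder key/value).
Configuration stubParameters = new Configuration();
stubParameters.setString("example.output.key", "example-value");
cfg.setStubParameters(stubParameters);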

Example 2 with TaskConfig

Use of org.apache.flink.runtime.operators.util.TaskConfig in the Apache Flink project.

From the class BatchTask, method initOutputs.

/**
 * Creates a writer for each output. Creates an OutputCollector which forwards its input to all writers.
 * The output collector applies the configured shipping strategy.
 */
@SuppressWarnings("unchecked")
public static <T> Collector<T> initOutputs(AbstractInvokable containingTask, ClassLoader cl, TaskConfig config, List<ChainedDriver<?, ?>> chainedTasksTarget, List<RecordWriter<?>> eventualOutputs, ExecutionConfig executionConfig, Map<String, Accumulator<?, ?>> accumulatorMap) throws Exception {
    final int numOutputs = config.getNumOutputs();
    // check whether we got any chained tasks
    final int numChained = config.getNumberOfChainedStubs();
    if (numChained > 0) {
        // got chained stubs. that means that this one may only have a single forward connection
        if (numOutputs != 1 || config.getOutputShipStrategy(0) != ShipStrategyType.FORWARD) {
            throw new RuntimeException("Plan Generation Bug: Found a chained stub that is not connected via an only forward connection.");
        }
        // instantiate each task
        @SuppressWarnings("rawtypes") Collector previous = null;
        for (int i = numChained - 1; i >= 0; --i) {
            // get the task first
            final ChainedDriver<?, ?> ct;
            try {
                Class<? extends ChainedDriver<?, ?>> ctc = config.getChainedTask(i);
                ct = ctc.newInstance();
            } catch (Exception ex) {
                throw new RuntimeException("Could not instantiate chained task driver.", ex);
            }
            // get the configuration for the task
            final TaskConfig chainedStubConf = config.getChainedStubConfig(i);
            final String taskName = config.getChainedTaskName(i);
            if (i == numChained - 1) {
                // last in chain, instantiate the output collector for this task
                previous = getOutputCollector(containingTask, chainedStubConf, cl, eventualOutputs, 0, chainedStubConf.getNumOutputs());
            }
            ct.setup(chainedStubConf, taskName, previous, containingTask, cl, executionConfig, accumulatorMap);
            chainedTasksTarget.add(0, ct);
            if (i == numChained - 1) {
                ct.getIOMetrics().reuseOutputMetricsForTask();
            }
            previous = ct;
        }
        // the collector of the first in the chain is the collector for the task
        return (Collector<T>) previous;
    }
    // instantiate the output collector the default way from this configuration
    return getOutputCollector(containingTask, config, cl, eventualOutputs, 0, numOutputs);
}
Also used: OutputCollector (org.apache.flink.runtime.operators.shipping.OutputCollector), Collector (org.apache.flink.util.Collector), TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig), ExceptionInChainedStubException (org.apache.flink.runtime.operators.chaining.ExceptionInChainedStubException), CancelTaskException (org.apache.flink.runtime.execution.CancelTaskException), IOException (java.io.IOException)
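
For orientation, the following is a minimal sketch (not taken from the Flink sources) of the producer side of the chained-task entries that initOutputs walks above. The driver class and task name are placeholders, and TaskConfig#addChainedTask is assumed to be the setter matching the getChainedTask, getChainedStubConfig and getChainedTaskName getters used in the loop.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.operators.chaining.ChainedDriver;
import org.apache.flink.runtime.operators.chaining.ChainedMapDriver;
import org.apache.flink.runtime.operators.util.TaskConfig;

TaskConfig config = new TaskConfig(new Configuration());

// Append one chained driver; initOutputs later iterates these entries back to front.
config.addChainedTask(ChainedMapDriver.class, new TaskConfig(new Configuration()), "chained map");

int numChained = config.getNumberOfChainedStubs();                    // 1
Class<? extends ChainedDriver<?, ?>> driverClass = config.getChainedTask(0);
TaskConfig chainedStubConf = config.getChainedStubConfig(0);
String taskName = config.getChainedTaskName(0);                       // "chained map"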

Example 3 with TaskConfig

Use of org.apache.flink.runtime.operators.util.TaskConfig in the Apache Flink project.

From the class TaskTestBase, method registerFileOutputTask.

public void registerFileOutputTask(AbstractInvokable outTask, FileOutputFormat<Record> outputFormat, String outPath) {
    TaskConfig dsConfig = new TaskConfig(this.mockEnv.getTaskConfiguration());
    outputFormat.setOutputFilePath(new Path(outPath));
    outputFormat.setWriteMode(WriteMode.OVERWRITE);
    dsConfig.setStubWrapper(new UserCodeObjectWrapper<>(outputFormat));
    outTask.setEnvironment(this.mockEnv);
}
Also used: Path (org.apache.flink.core.fs.Path), TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig)
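
The wrapper registered here is exactly what the master-side code in Example 1 later deserializes. A minimal, self-contained round-trip sketch, using a plain Configuration in place of the mock environment's task configuration and a DiscardingOutputFormat as a stand-in for the file format:

import org.apache.flink.api.common.io.OutputFormat;
import org.apache.flink.api.common.operators.util.UserCodeObjectWrapper;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.operators.util.TaskConfig;

// Write side: serialize the format into the task configuration.
TaskConfig dsConfig = new TaskConfig(new Configuration());
dsConfig.setStubWrapper(new UserCodeObjectWrapper<>(new DiscardingOutputFormat<Object>()));

// Read side: recover the format the way OutputFormatVertex#initializeOnMaster does.
ClassLoader cl = Thread.currentThread().getContextClassLoader();
OutputFormat<?> restored = dsConfig.<OutputFormat<?>>getStubWrapper(cl)
        .getUserCodeObject(OutputFormat.class, cl);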

Example 4 with TaskConfig

Use of org.apache.flink.runtime.operators.util.TaskConfig in the Apache Flink project.

From the class JobTaskVertexTest, method testOutputFormatVertex.

@Test
public void testOutputFormatVertex() {
    try {
        final TestingOutputFormat outputFormat = new TestingOutputFormat();
        final OutputFormatVertex of = new OutputFormatVertex("Name");
        new TaskConfig(of.getConfiguration()).setStubWrapper(new UserCodeObjectWrapper<OutputFormat<?>>(outputFormat));
        final ClassLoader cl = getClass().getClassLoader();
        try {
            of.initializeOnMaster(cl);
            fail("Did not throw expected exception.");
        } catch (TestException e) {
        // all good
        }
        OutputFormatVertex copy = SerializationUtils.clone(of);
        try {
            copy.initializeOnMaster(cl);
            fail("Did not throw expected exception.");
        } catch (TestException e) {
        // all good
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: DiscardingOutputFormat (org.apache.flink.api.java.io.DiscardingOutputFormat), OutputFormat (org.apache.flink.api.common.io.OutputFormat), TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig), IOException (java.io.IOException), Test (org.junit.Test)
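
TestingOutputFormat and TestException are defined elsewhere in JobTaskVertexTest and are not shown on this page. A hypothetical equivalent consistent with this test would fail inside initializeGlobal: Example 1 shows that initializeOnMaster calls initializeGlobal directly, without wrapping it in another exception, so a TestException thrown there reaches the catch block unchanged.

import java.io.IOException;
import org.apache.flink.api.common.io.InitializeOnMaster;
import org.apache.flink.api.java.io.DiscardingOutputFormat;

// Hypothetical stand-ins; the real test classes may differ in detail.
private static class TestException extends IOException {}

private static class TestingOutputFormat extends DiscardingOutputFormat<Object> implements InitializeOnMaster {

    @Override
    public void initializeGlobal(int parallelism) throws IOException {
        // Simulate a failure during master-side initialization.
        throw new TestException();
    }
}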

Example 5 with TaskConfig

Use of org.apache.flink.runtime.operators.util.TaskConfig in the Apache Flink project.

From the class JobTaskVertexTest, method testInputFormatVertex.

@Test
public void testInputFormatVertex() {
    try {
        final TestInputFormat inputFormat = new TestInputFormat();
        final InputFormatVertex vertex = new InputFormatVertex("Name");
        new TaskConfig(vertex.getConfiguration()).setStubWrapper(new UserCodeObjectWrapper<InputFormat<?, ?>>(inputFormat));
        final ClassLoader cl = getClass().getClassLoader();
        vertex.initializeOnMaster(cl);
        InputSplit[] splits = vertex.getInputSplitSource().createInputSplits(77);
        assertNotNull(splits);
        assertEquals(1, splits.length);
        assertEquals(TestSplit.class, splits[0].getClass());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used: GenericInputFormat (org.apache.flink.api.common.io.GenericInputFormat), InputFormat (org.apache.flink.api.common.io.InputFormat), TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig), GenericInputSplit (org.apache.flink.core.io.GenericInputSplit), InputSplit (org.apache.flink.core.io.InputSplit), IOException (java.io.IOException), Test (org.junit.Test)
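
TestInputFormat and TestSplit are likewise defined elsewhere in JobTaskVertexTest and not shown here. A hypothetical equivalent, consistent with the assertions in the test and with the GenericInputFormat and GenericInputSplit imports listed above, would return exactly one TestSplit regardless of the requested number of splits:

import java.io.IOException;
import org.apache.flink.api.common.io.GenericInputFormat;
import org.apache.flink.core.io.GenericInputSplit;

// Hypothetical stand-ins; the real test classes may differ in detail.
private static class TestSplit extends GenericInputSplit {

    TestSplit(int partitionNumber, int totalNumberOfPartitions) {
        super(partitionNumber, totalNumberOfPartitions);
    }
}

private static class TestInputFormat extends GenericInputFormat<Object> {

    @Override
    public GenericInputSplit[] createInputSplits(int numSplits) {
        // Ignore the requested split count and hand back a single marker split.
        return new GenericInputSplit[] { new TestSplit(0, 1) };
    }

    @Override
    public boolean reachedEnd() {
        return true;
    }

    @Override
    public Object nextRecord(Object reuse) throws IOException {
        return null;
    }
}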

Aggregations

TaskConfig (org.apache.flink.runtime.operators.util.TaskConfig) 61
IOException (java.io.IOException) 17
Configuration (org.apache.flink.configuration.Configuration) 16
JobVertex (org.apache.flink.runtime.jobgraph.JobVertex) 14
Test (org.junit.Test) 14
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig) 13
BulkIterationPlanNode (org.apache.flink.optimizer.plan.BulkIterationPlanNode) 9
BulkPartialSolutionPlanNode (org.apache.flink.optimizer.plan.BulkPartialSolutionPlanNode) 9
DualInputPlanNode (org.apache.flink.optimizer.plan.DualInputPlanNode) 9
IterationPlanNode (org.apache.flink.optimizer.plan.IterationPlanNode) 9
NAryUnionPlanNode (org.apache.flink.optimizer.plan.NAryUnionPlanNode) 9
PlanNode (org.apache.flink.optimizer.plan.PlanNode) 9
SingleInputPlanNode (org.apache.flink.optimizer.plan.SingleInputPlanNode) 9
SinkPlanNode (org.apache.flink.optimizer.plan.SinkPlanNode) 9
SolutionSetPlanNode (org.apache.flink.optimizer.plan.SolutionSetPlanNode) 9
SourcePlanNode (org.apache.flink.optimizer.plan.SourcePlanNode) 9
WorksetIterationPlanNode (org.apache.flink.optimizer.plan.WorksetIterationPlanNode) 9
WorksetPlanNode (org.apache.flink.optimizer.plan.WorksetPlanNode) 9
CompilerException (org.apache.flink.optimizer.CompilerException) 8
OperatorID (org.apache.flink.runtime.jobgraph.OperatorID) 8