use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
the class CoGroupWithSolutionSetFirstDriver method initialize.
// --------------------------------------------------------------------------------------------
@Override
@SuppressWarnings("unchecked")
public void initialize() {
    final TypeComparator<IT1> solutionSetComparator;

    // grab a handle to the hash table from the iteration broker
    if (taskContext instanceof AbstractIterativeTask) {
        AbstractIterativeTask<?, ?> iterativeTaskContext = (AbstractIterativeTask<?, ?>) taskContext;
        String identifier = iterativeTaskContext.brokerKey();
        Object table = SolutionSetBroker.instance().get(identifier);

        if (table instanceof CompactingHashTable) {
            this.hashTable = (CompactingHashTable<IT1>) table;
            solutionSetSerializer = this.hashTable.getBuildSideSerializer();
            solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
        } else if (table instanceof JoinHashMap) {
            this.objectMap = (JoinHashMap<IT1>) table;
            solutionSetSerializer = this.objectMap.getBuildSerializer();
            solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
        } else {
            throw new RuntimeException("Unrecognized solution set index: " + table);
        }
    } else {
        throw new RuntimeException("The task context of this driver is no iterative task context.");
    }

    TaskConfig config = taskContext.getTaskConfig();
    ClassLoader classLoader = taskContext.getUserCodeClassLoader();

    TypeComparatorFactory<IT2> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);

    this.probeSideSerializer = taskContext.<IT2>getInputSerializer(0).getSerializer();
    this.probeSideComparator = probeSideComparatorFactory.createComparator();

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (objectReuseEnabled) {
        solutionSideRecord = solutionSetSerializer.createInstance();
    }

    TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
    pairComparator = factory.createComparator21(solutionSetComparator, this.probeSideComparator);
}
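
Only the tail of this method actually reads the TaskConfig: the comparator factory for the probe side (driver comparator 0) and the pair-comparator factory that ties the probe side to the solution set. A minimal sketch of just that wiring is shown below; the helper name and the type parameters S (the solution-set side, IT1 above) and P (the probe side, IT2 above) are illustrative, not Flink API, and the imports reflect where the driver above gets these types from.

import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypeComparatorFactory;
import org.apache.flink.api.common.typeutils.TypePairComparator;
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory;
import org.apache.flink.runtime.operators.util.TaskConfig;

// Hypothetical helper, mirroring the last statements of initialize() above.
@SuppressWarnings("unchecked")
public static <S, P> TypePairComparator<P, S> createSolutionSetPairComparator(
        TaskConfig config, ClassLoader classLoader, TypeComparator<S> solutionSetComparator) {

    // driver comparator 0 describes the probe side of the co-group
    TypeComparatorFactory<P> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
    TypeComparator<P> probeSideComparator = probeSideComparatorFactory.createComparator();

    // "21" orients the pair comparator so probe-side records are compared against solution-set records
    TypePairComparatorFactory<S, P> pairComparatorFactory = config.getPairComparatorFactory(classLoader);
    return pairComparatorFactory.createComparator21(solutionSetComparator, probeSideComparator);
}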
use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
the class DataSourceTask method initInputFormat.
/**
 * Initializes the InputFormat implementation and configuration.
 *
 * @throws RuntimeException Thrown if the InputFormat implementation cannot be obtained.
 */
private void initInputFormat() {
    ClassLoader userCodeClassLoader = getUserCodeClassLoader();

    // obtain task configuration (including stub parameters)
    Configuration taskConf = getTaskConfiguration();
    this.config = new TaskConfig(taskConf);

    final Pair<OperatorID, InputFormat<OT, InputSplit>> operatorIdAndInputFormat;
    InputOutputFormatContainer formatContainer = new InputOutputFormatContainer(config, userCodeClassLoader);
    try {
        operatorIdAndInputFormat = formatContainer.getUniqueInputFormat();
        this.format = operatorIdAndInputFormat.getValue();

        // check if the class is a subclass, if the check is required
        if (!InputFormat.class.isAssignableFrom(this.format.getClass())) {
            throw new RuntimeException("The class '" + this.format.getClass().getName() + "' is not a subclass of '" + InputFormat.class.getName() + "' as is required.");
        }
    } catch (ClassCastException ccex) {
        throw new RuntimeException("The stub class is not a proper subclass of " + InputFormat.class.getName(), ccex);
    }

    Thread thread = Thread.currentThread();
    ClassLoader original = thread.getContextClassLoader();

    // configure() is user code; run it with the user-code class loader as the thread's context class loader
    try {
        thread.setContextClassLoader(userCodeClassLoader);
        this.format.configure(formatContainer.getParameters(operatorIdAndInputFormat.getKey()));
    } catch (Throwable t) {
        throw new RuntimeException("The user defined 'configure()' method caused an error: " + t.getMessage(), t);
    } finally {
        thread.setContextClassLoader(original);
    }

    // get the factory for the type serializer
    this.serializerFactory = this.config.getOutputSerializer(userCodeClassLoader);
}
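
The try/finally around configure() is the usual pattern for calling into user code: install the user-code class loader as the thread's context class loader, run the user method, and restore the previous loader no matter what happens. A generic sketch of that pattern (a hypothetical utility, not Flink API):

import java.util.concurrent.Callable;

// Hypothetical utility: run user code with the given class loader installed as the
// thread's context class loader, restoring the previous loader afterwards. This mirrors
// what initInputFormat() does around format.configure().
public static <T> T callWithContextClassLoader(ClassLoader userCodeClassLoader, Callable<T> userCode) throws Exception {
    final Thread thread = Thread.currentThread();
    final ClassLoader original = thread.getContextClassLoader();
    try {
        thread.setContextClassLoader(userCodeClassLoader);
        return userCode.call();
    } finally {
        thread.setContextClassLoader(original);
    }
}

In the snippet above the pattern is simply inlined around the single configure() call rather than factored out.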
use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
the class JoinWithSolutionSetFirstDriver method initialize.
// --------------------------------------------------------------------------------------------
@Override
@SuppressWarnings("unchecked")
public void initialize() {
    final TypeSerializer<IT1> solutionSetSerializer;
    final TypeComparator<IT1> solutionSetComparator;

    // grab a handle to the hash table from the iteration broker
    if (taskContext instanceof AbstractIterativeTask) {
        AbstractIterativeTask<?, ?> iterativeTaskContext = (AbstractIterativeTask<?, ?>) taskContext;
        String identifier = iterativeTaskContext.brokerKey();
        Object table = SolutionSetBroker.instance().get(identifier);

        if (table instanceof CompactingHashTable) {
            this.hashTable = (CompactingHashTable<IT1>) table;
            solutionSetSerializer = this.hashTable.getBuildSideSerializer();
            solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
        } else if (table instanceof JoinHashMap) {
            this.objectMap = (JoinHashMap<IT1>) table;
            solutionSetSerializer = this.objectMap.getBuildSerializer();
            solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
        } else {
            throw new RuntimeException("Unrecognized solution set index: " + table);
        }
    } else {
        throw new RuntimeException("The task context of this driver is no iterative task context.");
    }

    TaskConfig config = taskContext.getTaskConfig();
    ClassLoader classLoader = taskContext.getUserCodeClassLoader();

    TypeSerializer<IT2> probeSideSerializer = taskContext.<IT2>getInputSerializer(0).getSerializer();
    TypeComparatorFactory<IT2> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
    this.probeSideComparator = probeSideComparatorFactory.createComparator();

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (objectReuseEnabled) {
        solutionSideRecord = solutionSetSerializer.createInstance();
        probeSideRecord = probeSideSerializer.createInstance();
    }

    TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
    pairComparator = factory.createComparator21(solutionSetComparator, this.probeSideComparator);
}
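
Apart from the comparator wiring, which is identical to the co-group variant above, the other runtime decision here is object reuse: with reuse enabled the driver pre-creates one mutable record per side (solutionSideRecord and probeSideRecord) and refills it on every probe. A tiny sketch of that check (the helper name is illustrative, not Flink API):

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeSerializer;

// Sketch: with object reuse enabled, create one record instance up front to be refilled on
// every lookup; with reuse disabled, return null and let fresh instances be created later.
public static <T> T maybeCreateReusableRecord(ExecutionConfig executionConfig, TypeSerializer<T> serializer) {
    return executionConfig.isObjectReuseEnabled() ? serializer.createInstance() : null;
}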
use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
the class AllGroupReduceDriver method prepare.
// --------------------------------------------------------------------------------------------
@Override
public void prepare() throws Exception {
    final TaskConfig config = this.taskContext.getTaskConfig();
    this.strategy = config.getDriverStrategy();

    switch (this.strategy) {
        case ALL_GROUP_REDUCE_COMBINE:
            if (!(this.taskContext.getStub() instanceof GroupCombineFunction)) {
                throw new Exception("Using combiner on a UDF that does not implement the combiner interface " + GroupCombineFunction.class.getName());
            }
            // intentional fall-through: the combine variant needs no further checks
        case ALL_GROUP_REDUCE:
        case ALL_GROUP_COMBINE:
            break;
        default:
            throw new Exception("Unrecognized driver strategy for AllGroupReduce driver: " + this.strategy.name());
    }

    this.serializer = this.taskContext.<IT>getInputSerializer(0).getSerializer();
    this.input = this.taskContext.getInput(0);

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (LOG.isDebugEnabled()) {
        LOG.debug("AllGroupReduceDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
    }
}
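
Here the TaskConfig contributes only the driver strategy; serializer, input, and object-reuse flag all come from the task context. The strategy validation, pulled out as a hypothetical helper (checkAllGroupReduceStrategy is not a Flink method), reads:

import org.apache.flink.api.common.functions.GroupCombineFunction;
import org.apache.flink.runtime.operators.DriverStrategy;
import org.apache.flink.runtime.operators.util.TaskConfig;

// Sketch of the strategy check in prepare(): the combining variant additionally requires
// the UDF to implement GroupCombineFunction; the plain reduce/combine strategies pass as-is.
public static void checkAllGroupReduceStrategy(TaskConfig config, Object udf) throws Exception {
    DriverStrategy strategy = config.getDriverStrategy();
    switch (strategy) {
        case ALL_GROUP_REDUCE_COMBINE:
            if (!(udf instanceof GroupCombineFunction)) {
                throw new Exception("Using combiner on a UDF that does not implement the combiner interface " + GroupCombineFunction.class.getName());
            }
            break;
        case ALL_GROUP_REDUCE:
        case ALL_GROUP_COMBINE:
            break;
        default:
            throw new Exception("Unrecognized driver strategy for AllGroupReduce driver: " + strategy.name());
    }
}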
use of org.apache.flink.runtime.operators.util.TaskConfig in project flink by apache.
the class BatchTask method invoke.
// --------------------------------------------------------------------------------------------
//  Task Interface
// --------------------------------------------------------------------------------------------
/**
 * The main work method.
 */
@Override
public void invoke() throws Exception {
    // --------------------------------------------------------------------
    if (LOG.isDebugEnabled()) {
        LOG.debug(formatLogString("Start registering input and output."));
    }

    // obtain task configuration (including stub parameters)
    Configuration taskConf = getTaskConfiguration();
    this.config = new TaskConfig(taskConf);

    // now get the operator class which drives the operation
    final Class<? extends Driver<S, OT>> driverClass = this.config.getDriver();
    this.driver = InstantiationUtil.instantiate(driverClass, Driver.class);

    String headName = getEnvironment().getTaskInfo().getTaskName().split("->")[0].trim();
    this.metrics = getEnvironment().getMetricGroup().getOrAddOperator(headName.startsWith("CHAIN") ? headName.substring(6) : headName);
    this.metrics.getIOMetricGroup().reuseInputMetricsForTask();
    if (config.getNumberOfChainedStubs() == 0) {
        this.metrics.getIOMetricGroup().reuseOutputMetricsForTask();
    }

    // initialize the readers.
    // this does not yet trigger any stream consuming or processing.
    initInputReaders();
    initBroadcastInputReaders();

    // initialize the writers.
    initOutputs();

    if (LOG.isDebugEnabled()) {
        LOG.debug(formatLogString("Finished registering input and output."));
    }

    // --------------------------------------------------------------------
    if (LOG.isDebugEnabled()) {
        LOG.debug(formatLogString("Start task code."));
    }

    this.runtimeUdfContext = createRuntimeContext(metrics);

    // whatever happens in this scope, the local strategies, readers, and writers are
    // cleaned up in the finally clause below (also when the task is canceled).
    try {
        // the local processing includes building the dams / caches
        try {
            int numInputs = driver.getNumberOfInputs();
            int numComparators = driver.getNumberOfDriverComparators();
            int numBroadcastInputs = this.config.getNumBroadcastInputs();

            initInputsSerializersAndComparators(numInputs, numComparators);
            initBroadcastInputsSerializers(numBroadcastInputs);

            // set the iterative status for inputs and broadcast inputs
            {
                List<Integer> iterativeInputs = new ArrayList<>();
                for (int i = 0; i < numInputs; i++) {
                    final int numberOfEventsUntilInterrupt = getTaskConfig().getNumberOfEventsUntilInterruptInIterativeGate(i);
                    if (numberOfEventsUntilInterrupt < 0) {
                        throw new IllegalArgumentException();
                    } else if (numberOfEventsUntilInterrupt > 0) {
                        this.inputReaders[i].setIterativeReader();
                        iterativeInputs.add(i);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(formatLogString("Input [" + i + "] reads in supersteps with [" + numberOfEventsUntilInterrupt + "] event(s) till next superstep."));
                        }
                    }
                }
                this.iterativeInputs = asArray(iterativeInputs);
            }

            {
                List<Integer> iterativeBcInputs = new ArrayList<>();
                for (int i = 0; i < numBroadcastInputs; i++) {
                    final int numberOfEventsUntilInterrupt = getTaskConfig().getNumberOfEventsUntilInterruptInIterativeBroadcastGate(i);
                    if (numberOfEventsUntilInterrupt < 0) {
                        throw new IllegalArgumentException();
                    } else if (numberOfEventsUntilInterrupt > 0) {
                        this.broadcastInputReaders[i].setIterativeReader();
                        iterativeBcInputs.add(i);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug(formatLogString("Broadcast input [" + i + "] reads in supersteps with [" + numberOfEventsUntilInterrupt + "] event(s) till next superstep."));
                        }
                    }
                }
                this.iterativeBroadcastInputs = asArray(iterativeBcInputs);
            }

            initLocalStrategies(numInputs);
        } catch (Exception e) {
            throw new RuntimeException("Initializing the input processing failed" + (e.getMessage() == null ? "." : ": " + e.getMessage()), e);
        }

        if (!this.running) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(formatLogString("Task cancelled before task code was started."));
            }
            return;
        }

        // pre main-function initialization
        initialize();

        // read the broadcast variables. they will be released in the finally clause
        for (int i = 0; i < this.config.getNumBroadcastInputs(); i++) {
            final String name = this.config.getBroadcastInputName(i);
            readAndSetBroadcastInput(i, name, this.runtimeUdfContext, 1);
        }

        // the work goes here
        run();
    } finally {
        // clean up in any case!
        closeLocalStrategiesAndCaches();
        clearReaders(inputReaders);
        clearWriters(eventualOutputs);
    }

    if (this.running) {
        if (LOG.isDebugEnabled()) {
            LOG.debug(formatLogString("Finished task code."));
        }
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug(formatLogString("Task code cancelled."));
        }
    }
}
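
Both loops that mark inputs and broadcast inputs as iterative apply the same rule to a value read from the TaskConfig: a positive number of events until interrupt makes the gate iterative, zero means the gate is not part of the iteration, and negative values are invalid. As a small sketch (isIterativeInputGate is a hypothetical name, not a BatchTask method):

import org.apache.flink.runtime.operators.util.TaskConfig;

// Sketch of the per-input decision in invoke(): a positive event count marks the gate as
// iterative (its reader is switched to iterative mode), zero means the gate is not part of
// the iteration, and negative values are rejected as invalid configuration.
public static boolean isIterativeInputGate(TaskConfig config, int gateIndex) {
    final int eventsUntilInterrupt = config.getNumberOfEventsUntilInterruptInIterativeGate(gateIndex);
    if (eventsUntilInterrupt < 0) {
        throw new IllegalArgumentException("Invalid number of events until interrupt for input gate " + gateIndex);
    }
    return eventsUntilInterrupt > 0;
}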