Use of org.apache.flink.api.common.InvalidProgramException in project flink by apache.
The class CoGroupOperator, method translateToDataFlow.
@Override
@Internal
protected org.apache.flink.api.common.operators.base.CoGroupOperatorBase<?, ?, OUT, ?> translateToDataFlow(
        Operator<I1> input1, Operator<I2> input2) {

    String name = getName() != null ? getName() : "CoGroup at " + defaultName;
    try {
        keys1.areCompatible(keys2);
    } catch (IncompatibleKeysException e) {
        throw new InvalidProgramException("The types of the key fields do not match.", e);
    }

    final org.apache.flink.api.common.operators.base.CoGroupOperatorBase<?, ?, OUT, ?> po;

    if (keys1 instanceof SelectorFunctionKeys && keys2 instanceof SelectorFunctionKeys) {
        @SuppressWarnings("unchecked")
        SelectorFunctionKeys<I1, ?> selectorKeys1 = (SelectorFunctionKeys<I1, ?>) keys1;
        @SuppressWarnings("unchecked")
        SelectorFunctionKeys<I2, ?> selectorKeys2 = (SelectorFunctionKeys<I2, ?>) keys2;

        po = translateSelectorFunctionCoGroup(
                selectorKeys1, selectorKeys2, function, getResultType(), name, input1, input2);

        po.setParallelism(getParallelism());
        po.setCustomPartitioner(customPartitioner);
    } else if (keys2 instanceof SelectorFunctionKeys) {
        int[] logicalKeyPositions1 = keys1.computeLogicalKeyPositions();

        @SuppressWarnings("unchecked")
        SelectorFunctionKeys<I2, ?> selectorKeys2 = (SelectorFunctionKeys<I2, ?>) keys2;

        po = translateSelectorFunctionCoGroupRight(
                logicalKeyPositions1, selectorKeys2, function, getInput1Type(), getResultType(), name, input1, input2);

        po.setParallelism(getParallelism());
        po.setCustomPartitioner(customPartitioner);
    } else if (keys1 instanceof SelectorFunctionKeys) {
        @SuppressWarnings("unchecked")
        SelectorFunctionKeys<I1, ?> selectorKeys1 = (SelectorFunctionKeys<I1, ?>) keys1;

        int[] logicalKeyPositions2 = keys2.computeLogicalKeyPositions();

        po = translateSelectorFunctionCoGroupLeft(
                selectorKeys1, logicalKeyPositions2, function, getInput2Type(), getResultType(), name, input1, input2);
    } else if (keys1 instanceof Keys.ExpressionKeys && keys2 instanceof Keys.ExpressionKeys) {
        try {
            keys1.areCompatible(keys2);
        } catch (IncompatibleKeysException e) {
            throw new InvalidProgramException("The types of the key fields do not match.", e);
        }

        int[] logicalKeyPositions1 = keys1.computeLogicalKeyPositions();
        int[] logicalKeyPositions2 = keys2.computeLogicalKeyPositions();

        CoGroupOperatorBase<I1, I2, OUT, CoGroupFunction<I1, I2, OUT>> op =
                new CoGroupOperatorBase<>(
                        function,
                        new BinaryOperatorInformation<>(getInput1Type(), getInput2Type(), getResultType()),
                        logicalKeyPositions1,
                        logicalKeyPositions2,
                        name);
        op.setFirstInput(input1);
        op.setSecondInput(input2);
        po = op;
    } else {
        throw new UnsupportedOperationException("Unrecognized or incompatible key types.");
    }

    // configure shared characteristics
    po.setParallelism(getParallelism());
    po.setCustomPartitioner(customPartitioner);

    if (groupSortKeyOrderFirst.size() > 0) {
        Ordering o = new Ordering();
        for (Pair<Integer, Order> entry : groupSortKeyOrderFirst) {
            o.appendOrdering(entry.getLeft(), null, entry.getRight());
        }
        po.setGroupOrderForInputOne(o);
    }
    if (groupSortKeyOrderSecond.size() > 0) {
        Ordering o = new Ordering();
        for (Pair<Integer, Order> entry : groupSortKeyOrderSecond) {
            o.appendOrdering(entry.getLeft(), null, entry.getRight());
        }
        po.setGroupOrderForInputTwo(o);
    }

    return po;
}
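For context, here is a minimal, hypothetical DataSet program (not part of the Flink sources; class and variable names are illustrative) that would trip the key-compatibility check above: the two coGroup key fields have different types, so keys1.areCompatible(keys2) fails and the incompatibility is reported as an InvalidProgramException, either when the keys are declared or when the plan is translated as shown above.

import org.apache.flink.api.common.InvalidProgramException;
import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class MismatchedCoGroupKeys {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<Tuple2<Integer, String>> left = env.fromElements(Tuple2.of(1, "a"), Tuple2.of(2, "b"));
        DataSet<Tuple2<String, Integer>> right = env.fromElements(Tuple2.of("a", 1), Tuple2.of("b", 2));
        try {
            // field 0 of 'left' is an Integer, field 0 of 'right' is a String: incompatible key types
            left.coGroup(right).where(0).equalTo(0)
                    .with(new CoGroupFunction<Tuple2<Integer, String>, Tuple2<String, Integer>, String>() {
                        @Override
                        public void coGroup(Iterable<Tuple2<Integer, String>> first,
                                Iterable<Tuple2<String, Integer>> second, Collector<String> out) {
                            out.collect("matched");
                        }
                    })
                    .print();
        } catch (InvalidProgramException e) {
            // the keys are incompatible, so the program is rejected before any data is processed
            System.err.println(e.getMessage());
        }
    }
}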
Use of org.apache.flink.api.common.InvalidProgramException in project flink by apache.
The class LocalStreamEnvironment, method validateAndGetConfiguration.
private static Configuration validateAndGetConfiguration(final Configuration configuration) {
    if (!areExplicitEnvironmentsAllowed()) {
        throw new InvalidProgramException(
                "The LocalStreamEnvironment cannot be used when submitting a program through a client, "
                        + "or running in a TestEnvironment context.");
    }
    final Configuration effectiveConfiguration = new Configuration(checkNotNull(configuration));
    effectiveConfiguration.set(DeploymentOptions.TARGET, "local");
    effectiveConfiguration.set(DeploymentOptions.ATTACHED, true);
    return effectiveConfiguration;
}
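The check above only matters when an explicit local environment is requested. As a rough illustration (not taken from the Flink sources, and assuming a Flink version that offers createLocalEnvironment(parallelism, configuration)), a standalone main() like the following funnels through validateAndGetConfiguration and pins the deployment to the "local" target in attached mode; the same call inside a program submitted through a client would hit the InvalidProgramException.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class LocalEnvironmentSketch {
    public static void main(String[] args) throws Exception {
        // explicit local environment; only legal when the program is not submitted via a client
        Configuration conf = new Configuration();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment(2, conf);

        env.fromElements(1, 2, 3).print();

        env.execute("local-sketch");
    }
}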
Use of org.apache.flink.api.common.InvalidProgramException in project flink by apache.
The class CompileUtils, method doCompile.
private static <T> Class<T> doCompile(ClassLoader cl, String name, String code) {
    checkNotNull(cl, "Classloader must not be null.");
    CODE_LOG.debug("Compiling: {} \n\n Code:\n{}", name, code);
    SimpleCompiler compiler = new SimpleCompiler();
    compiler.setParentClassLoader(cl);
    try {
        compiler.cook(code);
    } catch (Throwable t) {
        System.out.println(addLineNumber(code));
        throw new InvalidProgramException(
                "Table program cannot be compiled. This is a bug. Please file an issue.", t);
    }
    try {
        //noinspection unchecked
        return (Class<T>) compiler.getClassLoader().loadClass(name);
    } catch (ClassNotFoundException e) {
        throw new RuntimeException("Can not load class " + name, e);
    }
}
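The two steps above, cook the generated source and then load the resulting class from the compiler's class loader, are plain Janino usage. A self-contained sketch of the same pattern, with purely illustrative class and method names, might look like this:

import org.codehaus.janino.SimpleCompiler;

public class JaninoCompileSketch {
    public static void main(String[] args) throws Exception {
        String code =
                "public class Greeter {\n"
                + "    public static String greet(String who) { return \"Hello, \" + who; }\n"
                + "}\n";

        SimpleCompiler compiler = new SimpleCompiler();
        compiler.setParentClassLoader(JaninoCompileSketch.class.getClassLoader());
        compiler.cook(code); // throws CompileException if the source is invalid

        Class<?> greeter = compiler.getClassLoader().loadClass("Greeter");
        Object result = greeter.getMethod("greet", String.class).invoke(null, "Flink");
        System.out.println(result); // Hello, Flink
    }
}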
Use of org.apache.flink.api.common.InvalidProgramException in project flink by apache.
The class CompileUtils, method compileExpression.
/**
 * Compiles an expression code string to a janino {@link ExpressionEvaluator}.
 *
 * @param code the expression code
 * @param argumentNames the expression argument names
 * @param argumentClasses the expression argument classes
 * @param returnClass the return type of the expression
 * @return the compiled {@link ExpressionEvaluator}
 */
public static ExpressionEvaluator compileExpression(
        String code, List<String> argumentNames, List<Class<?>> argumentClasses, Class<?> returnClass) {
    try {
        ExpressionEntry key = new ExpressionEntry(code, argumentNames, argumentClasses, returnClass);
        return COMPILED_EXPRESSION_CACHE.get(key, () -> {
            ExpressionEvaluator expressionEvaluator = new ExpressionEvaluator();
            // Input args
            expressionEvaluator.setParameters(
                    argumentNames.toArray(new String[0]), argumentClasses.toArray(new Class[0]));
            // Result type
            expressionEvaluator.setExpressionType(returnClass);
            try {
                // Compile
                expressionEvaluator.cook(code);
            } catch (CompileException e) {
                throw new InvalidProgramException(
                        "Table program cannot be compiled. This is a bug. Please file an issue.\nExpression: " + code, e);
            }
            return expressionEvaluator;
        });
    } catch (Exception e) {
        throw new FlinkRuntimeException(e.getMessage(), e);
    }
}
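Outside of Flink's cache and exception wrapping, the underlying ExpressionEvaluator workflow is: declare the parameters, declare the result type, cook the expression, then evaluate it with concrete arguments. A minimal standalone sketch, with illustrative names only:

import org.codehaus.janino.ExpressionEvaluator;

public class ExpressionEvaluatorSketch {
    public static void main(String[] args) throws Exception {
        ExpressionEvaluator ee = new ExpressionEvaluator();
        // declare the expression's arguments and result type before compiling
        ee.setParameters(new String[] {"a", "b"}, new Class[] {int.class, int.class});
        ee.setExpressionType(int.class);
        ee.cook("a * b + 1"); // throws CompileException on an invalid expression

        Object result = ee.evaluate(new Object[] {6, 7});
        System.out.println(result); // 43
    }
}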
Use of org.apache.flink.api.common.InvalidProgramException in project flink by apache.
The class JoinITCase, method testDefaultJoinOnTwoCustomTypeInputsWithInnerClassKeyExtractorsDisabledClosureCleaner.
@Test
public void testDefaultJoinOnTwoCustomTypeInputsWithInnerClassKeyExtractorsDisabledClosureCleaner() throws Exception {
    /*
     * (Default) Join on two custom type inputs with key extractors, check if disabling closure cleaning works
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableClosureCleaner();

    DataSet<CustomType> ds1 = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> ds2 = CollectionDataSets.getSmallCustomTypeDataSet(env);
    boolean correctExceptionTriggered = false;
    try {
        DataSet<Tuple2<CustomType, CustomType>> joinDs = ds1.join(ds2)
                .where(new KeySelector<CustomType, Integer>() {
                    @Override
                    public Integer getKey(CustomType value) {
                        return value.myInt;
                    }
                })
                .equalTo(new KeySelector<CustomType, Integer>() {
                    @Override
                    public Integer getKey(CustomType value) throws Exception {
                        return value.myInt;
                    }
                });
    } catch (InvalidProgramException ex) {
        correctExceptionTriggered = (ex.getCause() instanceof java.io.NotSerializableException);
    }
    Assert.assertTrue(correctExceptionTriggered);
}
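The test relies on the fact that, with the closure cleaner disabled, the anonymous KeySelector instances keep a reference to the enclosing, non-serializable test class, so Flink rejects the program with an InvalidProgramException caused by a NotSerializableException. A hedged sketch of the usual remedy is a standalone static key selector that captures nothing; the class name below is hypothetical, and CustomType refers to the same CollectionDataSets.CustomType used in the test.

// hypothetical helper class; captures no enclosing instance, so it serializes cleanly
public static class MyIntKeySelector implements KeySelector<CustomType, Integer> {
    @Override
    public Integer getKey(CustomType value) {
        return value.myInt;
    }
}

// usage: ds1.join(ds2).where(new MyIntKeySelector()).equalTo(new MyIntKeySelector())
// works even with the closure cleaner disabled, because nothing non-serializable is captured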