Use of org.apache.flink.api.common.operators.Keys.ExpressionKeys in project flink by apache.
The class UdfAnalyzer, method analyze:
public boolean analyze() throws CodeAnalyzerException {
    if (state == STATE_END_OF_ANALYZING) {
        throw new IllegalStateException("Analyzing is already done.");
    }
    boolean discardReturnValues = false;
    if (isIterableInput) {
        if (keys1 == null || (keys2 == null && isBinary)) {
            throw new IllegalArgumentException("This type of function requires key information for analysis.");
        } else if (!(keys1 instanceof ExpressionKeys) || (!(keys2 instanceof ExpressionKeys) && isBinary)) {
            // TODO currently only ExpressionKeys are supported as keys
            discardReturnValues = true;
        }
    }
    try {
        final Object[] mn = findMethodNode(internalUdfClassName, baseClassMethod);
        final NestedMethodAnalyzer nma = new NestedMethodAnalyzer(this, (String) mn[1], (MethodNode) mn[0], null, MAX_NESTING, true);
        final TaggedValue result = nma.analyze();
        setState(STATE_END_OF_ANALYZING);
        // special case: FilterFunction
        if (isFilterFunction) {
            discardReturnValues = true;
            // check for input modification
            if (!filterInputCopy.equals(filterInputRef)) {
                addHintOrThrowException("Function modifies the input. This can lead to unexpected behaviour during runtime.");
            }
        }
        if (!discardReturnValues) {
            // merge return values of a collector
            if (hasCollector) {
                returnValue = mergeReturnValues(collectorValues);
            } else {
                returnValue = result;
            }
            // or is a reduce function
            if ((isIterableInput || isReduceFunction) && returnValue != null) {
                if (returnValue.canContainFields()) {
                    removeUngroupedInputsFromContainer(returnValue);
                } else if (returnValue.isInput() && !returnValue.isGrouped()) {
                    returnValue = null;
                }
            }
        } else {
            // any return value is invalid
            returnValue = null;
        }
    } catch (Exception e) {
        Throwable cause = e.getCause();
        while (cause != null && !(cause instanceof CodeErrorException)) {
            cause = cause.getCause();
        }
        if ((cause != null && cause instanceof CodeErrorException) || e instanceof CodeErrorException) {
            throw new CodeErrorException("Function code contains obvious errors. "
                    + "If you think the code analysis is wrong at this point you can "
                    + "disable the entire code analyzer in ExecutionConfig or add"
                    + " @SkipCodeAnalysis to your function to disable the analysis.",
                    (cause != null) ? cause : e);
        }
        throw new CodeAnalyzerException("Exception occurred during code analysis.", e);
    }
    return true;
}
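The analyzer above only keeps tracking return values of grouped (iterable-input) functions when every key set is an ExpressionKeys instance. Below is a minimal sketch of that guard pulled into a standalone class; supportsReturnValueAnalysis is a hypothetical helper (not part of Flink), and the wildcard key construction follows the tests later on this page.

import org.apache.flink.api.common.operators.Keys;
import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;

public class KeyGuardSketch {

    // Mirrors the guard in UdfAnalyzer.analyze(): key information must be present,
    // and return-value analysis only proceeds when every key set is ExpressionKeys.
    static boolean supportsReturnValueAnalysis(Keys<?> keys1, Keys<?> keys2, boolean isBinary) {
        if (keys1 == null || (keys2 == null && isBinary)) {
            throw new IllegalArgumentException("This type of function requires key information for analysis.");
        }
        return keys1 instanceof ExpressionKeys && (!isBinary || keys2 instanceof ExpressionKeys);
    }

    public static void main(String[] args) {
        // A wildcard key over a basic type, as in the compatibility tests below.
        Keys<String> keys = new ExpressionKeys<>("*", BasicTypeInfo.STRING_TYPE_INFO);
        System.out.println(supportsReturnValueAnalysis(keys, null, false));
    }
}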
Use of org.apache.flink.api.common.operators.Keys.ExpressionKeys in project flink by apache.
The class ExpressionKeysTest, method testAreCompatible8:
@Test
public void testAreCompatible8() throws Keys.IncompatibleKeysException {
    TypeInformation<String> t1 = BasicTypeInfo.STRING_TYPE_INFO;
    TypeInformation<Pojo2> t2 = TypeExtractor.getForClass(Pojo2.class);
    ExpressionKeys<String> ek1 = new ExpressionKeys<>("*", t1);
    Keys<Pojo2> ek2 = new Keys.SelectorFunctionKeys<>(new KeySelector1(), t2, BasicTypeInfo.STRING_TYPE_INFO);
    Assert.assertTrue(ek1.areCompatible(ek2));
}
Use of org.apache.flink.api.common.operators.Keys.ExpressionKeys in project flink by apache.
The class ExpressionKeysTest, method testAreCompatible9:
@Test
public void testAreCompatible9() throws Keys.IncompatibleKeysException {
    TypeInformation<Tuple3<String, Long, Integer>> t1 = new TupleTypeInfo<>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
    TypeInformation<PojoWithMultiplePojos> t2 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);
    ExpressionKeys<Tuple3<String, Long, Integer>> ek1 = new ExpressionKeys<>(new int[] { 2, 0 }, t1);
    Keys<PojoWithMultiplePojos> ek2 = new Keys.SelectorFunctionKeys<>(new KeySelector3(), t2, new TupleTypeInfo<Tuple2<Integer, String>>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO));
    Assert.assertTrue(ek1.areCompatible(ek2));
}
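Both tests check Keys.areCompatible between ExpressionKeys and SelectorFunctionKeys. The following is a self-contained sketch of the same pattern with plain JDK types; the anonymous KeySelector and the expected outcome (compatible, since both sides expose a single String key field) are assumptions for illustration, not taken from the Flink tests.

import org.apache.flink.api.common.operators.Keys;
import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;

public class AreCompatibleSketch {

    public static void main(String[] args) throws Keys.IncompatibleKeysException {
        TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO;
        TypeInformation<Long> longType = BasicTypeInfo.LONG_TYPE_INFO;

        // "*" on an atomic type selects the whole value, so the key type is String.
        ExpressionKeys<String> expressionKeys = new ExpressionKeys<>("*", stringType);

        // Selector-function keys whose extracted key is also a String.
        Keys<Long> selectorKeys = new Keys.SelectorFunctionKeys<>(
                new KeySelector<Long, String>() {
                    @Override
                    public String getKey(Long value) {
                        return String.valueOf(value);
                    }
                },
                longType, BasicTypeInfo.STRING_TYPE_INFO);

        // Assumed to print true because both sides expose a single String key field.
        System.out.println(expressionKeys.areCompatible(selectorKeys));
    }
}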
Use of org.apache.flink.api.common.operators.Keys.ExpressionKeys in project flink by apache.
The class PojoSubclassComparatorTest, method createComparator:
@Override
protected TypeComparator<PojoContainingTuple> createComparator(boolean ascending) {
    Assert.assertTrue(type instanceof CompositeType);
    CompositeType<PojoContainingTuple> cType = (CompositeType<PojoContainingTuple>) type;
    ExpressionKeys<PojoContainingTuple> keys = new ExpressionKeys<PojoContainingTuple>(new String[] { "theTuple.*" }, cType);
    boolean[] orders = new boolean[keys.getNumberOfKeyFields()];
    Arrays.fill(orders, ascending);
    return cType.createComparator(keys.computeLogicalKeyPositions(), orders, 0, new ExecutionConfig());
}
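The comparator here is built from a nested wildcard key expression over a POJO field. A hedged sketch of the same steps follows, with a hypothetical Wrapper POJO standing in for PojoContainingTuple; it assumes that "theTuple.*" expands into all fields of the nested tuple and that the extracted type is a CompositeType.

import java.util.Arrays;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.CompositeType;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class NestedKeyComparatorSketch {

    // Hypothetical POJO standing in for PojoContainingTuple: public fields and a
    // public no-argument constructor so the TypeExtractor treats it as a POJO.
    public static class Wrapper {
        public Tuple2<Long, Long> theTuple = new Tuple2<>(0L, 0L);
        public Wrapper() {}
    }

    public static void main(String[] args) {
        TypeInformation<Wrapper> type = TypeExtractor.getForClass(Wrapper.class);
        CompositeType<Wrapper> cType = (CompositeType<Wrapper>) type;

        // "theTuple.*" is assumed to expand into every field of the nested tuple.
        ExpressionKeys<Wrapper> keys = new ExpressionKeys<>(new String[] { "theTuple.*" }, cType);
        boolean[] orders = new boolean[keys.getNumberOfKeyFields()];
        Arrays.fill(orders, true);

        TypeComparator<Wrapper> comparator =
                cType.createComparator(keys.computeLogicalKeyPositions(), orders, 0, new ExecutionConfig());
        System.out.println(keys.getNumberOfKeyFields() + " key fields, normalized keys: "
                + comparator.supportsNormalizedKey());
    }
}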
Use of org.apache.flink.api.common.operators.Keys.ExpressionKeys in project flink by apache.
The class GroupReduceOperator, method translateToDataFlow:
// --------------------------------------------------------------------------------------------
// Translation
// --------------------------------------------------------------------------------------------
@Override
@SuppressWarnings("unchecked")
protected GroupReduceOperatorBase<?, OUT, ?> translateToDataFlow(Operator<IN> input) {
    String name = getName() != null ? getName() : "GroupReduce at " + defaultName;
    // wrap CombineFunction in GroupCombineFunction if combinable
    if (combinable && function instanceof CombineFunction<?, ?>) {
        this.function = function instanceof RichGroupReduceFunction<?, ?>
                ? new RichCombineToGroupCombineWrapper((RichGroupReduceFunction<?, ?>) function)
                : new CombineToGroupCombineWrapper((CombineFunction<?, ?>) function);
    }
    // distinguish between grouped reduce and non-grouped reduce
    if (grouper == null) {
        // non grouped reduce
        UnaryOperatorInformation<IN, OUT> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getResultType());
        GroupReduceOperatorBase<IN, OUT, GroupReduceFunction<IN, OUT>> po =
                new GroupReduceOperatorBase<>(function, operatorInfo, new int[0], name);
        po.setCombinable(combinable);
        po.setInput(input);
        // the parallelism for a non grouped reduce can only be 1
        po.setParallelism(1);
        return po;
    }
    if (grouper.getKeys() instanceof SelectorFunctionKeys) {
        @SuppressWarnings("unchecked")
        SelectorFunctionKeys<IN, ?> selectorKeys = (SelectorFunctionKeys<IN, ?>) grouper.getKeys();
        if (grouper instanceof SortedGrouping) {
            SortedGrouping<IN> sortedGrouping = (SortedGrouping<IN>) grouper;
            SelectorFunctionKeys<IN, ?> sortKeys = sortedGrouping.getSortSelectionFunctionKey();
            Ordering groupOrder = sortedGrouping.getGroupOrdering();
            PlanUnwrappingSortedReduceGroupOperator<IN, OUT, ?, ?> po =
                    translateSelectorFunctionSortedReducer(selectorKeys, sortKeys, groupOrder, function, getResultType(), name, input, isCombinable());
            po.setParallelism(this.getParallelism());
            po.setCustomPartitioner(grouper.getCustomPartitioner());
            return po;
        } else {
            PlanUnwrappingReduceGroupOperator<IN, OUT, ?> po =
                    translateSelectorFunctionReducer(selectorKeys, function, getResultType(), name, input, isCombinable());
            po.setParallelism(this.getParallelism());
            po.setCustomPartitioner(grouper.getCustomPartitioner());
            return po;
        }
    } else if (grouper.getKeys() instanceof ExpressionKeys) {
        int[] logicalKeyPositions = grouper.getKeys().computeLogicalKeyPositions();
        UnaryOperatorInformation<IN, OUT> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getResultType());
        GroupReduceOperatorBase<IN, OUT, GroupReduceFunction<IN, OUT>> po =
                new GroupReduceOperatorBase<>(function, operatorInfo, logicalKeyPositions, name);
        po.setCombinable(combinable);
        po.setInput(input);
        po.setParallelism(getParallelism());
        po.setCustomPartitioner(grouper.getCustomPartitioner());
        // set group order
        if (grouper instanceof SortedGrouping) {
            SortedGrouping<IN> sortedGrouper = (SortedGrouping<IN>) grouper;
            int[] sortKeyPositions = sortedGrouper.getGroupSortKeyPositions();
            Order[] sortOrders = sortedGrouper.getGroupSortOrders();
            Ordering o = new Ordering();
            for (int i = 0; i < sortKeyPositions.length; i++) {
                o.appendOrdering(sortKeyPositions[i], null, sortOrders[i]);
            }
            po.setGroupOrder(o);
        }
        return po;
    } else {
        throw new UnsupportedOperationException("Unrecognized key type.");
    }
}
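The translation branches on the concrete key class: SelectorFunctionKeys go through the unwrapping operators, while ExpressionKeys are flattened into logical positions and handed to the GroupReduceOperatorBase. A minimal sketch of that distinction outside the operator is shown below; the tuple type, the field positions, and the anonymous KeySelector are chosen purely for illustration.

import java.util.Arrays;

import org.apache.flink.api.common.operators.Keys;
import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class TranslationKeySketch {

    public static void main(String[] args) {
        TupleTypeInfo<Tuple2<String, Integer>> type = new TupleTypeInfo<>(
                BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);

        // Position-based grouping keys: the ExpressionKeys branch applies and the
        // keys can be flattened into logical positions for the operator base.
        Keys<Tuple2<String, Integer>> expressionKeys = new ExpressionKeys<>(new int[] { 0 }, type);
        if (expressionKeys instanceof ExpressionKeys) {
            System.out.println(Arrays.toString(expressionKeys.computeLogicalKeyPositions()));
        }

        // Selector-function grouping keys: handled by the unwrapping branch instead.
        Keys<Tuple2<String, Integer>> selectorKeys = new SelectorFunctionKeys<>(
                new KeySelector<Tuple2<String, Integer>, String>() {
                    @Override
                    public String getKey(Tuple2<String, Integer> value) {
                        return value.f0;
                    }
                },
                type, BasicTypeInfo.STRING_TYPE_INFO);
        System.out.println(selectorKeys instanceof ExpressionKeys);
    }
}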