Use of org.apache.flink.annotation.PublicEvolving in project flink by apache.
The class WindowedStream, method fold.
/**
* Applies the given window function to each window. The window function is called for each
* evaluation of the window for each key individually. The output of the window function is
* interpreted as a regular non-windowed stream.
*
 * <p>Arriving data is incrementally aggregated using the given fold function.
*
* @param initialValue The initial value of the fold.
* @param foldFunction The fold function that is used for incremental aggregation.
* @param function The window function.
 * @param foldAccumulatorType Type information for the result type of the fold function.
 * @param resultType Type information for the result type of the window function.
* @return The data stream that is the result of applying the window function to the window.
*/
@PublicEvolving
public <ACC, R> SingleOutputStreamOperator<R> fold(
        ACC initialValue, FoldFunction<T, ACC> foldFunction, WindowFunction<ACC, R, K, W> function,
        TypeInformation<ACC> foldAccumulatorType, TypeInformation<R> resultType) {

    if (foldFunction instanceof RichFunction) {
        throw new UnsupportedOperationException("FoldFunction of fold can not be a RichFunction.");
    }
    if (windowAssigner instanceof MergingWindowAssigner) {
        throw new UnsupportedOperationException("Fold cannot be used with a merging WindowAssigner.");
    }
    if (windowAssigner instanceof BaseAlignedWindowAssigner) {
        throw new UnsupportedOperationException(
                "Fold cannot be used with a " + windowAssigner.getClass().getSimpleName() + " assigner.");
    }

    // clean the closures
    function = input.getExecutionEnvironment().clean(function);
    foldFunction = input.getExecutionEnvironment().clean(foldFunction);

    String callLocation = Utils.getCallLocationName();
    String udfName = "WindowedStream." + callLocation;
    String opName;
    KeySelector<T, K> keySel = input.getKeySelector();
    OneInputStreamOperator<T, R> operator;

    if (evictor != null) {
        @SuppressWarnings({"unchecked", "rawtypes"})
        TypeSerializer<StreamRecord<T>> streamRecordSerializer = (TypeSerializer<StreamRecord<T>>)
                new StreamElementSerializer(input.getType().createSerializer(getExecutionEnvironment().getConfig()));
        ListStateDescriptor<StreamRecord<T>> stateDesc =
                new ListStateDescriptor<>("window-contents", streamRecordSerializer);

        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + evictor + ", " + udfName + ")";

        operator = new EvictingWindowOperator<>(windowAssigner,
                windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()),
                keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()),
                stateDesc,
                new InternalIterableWindowFunction<>(
                        new FoldApplyWindowFunction<>(initialValue, foldFunction, function, foldAccumulatorType)),
                trigger, evictor, allowedLateness, lateDataOutputTag);
    } else {
        FoldingStateDescriptor<T, ACC> stateDesc = new FoldingStateDescriptor<>("window-contents",
                initialValue, foldFunction,
                foldAccumulatorType.createSerializer(getExecutionEnvironment().getConfig()));

        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + udfName + ")";

        operator = new WindowOperator<>(windowAssigner,
                windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()),
                keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()),
                stateDesc, new InternalSingleValueWindowFunction<>(function),
                trigger, allowedLateness, lateDataOutputTag);
    }

    return input.transform(opName, resultType, operator);
}
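For context, a minimal usage sketch of this fold variant, assuming the Flink 1.3-era DataStream API the snippet comes from; the stream contents, key, and window size are illustrative. Since the FoldFunction may not be a RichFunction and the assigner may not be a merging one, the sketch uses plain anonymous classes and a tumbling time window:

import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Tuple2<String, Long>> input =
        env.fromElements(Tuple2.of("a", 1L), Tuple2.of("b", 1L), Tuple2.of("a", 1L));

input
    .keyBy(0)                                  // position-based key, so the key type is Tuple
    .timeWindow(Time.seconds(5))
    .fold(
        0L,                                    // initialValue
        new FoldFunction<Tuple2<String, Long>, Long>() {
            @Override
            public Long fold(Long acc, Tuple2<String, Long> value) {
                return acc + value.f1;         // incremental aggregation per arriving element
            }
        },
        new WindowFunction<Long, String, Tuple, TimeWindow>() {
            @Override
            public void apply(Tuple key, TimeWindow window, Iterable<Long> folded, Collector<String> out) {
                // the pre-folded accumulator arrives as a one-element Iterable
                out.collect(key + " @ " + window + ": " + folded.iterator().next());
            }
        },
        BasicTypeInfo.LONG_TYPE_INFO,          // foldAccumulatorType
        BasicTypeInfo.STRING_TYPE_INFO)        // resultType
    .print();

Note that in the non-evicting branch above the fold happens eagerly in FoldingState, so the WindowFunction only ever sees the single accumulated value, wrapped as a one-element Iterable.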
Use of org.apache.flink.annotation.PublicEvolving in project flink by apache.
The class TypeExtractor, method getUnaryOperatorReturnType.
/**
* Returns the unary operator's return type.
*
* @param function Function to extract the return type from
* @param baseClass Base class of the function
* @param inputTypeArgumentIndex Index of the type argument of function's first parameter
* specifying the input type if it is wrapped (Iterable, Map,
* etc.). Otherwise -1.
 * @param outputTypeArgumentIndex Index of the type argument of function's second parameter
* specifying the output type if it is wrapped in a Collector.
* Otherwise -1.
* @param inType Type of the input elements (In case of an iterable, it is the element type)
* @param functionName Function name
* @param allowMissing Can the type information be missing
* @param <IN> Input type
* @param <OUT> Output type
* @return TypeInformation of the return type of the function
*/
@SuppressWarnings("unchecked")
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getUnaryOperatorReturnType(
        Function function, Class<?> baseClass, int inputTypeArgumentIndex, int outputTypeArgumentIndex,
        TypeInformation<IN> inType, String functionName, boolean allowMissing) {
    try {
        final LambdaExecutable exec;
        try {
            exec = checkAndExtractLambda(function);
        } catch (TypeExtractionException e) {
            throw new InvalidTypesException("Internal error occurred.", e);
        }
        if (exec != null) {
            // check for lambda type erasure
            validateLambdaGenericParameters(exec);

            // parameters must be accessed from behind, since the JVM can add additional parameters,
            // e.g. when local variables are captured inside the lambda function
            final int paramLen = exec.getParameterTypes().length - 1;

            // executable references "this" implicitly
            if (paramLen < 0) {
                // executable declaring class can also be a super class of the input type;
                // we only validate if the executable exists in the input type
                validateInputContainsExecutable(exec, inType);
            } else {
                final Type input = (outputTypeArgumentIndex >= 0)
                        ? exec.getParameterTypes()[paramLen - 1]
                        : exec.getParameterTypes()[paramLen];
                validateInputType(
                        (inputTypeArgumentIndex >= 0) ? extractTypeArgument(input, inputTypeArgumentIndex) : input,
                        inType);
            }
            if (function instanceof ResultTypeQueryable) {
                return ((ResultTypeQueryable<OUT>) function).getProducedType();
            }
            return new TypeExtractor().privateCreateTypeInfo(
                    (outputTypeArgumentIndex >= 0)
                            ? extractTypeArgument(exec.getParameterTypes()[paramLen], outputTypeArgumentIndex)
                            : exec.getReturnType(),
                    inType, null);
        } else {
            validateInputType(baseClass, function.getClass(), 0, inType);
            if (function instanceof ResultTypeQueryable) {
                return ((ResultTypeQueryable<OUT>) function).getProducedType();
            }
            return new TypeExtractor().privateCreateTypeInfo(baseClass, function.getClass(), 1, inType, null);
        }
    } catch (InvalidTypesException e) {
        if (allowMissing) {
            return (TypeInformation<OUT>) new MissingTypeInfo(
                    functionName != null ? functionName : function.toString(), e);
        } else {
            throw e;
        }
    }
}
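A minimal sketch of how a caller might invoke this extractor, assuming the signature shown above; the mapper and the name "my-map" are illustrative. A plain MapFunction wraps neither its input nor its output, so both type-argument indices are -1:

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.TypeExtractor;

MapFunction<String, Integer> mapper = new MapFunction<String, Integer>() {
    @Override
    public Integer map(String value) {
        return value.length();
    }
};

TypeInformation<Integer> outType = TypeExtractor.getUnaryOperatorReturnType(
        mapper,
        MapFunction.class,                 // baseClass
        -1,                                // inputTypeArgumentIndex: input is not wrapped
        -1,                                // outputTypeArgumentIndex: output is not in a Collector
        BasicTypeInfo.STRING_TYPE_INFO,    // inType
        "my-map",                          // functionName, used in error messages
        false);                            // allowMissing: throw instead of returning MissingTypeInfo

For a FlatMapFunction, whose second parameter is a Collector<OUT>, outputTypeArgumentIndex would be 0 (the Collector's single type argument) while inputTypeArgumentIndex stays -1.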
Use of org.apache.flink.annotation.PublicEvolving in project flink by apache.
The class ParameterTool, method fromGenericOptionsParser.
/**
 * Returns a {@link ParameterTool} for the arguments parsed by {@link GenericOptionsParser}.
 *
 * @param args Input argument array. It must be parsable by {@link GenericOptionsParser}.
* @return A {@link ParameterTool}
* @throws IOException If arguments cannot be parsed by {@link GenericOptionsParser}
* @see GenericOptionsParser
* @deprecated Please use {@link org.apache.flink.hadoopcompatibility.HadoopUtils#paramsFromGenericOptionsParser(String[])}
* from project flink-hadoop-compatibility
*/
@Deprecated
@PublicEvolving
public static ParameterTool fromGenericOptionsParser(String[] args) throws IOException {
    Option[] options = new GenericOptionsParser(args).getCommandLine().getOptions();
    Map<String, String> map = new HashMap<String, String>();
    for (Option option : options) {
        // each option value is expected to have the form "key=value"
        String[] split = option.getValue().split("=");
        map.put(split[0], split[1]);
    }
    return fromMap(map);
}
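A minimal usage sketch, assuming Hadoop's "-D key=value" option syntax understood by GenericOptionsParser; the keys and values are illustrative. (New code should prefer the flink-hadoop-compatibility replacement named in the deprecation note.)

import org.apache.flink.api.java.utils.ParameterTool;

String[] args = {"-D", "input.path=hdfs:///data/in", "-D", "parallelism=4"};

// throws IOException if the arguments cannot be parsed
ParameterTool params = ParameterTool.fromGenericOptionsParser(args);
String inputPath = params.get("input.path");     // "hdfs:///data/in"
int parallelism = params.getInt("parallelism");  // 4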
Use of org.apache.flink.annotation.PublicEvolving in project flink by apache.
The class PojoTypeInfo, method getTypeAt.
@SuppressWarnings("unchecked")
@Override
@PublicEvolving
public <X> TypeInformation<X> getTypeAt(String fieldExpression) {
    Matcher matcher = PATTERN_NESTED_FIELDS.matcher(fieldExpression);
    if (!matcher.matches()) {
        if (fieldExpression.startsWith(ExpressionKeys.SELECT_ALL_CHAR)
                || fieldExpression.startsWith(ExpressionKeys.SELECT_ALL_CHAR_SCALA)) {
            throw new InvalidFieldReferenceException("Wildcard expressions are not allowed here.");
        } else {
            throw new InvalidFieldReferenceException(
                    "Invalid format of POJO field expression \"" + fieldExpression + "\".");
        }
    }
    String field = matcher.group(1);

    // get field
    int fieldPos = -1;
    TypeInformation<?> fieldType = null;
    for (int i = 0; i < fields.length; i++) {
        if (fields[i].getField().getName().equals(field)) {
            fieldPos = i;
            fieldType = fields[i].getTypeInformation();
            break;
        }
    }
    if (fieldPos == -1) {
        throw new InvalidFieldReferenceException("Unable to find field \"" + field + "\" in type " + this + ".");
    }

    String tail = matcher.group(3);
    if (tail == null) {
        // we found the type
        return (TypeInformation<X>) fieldType;
    } else {
        if (fieldType instanceof CompositeType<?>) {
            return ((CompositeType<?>) fieldType).getTypeAt(tail);
        } else {
            throw new InvalidFieldReferenceException(
                    "Nested field expression \"" + tail + "\" not possible on atomic type " + fieldType + ".");
        }
    }
}
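A minimal sketch with two hypothetical POJOs, Order and Address (public fields and implicit no-arg constructors, so both qualify as Flink POJOs), resolving a nested field expression:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.PojoTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class Address {
    public String city;
    public int zip;
}

public class Order {
    public long id;
    public Address address;
}

PojoTypeInfo<Order> orderType = (PojoTypeInfo<Order>) TypeExtractor.getForClass(Order.class);

// matches "address" here, then recurses into the nested PojoTypeInfo for "city"
TypeInformation<String> cityType = orderType.getTypeAt("address.city");

// a wildcard expression ("*") or a nested expression on an atomic field,
// e.g. "id.something", would throw InvalidFieldReferenceException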
Use of org.apache.flink.annotation.PublicEvolving in project flink by apache.
The class PojoTypeInfo, method getFlatFields.
@Override
@PublicEvolving
public void getFlatFields(String fieldExpression, int offset, List<FlatFieldDescriptor> result) {
    Matcher matcher = PATTERN_NESTED_FIELDS_WILDCARD.matcher(fieldExpression);
    if (!matcher.matches()) {
        throw new InvalidFieldReferenceException("Invalid POJO field reference \"" + fieldExpression + "\".");
    }
    String field = matcher.group(0);

    if (field.equals(ExpressionKeys.SELECT_ALL_CHAR) || field.equals(ExpressionKeys.SELECT_ALL_CHAR_SCALA)) {
        // handle select all
        int keyPosition = 0;
        for (PojoField pField : fields) {
            if (pField.getTypeInformation() instanceof CompositeType) {
                CompositeType<?> cType = (CompositeType<?>) pField.getTypeInformation();
                cType.getFlatFields(String.valueOf(ExpressionKeys.SELECT_ALL_CHAR), offset + keyPosition, result);
                keyPosition += cType.getTotalFields() - 1;
            } else {
                result.add(new NamedFlatFieldDescriptor(
                        pField.getField().getName(), offset + keyPosition, pField.getTypeInformation()));
            }
            keyPosition++;
        }
        return;
    } else {
        field = matcher.group(1);
    }

    // get field
    int fieldPos = -1;
    TypeInformation<?> fieldType = null;
    for (int i = 0; i < fields.length; i++) {
        if (fields[i].getField().getName().equals(field)) {
            fieldPos = i;
            fieldType = fields[i].getTypeInformation();
            break;
        }
    }
    if (fieldPos == -1) {
        throw new InvalidFieldReferenceException("Unable to find field \"" + field + "\" in type " + this + ".");
    }

    String tail = matcher.group(3);
    if (tail == null) {
        if (fieldType instanceof CompositeType) {
            // forward offset
            for (int i = 0; i < fieldPos; i++) {
                offset += this.getTypeAt(i).getTotalFields();
            }
            // add all fields of composite type
            ((CompositeType<?>) fieldType).getFlatFields("*", offset, result);
        } else {
            // we found the field to add;
            // compute the flat field position by adding the skipped fields
            int flatFieldPos = offset;
            for (int i = 0; i < fieldPos; i++) {
                flatFieldPos += this.getTypeAt(i).getTotalFields();
            }
            result.add(new FlatFieldDescriptor(flatFieldPos, fieldType));
        }
    } else {
        if (fieldType instanceof CompositeType<?>) {
            // forward offset
            for (int i = 0; i < fieldPos; i++) {
                offset += this.getTypeAt(i).getTotalFields();
            }
            ((CompositeType<?>) fieldType).getFlatFields(tail, offset, result);
        } else {
            throw new InvalidFieldReferenceException(
                    "Nested field expression \"" + tail + "\" not possible on atomic type " + fieldType + ".");
        }
    }
}
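And a minimal sketch of getFlatFields, reusing the hypothetical orderType from the previous example: the expression "address.*" expands to one descriptor per atomic field below "address", with flat positions shifted by the given offset and by any fields preceding "address" in the POJO's field order.

import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor;

List<FlatFieldDescriptor> flatFields = new ArrayList<>();
orderType.getFlatFields("address.*", 0, flatFields);

// one entry per atomic field under "address" ("city" and "zip")
for (FlatFieldDescriptor d : flatFields) {
    System.out.println(d.getPosition() + " -> " + d.getType());
}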