Example 11 with PublicEvolving

use of org.apache.flink.annotation.PublicEvolving in project flink by apache.

the class BinaryInputFormat method reopen.

@PublicEvolving
@Override
public void reopen(FileInputSplit split, Tuple2<Long, Long> state) throws IOException {
    Preconditions.checkNotNull(split, "reopen() cannot be called on a null split.");
    Preconditions.checkNotNull(state, "reopen() cannot be called with a null initial state.");
    try {
        this.open(split);
    } finally {
        this.blockInfo = this.createAndReadBlockInfo();
        long blockPos = state.f0;
        this.readRecords = state.f1;
        this.stream.seek(this.splitStart + blockPos);
        this.blockBasedInput = new BlockBasedInput(this.stream, (int) blockPos, this.splitLength);
        this.dataInputStream = new DataInputViewStreamWrapper(blockBasedInput);
    }
}
Also used : DataInputViewStreamWrapper(org.apache.flink.core.memory.DataInputViewStreamWrapper) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
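
For context, here is a minimal resume-from-checkpoint sketch (not taken from the Flink sources) built around this reopen() contract. It assumes an already configured, concrete BinaryInputFormat subclass, the FileInputSplit that was being read, and a Tuple2<Long, Long> state captured earlier, e.g. via the format's getCurrentState().

import org.apache.flink.api.common.io.BinaryInputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileInputSplit;

import java.io.IOException;

public final class ReopenSketch {

    /**
     * Resumes reading a split from a previously checkpointed position.
     * The state is the (block position, records read) pair restored by reopen().
     */
    public static <T> void resumeFromCheckpoint(
            BinaryInputFormat<T> format,   // concrete subclass, already configured via configure(...)
            FileInputSplit split,          // the split that was being read when the checkpoint was taken
            Tuple2<Long, Long> state,      // e.g. obtained earlier from getCurrentState()
            T reuse) throws IOException {

        // reopen() opens the split, reads the block info, seeks to the stored block
        // position, and restores the number of records already read.
        format.reopen(split, state);
        try {
            while (!format.reachedEnd()) {
                reuse = format.nextRecord(reuse);  // continue from the restored position
            }
        } finally {
            format.close();
        }
    }
}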

Example 12 with PublicEvolving

use of org.apache.flink.annotation.PublicEvolving in project flink by apache.

the class TypeExtractor method getAllDeclaredFields.

/**
	 * Recursively determines all declared fields of a class and its superclasses.
	 * This is required because Class#getDeclaredFields() does not return fields defined
	 * in parent classes, and Class#getFields() returns only public fields.
	 *
	 * @param clazz class to be analyzed
	 * @param ignoreDuplicates if true, in case of duplicate field names only the field declared
	 *                            lowest in the hierarchy is returned; otherwise an exception is thrown
	 * @return list of fields
	 */
@PublicEvolving
public static List<Field> getAllDeclaredFields(Class<?> clazz, boolean ignoreDuplicates) {
    List<Field> result = new ArrayList<Field>();
    while (clazz != null) {
        Field[] fields = clazz.getDeclaredFields();
        for (Field field : fields) {
            if (Modifier.isTransient(field.getModifiers()) || Modifier.isStatic(field.getModifiers())) {
                // we have no use for transient or static fields
                continue;
            }
            if (hasFieldWithSameName(field.getName(), result)) {
                if (ignoreDuplicates) {
                    continue;
                } else {
                    throw new InvalidTypesException("The field " + field + " is already contained in the hierarchy of the " + clazz + "." + "Please use unique field names through your classes hierarchy");
                }
            }
            result.add(field);
        }
        clazz = clazz.getSuperclass();
    }
    return result;
}
Also used : Field(java.lang.reflect.Field) ArrayList(java.util.ArrayList) InvalidTypesException(org.apache.flink.api.common.functions.InvalidTypesException) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
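
A small, self-contained demo of what getAllDeclaredFields(Class, boolean) returns; the POJO hierarchy below is invented purely for illustration and targets the signature shown above.

import org.apache.flink.api.java.typeutils.TypeExtractor;

import java.lang.reflect.Field;
import java.util.List;

public class DeclaredFieldsDemo {

    public static class Base {
        public long id;                 // inherited field, expected in the result
        public static int instances;    // skipped: static
        public transient Object cache;  // skipped: transient
    }

    public static class Child extends Base {
        public String name;             // declared in the subclass, expected in the result
    }

    public static void main(String[] args) {
        // With ignoreDuplicates = false, duplicate field names across the hierarchy
        // would raise an InvalidTypesException; here all names are unique.
        List<Field> fields = TypeExtractor.getAllDeclaredFields(Child.class, false);
        for (Field field : fields) {
            // Expected output: Child.name and Base.id (subclass fields come first)
            System.out.println(field.getDeclaringClass().getSimpleName() + "." + field.getName());
        }
    }
}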

Example 13 with PublicEvolving

use of org.apache.flink.annotation.PublicEvolving in project flink by apache.

the class TypeExtractor method getBinaryOperatorReturnType.

/**
	 * Returns the binary operator's return type.
	 *
	 * @param function Function to extract the return type from
	 * @param baseClass Base class of the function
	 * @param inputTypeArgumentIndex Index of the type argument of the function's first parameter
	 *                               specifying the input type if it is wrapped (Iterable, Map,
	 *                               etc.). Otherwise -1.
	 * @param outputTypeArgumentIndex Index of the type argument of the function's second parameter
	 *                                specifying the output type if it is wrapped in a Collector.
	 *                                Otherwise -1.
	 * @param in1Type Type of the left side input elements (In case of an iterable, it is the element type)
	 * @param in2Type Type of the right side input elements (In case of an iterable, it is the element type)
	 * @param functionName Function name
	 * @param allowMissing Can the type information be missing
	 * @param <IN1> Left side input type
	 * @param <IN2> Right side input type
	 * @param <OUT> Output type
	 * @return TypeInformation of the return type of the function
	 */
@SuppressWarnings("unchecked")
@PublicEvolving
public static <IN1, IN2, OUT> TypeInformation<OUT> getBinaryOperatorReturnType(Function function, Class<?> baseClass, int inputTypeArgumentIndex, int outputTypeArgumentIndex, TypeInformation<IN1> in1Type, TypeInformation<IN2> in2Type, String functionName, boolean allowMissing) {
    try {
        final LambdaExecutable exec;
        try {
            exec = checkAndExtractLambda(function);
        } catch (TypeExtractionException e) {
            throw new InvalidTypesException("Internal error occurred.", e);
        }
        if (exec != null) {
            // check for lambda type erasure
            validateLambdaGenericParameters(exec);
            // parameters must be accessed from the end, since the JVM can add extra parameters to the lambda's synthetic method, e.g. for captured local variables
            final int paramLen = exec.getParameterTypes().length - 1;
            final Type input1 = (outputTypeArgumentIndex >= 0) ? exec.getParameterTypes()[paramLen - 2] : exec.getParameterTypes()[paramLen - 1];
            final Type input2 = (outputTypeArgumentIndex >= 0) ? exec.getParameterTypes()[paramLen - 1] : exec.getParameterTypes()[paramLen];
            validateInputType((inputTypeArgumentIndex >= 0) ? extractTypeArgument(input1, inputTypeArgumentIndex) : input1, in1Type);
            validateInputType((inputTypeArgumentIndex >= 0) ? extractTypeArgument(input2, inputTypeArgumentIndex) : input2, in2Type);
            if (function instanceof ResultTypeQueryable) {
                return ((ResultTypeQueryable<OUT>) function).getProducedType();
            }
            return new TypeExtractor().privateCreateTypeInfo((outputTypeArgumentIndex >= 0) ? extractTypeArgument(exec.getParameterTypes()[paramLen], outputTypeArgumentIndex) : exec.getReturnType(), in1Type, in2Type);
        } else {
            validateInputType(baseClass, function.getClass(), 0, in1Type);
            validateInputType(baseClass, function.getClass(), 1, in2Type);
            if (function instanceof ResultTypeQueryable) {
                return ((ResultTypeQueryable<OUT>) function).getProducedType();
            }
            return new TypeExtractor().privateCreateTypeInfo(baseClass, function.getClass(), 2, in1Type, in2Type);
        }
    } catch (InvalidTypesException e) {
        if (allowMissing) {
            return (TypeInformation<OUT>) new MissingTypeInfo(functionName != null ? functionName : function.toString(), e);
        } else {
            throw e;
        }
    }
}
Also used : GenericArrayType(java.lang.reflect.GenericArrayType) TypeExtractionUtils.isClassType(org.apache.flink.api.java.typeutils.TypeExtractionUtils.isClassType) Type(java.lang.reflect.Type) CompositeType(org.apache.flink.api.common.typeutils.CompositeType) ParameterizedType(java.lang.reflect.ParameterizedType) LambdaExecutable(org.apache.flink.api.java.typeutils.TypeExtractionUtils.LambdaExecutable) InvalidTypesException(org.apache.flink.api.common.functions.InvalidTypesException) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
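
Below is a hedged sketch of invoking this extractor directly for a two-input UDF, against the parameter list shown above (later Flink releases changed this signature). The JoinFunction and the chosen argument values are illustrative assumptions, not code from the Flink sources; since a JoinFunction's inputs and output are not wrapped in an Iterable or Collector, both type-argument indices are -1.

import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class BinaryReturnTypeDemo {

    public static void main(String[] args) {
        // An anonymous class keeps the generic signature reflectively visible,
        // so the extractor does not have to rely on lambda metadata.
        JoinFunction<String, Integer, Long> join = new JoinFunction<String, Integer, Long>() {
            @Override
            public Long join(String first, Integer second) {
                return first.length() + (long) second;
            }
        };

        TypeInformation<Long> returnType = TypeExtractor.getBinaryOperatorReturnType(
                join,
                JoinFunction.class,              // base class of the UDF
                -1,                              // inputs are not wrapped (no Iterable/Map)
                -1,                              // output is not wrapped in a Collector
                BasicTypeInfo.STRING_TYPE_INFO,  // left input type
                BasicTypeInfo.INT_TYPE_INFO,     // right input type
                "myJoin",                        // name used in error messages
                false);                          // fail instead of returning a MissingTypeInfo

        System.out.println(returnType);          // expected: Long
    }
}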

Example 14 with PublicEvolving

use of org.apache.flink.annotation.PublicEvolving in project flink by apache.

the class AllWindowedStream method aggregate.

/**
	 * Applies the given window function to each window. The window function is called for each
	 * evaluation of the window. The output of the window function is interpreted as a regular
	 * non-windowed stream.
	 *
	 * <p>Arriving data is incrementally aggregated using the given aggregate function. This means
	 * that the window function typically has only a single value to process when called.
	 *
	 * @param aggregateFunction The aggregation function that is used for incremental aggregation.
	 * @param windowFunction The process window function.
	 * @param accumulatorType Type information for the internal accumulator type of the aggregation function.
	 * @param aggregateResultType Type information for the result type of the aggregation function.
	 * @param resultType Type information for the result type of the window function.
	 *
	 * @return The data stream that is the result of applying the window function to the window.
	 *
	 * @param <ACC> The type of the AggregateFunction's accumulator
	 * @param <V> The type of AggregateFunction's result, and the WindowFunction's input
	 * @param <R> The type of the elements in the resulting stream, equal to the
	 *            WindowFunction's result type
	 */
@PublicEvolving
public <ACC, V, R> SingleOutputStreamOperator<R> aggregate(AggregateFunction<T, ACC, V> aggregateFunction, ProcessAllWindowFunction<V, R, W> windowFunction, TypeInformation<ACC> accumulatorType, TypeInformation<V> aggregateResultType, TypeInformation<R> resultType) {
    checkNotNull(aggregateFunction, "aggregateFunction");
    checkNotNull(windowFunction, "windowFunction");
    checkNotNull(accumulatorType, "accumulatorType");
    checkNotNull(aggregateResultType, "aggregateResultType");
    checkNotNull(resultType, "resultType");
    if (aggregateFunction instanceof RichFunction) {
        throw new UnsupportedOperationException("This aggregate function cannot be a RichFunction.");
    }
    //clean the closures
    windowFunction = input.getExecutionEnvironment().clean(windowFunction);
    aggregateFunction = input.getExecutionEnvironment().clean(aggregateFunction);
    final String callLocation = Utils.getCallLocationName();
    final String udfName = "AllWindowedStream." + callLocation;
    final String opName;
    final KeySelector<T, Byte> keySel = input.getKeySelector();
    OneInputStreamOperator<T, R> operator;
    if (evictor != null) {
        @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<T>> streamRecordSerializer = (TypeSerializer<StreamRecord<T>>) new StreamElementSerializer(input.getType().createSerializer(getExecutionEnvironment().getConfig()));
        ListStateDescriptor<StreamRecord<T>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + evictor + ", " + udfName + ")";
        operator = new EvictingWindowOperator<>(windowAssigner, windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()), keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()), stateDesc, new InternalAggregateProcessAllWindowFunction<>(aggregateFunction, windowFunction), trigger, evictor, allowedLateness, lateDataOutputTag);
    } else {
        AggregatingStateDescriptor<T, ACC, V> stateDesc = new AggregatingStateDescriptor<>("window-contents", aggregateFunction, accumulatorType.createSerializer(getExecutionEnvironment().getConfig()));
        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + udfName + ")";
        operator = new WindowOperator<>(windowAssigner, windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()), keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()), stateDesc, new InternalSingleValueProcessAllWindowFunction<>(windowFunction), trigger, allowedLateness, lateDataOutputTag);
    }
    return input.transform(opName, resultType, operator).forceNonParallel();
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) RichFunction(org.apache.flink.api.common.functions.RichFunction) AggregatingStateDescriptor(org.apache.flink.api.common.state.AggregatingStateDescriptor) ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) InternalSingleValueProcessAllWindowFunction(org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueProcessAllWindowFunction) InternalAggregateProcessAllWindowFunction(org.apache.flink.streaming.runtime.operators.windowing.functions.InternalAggregateProcessAllWindowFunction) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
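
The following usage sketch for this overload is assembled for illustration and is not taken from the Flink sources. An AggregateFunction keeps a running (sum, count) pair and the ProcessAllWindowFunction formats the per-window average; note that the sketch assumes the newer AggregateFunction contract in which add() returns the accumulator (earlier releases declared add() as void), so adjust for the exact Flink version in use.

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class AggregateAllWindowDemo {

    /** Incrementally tracks (sum, count) and exposes the average as the window result. */
    public static class Average implements AggregateFunction<Long, Tuple2<Long, Long>, Double> {
        @Override
        public Tuple2<Long, Long> createAccumulator() {
            return Tuple2.of(0L, 0L);
        }

        @Override
        public Tuple2<Long, Long> add(Long value, Tuple2<Long, Long> acc) {
            return Tuple2.of(acc.f0 + value, acc.f1 + 1);
        }

        @Override
        public Double getResult(Tuple2<Long, Long> acc) {
            return acc.f1 == 0 ? 0.0 : (double) acc.f0 / acc.f1;
        }

        @Override
        public Tuple2<Long, Long> merge(Tuple2<Long, Long> a, Tuple2<Long, Long> b) {
            return Tuple2.of(a.f0 + b.f0, a.f1 + b.f1);
        }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<Long> values = env.fromElements(1L, 2L, 3L, 4L);

        values.windowAll(TumblingProcessingTimeWindows.of(Time.seconds(5)))
                .aggregate(
                        new Average(),
                        // Sees the single pre-aggregated value per window.
                        new ProcessAllWindowFunction<Double, String, TimeWindow>() {
                            @Override
                            public void process(Context context, Iterable<Double> averages, Collector<String> out) {
                                out.collect("window " + context.window() + " avg=" + averages.iterator().next());
                            }
                        },
                        TypeInformation.of(new TypeHint<Tuple2<Long, Long>>() {}),  // accumulatorType
                        BasicTypeInfo.DOUBLE_TYPE_INFO,                             // aggregateResultType
                        BasicTypeInfo.STRING_TYPE_INFO)                             // resultType
                .print();

        env.execute("aggregate sketch");
    }
}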

Example 15 with PublicEvolving

use of org.apache.flink.annotation.PublicEvolving in project flink by apache.

the class AllWindowedStream method fold.

/**
	 * Applies the given window function to each window. The window function is called for each
	 * evaluation of the window. The output of the window function is interpreted as a regular
	 * non-windowed stream.
	 *
	 * <p>Arriving data is incrementally aggregated using the given fold function.
	 *
	 * @param initialValue The initial value of the fold.
	 * @param foldFunction The fold function that is used for incremental aggregation.
	 * @param function The process window function.
	 * @param foldAccumulatorType Type information for the result type of the fold function.
	 * @param resultType Type information for the result type of the window function.
	 * @return The data stream that is the result of applying the window function to the window.
	 */
@PublicEvolving
public <ACC, R> SingleOutputStreamOperator<R> fold(ACC initialValue, FoldFunction<T, ACC> foldFunction, ProcessAllWindowFunction<ACC, R, W> function, TypeInformation<ACC> foldAccumulatorType, TypeInformation<R> resultType) {
    if (foldFunction instanceof RichFunction) {
        throw new UnsupportedOperationException("FoldFunction of fold can not be a RichFunction.");
    }
    if (windowAssigner instanceof MergingWindowAssigner) {
        throw new UnsupportedOperationException("Fold cannot be used with a merging WindowAssigner.");
    }
    //clean the closures
    function = input.getExecutionEnvironment().clean(function);
    foldFunction = input.getExecutionEnvironment().clean(foldFunction);
    String callLocation = Utils.getCallLocationName();
    String udfName = "AllWindowedStream." + callLocation;
    String opName;
    KeySelector<T, Byte> keySel = input.getKeySelector();
    OneInputStreamOperator<T, R> operator;
    if (evictor != null) {
        @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<T>> streamRecordSerializer = (TypeSerializer<StreamRecord<T>>) new StreamElementSerializer(input.getType().createSerializer(getExecutionEnvironment().getConfig()));
        ListStateDescriptor<StreamRecord<T>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + evictor + ", " + udfName + ")";
        operator = new EvictingWindowOperator<>(windowAssigner, windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()), keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()), stateDesc, new InternalIterableProcessAllWindowFunction<>(new FoldApplyProcessAllWindowFunction<>(initialValue, foldFunction, function, foldAccumulatorType)), trigger, evictor, allowedLateness, lateDataOutputTag);
    } else {
        FoldingStateDescriptor<T, ACC> stateDesc = new FoldingStateDescriptor<>("window-contents", initialValue, foldFunction, foldAccumulatorType.createSerializer(getExecutionEnvironment().getConfig()));
        opName = "TriggerWindow(" + windowAssigner + ", " + stateDesc + ", " + trigger + ", " + udfName + ")";
        operator = new WindowOperator<>(windowAssigner, windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()), keySel, input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()), stateDesc, new InternalSingleValueProcessAllWindowFunction<>(function), trigger, allowedLateness, lateDataOutputTag);
    }
    return input.transform(opName, resultType, operator).forceNonParallel();
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) RichFunction(org.apache.flink.api.common.functions.RichFunction) ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) InternalSingleValueProcessAllWindowFunction(org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueProcessAllWindowFunction) FoldingStateDescriptor(org.apache.flink.api.common.state.FoldingStateDescriptor) MergingWindowAssigner(org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) InternalIterableProcessAllWindowFunction(org.apache.flink.streaming.runtime.operators.windowing.functions.InternalIterableProcessAllWindowFunction) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
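
For comparison, a short illustrative sketch of the fold variant shown above (later deprecated in favor of aggregate()); it assumes an existing DataStream<Long> and folds each window's elements into a comma-separated string, which the process window function then emits. None of this is taken from the Flink sources.

import org.apache.flink.api.common.functions.FoldFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class FoldAllWindowSketch {

    public static SingleOutputStreamOperator<String> foldPerWindow(DataStream<Long> values) {
        return values
                .windowAll(TumblingProcessingTimeWindows.of(Time.seconds(5)))  // non-merging assigner, as fold requires
                .fold(
                        "",                                          // initial accumulator value
                        new FoldFunction<Long, String>() {
                            @Override
                            public String fold(String acc, Long value) {
                                return acc.isEmpty() ? value.toString() : acc + "," + value;
                            }
                        },
                        new ProcessAllWindowFunction<String, String, TimeWindow>() {
                            @Override
                            public void process(Context ctx, Iterable<String> folded, Collector<String> out) {
                                // Exactly one folded value is produced per window.
                                out.collect("window " + ctx.window() + ": " + folded.iterator().next());
                            }
                        },
                        BasicTypeInfo.STRING_TYPE_INFO,              // foldAccumulatorType
                        BasicTypeInfo.STRING_TYPE_INFO);             // resultType
    }
}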

Aggregations

PublicEvolving (org.apache.flink.annotation.PublicEvolving) 19
RichFunction (org.apache.flink.api.common.functions.RichFunction) 9
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor) 9
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer) 9
StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) 9
StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord) 9
InvalidTypesException (org.apache.flink.api.common.functions.InvalidTypesException) 4
AggregatingStateDescriptor (org.apache.flink.api.common.state.AggregatingStateDescriptor) 4
CompositeType (org.apache.flink.api.common.typeutils.CompositeType) 4
FoldingStateDescriptor (org.apache.flink.api.common.state.FoldingStateDescriptor) 3
MergingWindowAssigner (org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner) 3
InternalIterableAllWindowFunction (org.apache.flink.streaming.runtime.operators.windowing.functions.InternalIterableAllWindowFunction) 3
InternalSingleValueAllWindowFunction (org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueAllWindowFunction) 3
InternalSingleValueProcessAllWindowFunction (org.apache.flink.streaming.runtime.operators.windowing.functions.InternalSingleValueProcessAllWindowFunction) 3
GenericArrayType (java.lang.reflect.GenericArrayType) 2
ParameterizedType (java.lang.reflect.ParameterizedType) 2
Type (java.lang.reflect.Type) 2
Matcher (java.util.regex.Matcher) 2
ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor) 2
LambdaExecutable (org.apache.flink.api.java.typeutils.TypeExtractionUtils.LambdaExecutable) 2