
Example 6 with CompositeType

Use of org.apache.flink.api.common.typeutils.CompositeType in project flink by apache.

From the class RowTypeInfo, method getFlatFields:

@Override
public void getFlatFields(String fieldExpression, int offset, List<FlatFieldDescriptor> result) {
    Matcher matcher = PATTERN_NESTED_FIELDS_WILDCARD.matcher(fieldExpression);
    if (!matcher.matches()) {
        throw new InvalidFieldReferenceException("Invalid tuple field reference \"" + fieldExpression + "\".");
    }
    String field = matcher.group(0);
    if ((field.equals(ExpressionKeys.SELECT_ALL_CHAR)) || (field.equals(ExpressionKeys.SELECT_ALL_CHAR_SCALA))) {
        // handle select all
        int keyPosition = 0;
        for (TypeInformation<?> fType : types) {
            if (fType instanceof CompositeType) {
                CompositeType<?> cType = (CompositeType<?>) fType;
                cType.getFlatFields(ExpressionKeys.SELECT_ALL_CHAR, offset + keyPosition, result);
                keyPosition += cType.getTotalFields() - 1;
            } else {
                result.add(new FlatFieldDescriptor(offset + keyPosition, fType));
            }
            keyPosition++;
        }
    } else {
        field = matcher.group(1);
        Matcher intFieldMatcher = PATTERN_INT_FIELD.matcher(field);
        int fieldIndex;
        if (intFieldMatcher.matches()) {
            // field expression is an integer
            fieldIndex = Integer.valueOf(field);
        } else {
            fieldIndex = this.getFieldIndex(field);
        }
        // fetch the field type; getTypeAt throws an exception if the index is out of range
        TypeInformation<?> fieldType = this.getTypeAt(fieldIndex);
        // compute the flat offset of the referenced field
        for (int i = 0; i < fieldIndex; i++) {
            offset += this.getTypeAt(i).getTotalFields();
        }
        String tail = matcher.group(3);
        if (tail == null) {
            // the expression has no nested field
            if (fieldType instanceof CompositeType) {
                ((CompositeType) fieldType).getFlatFields("*", offset, result);
            } else {
                result.add(new FlatFieldDescriptor(offset, fieldType));
            }
        } else {
            // expression has nested field
            if (fieldType instanceof CompositeType) {
                ((CompositeType) fieldType).getFlatFields(tail, offset, result);
            } else {
                throw new InvalidFieldReferenceException("Nested field expression \"" + tail + "\" not possible on atomic type " + fieldType + ".");
            }
        }
    }
}
Also used : Matcher(java.util.regex.Matcher) CompositeType(org.apache.flink.api.common.typeutils.CompositeType)
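
For context, a minimal sketch of exercising this method through the public RowTypeInfo API. The schema, the field names ("id", "name", "nested"), and the RowFlatFieldsSketch class are hypothetical, chosen only for illustration:

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor;
import org.apache.flink.api.java.typeutils.RowTypeInfo;

public class RowFlatFieldsSketch {
    public static void main(String[] args) {
        // Hypothetical schema: two atomic fields plus one nested row.
        RowTypeInfo rowType = new RowTypeInfo(
                new TypeInformation<?>[] { Types.INT, Types.STRING, new RowTypeInfo(Types.LONG, Types.DOUBLE) },
                new String[] { "id", "name", "nested" });

        List<FlatFieldDescriptor> flatFields = new ArrayList<>();
        // "*" takes the select-all branch above; "nested.f1" would resolve a single nested field.
        rowType.getFlatFields("*", 0, flatFields);
        // One descriptor per atomic field, with absolute positions 0..3.
        flatFields.forEach(f -> System.out.println(f.getPosition() + " -> " + f.getType()));
    }
}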

Example 7 with CompositeType

Use of org.apache.flink.api.common.typeutils.CompositeType in project flink by apache.

From the class CollectionExecutor, method executeDeltaIteration:

@SuppressWarnings("unchecked")
private <T> List<T> executeDeltaIteration(DeltaIterationBase<?, ?> iteration) throws Exception {
    Operator<?> solutionInput = iteration.getInitialSolutionSet();
    Operator<?> worksetInput = iteration.getInitialWorkset();
    if (solutionInput == null) {
        throw new InvalidProgramException("The delta iteration " + iteration.getName() + " has no initial solution set.");
    }
    if (worksetInput == null) {
        throw new InvalidProgramException("The delta iteration " + iteration.getName() + " has no initial workset.");
    }
    if (iteration.getSolutionSetDelta() == null) {
        throw new InvalidProgramException("The iteration " + iteration.getName() + " has no solution set delta defined (is not closed).");
    }
    if (iteration.getNextWorkset() == null) {
        throw new InvalidProgramException("The iteration " + iteration.getName() + " has no workset defined (is not closed).");
    }
    List<T> solutionInputData = (List<T>) execute(solutionInput);
    List<T> worksetInputData = (List<T>) execute(worksetInput);
    // get the operators that are iterative
    Set<Operator<?>> dynamics = new LinkedHashSet<Operator<?>>();
    DynamicPathCollector dynCollector = new DynamicPathCollector(dynamics);
    iteration.getSolutionSetDelta().accept(dynCollector);
    iteration.getNextWorkset().accept(dynCollector);
    BinaryOperatorInformation<?, ?, ?> operatorInfo = iteration.getOperatorInfo();
    TypeInformation<?> solutionType = operatorInfo.getFirstInputType();
    int[] keyColumns = iteration.getSolutionSetKeyFields();
    boolean[] inputOrderings = new boolean[keyColumns.length];
    TypeComparator<T> inputComparator = ((CompositeType<T>) solutionType).createComparator(keyColumns, inputOrderings, 0, executionConfig);
    Map<TypeComparable<T>, T> solutionMap = new HashMap<TypeComparable<T>, T>(solutionInputData.size());
    // fill the solution set from the initial input
    for (T delta : solutionInputData) {
        TypeComparable<T> wrapper = new TypeComparable<T>(delta, inputComparator);
        solutionMap.put(wrapper, delta);
    }
    List<?> currentWorkset = worksetInputData;
    // register the aggregators
    for (AggregatorWithName<?> a : iteration.getAggregators().getAllRegisteredAggregators()) {
        aggregators.put(a.getName(), a.getAggregator());
    }
    String convCriterionAggName = iteration.getAggregators().getConvergenceCriterionAggregatorName();
    ConvergenceCriterion<Value> convCriterion = (ConvergenceCriterion<Value>) iteration.getAggregators().getConvergenceCriterion();
    final int maxIterations = iteration.getMaximumNumberOfIterations();
    for (int superstep = 1; superstep <= maxIterations; superstep++) {
        List<T> currentSolution = new ArrayList<T>(solutionMap.size());
        currentSolution.addAll(solutionMap.values());
        // set the input to the current partial solution
        this.intermediateResults.put(iteration.getSolutionSet(), currentSolution);
        this.intermediateResults.put(iteration.getWorkset(), currentWorkset);
        // set the superstep number
        iterationSuperstep = superstep;
        // grab the current iteration result
        List<T> solutionSetDelta = (List<T>) execute(iteration.getSolutionSetDelta(), superstep);
        this.intermediateResults.put(iteration.getSolutionSetDelta(), solutionSetDelta);
        // update the solution
        for (T delta : solutionSetDelta) {
            TypeComparable<T> wrapper = new TypeComparable<T>(delta, inputComparator);
            solutionMap.put(wrapper, delta);
        }
        currentWorkset = execute(iteration.getNextWorkset(), superstep);
        if (currentWorkset.isEmpty()) {
            break;
        }
        // evaluate the aggregator convergence criterion
        if (convCriterion != null && convCriterionAggName != null) {
            Value v = aggregators.get(convCriterionAggName).getAggregate();
            if (convCriterion.isConverged(superstep, v)) {
                break;
            }
        }
        // clear the dynamic results
        for (Operator<?> o : dynamics) {
            intermediateResults.remove(o);
        }
        // set the previous iteration's aggregates and reset the aggregators
        for (Map.Entry<String, Aggregator<?>> e : aggregators.entrySet()) {
            previousAggregates.put(e.getKey(), e.getValue().getAggregate());
            e.getValue().reset();
        }
    }
    previousAggregates.clear();
    aggregators.clear();
    List<T> currentSolution = new ArrayList<T>(solutionMap.size());
    currentSolution.addAll(solutionMap.values());
    return currentSolution;
}
Also used : LinkedHashSet(java.util.LinkedHashSet) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) TypeComparable(org.apache.flink.api.common.operators.util.TypeComparable) InvalidProgramException(org.apache.flink.api.common.InvalidProgramException) ArrayList(java.util.ArrayList) List(java.util.List) Aggregator(org.apache.flink.api.common.aggregators.Aggregator) ConvergenceCriterion(org.apache.flink.api.common.aggregators.ConvergenceCriterion) Value(org.apache.flink.types.Value) HashMap(java.util.HashMap) Map(java.util.Map) CompositeType(org.apache.flink.api.common.typeutils.CompositeType)
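
The core trick in this example is wrapping each record in a TypeComparable so a plain HashMap can key the solution set by the comparator's key fields. A minimal sketch of that pattern in isolation, assuming a Tuple2 solution type keyed on field 0 (the type, the key choice, and the class name are made up for illustration):

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.operators.util.TypeComparable;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.CompositeType;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class SolutionSetKeySketch {
    public static void main(String[] args) {
        // Hypothetical solution-set type, keyed on field 0.
        TypeInformation<Tuple2<Long, String>> solutionType =
                TupleTypeInfo.getBasicTupleTypeInfo(Long.class, String.class);
        TypeComparator<Tuple2<Long, String>> comparator =
                ((CompositeType<Tuple2<Long, String>>) solutionType)
                        .createComparator(new int[] { 0 }, new boolean[] { true }, 0, new ExecutionConfig());

        // TypeComparable delegates hashCode/equals to the comparator, so records
        // with equal key fields collapse onto the same map entry.
        Map<TypeComparable<Tuple2<Long, String>>, Tuple2<Long, String>> solutionMap = new HashMap<>();
        Tuple2<Long, String> record = Tuple2.of(1L, "initial");
        solutionMap.put(new TypeComparable<>(record, comparator), record);

        Tuple2<Long, String> delta = Tuple2.of(1L, "updated");
        // Same key (field 0), so this overwrites the previous entry, as in executeDeltaIteration.
        solutionMap.put(new TypeComparable<>(delta, comparator), delta);
        System.out.println(solutionMap.size()); // 1
    }
}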

Example 8 with CompositeType

Use of org.apache.flink.api.common.typeutils.CompositeType in project flink by apache.

From the class PojoTypeInfo, method getTypeAt:

@SuppressWarnings("unchecked")
@Override
@PublicEvolving
public <X> TypeInformation<X> getTypeAt(String fieldExpression) {
    Matcher matcher = PATTERN_NESTED_FIELDS.matcher(fieldExpression);
    if (!matcher.matches()) {
        if (fieldExpression.startsWith(ExpressionKeys.SELECT_ALL_CHAR) || fieldExpression.startsWith(ExpressionKeys.SELECT_ALL_CHAR_SCALA)) {
            throw new InvalidFieldReferenceException("Wildcard expressions are not allowed here.");
        } else {
            throw new InvalidFieldReferenceException("Invalid format of POJO field expression \"" + fieldExpression + "\".");
        }
    }
    String field = matcher.group(1);
    // get field
    int fieldPos = -1;
    TypeInformation<?> fieldType = null;
    for (int i = 0; i < fields.length; i++) {
        if (fields[i].getField().getName().equals(field)) {
            fieldPos = i;
            fieldType = fields[i].getTypeInformation();
            break;
        }
    }
    if (fieldPos == -1) {
        throw new InvalidFieldReferenceException("Unable to find field \"" + field + "\" in type " + this + ".");
    }
    String tail = matcher.group(3);
    if (tail == null) {
        // we found the type
        return (TypeInformation<X>) fieldType;
    } else {
        if (fieldType instanceof CompositeType<?>) {
            return ((CompositeType<?>) fieldType).getTypeAt(tail);
        } else {
            throw new InvalidFieldReferenceException("Nested field expression \"" + tail + "\" not possible on atomic type " + fieldType + ".");
        }
    }
}
Also used : Matcher(java.util.regex.Matcher) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) CompositeType(org.apache.flink.api.common.typeutils.CompositeType) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
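
A minimal sketch of calling getTypeAt with a plain and a nested field expression. The Person and Address POJOs are hypothetical:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.PojoTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class PojoTypeAtSketch {
    // Hypothetical POJOs; Flink requires public fields or getters/setters and a no-arg constructor.
    public static class Address {
        public String city;
        public int zip;
    }
    public static class Person {
        public String name;
        public Address address;
    }

    public static void main(String[] args) {
        TypeInformation<Person> typeInfo = TypeExtractor.createTypeInfo(Person.class);
        PojoTypeInfo<Person> pojoType = (PojoTypeInfo<Person>) typeInfo;

        // A plain field reference resolves directly.
        TypeInformation<String> nameType = pojoType.getTypeAt("name");
        // A nested expression recurses into the CompositeType of "address".
        TypeInformation<Integer> zipType = pojoType.getTypeAt("address.zip");
        System.out.println(nameType + " / " + zipType);
    }
}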

Example 9 with CompositeType

Use of org.apache.flink.api.common.typeutils.CompositeType in project flink by apache.

From the class PojoTypeInfo, method getFlatFields:

@Override
@PublicEvolving
public void getFlatFields(String fieldExpression, int offset, List<FlatFieldDescriptor> result) {
    Matcher matcher = PATTERN_NESTED_FIELDS_WILDCARD.matcher(fieldExpression);
    if (!matcher.matches()) {
        throw new InvalidFieldReferenceException("Invalid POJO field reference \"" + fieldExpression + "\".");
    }
    String field = matcher.group(0);
    if (field.equals(ExpressionKeys.SELECT_ALL_CHAR) || field.equals(ExpressionKeys.SELECT_ALL_CHAR_SCALA)) {
        // handle select all
        int keyPosition = 0;
        for (PojoField pField : fields) {
            if (pField.getTypeInformation() instanceof CompositeType) {
                CompositeType<?> cType = (CompositeType<?>) pField.getTypeInformation();
                cType.getFlatFields(String.valueOf(ExpressionKeys.SELECT_ALL_CHAR), offset + keyPosition, result);
                keyPosition += cType.getTotalFields() - 1;
            } else {
                result.add(new NamedFlatFieldDescriptor(pField.getField().getName(), offset + keyPosition, pField.getTypeInformation()));
            }
            keyPosition++;
        }
        return;
    } else {
        field = matcher.group(1);
    }
    // get field
    int fieldPos = -1;
    TypeInformation<?> fieldType = null;
    for (int i = 0; i < fields.length; i++) {
        if (fields[i].getField().getName().equals(field)) {
            fieldPos = i;
            fieldType = fields[i].getTypeInformation();
            break;
        }
    }
    if (fieldPos == -1) {
        throw new InvalidFieldReferenceException("Unable to find field \"" + field + "\" in type " + this + ".");
    }
    String tail = matcher.group(3);
    if (tail == null) {
        if (fieldType instanceof CompositeType) {
            // forward offset
            for (int i = 0; i < fieldPos; i++) {
                offset += this.getTypeAt(i).getTotalFields();
            }
            // add all fields of composite type
            ((CompositeType<?>) fieldType).getFlatFields("*", offset, result);
        } else {
            // we found the field to add
            // compute flat field position by adding skipped fields
            int flatFieldPos = offset;
            for (int i = 0; i < fieldPos; i++) {
                flatFieldPos += this.getTypeAt(i).getTotalFields();
            }
            result.add(new FlatFieldDescriptor(flatFieldPos, fieldType));
        }
    } else {
        if (fieldType instanceof CompositeType<?>) {
            // forward offset
            for (int i = 0; i < fieldPos; i++) {
                offset += this.getTypeAt(i).getTotalFields();
            }
            ((CompositeType<?>) fieldType).getFlatFields(tail, offset, result);
        } else {
            throw new InvalidFieldReferenceException("Nested field expression \"" + tail + "\" not possible on atomic type " + fieldType + ".");
        }
    }
}
Also used : Matcher(java.util.regex.Matcher) CompositeType(org.apache.flink.api.common.typeutils.CompositeType) PublicEvolving(org.apache.flink.annotation.PublicEvolving)
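
A companion sketch for getFlatFields on the same hypothetical POJOs as in the getTypeAt sketch above, covering the wildcard branch and a nested expression:

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor;
import org.apache.flink.api.java.typeutils.PojoTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class PojoFlatFieldsSketch {
    // Hypothetical POJOs, same as in the getTypeAt sketch.
    public static class Address {
        public String city;
        public int zip;
    }
    public static class Person {
        public String name;
        public Address address;
    }

    public static void main(String[] args) {
        PojoTypeInfo<Person> pojoType =
                (PojoTypeInfo<Person>) TypeExtractor.createTypeInfo(Person.class);

        List<FlatFieldDescriptor> result = new ArrayList<>();
        // Wildcard expands every field, recursing into the nested Address type.
        pojoType.getFlatFields("*", 0, result);
        // PojoTypeInfo sorts POJO fields by name, so the expected flat order is
        // address.city, address.zip, name (three flat fields in total).
        result.forEach(f -> System.out.println(f.getPosition() + " -> " + f.getType()));

        result.clear();
        // A nested expression resolves to a single descriptor with its absolute flat position.
        pojoType.getFlatFields("address.zip", 0, result);
    }
}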

Example 10 with CompositeType

Use of org.apache.flink.api.common.typeutils.CompositeType in project flink by apache.

From the class PojoComparatorTest, method createComparator:

@Override
protected TypeComparator<PojoContainingTuple> createComparator(boolean ascending) {
    Assert.assertTrue(type instanceof CompositeType);
    CompositeType<PojoContainingTuple> cType = (CompositeType<PojoContainingTuple>) type;
    ExpressionKeys<PojoContainingTuple> keys = new ExpressionKeys<PojoContainingTuple>(new String[] { "theTuple.*" }, cType);
    boolean[] orders = new boolean[keys.getNumberOfKeyFields()];
    Arrays.fill(orders, ascending);
    return cType.createComparator(keys.computeLogicalKeyPositions(), orders, 0, new ExecutionConfig());
}
Also used : ExpressionKeys(org.apache.flink.api.common.operators.Keys.ExpressionKeys) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) CompositeType(org.apache.flink.api.common.typeutils.CompositeType)
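
A minimal sketch of building a comparator over a nested key the same way, but outside the test harness. The Wrapper POJO stands in for PojoContainingTuple and is hypothetical:

import java.util.Arrays;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
import org.apache.flink.api.common.typeutils.CompositeType;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class NestedKeyComparatorSketch {
    // Hypothetical POJO wrapping a tuple, analogous to PojoContainingTuple in the test above.
    public static class Wrapper {
        public Tuple2<Integer, Long> theTuple;
    }

    public static void main(String[] args) {
        CompositeType<Wrapper> cType =
                (CompositeType<Wrapper>) TypeExtractor.createTypeInfo(Wrapper.class);

        // "theTuple.*" expands to both tuple fields; the key positions are flat field indices.
        ExpressionKeys<Wrapper> keys = new ExpressionKeys<>(new String[] { "theTuple.*" }, cType);
        int[] positions = keys.computeLogicalKeyPositions();
        System.out.println(Arrays.toString(positions)); // [0, 1]

        boolean[] orders = new boolean[positions.length];
        Arrays.fill(orders, true); // ascending on every key field
        TypeComparator<Wrapper> comparator =
                cType.createComparator(positions, orders, 0, new ExecutionConfig());
        // The comparator can now compare or hash Wrapper instances on the nested key fields.
    }
}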

Aggregations

CompositeType (org.apache.flink.api.common.typeutils.CompositeType): 12
Matcher (java.util.regex.Matcher): 6
ArrayList (java.util.ArrayList): 2
PublicEvolving (org.apache.flink.annotation.PublicEvolving): 2
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 2
InvalidProgramException (org.apache.flink.api.common.InvalidProgramException): 2
ExpressionKeys (org.apache.flink.api.common.operators.Keys.ExpressionKeys): 2
Test (org.junit.Test): 2
Constructor (java.lang.reflect.Constructor): 1
Field (java.lang.reflect.Field): 1
GenericArrayType (java.lang.reflect.GenericArrayType): 1
Method (java.lang.reflect.Method): 1
ParameterizedType (java.lang.reflect.ParameterizedType): 1
Type (java.lang.reflect.Type): 1
HashMap (java.util.HashMap): 1
LinkedHashSet (java.util.LinkedHashSet): 1
List (java.util.List): 1
Map (java.util.Map): 1
Aggregator (org.apache.flink.api.common.aggregators.Aggregator): 1
ConvergenceCriterion (org.apache.flink.api.common.aggregators.ConvergenceCriterion): 1