
Example 1 with FlatFieldDescriptor

Use of org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor in project flink by apache.

The class PojoTypeExtractionTest, method checkWCPojoAsserts:

@SuppressWarnings({ "unchecked", "rawtypes" })
private void checkWCPojoAsserts(TypeInformation<?> typeInfo) {
    Assert.assertFalse(typeInfo.isBasicType());
    Assert.assertFalse(typeInfo.isTupleType());
    Assert.assertEquals(10, typeInfo.getTotalFields());
    Assert.assertTrue(typeInfo instanceof PojoTypeInfo);
    PojoTypeInfo<?> pojoType = (PojoTypeInfo<?>) typeInfo;
    List<FlatFieldDescriptor> ffd = new ArrayList<FlatFieldDescriptor>();
    String[] fields = { "count", "complex.date", "complex.collection", "complex.nothing", "complex.someFloat", "complex.someNumberWithÜnicödeNäme", "complex.valueType", "complex.word.f0", "complex.word.f1", "complex.word.f2" };
    int[] positions = { 9, 1, 0, 2, 3, 4, 5, 6, 7, 8 };
    Assert.assertEquals(fields.length, positions.length);
    for (int i = 0; i < fields.length; i++) {
        pojoType.getFlatFields(fields[i], 0, ffd);
        Assert.assertEquals("Too many keys returned", 1, ffd.size());
        Assert.assertEquals("position of field " + fields[i] + " wrong", positions[i], ffd.get(0).getPosition());
        ffd.clear();
    }
    pojoType.getFlatFields("complex.word.*", 0, ffd);
    Assert.assertEquals(3, ffd.size());
    // check that it returns positions 6, 7 and 8
    for (FlatFieldDescriptor ffdE : ffd) {
        final int pos = ffdE.getPosition();
        Assert.assertTrue(pos <= 8);
        Assert.assertTrue(6 <= pos);
        if (pos == 6) {
            Assert.assertEquals(Long.class, ffdE.getType().getTypeClass());
        }
        if (pos == 7) {
            Assert.assertEquals(Long.class, ffdE.getType().getTypeClass());
        }
        if (pos == 8) {
            Assert.assertEquals(String.class, ffdE.getType().getTypeClass());
        }
    }
    ffd.clear();
    // Scala-style full tuple selection for POJOs
    pojoType.getFlatFields("complex.word._", 0, ffd);
    Assert.assertEquals(3, ffd.size());
    ffd.clear();
    pojoType.getFlatFields("complex.*", 0, ffd);
    Assert.assertEquals(9, ffd.size());
    // check that it returns positions 0-8
    for (FlatFieldDescriptor ffdE : ffd) {
        final int pos = ffdE.getPosition();
        Assert.assertTrue(ffdE.getPosition() <= 8);
        Assert.assertTrue(0 <= ffdE.getPosition());
        if (pos == 0) {
            Assert.assertEquals(List.class, ffdE.getType().getTypeClass());
        }
        if (pos == 1) {
            Assert.assertEquals(Date.class, ffdE.getType().getTypeClass());
        }
        if (pos == 2) {
            Assert.assertEquals(Object.class, ffdE.getType().getTypeClass());
        }
        if (pos == 3) {
            Assert.assertEquals(Float.class, ffdE.getType().getTypeClass());
        }
        if (pos == 4) {
            Assert.assertEquals(Integer.class, ffdE.getType().getTypeClass());
        }
        if (pos == 5) {
            Assert.assertEquals(MyValue.class, ffdE.getType().getTypeClass());
        }
        if (pos == 6) {
            Assert.assertEquals(Long.class, ffdE.getType().getTypeClass());
        }
        if (pos == 7) {
            Assert.assertEquals(Long.class, ffdE.getType().getTypeClass());
        }
        if (pos == 8) {
            Assert.assertEquals(String.class, ffdE.getType().getTypeClass());
        }
        if (pos == 9) {
            Assert.assertEquals(Integer.class, ffdE.getType().getTypeClass());
        }
    }
    ffd.clear();
    pojoType.getFlatFields("*", 0, ffd);
    Assert.assertEquals(10, ffd.size());
    // check that it returns positions 0-9
    for (FlatFieldDescriptor ffdE : ffd) {
        Assert.assertTrue(ffdE.getPosition() <= 9);
        Assert.assertTrue(0 <= ffdE.getPosition());
        if (ffdE.getPosition() == 9) {
            Assert.assertEquals(Integer.class, ffdE.getType().getTypeClass());
        }
    }
    ffd.clear();
    // ComplexNestedClass complex
    TypeInformation<?> typeComplexNested = pojoType.getTypeAt(0);
    Assert.assertTrue(typeComplexNested instanceof PojoTypeInfo);
    Assert.assertEquals(7, typeComplexNested.getArity());
    Assert.assertEquals(9, typeComplexNested.getTotalFields());
    PojoTypeInfo<?> pojoTypeComplexNested = (PojoTypeInfo<?>) typeComplexNested;
    boolean dateSeen = false, intSeen = false, floatSeen = false, tupleSeen = false, objectSeen = false, writableSeen = false, collectionSeen = false;
    for (int i = 0; i < pojoTypeComplexNested.getArity(); i++) {
        PojoField field = pojoTypeComplexNested.getPojoFieldAt(i);
        String name = field.getField().getName();
        if (name.equals("date")) {
            if (dateSeen) {
                Assert.fail("already seen");
            }
            dateSeen = true;
            Assert.assertEquals(BasicTypeInfo.DATE_TYPE_INFO, field.getTypeInformation());
            Assert.assertEquals(Date.class, field.getTypeInformation().getTypeClass());
        } else if (name.equals("someNumberWithÜnicödeNäme")) {
            if (intSeen) {
                Assert.fail("already seen");
            }
            intSeen = true;
            Assert.assertEquals(BasicTypeInfo.INT_TYPE_INFO, field.getTypeInformation());
            Assert.assertEquals(Integer.class, field.getTypeInformation().getTypeClass());
        } else if (name.equals("someFloat")) {
            if (floatSeen) {
                Assert.fail("already seen");
            }
            floatSeen = true;
            Assert.assertEquals(BasicTypeInfo.FLOAT_TYPE_INFO, field.getTypeInformation());
            Assert.assertEquals(Float.class, field.getTypeInformation().getTypeClass());
        } else if (name.equals("word")) {
            if (tupleSeen) {
                Assert.fail("already seen");
            }
            tupleSeen = true;
            Assert.assertTrue(field.getTypeInformation() instanceof TupleTypeInfo<?>);
            Assert.assertEquals(Tuple3.class, field.getTypeInformation().getTypeClass());
            // do some more advanced checks on the tuple
            TupleTypeInfo<?> tupleTypeFromComplexNested = (TupleTypeInfo<?>) field.getTypeInformation();
            Assert.assertEquals(BasicTypeInfo.LONG_TYPE_INFO, tupleTypeFromComplexNested.getTypeAt(0));
            Assert.assertEquals(BasicTypeInfo.LONG_TYPE_INFO, tupleTypeFromComplexNested.getTypeAt(1));
            Assert.assertEquals(BasicTypeInfo.STRING_TYPE_INFO, tupleTypeFromComplexNested.getTypeAt(2));
        } else if (name.equals("nothing")) {
            if (objectSeen) {
                Assert.fail("already seen");
            }
            objectSeen = true;
            Assert.assertEquals(new GenericTypeInfo<Object>(Object.class), field.getTypeInformation());
            Assert.assertEquals(Object.class, field.getTypeInformation().getTypeClass());
        } else if (name.equals("valueType")) {
            if (writableSeen) {
                Assert.fail("already seen");
            }
            writableSeen = true;
            Assert.assertEquals(new ValueTypeInfo<>(MyValue.class), field.getTypeInformation());
            Assert.assertEquals(MyValue.class, field.getTypeInformation().getTypeClass());
        } else if (name.equals("collection")) {
            if (collectionSeen) {
                Assert.fail("already seen");
            }
            collectionSeen = true;
            Assert.assertEquals(new GenericTypeInfo(List.class), field.getTypeInformation());
        } else {
            Assert.fail("field " + field + " is not expected");
        }
    }
    Assert.assertTrue("Field was not present", dateSeen);
    Assert.assertTrue("Field was not present", intSeen);
    Assert.assertTrue("Field was not present", floatSeen);
    Assert.assertTrue("Field was not present", tupleSeen);
    Assert.assertTrue("Field was not present", objectSeen);
    Assert.assertTrue("Field was not present", writableSeen);
    Assert.assertTrue("Field was not present", collectionSeen);
    // int count
    TypeInformation<?> typeAtOne = pojoType.getTypeAt(1);
    Assert.assertTrue(typeAtOne instanceof BasicTypeInfo);
    Assert.assertEquals(WC.class, typeInfo.getTypeClass());
    Assert.assertEquals(2, typeInfo.getArity());
}
Also used : ArrayList(java.util.ArrayList) FlatFieldDescriptor(org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) Tuple3(org.apache.flink.api.java.tuple.Tuple3) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) ArrayList(java.util.ArrayList) List(java.util.List)
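
For context, a minimal, self-contained sketch of how getFlatFields maps a field expression to flat positions on a PojoTypeInfo. The WordCount class and all names below are hypothetical illustrations, not part of the test above:

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor;
import org.apache.flink.api.java.typeutils.PojoTypeInfo;

public class FlatFieldsSketch {

    // hypothetical POJO: public fields and a default constructor, so Flink analyzes it as a POJO type
    public static class WordCount {
        public String word;
        public int count;
    }

    public static void main(String[] args) {
        TypeInformation<WordCount> typeInfo = TypeInformation.of(WordCount.class);
        PojoTypeInfo<WordCount> pojoInfo = (PojoTypeInfo<WordCount>) typeInfo;

        // resolve the field expression "count" to its flat field descriptor(s)
        List<FlatFieldDescriptor> ffd = new ArrayList<>();
        pojoInfo.getFlatFields("count", 0, ffd);

        // POJO fields are sorted by name (as the test above also shows),
        // so "count" should come before "word" and sit at flat position 0
        System.out.println(ffd.get(0).getPosition() + " -> " + ffd.get(0).getType());
    }
}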

Example 2 with FlatFieldDescriptor

Use of org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor in project flink by apache.

The class PojoSerializerTest, method testTuplePojoTestEquality:

/**
 * Tests that the hashes returned by the POJO and tuple comparators are the same.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testTuplePojoTestEquality() {
    // test with a simple String key first
    PojoTypeInfo<TestUserClass> pType = (PojoTypeInfo<TestUserClass>) type;
    List<FlatFieldDescriptor> result = new ArrayList<FlatFieldDescriptor>();
    pType.getFlatFields("nestedClass.dumm2", 0, result);
    // see below
    int[] fields = new int[1];
    fields[0] = result.get(0).getPosition();
    TypeComparator<TestUserClass> pojoComp = pType.createComparator(fields, new boolean[] { true }, 0, new ExecutionConfig());
    TestUserClass pojoTestRecord = new TestUserClass(0, "abc", 3d, new int[] { 1, 2, 3 }, new Date(), new NestedTestUserClass(1, "haha", 4d, new int[] { 5, 4, 3 }));
    int pHash = pojoComp.hash(pojoTestRecord);
    Tuple1<String> tupleTest = new Tuple1<String>("haha");
    TupleTypeInfo<Tuple1<String>> tType = (TupleTypeInfo<Tuple1<String>>) TypeExtractor.getForObject(tupleTest);
    TypeComparator<Tuple1<String>> tupleComp = tType.createComparator(new int[] { 0 }, new boolean[] { true }, 0, new ExecutionConfig());
    int tHash = tupleComp.hash(tupleTest);
    Assert.assertTrue("The hashing for tuples and pojos must be the same, so that they are mixable", pHash == tHash);
    // it is important to use the same values here as in the nested POJO record above
    Tuple3<Integer, String, Double> multiTupleTest = new Tuple3<Integer, String, Double>(1, "haha", 4d);
    TupleTypeInfo<Tuple3<Integer, String, Double>> multiTupleType = (TupleTypeInfo<Tuple3<Integer, String, Double>>) TypeExtractor.getForObject(multiTupleTest);
    ExpressionKeys fieldKey = new ExpressionKeys(new int[] { 1, 0, 2 }, multiTupleType);
    ExpressionKeys expressKey = new ExpressionKeys(new String[] { "nestedClass.dumm2", "nestedClass.dumm1", "nestedClass.dumm3" }, pType);
    try {
        Assert.assertTrue("Expecting the keys to be compatible", fieldKey.areCompatible(expressKey));
    } catch (IncompatibleKeysException e) {
        e.printStackTrace();
        Assert.fail("Keys must be compatible: " + e.getMessage());
    }
    TypeComparator<TestUserClass> multiPojoComp = pType.createComparator(expressKey.computeLogicalKeyPositions(), new boolean[] { true, true, true }, 0, new ExecutionConfig());
    int multiPojoHash = multiPojoComp.hash(pojoTestRecord);
    // pojo order is: dumm2 (str), dumm1 (int), dumm3 (double).
    TypeComparator<Tuple3<Integer, String, Double>> multiTupleComp = multiTupleType.createComparator(fieldKey.computeLogicalKeyPositions(), new boolean[] { true, true, true }, 0, new ExecutionConfig());
    int multiTupleHash = multiTupleComp.hash(multiTupleTest);
    Assert.assertTrue("The hashing for tuples and pojos must be the same, so that they are mixable. Also for those with multiple key fields", multiPojoHash == multiTupleHash);
}
Also used : ArrayList(java.util.ArrayList) PojoTypeInfo(org.apache.flink.api.java.typeutils.PojoTypeInfo) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) FlatFieldDescriptor(org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor) ExpressionKeys(org.apache.flink.api.common.operators.Keys.ExpressionKeys) Date(java.util.Date) TupleTypeInfo(org.apache.flink.api.java.typeutils.TupleTypeInfo) Tuple1(org.apache.flink.api.java.tuple.Tuple1) Tuple3(org.apache.flink.api.java.tuple.Tuple3) IncompatibleKeysException(org.apache.flink.api.common.operators.Keys.IncompatibleKeysException) Test(org.junit.Test)
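
The general pattern the test relies on can be isolated into a short sketch: translate a key expression into logical key positions via ExpressionKeys, build a TypeComparator over those positions, and hash a record. The tuple type and values below are hypothetical, not taken from the test:

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class ComparatorHashSketch {

    @SuppressWarnings({ "unchecked", "rawtypes" })
    public static void main(String[] args) {
        Tuple2<String, Integer> record = new Tuple2<String, Integer>("haha", 42);
        TupleTypeInfo<Tuple2<String, Integer>> tupleType =
                (TupleTypeInfo<Tuple2<String, Integer>>) TypeExtractor.getForObject(record);

        // translate the key expression "f0" into logical key positions
        ExpressionKeys keys = new ExpressionKeys(new String[] { "f0" }, tupleType);

        // build a comparator over those positions and hash the record
        TypeComparator<Tuple2<String, Integer>> comparator =
                tupleType.createComparator(
                        keys.computeLogicalKeyPositions(), new boolean[] { true }, 0, new ExecutionConfig());
        System.out.println(comparator.hash(record));
    }
}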

Example 3 with FlatFieldDescriptor

Use of org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor in project flink by apache.

The class TypeExtractorTest, method testTupleWithBasicTypes:

@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testTupleWithBasicTypes() throws Exception {
    // use getMapReturnTypes()
    RichMapFunction<?, ?> function = new RichMapFunction<Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>, Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte> map(Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte> value) throws Exception {
            return null;
        }
    };
    TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(function, (TypeInformation) TypeInformation.of(new TypeHint<Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>>() {
    }));
    Assert.assertTrue(ti.isTupleType());
    Assert.assertEquals(9, ti.getArity());
    Assert.assertTrue(ti instanceof TupleTypeInfo);
    List<FlatFieldDescriptor> ffd = new ArrayList<FlatFieldDescriptor>();
    ((TupleTypeInfo) ti).getFlatFields("f3", 0, ffd);
    Assert.assertEquals(1, ffd.size());
    Assert.assertEquals(3, ffd.get(0).getPosition());
    TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
    Assert.assertEquals(Tuple9.class, tti.getTypeClass());
    for (int i = 0; i < 9; i++) {
        Assert.assertTrue(tti.getTypeAt(i) instanceof BasicTypeInfo);
    }
    Assert.assertEquals(BasicTypeInfo.INT_TYPE_INFO, tti.getTypeAt(0));
    Assert.assertEquals(BasicTypeInfo.LONG_TYPE_INFO, tti.getTypeAt(1));
    Assert.assertEquals(BasicTypeInfo.DOUBLE_TYPE_INFO, tti.getTypeAt(2));
    Assert.assertEquals(BasicTypeInfo.FLOAT_TYPE_INFO, tti.getTypeAt(3));
    Assert.assertEquals(BasicTypeInfo.BOOLEAN_TYPE_INFO, tti.getTypeAt(4));
    Assert.assertEquals(BasicTypeInfo.STRING_TYPE_INFO, tti.getTypeAt(5));
    Assert.assertEquals(BasicTypeInfo.CHAR_TYPE_INFO, tti.getTypeAt(6));
    Assert.assertEquals(BasicTypeInfo.SHORT_TYPE_INFO, tti.getTypeAt(7));
    Assert.assertEquals(BasicTypeInfo.BYTE_TYPE_INFO, tti.getTypeAt(8));
    // use getForObject()
    Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte> t = new Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>(1, 1L, 1.0, 1.0F, false, "Hello World", 'w', (short) 1, (byte) 1);
    Assert.assertTrue(TypeExtractor.getForObject(t) instanceof TupleTypeInfo);
    TupleTypeInfo<?> tti2 = (TupleTypeInfo<?>) TypeExtractor.getForObject(t);
    Assert.assertEquals(BasicTypeInfo.INT_TYPE_INFO, tti2.getTypeAt(0));
    Assert.assertEquals(BasicTypeInfo.LONG_TYPE_INFO, tti2.getTypeAt(1));
    Assert.assertEquals(BasicTypeInfo.DOUBLE_TYPE_INFO, tti2.getTypeAt(2));
    Assert.assertEquals(BasicTypeInfo.FLOAT_TYPE_INFO, tti2.getTypeAt(3));
    Assert.assertEquals(BasicTypeInfo.BOOLEAN_TYPE_INFO, tti2.getTypeAt(4));
    Assert.assertEquals(BasicTypeInfo.STRING_TYPE_INFO, tti2.getTypeAt(5));
    Assert.assertEquals(BasicTypeInfo.CHAR_TYPE_INFO, tti2.getTypeAt(6));
    Assert.assertEquals(BasicTypeInfo.SHORT_TYPE_INFO, tti2.getTypeAt(7));
    Assert.assertEquals(BasicTypeInfo.BYTE_TYPE_INFO, tti2.getTypeAt(8));
    // test that getForClass does not work
    try {
        TypeExtractor.getForClass(Tuple9.class);
        Assert.fail("Exception expected here");
    } catch (InvalidTypesException e) {
        // expected: the erased Tuple9 class does not reveal its field types
    }
}
Also used : ArrayList(java.util.ArrayList) Tuple9(org.apache.flink.api.java.tuple.Tuple9) FlatFieldDescriptor(org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) BigInteger(java.math.BigInteger) RichMapFunction(org.apache.flink.api.common.functions.RichMapFunction) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) InvalidTypesException(org.apache.flink.api.common.functions.InvalidTypesException) Test(org.junit.Test)
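
As a side note, getForClass fails for Tuple9 because of type erasure: the raw class carries no information about the tuple's field types. A minimal sketch (hypothetical tuple type and values) of the two approaches the test shows that do work, capturing the generics with a TypeHint or inferring them from a concrete instance:

import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class TupleTypeInfoSketch {

    public static void main(String[] args) {
        // 1) capture the generic parameters with a TypeHint
        TypeInformation<Tuple2<String, Integer>> fromHint =
                TypeInformation.of(new TypeHint<Tuple2<String, Integer>>() {});

        // 2) infer the field types from a concrete instance
        TypeInformation<Tuple2<String, Integer>> fromObject =
                TypeExtractor.getForObject(new Tuple2<String, Integer>("Hello World", 1));

        System.out.println(fromHint);
        System.out.println(fromObject);

        // TypeExtractor.getForClass(Tuple2.class) would instead throw InvalidTypesException,
        // because the erased class alone does not determine the tuple's field types.
    }
}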

Example 4 with FlatFieldDescriptor

Use of org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor in project flink by apache.

The class SemanticPropUtil, method parseReadFields:

private static void parseReadFields(SemanticProperties sp, String[] readFieldStrings, TypeInformation<?> inType, int input) {
    if (readFieldStrings == null) {
        return;
    }
    for (String s : readFieldStrings) {
        FieldSet readFields = new FieldSet();
        // remove whitespace characters
        s = s.replaceAll("\\s", "");
        Matcher wcMatcher = PATTERN_WILDCARD.matcher(s);
        // simple wildcard
        if (wcMatcher.matches()) {
            // add all fields
            for (int i = 0; i < inType.getTotalFields(); i++) {
                readFields = readFields.addField(i);
            }
        } else {
            // process field list
            Matcher matcher = PATTERN_LIST.matcher(s);
            if (!matcher.matches()) {
                throw new InvalidSemanticAnnotationException("Invalid format of read field annotation \"" + s + "\".");
            }
            // process field
            matcher = PATTERN_FIELD.matcher(s);
            while (matcher.find()) {
                String fieldStr = matcher.group();
                try {
                    List<FlatFieldDescriptor> ffds = getFlatFields(fieldStr, inType);
                    // get and add flat field positions
                    for (FlatFieldDescriptor ffd : ffds) {
                        readFields = readFields.addField(ffd.getPosition());
                    }
                } catch (InvalidFieldReferenceException ifre) {
                    throw new InvalidSemanticAnnotationException("Invalid field reference in read field annotation \"" + fieldStr + "\".", ifre);
                }
            }
        }
        if (sp instanceof SingleInputSemanticProperties) {
            ((SingleInputSemanticProperties) sp).addReadFields(readFields);
        } else if (sp instanceof DualInputSemanticProperties) {
            ((DualInputSemanticProperties) sp).addReadFields(input, readFields);
        }
    }
}
Also used : FieldSet(org.apache.flink.api.common.operators.util.FieldSet) Matcher(java.util.regex.Matcher) InvalidSemanticAnnotationException(org.apache.flink.api.common.operators.SemanticProperties.InvalidSemanticAnnotationException) SingleInputSemanticProperties(org.apache.flink.api.common.operators.SingleInputSemanticProperties) DualInputSemanticProperties(org.apache.flink.api.common.operators.DualInputSemanticProperties) FlatFieldDescriptor(org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor) InvalidFieldReferenceException(org.apache.flink.api.common.typeutils.CompositeType.InvalidFieldReferenceException)
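
parseReadFields is what ultimately processes the read-fields annotations that users place on their functions. A hedged sketch of such a function (hypothetical class and field choices, using the FunctionAnnotation.ReadFields annotation from the DataSet API):

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.FunctionAnnotation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;

// Declares that only f0 and f3 are read and evaluated to compute the result;
// f1 is merely copied to the output and does not need to be declared.
// The annotation string is parsed into flat field positions much like parseReadFields above.
@FunctionAnnotation.ReadFields("f0; f3")
public class ReadFieldsSketch
        implements MapFunction<Tuple4<Integer, Integer, Integer, Integer>, Tuple2<Integer, Integer>> {

    @Override
    public Tuple2<Integer, Integer> map(Tuple4<Integer, Integer, Integer, Integer> val) {
        if (val.f0 == 42) {
            return new Tuple2<Integer, Integer>(val.f0, val.f1);
        } else {
            return new Tuple2<Integer, Integer>(val.f3 + 10, val.f1);
        }
    }
}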

Example 5 with FlatFieldDescriptor

Use of org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor in project flink by apache.

The class SemanticPropUtil, method parseNonForwardedFields:

private static void parseNonForwardedFields(SemanticProperties sp, String[] nonForwardedStr, TypeInformation<?> inType, TypeInformation<?> outType, int input, boolean skipIncompatibleTypes) {
    if (nonForwardedStr == null) {
        return;
    }
    FieldSet excludedFields = new FieldSet();
    for (String s : nonForwardedStr) {
        // remove whitespace characters
        s = s.replaceAll("\\s", "");
        if (s.equals("")) {
            continue;
        }
        if (!inType.equals(outType)) {
            if (skipIncompatibleTypes) {
                continue;
            } else {
                throw new InvalidSemanticAnnotationException("Non-forwarded fields annotation only allowed for identical input and output types.");
            }
        }
        Matcher matcher = PATTERN_LIST.matcher(s);
        if (!matcher.matches()) {
            throw new InvalidSemanticAnnotationException("Invalid format of non-forwarded fields annotation \"" + s + "\".");
        }
        // process individual fields
        matcher = PATTERN_FIELD.matcher(s);
        while (matcher.find()) {
            String fieldStr = matcher.group();
            try {
                // get and add all flat field positions
                List<FlatFieldDescriptor> inFFDs = getFlatFields(fieldStr, inType);
                for (FlatFieldDescriptor ffd : inFFDs) {
                    excludedFields = excludedFields.addField(ffd.getPosition());
                }
            } catch (InvalidFieldReferenceException ifre) {
                throw new InvalidSemanticAnnotationException("Invalid field reference in non-forwarded fields annotation \"" + fieldStr + "\".", ifre);
            }
        }
    }
    for (int i = 0; i < inType.getTotalFields(); i++) {
        if (!excludedFields.contains(i)) {
            if (sp instanceof SingleInputSemanticProperties) {
                ((SingleInputSemanticProperties) sp).addForwardedField(i, i);
            } else if (sp instanceof DualInputSemanticProperties) {
                ((DualInputSemanticProperties) sp).addForwardedField(input, i, i);
            }
        }
    }
}
Also used : FieldSet(org.apache.flink.api.common.operators.util.FieldSet) Matcher(java.util.regex.Matcher) InvalidSemanticAnnotationException(org.apache.flink.api.common.operators.SemanticProperties.InvalidSemanticAnnotationException) SingleInputSemanticProperties(org.apache.flink.api.common.operators.SingleInputSemanticProperties) DualInputSemanticProperties(org.apache.flink.api.common.operators.DualInputSemanticProperties) FlatFieldDescriptor(org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor) InvalidFieldReferenceException(org.apache.flink.api.common.typeutils.CompositeType.InvalidFieldReferenceException)
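
Correspondingly, parseNonForwardedFields backs the non-forwarded-fields annotation: every field except the listed ones is marked as forwarded unchanged, which is why input and output types must be identical. A hedged sketch (hypothetical class, using FunctionAnnotation.NonForwardedFields from the DataSet API):

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.FunctionAnnotation;
import org.apache.flink.api.java.tuple.Tuple2;

// Input and output types are identical; every field except f1 is forwarded unchanged.
@FunctionAnnotation.NonForwardedFields("f1")
public class NonForwardedFieldsSketch
        implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {

    @Override
    public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> val) {
        // f0 is kept as-is, f1 is recomputed, so only f1 is declared as non-forwarded
        return new Tuple2<Integer, Integer>(val.f0, val.f1 * 2);
    }
}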

Aggregations

FlatFieldDescriptor (org.apache.flink.api.common.typeutils.CompositeType.FlatFieldDescriptor) 6
ArrayList (java.util.ArrayList) 3
Matcher (java.util.regex.Matcher) 3
DualInputSemanticProperties (org.apache.flink.api.common.operators.DualInputSemanticProperties) 3
InvalidSemanticAnnotationException (org.apache.flink.api.common.operators.SemanticProperties.InvalidSemanticAnnotationException) 3
SingleInputSemanticProperties (org.apache.flink.api.common.operators.SingleInputSemanticProperties) 3
InvalidFieldReferenceException (org.apache.flink.api.common.typeutils.CompositeType.InvalidFieldReferenceException) 3
FieldSet (org.apache.flink.api.common.operators.util.FieldSet) 2
BasicTypeInfo (org.apache.flink.api.common.typeinfo.BasicTypeInfo) 2
TypeHint (org.apache.flink.api.common.typeinfo.TypeHint) 2
Tuple3 (org.apache.flink.api.java.tuple.Tuple3) 2
Test (org.junit.Test) 2
BigInteger (java.math.BigInteger) 1
Date (java.util.Date) 1
List (java.util.List) 1
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig) 1
InvalidTypesException (org.apache.flink.api.common.functions.InvalidTypesException) 1
RichMapFunction (org.apache.flink.api.common.functions.RichMapFunction) 1
ExpressionKeys (org.apache.flink.api.common.operators.Keys.ExpressionKeys) 1
IncompatibleKeysException (org.apache.flink.api.common.operators.Keys.IncompatibleKeysException) 1