use of org.apache.flink.api.common.typeinfo.TypeInformation in project flink by apache.
the class AvroTypeInfo method generateFieldsFromAvroSchema.
private static <T extends SpecificRecordBase> List<PojoField> generateFieldsFromAvroSchema(Class<T> typeClass) {
    PojoTypeExtractor pte = new PojoTypeExtractor();
    ArrayList<Type> typeHierarchy = new ArrayList<>();
    typeHierarchy.add(typeClass);
    TypeInformation ti = pte.analyzePojo(typeClass, typeHierarchy, null, null, null);
    if (!(ti instanceof PojoTypeInfo)) {
        throw new IllegalStateException("Expecting type to be a PojoTypeInfo");
    }
    PojoTypeInfo pti = (PojoTypeInfo) ti;
    List<PojoField> newFields = new ArrayList<>(pti.getTotalFields());
    for (int i = 0; i < pti.getArity(); i++) {
        PojoField f = pti.getPojoFieldAt(i);
        TypeInformation newType = f.getTypeInformation();
        // check if type is a CharSequence
        if (newType instanceof GenericTypeInfo) {
            if (newType.getTypeClass().equals(CharSequence.class)) {
                // replace the type by a org.apache.avro.util.Utf8
                newType = new GenericTypeInfo(org.apache.avro.util.Utf8.class);
            }
        }
        PojoField newField = new PojoField(f.getField(), newType);
        newFields.add(newField);
    }
    return newFields;
}
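For context, a minimal usage sketch: wrapping an Avro-generated record class in an AvroTypeInfo is what triggers the field generation above. The User class below is a hypothetical Avro-generated subclass of SpecificRecordBase (so the sketch is not compilable as-is), and the AvroTypeInfo package differs between Flink versions.

// "User" is a placeholder for a class generated by the Avro compiler.
AvroTypeInfo<User> userTypeInfo = new AvroTypeInfo<>(User.class);

// The constructor runs generateFieldsFromAvroSchema(User.class), so every
// CharSequence field of the record is registered as a GenericTypeInfo of
// org.apache.avro.util.Utf8 rather than as a generic CharSequence.
System.out.println(userTypeInfo.getArity());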
use of org.apache.flink.api.common.typeinfo.TypeInformation in project flink by apache.
the class PojoTypeInfo method getTypeAt.
@SuppressWarnings("unchecked")
@Override
@PublicEvolving
public <X> TypeInformation<X> getTypeAt(String fieldExpression) {
    Matcher matcher = PATTERN_NESTED_FIELDS.matcher(fieldExpression);
    if (!matcher.matches()) {
        if (fieldExpression.startsWith(ExpressionKeys.SELECT_ALL_CHAR) || fieldExpression.startsWith(ExpressionKeys.SELECT_ALL_CHAR_SCALA)) {
            throw new InvalidFieldReferenceException("Wildcard expressions are not allowed here.");
        } else {
            throw new InvalidFieldReferenceException("Invalid format of POJO field expression \"" + fieldExpression + "\".");
        }
    }
    String field = matcher.group(1);
    // get field
    int fieldPos = -1;
    TypeInformation<?> fieldType = null;
    for (int i = 0; i < fields.length; i++) {
        if (fields[i].getField().getName().equals(field)) {
            fieldPos = i;
            fieldType = fields[i].getTypeInformation();
            break;
        }
    }
    if (fieldPos == -1) {
        throw new InvalidFieldReferenceException("Unable to find field \"" + field + "\" in type " + this + ".");
    }
    String tail = matcher.group(3);
    if (tail == null) {
        // we found the type
        return (TypeInformation<X>) fieldType;
    } else {
        if (fieldType instanceof CompositeType<?>) {
            return ((CompositeType<?>) fieldType).getTypeAt(tail);
        } else {
            throw new InvalidFieldReferenceException("Nested field expression \"" + tail + "\" not possible on atomic type " + fieldType + ".");
        }
    }
}
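A short, self-contained sketch of how these field expressions are used from the outside. The Outer and Inner POJOs are illustrative and not part of the class above.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.PojoTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class FieldExpressionExample {

    // Public fields and the implicit no-arg constructor make these Flink POJOs.
    public static class Inner {
        public long count;
    }

    public static class Outer {
        public String word;
        public Inner stats;
    }

    public static void main(String[] args) {
        PojoTypeInfo<Outer> pojoInfo =
                (PojoTypeInfo<Outer>) TypeExtractor.createTypeInfo(Outer.class);

        // Matches the first group ("word"); the tail is null, so the field type is returned directly.
        TypeInformation<?> wordType = pojoInfo.getTypeAt("word");

        // "stats" is itself a CompositeType, so the tail "count" is resolved recursively.
        TypeInformation<?> countType = pojoInfo.getTypeAt("stats.count");

        System.out.println(wordType);   // String
        System.out.println(countType);  // Long
    }
}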
use of org.apache.flink.api.common.typeinfo.TypeInformation in project flink by apache.
the class TypeExtractor method createTypeInfoFromInput.
/**
 * Finds the type information to a type variable.
 *
 * It solves the following: return the type information for "returnTypeVar" given that
 * "inType" has the type information "inTypeInfo". Thus, "inType" must contain
 * "returnTypeVar" in the "inputTypeHierarchy", otherwise null is returned.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private <IN1> TypeInformation<?> createTypeInfoFromInput(TypeVariable<?> returnTypeVar, ArrayList<Type> inputTypeHierarchy, Type inType, TypeInformation<IN1> inTypeInfo) {
    TypeInformation<?> info = null;
    // use a factory to find the type information corresponding to the type variable
    final ArrayList<Type> factoryHierarchy = new ArrayList<>(inputTypeHierarchy);
    final TypeInfoFactory<?> factory = getClosestFactory(factoryHierarchy, inType);
    if (factory != null) {
        // the type that defines the factory is last in the factory hierarchy
        final Type factoryDefiningType = factoryHierarchy.get(factoryHierarchy.size() - 1);
        // if the defining type has generics, the factory needs to be asked for a mapping of subtypes to type information
        if (factoryDefiningType instanceof ParameterizedType) {
            final Type[] typeParams = typeToClass(factoryDefiningType).getTypeParameters();
            final Type[] actualParams = ((ParameterizedType) factoryDefiningType).getActualTypeArguments();
            // go through all elements and search for type variables
            for (int i = 0; i < actualParams.length; i++) {
                final Map<String, TypeInformation<?>> componentInfo = inTypeInfo.getGenericParameters();
                final String typeParamName = typeParams[i].toString();
                if (!componentInfo.containsKey(typeParamName) || componentInfo.get(typeParamName) == null) {
                    throw new InvalidTypesException("TypeInformation '" + inTypeInfo.getClass().getSimpleName() +
                        "' does not supply a mapping of TypeVariable '" + typeParamName + "' to corresponding TypeInformation. " +
                        "Input type inference can only produce a result with this information. " +
                        "Please implement method 'TypeInformation.getGenericParameters()' for this.");
                }
                info = createTypeInfoFromInput(returnTypeVar, factoryHierarchy, actualParams[i], componentInfo.get(typeParamName));
                if (info != null) {
                    break;
                }
            }
        }
    }
    // the input is the searched type variable itself
    else if (sameTypeVars(inType, returnTypeVar)) {
        return inTypeInfo;
    }
    // the input is a (different) type variable
    else if (inType instanceof TypeVariable) {
        Type resolvedInType = materializeTypeVariable(inputTypeHierarchy, (TypeVariable<?>) inType);
        if (resolvedInType != inType) {
            info = createTypeInfoFromInput(returnTypeVar, inputTypeHierarchy, resolvedInType, inTypeInfo);
        }
    }
    // the input is an array
    else if (inType instanceof GenericArrayType) {
        TypeInformation<?> componentInfo = null;
        if (inTypeInfo instanceof BasicArrayTypeInfo) {
            componentInfo = ((BasicArrayTypeInfo<?, ?>) inTypeInfo).getComponentInfo();
        } else if (inTypeInfo instanceof PrimitiveArrayTypeInfo) {
            componentInfo = BasicTypeInfo.getInfoFor(inTypeInfo.getTypeClass().getComponentType());
        } else if (inTypeInfo instanceof ObjectArrayTypeInfo) {
            componentInfo = ((ObjectArrayTypeInfo<?, ?>) inTypeInfo).getComponentInfo();
        }
        info = createTypeInfoFromInput(returnTypeVar, inputTypeHierarchy, ((GenericArrayType) inType).getGenericComponentType(), componentInfo);
    }
    // the input is a tuple
    else if (inTypeInfo instanceof TupleTypeInfo && isClassType(inType) && Tuple.class.isAssignableFrom(typeToClass(inType))) {
        ParameterizedType tupleBaseClass;
        // get the tuple base class from a possible tuple subclass
        while (!(isClassType(inType) && typeToClass(inType).getSuperclass().equals(Tuple.class))) {
            inputTypeHierarchy.add(inType);
            inType = typeToClass(inType).getGenericSuperclass();
        }
        inputTypeHierarchy.add(inType);
        // we can assume the type to be parameterized since we already did input validation
        tupleBaseClass = (ParameterizedType) inType;
        Type[] tupleElements = tupleBaseClass.getActualTypeArguments();
        // go through all tuple elements and search for type variables
        for (int i = 0; i < tupleElements.length; i++) {
            info = createTypeInfoFromInput(returnTypeVar, inputTypeHierarchy, tupleElements[i], ((TupleTypeInfo<?>) inTypeInfo).getTypeAt(i));
            if (info != null) {
                break;
            }
        }
    }
    // the input is a POJO
    else if (inTypeInfo instanceof PojoTypeInfo && isClassType(inType)) {
        // build the entire type hierarchy for the POJO
        getTypeHierarchy(inputTypeHierarchy, inType, Object.class);
        // determine a field containing the type variable
        List<Field> fields = getAllDeclaredFields(typeToClass(inType), false);
        for (Field field : fields) {
            Type fieldType = field.getGenericType();
            if (fieldType instanceof TypeVariable && sameTypeVars(returnTypeVar, materializeTypeVariable(inputTypeHierarchy, (TypeVariable<?>) fieldType))) {
                return getTypeOfPojoField(inTypeInfo, field);
            } else if (fieldType instanceof ParameterizedType || fieldType instanceof GenericArrayType) {
                ArrayList<Type> typeHierarchyWithFieldType = new ArrayList<>(inputTypeHierarchy);
                typeHierarchyWithFieldType.add(fieldType);
                TypeInformation<?> foundInfo = createTypeInfoFromInput(returnTypeVar, typeHierarchyWithFieldType, fieldType, getTypeOfPojoField(inTypeInfo, field));
                if (foundInfo != null) {
                    return foundInfo;
                }
            }
        }
    }
    return info;
}
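Seen from the user side, this resolution is what allows the extractor to derive a function's output type from the TypeInformation of its input when the output reuses one of the input's type variables. A hedged sketch; the SecondField function is illustrative.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class InputInferenceExample {

    // The output type is the type variable T of the input tuple, so the extractor
    // has to look T up in the input TypeInformation (the tuple branch above).
    public static class SecondField<T> implements MapFunction<Tuple2<String, T>, T> {
        @Override
        public T map(Tuple2<String, T> value) {
            return value.f1;
        }
    }

    public static void main(String[] args) {
        TupleTypeInfo<Tuple2<String, Integer>> inType =
                new TupleTypeInfo<>(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);

        // createTypeInfoFromInput walks the tuple elements, finds T at position 1
        // and returns the corresponding element type of the input: Integer.
        TypeInformation<Integer> outType =
                TypeExtractor.getMapReturnTypes(new SecondField<Integer>(), inType);

        System.out.println(outType); // Integer
    }
}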
use of org.apache.flink.api.common.typeinfo.TypeInformation in project flink by apache.
the class PartitionOperator method computeOrdering.
private static <T> Ordering computeOrdering(Keys<T> pKeys, Order[] orders) {
    Ordering ordering = new Ordering();
    final int[] logicalKeyPositions = pKeys.computeLogicalKeyPositions();
    if (orders == null) {
        for (int key : logicalKeyPositions) {
            ordering.appendOrdering(key, null, Order.ASCENDING);
        }
    } else {
        final TypeInformation<?>[] originalKeyFieldTypes = pKeys.getOriginalKeyFieldTypes();
        int index = 0;
        for (int i = 0; i < originalKeyFieldTypes.length; i++) {
            final int typeTotalFields = originalKeyFieldTypes[i].getTotalFields();
            for (int j = index; j < index + typeTotalFields; j++) {
                ordering.appendOrdering(logicalKeyPositions[j], null, orders[i]);
            }
            index += typeTotalFields;
        }
    }
    return ordering;
}
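For context, a sketch of where the Order array comes from in the DataSet API: withOrders on a range partitioning supplies one Order per key field, and computeOrdering repeats each Order for every flat field that the corresponding key field expands to (the inner loop over typeTotalFields). The data and key positions below are illustrative.

import org.apache.flink.api.common.operators.Order;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;

public class RangePartitionOrderExample {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple3<Integer, String, Double>> data = env.fromElements(
                Tuple3.of(1, "a", 1.0),
                Tuple3.of(2, "b", 2.0));

        // Two key fields and two orders: computeOrdering maps ASCENDING onto the
        // logical position of field 0 and DESCENDING onto that of field 1.
        data.partitionByRange(0, 1)
                .withOrders(Order.ASCENDING, Order.DESCENDING)
                .print(); // print() triggers execution in the DataSet API
    }
}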
use of org.apache.flink.api.common.typeinfo.TypeInformation in project flink by apache.
the class CollectionInputFormatTest method testSerializability.
@Test
public void testSerializability() {
    try (ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            ObjectOutputStream out = new ObjectOutputStream(buffer)) {
        Collection<ElementType> inputCollection = new ArrayList<ElementType>();
        ElementType element1 = new ElementType(1);
        ElementType element2 = new ElementType(2);
        ElementType element3 = new ElementType(3);
        inputCollection.add(element1);
        inputCollection.add(element2);
        inputCollection.add(element3);
        @SuppressWarnings("unchecked")
        TypeInformation<ElementType> info = (TypeInformation<ElementType>) TypeExtractor.createTypeInfo(ElementType.class);
        CollectionInputFormat<ElementType> inputFormat = new CollectionInputFormat<ElementType>(inputCollection, info.createSerializer(new ExecutionConfig()));
        out.writeObject(inputFormat);
        ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        Object serializationResult = in.readObject();
        assertNotNull(serializationResult);
        assertTrue(serializationResult instanceof CollectionInputFormat<?>);
        @SuppressWarnings("unchecked")
        CollectionInputFormat<ElementType> result = (CollectionInputFormat<ElementType>) serializationResult;
        GenericInputSplit inputSplit = new GenericInputSplit(0, 1);
        inputFormat.open(inputSplit);
        result.open(inputSplit);
        while (!inputFormat.reachedEnd() && !result.reachedEnd()) {
            ElementType expectedElement = inputFormat.nextRecord(null);
            ElementType actualElement = result.nextRecord(null);
            assertEquals(expectedElement, actualElement);
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.toString());
    }
}
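The round trip is worth testing because a CollectionInputFormat travels with the job graph to the cluster: its custom Java serialization writes the collection elements through the TypeSerializer created from the element TypeInformation, so the element class itself does not have to implement java.io.Serializable.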