Use of org.apache.flink.table.catalog.DataTypeFactory in project flink by apache.
From the class BuiltInFunctionTestBase, the method testFunction:
@Test
public void testFunction() {
    final TableEnvironment env =
            TableEnvironment.create(EnvironmentSettings.newInstance().build());
    env.getConfig().addConfiguration(configuration());

    testSpec.functions.forEach(f -> env.createTemporarySystemFunction(f.getSimpleName(), f));

    final DataTypeFactory dataTypeFactory =
            ((TableEnvironmentInternal) env).getCatalogManager().getDataTypeFactory();

    final Table inputTable;
    if (testSpec.fieldDataTypes == null) {
        inputTable = env.fromValues(Row.of(testSpec.fieldData));
    } else {
        final DataTypes.UnresolvedField[] fields =
                IntStream.range(0, testSpec.fieldDataTypes.length)
                        .mapToObj(i -> DataTypes.FIELD("f" + i, testSpec.fieldDataTypes[i]))
                        .toArray(DataTypes.UnresolvedField[]::new);
        inputTable = env.fromValues(DataTypes.ROW(fields), Row.of(testSpec.fieldData));
    }

    for (TestItem testItem : testSpec.testItems) {
        try {
            if (testItem instanceof ResultTestItem<?>) {
                testResult(dataTypeFactory, env, inputTable, (ResultTestItem<?>) testItem);
            } else if (testItem instanceof ErrorTestItem<?>) {
                testError(env, inputTable, (ErrorTestItem<?>) testItem);
            }
        } catch (Throwable t) {
            throw new AssertionError("Failing test item: " + testItem, t);
        }
    }
}
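
The key step above is the cast to TableEnvironmentInternal: the DataTypeFactory is not exposed on the public TableEnvironment interface, so the test base reaches it through the CatalogManager. A minimal sketch of the same lookup in isolation; the class name and the DECIMAL example are illustrative, not taken from the Flink sources:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.internal.TableEnvironmentInternal;
import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.types.DataType;

public final class DataTypeFactoryLookup {
    public static void main(String[] args) {
        final TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.newInstance().build());
        // The factory lives behind the internal interface, hence the cast.
        final DataTypeFactory factory =
                ((TableEnvironmentInternal) env).getCatalogManager().getDataTypeFactory();
        // Resolve a type string into a fully resolved DataType.
        final DataType decimalType = factory.createDataType("DECIMAL(10, 2)");
        System.out.println(decimalType);
    }
}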
Use of org.apache.flink.table.catalog.DataTypeFactory in project flink by apache.
From the class RelDataTypeJsonSerializer, the method serialize:
@Override
public void serialize(
        RelDataType relDataType, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
    final SerdeContext serdeContext = SerdeContext.get(serializerProvider);
    final DataTypeFactory dataTypeFactory =
            serdeContext.getFlinkContext().getCatalogManager().getDataTypeFactory();
    // Conversion to LogicalType also ensures that Calcite's type system is materialized,
    // so data types like DECIMAL will receive a concrete precision and scale (no longer
    // unspecified).
    final LogicalType logicalType =
            LogicalRelDataTypeConverter.toLogicalType(relDataType, dataTypeFactory);
    serializerProvider.defaultSerializeValue(logicalType, jsonGenerator);
}
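
The serializer does not write JSON itself: it converts the Calcite RelDataType into Flink's LogicalType and then hands the result back to Jackson via defaultSerializeValue. A minimal, self-contained sketch of that convert-then-delegate pattern; the Fahrenheit type and its conversion are invented for illustration:

import java.io.IOException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;

// Hypothetical wrapper type used only for this sketch.
class Fahrenheit {
    final double value;
    Fahrenheit(double value) { this.value = value; }
}

class FahrenheitSerializer extends StdSerializer<Fahrenheit> {
    FahrenheitSerializer() { super(Fahrenheit.class); }

    @Override
    public void serialize(Fahrenheit temp, JsonGenerator gen, SerializerProvider provider)
            throws IOException {
        // Convert to the canonical representation first ...
        final double celsius = (temp.value - 32) / 1.8;
        // ... then let Jackson serialize the converted value, mirroring how
        // RelDataTypeJsonSerializer delegates the converted LogicalType.
        provider.defaultSerializeValue(celsius, gen);
    }
}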
Use of org.apache.flink.table.catalog.DataTypeFactory in project flink-mirror by flink-ci.
From the class TypeInfoDataTypeConverter, the method convertToStructuredType:
private static DataType convertToStructuredType(
        DataTypeFactory dataTypeFactory,
        CompositeType<?> compositeType,
        boolean forceNullability) {
    final int arity = compositeType.getArity();
    final String[] fieldNames = compositeType.getFieldNames();
    final Class<?> typeClass = compositeType.getTypeClass();

    final Map<String, DataType> fieldDataTypes = new LinkedHashMap<>();
    IntStream.range(0, arity)
            .forEachOrdered(
                    pos ->
                            fieldDataTypes.put(
                                    fieldNames[pos],
                                    toDataType(dataTypeFactory, compositeType.getTypeAt(pos))));

    final List<String> fieldNamesReordered;
    final boolean isNullable;
    // for POJOs and Avro records
    if (compositeType instanceof PojoTypeInfo) {
        final PojoTypeInfo<?> pojoTypeInfo = (PojoTypeInfo<?>) compositeType;
        final List<Field> pojoFields =
                IntStream.range(0, arity)
                        .mapToObj(pojoTypeInfo::getPojoFieldAt)
                        .map(PojoField::getField)
                        .collect(Collectors.toList());

        // POJO serializer supports top-level nulls
        isNullable = true;

        // based on type information, all fields are boxed classes;
        // therefore we need to check the reflective field for more details
        fieldDataTypes.replaceAll(
                (name, dataType) -> {
                    final Class<?> fieldClass =
                            pojoFields.stream()
                                    .filter(f -> f.getName().equals(name))
                                    .findFirst()
                                    .orElseThrow(IllegalStateException::new)
                                    .getType();
                    if (fieldClass.isPrimitive()) {
                        return dataType.notNull().bridgedTo(fieldClass);
                    }
                    // serializer supports nullable fields
                    return dataType.nullable();
                });

        // best-effort extraction of the field order; if it fails, we use the default
        // order of PojoTypeInfo, which is alphabetical
        fieldNamesReordered = extractStructuredTypeFieldOrder(typeClass, pojoFields);
    } else {
        // for tuples and case classes

        // serializers don't support top-level nulls
        isNullable = forceNullability;

        // based on type information, all fields are boxed classes,
        // but case classes might contain primitives
        fieldDataTypes.replaceAll(
                (name, dataType) -> {
                    try {
                        final Class<?> fieldClass = getStructuredField(typeClass, name).getType();
                        if (fieldClass.isPrimitive()) {
                            return dataType.notNull().bridgedTo(fieldClass);
                        }
                    } catch (Throwable t) {
                        // ignore extraction errors and keep the original conversion class
                    }
                    return dataType;
                });

        // field order from type information is correct
        fieldNamesReordered = null;
    }

    final DataTypes.Field[] structuredFields;
    if (fieldNamesReordered != null) {
        structuredFields =
                fieldNamesReordered.stream()
                        .map(name -> DataTypes.FIELD(name, fieldDataTypes.get(name)))
                        .toArray(DataTypes.Field[]::new);
    } else {
        structuredFields =
                fieldDataTypes.entrySet().stream()
                        .map(e -> DataTypes.FIELD(e.getKey(), e.getValue()))
                        .toArray(DataTypes.Field[]::new);
    }

    final DataType structuredDataType = DataTypes.STRUCTURED(typeClass, structuredFields);
    if (isNullable) {
        return structuredDataType.nullable();
    } else {
        return structuredDataType.notNull();
    }
}
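
As a usage sketch, feeding the TypeInformation of a simple POJO through the public toDataType entry point ends up in this method: primitive fields become NOT NULL types bridged to their primitive classes, while reference fields stay nullable. The Order class below is an invented example; the factory is obtained as in the BuiltInFunctionTestBase snippet above:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.internal.TableEnvironmentInternal;
import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.utils.TypeInfoDataTypeConverter;

public final class PojoConversionSketch {

    // Hypothetical POJO used only for this sketch.
    public static class Order {
        public long id;        // primitive -> BIGINT NOT NULL, bridged to long
        public String product; // reference -> STRING, stays nullable
    }

    public static void main(String[] args) {
        final TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.newInstance().build());
        final DataTypeFactory factory =
                ((TableEnvironmentInternal) env).getCatalogManager().getDataTypeFactory();
        final DataType orderType =
                TypeInfoDataTypeConverter.toDataType(factory, TypeInformation.of(Order.class));
        // Expected to print a STRUCTURED type with id as BIGINT NOT NULL.
        System.out.println(orderType);
    }
}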
Use of org.apache.flink.table.catalog.DataTypeFactory in project flink-mirror by flink-ci.
From the class SqlAggFunctionVisitor, the method createSqlAggFunction:
private SqlAggFunction createSqlAggFunction(CallExpression call) {
    final FunctionDefinition definition = call.getFunctionDefinition();

    // legacy
    if (definition instanceof AggregateFunctionDefinition) {
        return createLegacySqlAggregateFunction(
                call.getFunctionIdentifier().orElse(null),
                (AggregateFunctionDefinition) definition);
    } else if (definition instanceof TableAggregateFunctionDefinition) {
        return createLegacySqlTableAggregateFunction(
                call.getFunctionIdentifier().orElse(null),
                (TableAggregateFunctionDefinition) definition);
    }

    // new stack
    final DataTypeFactory dataTypeFactory =
            ShortcutUtils.unwrapContext(relBuilder).getCatalogManager().getDataTypeFactory();
    final TypeInference typeInference = definition.getTypeInference(dataTypeFactory);
    return BridgingSqlAggFunction.of(
            dataTypeFactory,
            ShortcutUtils.unwrapTypeFactory(relBuilder),
            SqlKind.OTHER_FUNCTION,
            ContextResolvedFunction.fromCallExpression(call),
            typeInference);
}
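
The "new stack" branch relies on FunctionDefinition.getTypeInference(DataTypeFactory), so any function that declares its own TypeInference plugs into this code path. A minimal sketch of a scalar UDF overriding that hook; the TruncateFunction itself is invented for illustration:

import java.util.Optional;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.table.types.inference.TypeInference;

// Hypothetical UDF that declares its signature explicitly instead of
// relying on reflective extraction.
public class TruncateFunction extends ScalarFunction {

    public String eval(String input, Integer length) {
        return input == null ? null : input.substring(0, Math.min(input.length(), length));
    }

    @Override
    public TypeInference getTypeInference(DataTypeFactory typeFactory) {
        // The factory passed in here is the same catalog-backed instance that
        // SqlAggFunctionVisitor obtains via ShortcutUtils.unwrapContext(...).
        return TypeInference.newBuilder()
                .typedArguments(DataTypes.STRING(), DataTypes.INT())
                .outputTypeStrategy(callContext -> Optional.of(DataTypes.STRING()))
                .build();
    }
}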
Use of org.apache.flink.table.catalog.DataTypeFactory in project flink-mirror by flink-ci.
From the class StructuredObjectConverter, the method createOrError:
/**
 * Creates a {@link DataStructureConverter} for the given structured type.
 *
 * <p>Note: We do not validate whether the data type and the structured type implementation
 * match. This must have been done earlier in the {@link DataTypeFactory}.
 */
@SuppressWarnings("RedundantCast")
private static StructuredObjectConverter<?> createOrError(DataType dataType) {
    final List<DataType> fields = dataType.getChildren();
    final DataStructureConverter<Object, Object>[] fieldConverters =
            fields.stream()
                    .map(dt -> (DataStructureConverter<Object, Object>)
                            DataStructureConverters.getConverter(dt))
                    .toArray(DataStructureConverter[]::new);
    final RowData.FieldGetter[] fieldGetters =
            IntStream.range(0, fields.size())
                    .mapToObj(pos ->
                            RowData.createFieldGetter(fields.get(pos).getLogicalType(), pos))
                    .toArray(RowData.FieldGetter[]::new);
    final Class<?>[] fieldClasses =
            fields.stream().map(DataType::getConversionClass).toArray(Class[]::new);
    final StructuredType structuredType = (StructuredType) dataType.getLogicalType();
    final Class<?> implementationClass =
            structuredType.getImplementationClass().orElseThrow(IllegalStateException::new);
    final int uniqueClassId = nextUniqueClassId.getAndIncrement();
    final String converterName =
            String.format(
                    "%s$%s$Converter",
                    implementationClass.getName().replace('.', '$'), uniqueClassId);
    final String converterCode =
            generateCode(
                    converterName,
                    implementationClass,
                    getFieldNames(structuredType).toArray(new String[0]),
                    fieldClasses);
    return new StructuredObjectConverter<>(
            fieldConverters, fieldGetters, converterName, converterCode);
}
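
For context, the converter generated here translates between the user's structured class and Flink's internal RowData representation. Such a converter can be obtained and exercised through DataStructureConverters; the User class and the hand-built STRUCTURED type below are illustrative assumptions:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.conversion.DataStructureConverter;
import org.apache.flink.table.data.conversion.DataStructureConverters;
import org.apache.flink.table.types.DataType;

public final class StructuredConverterSketch {

    // Hypothetical structured type implementation class.
    public static class User {
        public String name;
        public int age;
    }

    public static void main(String[] args) {
        final DataType userType =
                DataTypes.STRUCTURED(
                        User.class,
                        DataTypes.FIELD("name", DataTypes.STRING()),
                        DataTypes.FIELD("age", DataTypes.INT().notNull().bridgedTo(int.class)));

        final DataStructureConverter<Object, Object> converter =
                DataStructureConverters.getConverter(userType);
        // Converters must be opened before use so the generated code can be compiled.
        converter.open(StructuredConverterSketch.class.getClassLoader());

        final User user = new User();
        user.name = "Alice";
        user.age = 42;
        final RowData internal = (RowData) converter.toInternal(user);
        System.out.println(internal.getArity()); // 2
    }
}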