Usage example of org.apache.flink.table.types.utils.DataTypeFactoryMock in the Apache Flink project:
the method testIdentifierSerde of the class LogicalTypeJsonSerdeTest.
@Test
public void testIdentifierSerde() throws IOException {
final DataTypeFactoryMock dataTypeFactoryMock = new DataTypeFactoryMock();
final TableConfig tableConfig = TableConfig.getDefault();
final Configuration config = tableConfig.getConfiguration();
final CatalogManager catalogManager = preparedCatalogManager().dataTypeFactory(dataTypeFactoryMock).build();
final SerdeContext serdeContext = configuredSerdeContext(catalogManager, tableConfig);
// minimal plan content
config.set(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS, IDENTIFIER);
final String minimalJson = toJson(serdeContext, STRUCTURED_TYPE);
assertThat(minimalJson).isEqualTo("\"`default_catalog`.`default_database`.`MyType`\"");
// catalog lookup with miss
config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
dataTypeFactoryMock.logicalType = Optional.empty();
assertThatThrownBy(() -> toObject(serdeContext, minimalJson, LogicalType.class)).satisfies(anyCauseMatches(ValidationException.class, "No type found."));
// catalog lookup
config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
dataTypeFactoryMock.logicalType = Optional.of(STRUCTURED_TYPE);
assertThat(toObject(serdeContext, minimalJson, LogicalType.class)).isEqualTo(STRUCTURED_TYPE);
// maximum plan content
config.set(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS, ALL);
final String maximumJson = toJson(serdeContext, STRUCTURED_TYPE);
final ObjectMapper mapper = new ObjectMapper();
final JsonNode maximumJsonNode = mapper.readTree(maximumJson);
assertThat(maximumJsonNode.get(LogicalTypeJsonSerializer.FIELD_NAME_ATTRIBUTES)).isNotNull();
assertThat(maximumJsonNode.get(LogicalTypeJsonSerializer.FIELD_NAME_DESCRIPTION).asText()).isEqualTo("My original type.");
// catalog lookup with miss
config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
dataTypeFactoryMock.logicalType = Optional.empty();
assertThatThrownBy(() -> toObject(serdeContext, maximumJson, LogicalType.class)).satisfies(anyCauseMatches(ValidationException.class, "No type found."));
// catalog lookup
config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.IDENTIFIER);
dataTypeFactoryMock.logicalType = Optional.of(UPDATED_STRUCTURED_TYPE);
assertThat(toObject(serdeContext, maximumJson, LogicalType.class)).isEqualTo(UPDATED_STRUCTURED_TYPE);
// no lookup
config.set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, TableConfigOptions.CatalogPlanRestore.ALL);
dataTypeFactoryMock.logicalType = Optional.of(UPDATED_STRUCTURED_TYPE);
assertThat(toObject(serdeContext, maximumJson, LogicalType.class)).isEqualTo(STRUCTURED_TYPE);
}
Usage example of org.apache.flink.table.types.utils.DataTypeFactoryMock in the Apache Flink project:
the method dataTypeFactoryWithRawType of the class SchemaTranslatorTest.
private static DataTypeFactory dataTypeFactoryWithRawType(Class<?> rawType) {
final DataTypeFactoryMock dataTypeFactory = new DataTypeFactoryMock();
dataTypeFactory.dataType = Optional.of(DataTypeFactoryMock.dummyRaw(rawType));
return dataTypeFactory;
}
Usage example of org.apache.flink.table.types.utils.DataTypeFactoryMock in the Apache Flink project:
the method runTypeInference of the class InputTypeStrategiesTestBase.
private TypeInferenceUtil.Result runTypeInference(List<DataType> actualArgumentTypes) {
final FunctionDefinitionMock functionDefinitionMock = new FunctionDefinitionMock();
functionDefinitionMock.functionKind = FunctionKind.SCALAR;
final CallContextMock callContextMock = new CallContextMock();
callContextMock.typeFactory = new DataTypeFactoryMock();
callContextMock.functionDefinition = functionDefinitionMock;
callContextMock.argumentDataTypes = actualArgumentTypes;
callContextMock.argumentLiterals = IntStream.range(0, actualArgumentTypes.size()).mapToObj(i -> testSpec.literalPos != null && i == testSpec.literalPos).collect(Collectors.toList());
callContextMock.argumentValues = IntStream.range(0, actualArgumentTypes.size()).mapToObj(i -> (testSpec.literalPos != null && i == testSpec.literalPos) ? Optional.ofNullable(testSpec.literalValue) : Optional.empty()).collect(Collectors.toList());
callContextMock.argumentNulls = IntStream.range(0, actualArgumentTypes.size()).mapToObj(i -> false).collect(Collectors.toList());
callContextMock.name = "f";
callContextMock.outputDataType = Optional.empty();
final TypeInferenceUtil.SurroundingInfo surroundingInfo;
if (testSpec.surroundingStrategy != null) {
final TypeInference outerTypeInference = TypeInference.newBuilder().inputTypeStrategy(testSpec.surroundingStrategy).outputTypeStrategy(TypeStrategies.MISSING).build();
surroundingInfo = TypeInferenceUtil.SurroundingInfo.of("f_outer", functionDefinitionMock, outerTypeInference, 1, 0, callContextMock.isGroupedAggregation);
} else {
surroundingInfo = null;
}
return TypeInferenceUtil.runTypeInference(createTypeInference(), callContextMock, surroundingInfo);
}
Usage example of org.apache.flink.table.types.utils.DataTypeFactoryMock in the Apache Flink project:
the method testConversion of the class LogicalRelDataTypeConverterTest.
@ParameterizedTest
@MethodSource("testConversion")
public void testConversion(LogicalType logicalType) throws IOException {
final RelDataTypeFactory typeFactory = FlinkTypeFactory.INSTANCE();
final DataTypeFactoryMock dataTypeFactory = new DataTypeFactoryMock();
final RelDataType relDataType = LogicalRelDataTypeConverter.toRelDataType(logicalType, typeFactory);
assertThat(LogicalRelDataTypeConverter.toLogicalType(relDataType, dataTypeFactory)).isEqualTo(logicalType);
}
Aggregations