Search in sources :

Example 1 with ROW

Use of org.apache.flink.table.types.logical.LogicalTypeRoot.ROW in the project flink by apache.

From the class DataTypeUtils, method stripRowPrefix.

/**
 * Removes a string prefix from the fields of the given row data type.
 *
 * <p>Field names that do not start with the prefix are kept unchanged. The conversion class
 * and children of the input data type are preserved.
 */
public static DataType stripRowPrefix(DataType dataType, String prefix) {
    Preconditions.checkArgument(dataType.getLogicalType().is(ROW), "Row data type expected.");
    final RowType rowType = (RowType) dataType.getLogicalType();
    // Strip the prefix from every matching field name; leave the others untouched.
    final List<String> newFieldNames =
            rowType.getFieldNames().stream()
                    .map(name -> name.startsWith(prefix) ? name.substring(prefix.length()) : name)
                    .collect(Collectors.toList());
    final LogicalType newRowType = LogicalTypeUtils.renameRowFields(rowType, newFieldNames);
    // Rebuild the data type around the renamed logical row type.
    return new FieldsDataType(newRowType, dataType.getConversionClass(), dataType.getChildren());
}
Also used : DataType(org.apache.flink.table.types.DataType) AtomicDataType(org.apache.flink.table.types.AtomicDataType) Arrays(java.util.Arrays) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) StructuredType(org.apache.flink.table.types.logical.StructuredType) MapType(org.apache.flink.table.types.logical.MapType) RowField(org.apache.flink.table.types.logical.RowType.RowField) StructuredAttribute(org.apache.flink.table.types.logical.StructuredType.StructuredAttribute) Projection(org.apache.flink.table.connector.Projection) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) LegacyTypeInformationType(org.apache.flink.table.types.logical.LegacyTypeInformationType) List(java.util.List) Stream(java.util.stream.Stream) CollectionDataType(org.apache.flink.table.types.CollectionDataType) DistinctType(org.apache.flink.table.types.logical.DistinctType) LogicalType(org.apache.flink.table.types.logical.LogicalType) ValidationException(org.apache.flink.table.api.ValidationException) Optional(java.util.Optional) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory) IntStream(java.util.stream.IntStream) LogicalTypeUtils.toInternalConversionClass(org.apache.flink.table.types.logical.utils.LogicalTypeUtils.toInternalConversionClass) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) Column(org.apache.flink.table.catalog.Column) RowType(org.apache.flink.table.types.logical.RowType) TimestampKind(org.apache.flink.table.types.logical.TimestampKind) LogicalTypeFamily(org.apache.flink.table.types.logical.LogicalTypeFamily) FieldsDataType(org.apache.flink.table.types.FieldsDataType) DISTINCT_TYPE(org.apache.flink.table.types.logical.LogicalTypeRoot.DISTINCT_TYPE) 
LogicalTypeUtils.removeTimeAttributes(org.apache.flink.table.types.logical.utils.LogicalTypeUtils.removeTimeAttributes) LogicalTypeUtils(org.apache.flink.table.types.logical.utils.LogicalTypeUtils) Nullable(javax.annotation.Nullable) ROW(org.apache.flink.table.types.logical.LogicalTypeRoot.ROW) MultisetType(org.apache.flink.table.types.logical.MultisetType) LogicalTypeChecks.getFieldNames(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldNames) LogicalTypeUtils.getAtomicName(org.apache.flink.table.types.logical.utils.LogicalTypeUtils.getAtomicName) CompositeType(org.apache.flink.api.common.typeutils.CompositeType) LogicalTypeChecks.isCompositeType(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isCompositeType) LogicalTypeDefaultVisitor(org.apache.flink.table.types.logical.utils.LogicalTypeDefaultVisitor) DataTypes(org.apache.flink.table.api.DataTypes) TypeTransformation(org.apache.flink.table.types.inference.TypeTransformation) ArrayType(org.apache.flink.table.types.logical.ArrayType) STRUCTURED_TYPE(org.apache.flink.table.types.logical.LogicalTypeRoot.STRUCTURED_TYPE) DataTypeVisitor(org.apache.flink.table.types.DataTypeVisitor) ExtractionUtils.primitiveToWrapper(org.apache.flink.table.types.extraction.ExtractionUtils.primitiveToWrapper) Internal(org.apache.flink.annotation.Internal) Collections(java.util.Collections) LogicalTypeChecks(org.apache.flink.table.types.logical.utils.LogicalTypeChecks) FieldsDataType(org.apache.flink.table.types.FieldsDataType) RowType(org.apache.flink.table.types.logical.RowType) LogicalType(org.apache.flink.table.types.logical.LogicalType)

Example 2 with ROW

Use of org.apache.flink.table.types.logical.LogicalTypeRoot.ROW in the project flink by apache.

From the class ValuesOperationFactory, method findCommonTypeAtPosition.

/**
 * Finds the common logical type of column {@code i} across all resolved rows and converts it
 * to a {@link DataType}.
 *
 * @throws ValidationException if no common super type exists for the column
 */
private DataType findCommonTypeAtPosition(List<List<ResolvedExpression>> resolvedRows, int i) {
    final List<LogicalType> candidateTypes = extractLogicalTypesAtPosition(resolvedRows, i);
    final Optional<LogicalType> commonType = LogicalTypeMerging.findCommonType(candidateTypes);
    final LogicalType resolvedType = commonType.orElseThrow(() -> {
        // Collect the conflicting column types in encounter order for the error message.
        final Set<DataType> columnTypes = resolvedRows.stream()
                .map(row -> row.get(i).getOutputDataType())
                .collect(Collectors.toCollection(LinkedHashSet::new));
        return new ValidationException(String.format(
                "Types in fromValues(...) must have a common super type. Could not find a common type"
                        + " for all rows at column %d.\n"
                        + "Could not find a common super type for types: %s",
                i, columnTypes));
    });
    return TypeConversions.fromLogicalToDataType(resolvedType);
}
Also used : IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) QueryOperation(org.apache.flink.table.operations.QueryOperation) ApiExpressionUtils.valueLiteral(org.apache.flink.table.expressions.ApiExpressionUtils.valueLiteral) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) CallExpression(org.apache.flink.table.expressions.CallExpression) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) LogicalTypeMerging(org.apache.flink.table.types.logical.utils.LogicalTypeMerging) Expression(org.apache.flink.table.expressions.Expression) ExpressionDefaultVisitor(org.apache.flink.table.expressions.ExpressionDefaultVisitor) ArrayList(java.util.ArrayList) NULL(org.apache.flink.table.types.logical.LogicalTypeRoot.NULL) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) FieldsDataType(org.apache.flink.table.types.FieldsDataType) ExpressionResolver(org.apache.flink.table.expressions.resolver.ExpressionResolver) MAP(org.apache.flink.table.types.logical.LogicalTypeRoot.MAP) LogicalTypeCasts.supportsExplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsExplicitCast) ValuesQueryOperation(org.apache.flink.table.operations.ValuesQueryOperation) Nullable(javax.annotation.Nullable) LinkedHashSet(java.util.LinkedHashSet) ROW(org.apache.flink.table.types.logical.LogicalTypeRoot.ROW) FunctionDefinition(org.apache.flink.table.functions.FunctionDefinition) BuiltInFunctionDefinitions(org.apache.flink.table.functions.BuiltInFunctionDefinitions) TableException(org.apache.flink.table.api.TableException) Set(java.util.Set) ValueLiteralExpression(org.apache.flink.table.expressions.ValueLiteralExpression) Collectors(java.util.stream.Collectors) List(java.util.List) CollectionDataType(org.apache.flink.table.types.CollectionDataType) LogicalType(org.apache.flink.table.types.logical.LogicalType) ARRAY(org.apache.flink.table.types.logical.LogicalTypeRoot.ARRAY) 
ValidationException(org.apache.flink.table.api.ValidationException) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) Collections(java.util.Collections) ValidationException(org.apache.flink.table.api.ValidationException) LogicalType(org.apache.flink.table.types.logical.LogicalType) DataType(org.apache.flink.table.types.DataType) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) FieldsDataType(org.apache.flink.table.types.FieldsDataType) CollectionDataType(org.apache.flink.table.types.CollectionDataType)

Example 3 with ROW

Use of org.apache.flink.table.types.logical.LogicalTypeRoot.ROW in the project flink by apache.

From the class ValuesOperationFactory, method create.

/**
 * Creates a valid {@link ValuesQueryOperation} operation.
 *
 * <p>It derives a row type based on {@link LogicalTypeMerging}. It flattens any row
 * constructors. It does not flatten ROWs which are a result of e.g. a function call.
 *
 * <p>The resulting schema can be provided manually. If it is not, the schema will be
 * automatically derived from the types of the expressions.
 */
QueryOperation create(@Nullable ResolvedSchema expectedSchema, List<ResolvedExpression> resolvedExpressions, ExpressionResolver.PostResolverFactory postResolverFactory) {
    final List<List<ResolvedExpression>> resolvedRows = unwrapFromRowConstructor(resolvedExpressions);
    if (expectedSchema != null) {
        // A manually provided schema must match the arity of every row.
        verifyAllSameSize(resolvedRows, expectedSchema.getColumnCount());
    }
    // Use the given schema if present; otherwise derive it from the expression types.
    final ResolvedSchema schema;
    if (expectedSchema == null) {
        schema = extractSchema(resolvedRows);
    } else {
        schema = expectedSchema;
    }
    // Cast each row's expressions so they conform to the expected row type.
    final List<List<ResolvedExpression>> castedExpressions =
            resolvedRows.stream()
                    .map(row -> convertTopLevelExpressionToExpectedRowType(
                            postResolverFactory, schema.getColumnDataTypes(), row))
                    .collect(Collectors.toList());
    return new ValuesQueryOperation(castedExpressions, schema);
}
Also used : IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) QueryOperation(org.apache.flink.table.operations.QueryOperation) ApiExpressionUtils.valueLiteral(org.apache.flink.table.expressions.ApiExpressionUtils.valueLiteral) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) CallExpression(org.apache.flink.table.expressions.CallExpression) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) LogicalTypeMerging(org.apache.flink.table.types.logical.utils.LogicalTypeMerging) Expression(org.apache.flink.table.expressions.Expression) ExpressionDefaultVisitor(org.apache.flink.table.expressions.ExpressionDefaultVisitor) ArrayList(java.util.ArrayList) NULL(org.apache.flink.table.types.logical.LogicalTypeRoot.NULL) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) FieldsDataType(org.apache.flink.table.types.FieldsDataType) ExpressionResolver(org.apache.flink.table.expressions.resolver.ExpressionResolver) MAP(org.apache.flink.table.types.logical.LogicalTypeRoot.MAP) LogicalTypeCasts.supportsExplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsExplicitCast) ValuesQueryOperation(org.apache.flink.table.operations.ValuesQueryOperation) Nullable(javax.annotation.Nullable) LinkedHashSet(java.util.LinkedHashSet) ROW(org.apache.flink.table.types.logical.LogicalTypeRoot.ROW) FunctionDefinition(org.apache.flink.table.functions.FunctionDefinition) BuiltInFunctionDefinitions(org.apache.flink.table.functions.BuiltInFunctionDefinitions) TableException(org.apache.flink.table.api.TableException) Set(java.util.Set) ValueLiteralExpression(org.apache.flink.table.expressions.ValueLiteralExpression) Collectors(java.util.stream.Collectors) List(java.util.List) CollectionDataType(org.apache.flink.table.types.CollectionDataType) LogicalType(org.apache.flink.table.types.logical.LogicalType) ARRAY(org.apache.flink.table.types.logical.LogicalTypeRoot.ARRAY) 
ValidationException(org.apache.flink.table.api.ValidationException) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) Collections(java.util.Collections) ValuesQueryOperation(org.apache.flink.table.operations.ValuesQueryOperation) ArrayList(java.util.ArrayList) List(java.util.List) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema)

Example 4 with ROW

Use of org.apache.flink.table.types.logical.LogicalTypeRoot.ROW in the project flink by apache.

From the class DataViewUtils, method extractDataViews.

/**
 * Searches for data views in the data type of an accumulator and extracts them.
 *
 * <p>Only ROW and structured accumulator types are inspected; any other type yields an empty
 * list. Data views are only allowed at the first level of the accumulator; a view nested
 * deeper inside a field type raises a {@link TableException}.
 */
public static List<DataViewSpec> extractDataViews(int aggIndex, DataType accumulatorDataType) {
    final LogicalType accumulatorType = accumulatorDataType.getLogicalType();
    if (!accumulatorType.is(ROW) && !accumulatorType.is(STRUCTURED_TYPE)) {
        // Non-composite accumulators cannot carry data views.
        return Collections.emptyList();
    }
    final List<String> fieldNames = getFieldNames(accumulatorType);
    final List<DataType> fieldDataTypes = accumulatorDataType.getChildren();
    final List<DataViewSpec> specs = new ArrayList<>();
    for (int pos = 0; pos < fieldDataTypes.size(); pos++) {
        final DataType childDataType = fieldDataTypes.get(pos);
        final LogicalType childType = childDataType.getLogicalType();
        // Top-level list/map views become specs keyed by a state id derived from the field name.
        if (isDataView(childType, ListView.class)) {
            specs.add(new ListViewSpec(createStateId(aggIndex, fieldNames.get(pos)), pos, childDataType.getChildren().get(0)));
        } else if (isDataView(childType, MapView.class)) {
            specs.add(new MapViewSpec(createStateId(aggIndex, fieldNames.get(pos)), pos, childDataType.getChildren().get(0), false));
        }
        // Reject views hiding below the first level of this field's type.
        final boolean containsNestedView = childType.getChildren().stream()
                .anyMatch(child -> hasNested(child, t -> isDataView(t, DataView.class)));
        if (containsNestedView) {
            throw new TableException("Data views are only supported in the first level of a composite accumulator type.");
        }
    }
    return specs;
}
Also used : MapView(org.apache.flink.table.api.dataview.MapView) DataType(org.apache.flink.table.types.DataType) DataViewSpec(org.apache.flink.table.runtime.dataview.DataViewSpec) StructuredType(org.apache.flink.table.types.logical.StructuredType) ListViewSpec(org.apache.flink.table.runtime.dataview.ListViewSpec) Function(java.util.function.Function) ArrayList(java.util.ArrayList) RawType(org.apache.flink.table.types.logical.RawType) DataView(org.apache.flink.table.api.dataview.DataView) LogicalTypeChecks.hasNested(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasNested) ListView(org.apache.flink.table.api.dataview.ListView) LazyBinaryFormat(org.apache.flink.table.data.binary.LazyBinaryFormat) ROW(org.apache.flink.table.types.logical.LogicalTypeRoot.ROW) DataTypeUtils(org.apache.flink.table.types.utils.DataTypeUtils) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) NullSerializer(org.apache.flink.table.dataview.NullSerializer) LogicalTypeChecks.getFieldNames(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldNames) TableException(org.apache.flink.table.api.TableException) MapViewSpec(org.apache.flink.table.runtime.dataview.MapViewSpec) DataTypes(org.apache.flink.table.api.DataTypes) TypeTransformation(org.apache.flink.table.types.inference.TypeTransformation) List(java.util.List) STRUCTURED_TYPE(org.apache.flink.table.types.logical.LogicalTypeRoot.STRUCTURED_TYPE) LogicalType(org.apache.flink.table.types.logical.LogicalType) Internal(org.apache.flink.annotation.Internal) ExternalSerializer(org.apache.flink.table.runtime.typeutils.ExternalSerializer) Collections(java.util.Collections) TableException(org.apache.flink.table.api.TableException) DataViewSpec(org.apache.flink.table.runtime.dataview.DataViewSpec) ArrayList(java.util.ArrayList) LogicalType(org.apache.flink.table.types.logical.LogicalType) MapViewSpec(org.apache.flink.table.runtime.dataview.MapViewSpec) 
ListViewSpec(org.apache.flink.table.runtime.dataview.ListViewSpec) DataType(org.apache.flink.table.types.DataType) MapView(org.apache.flink.table.api.dataview.MapView)

Aggregations

Collections (java.util.Collections)4 List (java.util.List)4 Internal (org.apache.flink.annotation.Internal)4 DataType (org.apache.flink.table.types.DataType)4 LogicalType (org.apache.flink.table.types.logical.LogicalType)4 ROW (org.apache.flink.table.types.logical.LogicalTypeRoot.ROW)4 ArrayList (java.util.ArrayList)3 Optional (java.util.Optional)3 Collectors (java.util.stream.Collectors)3 IntStream (java.util.stream.IntStream)3 Nullable (javax.annotation.Nullable)3 TableException (org.apache.flink.table.api.TableException)3 ValidationException (org.apache.flink.table.api.ValidationException)3 ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema)3 CollectionDataType (org.apache.flink.table.types.CollectionDataType)3 FieldsDataType (org.apache.flink.table.types.FieldsDataType)3 KeyValueDataType (org.apache.flink.table.types.KeyValueDataType)3 LinkedHashSet (java.util.LinkedHashSet)2 Set (java.util.Set)2 DataTypes (org.apache.flink.table.api.DataTypes)2