Search in sources :

Example 16 with StructuredType

use of org.apache.flink.table.types.logical.StructuredType in project flink by apache.

In the class LogicalTypeJsonDeserializer, the method deserializeStructuredTypeFromPlan:

/**
 * Deserializes a {@link StructuredType} from its "plan" JSON representation.
 *
 * <p>All top-level properties except the attributes are optional: object identifier,
 * implementation class, description, final/instantiable flags, comparison mode, and
 * super type are only applied when present in the JSON node. At least one of object
 * identifier or implementation class is expected to be present, mirroring the
 * serializer's contract.
 *
 * @param logicalTypeNode JSON node describing the structured type
 * @param serdeContext context used to resolve identifiers and load classes
 * @return the reconstructed {@link StructuredType}
 */
private static LogicalType deserializeStructuredTypeFromPlan(JsonNode logicalTypeNode, SerdeContext serdeContext) {
    final ObjectIdentifier identifier;
    if (logicalTypeNode.has(FIELD_NAME_OBJECT_IDENTIFIER)) {
        identifier = ObjectIdentifierJsonDeserializer.deserialize(logicalTypeNode.get(FIELD_NAME_OBJECT_IDENTIFIER).asText(), serdeContext);
    } else {
        identifier = null;
    }
    final Class<?> implementationClass;
    if (logicalTypeNode.has(FIELD_NAME_IMPLEMENTATION_CLASS)) {
        implementationClass = loadClass(logicalTypeNode.get(FIELD_NAME_IMPLEMENTATION_CLASS).asText(), serdeContext, "structured type");
    } else {
        implementationClass = null;
    }
    // Pick the builder factory matching whichever of the two optional anchors is present.
    final StructuredType.Builder builder;
    if (identifier != null && implementationClass != null) {
        builder = StructuredType.newBuilder(identifier, implementationClass);
    } else if (identifier != null) {
        builder = StructuredType.newBuilder(identifier);
    } else {
        builder = StructuredType.newBuilder(implementationClass);
    }
    if (logicalTypeNode.has(FIELD_NAME_DESCRIPTION)) {
        // Bug fix: read the same key that was checked above. The original read
        // FIELD_NAME_FIELD_DESCRIPTION here, so get(...) returned null for nodes
        // serialized under FIELD_NAME_DESCRIPTION and asText() failed.
        builder.description(logicalTypeNode.get(FIELD_NAME_DESCRIPTION).asText());
    }
    // Attributes are mandatory; each carries a name, a recursively deserialized type,
    // and an optional description.
    final ArrayNode attributeNodes = (ArrayNode) logicalTypeNode.get(FIELD_NAME_ATTRIBUTES);
    final List<StructuredAttribute> attributes = new ArrayList<>();
    for (JsonNode attributeNode : attributeNodes) {
        final String attributeName = attributeNode.get(FIELD_NAME_ATTRIBUTE_NAME).asText();
        final LogicalType attributeType = deserialize(attributeNode.get(FIELD_NAME_ATTRIBUTE_TYPE), serdeContext);
        final String attributeDescription;
        if (attributeNode.has(FIELD_NAME_ATTRIBUTE_DESCRIPTION)) {
            attributeDescription = attributeNode.get(FIELD_NAME_ATTRIBUTE_DESCRIPTION).asText();
        } else {
            attributeDescription = null;
        }
        attributes.add(new StructuredAttribute(attributeName, attributeType, attributeDescription));
    }
    builder.attributes(attributes);
    if (logicalTypeNode.has(FIELD_NAME_FINAL)) {
        builder.setFinal(logicalTypeNode.get(FIELD_NAME_FINAL).asBoolean());
    }
    if (logicalTypeNode.has(FIELD_NAME_INSTANTIABLE)) {
        builder.setInstantiable(logicalTypeNode.get(FIELD_NAME_INSTANTIABLE).asBoolean());
    }
    if (logicalTypeNode.has(FIELD_NAME_COMPARISON)) {
        builder.comparison(StructuredComparison.valueOf(logicalTypeNode.get(FIELD_NAME_COMPARISON).asText()));
    }
    // Super type is itself a structured type and is deserialized recursively.
    if (logicalTypeNode.has(FIELD_NAME_SUPER_TYPE)) {
        final StructuredType superType = (StructuredType) deserialize(logicalTypeNode.get(FIELD_NAME_SUPER_TYPE), serdeContext);
        builder.superType(superType);
    }
    return builder.build();
}
Also used : ArrayList(java.util.ArrayList) StructuredAttribute(org.apache.flink.table.types.logical.StructuredType.StructuredAttribute) LogicalType(org.apache.flink.table.types.logical.LogicalType) JsonNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode) ArrayNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) StructuredType(org.apache.flink.table.types.logical.StructuredType)

Example 17 with StructuredType

use of org.apache.flink.table.types.logical.StructuredType in project flink by apache.

In the class LogicalTypeUtils, the method toRowType:

/**
 * Converts any logical type to a row type. Composite types are converted to a row type. Atomic
 * types are wrapped into a field.
 */
public static RowType toRowType(LogicalType t) {
    switch (t.getTypeRoot()) {
        case ROW:
            // Already a row type; no conversion needed.
            return (RowType) t;
        case STRUCTURED_TYPE:
            // Map each structured attribute onto an equivalent row field,
            // preserving the outer type's nullability.
            final StructuredType structuredType = (StructuredType) t;
            final List<RowField> attributeFields =
                    structuredType.getAttributes().stream()
                            .map(LogicalTypeUtils::attributeToRowField)
                            .collect(Collectors.toList());
            return new RowType(structuredType.isNullable(), attributeFields);
        case DISTINCT_TYPE:
            // Unwrap distinct types and convert their source type instead.
            return toRowType(((DistinctType) t).getSourceType());
        default:
            // Atomic types are wrapped into a single-field row.
            return RowType.of(t);
    }
}

/** Converts a single structured attribute into an equivalent {@link RowField}. */
private static RowField attributeToRowField(StructuredType.StructuredAttribute attribute) {
    return new RowField(attribute.getName(), attribute.getType(), attribute.getDescription().orElse(null));
}
Also used : RowField(org.apache.flink.table.types.logical.RowType.RowField) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) IntStream(java.util.stream.IntStream) RowData(org.apache.flink.table.data.RowData) TimestampData(org.apache.flink.table.data.TimestampData) MapData(org.apache.flink.table.data.MapData) StructuredType(org.apache.flink.table.types.logical.StructuredType) DecimalData(org.apache.flink.table.data.DecimalData) RowType(org.apache.flink.table.types.logical.RowType) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) StringData(org.apache.flink.table.data.StringData) TimestampType(org.apache.flink.table.types.logical.TimestampType) ArrayData(org.apache.flink.table.data.ArrayData) List(java.util.List) DistinctType(org.apache.flink.table.types.logical.DistinctType) LogicalType(org.apache.flink.table.types.logical.LogicalType) RawValueData(org.apache.flink.table.data.RawValueData) RowField(org.apache.flink.table.types.logical.RowType.RowField) Internal(org.apache.flink.annotation.Internal) ZonedTimestampType(org.apache.flink.table.types.logical.ZonedTimestampType) RowType(org.apache.flink.table.types.logical.RowType) StructuredType(org.apache.flink.table.types.logical.StructuredType)

Example 18 with StructuredType

use of org.apache.flink.table.types.logical.StructuredType in project flink by apache.

In the class StructuredToStringCastRule, the method generateCodeBlockInternal:

/* Example generated code for MyStructuredType in CastRulesTest:

    builder$287.setLength(0);
    builder$287.append("(");
    long f0Value$289 = -1L;
    boolean f0IsNull$290 = _myInput.isNullAt(0);
    if (!f0IsNull$290) {
        f0Value$289 = _myInput.getLong(0);
        isNull$2 = f0IsNull$290;
        if (!isNull$2) {
            result$3 = org.apache.flink.table.data.binary.BinaryStringData.fromString("" + f0Value$289);
            isNull$2 = result$3 == null;
        } else {
            result$3 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
        }
        builder$287.append("a=" + result$3);
    } else {
        builder$287.append("a=" + "NULL");
    }
    builder$287.append(", ");
    long f1Value$291 = -1L;
    boolean f1IsNull$292 = _myInput.isNullAt(1);
    if (!f1IsNull$292) {
        f1Value$291 = _myInput.getLong(1);
        isNull$4 = f1IsNull$292;
        if (!isNull$4) {
            result$5 = org.apache.flink.table.data.binary.BinaryStringData.fromString("" + f1Value$291);
            isNull$4 = result$5 == null;
        } else {
            result$5 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
        }
        builder$287.append("b=" + result$5);
    } else {
        builder$287.append("b=" + "NULL");
    }
    builder$287.append(", ");
    org.apache.flink.table.data.binary.BinaryStringData f2Value$293 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
    boolean f2IsNull$294 = _myInput.isNullAt(2);
    if (!f2IsNull$294) {
        f2Value$293 = ((org.apache.flink.table.data.binary.BinaryStringData) _myInput.getString(2));
        builder$287.append("c=" + f2Value$293);
    } else {
        builder$287.append("c=" + "NULL");
    }
    builder$287.append(", ");
    org.apache.flink.table.data.ArrayData f3Value$295 = null;
    boolean f3IsNull$296 = _myInput.isNullAt(3);
    if (!f3IsNull$296) {
        f3Value$295 = _myInput.getArray(3);
        isNull$6 = f3IsNull$296;
        if (!isNull$6) {
            builder$297.setLength(0);
            builder$297.append("[");
            for (int i$299 = 0; i$299 < f3Value$295.size(); i$299++) {
                if (i$299 != 0) {
                    builder$297.append(", ");
                }
                org.apache.flink.table.data.binary.BinaryStringData element$300 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
                boolean elementIsNull$301 = f3Value$295.isNullAt(i$299);
                if (!elementIsNull$301) {
                    element$300 = ((org.apache.flink.table.data.binary.BinaryStringData) f3Value$295.getString(i$299));
                    builder$297.append(element$300);
                } else {
                    builder$297.append("NULL");
                }
            }
            builder$297.append("]");
            java.lang.String resultString$298;
            resultString$298 = builder$297.toString();
            result$7 = org.apache.flink.table.data.binary.BinaryStringData.fromString(resultString$298);
            isNull$6 = result$7 == null;
        } else {
            result$7 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
        }
        builder$287.append("d=" + result$7);
    } else {
        builder$287.append("d=" + "NULL");
    }
    builder$287.append(")");
    java.lang.String resultString$288;
    resultString$288 = builder$287.toString();
    result$1 = org.apache.flink.table.data.binary.BinaryStringData.fromString(resultString$288);

     */
@Override
protected String generateCodeBlockInternal(CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) {
    // Emits code that renders a structured value as "(name=value, ...)" by appending
    // each attribute's string cast to a shared StringBuilder class field.
    // See the example generated code in the comment above.
    StructuredType inputStructuredType = (StructuredType) inputLogicalType;
    // The builder is declared once as a class field and reset per invocation via setLength(0).
    final String builderTerm = newName("builder");
    context.declareClassField(className(StringBuilder.class), builderTerm, constructorCall(StringBuilder.class));
    final String resultStringTerm = newName("resultString");
    // Target length drives the trim/pad step for CHAR/VARCHAR targets at the end.
    final int length = LogicalTypeChecks.getLength(targetLogicalType);
    final CastRuleUtils.CodeWriter writer = new CastRuleUtils.CodeWriter().stmt(methodCall(builderTerm, "setLength", 0)).stmt(methodCall(builderTerm, "append", strLiteral("(")));
    for (int i = 0; i < inputStructuredType.getAttributes().size(); i++) {
        final int fieldIndex = i;
        final StructuredType.StructuredAttribute attribute = inputStructuredType.getAttributes().get(fieldIndex);
        // Per-attribute terms for the extracted field value and its null flag.
        final String fieldTerm = newName("f" + fieldIndex + "Value");
        final String fieldIsNullTerm = newName("f" + fieldIndex + "IsNull");
        // Delegates the attribute-to-string conversion to the rule matching the
        // attribute's type; nullability is handled here at the row-access level.
        final CastCodeBlock codeBlock = // Null check is done at the row access level
        CastRuleProvider.generateAlwaysNonNullCodeBlock(context, fieldTerm, attribute.getType(), VarCharType.STRING_TYPE);
        // Write the comma
        if (fieldIndex != 0) {
            writer.stmt(methodCall(builderTerm, "append", strLiteral(", ")));
        }
        // Emits: declare field term, read null flag, then either run the nested cast
        // and append "name=<cast result>", or append "name=<null literal>".
        writer.declPrimitiveStmt(attribute.getType(), fieldTerm).declStmt(boolean.class, fieldIsNullTerm, methodCall(inputTerm, "isNullAt", fieldIndex)).ifStmt("!" + fieldIsNullTerm, thenBodyWriter -> thenBodyWriter.assignStmt(fieldTerm, CodeGenUtils.rowFieldReadAccess(fieldIndex, inputTerm, attribute.getType())).append(codeBlock).stmt(methodCall(builderTerm, "append", strLiteral(attribute.getName() + "=") + " + " + codeBlock.getReturnTerm())), elseBodyWriter -> elseBodyWriter.stmt(methodCall(builderTerm, "append", strLiteral(attribute.getName() + "=") + " + " + nullLiteral(context.legacyBehaviour()))));
    }
    writer.stmt(methodCall(builderTerm, "append", strLiteral(")")));
    // Closes the rendered tuple, applies CHAR/VARCHAR trim/pad if required, and
    // assigns the final BinaryStringData to the return variable.
    return CharVarCharTrimPadCastRule.padAndTrimStringIfNeeded(writer, targetLogicalType, context.legacyBehaviour(), length, resultStringTerm, builderTerm).assignStmt(returnVariable, CastRuleUtils.staticCall(BINARY_STRING_DATA_FROM_STRING(), resultStringTerm)).toString();
}
Also used : StructuredType(org.apache.flink.table.types.logical.StructuredType)

Example 19 with StructuredType

use of org.apache.flink.table.types.logical.StructuredType in project flink by apache.

In the class CommonPythonUtil, the method extractDataViewSpecs:

/**
 * Extracts the {@link DataViewSpec}s contained in the given accumulator type.
 *
 * <p>Returns an empty array when the accumulator is not a composite type or contains
 * no data views. Only top-level ListView/MapView fields of a Row-typed accumulator are
 * supported; nested data views and non-Row accumulators raise a {@link TableException}.
 */
public static DataViewSpec[] extractDataViewSpecs(int index, DataType accType) {
    // Guard: only composite accumulator types can carry data views.
    if (!(accType instanceof FieldsDataType)) {
        return new DataViewSpec[0];
    }
    final FieldsDataType compositeAccType = (FieldsDataType) accType;
    if (!includesDataView(compositeAccType)) {
        return new DataViewSpec[0];
    }
    final LogicalType logicalType = compositeAccType.getLogicalType();
    if (!(logicalType instanceof RowType)) {
        throw new TableException("For Python AggregateFunction you can only use DataView in " + "Row type.");
    }
    final List<DataType> childrenDataTypes = compositeAccType.getChildren();
    return IntStream.range(0, childrenDataTypes.size()).mapToObj(i -> createDataViewSpec(index, (RowType) logicalType, childrenDataTypes.get(i), i)).filter(Objects::nonNull).toArray(DataViewSpec[]::new);
}

/**
 * Builds the spec for a single accumulator field, or returns {@code null} when the
 * field is not a data view.
 */
private static DataViewSpec createDataViewSpec(int index, RowType rowType, DataType childDataType, int i) {
    final LogicalType childLogicalType = childDataType.getLogicalType();
    if ((childLogicalType instanceof RowType) && includesDataView((FieldsDataType) childDataType)) {
        // Data views nested inside row fields of the accumulator are unsupported.
        throw new TableException("For Python AggregateFunction, DataView cannot be used in the" + " nested columns of the accumulator. ");
    }
    if (childLogicalType instanceof StructuredType) {
        if (ListView.class.isAssignableFrom(((StructuredType) childLogicalType).getImplementationClass().get())) {
            return new ListViewSpec("agg" + index + "$" + rowType.getFieldNames().get(i), i, childDataType.getChildren().get(0));
        }
        if (MapView.class.isAssignableFrom(((StructuredType) childLogicalType).getImplementationClass().get())) {
            return new MapViewSpec("agg" + index + "$" + rowType.getFieldNames().get(i), i, childDataType.getChildren().get(0), false);
        }
    }
    return null;
}
Also used : FieldsDataType(org.apache.flink.table.types.FieldsDataType) TableException(org.apache.flink.table.api.TableException) MapViewSpec(org.apache.flink.table.runtime.dataview.MapViewSpec) DataViewSpec(org.apache.flink.table.runtime.dataview.DataViewSpec) ListViewSpec(org.apache.flink.table.runtime.dataview.ListViewSpec) LogicalType(org.apache.flink.table.types.logical.LogicalType) RowType(org.apache.flink.table.types.logical.RowType) DataType(org.apache.flink.table.types.DataType) FieldsDataType(org.apache.flink.table.types.FieldsDataType) MapView(org.apache.flink.table.api.dataview.MapView) StructuredType(org.apache.flink.table.types.logical.StructuredType)

Aggregations

StructuredType (org.apache.flink.table.types.logical.StructuredType)19 DataType (org.apache.flink.table.types.DataType)11 FieldsDataType (org.apache.flink.table.types.FieldsDataType)9 BigIntType (org.apache.flink.table.types.logical.BigIntType)7 IntType (org.apache.flink.table.types.logical.IntType)7 StructuredAttribute (org.apache.flink.table.types.logical.StructuredType.StructuredAttribute)7 LogicalType (org.apache.flink.table.types.logical.LogicalType)6 BooleanType (org.apache.flink.table.types.logical.BooleanType)4 ArrayList (java.util.ArrayList)3 TableException (org.apache.flink.table.api.TableException)3 Test (org.junit.Test)3 LocalDateTime (java.time.LocalDateTime)2 List (java.util.List)2 IntStream (java.util.stream.IntStream)2 Internal (org.apache.flink.annotation.Internal)2 RowData (org.apache.flink.table.data.RowData)2 MapType (org.apache.flink.table.types.logical.MapType)2 RowType (org.apache.flink.table.types.logical.RowType)2 Field (java.lang.reflect.Field)1 Method (java.lang.reflect.Method)1