Usage of org.apache.flink.table.types.logical.StructuredType in the Apache Flink project:
class LogicalTypeJsonDeserializer, method deserializeStructuredTypeFromPlan.
/**
 * Reconstructs a {@link StructuredType} from its JSON plan representation.
 *
 * <p>Both the object identifier and the implementation class are optional in the plan, but
 * {@link StructuredType#newBuilder} requires at least one of them; if both are absent the
 * builder call below will fail, which mirrors the serializer's invariant.
 *
 * @param logicalTypeNode JSON node describing the structured type
 * @param serdeContext context used to resolve identifiers and load classes
 * @return the deserialized structured type
 */
private static LogicalType deserializeStructuredTypeFromPlan(JsonNode logicalTypeNode, SerdeContext serdeContext) {
    final ObjectIdentifier identifier;
    if (logicalTypeNode.has(FIELD_NAME_OBJECT_IDENTIFIER)) {
        identifier = ObjectIdentifierJsonDeserializer.deserialize(
                logicalTypeNode.get(FIELD_NAME_OBJECT_IDENTIFIER).asText(), serdeContext);
    } else {
        identifier = null;
    }
    final Class<?> implementationClass;
    if (logicalTypeNode.has(FIELD_NAME_IMPLEMENTATION_CLASS)) {
        implementationClass = loadClass(
                logicalTypeNode.get(FIELD_NAME_IMPLEMENTATION_CLASS).asText(), serdeContext, "structured type");
    } else {
        implementationClass = null;
    }
    final StructuredType.Builder builder;
    if (identifier != null && implementationClass != null) {
        builder = StructuredType.newBuilder(identifier, implementationClass);
    } else if (identifier != null) {
        builder = StructuredType.newBuilder(identifier);
    } else {
        builder = StructuredType.newBuilder(implementationClass);
    }
    if (logicalTypeNode.has(FIELD_NAME_DESCRIPTION)) {
        // BUGFIX: the original read FIELD_NAME_FIELD_DESCRIPTION here while the guard checked
        // FIELD_NAME_DESCRIPTION, so a present description was looked up under the wrong key
        // (NPE or wrong value). Guard and read must use the same constant.
        builder.description(logicalTypeNode.get(FIELD_NAME_DESCRIPTION).asText());
    }
    // Attributes are mandatory in the plan; each one carries a name, a type, and an
    // optional description.
    final ArrayNode attributeNodes = (ArrayNode) logicalTypeNode.get(FIELD_NAME_ATTRIBUTES);
    final List<StructuredAttribute> attributes = new ArrayList<>();
    for (JsonNode attributeNode : attributeNodes) {
        final String attributeName = attributeNode.get(FIELD_NAME_ATTRIBUTE_NAME).asText();
        final LogicalType attributeType =
                deserialize(attributeNode.get(FIELD_NAME_ATTRIBUTE_TYPE), serdeContext);
        final String attributeDescription;
        if (attributeNode.has(FIELD_NAME_ATTRIBUTE_DESCRIPTION)) {
            attributeDescription = attributeNode.get(FIELD_NAME_ATTRIBUTE_DESCRIPTION).asText();
        } else {
            attributeDescription = null;
        }
        attributes.add(new StructuredAttribute(attributeName, attributeType, attributeDescription));
    }
    builder.attributes(attributes);
    // The remaining properties are optional flags; absent fields keep the builder defaults.
    if (logicalTypeNode.has(FIELD_NAME_FINAL)) {
        builder.setFinal(logicalTypeNode.get(FIELD_NAME_FINAL).asBoolean());
    }
    if (logicalTypeNode.has(FIELD_NAME_INSTANTIABLE)) {
        builder.setInstantiable(logicalTypeNode.get(FIELD_NAME_INSTANTIABLE).asBoolean());
    }
    if (logicalTypeNode.has(FIELD_NAME_COMPARISON)) {
        builder.comparison(StructuredComparison.valueOf(logicalTypeNode.get(FIELD_NAME_COMPARISON).asText()));
    }
    if (logicalTypeNode.has(FIELD_NAME_SUPER_TYPE)) {
        // Super types are themselves structured types and are deserialized recursively.
        final StructuredType superType =
                (StructuredType) deserialize(logicalTypeNode.get(FIELD_NAME_SUPER_TYPE), serdeContext);
        builder.superType(superType);
    }
    return builder.build();
}
Usage of org.apache.flink.table.types.logical.StructuredType in the Apache Flink project:
class LogicalTypeUtils, method toRowType.
/**
 * Converts any logical type to a {@link RowType}.
 *
 * <p>Rows pass through unchanged, structured types are flattened into row fields, distinct
 * types delegate to their source type, and any other (atomic) type is wrapped as the single
 * field of a new row.
 *
 * @param t the logical type to convert
 * @return an equivalent row type
 */
public static RowType toRowType(LogicalType t) {
    switch (t.getTypeRoot()) {
        case ROW:
            // Already a row; no conversion needed.
            return (RowType) t;
        case STRUCTURED_TYPE:
            final StructuredType structured = (StructuredType) t;
            // Each structured attribute maps 1:1 onto a row field, keeping the optional
            // description (null when absent).
            final List<RowField> fields =
                    structured.getAttributes().stream()
                            .map(attribute -> {
                                final String description = attribute.getDescription().orElse(null);
                                return new RowField(attribute.getName(), attribute.getType(), description);
                            })
                            .collect(Collectors.toList());
            // Nullability of the structured type carries over to the resulting row.
            return new RowType(structured.isNullable(), fields);
        case DISTINCT_TYPE:
            // Unwrap the distinct type and convert its source type instead.
            return toRowType(((DistinctType) t).getSourceType());
        default:
            // Atomic type: wrap it into a single-field row.
            return RowType.of(t);
    }
}
Usage of org.apache.flink.table.types.logical.StructuredType in the Apache Flink project:
class StructuredToStringCastRule, method generateCodeBlockInternal.
/* Example generated code for MyStructuredType in CastRulesTest:
builder$287.setLength(0);
builder$287.append("(");
long f0Value$289 = -1L;
boolean f0IsNull$290 = _myInput.isNullAt(0);
if (!f0IsNull$290) {
f0Value$289 = _myInput.getLong(0);
isNull$2 = f0IsNull$290;
if (!isNull$2) {
result$3 = org.apache.flink.table.data.binary.BinaryStringData.fromString("" + f0Value$289);
isNull$2 = result$3 == null;
} else {
result$3 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
}
builder$287.append("a=" + result$3);
} else {
builder$287.append("a=" + "NULL");
}
builder$287.append(", ");
long f1Value$291 = -1L;
boolean f1IsNull$292 = _myInput.isNullAt(1);
if (!f1IsNull$292) {
f1Value$291 = _myInput.getLong(1);
isNull$4 = f1IsNull$292;
if (!isNull$4) {
result$5 = org.apache.flink.table.data.binary.BinaryStringData.fromString("" + f1Value$291);
isNull$4 = result$5 == null;
} else {
result$5 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
}
builder$287.append("b=" + result$5);
} else {
builder$287.append("b=" + "NULL");
}
builder$287.append(", ");
org.apache.flink.table.data.binary.BinaryStringData f2Value$293 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
boolean f2IsNull$294 = _myInput.isNullAt(2);
if (!f2IsNull$294) {
f2Value$293 = ((org.apache.flink.table.data.binary.BinaryStringData) _myInput.getString(2));
builder$287.append("c=" + f2Value$293);
} else {
builder$287.append("c=" + "NULL");
}
builder$287.append(", ");
org.apache.flink.table.data.ArrayData f3Value$295 = null;
boolean f3IsNull$296 = _myInput.isNullAt(3);
if (!f3IsNull$296) {
f3Value$295 = _myInput.getArray(3);
isNull$6 = f3IsNull$296;
if (!isNull$6) {
builder$297.setLength(0);
builder$297.append("[");
for (int i$299 = 0; i$299 < f3Value$295.size(); i$299++) {
if (i$299 != 0) {
builder$297.append(", ");
}
org.apache.flink.table.data.binary.BinaryStringData element$300 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
boolean elementIsNull$301 = f3Value$295.isNullAt(i$299);
if (!elementIsNull$301) {
element$300 = ((org.apache.flink.table.data.binary.BinaryStringData) f3Value$295.getString(i$299));
builder$297.append(element$300);
} else {
builder$297.append("NULL");
}
}
builder$297.append("]");
java.lang.String resultString$298;
resultString$298 = builder$297.toString();
result$7 = org.apache.flink.table.data.binary.BinaryStringData.fromString(resultString$298);
isNull$6 = result$7 == null;
} else {
result$7 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
}
builder$287.append("d=" + result$7);
} else {
builder$287.append("d=" + "NULL");
}
builder$287.append(")");
java.lang.String resultString$288;
resultString$288 = builder$287.toString();
result$1 = org.apache.flink.table.data.binary.BinaryStringData.fromString(resultString$288);
*/
@Override
// Generates Java code that casts a structured-type value to a string of the form
// "(a=<v0>, b=<v1>, ...)" — see the example output in the comment above. The generated
// code appends each attribute to a class-level StringBuilder, delegating the per-field
// value-to-string conversion to the cast rule of that field's type.
protected String generateCodeBlockInternal(CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) {
StructuredType inputStructuredType = (StructuredType) inputLogicalType;
// The StringBuilder is declared as a class field (not a local) so it can be reused
// across invocations of the generated code; setLength(0) below resets it each call.
final String builderTerm = newName("builder");
context.declareClassField(className(StringBuilder.class), builderTerm, constructorCall(StringBuilder.class));
final String resultStringTerm = newName("resultString");
// Target length matters when casting to CHAR/VARCHAR(n): the result may need
// trimming/padding (handled by CharVarCharTrimPadCastRule at the end).
final int length = LogicalTypeChecks.getLength(targetLogicalType);
final CastRuleUtils.CodeWriter writer = new CastRuleUtils.CodeWriter().stmt(methodCall(builderTerm, "setLength", 0)).stmt(methodCall(builderTerm, "append", strLiteral("(")));
for (int i = 0; i < inputStructuredType.getAttributes().size(); i++) {
final int fieldIndex = i;
final StructuredType.StructuredAttribute attribute = inputStructuredType.getAttributes().get(fieldIndex);
// Fresh, collision-free names for the generated per-field value and null-flag locals.
final String fieldTerm = newName("f" + fieldIndex + "Value");
final String fieldIsNullTerm = newName("f" + fieldIndex + "IsNull");
final CastCodeBlock codeBlock = // Null check is done at the row access level
CastRuleProvider.generateAlwaysNonNullCodeBlock(context, fieldTerm, attribute.getType(), VarCharType.STRING_TYPE);
// Write the comma
if (fieldIndex != 0) {
writer.stmt(methodCall(builderTerm, "append", strLiteral(", ")));
}
// Generated shape per field:
//   <type> fNValue = <default>; boolean fNIsNull = input.isNullAt(N);
//   if (!fNIsNull) { fNValue = <read field>; <nested cast>; builder.append("name=" + <cast result>); }
//   else           { builder.append("name=" + <null literal>); }
writer.declPrimitiveStmt(attribute.getType(), fieldTerm).declStmt(boolean.class, fieldIsNullTerm, methodCall(inputTerm, "isNullAt", fieldIndex)).ifStmt("!" + fieldIsNullTerm, thenBodyWriter -> thenBodyWriter.assignStmt(fieldTerm, CodeGenUtils.rowFieldReadAccess(fieldIndex, inputTerm, attribute.getType())).append(codeBlock).stmt(methodCall(builderTerm, "append", strLiteral(attribute.getName() + "=") + " + " + codeBlock.getReturnTerm())), elseBodyWriter -> elseBodyWriter.stmt(methodCall(builderTerm, "append", strLiteral(attribute.getName() + "=") + " + " + nullLiteral(context.legacyBehaviour()))));
}
writer.stmt(methodCall(builderTerm, "append", strLiteral(")")));
// Materialize the builder into a String, apply CHAR/VARCHAR trim/pad semantics if the
// target requires it, and assign the final StringData to the return variable.
return CharVarCharTrimPadCastRule.padAndTrimStringIfNeeded(writer, targetLogicalType, context.legacyBehaviour(), length, resultStringTerm, builderTerm).assignStmt(returnVariable, CastRuleUtils.staticCall(BINARY_STRING_DATA_FROM_STRING(), resultStringTerm)).toString();
}
Usage of org.apache.flink.table.types.logical.StructuredType in the Apache Flink project:
class CommonPythonUtil, method extractDataViewSpecs.
/**
 * Extracts the {@link DataViewSpec}s ({@code ListView}/{@code MapView} columns) declared in a
 * Python aggregate function's accumulator type.
 *
 * <p>Only a top-level {@code RowType} accumulator may carry data views; nested rows containing
 * views are rejected. Returns an empty array when the accumulator is not composite or contains
 * no views.
 *
 * @param index index of the aggregate, used to build unique spec names ("agg&lt;index&gt;$&lt;field&gt;")
 * @param accType the accumulator data type
 * @return the extracted data view specs, possibly empty
 * @throws TableException if a view is nested or the accumulator is not a row type
 */
public static DataViewSpec[] extractDataViewSpecs(int index, DataType accType) {
    if (!(accType instanceof FieldsDataType)) {
        return new DataViewSpec[0];
    }
    FieldsDataType compositeAccType = (FieldsDataType) accType;
    if (!includesDataView(compositeAccType)) {
        return new DataViewSpec[0];
    }
    LogicalType logicalType = compositeAccType.getLogicalType();
    if (!(logicalType instanceof RowType)) {
        throw new TableException("For Python AggregateFunction you can only use DataView in " + "Row type.");
    }
    List<DataType> childrenDataTypes = compositeAccType.getChildren();
    return IntStream.range(0, childrenDataTypes.size()).mapToObj(i -> {
        DataType childDataType = childrenDataTypes.get(i);
        LogicalType childLogicalType = childDataType.getLogicalType();
        if ((childLogicalType instanceof RowType) && includesDataView((FieldsDataType) childDataType)) {
            throw new TableException("For Python AggregateFunction, DataView cannot be used in the" + " nested columns of the accumulator. ");
        } else if (isViewOfType(childLogicalType, ListView.class)) {
            // ListView wraps a single element type (child 0).
            return new ListViewSpec("agg" + index + "$" + ((RowType) logicalType).getFieldNames().get(i), i, childDataType.getChildren().get(0));
        } else if (isViewOfType(childLogicalType, MapView.class)) {
            // MapView: child 0 is the KEY_VALUE map type; nullSerializer flag is false.
            return new MapViewSpec("agg" + index + "$" + ((RowType) logicalType).getFieldNames().get(i), i, childDataType.getChildren().get(0), false);
        }
        return null;
    }).filter(Objects::nonNull).toArray(DataViewSpec[]::new);
}

/**
 * Returns whether {@code type} is a structured type whose implementation class is assignable to
 * {@code viewClass}.
 *
 * <p>ROBUSTNESS FIX: the original called {@code getImplementationClass().get()} unchecked,
 * which throws {@link java.util.NoSuchElementException} for structured types declared without
 * an implementation class; such types are simply not views.
 */
private static boolean isViewOfType(LogicalType type, Class<?> viewClass) {
    return type instanceof StructuredType
            && ((StructuredType) type)
                    .getImplementationClass()
                    .map(viewClass::isAssignableFrom)
                    .orElse(false);
}
Aggregations