Example usage of org.apache.flink.table.types.logical.LogicalTypeRoot.ROW in the Apache Flink project: class DataTypeUtils, method stripRowPrefix.
/**
 * Removes a string prefix from the fields of the given row data type.
 */
public static DataType stripRowPrefix(DataType dataType, String prefix) {
    // Only row types carry named fields that can be renamed.
    Preconditions.checkArgument(dataType.getLogicalType().is(ROW), "Row data type expected.");
    final RowType rowType = (RowType) dataType.getLogicalType();
    // Strip the prefix from every matching field name; leave others untouched.
    final List<String> strippedNames =
            rowType.getFieldNames().stream()
                    .map(name -> name.startsWith(prefix) ? name.substring(prefix.length()) : name)
                    .collect(Collectors.toList());
    final LogicalType renamedType = LogicalTypeUtils.renameRowFields(rowType, strippedNames);
    // Preserve the conversion class and child data types of the original type.
    return new FieldsDataType(renamedType, dataType.getConversionClass(), dataType.getChildren());
}
Example usage of org.apache.flink.table.types.logical.LogicalTypeRoot.ROW in the Apache Flink project: class ValuesOperationFactory, method findCommonTypeAtPosition.
/**
 * Derives the common data type of column {@code i} across all resolved rows.
 *
 * @throws ValidationException if no common super type exists for the column
 */
private DataType findCommonTypeAtPosition(List<List<ResolvedExpression>> resolvedRows, int i) {
    final List<LogicalType> candidateTypes = extractLogicalTypesAtPosition(resolvedRows, i);
    final Optional<LogicalType> commonType = LogicalTypeMerging.findCommonType(candidateTypes);
    if (!commonType.isPresent()) {
        // Collect the distinct column types in encounter order for a readable error message.
        final Set<DataType> columnTypes =
                resolvedRows.stream()
                        .map(row -> row.get(i).getOutputDataType())
                        .collect(Collectors.toCollection(LinkedHashSet::new));
        throw new ValidationException(
                String.format(
                        "Types in fromValues(...) must have a common super type. Could not find a common type"
                                + " for all rows at column %d.\n"
                                + "Could not find a common super type for types: %s",
                        i,
                        columnTypes));
    }
    return TypeConversions.fromLogicalToDataType(commonType.get());
}
Example usage of org.apache.flink.table.types.logical.LogicalTypeRoot.ROW in the Apache Flink project: class ValuesOperationFactory, method create.
/**
 * Creates a valid {@link ValuesQueryOperation} operation.
 *
 * <p>It derives a row type based on {@link LogicalTypeMerging}. It flattens any row
 * constructors. It does not flatten ROWs which are a result of e.g. a function call.
 *
 * <p>The resulting schema can be provided manually. If it is not, the schema will be
 * automatically derived from the types of the expressions.
 */
QueryOperation create(
        @Nullable ResolvedSchema expectedSchema,
        List<ResolvedExpression> resolvedExpressions,
        ExpressionResolver.PostResolverFactory postResolverFactory) {
    final List<List<ResolvedExpression>> resolvedRows =
            unwrapFromRowConstructor(resolvedExpressions);
    if (expectedSchema != null) {
        // A user-provided schema must match the arity of every row.
        verifyAllSameSize(resolvedRows, expectedSchema.getColumnCount());
    }
    // Fall back to a schema derived from the expressions when none was provided.
    final ResolvedSchema schema;
    if (expectedSchema != null) {
        schema = expectedSchema;
    } else {
        schema = extractSchema(resolvedRows);
    }
    // Cast each row's expressions to the expected column types of the final schema.
    final List<List<ResolvedExpression>> castedExpressions =
            resolvedRows.stream()
                    .map(
                            row ->
                                    convertTopLevelExpressionToExpectedRowType(
                                            postResolverFactory, schema.getColumnDataTypes(), row))
                    .collect(Collectors.toList());
    return new ValuesQueryOperation(castedExpressions, schema);
}
Example usage of org.apache.flink.table.types.logical.LogicalTypeRoot.ROW in the Apache Flink project: class DataViewUtils, method extractDataViews.
/**
 * Searches for data views in the data type of an accumulator and extracts them.
 */
public static List<DataViewSpec> extractDataViews(int aggIndex, DataType accumulatorDataType) {
    final LogicalType accumulatorType = accumulatorDataType.getLogicalType();
    // Data views can only live inside a composite (row or structured) accumulator.
    if (!(accumulatorType.is(ROW) || accumulatorType.is(STRUCTURED_TYPE))) {
        return Collections.emptyList();
    }
    final List<String> fieldNames = getFieldNames(accumulatorType);
    final List<DataType> fieldDataTypes = accumulatorDataType.getChildren();
    final List<DataViewSpec> foundSpecs = new ArrayList<>();
    int pos = 0;
    for (final DataType fieldDataType : fieldDataTypes) {
        final LogicalType fieldType = fieldDataType.getLogicalType();
        if (isDataView(fieldType, ListView.class)) {
            foundSpecs.add(
                    new ListViewSpec(
                            createStateId(aggIndex, fieldNames.get(pos)),
                            pos,
                            fieldDataType.getChildren().get(0)));
        } else if (isDataView(fieldType, MapView.class)) {
            foundSpecs.add(
                    new MapViewSpec(
                            createStateId(aggIndex, fieldNames.get(pos)),
                            pos,
                            fieldDataType.getChildren().get(0),
                            false));
        }
        // Reject views that are buried deeper than the first level of the accumulator.
        final boolean hasNestedView =
                fieldType.getChildren().stream()
                        .anyMatch(child -> hasNested(child, t -> isDataView(t, DataView.class)));
        if (hasNestedView) {
            throw new TableException(
                    "Data views are only supported in the first level of a composite accumulator type.");
        }
        pos++;
    }
    return foundSpecs;
}
Aggregations