Usage example of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
Class ExpressionConverter, method visit.
/**
 * Converts a resolved {@link ValueLiteralExpression} into a Calcite {@link RexNode} literal.
 *
 * <p>Null literals are emitted directly as typed NULLs. Non-null values are first translated
 * into the representation Calcite's {@code RexBuilder} expects for the literal's logical type
 * (e.g. {@code BigDecimal} for numerics, {@code ByteString} for binary, {@code DateString} /
 * {@code TimeString} for temporal types) before the literal is created.
 */
@Override
public RexNode visit(ValueLiteralExpression valueLiteral) {
    final LogicalType literalType = fromDataTypeToLogicalType(valueLiteral.getOutputDataType());
    final FlinkTypeFactory typeFactory = (FlinkTypeFactory) relBuilder.getTypeFactory();
    final RexBuilder rexBuilder = relBuilder.getRexBuilder();
    RelDataType calciteType = typeFactory.createFieldTypeFromLogicalType(literalType);
    if (valueLiteral.isNull()) {
        return rexBuilder.makeNullLiteral(calciteType);
    }
    Object calciteValue;
    switch (literalType.getTypeRoot()) {
        case DECIMAL:
        case TINYINT:
        case SMALLINT:
        case INTEGER:
        case BIGINT:
        case FLOAT:
        case DOUBLE:
            // All numeric literals are handed to Calcite as BigDecimal.
            calciteValue = extractValue(valueLiteral, BigDecimal.class);
            break;
        case VARCHAR:
        case CHAR:
            calciteValue = extractValue(valueLiteral, String.class);
            break;
        case BINARY:
        case VARBINARY:
            calciteValue = new ByteString(extractValue(valueLiteral, byte[].class));
            break;
        case INTERVAL_YEAR_MONTH:
            // Year-month intervals are represented as the total number of months.
            calciteValue =
                    BigDecimal.valueOf(extractValue(valueLiteral, Period.class).toTotalMonths());
            break;
        case INTERVAL_DAY_TIME:
            // TODO planner supports only milliseconds precision
            // Day-time intervals are represented as the total number of milliseconds.
            calciteValue =
                    BigDecimal.valueOf(extractValue(valueLiteral, Duration.class).toMillis());
            break;
        case DATE:
            calciteValue =
                    DateString.fromDaysSinceEpoch(
                            (int) extractValue(valueLiteral, LocalDate.class).toEpochDay());
            break;
        case TIME_WITHOUT_TIME_ZONE:
            // TODO the type factory strips the precision; for literals we can already be more
            // lenient. Conversion from long supports precision up to TIME(3) and the planner
            // does not support higher precisions, so the target type is capped at TIME(3).
            final TimeType timeType = (TimeType) literalType;
            final int cappedPrecision = Math.min(timeType.getPrecision(), 3);
            calciteType = typeFactory.createSqlType(SqlTypeName.TIME, cappedPrecision);
            calciteValue =
                    TimeString.fromMillisOfDay(
                            extractValue(valueLiteral, LocalTime.class)
                                    .get(ChronoField.MILLI_OF_DAY));
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            calciteValue = fromLocalDateTime(extractValue(valueLiteral, LocalDateTime.class));
            break;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            // Normalize the instant to UTC before building the timestamp string.
            final Instant instant = extractValue(valueLiteral, Instant.class);
            calciteValue = fromLocalDateTime(instant.atOffset(ZoneOffset.UTC).toLocalDateTime());
            break;
        default:
            calciteValue = extractValue(valueLiteral, Object.class);
            // Symbol literals must be mapped to their Calcite counterparts.
            if (calciteValue instanceof TimePointUnit) {
                calciteValue = commonToCalcite((TimePointUnit) calciteValue);
            } else if (calciteValue instanceof TimeIntervalUnit) {
                calciteValue = commonToCalcite((TimeIntervalUnit) calciteValue);
            }
            break;
    }
    // allowCast = true, see also RexBuilder#makeCast.
    return rexBuilder.makeLiteral(calciteValue, calciteType, true);
}
Usage example of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
Class ArrayToArrayCastRule, method generateCodeBlockInternal.
/**
 * Generates the code that casts an array of the input element type into an array of the target
 * element type, element by element, collecting the results into a {@code GenericArrayData}.
 */
@SuppressWarnings("rawtypes")
@Override
protected String generateCodeBlockInternal(
        CodeGeneratorCastRule.Context context,
        String inputTerm,
        String returnVariable,
        LogicalType inputLogicalType,
        LogicalType targetLogicalType) {
    final LogicalType sourceElementType = ((ArrayType) inputLogicalType).getElementType();
    final LogicalType targetElementType = ((ArrayType) targetLogicalType).getElementType();
    final String targetElementTypeTerm = arrayElementType(targetElementType);
    final String sizeTerm = methodCall(inputTerm, "size");
    final String resultArrayTerm = newName("objArray");

    final CastRuleUtils.CodeWriter writer = new CastRuleUtils.CodeWriter();
    writer.declStmt(
            targetElementTypeTerm + "[]",
            resultArrayTerm,
            newArray(targetElementTypeTerm, sizeTerm));
    writer.forStmt(
            sizeTerm,
            (index, loopWriter) -> {
                // The null check happens at the array access level below, so the nested
                // cast code block never has to deal with null input.
                final CastCodeBlock elementCast =
                        CastRuleProvider.generateAlwaysNonNullCodeBlock(
                                context,
                                rowFieldReadAccess(index, inputTerm, sourceElementType),
                                sourceElementType,
                                targetElementType);
                if (targetElementType.isNullable()) {
                    loopWriter.ifStmt(
                            "!" + methodCall(inputTerm, "isNullAt", index),
                            thenWriter ->
                                    thenWriter
                                            .append(elementCast)
                                            .assignArrayStmt(
                                                    resultArrayTerm,
                                                    index,
                                                    elementCast.getReturnTerm()));
                } else {
                    loopWriter
                            .append(elementCast)
                            .assignArrayStmt(resultArrayTerm, index, elementCast.getReturnTerm());
                }
            });
    writer.assignStmt(returnVariable, constructorCall(GenericArrayData.class, resultArrayTerm));
    return writer.toString();
}
Usage example of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
Class EqualiserCodeGeneratorTest, method testRaw.
@Test
public void testRaw() {
    // Generate an equaliser for a single RAW column backed by TypeInformation.
    RecordEqualiser equaliser =
            new EqualiserCodeGenerator(
                            new LogicalType[] {new TypeInformationRawType<>(Types.INT)})
                    .generateRecordEqualiser("RAW")
                    .newInstance(Thread.currentThread().getContextClassLoader());
    // Wraps a raw value into a single-field binary row for comparison.
    Function<RawValueData<?>, BinaryRowData> toRow =
            rawValue -> {
                BinaryRowData result = new BinaryRowData(1);
                BinaryRowWriter rowWriter = new BinaryRowWriter(result);
                rowWriter.writeRawValue(
                        0, rawValue, new RawValueDataSerializer<>(IntSerializer.INSTANCE));
                rowWriter.complete();
                return result;
            };
    // Equal raw values must compare equal; differing values must not.
    assertBoolean(equaliser, toRow, RawValueData.fromObject(1), RawValueData.fromObject(1), true);
    assertBoolean(equaliser, toRow, RawValueData.fromObject(1), RawValueData.fromObject(2), false);
}
Usage example of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
Class SortCodeGeneratorTest, method testOneKey.
/**
 * Runs 100 randomized rounds: each round builds a row type with 1-9 randomly chosen field
 * types, sorts on the first field with a random order, and delegates to {@code testInner()}.
 */
@Test
public void testOneKey() throws Exception {
    // Use one shared RNG for all rounds. Creating a new Random() inside the loop is
    // wasteful and, since the default seed is time-based, instances created in quick
    // succession can end up with correlated seeds.
    Random rnd = new Random();
    for (int time = 0; time < 100; time++) {
        LogicalType[] fields = new LogicalType[rnd.nextInt(9) + 1];
        for (int i = 0; i < fields.length; i++) {
            fields[i] = types[rnd.nextInt(types.length)];
        }
        inputType = RowType.of(fields);
        SortSpec.SortSpecBuilder builder = SortSpec.builder();
        // Sort on field 0 only, with a random order and the matching default null order.
        boolean order = rnd.nextBoolean();
        builder.addField(0, order, SortUtil.getNullDefaultOrder(order));
        sortSpec = builder.build();
        testInner();
    }
}
Usage example of org.apache.flink.table.types.logical.LogicalType in the Apache Flink project.
Class OperationConverterUtils, method toTableColumn.
/**
 * Converts a parsed {@link SqlTableColumn} into a physical {@link TableColumn}.
 *
 * <p>Only regular (physical) columns are supported; computed/metadata columns are rejected.
 * When the DDL does not specify nullability ({@code getNullable() == null}), the column
 * defaults to nullable.
 *
 * @param tableColumn the parsed column definition
 * @param sqlValidator validator used to derive the column's relational type
 * @return the physical table column with its derived data type
 * @throws TableException if the column is not a regular column
 */
private static TableColumn toTableColumn(SqlTableColumn tableColumn, SqlValidator sqlValidator) {
    if (!(tableColumn instanceof SqlRegularColumn)) {
        throw new TableException("Only regular columns are supported for this operation yet.");
    }
    SqlRegularColumn regularColumn = (SqlRegularColumn) tableColumn;
    String name = regularColumn.getName().getSimple();
    SqlDataTypeSpec typeSpec = regularColumn.getType();
    // Nullable by default: "x == null ? true : x" simplifies to "x == null || x".
    boolean nullable = typeSpec.getNullable() == null || typeSpec.getNullable();
    LogicalType logicalType =
            FlinkTypeFactory.toLogicalType(typeSpec.deriveType(sqlValidator, nullable));
    DataType dataType = TypeConversions.fromLogicalToDataType(logicalType);
    return TableColumn.physical(name, dataType);
}
Aggregations