Search in sources :

Example 91 with LogicalType

use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

Source: class ExpressionConverter, method visit.

/**
 * Converts a {@link ValueLiteralExpression} into a Calcite {@link RexNode} literal.
 *
 * <p>The literal's Flink {@code DataType} is first converted to a {@code LogicalType} and then
 * to a Calcite {@code RelDataType}. Null literals short-circuit to a typed null. Otherwise the
 * literal value is extracted into the Java representation Calcite expects for that type root
 * before being handed to {@link RexBuilder#makeLiteral}.
 *
 * @param valueLiteral the literal expression to convert
 * @return the equivalent Calcite literal node
 */
@Override
public RexNode visit(ValueLiteralExpression valueLiteral) {
    LogicalType type = fromDataTypeToLogicalType(valueLiteral.getOutputDataType());
    RexBuilder rexBuilder = relBuilder.getRexBuilder();
    FlinkTypeFactory typeFactory = (FlinkTypeFactory) relBuilder.getTypeFactory();
    RelDataType relDataType = typeFactory.createFieldTypeFromLogicalType(type);
    if (valueLiteral.isNull()) {
        // A null literal carries only its type; no value extraction needed.
        return rexBuilder.makeNullLiteral(relDataType);
    }
    Object value = null;
    switch(type.getTypeRoot()) {
        // All numeric roots are normalized to BigDecimal, Calcite's canonical numeric literal.
        case DECIMAL:
        case TINYINT:
        case SMALLINT:
        case INTEGER:
        case BIGINT:
        case FLOAT:
        case DOUBLE:
            value = extractValue(valueLiteral, BigDecimal.class);
            break;
        case VARCHAR:
        case CHAR:
            value = extractValue(valueLiteral, String.class);
            break;
        case BINARY:
        case VARBINARY:
            // Calcite represents binary literals as ByteString, not byte[].
            value = new ByteString(extractValue(valueLiteral, byte[].class));
            break;
        case INTERVAL_YEAR_MONTH:
            // convert to total months
            value = BigDecimal.valueOf(extractValue(valueLiteral, Period.class).toTotalMonths());
            break;
        case INTERVAL_DAY_TIME:
            // TODO planner supports only milliseconds precision
            // convert to total millis
            value = BigDecimal.valueOf(extractValue(valueLiteral, Duration.class).toMillis());
            break;
        case DATE:
            // Calcite DateString is built from days since the epoch.
            value = DateString.fromDaysSinceEpoch((int) extractValue(valueLiteral, LocalDate.class).toEpochDay());
            break;
        case TIME_WITHOUT_TIME_ZONE:
            // TODO type factory strips the precision, for literals we can be more lenient
            // already
            // Moreover conversion from long supports precision up to TIME(3) planner does not
            // support higher
            // precisions
            TimeType timeType = (TimeType) type;
            int precision = timeType.getPrecision();
            // Re-derive the Rel type here, capping the precision at 3 (milliseconds).
            relDataType = typeFactory.createSqlType(SqlTypeName.TIME, Math.min(precision, 3));
            value = TimeString.fromMillisOfDay(extractValue(valueLiteral, LocalTime.class).get(ChronoField.MILLI_OF_DAY));
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            LocalDateTime datetime = extractValue(valueLiteral, LocalDateTime.class);
            value = fromLocalDateTime(datetime);
            break;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            // normalize to UTC
            Instant instant = extractValue(valueLiteral, Instant.class);
            value = fromLocalDateTime(instant.atOffset(ZoneOffset.UTC).toLocalDateTime());
            break;
        default:
            // Fall back to the raw object; symbol enums are mapped to their Calcite counterparts.
            value = extractValue(valueLiteral, Object.class);
            if (value instanceof TimePointUnit) {
                value = commonToCalcite((TimePointUnit) value);
            } else if (value instanceof TimeIntervalUnit) {
                value = commonToCalcite((TimeIntervalUnit) value);
            }
            break;
    }
    // allowCast = true lets Calcite insert a cast if the value representation
    // does not exactly match relDataType.
    return rexBuilder.makeLiteral(value, relDataType, // RexBuilder#makeCast.
    true);
}
Also used : LocalDateTime(java.time.LocalDateTime) TimestampStringUtils.fromLocalDateTime(org.apache.flink.table.planner.utils.TimestampStringUtils.fromLocalDateTime) TimeIntervalUnit(org.apache.flink.table.expressions.TimeIntervalUnit) LocalTime(java.time.LocalTime) ByteString(org.apache.calcite.avatica.util.ByteString) Instant(java.time.Instant) LogicalTypeDataTypeConverter.fromDataTypeToLogicalType(org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType) LogicalType(org.apache.flink.table.types.logical.LogicalType) Period(java.time.Period) RelDataType(org.apache.calcite.rel.type.RelDataType) Duration(java.time.Duration) TimeString(org.apache.calcite.util.TimeString) DateString(org.apache.calcite.util.DateString) ByteString(org.apache.calcite.avatica.util.ByteString) BigDecimal(java.math.BigDecimal) TimeType(org.apache.flink.table.types.logical.TimeType) TimePointUnit(org.apache.flink.table.expressions.TimePointUnit) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RexBuilder(org.apache.calcite.rex.RexBuilder)

Example 92 with LogicalType

use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

Source: class ArrayToArrayCastRule, method generateCodeBlockInternal.

/**
 * Generates the Java code block that casts an input array to an array of the target element
 * type.
 *
 * <p>The generated code allocates a new array of the target element type, loops over the input,
 * casts each element via the element-level cast rule, and finally wraps the result in a
 * {@code GenericArrayData} assigned to {@code returnVariable}.
 *
 * @param context code generation context shared across cast rules
 * @param inputTerm generated-code term holding the input array
 * @param returnVariable generated-code variable receiving the cast result
 * @param inputLogicalType the source type; must be an {@code ArrayType}
 * @param targetLogicalType the target type; must be an {@code ArrayType}
 * @return the generated Java code as a string
 */
@SuppressWarnings("rawtypes")
@Override
protected String generateCodeBlockInternal(CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) {
    // Element-level cast is delegated per entry; only the element types matter here.
    final LogicalType innerInputType = ((ArrayType) inputLogicalType).getElementType();
    final LogicalType innerTargetType = ((ArrayType) targetLogicalType).getElementType();
    final String innerTargetTypeTerm = arrayElementType(innerTargetType);
    final String arraySize = methodCall(inputTerm, "size");
    final String objArrayTerm = newName("objArray");
    // Emits: declare target array; for each index, cast the element and store it;
    // then assign the wrapped GenericArrayData to the return variable.
    return new CastRuleUtils.CodeWriter().declStmt(innerTargetTypeTerm + "[]", objArrayTerm, newArray(innerTargetTypeTerm, arraySize)).forStmt(arraySize, (index, loopWriter) -> {
        CastCodeBlock codeBlock = // Null check is done at the array access level
        CastRuleProvider.generateAlwaysNonNullCodeBlock(context, rowFieldReadAccess(index, inputTerm, innerInputType), innerInputType, innerTargetType);
        if (innerTargetType.isNullable()) {
            // Guard the element cast with an isNullAt check; null slots are left unset.
            loopWriter.ifStmt("!" + methodCall(inputTerm, "isNullAt", index), thenWriter -> thenWriter.append(codeBlock).assignArrayStmt(objArrayTerm, index, codeBlock.getReturnTerm()));
        } else {
            loopWriter.append(codeBlock).assignArrayStmt(objArrayTerm, index, codeBlock.getReturnTerm());
        }
    }).assignStmt(returnVariable, constructorCall(GenericArrayData.class, objArrayTerm)).toString();
}
Also used : ArrayType(org.apache.flink.table.types.logical.ArrayType) LogicalType(org.apache.flink.table.types.logical.LogicalType)

Example 93 with LogicalType

use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

Source: class EqualiserCodeGeneratorTest, method testRaw.

/**
 * Verifies that a generated {@code RecordEqualiser} compares RAW-typed fields by value:
 * equal raw values are reported equal, distinct ones are not.
 */
@Test
public void testRaw() {
    final RecordEqualiser recordEqualiser =
            new EqualiserCodeGenerator(new LogicalType[] { new TypeInformationRawType<>(Types.INT) })
                    .generateRecordEqualiser("RAW")
                    .newInstance(Thread.currentThread().getContextClassLoader());
    // Packs a single raw value into a one-field binary row.
    final Function<RawValueData<?>, BinaryRowData> toBinaryRow = rawValue -> {
        final BinaryRowData binaryRow = new BinaryRowData(1);
        final BinaryRowWriter rowWriter = new BinaryRowWriter(binaryRow);
        rowWriter.writeRawValue(0, rawValue, new RawValueDataSerializer<>(IntSerializer.INSTANCE));
        rowWriter.complete();
        return binaryRow;
    };
    // Same underlying value -> equal; different values -> not equal.
    assertBoolean(recordEqualiser, toBinaryRow, RawValueData.fromObject(1), RawValueData.fromObject(1), true);
    assertBoolean(recordEqualiser, toBinaryRow, RawValueData.fromObject(1), RawValueData.fromObject(2), false);
}
Also used : Types(org.apache.flink.api.common.typeinfo.Types) TypeInformationRawType(org.apache.flink.table.types.logical.TypeInformationRawType) IntStream(java.util.stream.IntStream) RecordEqualiser(org.apache.flink.table.runtime.generated.RecordEqualiser) TimestampData(org.apache.flink.table.data.TimestampData) Assert.assertTrue(org.junit.Assert.assertTrue) VarCharType(org.apache.flink.table.types.logical.VarCharType) Test(org.junit.Test) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) Function(java.util.function.Function) TimestampData.fromEpochMillis(org.apache.flink.table.data.TimestampData.fromEpochMillis) IntSerializer(org.apache.flink.api.common.typeutils.base.IntSerializer) StringData(org.apache.flink.table.data.StringData) BinaryRowWriter(org.apache.flink.table.data.writer.BinaryRowWriter) TimestampType(org.apache.flink.table.types.logical.TimestampType) RawValueDataSerializer(org.apache.flink.table.runtime.typeutils.RawValueDataSerializer) GenericRowData(org.apache.flink.table.data.GenericRowData) LogicalType(org.apache.flink.table.types.logical.LogicalType) RawValueData(org.apache.flink.table.data.RawValueData) Assert(org.junit.Assert) RecordEqualiser(org.apache.flink.table.runtime.generated.RecordEqualiser) RawValueDataSerializer(org.apache.flink.table.runtime.typeutils.RawValueDataSerializer) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) BinaryRowWriter(org.apache.flink.table.data.writer.BinaryRowWriter) RawValueData(org.apache.flink.table.data.RawValueData) Test(org.junit.Test)

Example 94 with LogicalType

use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

Source: class SortCodeGeneratorTest, method testOneKey.

/**
 * Runs 100 rounds of single-key sort testing, each with a randomly shaped row type
 * (1-9 fields drawn from the supported type pool) and a random sort order.
 *
 * @throws Exception if code generation or the inner sort test fails
 */
@Test
public void testOneKey() throws Exception {
    // One Random for all iterations: constructing a time-seeded Random inside the
    // loop is wasteful and can yield correlated sequences across fast iterations.
    final Random rnd = new Random();
    for (int time = 0; time < 100; time++) {
        // Build a row type with 1..9 randomly chosen field types.
        LogicalType[] fields = new LogicalType[rnd.nextInt(9) + 1];
        for (int i = 0; i < fields.length; i++) {
            fields[i] = types[rnd.nextInt(types.length)];
        }
        inputType = RowType.of(fields);
        // Sort on field 0 with a random direction and its default null ordering.
        SortSpec.SortSpecBuilder builder = SortSpec.builder();
        boolean order = rnd.nextBoolean();
        builder.addField(0, order, SortUtil.getNullDefaultOrder(order));
        sortSpec = builder.build();
        testInner();
    }
}
Also used : Random(java.util.Random) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) LogicalType(org.apache.flink.table.types.logical.LogicalType) SortSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.SortSpec) Test(org.junit.Test)

Example 95 with LogicalType

use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

Source: class OperationConverterUtils, method toTableColumn.

/**
 * Converts a parsed {@link SqlTableColumn} into a physical {@link TableColumn}.
 *
 * @param tableColumn the parsed DDL column; must be a regular (physical) column
 * @param sqlValidator validator used to derive the column's relational type
 * @return the physical table column with its derived data type
 * @throws TableException if the column is not a regular column
 */
private static TableColumn toTableColumn(SqlTableColumn tableColumn, SqlValidator sqlValidator) {
    if (!(tableColumn instanceof SqlRegularColumn)) {
        throw new TableException("Only regular columns are supported for this operation yet.");
    }
    SqlRegularColumn regularColumn = (SqlRegularColumn) tableColumn;
    String name = regularColumn.getName().getSimple();
    SqlDataTypeSpec typeSpec = regularColumn.getType();
    // Columns default to nullable when the DDL specifies neither NULL nor NOT NULL
    // (getNullable() returns null in that case). Short-circuit OR avoids the redundant
    // `== null ? true : unbox` ternary.
    boolean nullable = typeSpec.getNullable() == null || typeSpec.getNullable();
    LogicalType logicalType = FlinkTypeFactory.toLogicalType(typeSpec.deriveType(sqlValidator, nullable));
    DataType dataType = TypeConversions.fromLogicalToDataType(logicalType);
    return TableColumn.physical(name, dataType);
}
Also used : TableException(org.apache.flink.table.api.TableException) SqlRegularColumn(org.apache.flink.sql.parser.ddl.SqlTableColumn.SqlRegularColumn) LogicalType(org.apache.flink.table.types.logical.LogicalType) DataType(org.apache.flink.table.types.DataType) SqlDataTypeSpec(org.apache.calcite.sql.SqlDataTypeSpec)

Aggregations

LogicalType (org.apache.flink.table.types.logical.LogicalType)192 DataType (org.apache.flink.table.types.DataType)53 RowType (org.apache.flink.table.types.logical.RowType)53 RowData (org.apache.flink.table.data.RowData)45 List (java.util.List)29 ArrayList (java.util.ArrayList)28 TableException (org.apache.flink.table.api.TableException)25 TimestampType (org.apache.flink.table.types.logical.TimestampType)25 Internal (org.apache.flink.annotation.Internal)21 IntType (org.apache.flink.table.types.logical.IntType)21 Map (java.util.Map)20 ValidationException (org.apache.flink.table.api.ValidationException)20 ArrayType (org.apache.flink.table.types.logical.ArrayType)19 DecimalType (org.apache.flink.table.types.logical.DecimalType)19 LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType)17 Test (org.junit.Test)17 BigIntType (org.apache.flink.table.types.logical.BigIntType)16 LegacyTypeInformationType (org.apache.flink.table.types.logical.LegacyTypeInformationType)16 GenericRowData (org.apache.flink.table.data.GenericRowData)15 Arrays (java.util.Arrays)14