
Example 11 with SqlIntervalQualifier

use of org.apache.calcite.sql.SqlIntervalQualifier in project flink by apache.

the class FlinkConvertletTable method convertTryCast.

// Slightly modified version of StandardConvertletTable::convertCast
private RexNode convertTryCast(SqlRexContext cx, final SqlCall call) {
    RelDataTypeFactory typeFactory = cx.getTypeFactory();
    final SqlNode leftNode = call.operand(0);
    final SqlNode rightNode = call.operand(1);
    final RexNode valueRex = cx.convertExpression(leftNode);
    RelDataType type;
    if (rightNode instanceof SqlIntervalQualifier) {
        type = typeFactory.createSqlIntervalType((SqlIntervalQualifier) rightNode);
    } else if (rightNode instanceof SqlDataTypeSpec) {
        SqlDataTypeSpec dataType = ((SqlDataTypeSpec) rightNode);
        type = dataType.deriveType(cx.getValidator());
        if (type == null) {
            type = cx.getValidator().getValidatedNodeType(dataType.getTypeName());
        }
    } else {
        throw new IllegalStateException("Invalid right argument type for TRY_CAST: " + rightNode);
    }
    type = typeFactory.createTypeWithNullability(type, true);
    if (SqlUtil.isNullLiteral(leftNode, false)) {
        final SqlValidatorImpl validator = (SqlValidatorImpl) cx.getValidator();
        validator.setValidatedNodeType(leftNode, type);
        return cx.convertExpression(leftNode);
    }
    return cx.getRexBuilder().makeCall(type, FlinkSqlOperatorTable.TRY_CAST, Collections.singletonList(valueRex));
}
Also used : SqlValidatorImpl(org.apache.calcite.sql.validate.SqlValidatorImpl) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) RelDataType(org.apache.calcite.rel.type.RelDataType) SqlDataTypeSpec(org.apache.calcite.sql.SqlDataTypeSpec) SqlNode(org.apache.calcite.sql.SqlNode) RexNode(org.apache.calcite.rex.RexNode)
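
The interval branch is where SqlIntervalQualifier comes into play: when the TRY_CAST target is written as an interval type, the parser supplies a SqlIntervalQualifier rather than a SqlDataTypeSpec, and the type factory turns it into a RelDataType. Below is a minimal standalone sketch of that conversion using plain Calcite; the SqlTypeFactoryImpl usage and the class name IntervalTypeSketch are illustrative assumptions, not code from the project.

import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;

public class IntervalTypeSketch {
    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        // The qualifier describes the target unit range, e.g. INTERVAL DAY TO MINUTE.
        SqlIntervalQualifier qualifier =
                new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.MINUTE, SqlParserPos.ZERO);
        // Same call the interval branch of convertTryCast makes.
        RelDataType intervalType = typeFactory.createSqlIntervalType(qualifier);
        // Mirrors the nullability handling applied after the branch in the snippet above.
        RelDataType nullableType = typeFactory.createTypeWithNullability(intervalType, true);
        System.out.println(nullableType.getFullTypeString());
    }
}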

Example 12 with SqlIntervalQualifier

use of org.apache.calcite.sql.SqlIntervalQualifier in project flink by apache.

the class DynamicTableSourceSpecSerdeTest method testDynamicTableSinkSpecSerde.

public static Stream<DynamicTableSourceSpec> testDynamicTableSinkSpecSerde() {
    Map<String, String> options1 = new HashMap<>();
    options1.put("connector", FileSystemTableFactory.IDENTIFIER);
    options1.put("format", TestCsvFormatFactory.IDENTIFIER);
    options1.put("path", "/tmp");
    final ResolvedSchema resolvedSchema1 = new ResolvedSchema(Collections.singletonList(Column.physical("a", DataTypes.BIGINT())), Collections.emptyList(), null);
    final CatalogTable catalogTable1 = CatalogTable.of(Schema.newBuilder().fromResolvedSchema(resolvedSchema1).build(), null, Collections.emptyList(), options1);
    DynamicTableSourceSpec spec1 = new DynamicTableSourceSpec(ContextResolvedTable.temporary(ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"), new ResolvedCatalogTable(catalogTable1, resolvedSchema1)), null);
    Map<String, String> options2 = new HashMap<>();
    options2.put("connector", TestValuesTableFactory.IDENTIFIER);
    options2.put("disable-lookup", "true");
    options2.put("enable-watermark-push-down", "true");
    options2.put("filterable-fields", "b");
    options2.put("bounded", "false");
    options2.put("readable-metadata", "m1:INT, m2:STRING");
    final ResolvedSchema resolvedSchema2 = new ResolvedSchema(Arrays.asList(Column.physical("a", DataTypes.BIGINT()), Column.physical("b", DataTypes.INT()), Column.physical("c", DataTypes.STRING()), Column.physical("p", DataTypes.STRING()), Column.metadata("m1", DataTypes.INT(), null, false), Column.metadata("m2", DataTypes.STRING(), null, false), Column.physical("ts", DataTypes.TIMESTAMP(3))), Collections.emptyList(), null);
    final CatalogTable catalogTable2 = CatalogTable.of(Schema.newBuilder().fromResolvedSchema(resolvedSchema2).build(), null, Collections.emptyList(), options2);
    FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();
    RexBuilder rexBuilder = new RexBuilder(factory);
    DynamicTableSourceSpec spec2 = new DynamicTableSourceSpec(
            ContextResolvedTable.temporary(
                    ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                    new ResolvedCatalogTable(catalogTable2, resolvedSchema2)),
            Arrays.asList(
                    new ProjectPushDownSpec(
                            new int[][] { { 0 }, { 1 }, { 4 }, { 6 } },
                            RowType.of(
                                    new LogicalType[] { new BigIntType(), new IntType(), new IntType(), new TimestampType(3) },
                                    new String[] { "a", "b", "m1", "ts" })),
                    new ReadingMetadataSpec(
                            Arrays.asList("m1", "m2"),
                            RowType.of(
                                    new LogicalType[] { new BigIntType(), new IntType(), new IntType(), new TimestampType(3) },
                                    new String[] { "a", "b", "m1", "ts" })),
                    new FilterPushDownSpec(Collections.singletonList(
                            // b >= 10
                            rexBuilder.makeCall(
                                    SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
                                    rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.INTEGER), 1),
                                    rexBuilder.makeExactLiteral(new BigDecimal(10))))),
                    new WatermarkPushDownSpec(
                            rexBuilder.makeCall(
                                    SqlStdOperatorTable.MINUS,
                                    rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.TIMESTAMP, 3), 3),
                                    rexBuilder.makeIntervalLiteral(
                                            BigDecimal.valueOf(1000),
                                            new SqlIntervalQualifier(TimeUnit.SECOND, 2, TimeUnit.SECOND, 6, SqlParserPos.ZERO))),
                            5000,
                            RowType.of(new BigIntType(), new IntType(), new IntType(), new TimestampType(false, TimestampKind.ROWTIME, 3))),
                    new SourceWatermarkSpec(
                            true,
                            RowType.of(new BigIntType(), new IntType(), new IntType(), new TimestampType(false, TimestampKind.ROWTIME, 3))),
                    new LimitPushDownSpec(100),
                    new PartitionPushDownSpec(Arrays.asList(
                            new HashMap<String, String>() {{ put("p", "A"); }},
                            new HashMap<String, String>() {{ put("p", "B"); }}))));
    return Stream.of(spec1, spec2);
}
Also used : WatermarkPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.WatermarkPushDownSpec) HashMap(java.util.HashMap) ProjectPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.ProjectPushDownSpec) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) DynamicTableSourceSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSourceSpec) LogicalType(org.apache.flink.table.types.logical.LogicalType) BigIntType(org.apache.flink.table.types.logical.BigIntType) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) ReadingMetadataSpec(org.apache.flink.table.planner.plan.abilities.source.ReadingMetadataSpec) BigDecimal(java.math.BigDecimal) IntType(org.apache.flink.table.types.logical.IntType) BigIntType(org.apache.flink.table.types.logical.BigIntType) SourceWatermarkSpec(org.apache.flink.table.planner.plan.abilities.source.SourceWatermarkSpec) LimitPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.LimitPushDownSpec) PartitionPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.PartitionPushDownSpec) FilterPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.FilterPushDownSpec) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RexBuilder(org.apache.calcite.rex.RexBuilder) TimestampType(org.apache.flink.table.types.logical.TimestampType) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema)
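
The SqlIntervalQualifier in this example feeds rexBuilder.makeIntervalLiteral, which produces the one-second interval of the watermark expression (day-time interval literals are encoded in milliseconds, hence BigDecimal.valueOf(1000)). The following sketch rebuilds just that expression with plain Calcite instead of FlinkTypeFactory; SqlTypeFactoryImpl and the class name WatermarkExprSketch are illustrative assumptions.

import java.math.BigDecimal;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class WatermarkExprSketch {
    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RexBuilder rexBuilder = new RexBuilder(typeFactory);
        // Day-time interval literals are encoded in milliseconds, so 1000 means one second.
        RexNode oneSecond = rexBuilder.makeIntervalLiteral(
                BigDecimal.valueOf(1000),
                new SqlIntervalQualifier(TimeUnit.SECOND, 2, TimeUnit.SECOND, 6, SqlParserPos.ZERO));
        // Input ref 3 stands in for the TIMESTAMP(3) rowtime column "ts" of the projected row.
        RexNode rowtime = rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.TIMESTAMP, 3), 3);
        RexNode watermark = rexBuilder.makeCall(SqlStdOperatorTable.MINUS, rowtime, oneSecond);
        // Prints the RexCall digest, roughly -($3, 1000:INTERVAL SECOND(2, 6)).
        System.out.println(watermark);
    }
}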

Example 13 with SqlIntervalQualifier

use of org.apache.calcite.sql.SqlIntervalQualifier in project druid by druid-io.

the class ExpressionsTest method testTimeMinusDayTimeInterval.

@Test
public void testTimeMinusDayTimeInterval() {
    final Period period = new Period("P1DT1H1M");
    testHelper.testExpression(
            SqlTypeName.TIMESTAMP,
            SqlStdOperatorTable.MINUS_DATE,
            // the DAY-TIME interval literal carries its value in millis
            ImmutableList.of(testHelper.makeInputRef("t"), testHelper.makeLiteral(new BigDecimal(period.toStandardDuration().getMillis()), new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.MINUTE, SqlParserPos.ZERO))),
            // the RexNode type "interval day to minute" is not converted to a druid long... yet
            DruidExpression.ofExpression(ColumnType.LONG, (args) -> "(" + args.get(0).getExpression() + " - " + args.get(1).getExpression() + ")", ImmutableList.of(DruidExpression.ofColumn(ColumnType.LONG, "t"), DruidExpression.ofLiteral(null, "90060000"))),
            DateTimes.of("2000-02-03T04:05:06").minus(period).getMillis());
}
Also used : RPadOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.RPadOperatorConversion) SqlTrimFunction(org.apache.calcite.sql.fun.SqlTrimFunction) TimeUnit(org.apache.calcite.avatica.util.TimeUnit) ReverseOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.ReverseOperatorConversion) BigDecimal(java.math.BigDecimal) SearchQueryDimFilter(org.apache.druid.query.filter.SearchQueryDimFilter) TimeParseOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.TimeParseOperatorConversion) Map(java.util.Map) LeftOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.LeftOperatorConversion) RoundOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.RoundOperatorConversion) IAE(org.apache.druid.java.util.common.IAE) ContainsOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.ContainsOperatorConversion) TimeFloorOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.TimeFloorOperatorConversion) StringFormatOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.StringFormatOperatorConversion) DateTimes(org.apache.druid.java.util.common.DateTimes) RegexDimFilter(org.apache.druid.query.filter.RegexDimFilter) ImmutableMap(com.google.common.collect.ImmutableMap) RegexpExtractOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.RegexpExtractOperatorConversion) TestExprMacroTable(org.apache.druid.query.expression.TestExprMacroTable) RegexDimExtractionFn(org.apache.druid.query.extraction.RegexDimExtractionFn) TimeCeilOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.TimeCeilOperatorConversion) SqlStdOperatorTable(org.apache.calcite.sql.fun.SqlStdOperatorTable) TruncateOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.TruncateOperatorConversion) RegexpLikeOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.RegexpLikeOperatorConversion) RightOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.RightOperatorConversion) DateTruncOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.DateTruncOperatorConversion) RepeatOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.RepeatOperatorConversion) TimeUnitRange(org.apache.calcite.avatica.util.TimeUnitRange) ImmutableList(com.google.common.collect.ImmutableList) TimeExtractOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.TimeExtractOperatorConversion) ExpressionVirtualColumn(org.apache.druid.segment.virtual.ExpressionVirtualColumn) HumanReadableFormatOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.HumanReadableFormatOperatorConversion) Before(org.junit.Before) TimeShiftOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.TimeShiftOperatorConversion) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) Period(org.joda.time.Period) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) TimeFormatOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.TimeFormatOperatorConversion) LPadOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.LPadOperatorConversion) Test(org.junit.Test) StrposOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.StrposOperatorConversion) SqlFunction(org.apache.calcite.sql.SqlFunction) NullHandling(org.apache.druid.common.config.NullHandling) RowSignature(org.apache.druid.segment.column.RowSignature) 
ParseLongOperatorConversion(org.apache.druid.sql.calcite.expression.builtin.ParseLongOperatorConversion) ColumnType(org.apache.druid.segment.column.ColumnType) ContainsSearchQuerySpec(org.apache.druid.query.search.ContainsSearchQuerySpec) Collections(java.util.Collections) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) Period(org.joda.time.Period) BigDecimal(java.math.BigDecimal) Test(org.junit.Test)
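
Both Druid examples rely on Calcite's literal encoding for intervals: a DAY-TIME interval stores its value as a millisecond count, which is why the expected Druid literal above is 90060000 for P1DT1H1M. A small sketch of that encoding, assuming only joda-time and Calcite on the classpath (the class name DayTimeIntervalSketch is illustrative):

import java.math.BigDecimal;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.joda.time.Period;

public class DayTimeIntervalSketch {
    public static void main(String[] args) {
        Period period = new Period("P1DT1H1M");
        // 1 day + 1 hour + 1 minute = 86400000 + 3600000 + 60000 = 90060000 ms
        BigDecimal millis = new BigDecimal(period.toStandardDuration().getMillis());
        SqlIntervalQualifier qualifier =
                new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.MINUTE, SqlParserPos.ZERO);
        System.out.println(millis);               // 90060000
        System.out.println(qualifier.typeName()); // INTERVAL_DAY_MINUTE
    }
}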

Example 14 with SqlIntervalQualifier

use of org.apache.calcite.sql.SqlIntervalQualifier in project druid by druid-io.

the class ExpressionsTest method testTimePlusYearMonthInterval.

@Test
public void testTimePlusYearMonthInterval() {
    final Period period = new Period("P1Y1M");
    testHelper.testExpressionString(
            SqlStdOperatorTable.DATETIME_PLUS,
            // the YEAR-MONTH interval literal carries its value in months (P1Y1M = 13 months)
            ImmutableList.of(testHelper.makeInputRef("t"), testHelper.makeLiteral(new BigDecimal(13), new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO))),
            makeExpression(ColumnType.LONG, "timestamp_shift(\"t\",'P13M',1,'UTC')"),
            DateTimes.of("2000-02-03T04:05:06").plus(period).getMillis());
}
Also used : SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) Period(org.joda.time.Period) BigDecimal(java.math.BigDecimal) Test(org.junit.Test)
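
YEAR-MONTH intervals use the other encoding: the literal value is a month count, so P1Y1M appears as new BigDecimal(13) and Druid shifts by 'P13M'. A minimal sketch of that arithmetic, again assuming joda-time and Calcite (the class name YearMonthIntervalSketch is illustrative):

import java.math.BigDecimal;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.joda.time.Period;

public class YearMonthIntervalSketch {
    public static void main(String[] args) {
        Period period = new Period("P1Y1M");
        // 1 year + 1 month = 12 + 1 = 13 months, matching new BigDecimal(13) in the test
        int months = period.getYears() * 12 + period.getMonths();
        SqlIntervalQualifier qualifier =
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO);
        System.out.println(new BigDecimal(months));  // 13
        System.out.println(qualifier.isYearMonth()); // true
    }
}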

Example 15 with SqlIntervalQualifier

use of org.apache.calcite.sql.SqlIntervalQualifier in project druid by druid-io.

the class LeastExpressionTest method testInvalidType.

@Test
public void testInvalidType() {
    expectException(IllegalArgumentException.class, "Argument 0 has invalid type: INTERVAL_YEAR_MONTH");
    testExpression(
            // the YEAR-MONTH interval literal carries its value in months
            Collections.singletonList(testHelper.makeLiteral(new BigDecimal(13), new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO))),
            null,
            null);
}
Also used : SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) BigDecimal(java.math.BigDecimal) Test(org.junit.Test)
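
The type name in the expected error, INTERVAL_YEAR_MONTH, is just the SqlTypeName that a YEAR-TO-MONTH qualifier resolves to. A small sketch of that mapping with plain Calcite (the class name IntervalTypeNameSketch is illustrative):

import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeName;

public class IntervalTypeNameSketch {
    public static void main(String[] args) {
        SqlIntervalQualifier qualifier =
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO);
        // Resolves to SqlTypeName.INTERVAL_YEAR_MONTH, the name quoted in the expected error above.
        SqlTypeName typeName = qualifier.typeName();
        System.out.println(typeName); // INTERVAL_YEAR_MONTH
    }
}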

Aggregations

SqlIntervalQualifier (org.apache.calcite.sql.SqlIntervalQualifier): 31
BigDecimal (java.math.BigDecimal): 24
RelDataType (org.apache.calcite.rel.type.RelDataType): 13
SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos): 12
Test (org.junit.Test): 8
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 6
RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory): 5
Period (org.joda.time.Period): 5
Calendar (java.util.Calendar): 4
RexBuilder (org.apache.calcite.rex.RexBuilder): 4
RexNode (org.apache.calcite.rex.RexNode): 4
SqlIntervalLiteral (org.apache.calcite.sql.SqlIntervalLiteral): 4
SqlNode (org.apache.calcite.sql.SqlNode): 4
TimestampString (org.apache.calcite.util.TimestampString): 4
BigInteger (java.math.BigInteger): 3
BitString (org.apache.calcite.util.BitString): 3
DateString (org.apache.calcite.util.DateString): 3
GregorianCalendar (java.util.GregorianCalendar): 2
TimeUnit (org.apache.calcite.avatica.util.TimeUnit): 2
RexLiteral (org.apache.calcite.rex.RexLiteral): 2