Search in sources :

Example 6 with SqlIntervalQualifier

Use of org.apache.calcite.sql.SqlIntervalQualifier in project flink by apache (the import lists below reference the plain Calcite package, not the Beam-vendored one).

In class HiveParserRexNodeConverter, the method convertConstant:

/**
 * Converts a Hive constant ({@code ExprNodeConstantDesc}) into an equivalent Calcite
 * {@code RexNode} literal.
 *
 * <p>The conversion is driven by the constant's Hive primitive category. A {@code null}
 * value is remapped to the VOID category and produces a typed NULL literal. Interval
 * categories are detected in the default branch through the session {@code HiveShim},
 * because their enum constants differ across Hive versions.
 *
 * @param literal the Hive constant descriptor to convert
 * @param cluster the planner cluster providing the RexBuilder and type factory
 * @return the Calcite literal corresponding to the Hive constant
 * @throws SemanticException if the constant cannot be converted (invalid decimal literal,
 *     or a NaN double)
 */
public static RexNode convertConstant(ExprNodeConstantDesc literal, RelOptCluster cluster) throws SemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = HiveParserTypeConverter.convert(hiveType, dtFactory);
    PrimitiveObjectInspector.PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
    // Unwrap the writable constant into a plain Java object (Boolean, Integer, String, ...).
    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
    RexNode calciteLiteral;
    HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
    // If value is null, the type should also be VOID.
    if (value == null) {
        hiveTypeCategory = PrimitiveObjectInspector.PrimitiveCategory.VOID;
    }
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch(hiveTypeCategory) {
        case BOOLEAN:
            calciteLiteral = rexBuilder.makeLiteral((Boolean) value);
            break;
        case BYTE:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
            break;
        case SHORT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
            break;
        case INT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
            break;
        case LONG:
            calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
            break;
        // TODO: is Decimal an exact numeric or approximate numeric?
        case DECIMAL:
            // Normalize the two possible Hive decimal representations to java.math.BigDecimal.
            if (value instanceof HiveDecimal) {
                value = ((HiveDecimal) value).bigDecimalValue();
            } else if (value instanceof Decimal128) {
                value = ((Decimal128) value).toBigDecimal();
            }
            if (value == null) {
                // For now, we will not run CBO in the presence of invalid decimal literals.
                throw new SemanticException("Expression " + literal.getExprString() + " is not a valid decimal");
            // TODO: return createNullLiteral(literal);
            }
            BigDecimal bd = (BigDecimal) value;
            BigInteger unscaled = bd.unscaledValue();
            // MIN_LONG_BI / MAX_LONG_BI: class-level constants, presumably
            // Long.MIN_VALUE/MAX_VALUE as BigInteger — if the unscaled value fits in a long,
            // the default exact literal is wide enough.
            if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
                calciteLiteral = rexBuilder.makeExactLiteral(bd);
            } else {
                // CBO doesn't support unlimited precision decimals. In practice, this
                // will work...
                // An alternative would be to throw CboSemanticException and fall back
                // to no CBO.
                RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, unscaled.toString().length(), bd.scale());
                calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
            }
            break;
        case FLOAT:
            // Go through the decimal string form so the literal preserves the float's
            // printed value exactly.
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
            break;
        case DOUBLE:
            // TODO: The best solution is to support NaN in expression reduction.
            if (Double.isNaN((Double) value)) {
                throw new SemanticException("NaN");
            }
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
            break;
        case CHAR:
            // Unwrap HiveChar to its String payload before building the char literal.
            if (value instanceof HiveChar) {
                value = ((HiveChar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case VARCHAR:
            // Unwrap HiveVarchar to its String payload before building the char literal.
            if (value instanceof HiveVarchar) {
                value = ((HiveVarchar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case STRING:
            Object constantDescVal = literal.getValue();
            constantDescVal = constantDescVal instanceof NlsString ? constantDescVal : asUnicodeString((String) value);
            // calcite treat string literal as char type, we should treat it as string just like
            // hive
            RelDataType type = HiveParserTypeConverter.convert(hiveType, dtFactory);
            // if we get here, the value is not null
            type = dtFactory.createTypeWithNullability(type, false);
            calciteLiteral = rexBuilder.makeLiteral(constantDescVal, type, true);
            break;
        case DATE:
            LocalDate localDate = HiveParserUtils.getSessionHiveShim().toFlinkDate(value);
            DateString dateString = new DateString(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth());
            calciteLiteral = rexBuilder.makeDateLiteral(dateString);
            break;
        case TIMESTAMP:
            TimestampString timestampString;
            if (value instanceof Calendar) {
                timestampString = TimestampString.fromCalendarFields((Calendar) value);
            } else {
                // Non-Calendar values go through the shim, which handles Hive's
                // version-specific timestamp classes.
                LocalDateTime localDateTime = HiveParserUtils.getSessionHiveShim().toFlinkTimestamp(value);
                timestampString = new TimestampString(localDateTime.getYear(), localDateTime.getMonthValue(), localDateTime.getDayOfMonth(), localDateTime.getHour(), localDateTime.getMinute(), localDateTime.getSecond());
                timestampString = timestampString.withNanos(localDateTime.getNano());
            }
            // hive always treats timestamp with precision 9
            calciteLiteral = rexBuilder.makeTimestampLiteral(timestampString, 9);
            break;
        case VOID:
            // A typed NULL literal (SQL NULL type).
            calciteLiteral = cluster.getRexBuilder().makeLiteral(null, dtFactory.createSqlType(SqlTypeName.NULL), true);
            break;
        case BINARY:
        case UNKNOWN:
        default:
            // Interval categories are version-dependent enum constants; ask the shim.
            if (hiveShim.isIntervalYearMonthType(hiveTypeCategory)) {
                // Calcite year-month literal value is months as BigDecimal
                BigDecimal totalMonths = BigDecimal.valueOf(((HiveParserIntervalYearMonth) value).getTotalMonths());
                calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            } else if (hiveShim.isIntervalDayTimeType(hiveTypeCategory)) {
                // Calcite day-time interval is millis value as BigDecimal
                // Seconds converted to millis
                BigDecimal secsValueBd = BigDecimal.valueOf(((HiveParserIntervalDayTime) value).getTotalSeconds() * 1000);
                // Nanos converted to millis
                BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveParserIntervalDayTime) value).getNanos(), 6);
                calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
            } else {
                throw new RuntimeException("UnSupported Literal type " + hiveTypeCategory);
            }
    }
    return calciteLiteral;
}
Also used : LocalDateTime(java.time.LocalDateTime) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) RelDataType(org.apache.calcite.rel.type.RelDataType) LocalDate(java.time.LocalDate) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) RexBuilder(org.apache.calcite.rex.RexBuilder) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) Calendar(java.util.Calendar) Decimal128(org.apache.hadoop.hive.common.type.Decimal128) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) DateString(org.apache.calcite.util.DateString) BigInteger(java.math.BigInteger) NlsString(org.apache.calcite.util.NlsString) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) TimestampString(org.apache.calcite.util.TimestampString) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) HiveParserIntervalDayTime(org.apache.flink.table.planner.delegation.hive.copy.HiveParserIntervalDayTime) RexNode(org.apache.calcite.rex.RexNode)

Example 7 with SqlIntervalQualifier

Use of org.apache.calcite.sql.SqlIntervalQualifier in project flink by apache (the import lists below reference the plain Calcite package, not the Beam-vendored one).

In class HiveParserTypeConverter, the method convert:

/**
 * Maps a Hive primitive type to the corresponding Calcite {@code RelDataType}.
 *
 * <p>The result is always nullable. STRING/VARCHAR/CHAR are built with the UTF-16 charset
 * and the implicit collation so they follow Hive string semantics rather than Calcite's
 * default CHAR behaviour. Interval categories are recognised through the session
 * {@code HiveShim}, since their enum constants differ between Hive versions.
 *
 * @param type the Hive primitive type info to convert
 * @param dtFactory the Calcite type factory used to create the result
 * @return the nullable Calcite type corresponding to {@code type}
 * @throws RuntimeException if the primitive category cannot be mapped
 */
public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
    HiveShim shim = HiveParserUtils.getSessionHiveShim();
    RelDataType result = null;
    switch(type.getPrimitiveCategory()) {
        case VOID:
            result = dtFactory.createSqlType(SqlTypeName.NULL);
            break;
        case BOOLEAN:
            result = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
            break;
        case BYTE:
            result = dtFactory.createSqlType(SqlTypeName.TINYINT);
            break;
        case SHORT:
            result = dtFactory.createSqlType(SqlTypeName.SMALLINT);
            break;
        case INT:
            result = dtFactory.createSqlType(SqlTypeName.INTEGER);
            break;
        case LONG:
            result = dtFactory.createSqlType(SqlTypeName.BIGINT);
            break;
        case FLOAT:
            result = dtFactory.createSqlType(SqlTypeName.FLOAT);
            break;
        case DOUBLE:
            result = dtFactory.createSqlType(SqlTypeName.DOUBLE);
            break;
        case STRING:
            // Hive strings are unbounded; model them as VARCHAR(Integer.MAX_VALUE).
            result = withHiveStringSemantics(dtFactory, dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE));
            break;
        case DATE:
            result = dtFactory.createSqlType(SqlTypeName.DATE);
            break;
        case TIMESTAMP:
            // Hive timestamps always carry nanosecond precision (9).
            result = dtFactory.createSqlType(SqlTypeName.TIMESTAMP, 9);
            break;
        case BINARY:
            result = dtFactory.createSqlType(SqlTypeName.BINARY);
            break;
        case DECIMAL:
            DecimalTypeInfo decimalInfo = (DecimalTypeInfo) type;
            result = dtFactory.createSqlType(SqlTypeName.DECIMAL, decimalInfo.precision(), decimalInfo.scale());
            break;
        case VARCHAR:
            result = withHiveStringSemantics(dtFactory, dtFactory.createSqlType(SqlTypeName.VARCHAR, ((BaseCharTypeInfo) type).getLength()));
            break;
        case CHAR:
            result = withHiveStringSemantics(dtFactory, dtFactory.createSqlType(SqlTypeName.CHAR, ((BaseCharTypeInfo) type).getLength()));
            break;
        case UNKNOWN:
            result = dtFactory.createSqlType(SqlTypeName.OTHER);
            break;
        default:
            // Interval categories are version-dependent enum constants; ask the shim.
            if (shim.isIntervalYearMonthType(type.getPrimitiveCategory())) {
                result = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            } else if (shim.isIntervalDayTimeType(type.getPrimitiveCategory())) {
                result = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
            }
    }
    if (result == null) {
        throw new RuntimeException("Unsupported Type : " + type.getTypeName());
    }
    return dtFactory.createTypeWithNullability(result, true);
}

/** Applies Hive string semantics: UTF-16 charset with the implicit collation. */
private static RelDataType withHiveStringSemantics(RelDataTypeFactory dtFactory, RelDataType charType) {
    return dtFactory.createTypeWithCharsetAndCollation(
            charType,
            Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME),
            SqlCollation.IMPLICIT);
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) RelDataType(org.apache.calcite.rel.type.RelDataType) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim)

Example 8 with SqlIntervalQualifier

Use of org.apache.calcite.sql.SqlIntervalQualifier in project flink by apache (the import lists below reference the plain Calcite package, not the Beam-vendored one).

In class RelDataTypeJsonSerdeTest, the method testMissingPrecisionAndScale:

/**
 * Verifies that a DAY-TO-SECOND interval type serialized without explicit precision and
 * scale deserializes to the canonical type instance that carries the default day and
 * fractional-second precisions (the assertion uses identity, relying on type interning
 * by the factory).
 */
@Test
public void testMissingPrecisionAndScale() throws IOException {
    final SerdeContext ctx = configuredSerdeContext();
    // Serialize an interval type built WITHOUT precision/scale on the qualifier.
    final RelDataType intervalType =
            FACTORY.createSqlIntervalType(
                    new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, SqlParserPos.ZERO));
    final String serialized = toJson(ctx, intervalType);
    final RelDataType deserialized = toObject(ctx, serialized, RelDataType.class);
    // The round-tripped type must be the very same instance as the fully-specified
    // equivalent with the defaults filled in.
    final RelDataType expected =
            FACTORY.createSqlIntervalType(
                    new SqlIntervalQualifier(
                            TimeUnit.DAY,
                            DayTimeIntervalType.DEFAULT_DAY_PRECISION,
                            TimeUnit.SECOND,
                            DayTimeIntervalType.DEFAULT_FRACTIONAL_PRECISION,
                            SqlParserPos.ZERO));
    assertThat(deserialized).isSameAs(expected);
}
Also used : SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) JsonSerdeTestUtil.configuredSerdeContext(org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeTestUtil.configuredSerdeContext) RelDataType(org.apache.calcite.rel.type.RelDataType) Test(org.junit.jupiter.api.Test) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)

Example 9 with SqlIntervalQualifier

Use of org.apache.calcite.sql.SqlIntervalQualifier in project flink by apache (the import lists below reference the plain Calcite package, not the Beam-vendored one).

In class RexNodeJsonSerdeTest, the method testRexNodeSerde:

// --------------------------------------------------------------------------------------------
// Test data
// --------------------------------------------------------------------------------------------
// Supplies the RexNode instances exercised by the JSON serde round-trip tests. The stream
// covers: literals of most kinds (null, boolean, exact/approximate numerics, year-month and
// day-time intervals with varying precisions, date/time/timestamp at several precisions,
// binary, character strings, enum flags, and Sarg search arguments), input/correlation/field
// references, a cast, and a few composite calls (IN, OR/IS NOT NULL, >=, HASH_CODE, pattern
// field reference).
@SuppressWarnings("UnstableApiUsage")
private static Stream<RexNode> testRexNodeSerde() {
    final RexBuilder rexBuilder = new RexBuilder(FACTORY);
    // Row type f1:INTEGER, f2:BIGINT, f3:ROW<n1:VARCHAR, n2:VARCHAR>, used by the
    // correlation and field-access cases below.
    final RelDataType inputType = FACTORY.createStructType(StructKind.PEEK_FIELDS_NO_EXPAND, Arrays.asList(FACTORY.createSqlType(SqlTypeName.INTEGER), FACTORY.createSqlType(SqlTypeName.BIGINT), FACTORY.createStructType(StructKind.PEEK_FIELDS_NO_EXPAND, Arrays.asList(FACTORY.createSqlType(SqlTypeName.VARCHAR), FACTORY.createSqlType(SqlTypeName.VARCHAR)), Arrays.asList("n1", "n2"))), Arrays.asList("f1", "f2", "f3"));
    return Stream.of(rexBuilder.makeNullLiteral(FACTORY.createSqlType(SqlTypeName.VARCHAR)), rexBuilder.makeLiteral(true), rexBuilder.makeExactLiteral(new BigDecimal(Byte.MAX_VALUE), FACTORY.createSqlType(SqlTypeName.TINYINT)), rexBuilder.makeExactLiteral(new BigDecimal(Short.MAX_VALUE), FACTORY.createSqlType(SqlTypeName.SMALLINT)), rexBuilder.makeExactLiteral(new BigDecimal(Integer.MAX_VALUE), FACTORY.createSqlType(SqlTypeName.INTEGER)), rexBuilder.makeExactLiteral(new BigDecimal(Long.MAX_VALUE), FACTORY.createSqlType(SqlTypeName.BIGINT)), rexBuilder.makeExactLiteral(BigDecimal.valueOf(Double.MAX_VALUE), FACTORY.createSqlType(SqlTypeName.DOUBLE)), rexBuilder.makeApproxLiteral(BigDecimal.valueOf(Float.MAX_VALUE), FACTORY.createSqlType(SqlTypeName.FLOAT)), rexBuilder.makeExactLiteral(new BigDecimal("23.1234567890123456789012345678")), rexBuilder.makeIntervalLiteral(BigDecimal.valueOf(100), new SqlIntervalQualifier(TimeUnit.YEAR, 4, TimeUnit.YEAR, RelDataType.PRECISION_NOT_SPECIFIED, SqlParserPos.ZERO)), rexBuilder.makeIntervalLiteral(BigDecimal.valueOf(3), new SqlIntervalQualifier(TimeUnit.YEAR, 2, TimeUnit.MONTH, RelDataType.PRECISION_NOT_SPECIFIED, SqlParserPos.ZERO)), rexBuilder.makeIntervalLiteral(BigDecimal.valueOf(3), new SqlIntervalQualifier(TimeUnit.DAY, 2, TimeUnit.SECOND, 6, SqlParserPos.ZERO)), rexBuilder.makeIntervalLiteral(BigDecimal.valueOf(3), new SqlIntervalQualifier(TimeUnit.SECOND, 2, TimeUnit.SECOND, 6, SqlParserPos.ZERO)), rexBuilder.makeDateLiteral(DateString.fromDaysSinceEpoch(10)), rexBuilder.makeDateLiteral(new DateString("2000-12-12")), rexBuilder.makeTimeLiteral(TimeString.fromMillisOfDay(1234), 3), rexBuilder.makeTimeLiteral(TimeString.fromMillisOfDay(123456), 6), rexBuilder.makeTimeLiteral(new TimeString("01:01:01.000000001"), 9), rexBuilder.makeTimestampLiteral(TimestampString.fromMillisSinceEpoch(1234), 3), rexBuilder.makeTimestampLiteral(TimestampString.fromMillisSinceEpoch(123456789), 9), rexBuilder.makeTimestampLiteral(new 
TimestampString("0001-01-01 01:01:01.000000001"), 9), rexBuilder.makeTimestampLiteral(new TimestampString("2000-12-12 12:30:57.1234"), 4), rexBuilder.makeBinaryLiteral(ByteString.EMPTY), rexBuilder.makeBinaryLiteral(ByteString.ofBase64("SGVsbG8gV29ybGQh")), rexBuilder.makeLiteral(""), rexBuilder.makeLiteral("abc"), rexBuilder.makeFlag(SqlTrimFunction.Flag.BOTH), rexBuilder.makeFlag(TimeUnitRange.DAY), rexBuilder.makeSearchArgumentLiteral(Sarg.of(false, ImmutableRangeSet.of(Range.closed(BigDecimal.valueOf(1), BigDecimal.valueOf(10)))), FACTORY.createSqlType(SqlTypeName.INTEGER)), rexBuilder.makeSearchArgumentLiteral(Sarg.of(false, ImmutableRangeSet.of(Range.range(BigDecimal.valueOf(1), BoundType.OPEN, BigDecimal.valueOf(10), BoundType.CLOSED))), FACTORY.createSqlType(SqlTypeName.INTEGER)), rexBuilder.makeSearchArgumentLiteral(Sarg.of(false, TreeRangeSet.create(Arrays.asList(Range.closed(BigDecimal.valueOf(1), BigDecimal.valueOf(1)), Range.closed(BigDecimal.valueOf(3), BigDecimal.valueOf(3)), Range.closed(BigDecimal.valueOf(6), BigDecimal.valueOf(6))))), FACTORY.createSqlType(SqlTypeName.INTEGER)), rexBuilder.makeInputRef(FACTORY.createSqlType(SqlTypeName.BIGINT), 0), rexBuilder.makeCorrel(inputType, new CorrelationId("$cor1")), rexBuilder.makeFieldAccess(rexBuilder.makeCorrel(inputType, new CorrelationId("$cor2")), "f2", true), // cast($1 as smallint)
    rexBuilder.makeCast(FACTORY.createSqlType(SqlTypeName.SMALLINT), rexBuilder.makeInputRef(FACTORY.createSqlType(SqlTypeName.INTEGER), 1)), // $1 in (1, 3, 5)
    rexBuilder.makeIn(rexBuilder.makeInputRef(FACTORY.createSqlType(SqlTypeName.INTEGER), 1), Arrays.asList(rexBuilder.makeExactLiteral(new BigDecimal(1)), rexBuilder.makeExactLiteral(new BigDecimal(3)), rexBuilder.makeExactLiteral(new BigDecimal(5)))), // null or $1 is null
    rexBuilder.makeCall(SqlStdOperatorTable.OR, rexBuilder.makeNullLiteral(FACTORY.createSqlType(SqlTypeName.INTEGER)), rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, rexBuilder.makeInputRef(FACTORY.createSqlType(SqlTypeName.INTEGER), 1))), // $1 >= 10
    rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, rexBuilder.makeInputRef(FACTORY.createSqlType(SqlTypeName.INTEGER), 1), rexBuilder.makeExactLiteral(new BigDecimal(10))), // hash_code($1)
    rexBuilder.makeCall(FlinkSqlOperatorTable.HASH_CODE, rexBuilder.makeInputRef(FACTORY.createSqlType(SqlTypeName.INTEGER), 1)), rexBuilder.makePatternFieldRef("test", FACTORY.createSqlType(SqlTypeName.INTEGER), 0));
}
Also used : TimeString(org.apache.calcite.util.TimeString) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) DateString(org.apache.calcite.util.DateString) RexBuilder(org.apache.calcite.rex.RexBuilder) RelDataType(org.apache.calcite.rel.type.RelDataType) TimestampString(org.apache.calcite.util.TimestampString) CorrelationId(org.apache.calcite.rel.core.CorrelationId) BigDecimal(java.math.BigDecimal)

Example 10 with SqlIntervalQualifier

Use of org.apache.calcite.sql.SqlIntervalQualifier in project flink by apache (the import lists below reference the plain Calcite package, not the Beam-vendored one).

In class SqlValidatorImpl, the method validateLiteral:

/**
 * Validates a literal against the limits of its SQL type.
 *
 * <p>Checks performed: DECIMAL unscaled values must fit into a signed 64-bit long; DOUBLE
 * literals are delegated to {@code validateLiteralAsDouble}; BINARY bit strings must hold a
 * whole number of bytes; DATE/TIME/TIMESTAMP years must lie in [1, 9999] of the common era;
 * interval literals have their qualifier validated and their text re-evaluated. Every other
 * type name passes through unchecked.
 *
 * @param literal the literal to validate
 */
public void validateLiteral(SqlLiteral literal) {
    switch(literal.getTypeName()) {
        case DECIMAL:
            // Decimal and long share 64-bit precision, so the unscaled value of a decimal
            // must fit into a long.
            // (REVIEW jvs 4-Aug-2004 / jhyde 2006/12/21: these limits arguably belong in
            // the type system or the calculator implementations rather than here.)
            final BigDecimal decimalValue = (BigDecimal) literal.getValue();
            final BigInteger unscaledValue = decimalValue.unscaledValue();
            // Round-tripping through long changes the value exactly when it overflows.
            if (!BigInteger.valueOf(unscaledValue.longValue()).equals(unscaledValue)) {
                throw newValidationError(literal, RESOURCE.numberLiteralOutOfRange(decimalValue.toString()));
            }
            break;
        case DOUBLE:
            validateLiteralAsDouble(literal);
            break;
        case BINARY:
            // Binary literals must contain a whole number of bytes.
            final BitString bits = (BitString) literal.getValue();
            if ((bits.getBitCount() % 8) != 0) {
                throw newValidationError(literal, RESOURCE.binaryLiteralOdd());
            }
            break;
        case DATE:
        case TIME:
        case TIMESTAMP:
            // Temporal literals are restricted to years 1..9999 of the common era.
            final Calendar cal = literal.getValueAs(Calendar.class);
            final int year = cal.get(Calendar.YEAR);
            final int era = cal.get(Calendar.ERA);
            if (era == GregorianCalendar.BC || year < 1 || year > 9999) {
                throw newValidationError(literal, RESOURCE.dateLiteralOutOfRange(literal.toString()));
            }
            break;
        case INTERVAL_YEAR:
        case INTERVAL_YEAR_MONTH:
        case INTERVAL_MONTH:
        case INTERVAL_DAY:
        case INTERVAL_DAY_HOUR:
        case INTERVAL_DAY_MINUTE:
        case INTERVAL_DAY_SECOND:
        case INTERVAL_HOUR:
        case INTERVAL_HOUR_MINUTE:
        case INTERVAL_HOUR_SECOND:
        case INTERVAL_MINUTE:
        case INTERVAL_MINUTE_SECOND:
        case INTERVAL_SECOND:
            if (literal instanceof SqlIntervalLiteral) {
                final SqlIntervalLiteral.IntervalValue intervalValue =
                        literal.getValueAs(SqlIntervalLiteral.IntervalValue.class);
                final SqlIntervalQualifier qualifier = intervalValue.getIntervalQualifier();
                // The qualifier must be valid before the literal text can be interpreted.
                validateIntervalQualifier(qualifier);
                final String literalText = intervalValue.getIntervalLiteral();
                // Throws CalciteContextException if the string is not a valid interval.
                final int[] parsed = qualifier.evaluateIntervalLiteral(
                        literalText, literal.getParserPosition(), typeFactory.getTypeSystem());
                Util.discard(parsed);
            }
            break;
        default:
    }
}
Also used : BitString(org.apache.calcite.util.BitString) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) GregorianCalendar(java.util.GregorianCalendar) Calendar(java.util.Calendar) SqlIntervalLiteral(org.apache.calcite.sql.SqlIntervalLiteral) BigInteger(java.math.BigInteger) BitString(org.apache.calcite.util.BitString) BigDecimal(java.math.BigDecimal)

Aggregations

SqlIntervalQualifier (org.apache.calcite.sql.SqlIntervalQualifier)31 BigDecimal (java.math.BigDecimal)24 RelDataType (org.apache.calcite.rel.type.RelDataType)13 SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos)12 Test (org.junit.Test)8 HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime)6 RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory)5 Period (org.joda.time.Period)5 Calendar (java.util.Calendar)4 RexBuilder (org.apache.calcite.rex.RexBuilder)4 RexNode (org.apache.calcite.rex.RexNode)4 SqlIntervalLiteral (org.apache.calcite.sql.SqlIntervalLiteral)4 SqlNode (org.apache.calcite.sql.SqlNode)4 TimestampString (org.apache.calcite.util.TimestampString)4 BigInteger (java.math.BigInteger)3 BitString (org.apache.calcite.util.BitString)3 DateString (org.apache.calcite.util.DateString)3 GregorianCalendar (java.util.GregorianCalendar)2 TimeUnit (org.apache.calcite.avatica.util.TimeUnit)2 RexLiteral (org.apache.calcite.rex.RexLiteral)2