Example 16 with SqlParserPos

use of org.apache.calcite.sql.parser.SqlParserPos in project flink by apache.

the class HiveParserRexNodeConverter method convertConstant.

public static RexNode convertConstant(ExprNodeConstantDesc literal, RelOptCluster cluster) throws SemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = HiveParserTypeConverter.convert(hiveType, dtFactory);
    PrimitiveObjectInspector.PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
    RexNode calciteLiteral;
    HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
    // If value is null, the type should also be VOID.
    if (value == null) {
        hiveTypeCategory = PrimitiveObjectInspector.PrimitiveCategory.VOID;
    }
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch(hiveTypeCategory) {
        case BOOLEAN:
            calciteLiteral = rexBuilder.makeLiteral((Boolean) value);
            break;
        case BYTE:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
            break;
        case SHORT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
            break;
        case INT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
            break;
        case LONG:
            calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
            break;
        // TODO: is Decimal an exact numeric or approximate numeric?
        case DECIMAL:
            if (value instanceof HiveDecimal) {
                value = ((HiveDecimal) value).bigDecimalValue();
            } else if (value instanceof Decimal128) {
                value = ((Decimal128) value).toBigDecimal();
            }
            if (value == null) {
                // For now, we will not run CBO in the presence of invalid decimal literals.
                // TODO: return createNullLiteral(literal);
                throw new SemanticException("Expression " + literal.getExprString() + " is not a valid decimal");
            }
            BigDecimal bd = (BigDecimal) value;
            BigInteger unscaled = bd.unscaledValue();
            if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
                calciteLiteral = rexBuilder.makeExactLiteral(bd);
            } else {
                // CBO doesn't support unlimited precision decimals. In practice, this
                // will work...
                // An alternative would be to throw CboSemanticException and fall back
                // to no CBO.
                RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, unscaled.toString().length(), bd.scale());
                calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
            }
            break;
        case FLOAT:
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
            break;
        case DOUBLE:
            // TODO: The best solution is to support NaN in expression reduction.
            if (Double.isNaN((Double) value)) {
                throw new SemanticException("NaN");
            }
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
            break;
        case CHAR:
            if (value instanceof HiveChar) {
                value = ((HiveChar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case VARCHAR:
            if (value instanceof HiveVarchar) {
                value = ((HiveVarchar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case STRING:
            Object constantDescVal = literal.getValue();
            constantDescVal = constantDescVal instanceof NlsString ? constantDescVal : asUnicodeString((String) value);
            // Calcite treats a string literal as CHAR; we treat it as STRING instead, just like
            // Hive does
            RelDataType type = HiveParserTypeConverter.convert(hiveType, dtFactory);
            // if we get here, the value is not null
            type = dtFactory.createTypeWithNullability(type, false);
            calciteLiteral = rexBuilder.makeLiteral(constantDescVal, type, true);
            break;
        case DATE:
            LocalDate localDate = HiveParserUtils.getSessionHiveShim().toFlinkDate(value);
            DateString dateString = new DateString(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth());
            calciteLiteral = rexBuilder.makeDateLiteral(dateString);
            break;
        case TIMESTAMP:
            TimestampString timestampString;
            if (value instanceof Calendar) {
                timestampString = TimestampString.fromCalendarFields((Calendar) value);
            } else {
                LocalDateTime localDateTime = HiveParserUtils.getSessionHiveShim().toFlinkTimestamp(value);
                timestampString = new TimestampString(localDateTime.getYear(), localDateTime.getMonthValue(), localDateTime.getDayOfMonth(), localDateTime.getHour(), localDateTime.getMinute(), localDateTime.getSecond());
                timestampString = timestampString.withNanos(localDateTime.getNano());
            }
            // hive always treats timestamp with precision 9
            calciteLiteral = rexBuilder.makeTimestampLiteral(timestampString, 9);
            break;
        case VOID:
            calciteLiteral = cluster.getRexBuilder().makeLiteral(null, dtFactory.createSqlType(SqlTypeName.NULL), true);
            break;
        case BINARY:
        case UNKNOWN:
        default:
            if (hiveShim.isIntervalYearMonthType(hiveTypeCategory)) {
                // Calcite year-month literal value is months as BigDecimal
                BigDecimal totalMonths = BigDecimal.valueOf(((HiveParserIntervalYearMonth) value).getTotalMonths());
                calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            } else if (hiveShim.isIntervalDayTimeType(hiveTypeCategory)) {
                // Calcite day-time interval is millis value as BigDecimal
                // Seconds converted to millis
                BigDecimal secsValueBd = BigDecimal.valueOf(((HiveParserIntervalDayTime) value).getTotalSeconds() * 1000);
                // Nanos converted to millis
                BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveParserIntervalDayTime) value).getNanos(), 6);
                calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
            } else {
                throw new RuntimeException("UnSupported Literal type " + hiveTypeCategory);
            }
    }
    return calciteLiteral;
}
Also used : LocalDateTime(java.time.LocalDateTime) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) RelDataType(org.apache.calcite.rel.type.RelDataType) LocalDate(java.time.LocalDate) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) RexBuilder(org.apache.calcite.rex.RexBuilder) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) Calendar(java.util.Calendar) Decimal128(org.apache.hadoop.hive.common.type.Decimal128) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) DateString(org.apache.calcite.util.DateString) BigInteger(java.math.BigInteger) NlsString(org.apache.calcite.util.NlsString) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) TimestampString(org.apache.calcite.util.TimestampString) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) HiveParserIntervalDayTime(org.apache.flink.table.planner.delegation.hive.copy.HiveParserIntervalDayTime) RexNode(org.apache.calcite.rex.RexNode)
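
The pattern to note: interval literals cannot be built without a SqlIntervalQualifier, and every qualifier requires a parser position even though no SQL text exists here, so the code passes a dummy SqlParserPos(1, 1). Below is a minimal sketch of that branch in isolation, assuming only Calcite on the classpath (the class and helper names are illustrative; in the method above the RexBuilder comes from the RelOptCluster):

import java.math.BigDecimal;

import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;

class IntervalLiteralSketch {

    // Hypothetical helper: builds an INTERVAL YEAR TO MONTH literal from a month count,
    // mirroring the isIntervalYearMonthType branch above. The dummy SqlParserPos(1, 1)
    // satisfies the qualifier's position argument when there is no SQL source text.
    static RexNode yearMonthInterval(RexBuilder rexBuilder, long totalMonths) {
        SqlIntervalQualifier qualifier =
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1));
        return rexBuilder.makeIntervalLiteral(BigDecimal.valueOf(totalMonths), qualifier);
    }
}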

Example 17 with SqlParserPos

use of org.apache.calcite.sql.parser.SqlParserPos in project flink by apache.

the class HiveParserBaseSemanticAnalyzer method getBound.

public static RexWindowBound getBound(HiveParserWindowingSpec.BoundarySpec spec, RelOptCluster cluster) {
    RexWindowBound res = null;
    if (spec != null) {
        SqlParserPos dummyPos = new SqlParserPos(1, 1);
        SqlNode amt = spec.getAmt() == 0 || spec.getAmt() == HiveParserWindowingSpec.BoundarySpec.UNBOUNDED_AMOUNT ? null : SqlLiteral.createExactNumeric(String.valueOf(spec.getAmt()), new SqlParserPos(2, 2));
        RexNode amtLiteral = amt == null ? null : cluster.getRexBuilder().makeLiteral(spec.getAmt(), cluster.getTypeFactory().createSqlType(SqlTypeName.INTEGER), true);
        switch(spec.getDirection()) {
            case PRECEDING:
                if (amt == null) {
                    res = RexWindowBound.create(SqlWindow.createUnboundedPreceding(dummyPos), null);
                } else {
                    SqlCall call = (SqlCall) SqlWindow.createPreceding(amt, dummyPos);
                    res = RexWindowBound.create(call, cluster.getRexBuilder().makeCall(call.getOperator(), amtLiteral));
                }
                break;
            case CURRENT:
                res = RexWindowBound.create(SqlWindow.createCurrentRow(dummyPos), null);
                break;
            case FOLLOWING:
                if (amt == null) {
                    res = RexWindowBound.create(SqlWindow.createUnboundedFollowing(dummyPos), null);
                } else {
                    SqlCall call = (SqlCall) SqlWindow.createFollowing(amt, dummyPos);
                    res = RexWindowBound.create(call, cluster.getRexBuilder().makeCall(call.getOperator(), amtLiteral));
                }
                break;
        }
    }
    return res;
}
Also used : SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlCall(org.apache.calcite.sql.SqlCall) RexWindowBound(org.apache.calcite.rex.RexWindowBound) SqlNode(org.apache.calcite.sql.SqlNode) RexNode(org.apache.calcite.rex.RexNode)
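
The dummy position serves two roles here: SqlWindow's bound factories (createUnboundedPreceding, createCurrentRow, and so on) each take a SqlParserPos, and the exact-numeric SqlLiteral for the bound amount takes another. A minimal sketch of the two amount-less cases, assuming Calcite on the classpath (class and method names are illustrative):

import org.apache.calcite.rex.RexWindowBound;
import org.apache.calcite.sql.SqlWindow;
import org.apache.calcite.sql.parser.SqlParserPos;

class WindowBoundSketch {

    // Sketch: bounds that need no amount literal are built from a dummy position alone.
    static RexWindowBound unboundedPreceding() {
        SqlParserPos dummyPos = new SqlParserPos(1, 1);
        return RexWindowBound.create(SqlWindow.createUnboundedPreceding(dummyPos), null);
    }

    static RexWindowBound currentRow() {
        SqlParserPos dummyPos = new SqlParserPos(1, 1);
        return RexWindowBound.create(SqlWindow.createCurrentRow(dummyPos), null);
    }
}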

Example 18 with SqlParserPos

use of org.apache.calcite.sql.parser.SqlParserPos in project flink by apache.

the class HiveParserTypeConverter method convert.

public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
    RelDataType convertedType = null;
    HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
    switch(type.getPrimitiveCategory()) {
        case VOID:
            convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
            break;
        case BOOLEAN:
            convertedType = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
            break;
        case BYTE:
            convertedType = dtFactory.createSqlType(SqlTypeName.TINYINT);
            break;
        case SHORT:
            convertedType = dtFactory.createSqlType(SqlTypeName.SMALLINT);
            break;
        case INT:
            convertedType = dtFactory.createSqlType(SqlTypeName.INTEGER);
            break;
        case LONG:
            convertedType = dtFactory.createSqlType(SqlTypeName.BIGINT);
            break;
        case FLOAT:
            convertedType = dtFactory.createSqlType(SqlTypeName.FLOAT);
            break;
        case DOUBLE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
            break;
        case STRING:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case DATE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
            break;
        case TIMESTAMP:
            convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP, 9);
            break;
        case BINARY:
            convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
            break;
        case DECIMAL:
            DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
            convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
            break;
        case VARCHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case CHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.CHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case UNKNOWN:
            convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
            break;
        default:
            if (hiveShim.isIntervalYearMonthType(type.getPrimitiveCategory())) {
                convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            } else if (hiveShim.isIntervalDayTimeType(type.getPrimitiveCategory())) {
                convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
            }
    }
    if (null == convertedType) {
        throw new RuntimeException("Unsupported Type : " + type.getTypeName());
    }
    return dtFactory.createTypeWithNullability(convertedType, true);
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) RelDataType(org.apache.calcite.rel.type.RelDataType) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim)
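
SqlParserPos appears only in the two interval branches: createSqlIntervalType takes a SqlIntervalQualifier, which again needs a dummy position. A minimal sketch of that pattern with Calcite's default type factory (the Hive shim checks from the method above are omitted; the class name is illustrative):

import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;

class IntervalTypeSketch {

    // Sketch: builds the INTERVAL YEAR TO MONTH type exactly as the default branch above does.
    static RelDataType yearMonthType() {
        RelDataTypeFactory factory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        return factory.createSqlIntervalType(
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
    }
}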

Example 19 with SqlParserPos

use of org.apache.calcite.sql.parser.SqlParserPos in project hive by apache.

the class TypeConverter method convert.

public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
    RelDataType convertedType = null;
    switch(type.getPrimitiveCategory()) {
        case VOID:
            convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
            break;
        case BOOLEAN:
            convertedType = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
            break;
        case BYTE:
            convertedType = dtFactory.createSqlType(SqlTypeName.TINYINT);
            break;
        case SHORT:
            convertedType = dtFactory.createSqlType(SqlTypeName.SMALLINT);
            break;
        case INT:
            convertedType = dtFactory.createSqlType(SqlTypeName.INTEGER);
            break;
        case LONG:
            convertedType = dtFactory.createSqlType(SqlTypeName.BIGINT);
            break;
        case FLOAT:
            convertedType = dtFactory.createSqlType(SqlTypeName.FLOAT);
            break;
        case DOUBLE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
            break;
        case STRING:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case DATE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
            break;
        case TIMESTAMP:
            convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
            break;
        case TIMESTAMPLOCALTZ:
            convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE);
            break;
        case INTERVAL_YEAR_MONTH:
            convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            break;
        case INTERVAL_DAY_TIME:
            convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
            break;
        case BINARY:
            convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
            break;
        case DECIMAL:
            DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
            convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
            break;
        case VARCHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case CHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.CHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case UNKNOWN:
            convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
            break;
    }
    if (null == convertedType) {
        throw new RuntimeException("Unsupported Type : " + type.getTypeName());
    }
    return dtFactory.createTypeWithNullability(convertedType, true);
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) RelDataType(org.apache.calcite.rel.type.RelDataType)
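
Unlike the Flink copy in Example 18, this converter does not consult a session-bound shim, so it can be exercised directly. A minimal sketch, assuming Hive and Calcite on the classpath (TypeInfoFactory supplies the input type; the printed string is just for inspection):

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

class TypeConverterSketch {

    static void demo() {
        RelDataTypeFactory factory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        // Hits the INTERVAL_DAY_TIME branch: yields a nullable INTERVAL DAY TO SECOND type
        // whose qualifier was created with the dummy SqlParserPos(1, 1).
        RelDataType dayTime = TypeConverter.convert(TypeInfoFactory.intervalDayTimeTypeInfo, factory);
        System.out.println(dayTime.getFullTypeString());
    }
}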

Example 20 with SqlParserPos

use of org.apache.calcite.sql.parser.SqlParserPos in project hive by apache.

the class RexNodeConverter method convert.

protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
    final RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    final PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    final RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);
    PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
    RexNode calciteLiteral = null;
    // If value is null, the type should also be VOID.
    if (value == null) {
        hiveTypeCategory = PrimitiveCategory.VOID;
    }
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch(hiveTypeCategory) {
        case BOOLEAN:
            calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
            break;
        case BYTE:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
            break;
        case SHORT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
            break;
        case INT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
            break;
        case LONG:
            calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
            break;
        case DECIMAL:
            if (value instanceof HiveDecimal) {
                value = ((HiveDecimal) value).bigDecimalValue();
            } else if (value instanceof Decimal128) {
                value = ((Decimal128) value).toBigDecimal();
            }
            if (value == null) {
                // For now, we will not run CBO in the presence of invalid decimal literals.
                // TODO: return createNullLiteral(literal);
                throw new CalciteSemanticException("Expression " + literal.getExprString() + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
            }
            calciteLiteral = rexBuilder.makeExactLiteral((BigDecimal) value, calciteDataType);
            break;
        case FLOAT:
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
            break;
        case DOUBLE:
            // TODO: The best solution is to support NaN in expression reduction.
            if (Double.isNaN((Double) value)) {
                throw new CalciteSemanticException("NaN", UnsupportedFeature.Invalid_decimal);
            }
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
            break;
        case CHAR:
            if (value instanceof HiveChar) {
                value = ((HiveChar) value).getValue();
            }
            final int lengthChar = TypeInfoUtils.getCharacterLengthForType(hiveType);
            RelDataType charType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(rexBuilder.getTypeFactory().createSqlType(SqlTypeName.CHAR, lengthChar), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            calciteLiteral = rexBuilder.makeLiteral(RexNodeExprFactory.makeHiveUnicodeString((String) value), charType, false);
            break;
        case VARCHAR:
            if (value instanceof HiveVarchar) {
                value = ((HiveVarchar) value).getValue();
            }
            final int lengthVarchar = TypeInfoUtils.getCharacterLengthForType(hiveType);
            RelDataType varcharType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, lengthVarchar), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            calciteLiteral = rexBuilder.makeLiteral(RexNodeExprFactory.makeHiveUnicodeString((String) value), varcharType, true);
            break;
        case STRING:
            RelDataType stringType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            calciteLiteral = rexBuilder.makeLiteral(RexNodeExprFactory.makeHiveUnicodeString((String) value), stringType, true);
            break;
        case DATE:
            final Date date = (Date) value;
            calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromDaysSinceEpoch(date.toEpochDay()));
            break;
        case TIMESTAMP:
            final TimestampString tsString;
            if (value instanceof Calendar) {
                tsString = TimestampString.fromCalendarFields((Calendar) value);
            } else {
                final Timestamp ts = (Timestamp) value;
                tsString = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli()).withNanos(ts.getNanos());
            }
            // Must call makeLiteral, not makeTimestampLiteral
            // to have the RexBuilder.roundTime logic kick in
            calciteLiteral = rexBuilder.makeLiteral(tsString, rexBuilder.getTypeFactory().createSqlType(SqlTypeName.TIMESTAMP, rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP)), false);
            break;
        case TIMESTAMPLOCALTZ:
            final TimestampString tsLocalTZString;
            Instant i = ((TimestampTZ) value).getZonedDateTime().toInstant();
            tsLocalTZString = TimestampString.fromMillisSinceEpoch(i.toEpochMilli()).withNanos(i.getNano());
            calciteLiteral = rexBuilder.makeTimestampWithLocalTimeZoneLiteral(tsLocalTZString, rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));
            break;
        case INTERVAL_YEAR_MONTH:
            // Calcite year-month literal value is months as BigDecimal
            BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
            calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            break;
        case INTERVAL_DAY_TIME:
            // Calcite day-time interval is millis value as BigDecimal
            // Seconds converted to millis
            BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
            // Nanos converted to millis
            BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
            calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
            break;
        case VOID:
            calciteLiteral = rexBuilder.makeLiteral(null, calciteDataType, true);
            break;
        case BINARY:
        case UNKNOWN:
        default:
            throw new RuntimeException("Unsupported Literal");
    }
    return calciteLiteral;
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) RelDataType(org.apache.calcite.rel.type.RelDataType) GenericUDFTimestamp(org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) Calendar(java.util.Calendar) Instant(java.time.Instant) Decimal128(org.apache.hadoop.hive.common.type.Decimal128) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) Date(org.apache.hadoop.hive.common.type.Date) HiveFloorDate(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFloorDate) HiveExtractDate(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveExtractDate) GenericUDFToDate(org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) TimestampString(org.apache.calcite.util.TimestampString) RexNode(org.apache.calcite.rex.RexNode)
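
The INTERVAL_DAY_TIME branch encodes the interval as milliseconds in a single BigDecimal: seconds are scaled by 1000, and nanoseconds become milliseconds via BigDecimal.valueOf(nanos, 6), i.e. an unscaled value with scale 6 (nanos / 10^6). A minimal arithmetic sketch with hypothetical numbers:

import java.math.BigDecimal;

class IntervalMillisSketch {

    public static void main(String[] args) {
        long totalSeconds = 90L;    // hypothetical interval: 1 minute 30 seconds...
        int nanos = 500_000_000;    // ...plus half a second

        BigDecimal secsAsMillis = BigDecimal.valueOf(totalSeconds * 1000);  // 90000
        BigDecimal nanosAsMillis = BigDecimal.valueOf(nanos, 6);            // 500.000000
        // The sum, 90500.000000, is what rexBuilder.makeIntervalLiteral receives above.
        System.out.println(secsAsMillis.add(nanosAsMillis));
    }
}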

Aggregations

SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos): 42 uses
SqlNode (org.apache.calcite.sql.SqlNode): 17 uses
SqlIntervalQualifier (org.apache.calcite.sql.SqlIntervalQualifier): 11 uses
RelDataType (org.apache.calcite.rel.type.RelDataType): 9 uses
SqlIdentifier (org.apache.calcite.sql.SqlIdentifier): 9 uses
BigDecimal (java.math.BigDecimal): 8 uses
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 6 uses
SqlCall (org.apache.calcite.sql.SqlCall): 5 uses
ArrayList (java.util.ArrayList): 4 uses
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField): 4 uses
SqlNodeList (org.apache.calcite.sql.SqlNodeList): 4 uses
ImmutableList (com.google.common.collect.ImmutableList): 3 uses
HashMap (java.util.HashMap): 3 uses
Map (java.util.Map): 3 uses
RexNode (org.apache.calcite.rex.RexNode): 3 uses
SqlNumericLiteral (org.apache.calcite.sql.SqlNumericLiteral): 3 uses
SqlOperator (org.apache.calcite.sql.SqlOperator): 3 uses
Calendar (java.util.Calendar): 2 uses
IdentityHashMap (java.util.IdentityHashMap): 2 uses
RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory): 2 uses