Example 1 with SqlIntervalQualifier

Use of org.apache.calcite.sql.SqlIntervalQualifier in project hive by apache.

From class RexNodeConverter, method convert:

protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);
    PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
    RexNode calciteLiteral = null;
    // If value is null, the type should also be VOID.
    if (value == null) {
        hiveTypeCategory = PrimitiveCategory.VOID;
    }
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch(hiveTypeCategory) {
        case BOOLEAN:
            calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
            break;
        case BYTE:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
            break;
        case SHORT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
            break;
        case INT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
            break;
        case LONG:
            calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
            break;
        // TODO: is Decimal an exact numeric or approximate numeric?
        case DECIMAL:
            if (value instanceof HiveDecimal) {
                value = ((HiveDecimal) value).bigDecimalValue();
            } else if (value instanceof Decimal128) {
                value = ((Decimal128) value).toBigDecimal();
            }
            if (value == null) {
                // Invalid decimal literal; do not run CBO on it.
                // TODO: return createNullLiteral(literal);
                throw new CalciteSemanticException("Expression " + literal.getExprString() + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
            }
            BigDecimal bd = (BigDecimal) value;
            BigInteger unscaled = bd.unscaledValue();
            if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
                calciteLiteral = rexBuilder.makeExactLiteral(bd);
            } else {
                // CBO doesn't support unlimited precision decimals. In practice, this
                // will work...
                // An alternative would be to throw CboSemanticException and fall back
                // to no CBO.
                RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, unscaled.toString().length(), bd.scale());
                calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
            }
            break;
        case FLOAT:
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
            break;
        case DOUBLE:
            // TODO: The best solution is to support NaN in expression reduction.
            if (Double.isNaN((Double) value)) {
                throw new CalciteSemanticException("NaN", UnsupportedFeature.Invalid_decimal);
            }
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
            break;
        case CHAR:
            if (value instanceof HiveChar) {
                value = ((HiveChar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case VARCHAR:
            if (value instanceof HiveVarchar) {
                value = ((HiveVarchar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case STRING:
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case DATE:
            Calendar cal = new GregorianCalendar();
            cal.setTime((Date) value);
            calciteLiteral = rexBuilder.makeDateLiteral(cal);
            break;
        case TIMESTAMP:
            Calendar c = null;
            if (value instanceof Calendar) {
                c = (Calendar) value;
            } else {
                c = Calendar.getInstance();
                c.setTimeInMillis(((Timestamp) value).getTime());
            }
            calciteLiteral = rexBuilder.makeTimestampLiteral(c, RelDataType.PRECISION_NOT_SPECIFIED);
            break;
        case INTERVAL_YEAR_MONTH:
            // Calcite year-month literal value is months as BigDecimal
            BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
            calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            break;
        case INTERVAL_DAY_TIME:
            // Calcite day-time interval is millis value as BigDecimal
            // Seconds converted to millis
            BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
            // Nanos converted to millis (the scale of 6 divides the nanos value by 10^6)
            BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
            calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
            break;
        case VOID:
            calciteLiteral = cluster.getRexBuilder().makeLiteral(null, cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);
            break;
        case BINARY:
        case UNKNOWN:
        default:
            throw new RuntimeException("UnSupported Literal");
    }
    return calciteLiteral;
}
Also used: SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos), SqlIntervalQualifier (org.apache.calcite.sql.SqlIntervalQualifier), Calendar (java.util.Calendar), GregorianCalendar (java.util.GregorianCalendar), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), RelDataType (org.apache.calcite.rel.type.RelDataType), Decimal128 (org.apache.hadoop.hive.common.type.Decimal128), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), BigDecimal (java.math.BigDecimal), RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), RexBuilder (org.apache.calcite.rex.RexBuilder), BigInteger (java.math.BigInteger), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector), CalciteSemanticException (org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime), RexNode (org.apache.calcite.rex.RexNode)
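
To make the interval branches above concrete, here is a minimal, self-contained sketch. It is pure Calcite, not Hive code; the class name and stand-alone setup are illustrative assumptions. It shows what the INTERVAL_YEAR_MONTH and INTERVAL_DAY_TIME branches rely on: a year-month literal carries the total number of months, a day-time literal carries milliseconds, and the SqlIntervalQualifier supplies the interval unit range.

import java.math.BigDecimal;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;

public class IntervalLiteralSketch {
    public static void main(String[] args) {
        // Stand-alone type factory and RexBuilder; Hive obtains these from the RelOptCluster.
        RexBuilder rexBuilder = new RexBuilder(new JavaTypeFactoryImpl());
        // Year-month interval: 1 year 2 months => 14 months total, as a BigDecimal.
        BigDecimal totalMonths = BigDecimal.valueOf(14);
        RexNode yearMonth = rexBuilder.makeIntervalLiteral(totalMonths,
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO));
        // Day-time interval: the value is milliseconds, e.g. 1 second = 1000 ms.
        RexNode dayTime = rexBuilder.makeIntervalLiteral(BigDecimal.valueOf(1000),
                new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, SqlParserPos.ZERO));
        System.out.println(yearMonth);
        System.out.println(dayTime);
    }
}

The printed form varies by Calcite version, but the two literals carry interval types analogous to those produced by RexNodeConverter for Hive interval constants.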

Example 2 with SqlIntervalQualifier

Use of org.apache.calcite.sql.SqlIntervalQualifier in project hive by apache.

From class TypeConverter, method convert:

public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
    RelDataType convertedType = null;
    switch(type.getPrimitiveCategory()) {
        case VOID:
            convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
            break;
        case BOOLEAN:
            convertedType = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
            break;
        case BYTE:
            convertedType = dtFactory.createSqlType(SqlTypeName.TINYINT);
            break;
        case SHORT:
            convertedType = dtFactory.createSqlType(SqlTypeName.SMALLINT);
            break;
        case INT:
            convertedType = dtFactory.createSqlType(SqlTypeName.INTEGER);
            break;
        case LONG:
            convertedType = dtFactory.createSqlType(SqlTypeName.BIGINT);
            break;
        case FLOAT:
            convertedType = dtFactory.createSqlType(SqlTypeName.FLOAT);
            break;
        case DOUBLE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
            break;
        case STRING:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case DATE:
            convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
            break;
        case TIMESTAMP:
            convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
            break;
        case INTERVAL_YEAR_MONTH:
            convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            break;
        case INTERVAL_DAY_TIME:
            convertedType = dtFactory.createSqlIntervalType(new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
            break;
        case BINARY:
            convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
            break;
        case DECIMAL:
            DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
            convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
            break;
        case VARCHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.VARCHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case CHAR:
            convertedType = dtFactory.createTypeWithCharsetAndCollation(dtFactory.createSqlType(SqlTypeName.CHAR, ((BaseCharTypeInfo) type).getLength()), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            break;
        case UNKNOWN:
            convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
            break;
    }
    if (null == convertedType) {
        throw new RuntimeException("Unsupported Type : " + type.getTypeName());
    }
    return dtFactory.createTypeWithNullability(convertedType, true);
}
Also used: DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos), SqlIntervalQualifier (org.apache.calcite.sql.SqlIntervalQualifier), RelDataType (org.apache.calcite.rel.type.RelDataType)
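
The interval cases of this converter can likewise be exercised outside Hive. The following sketch (class name and setup are illustrative assumptions, using only Calcite APIs shown above) builds the same interval types via createSqlIntervalType and then applies the nullability wrapping that convert() performs at the end.

import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;

public class IntervalTypeSketch {
    public static void main(String[] args) {
        RelDataTypeFactory dtFactory = new JavaTypeFactoryImpl();
        // INTERVAL YEAR TO MONTH, as in the INTERVAL_YEAR_MONTH branch
        RelDataType yearMonth = dtFactory.createSqlIntervalType(
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO));
        // INTERVAL DAY TO SECOND, as in the INTERVAL_DAY_TIME branch
        RelDataType dayTime = dtFactory.createSqlIntervalType(
                new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, SqlParserPos.ZERO));
        // convert() finishes by making every converted type nullable
        System.out.println(dtFactory.createTypeWithNullability(yearMonth, true));
        System.out.println(dtFactory.createTypeWithNullability(dayTime, true));
    }
}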

Aggregations

RelDataType (org.apache.calcite.rel.type.RelDataType): 2
SqlIntervalQualifier (org.apache.calcite.sql.SqlIntervalQualifier): 2
SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos): 2
BigDecimal (java.math.BigDecimal): 1
BigInteger (java.math.BigInteger): 1
Calendar (java.util.Calendar): 1
GregorianCalendar (java.util.GregorianCalendar): 1
RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory): 1
RexBuilder (org.apache.calcite.rex.RexBuilder): 1
RexNode (org.apache.calcite.rex.RexNode): 1
Decimal128 (org.apache.hadoop.hive.common.type.Decimal128): 1
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 1
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 1
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 1
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 1
CalciteSemanticException (org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException): 1
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 1
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 1
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 1
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 1