
Example 11 with HiveIntervalYearMonth

use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class ASTBuilder, the method literal:

public static ASTNode literal(RexLiteral literal) {
    Object val = null;
    int type = 0;
    SqlTypeName sqlType = literal.getType().getSqlTypeName();
    switch(sqlType) {
        case BINARY:
        case DATE:
        case TIME:
        case TIMESTAMP:
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
        case INTERVAL_DAY:
        case INTERVAL_DAY_HOUR:
        case INTERVAL_DAY_MINUTE:
        case INTERVAL_DAY_SECOND:
        case INTERVAL_HOUR:
        case INTERVAL_HOUR_MINUTE:
        case INTERVAL_HOUR_SECOND:
        case INTERVAL_MINUTE:
        case INTERVAL_MINUTE_SECOND:
        case INTERVAL_MONTH:
        case INTERVAL_SECOND:
        case INTERVAL_YEAR:
        case INTERVAL_YEAR_MONTH:
        case MAP:
        case ARRAY:
        case ROW:
            if (literal.getValue() == null) {
                return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
            }
            break;
        case TINYINT:
        case SMALLINT:
        case INTEGER:
        case BIGINT:
        case DOUBLE:
        case DECIMAL:
        case FLOAT:
        case REAL:
        case VARCHAR:
        case CHAR:
        case BOOLEAN:
            if (literal.getValue3() == null) {
                return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
            }
    }
    switch(sqlType) {
        case TINYINT:
        case SMALLINT:
        case INTEGER:
        case BIGINT:
            val = literal.getValue3();
            // Calcite represents integral literals as BigDecimal; Hive distinguishes
            // the integral types (most importantly via IntegralLiteral), so narrow to a long first.
            if (val instanceof BigDecimal) {
                val = ((BigDecimal) val).longValue();
            }
            switch(sqlType) {
                case TINYINT:
                    val += "Y";
                    break;
                case SMALLINT:
                    val += "S";
                    break;
                case INTEGER:
                    val += "";
                    break;
                case BIGINT:
                    val += "L";
                    break;
            }
            type = HiveParser.IntegralLiteral;
            break;
        case DOUBLE:
            val = literal.getValue3() + "D";
            type = HiveParser.NumberLiteral;
            break;
        case DECIMAL:
            val = literal.getValue3() + "BD";
            type = HiveParser.NumberLiteral;
            break;
        case FLOAT:
        case REAL:
            val = literal.getValue3() + "F";
            type = HiveParser.Number;
            break;
        case VARCHAR:
        case CHAR:
            val = literal.getValue3();
            String escapedVal = BaseSemanticAnalyzer.escapeSQLString(String.valueOf(val));
            type = HiveParser.StringLiteral;
            val = "'" + escapedVal + "'";
            break;
        case BOOLEAN:
            val = literal.getValue3();
            type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE : HiveParser.KW_FALSE;
            break;
        case DATE:
            val = "'" + literal.getValueAs(DateString.class).toString() + "'";
            type = HiveParser.TOK_DATELITERAL;
            break;
        case TIME:
            val = "'" + literal.getValueAs(TimeString.class).toString() + "'";
            type = HiveParser.TOK_TIMESTAMPLITERAL;
            break;
        case TIMESTAMP:
            val = "'" + literal.getValueAs(TimestampString.class).toString() + "'";
            type = HiveParser.TOK_TIMESTAMPLITERAL;
            break;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            // Calcite stores timestamp with local time-zone in UTC internally, thus
            // when we bring it back, we need to add the UTC suffix.
            val = "'" + literal.getValueAs(TimestampString.class).toString() + " UTC'";
            type = HiveParser.TOK_TIMESTAMPLOCALTZLITERAL;
            break;
        case INTERVAL_YEAR:
        case INTERVAL_MONTH:
        case INTERVAL_YEAR_MONTH:
            {
                type = HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL;
                BigDecimal monthsBd = (BigDecimal) literal.getValue();
                HiveIntervalYearMonth intervalYearMonth = new HiveIntervalYearMonth(monthsBd.intValue());
                val = "'" + intervalYearMonth.toString() + "'";
            }
            break;
        case INTERVAL_DAY:
        case INTERVAL_DAY_HOUR:
        case INTERVAL_DAY_MINUTE:
        case INTERVAL_DAY_SECOND:
        case INTERVAL_HOUR:
        case INTERVAL_HOUR_MINUTE:
        case INTERVAL_HOUR_SECOND:
        case INTERVAL_MINUTE:
        case INTERVAL_MINUTE_SECOND:
        case INTERVAL_SECOND:
            {
                type = HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL;
                BigDecimal millisBd = (BigDecimal) literal.getValue();
                // Calcite literal is in millis, convert to seconds
                BigDecimal secsBd = millisBd.divide(BigDecimal.valueOf(1000));
                HiveIntervalDayTime intervalDayTime = new HiveIntervalDayTime(secsBd);
                val = "'" + intervalDayTime.toString() + "'";
            }
            break;
        case NULL:
            type = HiveParser.TOK_NULL;
            break;
        // BINARY and ROW types should not be seen here.
        case BINARY:
        case ROW:
        default:
            throw new RuntimeException("Unsupported Type: " + sqlType);
    }
    return (ASTNode) ParseDriver.adaptor.create(type, String.valueOf(val));
}
Also used : HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) TimeString(org.apache.calcite.util.TimeString) DateString(org.apache.calcite.util.DateString) TimestampString(org.apache.calcite.util.TimestampString) BigDecimal(java.math.BigDecimal) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
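
For orientation, here is a minimal, self-contained sketch of the two interval conversions performed above, using only the HiveIntervalYearMonth and HiveIntervalDayTime constructors already called in the method (the printed forms in the comments are approximate):

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalLiteralSketch {
    public static void main(String[] args) {
        // Year-month intervals: the Calcite literal is a total month count.
        HiveIntervalYearMonth ym = new HiveIntervalYearMonth(new BigDecimal("14").intValue());
        System.out.println("'" + ym + "'"); // expected: '1-2' (1 year, 2 months)

        // Day-time intervals: the Calcite literal is in milliseconds, while the
        // HiveIntervalDayTime constructor takes seconds as a BigDecimal.
        BigDecimal millisBd = BigDecimal.valueOf(90061000L); // 1 day 1 hour 1 min 1 sec
        HiveIntervalDayTime dt = new HiveIntervalDayTime(millisBd.divide(BigDecimal.valueOf(1000)));
        System.out.println("'" + dt + "'"); // expected: '1 01:01:01.000000000'
    }
}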

Example 12 with HiveIntervalYearMonth

use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class GenericUDF, the method getIntervalYearMonthValue:

protected HiveIntervalYearMonth getIntervalYearMonthValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes, Converter[] converters) throws HiveException {
    Object obj;
    if ((obj = arguments[i].get()) == null) {
        return null;
    }
    HiveIntervalYearMonth intervalYearMonth;
    switch(inputTypes[i]) {
        case STRING:
        case VARCHAR:
        case CHAR:
            String intervalYearMonthStr = converters[i].convert(obj).toString();
            intervalYearMonth = HiveIntervalYearMonth.valueOf(intervalYearMonthStr);
            break;
        case INTERVAL_YEAR_MONTH:
            Object writableValue = converters[i].convert(obj);
            intervalYearMonth = ((HiveIntervalYearMonthWritable) writableValue).getHiveIntervalYearMonth();
            break;
        default:
            // report the offending argument's position instead of a hardcoded 0
            throw new UDFArgumentTypeException(i, getFuncName() + " only takes INTERVAL_YEAR_MONTH and STRING_GROUP types, got " + inputTypes[i]);
    }
    return intervalYearMonth;
}
Also used : HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)
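
The STRING_GROUP branch above delegates to HiveIntervalYearMonth.valueOf, which parses the same "years-months" text that toString produces. A quick sketch (the getYears/getMonths accessor names are the ones I'd expect on the Hive common type; getTotalMonths and valueOf appear in the examples here):

import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalParseSketch {
    public static void main(String[] args) {
        HiveIntervalYearMonth parsed = HiveIntervalYearMonth.valueOf("2-6");
        System.out.println(parsed.getYears());       // 2
        System.out.println(parsed.getMonths());      // 6
        System.out.println(parsed.getTotalMonths()); // 30
    }
}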

Example 13 with HiveIntervalYearMonth

use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class RexNodeExprFactory, the method createIntervalYearConstantExpr:

/**
 * {@inheritDoc}
 */
@Override
protected RexLiteral createIntervalYearConstantExpr(String value) {
    HiveIntervalYearMonth v = new HiveIntervalYearMonth(Integer.parseInt(value), 0);
    BigDecimal totalMonths = BigDecimal.valueOf(v.getTotalMonths());
    return rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
}
Also used : HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) BigDecimal(java.math.BigDecimal)
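
The literal is handed to Calcite as a total month count because the qualifier spans YEAR to MONTH. A minimal sketch of just that encoding step, without the RexBuilder:

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalYearConstantSketch {
    public static void main(String[] args) {
        // For a query fragment like INTERVAL '5' YEAR, value arrives as the string "5".
        HiveIntervalYearMonth v = new HiveIntervalYearMonth(Integer.parseInt("5"), 0);
        BigDecimal totalMonths = BigDecimal.valueOf(v.getTotalMonths());
        System.out.println(totalMonths); // 60
    }
}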

Example 14 with HiveIntervalYearMonth

use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class DateTimeMath, the method add:

public HiveIntervalYearMonth add(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
    if (left == null || right == null) {
        return null;
    }
    return new HiveIntervalYearMonth(left.getTotalMonths() + right.getTotalMonths());
}
Also used : HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth)
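
Because the type is normalized to a total month count, addition is plain integer arithmetic; carrying months over into years happens inside the HiveIntervalYearMonth constructor. For example:

import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalAddSketch {
    public static void main(String[] args) {
        HiveIntervalYearMonth a = new HiveIntervalYearMonth(1, 6); // "1-6" = 18 months
        HiveIntervalYearMonth b = new HiveIntervalYearMonth(0, 8); // "0-8" =  8 months
        // Same arithmetic as DateTimeMath.add above.
        HiveIntervalYearMonth sum = new HiveIntervalYearMonth(a.getTotalMonths() + b.getTotalMonths());
        System.out.println(sum); // "2-2" (26 months)
    }
}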

Example 15 with HiveIntervalYearMonth

use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class VerifyFastRow, the method serializeWrite:

public static void serializeWrite(SerializeWrite serializeWrite, TypeInfo typeInfo, Object object) throws IOException {
    if (object == null) {
        serializeWrite.writeNull();
        return;
    }
    switch(typeInfo.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                switch(primitiveTypeInfo.getPrimitiveCategory()) {
                    case BOOLEAN:
                        {
                            boolean value = ((BooleanWritable) object).get();
                            serializeWrite.writeBoolean(value);
                        }
                        break;
                    case BYTE:
                        {
                            byte value = ((ByteWritable) object).get();
                            serializeWrite.writeByte(value);
                        }
                        break;
                    case SHORT:
                        {
                            short value = ((ShortWritable) object).get();
                            serializeWrite.writeShort(value);
                        }
                        break;
                    case INT:
                        {
                            int value = ((IntWritable) object).get();
                            serializeWrite.writeInt(value);
                        }
                        break;
                    case LONG:
                        {
                            long value = ((LongWritable) object).get();
                            serializeWrite.writeLong(value);
                        }
                        break;
                    case FLOAT:
                        {
                            float value = ((FloatWritable) object).get();
                            serializeWrite.writeFloat(value);
                        }
                        break;
                    case DOUBLE:
                        {
                            double value = ((DoubleWritable) object).get();
                            serializeWrite.writeDouble(value);
                        }
                        break;
                    case STRING:
                        {
                            Text value = (Text) object;
                            byte[] stringBytes = value.getBytes();
                            int stringLength = stringBytes.length;
                            serializeWrite.writeString(stringBytes, 0, stringLength);
                        }
                        break;
                    case CHAR:
                        {
                            HiveChar value = ((HiveCharWritable) object).getHiveChar();
                            serializeWrite.writeHiveChar(value);
                        }
                        break;
                    case VARCHAR:
                        {
                            HiveVarchar value = ((HiveVarcharWritable) object).getHiveVarchar();
                            serializeWrite.writeHiveVarchar(value);
                        }
                        break;
                    case DECIMAL:
                        {
                            HiveDecimal value = ((HiveDecimalWritable) object).getHiveDecimal();
                            DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                            serializeWrite.writeHiveDecimal(value, decTypeInfo.scale());
                        }
                        break;
                    case DATE:
                        {
                            Date value = ((DateWritableV2) object).get();
                            serializeWrite.writeDate(value);
                        }
                        break;
                    case TIMESTAMP:
                        {
                            Timestamp value = ((TimestampWritableV2) object).getTimestamp();
                            serializeWrite.writeTimestamp(value);
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        {
                            HiveIntervalYearMonth value = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth();
                            serializeWrite.writeHiveIntervalYearMonth(value);
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        {
                            HiveIntervalDayTime value = ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime();
                            serializeWrite.writeHiveIntervalDayTime(value);
                        }
                        break;
                    case BINARY:
                        {
                            BytesWritable byteWritable = (BytesWritable) object;
                            byte[] binaryBytes = byteWritable.getBytes();
                            int length = byteWritable.getLength();
                            serializeWrite.writeBinary(binaryBytes, 0, length);
                        }
                        break;
                    default:
                        throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory().name());
                }
            }
            break;
        case LIST:
            {
                ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
                TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
                ArrayList<Object> elements = (ArrayList<Object>) object;
                serializeWrite.beginList(elements);
                boolean isFirst = true;
                for (Object elementObject : elements) {
                    if (isFirst) {
                        isFirst = false;
                    } else {
                        serializeWrite.separateList();
                    }
                    if (elementObject == null) {
                        serializeWrite.writeNull();
                    } else {
                        serializeWrite(serializeWrite, elementTypeInfo, elementObject);
                    }
                }
                serializeWrite.finishList();
            }
            break;
        case MAP:
            {
                MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
                TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
                TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
                Map<Object, Object> hashMap = (Map<Object, Object>) object;
                serializeWrite.beginMap(hashMap);
                boolean isFirst = true;
                for (Map.Entry<Object, Object> entry : hashMap.entrySet()) {
                    if (isFirst) {
                        isFirst = false;
                    } else {
                        serializeWrite.separateKeyValuePair();
                    }
                    if (entry.getKey() == null) {
                        serializeWrite.writeNull();
                    } else {
                        serializeWrite(serializeWrite, keyTypeInfo, entry.getKey());
                    }
                    serializeWrite.separateKey();
                    if (entry.getValue() == null) {
                        serializeWrite.writeNull();
                    } else {
                        serializeWrite(serializeWrite, valueTypeInfo, entry.getValue());
                    }
                }
                serializeWrite.finishMap();
            }
            break;
        case STRUCT:
            {
                StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
                List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
                List<Object> fieldValues = (List<Object>) object;
                final int size = fieldValues.size();
                serializeWrite.beginStruct(fieldValues);
                boolean isFirst = true;
                for (int i = 0; i < size; i++) {
                    if (isFirst) {
                        isFirst = false;
                    } else {
                        serializeWrite.separateStruct();
                    }
                    serializeWrite(serializeWrite, fieldTypeInfos.get(i), fieldValues.get(i));
                }
                serializeWrite.finishStruct();
            }
            break;
        case UNION:
            {
                UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
                List<TypeInfo> fieldTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
                final int size = fieldTypeInfos.size();
                StandardUnionObjectInspector.StandardUnion standardUnion = (StandardUnionObjectInspector.StandardUnion) object;
                byte tag = standardUnion.getTag();
                serializeWrite.beginUnion(tag);
                serializeWrite(serializeWrite, fieldTypeInfos.get(tag), standardUnion.getObject());
                serializeWrite.finishUnion();
            }
            break;
        default:
            throw new Error("Unknown category " + typeInfo.getCategory().name());
    }
}
Also used : StandardUnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) ArrayList(java.util.ArrayList) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ArrayList(java.util.ArrayList) List(java.util.List) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) Text(org.apache.hadoop.io.Text) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Date(org.apache.hadoop.hive.common.type.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)
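
In the INTERVAL_YEAR_MONTH branch above, the row object arrives as the Writable wrapper and is unwrapped before the primitive write. A small round-trip sketch, assuming HiveIntervalYearMonthWritable offers a constructor taking the value type (the getter is the one used in the code above):

import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;

public class IntervalWritableSketch {
    public static void main(String[] args) {
        HiveIntervalYearMonthWritable writable =
                new HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(1, 3));
        // This is the unwrapping step serializeWrite performs before calling
        // writeHiveIntervalYearMonth.
        HiveIntervalYearMonth unwrapped = writable.getHiveIntervalYearMonth();
        System.out.println(unwrapped); // "1-3"
    }
}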

Aggregations

HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth): 37
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 19
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 16
BytesWritable (org.apache.hadoop.io.BytesWritable): 14
Text (org.apache.hadoop.io.Text): 14
Date (org.apache.hadoop.hive.common.type.Date): 13
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 13
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 13
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 13
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 12
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 12
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 11
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 11
FloatWritable (org.apache.hadoop.io.FloatWritable): 11
IntWritable (org.apache.hadoop.io.IntWritable): 11
LongWritable (org.apache.hadoop.io.LongWritable): 11
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 10
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 10
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 9
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 9