Search in sources:

Example 1 with HiveIntervalYearMonth

Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class VectorizedRowBatchCtx, method addPartitionColsToBatch:

/**
   * Add the partition values to the batch as repeating column vectors.
   *
   * @param batch the vectorized row batch whose partition columns are populated
   * @param partitionValues one value per partition column, in column order; may be null
   */
public void addPartitionColsToBatch(VectorizedRowBatch batch, Object[] partitionValues) {
    if (partitionValues != null) {
        for (int i = 0; i < partitionColumnCount; i++) {
            Object value = partitionValues[i];
            int colIndex = dataColumnCount + i;
            String partitionColumnName = rowColumnNames[colIndex];
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) rowColumnTypeInfos[colIndex];
            switch(primitiveTypeInfo.getPrimitiveCategory()) {
                case BOOLEAN:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Boolean) value ? 1 : 0);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case BYTE:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Byte) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case SHORT:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Short) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case INT:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Integer) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case LONG:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Long) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case DATE:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill(DateWritable.dateToDays((Date) value));
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case TIMESTAMP:
                    {
                        TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Timestamp) value);
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case INTERVAL_YEAR_MONTH:
                    {
                        LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill(((HiveIntervalYearMonth) value).getTotalMonths());
                            lcv.isNull[0] = false;
                        }
                    }
                    break;
                case INTERVAL_DAY_TIME:
                    {
                        IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            icv.noNulls = false;
                            icv.isNull[0] = true;
                            icv.isRepeating = true;
                        } else {
                            icv.fill(((HiveIntervalDayTime) value));
                            icv.isNull[0] = false;
                        }
                    }
                    break;
                case FLOAT:
                    {
                        DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            dcv.noNulls = false;
                            dcv.isNull[0] = true;
                            dcv.isRepeating = true;
                        } else {
                            dcv.fill((Float) value);
                            dcv.isNull[0] = false;
                        }
                    }
                    break;
                case DOUBLE:
                    {
                        DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            dcv.noNulls = false;
                            dcv.isNull[0] = true;
                            dcv.isRepeating = true;
                        } else {
                            dcv.fill((Double) value);
                            dcv.isNull[0] = false;
                        }
                    }
                    break;
                case DECIMAL:
                    {
                        DecimalColumnVector dv = (DecimalColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            dv.noNulls = false;
                            dv.isNull[0] = true;
                            dv.isRepeating = true;
                        } else {
                            HiveDecimal hd = (HiveDecimal) value;
                            dv.set(0, hd);
                            dv.isRepeating = true;
                            dv.isNull[0] = false;
                        }
                    }
                    break;
                case BINARY:
                    {
                        BytesColumnVector bcv = (BytesColumnVector) batch.cols[colIndex];
                        byte[] bytes = (byte[]) value;
                        if (bytes == null) {
                            bcv.noNulls = false;
                            bcv.isNull[0] = true;
                            bcv.isRepeating = true;
                        } else {
                            bcv.fill(bytes);
                            bcv.isNull[0] = false;
                        }
                    }
                    break;
                case STRING:
                case CHAR:
                case VARCHAR:
                    {
                        BytesColumnVector bcv = (BytesColumnVector) batch.cols[colIndex];
                        if (value == null) {
                            bcv.noNulls = false;
                            bcv.isNull[0] = true;
                            bcv.isRepeating = true;
                        } else {
                            // Convert only when non-null to avoid a NullPointerException.
                            String sVal = value.toString();
                            bcv.setVal(0, sVal.getBytes());
                            bcv.isNull[0] = false;
                            bcv.isRepeating = true;
                        }
                    }
                    break;
                default:
                    throw new RuntimeException("Unable to recognize the partition type " + primitiveTypeInfo.getPrimitiveCategory() + " for column " + partitionColumnName);
            }
        }
    }
}
Also used : Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal)
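
For reference, here is a minimal standalone sketch (not taken from the Hive sources above) of what the INTERVAL_YEAR_MONTH branch does with a partition value: the interval is flattened to its total month count, and fill() repeats that single value across the whole column.

// Hypothetical sketch: an interval year-month partition value is stored in a
// LongColumnVector as its total month count.
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

public class IntervalYearMonthFillSketch {
    public static void main(String[] args) {
        // 14 total months, i.e. 1 year and 2 months.
        HiveIntervalYearMonth interval = new HiveIntervalYearMonth(14);
        LongColumnVector lcv = new LongColumnVector(1024);
        // fill() writes the value to entry 0 and marks the vector repeating,
        // so every row of the batch sees the same partition value.
        lcv.fill(interval.getTotalMonths());
        System.out.println(lcv.vector[0] + " months, isRepeating=" + lcv.isRepeating);
    }
}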

Example 2 with HiveIntervalYearMonth

Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class VerifyFast, method verifyDeserializeRead:

public static void verifyDeserializeRead(DeserializeRead deserializeRead, PrimitiveTypeInfo primitiveTypeInfo, Writable writable) throws IOException {
    boolean isNull;
    isNull = !deserializeRead.readNextField();
    if (isNull) {
        if (writable != null) {
            TestCase.fail("Field reports null but object is not null (class " + writable.getClass().getName() + ", " + writable.toString() + ")");
        }
        return;
    } else if (writable == null) {
        TestCase.fail("Field report not null but object is null");
    }
    switch(primitiveTypeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
            {
                boolean value = deserializeRead.currentBoolean;
                if (!(writable instanceof BooleanWritable)) {
                    TestCase.fail("Boolean expected writable not Boolean");
                }
                boolean expected = ((BooleanWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case BYTE:
            {
                byte value = deserializeRead.currentByte;
                if (!(writable instanceof ByteWritable)) {
                    TestCase.fail("Byte expected writable not Byte");
                }
                byte expected = ((ByteWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                }
            }
            break;
        case SHORT:
            {
                short value = deserializeRead.currentShort;
                if (!(writable instanceof ShortWritable)) {
                    TestCase.fail("Short expected writable not Short");
                }
                short expected = ((ShortWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case INT:
            {
                int value = deserializeRead.currentInt;
                if (!(writable instanceof IntWritable)) {
                    TestCase.fail("Integer expected writable not Integer");
                }
                int expected = ((IntWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case LONG:
            {
                long value = deserializeRead.currentLong;
                if (!(writable instanceof LongWritable)) {
                    TestCase.fail("Long expected writable not Long");
                }
                long expected = ((LongWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case FLOAT:
            {
                float value = deserializeRead.currentFloat;
                if (!(writable instanceof FloatWritable)) {
                    TestCase.fail("Float expected writable not Float");
                }
                float expected = ((FloatWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case DOUBLE:
            {
                double value = deserializeRead.currentDouble;
                if (!(writable instanceof DoubleWritable)) {
                    TestCase.fail("Double expected writable not Double");
                }
                double expected = ((DoubleWritable) writable).get();
                if (value != expected) {
                    TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                }
            }
            break;
        case STRING:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                String expected = ((Text) writable).toString();
                if (!string.equals(expected)) {
                    TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
                }
            }
            break;
        case CHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                HiveChar expected = ((HiveCharWritable) writable).getHiveChar();
                if (!hiveChar.equals(expected)) {
                    TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
                }
            }
            break;
        case VARCHAR:
            {
                byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                Text text = new Text(stringBytes);
                String string = text.toString();
                HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                HiveVarchar expected = ((HiveVarcharWritable) writable).getHiveVarchar();
                if (!hiveVarchar.equals(expected)) {
                    TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
                }
            }
            break;
        case DECIMAL:
            {
                HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                if (value == null) {
                    TestCase.fail("Decimal field evaluated to NULL");
                }
                HiveDecimal expected = ((HiveDecimalWritable) writable).getHiveDecimal();
                if (!value.equals(expected)) {
                    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                    int precision = decimalTypeInfo.getPrecision();
                    int scale = decimalTypeInfo.getScale();
                    TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                }
            }
            break;
        case DATE:
            {
                Date value = deserializeRead.currentDateWritable.get();
                Date expected = ((DateWritable) writable).get();
                if (!value.equals(expected)) {
                    TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case TIMESTAMP:
            {
                Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                Timestamp expected = ((TimestampWritable) writable).getTimestamp();
                if (!value.equals(expected)) {
                    TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_YEAR_MONTH:
            {
                HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) writable).getHiveIntervalYearMonth();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case INTERVAL_DAY_TIME:
            {
                HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) writable).getHiveIntervalDayTime();
                if (!value.equals(expected)) {
                    TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                }
            }
            break;
        case BINARY:
            {
                byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                BytesWritable bytesWritable = (BytesWritable) writable;
                byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
                if (byteArray.length != expected.length) {
                    TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                }
                for (int b = 0; b < byteArray.length; b++) {
                    if (byteArray[b] != expected[b]) {
                        TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                    }
                }
            }
            break;
        default:
            throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) Date(java.sql.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
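
The INTERVAL_YEAR_MONTH comparison above reduces to a small round trip: wrap a HiveIntervalYearMonth in its Writable, extract it again, and compare with equals(). The sketch below is illustrative rather than code from VerifyFast; the single-argument writable constructor is an assumption.

import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;

public class IntervalWritableRoundTrip {
    public static void main(String[] args) {
        HiveIntervalYearMonth value = new HiveIntervalYearMonth(14);
        // Assumed constructor: wrap the value in its Writable form.
        HiveIntervalYearMonthWritable writable = new HiveIntervalYearMonthWritable(value);
        // getHiveIntervalYearMonth() rebuilds the value object, as used above.
        HiveIntervalYearMonth roundTripped = writable.getHiveIntervalYearMonth();
        if (!roundTripped.equals(value)) {
            throw new AssertionError("HiveIntervalYearMonth mismatch: " + roundTripped);
        }
    }
}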

Example 3 with HiveIntervalYearMonth

Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class VectorizedRowBatchCtx, method addPartitionColsToBatch (ColumnVector[] overload):

public void addPartitionColsToBatch(ColumnVector[] cols, Object[] partitionValues) {
    if (partitionValues != null) {
        for (int i = 0; i < partitionColumnCount; i++) {
            Object value = partitionValues[i];
            int colIndex = dataColumnCount + i;
            String partitionColumnName = rowColumnNames[colIndex];
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) rowColumnTypeInfos[colIndex];
            switch(primitiveTypeInfo.getPrimitiveCategory()) {
                case BOOLEAN:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Boolean) value ? 1 : 0);
                        }
                    }
                    break;
                case BYTE:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Byte) value);
                        }
                    }
                    break;
                case SHORT:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Short) value);
                        }
                    }
                    break;
                case INT:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Integer) value);
                        }
                    }
                    break;
                case LONG:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Long) value);
                        }
                    }
                    break;
                case DATE:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill(DateWritable.dateToDays((Date) value));
                        }
                    }
                    break;
                case TIMESTAMP:
                    {
                        TimestampColumnVector lcv = (TimestampColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill((Timestamp) value);
                        }
                    }
                    break;
                case INTERVAL_YEAR_MONTH:
                    {
                        LongColumnVector lcv = (LongColumnVector) cols[colIndex];
                        if (value == null) {
                            lcv.noNulls = false;
                            lcv.isNull[0] = true;
                            lcv.isRepeating = true;
                        } else {
                            lcv.fill(((HiveIntervalYearMonth) value).getTotalMonths());
                        }
                    }
                    break;
                case INTERVAL_DAY_TIME:
                    {
                        IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) cols[colIndex];
                        if (value == null) {
                            icv.noNulls = false;
                            icv.isNull[0] = true;
                            icv.isRepeating = true;
                        } else {
                            icv.fill(((HiveIntervalDayTime) value));
                        }
                    }
                    break;
                case FLOAT:
                    {
                        DoubleColumnVector dcv = (DoubleColumnVector) cols[colIndex];
                        if (value == null) {
                            dcv.noNulls = false;
                            dcv.isNull[0] = true;
                            dcv.isRepeating = true;
                        } else {
                            dcv.fill((Float) value);
                        }
                    }
                    break;
                case DOUBLE:
                    {
                        DoubleColumnVector dcv = (DoubleColumnVector) cols[colIndex];
                        if (value == null) {
                            dcv.noNulls = false;
                            dcv.isNull[0] = true;
                            dcv.isRepeating = true;
                        } else {
                            dcv.fill((Double) value);
                        }
                    }
                    break;
                case DECIMAL:
                    {
                        DecimalColumnVector dv = (DecimalColumnVector) cols[colIndex];
                        if (value == null) {
                            dv.noNulls = false;
                            dv.isNull[0] = true;
                            dv.isRepeating = true;
                        } else {
                            dv.fill((HiveDecimal) value);
                        }
                    }
                    break;
                case BINARY:
                    {
                        BytesColumnVector bcv = (BytesColumnVector) cols[colIndex];
                        byte[] bytes = (byte[]) value;
                        if (bytes == null) {
                            bcv.noNulls = false;
                            bcv.isNull[0] = true;
                            bcv.isRepeating = true;
                        } else {
                            bcv.fill(bytes);
                        }
                    }
                    break;
                case STRING:
                case CHAR:
                case VARCHAR:
                    {
                        BytesColumnVector bcv = (BytesColumnVector) cols[colIndex];
                        if (value == null) {
                            bcv.noNulls = false;
                            bcv.isNull[0] = true;
                            bcv.isRepeating = true;
                        } else {
                            // Convert only when non-null to avoid a NullPointerException.
                            bcv.fill(value.toString().getBytes());
                        }
                    }
                    break;
                default:
                    throw new RuntimeException("Unable to recognize the partition type " + primitiveTypeInfo.getPrimitiveCategory() + " for column " + partitionColumnName);
            }
        }
    }
}
Also used : Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal)
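
Both overloads mark a null partition value with the same three flags. A minimal sketch of that convention: when isRepeating is set, entry 0 stands in for every row, so flagging isNull[0] and clearing noNulls marks the entire column as null.

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

public class NullPartitionColumnSketch {
    public static void main(String[] args) {
        LongColumnVector lcv = new LongColumnVector(1024);
        lcv.noNulls = false;     // the vector may contain nulls
        lcv.isNull[0] = true;    // entry 0 is null ...
        lcv.isRepeating = true;  // ... and entry 0 repeats for every row
        boolean row500IsNull = lcv.isRepeating ? lcv.isNull[0] : lcv.isNull[500];
        System.out.println("row 500 null? " + row500IsNull);
    }
}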

Example 4 with HiveIntervalYearMonth

Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class ASTBuilder, method literal:

public static ASTNode literal(RexLiteral literal, boolean useTypeQualInLiteral) {
    Object val = null;
    int type = 0;
    SqlTypeName sqlType = literal.getType().getSqlTypeName();
    switch(sqlType) {
        case BINARY:
        case DATE:
        case TIME:
        case TIMESTAMP:
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
        case INTERVAL_DAY:
        case INTERVAL_DAY_HOUR:
        case INTERVAL_DAY_MINUTE:
        case INTERVAL_DAY_SECOND:
        case INTERVAL_HOUR:
        case INTERVAL_HOUR_MINUTE:
        case INTERVAL_HOUR_SECOND:
        case INTERVAL_MINUTE:
        case INTERVAL_MINUTE_SECOND:
        case INTERVAL_MONTH:
        case INTERVAL_SECOND:
        case INTERVAL_YEAR:
        case INTERVAL_YEAR_MONTH:
            if (literal.getValue() == null) {
                return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
            }
            break;
        case TINYINT:
        case SMALLINT:
        case INTEGER:
        case BIGINT:
        case DOUBLE:
        case DECIMAL:
        case FLOAT:
        case REAL:
        case VARCHAR:
        case CHAR:
        case BOOLEAN:
            if (literal.getValue3() == null) {
                return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
            }
    }
    switch(sqlType) {
        case TINYINT:
            if (useTypeQualInLiteral) {
                val = literal.getValue3() + "Y";
            } else {
                val = literal.getValue3();
            }
            type = HiveParser.IntegralLiteral;
            break;
        case SMALLINT:
            if (useTypeQualInLiteral) {
                val = literal.getValue3() + "S";
            } else {
                val = literal.getValue3();
            }
            type = HiveParser.IntegralLiteral;
            break;
        case INTEGER:
            val = literal.getValue3();
            type = HiveParser.IntegralLiteral;
            break;
        case BIGINT:
            if (useTypeQualInLiteral) {
                val = literal.getValue3() + "L";
            } else {
                val = literal.getValue3();
            }
            type = HiveParser.IntegralLiteral;
            break;
        case DOUBLE:
            val = literal.getValue3() + "D";
            type = HiveParser.NumberLiteral;
            break;
        case DECIMAL:
            val = literal.getValue3() + "BD";
            type = HiveParser.NumberLiteral;
            break;
        case FLOAT:
        case REAL:
            val = literal.getValue3();
            type = HiveParser.Number;
            break;
        case VARCHAR:
        case CHAR:
            val = literal.getValue3();
            String escapedVal = BaseSemanticAnalyzer.escapeSQLString(String.valueOf(val));
            type = HiveParser.StringLiteral;
            val = "'" + escapedVal + "'";
            break;
        case BOOLEAN:
            val = literal.getValue3();
            type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE : HiveParser.KW_FALSE;
            break;
        case DATE:
            val = "'" + literal.getValueAs(DateString.class).toString() + "'";
            type = HiveParser.TOK_DATELITERAL;
            break;
        case TIME:
            val = "'" + literal.getValueAs(TimeString.class).toString() + "'";
            type = HiveParser.TOK_TIMESTAMPLITERAL;
            break;
        case TIMESTAMP:
            val = "'" + literal.getValueAs(TimestampString.class).toString() + "'";
            type = HiveParser.TOK_TIMESTAMPLITERAL;
            break;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            // Calcite stores timestamp with local time-zone in UTC internally, thus
            // when we bring it back, we need to add the UTC suffix.
            val = "'" + literal.getValueAs(TimestampString.class).toString() + " UTC'";
            type = HiveParser.TOK_TIMESTAMPLOCALTZLITERAL;
            break;
        case INTERVAL_YEAR:
        case INTERVAL_MONTH:
        case INTERVAL_YEAR_MONTH:
            {
                type = HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL;
                BigDecimal monthsBd = (BigDecimal) literal.getValue();
                HiveIntervalYearMonth intervalYearMonth = new HiveIntervalYearMonth(monthsBd.intValue());
                val = "'" + intervalYearMonth.toString() + "'";
            }
            break;
        case INTERVAL_DAY:
        case INTERVAL_DAY_HOUR:
        case INTERVAL_DAY_MINUTE:
        case INTERVAL_DAY_SECOND:
        case INTERVAL_HOUR:
        case INTERVAL_HOUR_MINUTE:
        case INTERVAL_HOUR_SECOND:
        case INTERVAL_MINUTE:
        case INTERVAL_MINUTE_SECOND:
        case INTERVAL_SECOND:
            {
                type = HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL;
                BigDecimal millisBd = (BigDecimal) literal.getValue();
                // Calcite literal is in millis, convert to seconds
                BigDecimal secsBd = millisBd.divide(BigDecimal.valueOf(1000));
                HiveIntervalDayTime intervalDayTime = new HiveIntervalDayTime(secsBd);
                val = "'" + intervalDayTime.toString() + "'";
            }
            break;
        case NULL:
            type = HiveParser.TOK_NULL;
            break;
        // binary type should not be seen.
        case BINARY:
        default:
            throw new RuntimeException("Unsupported Type: " + sqlType);
    }
    return (ASTNode) ParseDriver.adaptor.create(type, String.valueOf(val));
}
Also used : HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) DateString(org.apache.calcite.util.DateString) TimestampString(org.apache.calcite.util.TimestampString) TimeString(org.apache.calcite.util.TimeString) BigDecimal(java.math.BigDecimal) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
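
A hedged sketch of the two interval conversions performed above: a total month count becomes the body of a year-month literal, and Calcite's millisecond value is divided by 1000 before constructing HiveIntervalDayTime. The printed forms follow the "1-0" and "0 00:00:00.000000000" shapes seen in the JDBC example below.

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalLiteralSketch {
    public static void main(String[] args) {
        // 14 total months prints in the year-month form, e.g. "1-2".
        System.out.println(new HiveIntervalYearMonth(14));

        // 90061000 millis = 90061 seconds = 1 day, 1 hour, 1 minute, 1 second.
        BigDecimal millis = BigDecimal.valueOf(90061000L);
        BigDecimal secs = millis.divide(BigDecimal.valueOf(1000));
        System.out.println(new HiveIntervalDayTime(secs));
    }
}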

Example 5 with HiveIntervalYearMonth

Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.

From the class TestJdbcDriver2, method testIntervalTypes:

@Test
public void testIntervalTypes() throws Exception {
    Statement stmt = con.createStatement();
    // Since interval types are not currently supported as table columns, we need
    // to create them as expressions.
    ResultSet res = stmt.executeQuery("select case when c17 is null then null else interval '1' year end as col1," + " c17 -  c17 as col2 from " + dataTypeTableName);
    ResultSetMetaData meta = res.getMetaData();
    assertEquals("col1", meta.getColumnLabel(1));
    assertEquals(java.sql.Types.OTHER, meta.getColumnType(1));
    assertEquals("interval_year_month", meta.getColumnTypeName(1));
    assertEquals(11, meta.getColumnDisplaySize(1));
    assertEquals(11, meta.getPrecision(1));
    assertEquals(0, meta.getScale(1));
    assertEquals(HiveIntervalYearMonth.class.getName(), meta.getColumnClassName(1));
    assertEquals("col2", meta.getColumnLabel(2));
    assertEquals(java.sql.Types.OTHER, meta.getColumnType(2));
    assertEquals("interval_day_time", meta.getColumnTypeName(2));
    assertEquals(29, meta.getColumnDisplaySize(2));
    assertEquals(29, meta.getPrecision(2));
    assertEquals(0, meta.getScale(2));
    assertEquals(HiveIntervalDayTime.class.getName(), meta.getColumnClassName(2));
    // row 1 - results should be null
    assertTrue(res.next());
    // skip the last (partitioning) column since it is always non-null
    for (int i = 1; i < meta.getColumnCount(); i++) {
        assertNull("Column " + i + " should be null", res.getObject(i));
    }
    // row 2 - results should be null
    assertTrue(res.next());
    for (int i = 1; i < meta.getColumnCount(); i++) {
        assertNull("Column " + i + " should be null", res.getObject(i));
    }
    // row 3
    assertTrue(res.next());
    assertEquals("1-0", res.getString(1));
    assertEquals(1, ((HiveIntervalYearMonth) res.getObject(1)).getYears());
    assertEquals("0 00:00:00.000000000", res.getString(2));
    assertEquals(0, ((HiveIntervalDayTime) res.getObject(2)).getDays());
    stmt.close();
}
Also used : ResultSetMetaData(java.sql.ResultSetMetaData) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) ResultSet(java.sql.ResultSet) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) Test(org.junit.Test)
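
On the client side, the test above implies a simple consumption pattern: interval columns come back from ResultSet.getObject() as the Hive interval classes, so a JDBC caller can cast and use the typed accessors. A hedged sketch, assuming an already-open Connection and a server that accepts a FROM-less SELECT:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class ReadIntervalColumns {
    static void readIntervals(Connection con) throws Exception {
        try (Statement stmt = con.createStatement();
             ResultSet res = stmt.executeQuery(
                 "select interval '1' year as col1, interval '1' day as col2")) {
            while (res.next()) {
                // getObject() returns the Hive interval classes, as asserted above.
                HiveIntervalYearMonth iym = (HiveIntervalYearMonth) res.getObject(1);
                HiveIntervalDayTime idt = (HiveIntervalDayTime) res.getObject(2);
                System.out.println(iym.getYears() + " year(s), " + idt.getDays() + " day(s)");
            }
        }
    }
}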

Aggregations

HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth): 37 usages
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 19 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 16 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 14 usages
Text (org.apache.hadoop.io.Text): 14 usages
Date (org.apache.hadoop.hive.common.type.Date): 13 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 13 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 13 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 13 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 12 usages
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 12 usages
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 11 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 11 usages
FloatWritable (org.apache.hadoop.io.FloatWritable): 11 usages
IntWritable (org.apache.hadoop.io.IntWritable): 11 usages
LongWritable (org.apache.hadoop.io.LongWritable): 11 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 10 usages
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 10 usages
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 9 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 9 usages