
Example 21 with HiveChar

use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

the class RexNodeConverter method convert.

protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);
    PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
    RexNode calciteLiteral = null;
    // If value is null, the type should also be VOID.
    if (value == null) {
        hiveTypeCategory = PrimitiveCategory.VOID;
    }
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch(hiveTypeCategory) {
        case BOOLEAN:
            calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
            break;
        case BYTE:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
            break;
        case SHORT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
            break;
        case INT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
            break;
        case LONG:
            calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
            break;
        // TODO: is Decimal an exact numeric or approximate numeric?
        case DECIMAL:
            if (value instanceof HiveDecimal) {
                value = ((HiveDecimal) value).bigDecimalValue();
            } else if (value instanceof Decimal128) {
                value = ((Decimal128) value).toBigDecimal();
            }
            if (value == null) {
                // An invalid decimal was found while enforcing precision and scale; for now,
                // CBO is not run in the presence of invalid decimal literals.
                throw new CalciteSemanticException("Expression " + literal.getExprString() + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
                // TODO: return createNullLiteral(literal);
            }
            BigDecimal bd = (BigDecimal) value;
            BigInteger unscaled = bd.unscaledValue();
            int precision = bd.unscaledValue().abs().toString().length();
            int scale = bd.scale();
            RelDataType relType;
            if (precision > scale) {
                // bd is greater than or equal to 1
                relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, precision, scale);
            } else {
                // bd is less than 1
                relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, scale + 1, scale);
            }
            calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
            break;
        case FLOAT:
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
            break;
        case DOUBLE:
            // TODO: The best solution is to support NaN in expression reduction.
            if (Double.isNaN((Double) value)) {
                throw new CalciteSemanticException("NaN", UnsupportedFeature.Invalid_decimal);
            }
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
            break;
        case CHAR:
            if (value instanceof HiveChar) {
                value = ((HiveChar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case VARCHAR:
            if (value instanceof HiveVarchar) {
                value = ((HiveVarchar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case STRING:
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case DATE:
            final Calendar cal = Calendar.getInstance(Locale.getDefault());
            cal.setTime((Date) value);
            calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromCalendarFields(cal));
            break;
        case TIMESTAMP:
            final TimestampString tsString;
            if (value instanceof Calendar) {
                tsString = TimestampString.fromCalendarFields((Calendar) value);
            } else {
                final Timestamp ts = (Timestamp) value;
                final Calendar calt = Calendar.getInstance(Locale.getDefault());
                calt.setTimeInMillis(ts.getTime());
                tsString = TimestampString.fromCalendarFields(calt).withNanos(ts.getNanos());
            }
            // Must call makeLiteral, not makeTimestampLiteral
            // to have the RexBuilder.roundTime logic kick in
            calciteLiteral = rexBuilder.makeLiteral(tsString, rexBuilder.getTypeFactory().createSqlType(SqlTypeName.TIMESTAMP, rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP)), false);
            break;
        case TIMESTAMPLOCALTZ:
            final TimestampString tsLocalTZString;
            if (value == null) {
                tsLocalTZString = null;
            } else {
                Instant i = ((TimestampTZ) value).getZonedDateTime().toInstant();
                tsLocalTZString = TimestampString.fromMillisSinceEpoch(i.toEpochMilli()).withNanos(i.getNano());
            }
            calciteLiteral = rexBuilder.makeTimestampWithLocalTimeZoneLiteral(tsLocalTZString, rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));
            break;
        case INTERVAL_YEAR_MONTH:
            // Calcite year-month literal value is months as BigDecimal
            BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
            calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            break;
        case INTERVAL_DAY_TIME:
            // Calcite day-time interval is millis value as BigDecimal
            // Seconds converted to millis
            BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
            // Nanos converted to millis
            BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
            calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
            break;
        case VOID:
            calciteLiteral = cluster.getRexBuilder().makeLiteral(null, cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);
            break;
        case BINARY:
        case UNKNOWN:
        default:
            throw new RuntimeException("UnSupported Literal");
    }
    return calciteLiteral;
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) RelDataType(org.apache.calcite.rel.type.RelDataType) GenericUDFTimestamp(org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp) Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) RexBuilder(org.apache.calcite.rex.RexBuilder) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) Calendar(java.util.Calendar) Instant(java.time.Instant) Decimal128(org.apache.hadoop.hive.common.type.Decimal128) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) BigInteger(java.math.BigInteger) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) TimestampString(org.apache.calcite.util.TimestampString) RexNode(org.apache.calcite.rex.RexNode)
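
The DECIMAL branch above derives the Calcite type from the literal itself: precision is the digit count of the unscaled value, and when the value is smaller than 1 (precision not greater than scale) one extra digit is added so the type can hold it. A minimal standalone sketch of just that precision/scale rule, using only java.math and leaving out the RexBuilder/RelDataType calls, is shown below.

import java.math.BigDecimal;

public class DecimalPrecisionSketch {
    public static void main(String[] args) {
        for (String s : new String[] { "123.45", "0.0042" }) {
            BigDecimal bd = new BigDecimal(s);
            // Same derivation as the converter: digit count of the unscaled value ...
            int precision = bd.unscaledValue().abs().toString().length();
            int scale = bd.scale();
            // ... plus one extra digit of precision for values below 1.
            int typePrecision = (precision > scale) ? precision : scale + 1;
            System.out.println(s + " -> DECIMAL(" + typePrecision + ", " + scale + ")");
            // Prints: 123.45 -> DECIMAL(5, 2) and 0.0042 -> DECIMAL(5, 4)
        }
    }
}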

Example 22 with HiveChar

use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

the class SerdeRandomRowSource method getRandHiveChar.

public static HiveChar getRandHiveChar(Random r, CharTypeInfo charTypeInfo) {
    final int maxLength = 1 + r.nextInt(charTypeInfo.getLength());
    final String randomString = RandomTypeUtil.getRandString(r, "abcdefghijklmnopqrstuvwxyz", 100);
    final HiveChar hiveChar = new HiveChar(randomString, maxLength);
    return hiveChar;
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar)
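
For context on what the generated object looks like: HiveChar enforces the declared maximum length and ignores trailing padding when comparing values, so the 100-character random string above ends up truncated to the randomly chosen length. The sketch below is a small illustration under those assumptions (getValue() returning the space-padded form and equality ignoring trailing spaces are my reading of HiveChar's behavior, not something shown in this snippet).

import org.apache.hadoop.hive.common.type.HiveChar;

public class HiveCharBehaviorSketch {
    public static void main(String[] args) {
        // Input longer than the declared length is truncated to that length.
        HiveChar truncated = new HiveChar("abcdefghij", 4);
        System.out.println("'" + truncated.getValue() + "'");      // 'abcd'

        // Input shorter than the declared length is space-padded by getValue() ...
        HiveChar padded = new HiveChar("ab", 4);
        System.out.println("'" + padded.getValue() + "'");         // 'ab  '

        // ... but trailing padding does not affect equality.
        System.out.println(padded.equals(new HiveChar("ab", 10))); // true
    }
}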

Example 23 with HiveChar

use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

the class VerifyFast method doVerifyDeserializeRead.

public static void doVerifyDeserializeRead(DeserializeRead deserializeRead, TypeInfo typeInfo, Object object, boolean isNull) throws IOException {
    if (isNull) {
        if (object != null) {
            TestCase.fail("Field reports null but object is not null (class " + object.getClass().getName() + ", " + object.toString() + ")");
        }
        return;
    } else if (object == null) {
        TestCase.fail("Field report not null but object is null");
    }
    switch(typeInfo.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                switch(primitiveTypeInfo.getPrimitiveCategory()) {
                    case BOOLEAN:
                        {
                            boolean value = deserializeRead.currentBoolean;
                            if (!(object instanceof BooleanWritable)) {
                                TestCase.fail("Boolean expected writable not Boolean");
                            }
                            boolean expected = ((BooleanWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case BYTE:
                        {
                            byte value = deserializeRead.currentByte;
                            if (!(object instanceof ByteWritable)) {
                                TestCase.fail("Byte expected writable not Byte");
                            }
                            byte expected = ((ByteWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                            }
                        }
                        break;
                    case SHORT:
                        {
                            short value = deserializeRead.currentShort;
                            if (!(object instanceof ShortWritable)) {
                                TestCase.fail("Short expected writable not Short");
                            }
                            short expected = ((ShortWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case INT:
                        {
                            int value = deserializeRead.currentInt;
                            if (!(object instanceof IntWritable)) {
                                TestCase.fail("Integer expected writable not Integer");
                            }
                            int expected = ((IntWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case LONG:
                        {
                            long value = deserializeRead.currentLong;
                            if (!(object instanceof LongWritable)) {
                                TestCase.fail("Long expected writable not Long");
                            }
                            long expected = ((LongWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case FLOAT:
                        {
                            float value = deserializeRead.currentFloat;
                            if (!(object instanceof FloatWritable)) {
                                TestCase.fail("Float expected writable not Float");
                            }
                            float expected = ((FloatWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case DOUBLE:
                        {
                            double value = deserializeRead.currentDouble;
                            if (!(object instanceof DoubleWritable)) {
                                TestCase.fail("Double expected writable not Double");
                            }
                            double expected = ((DoubleWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case STRING:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            String expected = ((Text) object).toString();
                            if (!string.equals(expected)) {
                                TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
                            }
                        }
                        break;
                    case CHAR:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                            HiveChar expected = ((HiveCharWritable) object).getHiveChar();
                            if (!hiveChar.equals(expected)) {
                                TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
                            }
                        }
                        break;
                    case VARCHAR:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                            HiveVarchar expected = ((HiveVarcharWritable) object).getHiveVarchar();
                            if (!hiveVarchar.equals(expected)) {
                                TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
                            }
                        }
                        break;
                    case DECIMAL:
                        {
                            HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                            if (value == null) {
                                TestCase.fail("Decimal field evaluated to NULL");
                            }
                            HiveDecimal expected = ((HiveDecimalWritable) object).getHiveDecimal();
                            if (!value.equals(expected)) {
                                DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                                int precision = decimalTypeInfo.getPrecision();
                                int scale = decimalTypeInfo.getScale();
                                TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                            }
                        }
                        break;
                    case DATE:
                        {
                            Date value = deserializeRead.currentDateWritable.get();
                            Date expected = ((DateWritable) object).get();
                            if (!value.equals(expected)) {
                                TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case TIMESTAMP:
                        {
                            Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                            Timestamp expected = ((TimestampWritable) object).getTimestamp();
                            if (!value.equals(expected)) {
                                TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        {
                            HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                            HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth();
                            if (!value.equals(expected)) {
                                TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        {
                            HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                            HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime();
                            if (!value.equals(expected)) {
                                TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case BINARY:
                        {
                            byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            BytesWritable bytesWritable = (BytesWritable) object;
                            byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
                            if (byteArray.length != expected.length) {
                                TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                            }
                            for (int b = 0; b < byteArray.length; b++) {
                                if (byteArray[b] != expected[b]) {
                                    TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                                }
                            }
                        }
                        break;
                    default:
                        throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
                }
            }
            break;
        case LIST:
        case MAP:
        case STRUCT:
        case UNION:
            throw new Error("Complex types need to be handled separately");
        default:
            throw new Error("Unknown category " + typeInfo.getCategory());
    }
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) Date(java.sql.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable)
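
The CHAR, VARCHAR, STRING and BINARY branches all reconstruct the field the same way: copy the current byte slice out of deserializeRead's buffer, decode it through Text, then wrap it with the type's length parameter. A self-contained sketch of that reconstruction step follows; the buffer, start and length values are made-up stand-ins for deserializeRead.currentBytes, currentBytesStart and currentBytesLength.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.io.Text;

public class CharFieldDecodeSketch {
    public static void main(String[] args) {
        // Stand-ins for deserializeRead.currentBytes / currentBytesStart / currentBytesLength.
        byte[] buffer = "xxhivexx".getBytes(StandardCharsets.UTF_8);
        int start = 2;
        int length = 4;

        // Slice the field out of the buffer, decode it via Text, wrap it as a CHAR(10).
        byte[] fieldBytes = Arrays.copyOfRange(buffer, start, start + length);
        String decoded = new Text(fieldBytes).toString();
        HiveChar hiveChar = new HiveChar(decoded, 10); // 10 stands in for CharTypeInfo.getLength()

        System.out.println("'" + hiveChar.getValue() + "'"); // 'hive' padded out to 10 characters
    }
}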

Example 24 with HiveChar

use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

the class TestObjectInspectorConverters method testObjectInspectorConverters.

public void testObjectInspectorConverters() throws Throwable {
    try {
        // Boolean
        Converter booleanConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
        assertEquals("BooleanConverter", new BooleanWritable(false), booleanConverter.convert(Integer.valueOf(0)));
        assertEquals("BooleanConverter", new BooleanWritable(true), booleanConverter.convert(Integer.valueOf(1)));
        assertEquals("BooleanConverter", null, booleanConverter.convert(null));
        // Byte
        Converter byteConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableByteObjectInspector);
        assertEquals("ByteConverter", new ByteWritable((byte) 0), byteConverter.convert(Integer.valueOf(0)));
        assertEquals("ByteConverter", new ByteWritable((byte) 1), byteConverter.convert(Integer.valueOf(1)));
        assertEquals("ByteConverter", null, byteConverter.convert(null));
        // Short
        Converter shortConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableShortObjectInspector);
        assertEquals("ShortConverter", new ShortWritable((short) 0), shortConverter.convert(Integer.valueOf(0)));
        assertEquals("ShortConverter", new ShortWritable((short) 1), shortConverter.convert(Integer.valueOf(1)));
        assertEquals("ShortConverter", null, shortConverter.convert(null));
        // Int
        Converter intConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableIntObjectInspector);
        assertEquals("IntConverter", new IntWritable(0), intConverter.convert(Integer.valueOf(0)));
        assertEquals("IntConverter", new IntWritable(1), intConverter.convert(Integer.valueOf(1)));
        assertEquals("IntConverter", null, intConverter.convert(null));
        // Long
        Converter longConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableLongObjectInspector);
        assertEquals("LongConverter", new LongWritable(0), longConverter.convert(Integer.valueOf(0)));
        assertEquals("LongConverter", new LongWritable(1), longConverter.convert(Integer.valueOf(1)));
        assertEquals("LongConverter", null, longConverter.convert(null));
        // Float
        Converter floatConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
        assertEquals("LongConverter", new FloatWritable(0), floatConverter.convert(Integer.valueOf(0)));
        assertEquals("LongConverter", new FloatWritable(1), floatConverter.convert(Integer.valueOf(1)));
        assertEquals("LongConverter", null, floatConverter.convert(null));
        // Double
        Converter doubleConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
        assertEquals("DoubleConverter", new DoubleWritable(0), doubleConverter.convert(Integer.valueOf(0)));
        assertEquals("DoubleConverter", new DoubleWritable(1), doubleConverter.convert(Integer.valueOf(1)));
        assertEquals("DoubleConverter", null, doubleConverter.convert(null));
        // Char
        Converter charConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector);
        assertEquals("CharConverter", new HiveChar("TRUE", -1), charConverter.convert(Boolean.valueOf(true)));
        assertEquals("CharConverter", new HiveChar("FALSE", -1), charConverter.convert(Boolean.valueOf(false)));
        charConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector);
        assertEquals("CharConverter", new HiveCharWritable(new HiveChar("TRUE", -1)), charConverter.convert(Boolean.valueOf(true)));
        assertEquals("CharConverter", new HiveCharWritable(new HiveChar("FALSE", -1)), charConverter.convert(Boolean.valueOf(false)));
        charConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector);
        assertEquals("CharConverter", new HiveChar("0", -1), charConverter.convert(Integer.valueOf(0)));
        assertEquals("CharConverter", new HiveChar("1", -1), charConverter.convert(Integer.valueOf(1)));
        charConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector);
        assertEquals("CharConverter", new HiveCharWritable(new HiveChar("0", -1)), charConverter.convert(Integer.valueOf(0)));
        assertEquals("CharConverter", new HiveCharWritable(new HiveChar("1", -1)), charConverter.convert(Integer.valueOf(1)));
        charConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector);
        assertEquals("CharConverter", new HiveChar("hive", -1), charConverter.convert(String.valueOf("hive")));
        charConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector);
        assertEquals("CharConverter", new HiveCharWritable(new HiveChar("hive", -1)), charConverter.convert(String.valueOf("hive")));
        // VarChar
        Converter varcharConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector);
        assertEquals("VarCharConverter", new HiveVarchar("TRUE", -1), varcharConverter.convert(Boolean.valueOf(true)));
        assertEquals("VarCharConverter", new HiveVarchar("FALSE", -1), varcharConverter.convert(Boolean.valueOf(false)));
        varcharConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector);
        assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("TRUE", -1)), varcharConverter.convert(Boolean.valueOf(true)));
        assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("FALSE", -1)), varcharConverter.convert(Boolean.valueOf(false)));
        varcharConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector);
        assertEquals("VarCharConverter", new HiveVarchar("0", -1), varcharConverter.convert(Integer.valueOf(0)));
        assertEquals("VarCharConverter", new HiveVarchar("1", -1), varcharConverter.convert(Integer.valueOf(1)));
        varcharConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector);
        assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("0", -1)), varcharConverter.convert(Integer.valueOf(0)));
        assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("1", -1)), varcharConverter.convert(Integer.valueOf(1)));
        varcharConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector);
        assertEquals("VarCharConverter", new HiveVarchar("hive", -1), varcharConverter.convert(String.valueOf("hive")));
        varcharConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector);
        assertEquals("VarCharConverter", new HiveVarcharWritable(new HiveVarchar("hive", -1)), varcharConverter.convert(String.valueOf("hive")));
        // Text
        Converter textConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        assertEquals("TextConverter", new Text("0"), textConverter.convert(Integer.valueOf(0)));
        assertEquals("TextConverter", new Text("1"), textConverter.convert(Integer.valueOf(1)));
        assertEquals("TextConverter", null, textConverter.convert(null));
        textConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.writableBinaryObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        assertEquals("TextConverter", new Text("hive"), textConverter.convert(new BytesWritable(new byte[] { (byte) 'h', (byte) 'i', (byte) 'v', (byte) 'e' })));
        assertEquals("TextConverter", null, textConverter.convert(null));
        textConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.writableStringObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        assertEquals("TextConverter", new Text("hive"), textConverter.convert(new Text("hive")));
        assertEquals("TextConverter", null, textConverter.convert(null));
        textConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        assertEquals("TextConverter", new Text("hive"), textConverter.convert(new String("hive")));
        assertEquals("TextConverter", null, textConverter.convert(null));
        textConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        assertEquals("TextConverter", new Text("100.001"), textConverter.convert(HiveDecimal.create("100.001")));
        assertEquals("TextConverter", null, textConverter.convert(null));
        // Binary
        Converter baConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, PrimitiveObjectInspectorFactory.writableBinaryObjectInspector);
        assertEquals("BAConverter", new BytesWritable(new byte[] { (byte) 'h', (byte) 'i', (byte) 'v', (byte) 'e' }), baConverter.convert("hive"));
        assertEquals("BAConverter", null, baConverter.convert(null));
        baConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.writableStringObjectInspector, PrimitiveObjectInspectorFactory.writableBinaryObjectInspector);
        assertEquals("BAConverter", new BytesWritable(new byte[] { (byte) 'h', (byte) 'i', (byte) 'v', (byte) 'e' }), baConverter.convert(new Text("hive")));
        assertEquals("BAConverter", null, baConverter.convert(null));
        // Union
        ArrayList<String> fieldNames = new ArrayList<String>();
        fieldNames.add("firstInteger");
        fieldNames.add("secondString");
        fieldNames.add("thirdBoolean");
        ArrayList<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>();
        fieldObjectInspectors.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        fieldObjectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldObjectInspectors.add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
        ArrayList<String> fieldNames2 = new ArrayList<String>();
        fieldNames2.add("firstString");
        fieldNames2.add("secondInteger");
        fieldNames2.add("thirdBoolean");
        ArrayList<ObjectInspector> fieldObjectInspectors2 = new ArrayList<ObjectInspector>();
        fieldObjectInspectors2.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldObjectInspectors2.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        fieldObjectInspectors2.add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
        Converter unionConverter0 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2));
        Object convertedObject0 = unionConverter0.convert(new StandardUnion((byte) 0, 1));
        StandardUnion expectedObject0 = new StandardUnion();
        expectedObject0.setTag((byte) 0);
        expectedObject0.setObject("1");
        assertEquals(expectedObject0, convertedObject0);
        Converter unionConverter1 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2));
        Object convertedObject1 = unionConverter1.convert(new StandardUnion((byte) 1, "1"));
        StandardUnion expectedObject1 = new StandardUnion();
        expectedObject1.setTag((byte) 1);
        expectedObject1.setObject(1);
        assertEquals(expectedObject1, convertedObject1);
        Converter unionConverter2 = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors), ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectors2));
        Object convertedObject2 = unionConverter2.convert(new StandardUnion((byte) 2, true));
        StandardUnion expectedObject2 = new StandardUnion();
        expectedObject2.setTag((byte) 2);
        expectedObject2.setObject(true);
        assertEquals(expectedObject2, convertedObject2);
        // Union (extra fields)
        ArrayList<String> fieldNamesExtra = new ArrayList<String>();
        fieldNamesExtra.add("firstInteger");
        fieldNamesExtra.add("secondString");
        fieldNamesExtra.add("thirdBoolean");
        ArrayList<ObjectInspector> fieldObjectInspectorsExtra = new ArrayList<ObjectInspector>();
        fieldObjectInspectorsExtra.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        fieldObjectInspectorsExtra.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldObjectInspectorsExtra.add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
        ArrayList<String> fieldNamesExtra2 = new ArrayList<String>();
        fieldNamesExtra2.add("firstString");
        fieldNamesExtra2.add("secondInteger");
        ArrayList<ObjectInspector> fieldObjectInspectorsExtra2 = new ArrayList<ObjectInspector>();
        fieldObjectInspectorsExtra2.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldObjectInspectorsExtra2.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        Converter unionConverterExtra = ObjectInspectorConverters.getConverter(ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectorsExtra), ObjectInspectorFactory.getStandardUnionObjectInspector(fieldObjectInspectorsExtra2));
        Object convertedObjectExtra = unionConverterExtra.convert(new StandardUnion((byte) 2, true));
        StandardUnion expectedObjectExtra = new StandardUnion();
        expectedObjectExtra.setTag((byte) -1);
        expectedObjectExtra.setObject(null);
        // we should get back null
        assertEquals(expectedObjectExtra, convertedObjectExtra);
    } catch (Throwable e) {
        e.printStackTrace();
        throw e;
    }
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) ArrayList(java.util.ArrayList) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) StandardUnion(org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable)
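
Condensed to its essentials, each block in the test is the same three-step pattern: pick a source ObjectInspector matching the Java value you hold, pick a target ObjectInspector for the representation you want, and let ObjectInspectorConverters build the converter. Below is a minimal sketch of the int-to-writable-CHAR case, assuming the same Hive serde classes the test uses are on the classpath.

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class CharConverterSketch {
    public static void main(String[] args) {
        // Source inspector: plain Java Integer; target inspector: writable CHAR.
        Converter conv = ObjectInspectorConverters.getConverter(
                PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector);

        HiveCharWritable out = (HiveCharWritable) conv.convert(Integer.valueOf(42));
        // The writable wraps a HiveChar carrying the string form of the input.
        System.out.println(out.getHiveChar().equals(new HiveChar("42", -1))); // true
    }
}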

Example 25 with HiveChar

use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

the class ReaderWriter method writeDatum.

public static void writeDatum(DataOutput out, Object val) throws IOException {
    // write the data type
    byte type = DataType.findType(val);
    out.write(type);
    switch(type) {
        case DataType.LIST:
            List<?> list = (List<?>) val;
            int sz = list.size();
            out.writeInt(sz);
            for (int i = 0; i < sz; i++) {
                writeDatum(out, list.get(i));
            }
            return;
        case DataType.MAP:
            Map<?, ?> m = (Map<?, ?>) val;
            out.writeInt(m.size());
            Iterator<?> i = m.entrySet().iterator();
            while (i.hasNext()) {
                Entry<?, ?> entry = (Entry<?, ?>) i.next();
                writeDatum(out, entry.getKey());
                writeDatum(out, entry.getValue());
            }
            return;
        case DataType.INTEGER:
            new VIntWritable((Integer) val).write(out);
            return;
        case DataType.LONG:
            new VLongWritable((Long) val).write(out);
            return;
        case DataType.FLOAT:
            out.writeFloat((Float) val);
            return;
        case DataType.DOUBLE:
            out.writeDouble((Double) val);
            return;
        case DataType.BOOLEAN:
            out.writeBoolean((Boolean) val);
            return;
        case DataType.BYTE:
            out.writeByte((Byte) val);
            return;
        case DataType.SHORT:
            out.writeShort((Short) val);
            return;
        case DataType.STRING:
            String s = (String) val;
            byte[] utfBytes = s.getBytes(ReaderWriter.UTF8);
            out.writeInt(utfBytes.length);
            out.write(utfBytes);
            return;
        case DataType.BINARY:
            byte[] ba = (byte[]) val;
            out.writeInt(ba.length);
            out.write(ba);
            return;
        case DataType.NULL:
            // for NULL we just write out the type
            return;
        case DataType.CHAR:
            new HiveCharWritable((HiveChar) val).write(out);
            return;
        case DataType.VARCHAR:
            new HiveVarcharWritable((HiveVarchar) val).write(out);
            return;
        case DataType.DECIMAL:
            new HiveDecimalWritable((HiveDecimal) val).write(out);
            return;
        case DataType.DATE:
            new DateWritable((Date) val).write(out);
            return;
        case DataType.TIMESTAMP:
            new TimestampWritable((java.sql.Timestamp) val).write(out);
            return;
        default:
            throw new IOException("Unexpected data type " + type + " found in stream.");
    }
}
Also used : VIntWritable(org.apache.hadoop.io.VIntWritable) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) Entry(java.util.Map.Entry) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ArrayList(java.util.ArrayList) List(java.util.List) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) IOException(java.io.IOException) Date(java.sql.Date) VLongWritable(org.apache.hadoop.io.VLongWritable) HashMap(java.util.HashMap) Map(java.util.Map)
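
As the switch shows, writeDatum always emits a single DataType tag byte first and then the value's own serialization (for CHAR, the HiveCharWritable payload). The sketch below drives it with a HiveChar; the HCatalog package name for ReaderWriter is an assumption based on current Hive layouts, so adjust the import if your version differs.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hive.common.type.HiveChar;
// Package assumed from the current HCatalog layout; adjust for your Hive version.
import org.apache.hive.hcatalog.data.ReaderWriter;

public class WriteDatumSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);

        // CHAR values are written as the DataType.CHAR tag byte followed by the
        // HiveCharWritable serialization of the value.
        ReaderWriter.writeDatum(out, new HiveChar("hive", 10));
        out.flush();

        byte[] serialized = bytes.toByteArray();
        System.out.println("type tag = " + serialized[0] + ", total bytes = " + serialized.length);
    }
}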

Aggregations

HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 62 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 41 usages
Timestamp (java.sql.Timestamp): 25 usages
Date (java.sql.Date): 22 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 21 usages
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 21 usages
Test (org.junit.Test): 21 usages
Text (org.apache.hadoop.io.Text): 19 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 18 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 16 usages
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 16 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 16 usages
LongWritable (org.apache.hadoop.io.LongWritable): 16 usages
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 15 usages
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 15 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 15 usages
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 14 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 14 usages
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 14 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 14 usages