
Example 11 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

the class TestJdbcDriver2 method testIntervalTypes.

@Test
public void testIntervalTypes() throws Exception {
    Statement stmt = con.createStatement();
    // Interval types are not currently supported as table columns, so this test
    // creates them as expressions instead.
    ResultSet res = stmt.executeQuery("select case when c17 is null then null else interval '1' year end as col1," + " c17 - c17 as col2 from " + dataTypeTableName);
    ResultSetMetaData meta = res.getMetaData();
    assertEquals("col1", meta.getColumnLabel(1));
    assertEquals(java.sql.Types.OTHER, meta.getColumnType(1));
    assertEquals("interval_year_month", meta.getColumnTypeName(1));
    assertEquals(11, meta.getColumnDisplaySize(1));
    assertEquals(11, meta.getPrecision(1));
    assertEquals(0, meta.getScale(1));
    assertEquals(HiveIntervalYearMonth.class.getName(), meta.getColumnClassName(1));
    assertEquals("col2", meta.getColumnLabel(2));
    assertEquals(java.sql.Types.OTHER, meta.getColumnType(2));
    assertEquals("interval_day_time", meta.getColumnTypeName(2));
    assertEquals(29, meta.getColumnDisplaySize(2));
    assertEquals(29, meta.getPrecision(2));
    assertEquals(0, meta.getScale(2));
    assertEquals(HiveIntervalDayTime.class.getName(), meta.getColumnClassName(2));
    // row 1 - results should be null
    assertTrue(res.next());
    // skip the last (partitioning) column since it is always non-null
    for (int i = 1; i < meta.getColumnCount(); i++) {
        assertNull("Column " + i + " should be null", res.getObject(i));
    }
    // row 2 - results should be null
    assertTrue(res.next());
    for (int i = 1; i < meta.getColumnCount(); i++) {
        assertNull("Column " + i + " should be null", res.getObject(i));
    }
    // row 3
    assertTrue(res.next());
    assertEquals("1-0", res.getString(1));
    assertEquals(1, ((HiveIntervalYearMonth) res.getObject(1)).getYears());
    assertEquals("0 00:00:00.000000000", res.getString(2));
    assertEquals(0, ((HiveIntervalDayTime) res.getObject(2)).getDays());
    stmt.close();
}
Also used : ResultSetMetaData(java.sql.ResultSetMetaData) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) ResultSet(java.sql.ResultSet) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) Test(org.junit.Test)
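
For context, a minimal standalone sketch (not taken from the Hive test suite) of the string round-trip those assertions rely on; the class name IntervalRoundTrip is illustrative, and only the two interval types shown above are assumed.

import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalRoundTrip {
    public static void main(String[] args) {
        // "1-0" is the canonical string for 1 year, 0 months, as asserted above.
        HiveIntervalYearMonth ym = HiveIntervalYearMonth.valueOf("1-0");
        System.out.println(ym.getYears());  // 1

        // "0 00:00:00.000000000" is the canonical day-time string for c17 - c17.
        HiveIntervalDayTime dt = HiveIntervalDayTime.valueOf("0 00:00:00.000000000");
        System.out.println(dt.getDays());   // 0
    }
}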

Example 12 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

the class SerdeRandomRowSource method getRandIntervalDayTime.

public static HiveIntervalDayTime getRandIntervalDayTime(Random r) {
    String optionalNanos = "";
    if (r.nextInt(2) == 1) {
        optionalNanos = String.format(".%09d", Integer.valueOf(0 + r.nextInt(DateUtils.NANOS_PER_SEC)));
    }
    final String signStr = r.nextInt(2) == 0 ? "" : "-";
    final String dayTimeStr = String.format("%s%d %02d:%02d:%02d%s",
        signStr,
        // day
        Integer.valueOf(1 + r.nextInt(28)),
        // hour
        Integer.valueOf(0 + r.nextInt(24)),
        // minute
        Integer.valueOf(0 + r.nextInt(60)),
        // second
        Integer.valueOf(0 + r.nextInt(60)),
        optionalNanos);
    final HiveIntervalDayTime intervalDayTimeVal = HiveIntervalDayTime.valueOf(dayTimeStr);
    return intervalDayTimeVal;
}
Also used : HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
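
A hedged usage sketch (the seed and the loop are illustrative, not from the Hive sources), showing how a test might draw a few reproducible random intervals from the helper above:

java.util.Random r = new java.util.Random(12345L);
for (int i = 0; i < 3; i++) {
    // Each value is built from a string such as "-17 03:42:09.123456789"
    // and parses back through HiveIntervalDayTime.valueOf.
    HiveIntervalDayTime interval = SerdeRandomRowSource.getRandIntervalDayTime(r);
    System.out.println(interval);
}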

Example 13 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

the class VerifyFast method doVerifyDeserializeRead.

public static void doVerifyDeserializeRead(DeserializeRead deserializeRead, TypeInfo typeInfo, Object object, boolean isNull) throws IOException {
    if (isNull) {
        if (object != null) {
            TestCase.fail("Field reports null but object is not null (class " + object.getClass().getName() + ", " + object.toString() + ")");
        }
        return;
    } else if (object == null) {
        TestCase.fail("Field report not null but object is null");
    }
    switch(typeInfo.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                switch(primitiveTypeInfo.getPrimitiveCategory()) {
                    case BOOLEAN:
                        {
                            boolean value = deserializeRead.currentBoolean;
                            if (!(object instanceof BooleanWritable)) {
                                TestCase.fail("Boolean expected writable not Boolean");
                            }
                            boolean expected = ((BooleanWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case BYTE:
                        {
                            byte value = deserializeRead.currentByte;
                            if (!(object instanceof ByteWritable)) {
                                TestCase.fail("Byte expected writable not Byte");
                            }
                            byte expected = ((ByteWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                            }
                        }
                        break;
                    case SHORT:
                        {
                            short value = deserializeRead.currentShort;
                            if (!(object instanceof ShortWritable)) {
                                TestCase.fail("Short expected writable not Short");
                            }
                            short expected = ((ShortWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case INT:
                        {
                            int value = deserializeRead.currentInt;
                            if (!(object instanceof IntWritable)) {
                                TestCase.fail("Integer expected writable not Integer");
                            }
                            int expected = ((IntWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case LONG:
                        {
                            long value = deserializeRead.currentLong;
                            if (!(object instanceof LongWritable)) {
                                TestCase.fail("Long expected writable not Long");
                            }
                            long expected = ((LongWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case FLOAT:
                        {
                            float value = deserializeRead.currentFloat;
                            if (!(object instanceof FloatWritable)) {
                                TestCase.fail("Float expected writable not Float");
                            }
                            float expected = ((FloatWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case DOUBLE:
                        {
                            double value = deserializeRead.currentDouble;
                            if (!(object instanceof DoubleWritable)) {
                                TestCase.fail("Double expected writable not Double");
                            }
                            double expected = ((DoubleWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case STRING:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            String expected = ((Text) object).toString();
                            if (!string.equals(expected)) {
                                TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
                            }
                        }
                        break;
                    case CHAR:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                            HiveChar expected = ((HiveCharWritable) object).getHiveChar();
                            if (!hiveChar.equals(expected)) {
                                TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
                            }
                        }
                        break;
                    case VARCHAR:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                            HiveVarchar expected = ((HiveVarcharWritable) object).getHiveVarchar();
                            if (!hiveVarchar.equals(expected)) {
                                TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
                            }
                        }
                        break;
                    case DECIMAL:
                        {
                            HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                            if (value == null) {
                                TestCase.fail("Decimal field evaluated to NULL");
                            }
                            HiveDecimal expected = ((HiveDecimalWritable) object).getHiveDecimal();
                            if (!value.equals(expected)) {
                                DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                                int precision = decimalTypeInfo.getPrecision();
                                int scale = decimalTypeInfo.getScale();
                                TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                            }
                        }
                        break;
                    case DATE:
                        {
                            Date value = deserializeRead.currentDateWritable.get();
                            Date expected = ((DateWritable) object).get();
                            if (!value.equals(expected)) {
                                TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case TIMESTAMP:
                        {
                            Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                            Timestamp expected = ((TimestampWritable) object).getTimestamp();
                            if (!value.equals(expected)) {
                                TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        {
                            HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                            HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth();
                            if (!value.equals(expected)) {
                                TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        {
                            HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                            HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime();
                            if (!value.equals(expected)) {
                                TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case BINARY:
                        {
                            byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            BytesWritable bytesWritable = (BytesWritable) object;
                            byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
                            if (byteArray.length != expected.length) {
                                TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                            }
                            for (int b = 0; b < byteArray.length; b++) {
                                if (byteArray[b] != expected[b]) {
                                    TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                                }
                            }
                        }
                        break;
                    default:
                        throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
                }
            }
            break;
        case LIST:
        case MAP:
        case STRUCT:
        case UNION:
            throw new Error("Complex types need to be handled separately");
        default:
            throw new Error("Unknown category " + typeInfo.getCategory());
    }
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) Date(java.sql.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable)
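
A minimal sketch of the null contract the helper enforces; deserializeRead stands in for whatever reader the surrounding test drives, while IntWritable and TypeInfoFactory.intTypeInfo are standard serde2 API:

// Passes only if the reader's current field was read as the int 42.
doVerifyDeserializeRead(deserializeRead, TypeInfoFactory.intTypeInfo,
    new IntWritable(42), /* isNull */ false);
// Passes only if the reader reported the field as null.
doVerifyDeserializeRead(deserializeRead, TypeInfoFactory.intTypeInfo,
    null, /* isNull */ true);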

Example 14 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

the class VectorAssignRow method assignConvertRowColumn.

private void assignConvertRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, ObjectInspector sourceObjectInspector, Writable convertTargetWritable, Object object) {
    final Category targetCategory = targetTypeInfo.getCategory();
    if (targetCategory == null) {
        /*
         * This is a column that we don't want (i.e. not included) -- we are done.
         */
        return;
    }
    if (object == null) {
        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
        return;
    }
    try {
        switch(targetCategory) {
            case PRIMITIVE:
                final PrimitiveObjectInspector sourcePrimitiveOI = (PrimitiveObjectInspector) sourceObjectInspector;
                final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
                switch(targetPrimitiveCategory) {
                    case VOID:
                        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                        return;
                    case BOOLEAN:
                        ((LongColumnVector) columnVector).vector[batchIndex] = (PrimitiveObjectInspectorUtils.getBoolean(object, sourcePrimitiveOI) ? 1 : 0);
                        break;
                    case BYTE:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getByte(object, sourcePrimitiveOI);
                        break;
                    case SHORT:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getShort(object, sourcePrimitiveOI);
                        break;
                    case INT:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getInt(object, sourcePrimitiveOI);
                        break;
                    case LONG:
                        ((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getLong(object, sourcePrimitiveOI);
                        break;
                    case TIMESTAMP:
                        {
                            final Timestamp timestamp = PrimitiveObjectInspectorUtils.getTimestamp(object, sourcePrimitiveOI);
                            if (timestamp == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((TimestampColumnVector) columnVector).set(batchIndex, timestamp);
                        }
                        break;
                    case DATE:
                        {
                            final Date date = PrimitiveObjectInspectorUtils.getDate(object, sourcePrimitiveOI);
                            if (date == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            DateWritable dateWritable = (DateWritable) convertTargetWritable;
                            if (dateWritable == null) {
                                dateWritable = new DateWritable();
                            }
                            dateWritable.set(date);
                            ((LongColumnVector) columnVector).vector[batchIndex] = dateWritable.getDays();
                        }
                        break;
                    case FLOAT:
                        ((DoubleColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getFloat(object, sourcePrimitiveOI);
                        break;
                    case DOUBLE:
                        ((DoubleColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getDouble(object, sourcePrimitiveOI);
                        break;
                    case BINARY:
                        {
                            final BytesWritable bytesWritable = PrimitiveObjectInspectorUtils.getBinary(object, sourcePrimitiveOI);
                            if (bytesWritable == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytesWritable.getBytes(), 0, bytesWritable.getLength());
                        }
                        break;
                    case STRING:
                        {
                            final String string = PrimitiveObjectInspectorUtils.getString(object, sourcePrimitiveOI);
                            if (string == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            Text text = (Text) convertTargetWritable;
                            if (text == null) {
                                text = new Text();
                            }
                            text.set(string);
                            ((BytesColumnVector) columnVector).setVal(batchIndex, text.getBytes(), 0, text.getLength());
                        }
                        break;
                    case VARCHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            final HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(object, sourcePrimitiveOI);
                            if (hiveVarchar == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            // TODO: Do we need maxLength checking?
                            byte[] bytes = hiveVarchar.getValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case CHAR:
                        {
                            // UNDONE: Performance problem with conversion to String, then bytes...
                            final HiveChar hiveChar = PrimitiveObjectInspectorUtils.getHiveChar(object, sourcePrimitiveOI);
                            if (hiveChar == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            // We store CHAR in vector row batch with padding stripped.
                            // TODO: Do we need maxLength checking?
                            final byte[] bytes = hiveChar.getStrippedValue().getBytes();
                            ((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
                        }
                        break;
                    case DECIMAL:
                        {
                            final HiveDecimal hiveDecimal = PrimitiveObjectInspectorUtils.getHiveDecimal(object, sourcePrimitiveOI);
                            if (hiveDecimal == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            if (columnVector instanceof Decimal64ColumnVector) {
                                Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) columnVector;
                                dec64ColVector.set(batchIndex, hiveDecimal);
                                if (dec64ColVector.isNull[batchIndex]) {
                                    return;
                                }
                            } else {
                                ((DecimalColumnVector) columnVector).set(batchIndex, hiveDecimal);
                            }
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        {
                            final HiveIntervalYearMonth intervalYearMonth = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(object, sourcePrimitiveOI);
                            if (intervalYearMonth == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((LongColumnVector) columnVector).vector[batchIndex] = intervalYearMonth.getTotalMonths();
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        {
                            final HiveIntervalDayTime intervalDayTime = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(object, sourcePrimitiveOI);
                            if (intervalDayTime == null) {
                                VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
                                return;
                            }
                            ((IntervalDayTimeColumnVector) columnVector).set(batchIndex, intervalDayTime);
                        }
                        break;
                    default:
                        throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
                }
                break;
            case LIST:
                {
                    final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
                    final ListObjectInspector sourceListOI = (ListObjectInspector) sourceObjectInspector;
                    final ObjectInspector sourceElementOI = sourceListOI.getListElementObjectInspector();
                    final int size = sourceListOI.getListLength(object);
                    final TypeInfo targetElementTypeInfo = ((ListTypeInfo) targetTypeInfo).getListElementTypeInfo();
                    listColumnVector.offsets[batchIndex] = listColumnVector.childCount;
                    listColumnVector.childCount += size;
                    listColumnVector.ensureSize(listColumnVector.childCount, true);
                    listColumnVector.lengths[batchIndex] = size;
                    for (int i = 0; i < size; i++) {
                        final Object element = sourceListOI.getListElement(object, i);
                        final int offset = (int) (listColumnVector.offsets[batchIndex] + i);
                        assignConvertRowColumn(listColumnVector.child, offset, targetElementTypeInfo, sourceElementOI, null, element);
                    }
                }
                break;
            case MAP:
                {
                    final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
                    final MapObjectInspector mapObjectInspector = (MapObjectInspector) sourceObjectInspector;
                    final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
                    final Map<?, ?> map = mapObjectInspector.getMap(object);
                    for (Map.Entry<?, ?> entry : map.entrySet()) {
                        assignConvertRowColumn(mapColumnVector.keys, batchIndex, mapTypeInfo.getMapKeyTypeInfo(), mapObjectInspector.getMapKeyObjectInspector(), null, entry.getKey());
                        assignConvertRowColumn(mapColumnVector.values, batchIndex, mapTypeInfo.getMapValueTypeInfo(), mapObjectInspector.getMapValueObjectInspector(), null, entry.getValue());
                    }
                }
                break;
            case STRUCT:
                {
                    final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
                    final StructObjectInspector sourceStructOI = (StructObjectInspector) sourceObjectInspector;
                    final List<? extends StructField> sourceFields = sourceStructOI.getAllStructFieldRefs();
                    final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
                    final List<TypeInfo> targetTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
                    final int size = targetTypeInfos.size();
                    for (int i = 0; i < size; i++) {
                        if (i < sourceFields.size()) {
                            final StructField sourceStructField = sourceFields.get(i);
                            final ObjectInspector sourceFieldOI = sourceStructField.getFieldObjectInspector();
                            final Object sourceData = sourceStructOI.getStructFieldData(object, sourceStructField);
                            assignConvertRowColumn(structColumnVector.fields[i], batchIndex, targetTypeInfos.get(i), sourceFieldOI, null, sourceData);
                        } else {
                            final ColumnVector fieldColumnVector = structColumnVector.fields[i];
                            VectorizedBatchUtil.setNullColIsNullValue(fieldColumnVector, batchIndex);
                        }
                    }
                }
                break;
            case UNION:
                {
                    final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
                    final UnionObjectInspector unionObjectInspector = (UnionObjectInspector) sourceObjectInspector;
                    final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
                    final int tag = unionObjectInspector.getTag(object);
                    assignConvertRowColumn(unionColumnVector.fields[tag], batchIndex, unionTypeInfo.getAllUnionObjectTypeInfos().get(tag), unionObjectInspector.getObjectInspectors().get(tag), null, unionObjectInspector.getField(tag));
                }
                break;
            default:
                throw new RuntimeException("Category " + targetCategory.name() + " not supported");
        }
    } catch (NumberFormatException e) {
        // Some of the conversion methods throw this exception on numeric parsing errors.
        VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
        return;
    }
    // We always set the null flag to false when there is a value.
    columnVector.isNull[batchIndex] = false;
}
Also used : PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Category(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) List(java.util.List) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Date(java.sql.Date) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) Map(java.util.Map) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)
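
The interval branches above reduce intervals to primitive vector values; a hedged sketch of that encoding for the year-month case (a one-row LongColumnVector is assumed, batch wiring elided):

// Year-month intervals are stored in a LongColumnVector as total months.
LongColumnVector ymCol = new LongColumnVector(1);
HiveIntervalYearMonth ym = new HiveIntervalYearMonth(2, 3);  // 2 years, 3 months
ymCol.vector[0] = ym.getTotalMonths();                       // 27
ymCol.isNull[0] = false;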

Example 15 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

the class VectorBatchDebug method debugDisplayOneRow.

public static void debugDisplayOneRow(VectorizedRowBatch batch, int index, String prefix) {
    StringBuilder sb = new StringBuilder();
    sb.append(prefix + " row " + index + " ");
    for (int p = 0; p < batch.projectionSize; p++) {
        int column = batch.projectedColumns[p];
        if (p == column) {
            sb.append("(col " + p + ") ");
        } else {
            sb.append("(proj col " + p + " col " + column + ") ");
        }
        ColumnVector colVector = batch.cols[column];
        if (colVector == null) {
            sb.append("(null ColumnVector)");
        } else {
            boolean isRepeating = colVector.isRepeating;
            if (isRepeating) {
                sb.append("(repeating)");
            }
            index = (isRepeating ? 0 : index);
            if (colVector.noNulls || !colVector.isNull[index]) {
                if (colVector instanceof LongColumnVector) {
                    sb.append(((LongColumnVector) colVector).vector[index]);
                } else if (colVector instanceof DoubleColumnVector) {
                    sb.append(((DoubleColumnVector) colVector).vector[index]);
                } else if (colVector instanceof BytesColumnVector) {
                    BytesColumnVector bytesColumnVector = (BytesColumnVector) colVector;
                    byte[] bytes = bytesColumnVector.vector[index];
                    int start = bytesColumnVector.start[index];
                    int length = bytesColumnVector.length[index];
                    if (bytes == null) {
                        sb.append("(Unexpected null bytes with start " + start + " length " + length + ")");
                    } else {
                        sb.append("bytes: '" + displayBytes(bytes, start, length) + "'");
                    }
                } else if (colVector instanceof DecimalColumnVector) {
                    sb.append(((DecimalColumnVector) colVector).vector[index].toString());
                } else if (colVector instanceof TimestampColumnVector) {
                    Timestamp timestamp = new Timestamp(0);
                    ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
                    sb.append(timestamp.toString());
                } else if (colVector instanceof IntervalDayTimeColumnVector) {
                    HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
                    sb.append(intervalDayTime.toString());
                } else {
                    sb.append("Unknown");
                }
            } else {
                sb.append("NULL");
            }
        }
        sb.append(" ");
    }
    System.err.println(sb.toString());
// LOG.info(sb.toString());
}
Also used : Timestamp(java.sql.Timestamp) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)
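
A hypothetical call site (only the loop is new; batch is assumed to be an already-populated VectorizedRowBatch from the code under test):

for (int i = 0; i < batch.size; i++) {
    // Writes one formatted line per row to standard error.
    VectorBatchDebug.debugDisplayOneRow(batch, i, "after-filter");
}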

Aggregations

HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 35
Timestamp (java.sql.Timestamp): 18
HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth): 18
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 14
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 14
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 14
BytesWritable (org.apache.hadoop.io.BytesWritable): 14
Text (org.apache.hadoop.io.Text): 14
Date (java.sql.Date): 12
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 12
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 12
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 11
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 11
FloatWritable (org.apache.hadoop.io.FloatWritable): 11
IntWritable (org.apache.hadoop.io.IntWritable): 11
LongWritable (org.apache.hadoop.io.LongWritable): 11
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 10
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 10
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 10
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 10