Example 1 with DecimalLogicalTypeAnnotation

Use of org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation in project hive by apache.

From class VectorizedListColumnReader, method fillColumnVector:

private void fillColumnVector(PrimitiveObjectInspector.PrimitiveCategory category, ListColumnVector lcv, List valueList, int elementNum) {
    int total = valueList.size();
    setChildrenInfo(lcv, total, elementNum);
    switch(category) {
        case BOOLEAN:
            lcv.child = new LongColumnVector(total);
            for (int i = 0; i < valueList.size(); i++) {
                if (valueList.get(i) == null) {
                    lcv.child.isNull[i] = true;
                } else {
                    ((LongColumnVector) lcv.child).vector[i] = ((List<Integer>) valueList).get(i);
                }
            }
            break;
        case INT:
        case BYTE:
        case SHORT:
        case DATE:
        case INTERVAL_YEAR_MONTH:
        case LONG:
            lcv.child = new LongColumnVector(total);
            for (int i = 0; i < valueList.size(); i++) {
                if (valueList.get(i) == null) {
                    lcv.child.isNull[i] = true;
                } else {
                    ((LongColumnVector) lcv.child).vector[i] = ((List<Long>) valueList).get(i);
                }
            }
            break;
        case DOUBLE:
            lcv.child = new DoubleColumnVector(total);
            for (int i = 0; i < valueList.size(); i++) {
                if (valueList.get(i) == null) {
                    lcv.child.isNull[i] = true;
                } else {
                    ((DoubleColumnVector) lcv.child).vector[i] = ((List<Double>) valueList).get(i);
                }
            }
            break;
        case BINARY:
        case STRING:
        case CHAR:
        case VARCHAR:
            lcv.child = new BytesColumnVector(total);
            lcv.child.init();
            for (int i = 0; i < valueList.size(); i++) {
                byte[] src = ((List<byte[]>) valueList).get(i);
                if (src == null) {
                    ((BytesColumnVector) lcv.child).setRef(i, src, 0, 0);
                    lcv.child.isNull[i] = true;
                } else {
                    ((BytesColumnVector) lcv.child).setRef(i, src, 0, src.length);
                }
            }
            break;
        case FLOAT:
            lcv.child = new DoubleColumnVector(total);
            for (int i = 0; i < valueList.size(); i++) {
                if (valueList.get(i) == null) {
                    lcv.child.isNull[i] = true;
                } else {
                    ((DoubleColumnVector) lcv.child).vector[i] = ((List<Float>) valueList).get(i);
                }
            }
            break;
        case DECIMAL:
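            // decimalTypeCheck verifies the Parquet type carries a decimal annotation;
            // precision and scale for the child vector are then taken from that annotation.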
            decimalTypeCheck(type);
            DecimalLogicalTypeAnnotation logicalType = (DecimalLogicalTypeAnnotation) type.getLogicalTypeAnnotation();
            int precision = logicalType.getPrecision();
            int scale = logicalType.getScale();
            lcv.child = new DecimalColumnVector(total, precision, scale);
            for (int i = 0; i < valueList.size(); i++) {
                if (valueList.get(i) == null) {
                    lcv.child.isNull[i] = true;
                } else {
                    ((DecimalColumnVector) lcv.child).vector[i].set(((List<byte[]>) valueList).get(i), scale);
                }
            }
            break;
        case INTERVAL_DAY_TIME:
        case TIMESTAMP:
        default:
            throw new RuntimeException("Unsupported type in the list: " + type);
    }
}
Also used: DecimalLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), DoubleColumnVector (org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector), BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector), ArrayList (java.util.ArrayList), List (java.util.List), LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector)
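
The DECIMAL branch reads precision and scale off the column's DecimalLogicalTypeAnnotation. A minimal, self-contained sketch of that lookup using the parquet-mr Types builder (the column name and the decimal(38, 10) parameters are illustrative; note that decimalType takes scale before precision):

import org.apache.parquet.schema.LogicalTypeAnnotation;
import org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.Types;

public class DecimalAnnotationSketch {
    public static void main(String[] args) {
        // Illustrative column: FIXED_LEN_BYTE_ARRAY(16) annotated as decimal(38, 10).
        PrimitiveType column = Types.optional(PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY)
                .length(16)
                .as(LogicalTypeAnnotation.decimalType(10, 38))
                .named("amount");
        DecimalLogicalTypeAnnotation decimal =
                (DecimalLogicalTypeAnnotation) column.getLogicalTypeAnnotation();
        // Prints "precision=38, scale=10"
        System.out.println("precision=" + decimal.getPrecision() + ", scale=" + decimal.getScale());
    }
}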

Example 2 with DecimalLogicalTypeAnnotation

Use of org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation in project hive by apache.

From class ParquetDataColumnReaderFactory, method getConvertorFromBinary:

private static ParquetDataColumnReader getConvertorFromBinary(boolean isDict, PrimitiveType parquetType, TypeInfo hiveType, ValuesReader valuesReader, Dictionary dictionary) {
    LogicalTypeAnnotation logicalType = parquetType.getLogicalTypeAnnotation();
    // max length for varchar and char cases
    int length = getVarcharLength(hiveType);
    TypeInfo realHiveType = (hiveType instanceof ListTypeInfo) ? ((ListTypeInfo) hiveType).getListElementTypeInfo() : (hiveType instanceof MapTypeInfo) ? ((MapTypeInfo) hiveType).getMapValueTypeInfo() : hiveType;
    String typeName = TypeInfoUtils.getBaseName(realHiveType.getTypeName());
    int hivePrecision = (typeName.equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) ? ((DecimalTypeInfo) realHiveType).getPrecision() : 0;
    int hiveScale = (typeName.equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) ? ((DecimalTypeInfo) realHiveType).getScale() : 0;
    if (logicalType == null) {
        return isDict ? new DefaultParquetDataColumnReader(dictionary, length) : new DefaultParquetDataColumnReader(valuesReader, length);
    }
    Optional<ParquetDataColumnReader> reader = parquetType.getLogicalTypeAnnotation().accept(new LogicalTypeAnnotationVisitor<ParquetDataColumnReader>() {

        @Override
        public Optional<ParquetDataColumnReader> visit(DecimalLogicalTypeAnnotation logicalTypeAnnotation) {
            final short scale = (short) logicalTypeAnnotation.getScale();
            return isDict ? Optional.of(new TypesFromDecimalPageReader(dictionary, length, scale, hivePrecision, hiveScale)) : Optional.of(new TypesFromDecimalPageReader(valuesReader, length, scale, hivePrecision, hiveScale));
        }

        @Override
        public Optional<ParquetDataColumnReader> visit(StringLogicalTypeAnnotation logicalTypeAnnotation) {
            return isDict ? Optional.of(new TypesFromStringPageReader(dictionary, length)) : Optional.of(new TypesFromStringPageReader(valuesReader, length));
        }
    });
    if (reader.isPresent()) {
        return reader.get();
    }
    return isDict ? new DefaultParquetDataColumnReader(dictionary, length) : new DefaultParquetDataColumnReader(valuesReader, length);
}
Also used: DecimalLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation), Optional (java.util.Optional), StringLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.StringLogicalTypeAnnotation), MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo), ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), LogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation), TimestampLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.TimestampLogicalTypeAnnotation)
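
The visitor above returns Optional.empty() for any annotation it does not override, which is what lets the factory fall through to DefaultParquetDataColumnReader. A self-contained sketch of that dispatch pattern (the describe helper is hypothetical, not part of Hive):

import java.util.Optional;
import org.apache.parquet.schema.LogicalTypeAnnotation;
import org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation;
import org.apache.parquet.schema.LogicalTypeAnnotation.LogicalTypeAnnotationVisitor;
import org.apache.parquet.schema.LogicalTypeAnnotation.StringLogicalTypeAnnotation;

public class VisitorSketch {
    static String describe(LogicalTypeAnnotation annotation) {
        Optional<String> handled = annotation.accept(new LogicalTypeAnnotationVisitor<String>() {
            @Override
            public Optional<String> visit(DecimalLogicalTypeAnnotation t) {
                return Optional.of("decimal(" + t.getPrecision() + "," + t.getScale() + ")");
            }

            @Override
            public Optional<String> visit(StringLogicalTypeAnnotation t) {
                return Optional.of("string");
            }
        });
        // Unhandled annotations leave the Optional empty, so the caller can fall back to a default.
        return handled.orElse("default");
    }

    public static void main(String[] args) {
        System.out.println(describe(LogicalTypeAnnotation.decimalType(2, 10))); // decimal(10,2)
        System.out.println(describe(LogicalTypeAnnotation.stringType()));       // string
        System.out.println(describe(LogicalTypeAnnotation.dateType()));         // default
    }
}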

Example 3 with DecimalLogicalTypeAnnotation

Use of org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation in project hive by apache.

From class VectorizedPrimitiveColumnReader, method decodeDictionaryIds:

/**
 * Reads `num` values into column, decoding the values from `dictionaryIds` and `dictionary`.
 */
private void decodeDictionaryIds(int rowId, int num, ColumnVector column, TypeInfo columnType, LongColumnVector dictionaryIds) {
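    // Carry the null flags over from the dictionary-id vector; actual values are materialized per type below.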
    System.arraycopy(dictionaryIds.isNull, rowId, column.isNull, rowId, num);
    if (column.noNulls) {
        column.noNulls = dictionaryIds.noNulls;
    }
    column.isRepeating = column.isRepeating && dictionaryIds.isRepeating;
    PrimitiveTypeInfo primitiveColumnType = (PrimitiveTypeInfo) columnType;
    switch(primitiveColumnType.getPrimitiveCategory()) {
        case INT:
            for (int i = rowId; i < rowId + num; ++i) {
                ((LongColumnVector) column).vector[i] = dictionary.readInteger((int) dictionaryIds.vector[i]);
                if (!dictionary.isValid()) {
                    setNullValue(column, i);
                    ((LongColumnVector) column).vector[i] = 0;
                }
            }
            break;
        case BYTE:
            for (int i = rowId; i < rowId + num; ++i) {
                ((LongColumnVector) column).vector[i] = dictionary.readTinyInt((int) dictionaryIds.vector[i]);
                if (!dictionary.isValid()) {
                    setNullValue(column, i);
                    ((LongColumnVector) column).vector[i] = 0;
                }
            }
            break;
        case SHORT:
            for (int i = rowId; i < rowId + num; ++i) {
                ((LongColumnVector) column).vector[i] = dictionary.readSmallInt((int) dictionaryIds.vector[i]);
                if (!dictionary.isValid()) {
                    setNullValue(column, i);
                    ((LongColumnVector) column).vector[i] = 0;
                }
            }
            break;
        case DATE:
            DateColumnVector dc = (DateColumnVector) column;
            dc.setUsingProlepticCalendar(true);
            for (int i = rowId; i < rowId + num; ++i) {
                dc.vector[i] = skipProlepticConversion ? dictionary.readLong((int) dictionaryIds.vector[i]) : CalendarUtils.convertDateToProleptic((int) dictionary.readLong((int) dictionaryIds.vector[i]));
                if (!dictionary.isValid()) {
                    setNullValue(column, i);
                    dc.vector[i] = 0;
                }
            }
            break;
        case INTERVAL_YEAR_MONTH:
        case LONG:
            for (int i = rowId; i < rowId + num; ++i) {
                ((LongColumnVector) column).vector[i] = dictionary.readLong((int) dictionaryIds.vector[i]);
                if (!dictionary.isValid()) {
                    setNullValue(column, i);
                    ((LongColumnVector) column).vector[i] = 0;
                }
            }
            break;
        case BOOLEAN:
            for (int i = rowId; i < rowId + num; ++i) {
                ((LongColumnVector) column).vector[i] = dictionary.readBoolean((int) dictionaryIds.vector[i]) ? 1 : 0;
            }
            break;
        case DOUBLE:
            for (int i = rowId; i < rowId + num; ++i) {
                ((DoubleColumnVector) column).vector[i] = dictionary.readDouble((int) dictionaryIds.vector[i]);
                if (!dictionary.isValid()) {
                    setNullValue(column, i);
                    ((DoubleColumnVector) column).vector[i] = 0;
                }
            }
            break;
        case BINARY:
            for (int i = rowId; i < rowId + num; ++i) {
                ((BytesColumnVector) column).setVal(i, dictionary.readBytes((int) dictionaryIds.vector[i]));
            }
            break;
        case STRING:
            for (int i = rowId; i < rowId + num; ++i) {
                ((BytesColumnVector) column).setVal(i, dictionary.readString((int) dictionaryIds.vector[i]));
            }
            break;
        case VARCHAR:
            for (int i = rowId; i < rowId + num; ++i) {
                ((BytesColumnVector) column).setVal(i, dictionary.readVarchar((int) dictionaryIds.vector[i]));
            }
            break;
        case CHAR:
            for (int i = rowId; i < rowId + num; ++i) {
                ((BytesColumnVector) column).setVal(i, dictionary.readChar((int) dictionaryIds.vector[i]));
            }
            break;
        case FLOAT:
            for (int i = rowId; i < rowId + num; ++i) {
                ((DoubleColumnVector) column).vector[i] = dictionary.readFloat((int) dictionaryIds.vector[i]);
                if (!dictionary.isValid()) {
                    setNullValue(column, i);
                    ((DoubleColumnVector) column).vector[i] = 0;
                }
            }
            break;
        case DECIMAL:
            DecimalLogicalTypeAnnotation decimalLogicalType = null;
            if (type.getLogicalTypeAnnotation() instanceof DecimalLogicalTypeAnnotation) {
                decimalLogicalType = (DecimalLogicalTypeAnnotation) type.getLogicalTypeAnnotation();
            }
            DecimalColumnVector decimalColumnVector = ((DecimalColumnVector) column);
            byte[] decimalData = null;
            fillDecimalPrecisionScale(decimalLogicalType, decimalColumnVector);
            for (int i = rowId; i < rowId + num; ++i) {
                decimalData = dictionary.readDecimal((int) dictionaryIds.vector[i]);
                if (dictionary.isValid()) {
                    decimalColumnVector.vector[i].set(decimalData, decimalColumnVector.scale);
                } else {
                    setNullValue(column, i);
                }
            }
            break;
        case TIMESTAMP:
            TimestampColumnVector tsc = (TimestampColumnVector) column;
            tsc.setUsingProlepticCalendar(true);
            for (int i = rowId; i < rowId + num; ++i) {
                tsc.set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]).toSqlTimestamp());
            }
            break;
        case INTERVAL_DAY_TIME:
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
Also used: TimestampColumnVector (org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector), DecimalLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), DoubleColumnVector (org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector), BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector), DateColumnVector (org.apache.hadoop.hive.ql.exec.vector.DateColumnVector)
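
Every branch above applies the same idea: a row stores only a small dictionary id, and decoding swaps the id for a value looked up in the page dictionary. A minimal sketch of that idea in plain Java (the decode helper and sample data are illustrative, not Hive's API):

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;

public class DictionaryDecodeSketch {
    // Replace each id with its dictionary value; the value itself is stored only once.
    static BigDecimal[] decode(int[] ids, List<BigDecimal> dictionary) {
        BigDecimal[] out = new BigDecimal[ids.length];
        for (int i = 0; i < ids.length; i++) {
            out[i] = dictionary.get(ids[i]);
        }
        return out;
    }

    public static void main(String[] args) {
        List<BigDecimal> dictionary = Arrays.asList(new BigDecimal("1.50"), new BigDecimal("2.25"));
        int[] ids = {0, 1, 1, 0};
        System.out.println(Arrays.toString(decode(ids, dictionary))); // [1.50, 2.25, 2.25, 1.50]
    }
}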

Example 4 with DecimalLogicalTypeAnnotation

Use of org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation in project hive by apache.

From class VectorizedPrimitiveColumnReader, method readDecimal:

private void readDecimal(int total, DecimalColumnVector c, int rowId) throws IOException {
    DecimalLogicalTypeAnnotation decimalLogicalType = null;
    if (type.getLogicalTypeAnnotation() instanceof DecimalLogicalTypeAnnotation) {
        decimalLogicalType = (DecimalLogicalTypeAnnotation) type.getLogicalTypeAnnotation();
    }
    byte[] decimalData = null;
    fillDecimalPrecisionScale(decimalLogicalType, c);
    int left = total;
    while (left > 0) {
        readRepetitionAndDefinitionLevels();
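        // A definition level at the maximum means a value is present at this position; anything lower is null.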
        if (definitionLevel >= maxDefLevel) {
            decimalData = dataColumn.readDecimal();
            if (dataColumn.isValid()) {
                c.vector[rowId].set(decimalData, c.scale);
                c.isNull[rowId] = false;
                c.isRepeating = c.isRepeating && (c.vector[0] == c.vector[rowId]);
            } else {
                setNullValue(c, rowId);
            }
        } else {
            setNullValue(c, rowId);
        }
        rowId++;
        left--;
    }
}
Also used: DecimalLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation)
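
The c.vector[rowId].set(decimalData, c.scale) call interprets decimalData the way Parquet stores decimals: as a big-endian two's-complement unscaled integer, so the value is the unscaled integer times 10^-scale. A short java.math sketch of that interpretation (the sample bytes are illustrative):

import java.math.BigDecimal;
import java.math.BigInteger;

public class UnscaledBytesSketch {
    public static void main(String[] args) {
        // 12345 as big-endian two's-complement bytes, the encoding Parquet uses for decimals.
        byte[] decimalData = new BigInteger("12345").toByteArray();
        int scale = 2;
        BigDecimal value = new BigDecimal(new BigInteger(decimalData), scale);
        System.out.println(value); // 123.45
    }
}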

Example 5 with DecimalLogicalTypeAnnotation

Use of org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation in project hive by apache.

From class ParquetDataColumnReaderFactory, method getDataColumnReaderByTypeHelper:

private static ParquetDataColumnReader getDataColumnReaderByTypeHelper(boolean isDictionary, PrimitiveType parquetType, TypeInfo hiveType, Dictionary dictionary, ValuesReader valuesReader, boolean skipTimestampConversion, ZoneId writerTimezone, boolean legacyConversionEnabled) throws IOException {
    // max length for varchar and char cases
    int length = getVarcharLength(hiveType);
    TypeInfo realHiveType = (hiveType instanceof ListTypeInfo) ? ((ListTypeInfo) hiveType).getListElementTypeInfo() : hiveType;
    String typeName = TypeInfoUtils.getBaseName(realHiveType.getTypeName());
    int hivePrecision = (typeName.equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) ? ((DecimalTypeInfo) realHiveType).getPrecision() : 0;
    int hiveScale = (typeName.equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) ? ((DecimalTypeInfo) realHiveType).getScale() : 0;
    switch(parquetType.getPrimitiveTypeName()) {
        case INT32:
            if (ETypeConverter.isUnsignedInteger(parquetType)) {
                return isDictionary ? new TypesFromUInt32PageReader(dictionary, length, hivePrecision, hiveScale) : new TypesFromUInt32PageReader(valuesReader, length, hivePrecision, hiveScale);
            } else if (parquetType.getLogicalTypeAnnotation() instanceof DecimalLogicalTypeAnnotation) {
                DecimalLogicalTypeAnnotation logicalType = (DecimalLogicalTypeAnnotation) parquetType.getLogicalTypeAnnotation();
                final short scale = (short) logicalType.getScale();
                return isDictionary ? new TypesFromInt32DecimalPageReader(dictionary, length, scale, hivePrecision, hiveScale) : new TypesFromInt32DecimalPageReader(valuesReader, length, scale, hivePrecision, hiveScale);
            } else {
                return isDictionary ? new TypesFromInt32PageReader(dictionary, length, hivePrecision, hiveScale) : new TypesFromInt32PageReader(valuesReader, length, hivePrecision, hiveScale);
            }
        case INT64:
            LogicalTypeAnnotation logicalType = parquetType.getLogicalTypeAnnotation();
            if (logicalType instanceof TimestampLogicalTypeAnnotation) {
                TimestampLogicalTypeAnnotation timestampLogicalType = (TimestampLogicalTypeAnnotation) logicalType;
                boolean isAdjustedToUTC = timestampLogicalType.isAdjustedToUTC();
                TimeUnit timeUnit = timestampLogicalType.getUnit();
                return isDictionary ? new TypesFromInt64PageReader(dictionary, length, isAdjustedToUTC, timeUnit) : new TypesFromInt64PageReader(valuesReader, length, isAdjustedToUTC, timeUnit);
            }
            if (ETypeConverter.isUnsignedInteger(parquetType)) {
                return isDictionary ? new TypesFromUInt64PageReader(dictionary, length, hivePrecision, hiveScale) : new TypesFromUInt64PageReader(valuesReader, length, hivePrecision, hiveScale);
            }
            if (logicalType instanceof DecimalLogicalTypeAnnotation) {
                DecimalLogicalTypeAnnotation decimalLogicalType = (DecimalLogicalTypeAnnotation) logicalType;
                final short scale = (short) decimalLogicalType.getScale();
                return isDictionary ? new TypesFromInt64DecimalPageReader(dictionary, length, scale, hivePrecision, hiveScale) : new TypesFromInt64DecimalPageReader(valuesReader, length, scale, hivePrecision, hiveScale);
            }
            return isDictionary ? new TypesFromInt64PageReader(dictionary, length, hivePrecision, hiveScale) : new TypesFromInt64PageReader(valuesReader, length, hivePrecision, hiveScale);
        case FLOAT:
            return isDictionary ? new TypesFromFloatPageReader(dictionary, length, hivePrecision, hiveScale) : new TypesFromFloatPageReader(valuesReader, length, hivePrecision, hiveScale);
        case INT96:
            ZoneId targetZone = skipTimestampConversion ? ZoneOffset.UTC : firstNonNull(writerTimezone, TimeZone.getDefault().toZoneId());
            return isDictionary ? new TypesFromInt96PageReader(dictionary, length, targetZone, legacyConversionEnabled) : new TypesFromInt96PageReader(valuesReader, length, targetZone, legacyConversionEnabled);
        case BOOLEAN:
            return isDictionary ? new TypesFromBooleanPageReader(dictionary, length) : new TypesFromBooleanPageReader(valuesReader, length);
        case BINARY:
        case FIXED_LEN_BYTE_ARRAY:
            return getConvertorFromBinary(isDictionary, parquetType, hiveType, valuesReader, dictionary);
        case DOUBLE:
            return isDictionary ? new TypesFromDoublePageReader(dictionary, length, hivePrecision, hiveScale) : new TypesFromDoublePageReader(valuesReader, length, hivePrecision, hiveScale);
        default:
            return isDictionary ? new DefaultParquetDataColumnReader(dictionary, length, hivePrecision, hiveScale) : new DefaultParquetDataColumnReader(valuesReader, length, hivePrecision, hiveScale);
    }
}
Also used: DecimalLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation), ZoneId (java.time.ZoneId), MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo), ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), StringLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.StringLogicalTypeAnnotation), LogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation), TimestampLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.TimestampLogicalTypeAnnotation), TimeUnit (org.apache.parquet.schema.LogicalTypeAnnotation.TimeUnit)
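
In the INT64 branch above, reader selection hinges on two properties of TimestampLogicalTypeAnnotation: whether values are adjusted to UTC, and their time unit. A small sketch of reading those properties (the constructed annotation is illustrative):

import org.apache.parquet.schema.LogicalTypeAnnotation;
import org.apache.parquet.schema.LogicalTypeAnnotation.TimeUnit;
import org.apache.parquet.schema.LogicalTypeAnnotation.TimestampLogicalTypeAnnotation;

public class TimestampAnnotationSketch {
    public static void main(String[] args) {
        LogicalTypeAnnotation logicalType = LogicalTypeAnnotation.timestampType(true, TimeUnit.MICROS);
        if (logicalType instanceof TimestampLogicalTypeAnnotation) {
            TimestampLogicalTypeAnnotation ts = (TimestampLogicalTypeAnnotation) logicalType;
            System.out.println(ts.isAdjustedToUTC()); // true
            System.out.println(ts.getUnit());         // MICROS
        }
    }
}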

Aggregations

DecimalLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.DecimalLogicalTypeAnnotation): 5 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 3 usages
BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector): 2 usages
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 2 usages
DoubleColumnVector (org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector): 2 usages
LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector): 2 usages
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 2 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 2 usages
ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo): 2 usages
MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo): 2 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 2 usages
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 2 usages
LogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation): 2 usages
StringLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.StringLogicalTypeAnnotation): 2 usages
TimestampLogicalTypeAnnotation (org.apache.parquet.schema.LogicalTypeAnnotation.TimestampLogicalTypeAnnotation): 2 usages
ZoneId (java.time.ZoneId): 1 usage
ArrayList (java.util.ArrayList): 1 usage
List (java.util.List): 1 usage
Optional (java.util.Optional): 1 usage
DateColumnVector (org.apache.hadoop.hive.ql.exec.vector.DateColumnVector): 1 usage