Search in sources :

Example 1 with TimestampLocalTZTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.

From the class DDLSemanticAnalyzer, method getTypeName:

/**
 * Resolves the Hive type name for a type AST node, expanding parameterized
 * types (char/varchar/decimal) to their qualified form and resolving the
 * session-local time zone for TIMESTAMP WITH LOCAL TIME ZONE.
 *
 * @param node the type AST node produced by the parser
 * @return the qualified Hive type name for the node's token
 * @throws SemanticException if the type is unsupported (DATETIME) or the
 *         Hive session configuration cannot be obtained
 */
public static String getTypeName(ASTNode node) throws SemanticException {
    final int token = node.getType();
    // datetime type isn't currently supported
    if (token == HiveParser.TOK_DATETIME) {
        throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
    }
    final String typeName;
    switch (token) {
        case HiveParser.TOK_CHAR:
            // char(n) — qualified name carries the length parameter
            typeName = ParseUtils.getCharTypeInfo(node).getQualifiedName();
            break;
        case HiveParser.TOK_VARCHAR:
            // varchar(n) — qualified name carries the length parameter
            typeName = ParseUtils.getVarcharTypeInfo(node).getQualifiedName();
            break;
        case HiveParser.TOK_TIMESTAMPLOCALTZ:
            // The local-timezone timestamp type is parameterized by the
            // session's configured time zone, read from the Hive conf.
            final HiveConf conf;
            try {
                conf = Hive.get().getConf();
            } catch (HiveException e) {
                throw new SemanticException(e);
            }
            typeName = TypeInfoFactory.getTimestampTZTypeInfo(conf.getLocalTimeZone()).getQualifiedName();
            break;
        case HiveParser.TOK_DECIMAL:
            // decimal(p,s) — qualified name carries precision and scale
            typeName = ParseUtils.getDecimalTypeTypeInfo(node).getQualifiedName();
            break;
        default:
            // Plain (non-parameterized) types map straight from the token.
            typeName = TokenToTypeName.get(token);
    }
    return typeName;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveConf(org.apache.hadoop.hive.conf.HiveConf) TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)

Example 2 with TimestampLocalTZTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.

From the class DruidSerDe, method initialize:

/**
 * Initializes the SerDe. Resolves the local-timezone timestamp type from the
 * configured HIVE_LOCAL_TIME_ZONE, then picks the schema source: an embedded
 * Druid query plan when present, otherwise the declared column properties
 * (CTAS case), otherwise a Druid segment-metadata query against the broker.
 */
@Override
public void initialize(Configuration configuration, Properties tableProperties, Properties partitionProperties) throws SerDeException {
    super.initialize(configuration, tableProperties, partitionProperties);
    // Timestamp-with-local-tz columns are parameterized by the session zone.
    tsTZTypeInfo = new TimestampLocalTZTypeInfo(configuration.get(HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE.varname));
    // Druid query
    final String druidQuery = properties.getProperty(Constants.DRUID_QUERY_JSON, null);
    if (druidQuery == null || druidQuery.isEmpty()) {
        // No query. Either it is a CTAS, or we need to create a Druid meta data Query
        final boolean hasColumns = !org.apache.commons.lang3.StringUtils.isEmpty(properties.getProperty(serdeConstants.LIST_COLUMNS));
        final boolean hasColumnTypes = !org.apache.commons.lang3.StringUtils.isEmpty(properties.getProperty(serdeConstants.LIST_COLUMN_TYPES));
        if (hasColumns && hasColumnTypes) {
            // CASE CTAS statement
            initFromProperties(properties);
        } else {
            // Segment Metadata query that retrieves all columns present in
            // the data source (dimensions and metrics).
            initFromMetaDataQuery(configuration, properties);
        }
    } else {
        initFromDruidQueryPlan(properties, druidQuery);
    }
    if (log.isDebugEnabled()) {
        log.debug("DruidSerDe initialized with\n" + "\t columns: " + Arrays.toString(columns) + "\n\t types: " + Arrays.toString(types));
    }
}
Also used : TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo)

Example 3 with TimestampLocalTZTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.

From the class DruidSerDe, method initFromMetaDataQuery:

/**
 * Infers the table schema by issuing a segment-metadata query to the Druid
 * broker for the configured data source and converting each reported column
 * to a Hive primitive type. The Druid timestamp column, and any column whose
 * Druid type maps to a local-timezone timestamp, are assigned the
 * session-zone-aware {@code tsTZTypeInfo}.
 *
 * @param configuration Hadoop configuration holding the broker address
 * @param properties    table properties holding the Druid data source name
 * @throws SerDeException if the data source or broker address is missing,
 *                        or the metadata request fails
 */
private void initFromMetaDataQuery(final Configuration configuration, final Properties properties) throws SerDeException {
    final List<String> columnNames = new ArrayList<>();
    final List<PrimitiveTypeInfo> columnTypes = new ArrayList<>();
    final List<ObjectInspector> inspectors = new ArrayList<>();
    String dataSource = properties.getProperty(Constants.DRUID_DATA_SOURCE);
    if (dataSource == null) {
        throw new SerDeException("Druid data source not specified; use " + Constants.DRUID_DATA_SOURCE + " in table properties");
    }
    SegmentMetadataQueryBuilder builder = new Druids.SegmentMetadataQueryBuilder();
    builder.dataSource(dataSource);
    builder.merge(true);
    builder.analysisTypes();
    SegmentMetadataQuery query = builder.build();
    // Execute query in Druid
    String address = HiveConf.getVar(configuration, HiveConf.ConfVars.HIVE_DRUID_BROKER_DEFAULT_ADDRESS);
    if (org.apache.commons.lang3.StringUtils.isEmpty(address)) {
        throw new SerDeException("Druid broker address not specified in configuration");
    }
    // Infer schema
    SegmentAnalysis schemaInfo;
    try {
        schemaInfo = submitMetadataRequest(address, query);
    } catch (IOException e) {
        throw new SerDeException(e);
    }
    for (Entry<String, ColumnAnalysis> columnInfo : schemaInfo.getColumns().entrySet()) {
        // field name
        columnNames.add(columnInfo.getKey());
        // field type: resolve the effective type once so that the stored
        // column type and its object inspector always agree. (Previously the
        // inspector for a non-default local-tz column was built from the raw
        // converted type while columnTypes received tsTZTypeInfo, which could
        // disagree on the time zone.)
        final PrimitiveTypeInfo type;
        if (columnInfo.getKey().equals(DruidConstants.DEFAULT_TIMESTAMP_COLUMN)) {
            // Special handling for timestamp column: always session-zone-aware.
            type = tsTZTypeInfo;
        } else {
            PrimitiveTypeInfo converted = DruidSerDeUtils.convertDruidToHiveType(columnInfo.getValue().getType());
            type = converted instanceof TimestampLocalTZTypeInfo ? tsTZTypeInfo : converted;
        }
        columnTypes.add(type);
        inspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type));
    }
    columns = columnNames.toArray(new String[0]);
    types = columnTypes.toArray(new PrimitiveTypeInfo[0]);
    inspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
}
Also used : BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) TimestampLocalTZObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) ArrayList(java.util.ArrayList) TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo) IOException(java.io.IOException) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ColumnAnalysis(org.apache.druid.query.metadata.metadata.ColumnAnalysis) SegmentMetadataQueryBuilder(org.apache.druid.query.Druids.SegmentMetadataQueryBuilder) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) 
SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 4 with TimestampLocalTZTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.

From the class BaseSemanticAnalyzer, method getTypeName:

/**
 * Maps a type AST node to its Hive type name. Parameterized types
 * (char, varchar, decimal) are expanded to their qualified names; the
 * local-timezone timestamp uses the default (unset) zone at this stage.
 *
 * @param node the type AST node produced by the parser
 * @return the qualified Hive type name for the node's token
 * @throws SemanticException if the type is the unsupported DATETIME type
 */
private static String getTypeName(ASTNode node) throws SemanticException {
    final int token = node.getType();
    // datetime type isn't currently supported
    if (token == HiveParser.TOK_DATETIME) {
        throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
    }
    final String typeName;
    switch (token) {
        case HiveParser.TOK_CHAR: {
            // char(n) carries its length in the qualified name
            CharTypeInfo charType = ParseUtils.getCharTypeInfo(node);
            typeName = charType.getQualifiedName();
            break;
        }
        case HiveParser.TOK_VARCHAR: {
            // varchar(n) carries its length in the qualified name
            VarcharTypeInfo varcharType = ParseUtils.getVarcharTypeInfo(node);
            typeName = varcharType.getQualifiedName();
            break;
        }
        case HiveParser.TOK_TIMESTAMPLOCALTZ: {
            // null zone: resolved later against the session configuration
            TimestampLocalTZTypeInfo tsTzType = TypeInfoFactory.getTimestampTZTypeInfo(null);
            typeName = tsTzType.getQualifiedName();
            break;
        }
        case HiveParser.TOK_DECIMAL: {
            // decimal(p,s) carries precision/scale in the qualified name
            DecimalTypeInfo decimalType = ParseUtils.getDecimalTypeTypeInfo(node);
            typeName = decimalType.getQualifiedName();
            break;
        }
        default:
            // Non-parameterized types map directly from the parser token.
            typeName = TOKEN_TO_TYPE.get(token);
    }
    return typeName;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)

Example 5 with TimestampLocalTZTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.

From the class ArrowColumnarBatchSerDe, method toField:

/**
 * Converts a Hive {@link TypeInfo} to an Arrow {@link Field}, recursing into
 * complex types (list, struct, union, map). Maps are encoded as a list of
 * non-nullable key/value structs.
 *
 * @param name     the Arrow field name
 * @param typeInfo the Hive type to convert
 * @return the equivalent nullable Arrow field
 * @throws IllegalArgumentException for unsupported primitive or category types
 */
private static Field toField(String name, TypeInfo typeInfo) {
    switch(typeInfo.getCategory()) {
        case PRIMITIVE:
            final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
            switch(primitiveTypeInfo.getPrimitiveCategory()) {
                case BOOLEAN:
                    return Field.nullable(name, MinorType.BIT.getType());
                case BYTE:
                    return Field.nullable(name, MinorType.TINYINT.getType());
                case SHORT:
                    return Field.nullable(name, MinorType.SMALLINT.getType());
                case INT:
                    return Field.nullable(name, MinorType.INT.getType());
                case LONG:
                    return Field.nullable(name, MinorType.BIGINT.getType());
                case FLOAT:
                    return Field.nullable(name, MinorType.FLOAT4.getType());
                case DOUBLE:
                    return Field.nullable(name, MinorType.FLOAT8.getType());
                case STRING:
                case VARCHAR:
                case CHAR:
                    // char/varchar length is not representable in Arrow; all map to VARCHAR
                    return Field.nullable(name, MinorType.VARCHAR.getType());
                case DATE:
                    return Field.nullable(name, MinorType.DATEDAY.getType());
                case TIMESTAMP:
                    return Field.nullable(name, MinorType.TIMESTAMPMILLI.getType());
                case TIMESTAMPLOCALTZ:
                    // Carry the Hive-configured zone into the Arrow timestamp type
                    final TimestampLocalTZTypeInfo timestampLocalTZTypeInfo = (TimestampLocalTZTypeInfo) typeInfo;
                    final String timeZone = timestampLocalTZTypeInfo.getTimeZone().toString();
                    return Field.nullable(name, new ArrowType.Timestamp(TimeUnit.MILLISECOND, timeZone));
                case BINARY:
                    return Field.nullable(name, MinorType.VARBINARY.getType());
                case DECIMAL:
                    final DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
                    final int precision = decimalTypeInfo.precision();
                    final int scale = decimalTypeInfo.scale();
                    return Field.nullable(name, new ArrowType.Decimal(precision, scale));
                case INTERVAL_YEAR_MONTH:
                    return Field.nullable(name, MinorType.INTERVALYEAR.getType());
                case INTERVAL_DAY_TIME:
                    return Field.nullable(name, MinorType.INTERVALDAY.getType());
                default:
                    throw new IllegalArgumentException("Unsupported primitive type: " + primitiveTypeInfo.getPrimitiveCategory());
            }
        case LIST:
            final ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
            final TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
            return new Field(name, FieldType.nullable(MinorType.LIST.getType()), Lists.newArrayList(toField(DEFAULT_ARROW_FIELD_NAME, elementTypeInfo)));
        case STRUCT:
            final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            final List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
            final List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
            final List<Field> structFields = Lists.newArrayList();
            final int structSize = fieldNames.size();
            for (int i = 0; i < structSize; i++) {
                structFields.add(toField(fieldNames.get(i), fieldTypeInfos.get(i)));
            }
            return new Field(name, FieldType.nullable(MinorType.STRUCT.getType()), structFields);
        case UNION:
            final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
            final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
            final List<Field> unionFields = Lists.newArrayList();
            // BUG FIX: the loop bound was previously unionFields.size(), i.e. the
            // size of the (still empty) output list, so union members were never
            // converted. Iterate over the union's member types instead.
            for (TypeInfo objectTypeInfo : objectTypeInfos) {
                unionFields.add(toField(DEFAULT_ARROW_FIELD_NAME, objectTypeInfo));
            }
            return new Field(name, FieldType.nullable(MinorType.UNION.getType()), unionFields);
        case MAP:
            final MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            final TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
            final TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
            final List<Field> mapFields = Lists.newArrayList();
            mapFields.add(toField(name + "_keys", keyTypeInfo));
            mapFields.add(toField(name + "_values", valueTypeInfo));
            // Map encoded as LIST<STRUCT<keys, values>>; the struct entry itself is non-nullable
            FieldType struct = new FieldType(false, new ArrowType.Struct(), null);
            List<Field> childrenOfList = Lists.newArrayList(new Field(name, struct, mapFields));
            return new Field(name, FieldType.nullable(MinorType.LIST.getType()), childrenOfList);
        default:
            throw new IllegalArgumentException("Unsupported category: " + typeInfo.getCategory());
    }
}
Also used : ArrowType(org.apache.arrow.vector.types.pojo.ArrowType) TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo) TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) FieldType(org.apache.arrow.vector.types.pojo.FieldType) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) Field(org.apache.arrow.vector.types.pojo.Field) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)

Aggregations

TimestampLocalTZTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo)16 TimestampLocalTZWritable (org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable)8 TimestampTZ (org.apache.hadoop.hive.common.type.TimestampTZ)6 SerDeException (org.apache.hadoop.hive.serde2.SerDeException)5 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)4 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)3 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)3 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)3 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)3 HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable)3 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)3 HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable)3 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)3 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)3 BooleanWritable (org.apache.hadoop.io.BooleanWritable)3 FloatWritable (org.apache.hadoop.io.FloatWritable)3 IntWritable (org.apache.hadoop.io.IntWritable)3 LongWritable (org.apache.hadoop.io.LongWritable)3 Text (org.apache.hadoop.io.Text)3 ArrayList (java.util.ArrayList)2