Use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.
The class DDLSemanticAnalyzer, method getTypeName.
public static String getTypeName(ASTNode node) throws SemanticException {
  int token = node.getType();
  String typeName;
  // datetime type isn't currently supported
  if (token == HiveParser.TOK_DATETIME) {
    throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
  }
  switch (token) {
  case HiveParser.TOK_CHAR:
    CharTypeInfo charTypeInfo = ParseUtils.getCharTypeInfo(node);
    typeName = charTypeInfo.getQualifiedName();
    break;
  case HiveParser.TOK_VARCHAR:
    VarcharTypeInfo varcharTypeInfo = ParseUtils.getVarcharTypeInfo(node);
    typeName = varcharTypeInfo.getQualifiedName();
    break;
  case HiveParser.TOK_TIMESTAMPLOCALTZ:
    HiveConf conf;
    try {
      conf = Hive.get().getConf();
    } catch (HiveException e) {
      throw new SemanticException(e);
    }
    TimestampLocalTZTypeInfo timestampLocalTZTypeInfo =
        TypeInfoFactory.getTimestampTZTypeInfo(conf.getLocalTimeZone());
    typeName = timestampLocalTZTypeInfo.getQualifiedName();
    break;
  case HiveParser.TOK_DECIMAL:
    DecimalTypeInfo decTypeInfo = ParseUtils.getDecimalTypeTypeInfo(node);
    typeName = decTypeInfo.getQualifiedName();
    break;
  default:
    typeName = TokenToTypeName.get(token);
  }
  return typeName;
}
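For context, a minimal standalone driver, not part of Hive, showing what the TOK_TIMESTAMPLOCALTZ branch produces when the session zone is known; the class name and the "UTC" zone are illustrative assumptions:

import java.time.ZoneId;
import org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class TimestampLocalTZNameDemo {
  public static void main(String[] args) {
    // Resolve the type info for a concrete zone, as getTypeName does with
    // the zone taken from HiveConf.getLocalTimeZone().
    TimestampLocalTZTypeInfo tsInfo = TypeInfoFactory.getTimestampTZTypeInfo(ZoneId.of("UTC"));
    // The qualified name is the string recorded as the column's type.
    System.out.println(tsInfo.getQualifiedName());
  }
}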
Use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.
The class DruidSerDe, method initialize.
@Override
public void initialize(Configuration configuration, Properties tableProperties,
    Properties partitionProperties) throws SerDeException {
  // populates the inherited 'properties' field used below
  super.initialize(configuration, tableProperties, partitionProperties);
  tsTZTypeInfo = new TimestampLocalTZTypeInfo(
      configuration.get(HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE.varname));
  // Druid query
  final String druidQuery = properties.getProperty(Constants.DRUID_QUERY_JSON, null);
  if (druidQuery != null && !druidQuery.isEmpty()) {
    initFromDruidQueryPlan(properties, druidQuery);
  } else {
    // No query. Either it is a CTAS, or we need to create a Druid metadata query.
    if (!org.apache.commons.lang3.StringUtils.isEmpty(properties.getProperty(serdeConstants.LIST_COLUMNS))
        && !org.apache.commons.lang3.StringUtils.isEmpty(properties.getProperty(serdeConstants.LIST_COLUMN_TYPES))) {
      // CTAS statement: column names and types come from the table properties
      initFromProperties(properties);
    } else {
      // Segment metadata query that retrieves all columns present in
      // the data source (dimensions and metrics).
      initFromMetaDataQuery(configuration, properties);
    }
  }
  if (log.isDebugEnabled()) {
    log.debug("DruidSerDe initialized with\n" + "\t columns: " + Arrays.toString(columns)
        + "\n\t types: " + Arrays.toString(types));
  }
}
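As a hedged, test-style sketch (not Hive code; the "UTC" value is illustrative), the tsTZTypeInfo construction above reduces to:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo;

public class LocalTimeZoneDemo {
  public static void main(String[] args) {
    Configuration configuration = new Configuration();
    // Illustrative zone id set under hive's local-time-zone property.
    configuration.set(HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE.varname, "UTC");
    // Same construction as in DruidSerDe.initialize above.
    TimestampLocalTZTypeInfo tsTZTypeInfo = new TimestampLocalTZTypeInfo(
        configuration.get(HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE.varname));
    System.out.println(tsTZTypeInfo.getQualifiedName());
  }
}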
Use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.
The class DruidSerDe, method initFromMetaDataQuery.
private void initFromMetaDataQuery(final Configuration configuration, final Properties properties)
    throws SerDeException {
  final List<String> columnNames = new ArrayList<>();
  final List<PrimitiveTypeInfo> columnTypes = new ArrayList<>();
  final List<ObjectInspector> inspectors = new ArrayList<>();
  String dataSource = properties.getProperty(Constants.DRUID_DATA_SOURCE);
  if (dataSource == null) {
    throw new SerDeException("Druid data source not specified; use "
        + Constants.DRUID_DATA_SOURCE + " in table properties");
  }
  SegmentMetadataQueryBuilder builder = new Druids.SegmentMetadataQueryBuilder();
  builder.dataSource(dataSource);
  builder.merge(true);
  builder.analysisTypes();
  SegmentMetadataQuery query = builder.build();
  // Execute query in Druid
  String address = HiveConf.getVar(configuration, HiveConf.ConfVars.HIVE_DRUID_BROKER_DEFAULT_ADDRESS);
  if (org.apache.commons.lang3.StringUtils.isEmpty(address)) {
    throw new SerDeException("Druid broker address not specified in configuration");
  }
  // Infer schema
  SegmentAnalysis schemaInfo;
  try {
    schemaInfo = submitMetadataRequest(address, query);
  } catch (IOException e) {
    throw new SerDeException(e);
  }
  for (Entry<String, ColumnAnalysis> columnInfo : schemaInfo.getColumns().entrySet()) {
    if (columnInfo.getKey().equals(DruidConstants.DEFAULT_TIMESTAMP_COLUMN)) {
      // Special handling for the timestamp column: always typed as
      // timestamp with local time zone, using the serde's session zone.
      columnNames.add(columnInfo.getKey());
      PrimitiveTypeInfo type = tsTZTypeInfo;
      columnTypes.add(type);
      inspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type));
      continue;
    }
    // field name
    columnNames.add(columnInfo.getKey());
    // field type; timestamp-with-local-time-zone columns are normalized to the session zone
    PrimitiveTypeInfo type = DruidSerDeUtils.convertDruidToHiveType(columnInfo.getValue().getType());
    columnTypes.add(type instanceof TimestampLocalTZTypeInfo ? tsTZTypeInfo : type);
    inspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type));
  }
  columns = columnNames.toArray(new String[0]);
  types = columnTypes.toArray(new PrimitiveTypeInfo[0]);
  inspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
}
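Stripped of the per-column loop, the metadata path amounts to the following sketch (a hypothetical helper inside DruidSerDe; "wikipedia" is a placeholder data source name, and only calls already shown above are used):

private SegmentAnalysis fetchDruidSchema(Configuration configuration) throws SerDeException {
  SegmentMetadataQueryBuilder builder = new Druids.SegmentMetadataQueryBuilder();
  builder.dataSource("wikipedia"); // placeholder data source name
  builder.merge(true);             // one merged analysis across all segments
  builder.analysisTypes();         // default analyses; column types are enough here
  String address = HiveConf.getVar(configuration, HiveConf.ConfVars.HIVE_DRUID_BROKER_DEFAULT_ADDRESS);
  try {
    return submitMetadataRequest(address, builder.build());
  } catch (IOException e) {
    throw new SerDeException(e);
  }
}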
Use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.
The class BaseSemanticAnalyzer, method getTypeName.
private static String getTypeName(ASTNode node) throws SemanticException {
  int token = node.getType();
  String typeName;
  // datetime type isn't currently supported
  if (token == HiveParser.TOK_DATETIME) {
    throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
  }
  switch (token) {
  case HiveParser.TOK_CHAR:
    CharTypeInfo charTypeInfo = ParseUtils.getCharTypeInfo(node);
    typeName = charTypeInfo.getQualifiedName();
    break;
  case HiveParser.TOK_VARCHAR:
    VarcharTypeInfo varcharTypeInfo = ParseUtils.getVarcharTypeInfo(node);
    typeName = varcharTypeInfo.getQualifiedName();
    break;
  case HiveParser.TOK_TIMESTAMPLOCALTZ:
    TimestampLocalTZTypeInfo timestampLocalTZTypeInfo = TypeInfoFactory.getTimestampTZTypeInfo(null);
    typeName = timestampLocalTZTypeInfo.getQualifiedName();
    break;
  case HiveParser.TOK_DECIMAL:
    DecimalTypeInfo decTypeInfo = ParseUtils.getDecimalTypeTypeInfo(node);
    typeName = decTypeInfo.getQualifiedName();
    break;
  default:
    typeName = TOKEN_TO_TYPE.get(token);
  }
  return typeName;
}
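The only difference from the DDLSemanticAnalyzer variant above is the TOK_TIMESTAMPLOCALTZ branch: no HiveConf is consulted, and null is passed for the zone, deferring time-zone resolution. A minimal sketch contrasting the two calls (the class name and "UTC" are illustrative):

import java.time.ZoneId;
import org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class TimestampTZResolutionDemo {
  public static void main(String[] args) {
    // DDLSemanticAnalyzer resolves the zone from the session configuration:
    TimestampLocalTZTypeInfo resolved = TypeInfoFactory.getTimestampTZTypeInfo(ZoneId.of("UTC"));
    // BaseSemanticAnalyzer defers resolution by passing null:
    TimestampLocalTZTypeInfo deferred = TypeInfoFactory.getTimestampTZTypeInfo(null);
    System.out.println(resolved.getQualifiedName());
    System.out.println(deferred.getQualifiedName());
  }
}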
Use of org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo in project hive by apache.
The class ArrowColumnarBatchSerDe, method toField.
private static Field toField(String name, TypeInfo typeInfo) {
  switch (typeInfo.getCategory()) {
  case PRIMITIVE:
    final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
    switch (primitiveTypeInfo.getPrimitiveCategory()) {
    case BOOLEAN:
      return Field.nullable(name, MinorType.BIT.getType());
    case BYTE:
      return Field.nullable(name, MinorType.TINYINT.getType());
    case SHORT:
      return Field.nullable(name, MinorType.SMALLINT.getType());
    case INT:
      return Field.nullable(name, MinorType.INT.getType());
    case LONG:
      return Field.nullable(name, MinorType.BIGINT.getType());
    case FLOAT:
      return Field.nullable(name, MinorType.FLOAT4.getType());
    case DOUBLE:
      return Field.nullable(name, MinorType.FLOAT8.getType());
    case STRING:
    case VARCHAR:
    case CHAR:
      return Field.nullable(name, MinorType.VARCHAR.getType());
    case DATE:
      return Field.nullable(name, MinorType.DATEDAY.getType());
    case TIMESTAMP:
      return Field.nullable(name, MinorType.TIMESTAMPMILLI.getType());
    case TIMESTAMPLOCALTZ:
      // carry the Hive time zone into the Arrow timestamp type
      final TimestampLocalTZTypeInfo timestampLocalTZTypeInfo = (TimestampLocalTZTypeInfo) typeInfo;
      final String timeZone = timestampLocalTZTypeInfo.getTimeZone().toString();
      return Field.nullable(name, new ArrowType.Timestamp(TimeUnit.MILLISECOND, timeZone));
    case BINARY:
      return Field.nullable(name, MinorType.VARBINARY.getType());
    case DECIMAL:
      final DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
      final int precision = decimalTypeInfo.precision();
      final int scale = decimalTypeInfo.scale();
      return Field.nullable(name, new ArrowType.Decimal(precision, scale));
    case INTERVAL_YEAR_MONTH:
      return Field.nullable(name, MinorType.INTERVALYEAR.getType());
    case INTERVAL_DAY_TIME:
      return Field.nullable(name, MinorType.INTERVALDAY.getType());
    default:
      throw new IllegalArgumentException();
    }
  case LIST:
    final ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
    final TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
    return new Field(name, FieldType.nullable(MinorType.LIST.getType()),
        Lists.newArrayList(toField(DEFAULT_ARROW_FIELD_NAME, elementTypeInfo)));
  case STRUCT:
    final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
    final List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
    final List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
    final List<Field> structFields = Lists.newArrayList();
    final int structSize = fieldNames.size();
    for (int i = 0; i < structSize; i++) {
      structFields.add(toField(fieldNames.get(i), fieldTypeInfos.get(i)));
    }
    return new Field(name, FieldType.nullable(MinorType.STRUCT.getType()), structFields);
  case UNION:
    final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
    final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
    final List<Field> unionFields = Lists.newArrayList();
    // one child field per union member type
    final int unionSize = objectTypeInfos.size();
    for (int i = 0; i < unionSize; i++) {
      unionFields.add(toField(DEFAULT_ARROW_FIELD_NAME, objectTypeInfos.get(i)));
    }
    return new Field(name, FieldType.nullable(MinorType.UNION.getType()), unionFields);
  case MAP:
    // a Hive map is encoded as a list of <key, value> structs
    final MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
    final TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
    final TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
    final List<Field> mapFields = Lists.newArrayList();
    mapFields.add(toField(name + "_keys", keyTypeInfo));
    mapFields.add(toField(name + "_values", valueTypeInfo));
    FieldType struct = new FieldType(false, new ArrowType.Struct(), null);
    List<Field> childrenOfList = Lists.newArrayList(new Field(name, struct, mapFields));
    return new Field(name, FieldType.nullable(MinorType.LIST.getType()), childrenOfList);
  default:
    throw new IllegalArgumentException();
  }
}
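For the TIMESTAMPLOCALTZ branch in particular, the Hive zone id is carried into the Arrow schema as the timestamp's timezone string at millisecond precision. A minimal standalone illustration (the class name, field name, and zone are assumptions, not Hive code):

import org.apache.arrow.vector.types.TimeUnit;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;

public class ArrowTimestampFieldDemo {
  public static void main(String[] args) {
    // "ts" and the zone are illustrative; the constructor call mirrors
    // the TIMESTAMPLOCALTZ case in toField above.
    Field field = Field.nullable("ts",
        new ArrowType.Timestamp(TimeUnit.MILLISECOND, "America/Los_Angeles"));
    System.out.println(field);
  }
}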