Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class VectorExtractRow, method init().
/*
 * Initialize using data type names.
 * No projection -- the column range 0 .. types.size()-1
 */
public void init(List<String> typeNames) throws HiveException {
  final int count = typeNames.size();
  allocateArrays(count);
  for (int i = 0; i < count; i++) {
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeNames.get(i));
    initEntry(i, i, typeInfo);
  }
}
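For reference, a minimal standalone sketch (the TypeNameDemo class below is hypothetical, not part of Hive) showing how the type-name strings that init(...) receives are parsed into TypeInfo objects; each parsed TypeInfo then seeds one entry of the column range, as initEntry(i, i, typeInfo) does above:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeNameDemo {
  public static void main(String[] args) {
    // Type names in the same string form that init(List<String>) expects.
    List<String> typeNames = Arrays.asList("int", "string", "decimal(10,2)");
    for (String name : typeNames) {
      TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(name);
      System.out.println(name + " -> " + typeInfo.getCategory());
    }
  }
}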
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class ConstantVectorExpression, method setTypeString().
private void setTypeString(String typeString) {
  this.outputType = typeString;
  String typeName = VectorizationContext.mapTypeNameSynonyms(outputType);
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
  this.type = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
}
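A minimal sketch of the same normalize-then-parse pipeline in isolation; VectorTypeDemo is a hypothetical driver class, and it assumes the single-argument getColumnVectorTypeFromTypeInfo overload used above is accessible from the caller's classpath:

import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class VectorTypeDemo {
  public static void main(String[] args) throws HiveException {
    // mapTypeNameSynonyms normalizes synonyms, e.g. "long" to "bigint".
    String typeName = VectorizationContext.mapTypeNameSynonyms("long");
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    ColumnVector.Type vectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo);
    System.out.println(typeName + " -> " + vectorType); // expected: bigint -> LONG
  }
}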
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class VectorizedParquetRecordReader, method buildVectorizedParquetReader().
// Build a VectorizedColumnReader via Hive typeInfo and Parquet schema
private VectorizedColumnReader buildVectorizedParquetReader(TypeInfo typeInfo, Type type, PageReadStore pages,
    List<ColumnDescriptor> columnDescriptors, String conversionTimeZone, int depth) throws IOException {
  List<ColumnDescriptor> descriptors = getAllColumnDescriptorByType(depth, type, columnDescriptors);
  switch (typeInfo.getCategory()) {
  case PRIMITIVE:
    if (columnDescriptors == null || columnDescriptors.isEmpty()) {
      throw new RuntimeException("Failed to find related Parquet column descriptor with type " + type);
    } else {
      return new VectorizedPrimitiveColumnReader(descriptors.get(0), pages.getPageReader(descriptors.get(0)),
          conversionTimeZone, type);
    }
  case STRUCT:
    StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
    List<VectorizedColumnReader> fieldReaders = new ArrayList<>();
    List<TypeInfo> fieldTypes = structTypeInfo.getAllStructFieldTypeInfos();
    List<Type> types = type.asGroupType().getFields();
    for (int i = 0; i < fieldTypes.size(); i++) {
      VectorizedColumnReader r = buildVectorizedParquetReader(fieldTypes.get(i), types.get(i), pages, descriptors,
          conversionTimeZone, depth + 1);
      if (r != null) {
        fieldReaders.add(r);
      } else {
        throw new RuntimeException("Failed to build Parquet vectorized reader based on Hive type "
            + fieldTypes.get(i).getTypeName() + " and Parquet type " + types.get(i).toString());
      }
    }
    return new VectorizedStructColumnReader(fieldReaders);
  case LIST:
  case MAP:
  case UNION:
  default:
    throw new RuntimeException("Unsupported category " + typeInfo.getCategory().name());
  }
}
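The STRUCT branch recurses in parallel over the Hive field types and the Parquet group fields, building one child reader per field. A hypothetical StructWalkDemo sketch showing just the Hive side of that decomposition, using the StructTypeInfo accessors from the snippet above:

import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StructWalkDemo {
  public static void main(String[] args) {
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("struct<id:int,name:string>");
    StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
    List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
    List<TypeInfo> fieldTypes = structTypeInfo.getAllStructFieldTypeInfos();
    // One reader would be built per field; here we only print the pairs.
    for (int i = 0; i < fieldTypes.size(); i++) {
      System.out.println(fieldNames.get(i) + " : " + fieldTypes.get(i).getTypeName());
    }
  }
}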
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class InputSignature, method toString().
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  sb.append(getName());
  sb.append("(");
  boolean isfirst = true;
  for (TypeInfo cls : getTypeArray()) {
    if (!isfirst) {
      sb.append(",");
    }
    sb.append(cls.toString());
    isfirst = false;
  }
  sb.append(")");
  return sb.toString();
}
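Assuming TypeInfo.toString() yields the Hive type name, the rendered signature reads like a function prototype, e.g. concat(string,array<int>). A standalone sketch of the same formatting loop; the SignatureDemo class and the concat name are hypothetical stand-ins for getName() and getTypeArray():

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class SignatureDemo {
  public static void main(String[] args) {
    List<TypeInfo> argTypes = Arrays.asList(
        TypeInfoUtils.getTypeInfoFromTypeString("string"),
        TypeInfoUtils.getTypeInfoFromTypeString("array<int>"));
    StringBuilder sb = new StringBuilder();
    sb.append("concat");
    sb.append("(");
    boolean isfirst = true;
    for (TypeInfo cls : argTypes) {
      if (!isfirst) {
        sb.append(",");
      }
      sb.append(cls.toString());
      isfirst = false;
    }
    sb.append(")");
    System.out.println(sb); // expected: concat(string,array<int>)
  }
}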
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo in project hive by apache.
The class MapWork, method checkVectorizerSupportedTypes().
private boolean checkVectorizerSupportedTypes(boolean hasLlap) {
  for (Map.Entry<String, Operator<? extends OperatorDesc>> entry : aliasToWork.entrySet()) {
    final String alias = entry.getKey();
    Operator<? extends OperatorDesc> op = entry.getValue();
    PartitionDesc partitionDesc = aliasToPartnInfo.get(alias);
    if (op instanceof TableScanOperator && partitionDesc != null && partitionDesc.getTableDesc() != null) {
      final TableScanOperator tsOp = (TableScanOperator) op;
      final List<String> readColumnNames = tsOp.getNeededColumns();
      final Properties props = partitionDesc.getTableDesc().getProperties();
      final List<TypeInfo> typeInfos = TypeInfoUtils.getTypeInfosFromTypeString(
          props.getProperty(serdeConstants.LIST_COLUMN_TYPES));
      final List<String> allColumnTypes = TypeInfoUtils.getTypeStringsFromTypeInfo(typeInfos);
      final List<String> allColumnNames = Utilities.getColumnNames(props);
      hasLlap = Utilities.checkVectorizerSupportedTypes(readColumnNames, allColumnNames, allColumnTypes);
    }
  }
  return hasLlap;
}
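A minimal sketch of the two TypeInfoUtils calls in isolation; ColumnTypesDemo is hypothetical, and the colon-separated value mirrors how the columns.types table property (serdeConstants.LIST_COLUMN_TYPES) is typically stored:

import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ColumnTypesDemo {
  public static void main(String[] args) {
    // A colon-separated type list, as found in the columns.types property.
    String columnTypes = "int:string:map<string,int>";
    List<TypeInfo> typeInfos = TypeInfoUtils.getTypeInfosFromTypeString(columnTypes);
    List<String> typeStrings = TypeInfoUtils.getTypeStringsFromTypeInfo(typeInfos);
    System.out.println(typeStrings); // expected: [int, string, map<string,int>]
  }
}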