use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class PartitionPruner method extractPartColTypes.
private static List<PrimitiveTypeInfo> extractPartColTypes(Table tab) {
  List<FieldSchema> pCols = tab.getPartCols();
  List<PrimitiveTypeInfo> partColTypeInfos = new ArrayList<PrimitiveTypeInfo>(pCols.size());
  for (FieldSchema pCol : pCols) {
    partColTypeInfos.add(TypeInfoFactory.getPrimitiveTypeInfo(pCol.getType()));
  }
  return partColTypeInfos;
}
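For illustration, a minimal self-contained sketch (not part of PartitionPruner; the type names are assumptions) of what TypeInfoFactory.getPrimitiveTypeInfo returns for the kind of type-name strings that extractPartColTypes reads from each partition column's FieldSchema:

import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartColTypeInfoSketch {
  public static void main(String[] args) {
    // Type names as they would appear in FieldSchema.getType() for partition columns (illustrative).
    String[] typeNames = { "string", "int", "date" };
    for (String name : typeNames) {
      // Each type name resolves to a PrimitiveTypeInfo carrying a primitive category.
      PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(name);
      System.out.println(name + " -> " + pti.getPrimitiveCategory());
    }
  }
}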
use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class ReplicationSemanticAnalyzer method genPartSpecs.
private Map<Integer, List<ExprNodeGenericFuncDesc>> genPartSpecs(Table table, List<Map<String, String>> partitions) throws SemanticException {
  Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
  int partPrefixLength = 0;
  if ((partitions != null) && (partitions.size() > 0)) {
    // Take the length of the first partition spec; all listed partitions are expected
    // to have the same number of key-value pairs.
    partPrefixLength = partitions.get(0).size();
  }
  List<ExprNodeGenericFuncDesc> ptnDescs = new ArrayList<ExprNodeGenericFuncDesc>();
  for (Map<String, String> ptn : partitions) {
    // Convert each key-value map into a conjunction of equality predicates.
    ExprNodeGenericFuncDesc expr = null;
    for (Map.Entry<String, String> kvp : ptn.entrySet()) {
      String key = kvp.getKey();
      Object val = kvp.getValue();
      String type = table.getPartColByName(key).getType();
      PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
      ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
      ExprNodeGenericFuncDesc op = DDLSemanticAnalyzer.makeBinaryPredicate("=", column, new ExprNodeConstantDesc(pti, val));
      expr = (expr == null) ? op : DDLSemanticAnalyzer.makeBinaryPredicate("and", expr, op);
    }
    if (expr != null) {
      ptnDescs.add(expr);
    }
  }
  if (ptnDescs.size() > 0) {
    partSpecs.put(partPrefixLength, ptnDescs);
  }
  return partSpecs;
}
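As a rough illustration (the column name ds and its value are hypothetical, not taken from the Hive source), this is the per-entry building block genPartSpecs creates before chaining the pieces together with makeBinaryPredicate("=", ...) and makeBinaryPredicate("and", ...):

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartSpecPredicateSketch {
  public static void main(String[] args) {
    // One key-value pair of a partition spec, e.g. ds=2023-01-01 (illustrative values).
    PrimitiveTypeInfo stringType = TypeInfoFactory.getPrimitiveTypeInfo("string");
    ExprNodeColumnDesc column = new ExprNodeColumnDesc(stringType, "ds", null, true);
    ExprNodeConstantDesc constant = new ExprNodeConstantDesc(stringType, "2023-01-01");
    // genPartSpecs wraps pairs like this into "=" predicates and ANDs them per partition.
    System.out.println(column.getExprString() + " = " + constant.getExprString());
  }
}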
use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class GenericUDFBaseNumeric method deriveResultApproxTypeInfo.
/**
 * Default implementation for deriving the approximate type info for the operator result.
 * The divide operator overrides this.
 * @return the approximate (floating-point) result type
 */
protected PrimitiveTypeInfo deriveResultApproxTypeInfo() {
  PrimitiveTypeInfo left = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(leftOI);
  PrimitiveTypeInfo right = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(rightOI);
  // String types get converted to double.
  if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(left.getPrimitiveCategory()) == PrimitiveGrouping.STRING_GROUP) {
    left = TypeInfoFactory.doubleTypeInfo;
  }
  if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(right.getPrimitiveCategory()) == PrimitiveGrouping.STRING_GROUP) {
    right = TypeInfoFactory.doubleTypeInfo;
  }
  // Use type promotion.
  PrimitiveCategory commonCat = FunctionRegistry.getPrimitiveCommonCategory(left, right);
  if (commonCat == PrimitiveCategory.DECIMAL) {
    // The Hive 0.12 behavior, where double * decimal -> decimal, is gone; fall back to double.
    return TypeInfoFactory.doubleTypeInfo;
  } else if (commonCat == null) {
    return TypeInfoFactory.doubleTypeInfo;
  } else {
    return left.getPrimitiveCategory() == commonCat ? left : right;
  }
}
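A hedged sketch (operand types chosen for illustration, not from the snippet) of the promotion call this method relies on; the printed category is expected to be the wider of the two operands under Hive's numeric promotion rules:

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CommonCategorySketch {
  public static void main(String[] args) {
    // int combined with float: the common category should be the approximate (float) type.
    PrimitiveCategory common = FunctionRegistry.getPrimitiveCommonCategory(
        TypeInfoFactory.intTypeInfo, TypeInfoFactory.floatTypeInfo);
    System.out.println(common);
    // deriveResultApproxTypeInfo then returns whichever operand already has that category.
  }
}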
use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class GenericUDFBaseNumeric method deriveResultTypeInfo.
/**
 * Default implementation for deriving the type info of the operator result from the
 * object inspectors of the left and right operands (leftOI and rightOI).
 *
 * @return the result type info
 * @throws UDFArgumentException if either operand is not of a numeric type
 */
private PrimitiveTypeInfo deriveResultTypeInfo() throws UDFArgumentException {
  PrimitiveTypeInfo left = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(leftOI);
  PrimitiveTypeInfo right = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(rightOI);
  if (!FunctionRegistry.isNumericType(left) || !FunctionRegistry.isNumericType(right)) {
    List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
    argTypeInfos.add(left);
    argTypeInfos.add(right);
    throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
  }
  // If either type is not exact, the approximate (double-based) result type is chosen.
  if (!FunctionRegistry.isExactNumericType(left) || !FunctionRegistry.isExactNumericType(right)) {
    return deriveResultApproxTypeInfo();
  }
  return deriveResultExactTypeInfo();
}
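For illustration (the types below are arbitrary picks, not from the snippet), the two FunctionRegistry checks this method uses to decide between the exact and approximate result paths:

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class NumericTypeCheckSketch {
  public static void main(String[] args) {
    // boolean is not a numeric type, so two boolean operands would hit the
    // NoMatchingMethodException branch above.
    System.out.println(FunctionRegistry.isNumericType(TypeInfoFactory.booleanTypeInfo));
    // double is numeric but not exact, so a double operand routes to
    // deriveResultApproxTypeInfo(); int counts as exact.
    System.out.println(FunctionRegistry.isExactNumericType(TypeInfoFactory.doubleTypeInfo));
    System.out.println(FunctionRegistry.isExactNumericType(TypeInfoFactory.intTypeInfo));
  }
}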
use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class GenericUDFBaseNumeric method deriveResultExactTypeInfo.
/**
 * Default implementation for getting the exact type info for the operator result. It works
 * for all operators except divide, which overrides it.
 *
 * @return the exact result type
 */
protected PrimitiveTypeInfo deriveResultExactTypeInfo() {
  PrimitiveTypeInfo left = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(leftOI);
  PrimitiveTypeInfo right = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(rightOI);
  // Now we are handling exact types only. The base implementation handles type promotion.
  PrimitiveCategory commonCat = FunctionRegistry.getPrimitiveCommonCategory(left, right);
  if (commonCat == PrimitiveCategory.DECIMAL) {
    return deriveResultDecimalTypeInfo();
  } else {
    return left.getPrimitiveCategory() == commonCat ? left : right;
  }
}
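A brief sketch (operand types are illustrative) of the exact-type case: when one operand is decimal, the common category is DECIMAL and the method above defers to deriveResultDecimalTypeInfo; otherwise the wider exact operand's type is kept.

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ExactResultTypeSketch {
  public static void main(String[] args) {
    // int combined with decimal: expected common category is DECIMAL, which
    // sends deriveResultExactTypeInfo down the decimal-precision path.
    PrimitiveCategory common = FunctionRegistry.getPrimitiveCommonCategory(
        TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo);
    System.out.println(common);
    // int combined with bigint: the result keeps the wider exact operand's type.
    System.out.println(FunctionRegistry.getPrimitiveCommonCategory(
        TypeInfoFactory.intTypeInfo, TypeInfoFactory.longTypeInfo));
  }
}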