Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc in project hive by apache.
The class DDLSemanticAnalyzer, method getFullPartitionSpecs.
/**
 * Get the partition specs from the tree. This stores the full specification
 * with the comparison operator into the output list.
 *
 * @param ast Tree to extract partitions from.
 * @param tab Table.
 * @param canGroupExprs Whether partition specs with the same prefix length
 *          may be OR-ed together into a single expression.
 * @return Map of partition expressions keyed by prefix length. Most of the time
 *         the prefix length will be the same for all partition specs, so we can
 *         just OR the expressions.
 */
private Map<Integer, List<ExprNodeGenericFuncDesc>> getFullPartitionSpecs(
    CommonTree ast, Table tab, boolean canGroupExprs) throws SemanticException {
  String defaultPartitionName = HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME);
  Map<String, String> colTypes = new HashMap<String, String>();
  for (FieldSchema fs : tab.getPartitionKeys()) {
    colTypes.put(fs.getName().toLowerCase(), fs.getType());
  }
  Map<Integer, List<ExprNodeGenericFuncDesc>> result =
      new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
  for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
    Tree partSpecTree = ast.getChild(childIndex);
    if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) {
      continue;
    }
    ExprNodeGenericFuncDesc expr = null;
    HashSet<String> names = new HashSet<String>(partSpecTree.getChildCount());
    for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
      CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
      assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
      String key = stripIdentifierQuotes(partSpecSingleKey.getChild(0).getText()).toLowerCase();
      String operator = partSpecSingleKey.getChild(1).getText();
      ASTNode partValNode = (ASTNode) partSpecSingleKey.getChild(2);
      TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
      ExprNodeConstantDesc valExpr = (ExprNodeConstantDesc) TypeCheckProcFactory
          .genExprNode(partValNode, typeCheckCtx).get(partValNode);
      Object val = valExpr.getValue();
      boolean isDefaultPartitionName = val.equals(defaultPartitionName);
      String type = colTypes.get(key);
      if (type == null) {
        throw new SemanticException("Column " + key + " not found");
      }
      PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
      // Create the corresponding Hive expression to filter on partition columns.
      // Convert the literal to the partition column's type unless it is the
      // special default-partition value.
      if (!isDefaultPartitionName && !valExpr.getTypeString().equals(type)) {
        Converter converter = ObjectInspectorConverters.getConverter(
            TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(valExpr.getTypeInfo()),
            TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti));
        val = converter.convert(valExpr.getValue());
      }
      ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
      ExprNodeGenericFuncDesc op = makeBinaryPredicate(operator, column,
          isDefaultPartitionName
              ? new ExprNodeConstantDefaultDesc(pti, defaultPartitionName)
              : new ExprNodeConstantDesc(pti, val));
      // If it's a multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs.
      expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
      names.add(key);
    }
    if (expr == null) {
      continue;
    }
    // We got the expr for one full partition spec. Determine the prefix length.
    int prefixLength = calculatePartPrefix(tab, names);
    List<ExprNodeGenericFuncDesc> orExpr = result.get(prefixLength);
    // If no filter exists yet for this prefix length, start a new one; in most
    // cases there will only be one. Otherwise OR into the existing filter when
    // grouping is allowed, or keep the specs as separate expressions.
    if (orExpr == null) {
      result.put(prefixLength, Lists.newArrayList(expr));
    } else if (canGroupExprs) {
      orExpr.set(0, makeBinaryPredicate("or", expr, orExpr.get(0)));
    } else {
      orExpr.add(expr);
    }
  }
  return result;
}
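The makeBinaryPredicate helper referenced above is not shown on this page. A minimal sketch of what it could look like, assuming the operator token ("=", "<", "and", "or", ...) resolves to a GenericUDF through FunctionRegistry:

// Hedged sketch of the helper used above; the real method lives in
// DDLSemanticAnalyzer. It builds a boolean function-call expression over
// the two child expressions.
private static ExprNodeGenericFuncDesc makeBinaryPredicate(
    String fn, ExprNodeDesc left, ExprNodeDesc right) throws SemanticException {
  return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
      FunctionRegistry.getFunctionInfo(fn).getGenericUDF(),
      Lists.newArrayList(left, right));
}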
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc in project hive by apache.
The class PartitionPruner, method prunePartitionNames.
/**
 * Prunes partition names to see if they match the prune expression.
 * @param partColumnNames names of the partition columns
 * @param partColumnTypeInfos types of the partition columns
 * @param prunerExpr the expression to match
 * @param defaultPartitionName name of the default partition
 * @param partNames partition names to filter; the list is modified in place
 * @return whether the list retains any partitions for which the expression
 *         result is unknown, i.e. they may or may not match
 */
public static boolean prunePartitionNames(List<String> partColumnNames,
    List<PrimitiveTypeInfo> partColumnTypeInfos, ExprNodeGenericFuncDesc prunerExpr,
    String defaultPartitionName, List<String> partNames) throws HiveException, MetaException {
  // Prepare the expression to filter on the columns.
  ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> handle =
      PartExprEvalUtils.prepareExpr(prunerExpr, partColumnNames, partColumnTypeInfos);
  // Filter the name list. Removing elements one by one can be slow on e.g. an
  // ArrayList, so create a new linked list and copy back at the end if we don't
  // already have one.
  boolean inPlace = partNames instanceof AbstractSequentialList<?>;
  List<String> partNamesSeq = inPlace ? partNames : new LinkedList<String>(partNames);
  // Array for the values to pass to the evaluator.
  ArrayList<String> values = new ArrayList<String>(partColumnNames.size());
  for (int i = 0; i < partColumnNames.size(); ++i) {
    values.add(null);
  }
  boolean hasUnknownPartitions = false;
  Iterator<String> partIter = partNamesSeq.iterator();
  while (partIter.hasNext()) {
    String partName = partIter.next();
    Warehouse.makeValsFromName(partName, values);
    ArrayList<Object> convertedValues = new ArrayList<Object>(values.size());
    for (int i = 0; i < values.size(); i++) {
      String partitionValue = values.get(i);
      PrimitiveTypeInfo typeInfo = partColumnTypeInfos.get(i);
      if (partitionValue.equals(defaultPartitionName)) {
        convertedValues.add(new ExprNodeConstantDefaultDesc(typeInfo, defaultPartitionName));
      } else {
        Object o = ObjectInspectorConverters.getConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo))
            .convert(partitionValue);
        convertedValues.add(o);
      }
    }
    // Evaluate the expression tree.
    Boolean isNeeded = (Boolean) PartExprEvalUtils.evaluateExprOnPart(handle, convertedValues);
    boolean isUnknown = (isNeeded == null);
    if (!isUnknown && !isNeeded) {
      partIter.remove();
      continue;
    }
    if (isUnknown && values.contains(defaultPartitionName)) {
      // Note that the predicate only contains the partition-column parts of
      // the original predicate.
      if (LOG.isDebugEnabled()) {
        LOG.debug("skipping default/bad partition: " + partName);
      }
      partIter.remove();
      continue;
    }
    hasUnknownPartitions |= isUnknown;
    if (LOG.isDebugEnabled()) {
      LOG.debug("retained " + (isUnknown ? "unknown " : "") + "partition: " + partName);
    }
  }
  if (!inPlace) {
    partNames.clear();
    partNames.addAll(partNamesSeq);
  }
  return hasUnknownPartitions;
}
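A hedged usage sketch: pruning three partition names against a ds = '2012-01-02' filter. The pruner expression is built by hand here; in Hive it would come from the compiled query filter, and the default partition name is assumed to be the stock __HIVE_DEFAULT_PARTITION__.

// Build the pruner expression ds = '2012-01-02' directly (normally produced
// by the query compiler rather than constructed like this).
ExprNodeDesc dsCol = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "ds", null, true);
ExprNodeDesc dsVal = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "2012-01-02");
ExprNodeGenericFuncDesc prunerExpr = new ExprNodeGenericFuncDesc(
    TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(),
    Arrays.asList(dsCol, dsVal));

// A LinkedList lets the pruner filter in place instead of copying.
List<String> partNames = new LinkedList<String>(Arrays.asList(
    "ds=2012-01-01", "ds=2012-01-02", "ds=__HIVE_DEFAULT_PARTITION__"));
boolean hasUnknown = PartitionPruner.prunePartitionNames(
    Arrays.asList("ds"),
    Arrays.asList(TypeInfoFactory.stringTypeInfo),
    prunerExpr, "__HIVE_DEFAULT_PARTITION__", partNames);
// partNames should now contain only "ds=2012-01-02"; the default partition is
// dropped because the unknown result coincides with a default-partition value.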
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc in project hive by apache.
The class GenericUDFOPNotEqual, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object o0, o1;
  o0 = arguments[0].get();
  if (o0 == null) {
    return null;
  }
  o1 = arguments[1].get();
  if (o1 == null) {
    return null;
  }
  // Handle the 'default' constant, which carries a special value for its data type.
  if (o0 instanceof ExprNodeConstantDefaultDesc || o1 instanceof ExprNodeConstantDefaultDesc) {
    ExprNodeConstantDefaultDesc default0 =
        o0 instanceof ExprNodeConstantDefaultDesc ? (ExprNodeConstantDefaultDesc) o0 : null;
    ExprNodeConstantDefaultDesc default1 =
        o1 instanceof ExprNodeConstantDefaultDesc ? (ExprNodeConstantDefaultDesc) o1 : null;
    // Not equal unless both operands are default constants and they match.
    result.set(default0 == null || default1 == null || !default0.isSame(default1));
    return result;
  }
  switch (compareType) {
    case COMPARE_TEXT:
      result.set(!soi0.getPrimitiveWritableObject(o0).equals(soi1.getPrimitiveWritableObject(o1)));
      break;
    case COMPARE_INT:
      result.set(ioi0.get(o0) != ioi1.get(o1));
      break;
    case COMPARE_LONG:
      result.set(loi0.get(o0) != loi1.get(o1));
      break;
    case COMPARE_BYTE:
      result.set(byoi0.get(o0) != byoi1.get(o1));
      break;
    case COMPARE_BOOL:
      result.set(boi0.get(o0) != boi1.get(o1));
      break;
    case COMPARE_STRING:
      result.set(!soi0.getPrimitiveJavaObject(o0).equals(soi1.getPrimitiveJavaObject(o1)));
      break;
    case SAME_TYPE:
      result.set(ObjectInspectorUtils.compare(o0, argumentOIs[0], o1, argumentOIs[1]) != 0);
      break;
    default:
      Object converted_o0 = converter0.convert(o0);
      if (converted_o0 == null) {
        return null;
      }
      Object converted_o1 = converter1.convert(o1);
      if (converted_o1 == null) {
        return null;
      }
      result.set(ObjectInspectorUtils.compare(converted_o0, compareOI, converted_o1, compareOI) != 0);
  }
  return result;
}
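When both operands are default constants, the branch above reduces to !default0.isSame(default1). A hedged sketch of that reduction, assuming the stock __HIVE_DEFAULT_PARTITION__ name:

// Two default constants of the same type and value: isSame should hold,
// so <> evaluates to false and the two default partitions compare as equal.
ExprNodeConstantDefaultDesc d0 = new ExprNodeConstantDefaultDesc(
    TypeInfoFactory.stringTypeInfo, "__HIVE_DEFAULT_PARTITION__");
ExprNodeConstantDefaultDesc d1 = new ExprNodeConstantDefaultDesc(
    TypeInfoFactory.stringTypeInfo, "__HIVE_DEFAULT_PARTITION__");
boolean notEqual = !d0.isSame(d1); // expected: false
// With exactly one default operand, the branch short-circuits to true,
// so a default partition never compares equal to an ordinary value.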
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc in project hive by apache.
The class GenericUDFOPEqual, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object o0, o1;
  o0 = arguments[0].get();
  if (o0 == null) {
    return null;
  }
  o1 = arguments[1].get();
  if (o1 == null) {
    return null;
  }
  // Handle the 'default' constant, which carries a special value for its data type.
  if (o0 instanceof ExprNodeConstantDefaultDesc || o1 instanceof ExprNodeConstantDefaultDesc) {
    ExprNodeConstantDefaultDesc default0 =
        o0 instanceof ExprNodeConstantDefaultDesc ? (ExprNodeConstantDefaultDesc) o0 : null;
    ExprNodeConstantDefaultDesc default1 =
        o1 instanceof ExprNodeConstantDefaultDesc ? (ExprNodeConstantDefaultDesc) o1 : null;
    // Equal only when both operands are default constants and they match.
    result.set(default0 != null && default1 != null && default0.isSame(default1));
    return result;
  }
  switch (compareType) {
    case COMPARE_TEXT:
      result.set(soi0.getPrimitiveWritableObject(o0).equals(soi1.getPrimitiveWritableObject(o1)));
      break;
    case COMPARE_INT:
      result.set(ioi0.get(o0) == ioi1.get(o1));
      break;
    case COMPARE_LONG:
      result.set(loi0.get(o0) == loi1.get(o1));
      break;
    case COMPARE_BYTE:
      result.set(byoi0.get(o0) == byoi1.get(o1));
      break;
    case COMPARE_BOOL:
      result.set(boi0.get(o0) == boi1.get(o1));
      break;
    case COMPARE_STRING:
      result.set(soi0.getPrimitiveJavaObject(o0).equals(soi1.getPrimitiveJavaObject(o1)));
      break;
    case SAME_TYPE:
      result.set(ObjectInspectorUtils.compare(o0, argumentOIs[0], o1, argumentOIs[1]) == 0);
      break;
    default:
      Object converted_o0 = converter0.convert(o0);
      if (converted_o0 == null) {
        return null;
      }
      Object converted_o1 = converter1.convert(o1);
      if (converted_o1 == null) {
        return null;
      }
      result.set(ObjectInspectorUtils.compare(converted_o0, compareOI, converted_o1, compareOI) == 0);
  }
  return result;
}
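Tying this back to the pruning path: prunePartitionNames passes an ExprNodeConstantDefaultDesc as the row value for a default partition, so the = branch here only holds when the filter constant is itself a matching default constant. A hedged illustration of that reduction, with assumed names and values:

// The equality branch reduces to: both operands are default constants AND isSame.
Object rowValue = new ExprNodeConstantDefaultDesc(
    TypeInfoFactory.stringTypeInfo, "__HIVE_DEFAULT_PARTITION__"); // from the pruner
Object filterValue = "2012-01-02"; // an ordinary converted filter constant
boolean bothDefaults = rowValue instanceof ExprNodeConstantDefaultDesc
    && filterValue instanceof ExprNodeConstantDefaultDesc;
boolean equal = bothDefaults
    && ((ExprNodeConstantDefaultDesc) rowValue).isSame(filterValue);
// equal == false, so ds = '2012-01-02' filters the default partition out.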