Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
Class PartExprEvalUtils, method evalExprWithPart.
/**
 * Evaluate expression with partition columns
 *
 * @param expr expression to evaluate
 * @param p partition whose spec supplies the partition column values
 * @param vcs virtual columns, if any
 * @param rowObjectInspector object inspector for the non-partition columns of the row
 * @return value returned by the expression
 * @throws HiveException
 */
public static synchronized Object evalExprWithPart(ExprNodeDesc expr, Partition p,
    List<VirtualColumn> vcs, StructObjectInspector rowObjectInspector) throws HiveException {
  LinkedHashMap<String, String> partSpec = p.getSpec();
  Properties partProps = p.getSchema();
  String pcolTypes = partProps.getProperty(
      hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
  String[] partKeyTypes = pcolTypes.trim().split(":");
  if (partSpec.size() != partKeyTypes.length) {
    throw new HiveException("Internal error : Partition Spec size, " + partSpec.size()
        + " doesn't match partition key definition size, " + partKeyTypes.length);
  }
  boolean hasVC = vcs != null && !vcs.isEmpty();
  Object[] rowWithPart = new Object[hasVC ? 3 : 2];
  // Create the row object
  ArrayList<String> partNames = new ArrayList<String>();
  ArrayList<Object> partValues = new ArrayList<Object>();
  ArrayList<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
  int i = 0;
  for (Map.Entry<String, String> entry : partSpec.entrySet()) {
    partNames.add(entry.getKey());
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i++]));
    partValues.add(ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi).convert(entry.getValue()));
    partObjectInspectors.add(oi);
  }
  StructObjectInspector partObjectInspector =
      ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
  rowWithPart[1] = partValues;
  ArrayList<StructObjectInspector> ois = new ArrayList<StructObjectInspector>(2);
  ois.add(rowObjectInspector);
  ois.add(partObjectInspector);
  if (hasVC) {
    ois.add(VirtualColumn.getVCSObjectInspector(vcs));
  }
  StructObjectInspector rowWithPartObjectInspector =
      ObjectInspectorFactory.getUnionStructObjectInspector(ois);
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr);
  ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector);
  Object evaluateResultO = evaluator.evaluate(rowWithPart);
  return ((PrimitiveObjectInspector) evaluateResultOI).getPrimitiveJavaObject(evaluateResultO);
}
Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
Class PartitionPruner, method prunePartitionNames.
/**
 * Prunes partition names to see if they match the prune expression.
 * @param partColumnNames names of the partition columns
 * @param partColumnTypeInfos types of the partition columns
 * @param prunerExpr the expression to match
 * @param defaultPartitionName name of the default partition
 * @param partNames partition names to filter; the list is modified in place
 * @return whether the list has any partitions for which the expression may or may not match
 */
public static boolean prunePartitionNames(List<String> partColumnNames,
    List<PrimitiveTypeInfo> partColumnTypeInfos, ExprNodeGenericFuncDesc prunerExpr,
    String defaultPartitionName, List<String> partNames) throws HiveException, MetaException {
  // Prepare the expression to filter on the columns.
  ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> handle =
      PartExprEvalUtils.prepareExpr(prunerExpr, partColumnNames, partColumnTypeInfos);
  // Filter the name list. Removing elements one by one can be slow on e.g. ArrayList,
  // so let's create a new list and copy it back if we don't have a linked list.
  boolean inPlace = partNames instanceof AbstractSequentialList<?>;
  List<String> partNamesSeq = inPlace ? partNames : new LinkedList<String>(partNames);
  // Array for the values to pass to the evaluator.
  ArrayList<String> values = new ArrayList<String>(partColumnNames.size());
  for (int i = 0; i < partColumnNames.size(); ++i) {
    values.add(null);
  }
  boolean hasUnknownPartitions = false;
  Iterator<String> partIter = partNamesSeq.iterator();
  while (partIter.hasNext()) {
    String partName = partIter.next();
    Warehouse.makeValsFromName(partName, values);
    ArrayList<Object> convertedValues = new ArrayList<Object>(values.size());
    for (int i = 0; i < values.size(); i++) {
      String partitionValue = values.get(i);
      PrimitiveTypeInfo typeInfo = partColumnTypeInfos.get(i);
      if (partitionValue.equals(defaultPartitionName)) {
        // Null for the default partition.
        convertedValues.add(null);
      } else {
        Object o = ObjectInspectorConverters.getConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo))
            .convert(partitionValue);
        convertedValues.add(o);
      }
    }
    // Evaluate the expression tree.
    Boolean isNeeded = (Boolean) PartExprEvalUtils.evaluateExprOnPart(handle, convertedValues);
    boolean isUnknown = (isNeeded == null);
    if (!isUnknown && !isNeeded) {
      partIter.remove();
      continue;
    }
    if (isUnknown && values.contains(defaultPartitionName)) {
      // Note that the evaluated predicate only contains the partition-column parts
      // of the original predicate.
      if (LOG.isDebugEnabled()) {
        LOG.debug("skipping default/bad partition: " + partName);
      }
      partIter.remove();
      continue;
    }
    hasUnknownPartitions |= isUnknown;
    if (LOG.isDebugEnabled()) {
      LOG.debug("retained " + (isUnknown ? "unknown " : "") + "partition: " + partName);
    }
  }
  if (!inPlace) {
    partNames.clear();
    partNames.addAll(partNamesSeq);
  }
  return hasUnknownPartitions;
}
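A hedged sketch of how a caller might drive this method, assuming one string partition column "ds" and an already-built pruning predicate prunerExpr over that column (the column name, partition names, and the prunerExpr variable are illustrative; the default partition name is shown with the stock value of hive.exec.default.partition.name rather than read from the configuration):

  List<String> partColumnNames = new ArrayList<String>();
  partColumnNames.add("ds");
  List<PrimitiveTypeInfo> partColumnTypeInfos = new ArrayList<PrimitiveTypeInfo>();
  partColumnTypeInfos.add(TypeInfoFactory.stringTypeInfo);
  // A LinkedList lets the method filter in place (see the AbstractSequentialList check above).
  List<String> partNames = new LinkedList<String>();
  partNames.add("ds=2018-01-01");
  partNames.add("ds=2018-01-02");
  boolean hasUnknown = PartitionPruner.prunePartitionNames(partColumnNames, partColumnTypeInfos,
      prunerExpr, "__HIVE_DEFAULT_PARTITION__", partNames);
  // partNames now holds only the names the predicate may match; hasUnknown reports whether
  // any retained partition could not be decided from the predicate alone.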
Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
Class ExprNodeDescUtils, method foldConstant.
private static ExprNodeConstantDesc foldConstant(ExprNodeGenericFuncDesc func) {
  GenericUDF udf = func.getGenericUDF();
  if (!FunctionRegistry.isConsistentWithinQuery(udf)) {
    return null;
  }
  try {
    // Resources may not be available at compile time.
    if (udf instanceof GenericUDFBridge) {
      UDF internal = ReflectionUtils.newInstance(((GenericUDFBridge) udf).getUdfClass(), null);
      if (internal.getRequiredFiles() != null || internal.getRequiredJars() != null) {
        return null;
      }
    } else {
      if (udf.getRequiredFiles() != null || udf.getRequiredJars() != null) {
        return null;
      }
    }
    if (func.getChildren() != null) {
      for (ExprNodeDesc child : func.getChildren()) {
        if (child instanceof ExprNodeConstantDesc) {
          continue;
        }
        if (child instanceof ExprNodeGenericFuncDesc) {
          if (foldConstant((ExprNodeGenericFuncDesc) child) != null) {
            continue;
          }
        }
        return null;
      }
    }
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(func);
    ObjectInspector output = evaluator.initialize(null);
    Object constant = evaluator.evaluate(null);
    Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
    return new ExprNodeConstantDesc(java);
  } catch (Exception e) {
    return null;
  }
}
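Because every child of the folded expression is (or folds to) a constant, the evaluator needs no input row, which is why both initialize and evaluate are passed null. A sketch, under assumptions, of folding a concrete constant-only expression such as concat('foo', 'bar'); GenericUDFConcat and the ExprNodeGenericFuncDesc.newInstance factory are not shown in the snippet above and are assumed here, and the factory call throws a checked UDFArgumentException:

  // Build concat('foo', 'bar') from constant descriptors (assumed helper classes).
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(new ExprNodeConstantDesc("foo"));
  children.add(new ExprNodeConstantDesc("bar"));
  ExprNodeGenericFuncDesc concatExpr =
      ExprNodeGenericFuncDesc.newInstance(new GenericUDFConcat(), children);
  // Evaluate it once at compile time, exactly as foldConstant does above.
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(concatExpr);
  ObjectInspector outOI = evaluator.initialize(null);  // constant-only tree: no input schema
  Object writable = evaluator.evaluate(null);
  Object javaVal = ObjectInspectorUtils.copyToStandardJavaObject(writable, outOI);  // "foobar"
  ExprNodeConstantDesc folded = new ExprNodeConstantDesc(javaVal);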
Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project SQLWindowing by hbutani.
Class TranslateUtils, method initExprNodeEvaluator.
public static ObjectInspector initExprNodeEvaluator(QueryDef qDef, ExprNodeDesc exprNode,
    ExprNodeEvaluator exprEval, InputInfo iInfo) throws WindowingException {
  ObjectInspector OI;
  try {
    OI = exprEval.initialize(iInfo.getOI());
  } catch (HiveException he) {
    throw new WindowingException(he);
  }
  /*
   * If there are any LeadLag functions in this expression tree:
   * - set up a duplicate evaluator for the 1st arg of the LLFuncDesc
   * - initialize it using the InputInfo provided for this expression tree
   * - set the duplicate evaluator on the LLUDF instance
   */
  LeadLagInfo llInfo = qDef.getTranslationInfo().getLLInfo();
  List<ExprNodeGenericFuncDesc> llFuncExprs = llInfo.getLLFuncExprsInTopExpr(exprNode);
  if (llFuncExprs != null) {
    for (ExprNodeGenericFuncDesc llFuncExpr : llFuncExprs) {
      ExprNodeDesc firstArg = llFuncExpr.getChildren().get(0);
      ExprNodeEvaluator dupExprEval =
          WindowingExprNodeEvaluatorFactory.get(qDef.getTranslationInfo(), firstArg);
      try {
        dupExprEval.initialize(iInfo.getOI());
      } catch (HiveException he) {
        throw new WindowingException(he);
      }
      GenericUDFLeadLag llFn = (GenericUDFLeadLag) llFuncExpr.getGenericUDF();
      llFn.setExprEvaluator(dupExprEval);
    }
  }
  return OI;
}
Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project SQLWindowing by hbutani.
Class Executor, method executeSelectList.
/**
 * For each row in the partition:
 * 1. evaluate the where condition, if applicable
 * 2. evaluate the value for each column retrieved from the select list
 * 3. forward the writable value or object, based on the implementation of the ForwardSink
 * @param qDef
 * @param oPart
 * @param rS
 * @throws WindowingException
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static void executeSelectList(QueryDef qDef, Partition oPart, ForwardSink rS)
    throws WindowingException {
  ArrayList<ColumnDef> cols = qDef.getSelectList().getColumns();
  ObjectInspector selectOI = qDef.getSelectList().getOI();
  SerDe oSerDe = qDef.getOutput().getSerDe();
  Object[] output = new Object[cols.size()];
  WhereDef whDef = qDef.getWhere();
  boolean applyWhere = whDef != null;
  Converter whConverter = !applyWhere ? null : ObjectInspectorConverters.getConverter(
      whDef.getOI(), PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
  ExprNodeEvaluator whCondEval = !applyWhere ? null : whDef.getExprEvaluator();
  Writable value = null;
  PartitionIterator<Object> pItr = oPart.iterator();
  RuntimeUtils.connectLeadLagFunctionsToPartition(qDef, pItr);
  while (pItr.hasNext()) {
    int colCnt = 0;
    ArrayList selectList = new ArrayList();
    Object oRow = pItr.next();
    if (applyWhere) {
      Object whCond = null;
      try {
        whCond = whCondEval.evaluate(oRow);
        whCond = whConverter.convert(whCond);
      } catch (HiveException he) {
        throw new WindowingException(he);
      }
      if (whCond == null || !((Boolean) whCond).booleanValue()) {
        continue;
      }
    }
    for (ColumnDef cDef : cols) {
      try {
        Object newCol = cDef.getExprEvaluator().evaluate(oRow);
        output[colCnt++] = newCol;
        selectList.add(newCol);
      } catch (HiveException he) {
        throw new WindowingException(he);
      }
    }
    // Forward the object array directly if the sink accepts objects;
    // else serialize and collect the writable key-value pair for the output stream.
    if (rS.acceptObject()) {
      rS.collectOutput(output);
    } else {
      try {
        value = oSerDe.serialize(selectList, selectOI);
      } catch (SerDeException se) {
        throw new WindowingException(se);
      }
      rS.collectOutput(NullWritable.get(), value);
    }
  }
}
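The where-clause handling above (evaluate the predicate, then convert the result to a java Boolean) is a reusable pattern built entirely from Hive core classes. A minimal sketch under assumptions: predicate (an ExprNodeDesc), rowOI (a StructObjectInspector describing the rows), and rows (an iterable of row objects) are all hypothetical, and the evaluator calls throw HiveException:

  ExprNodeEvaluator whereEval = ExprNodeEvaluatorFactory.get(predicate);
  ObjectInspector whereOI = whereEval.initialize(rowOI);
  Converter toBoolean = ObjectInspectorConverters.getConverter(
      whereOI, PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
  for (Object row : rows) {
    Boolean keep = (Boolean) toBoolean.convert(whereEval.evaluate(row));
    if (keep != null && keep.booleanValue()) {
      // The row passes the filter; forward or collect it here.
    }
  }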