use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
the class MatchPath method execute.
@Override
public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException {
  while (pItr.hasNext()) {
    Object iRow = pItr.next();
    SymbolFunctionResult syFnRes = SymbolFunction.match(syFn, iRow, pItr);
    if (syFnRes.matches) {
      int sz = syFnRes.nextRow - (pItr.getIndex() - 1);
      Object selectListInput = MatchPath.getSelectListInput(iRow,
          tableDef.getInput().getOutputShape().getOI(), pItr, sz);
      ArrayList<Object> oRow = new ArrayList<Object>();
      for (ExprNodeEvaluator resExprEval : resultExprInfo.resultExprEvals) {
        oRow.add(resExprEval.evaluate(selectListInput));
      }
      outP.append(oRow);
    }
  }
}
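The snippet assumes syFn, tableDef, and resultExprInfo were populated during operator initialization. The ExprNodeEvaluator lifecycle it relies on (build from an ExprNodeDesc, initialize once against an ObjectInspector, then call evaluate per row) can be seen in isolation in the minimal sketch below; the column name "sym" and the sample row are hypothetical, not part of MatchPath itself.

import java.util.Arrays;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class EvaluatorLifecycleSketch {
  public static void main(String[] args) throws Exception {
    // Row shape: struct<sym:string> (hypothetical column name).
    StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("sym"),
        Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaStringObjectInspector));
    // Expression: a bare column reference to "sym".
    ExprNodeColumnDesc col =
        new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "sym", null, false);
    ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(col);
    eval.initialize(rowOI);                       // bind once to the input shape
    Object row = Arrays.<Object>asList("LT");     // one row in standard struct form
    System.out.println(eval.evaluate(row));       // prints LT
  }
}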
use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
the class PartExprEvalUtils method evalExprWithPart.
/**
 * Evaluate expression with partition columns
 *
 * @param expr the expression to evaluate
 * @param p the partition whose key values supply the partition columns
 * @param vcs virtual columns to expose alongside the row, may be null or empty
 * @param rowObjectInspector object inspector for the non-partition columns
 * @return value returned by the expression
 * @throws HiveException
 */
public static synchronized Object evalExprWithPart(ExprNodeDesc expr, Partition p,
    List<VirtualColumn> vcs, StructObjectInspector rowObjectInspector) throws HiveException {
  LinkedHashMap<String, String> partSpec = p.getSpec();
  Properties partProps = p.getSchema();
  String pcolTypes = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
  String[] partKeyTypes = pcolTypes.trim().split(":");
  if (partSpec.size() != partKeyTypes.length) {
    throw new HiveException("Internal error : Partition Spec size, " + partSpec.size()
        + " doesn't match partition key definition size, " + partKeyTypes.length);
  }
  boolean hasVC = vcs != null && !vcs.isEmpty();
  Object[] rowWithPart = new Object[hasVC ? 3 : 2];
  // Create the row object. Slot 0 (the non-partition columns) stays null, so the
  // expression is expected to reference only partition (and virtual) columns.
  ArrayList<String> partNames = new ArrayList<String>();
  ArrayList<Object> partValues = new ArrayList<Object>();
  ArrayList<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
  int i = 0;
  for (Map.Entry<String, String> entry : partSpec.entrySet()) {
    partNames.add(entry.getKey());
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i++]));
    partValues.add(ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi).convert(entry.getValue()));
    partObjectInspectors.add(oi);
  }
  StructObjectInspector partObjectInspector =
      ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
  rowWithPart[1] = partValues;
  ArrayList<StructObjectInspector> ois = new ArrayList<StructObjectInspector>(2);
  ois.add(rowObjectInspector);
  ois.add(partObjectInspector);
  if (hasVC) {
    ois.add(VirtualColumn.getVCSObjectInspector(vcs));
  }
  StructObjectInspector rowWithPartObjectInspector =
      ObjectInspectorFactory.getUnionStructObjectInspector(ois);
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr);
  ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector);
  Object evaluateResultO = evaluator.evaluate(rowWithPart);
  return ((PrimitiveObjectInspector) evaluateResultOI).getPrimitiveJavaObject(evaluateResultO);
}
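Each partition key value arrives from the metastore spec as a string, so the loop above routes every value through an ObjectInspectorConverters converter to obtain a typed writable matching the declared column type. A self-contained sketch of just that conversion step; the "int" type and "11" value are made-up inputs for illustration:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartValueConversionSketch {
  public static void main(String[] args) {
    // The metastore stores partition key values as strings (e.g. "hr" -> "11");
    // the evaluator needs them as typed writables matching the column type.
    ObjectInspector intOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.getPrimitiveTypeInfo("int"));
    Object converted = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, intOI).convert("11");
    System.out.println(converted.getClass().getSimpleName() + ": " + converted);  // IntWritable: 11
  }
}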
use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
the class PartExprEvalUtils method prepareExpr.
public static synchronized ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> prepareExpr(
    ExprNodeGenericFuncDesc expr, List<String> partColumnNames,
    List<PrimitiveTypeInfo> partColumnTypeInfos) throws HiveException {
  // Create the row object
  List<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
  for (int i = 0; i < partColumnNames.size(); i++) {
    partObjectInspectors.add(PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(partColumnTypeInfos.get(i)));
  }
  StructObjectInspector objectInspector =
      ObjectInspectorFactory.getStandardStructObjectInspector(partColumnNames, partObjectInspectors);
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr);
  ObjectInspector evaluateResultOI = evaluator.initialize(objectInspector);
  return ObjectPair.create((PrimitiveObjectInspector) evaluateResultOI, evaluator);
}
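prepareExpr binds the evaluator once to a struct of only the partition columns; callers such as prunePartitionNames below then invoke PartExprEvalUtils.evaluateExprOnPart with the returned handle once per partition. A sketch of how the two halves fit together, assuming a single hypothetical partition column ds and the made-up predicate ds = '2017-01-01':

import java.util.Arrays;
import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartExprEvalUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PrepareExprSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical predicate over one partition column: ds = '2017-01-01'.
    ExprNodeGenericFuncDesc expr = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(),
        Arrays.<ExprNodeDesc>asList(
            new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "ds", null, true),
            new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "2017-01-01")));
    // Initialize once...
    ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> handle =
        PartExprEvalUtils.prepareExpr(expr, Arrays.asList("ds"),
            Arrays.asList(TypeInfoFactory.stringTypeInfo));
    // ...then evaluate per partition; a null result means "unknown".
    Object match = PartExprEvalUtils.evaluateExprOnPart(handle,
        Arrays.<Object>asList("2017-01-01"));
    System.out.println(match);  // true
  }
}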
use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
the class PartitionPruner method prunePartitionNames.
/**
* Prunes partition names to see if they match the prune expression.
* @param partColumnNames name of partition columns
* @param partColumnTypeInfos types of partition columns
* @param prunerExpr The expression to match.
* @param defaultPartitionName name of default partition
* @param partNames Partition names to filter. The list is modified in place.
* @return Whether the list has any partitions for which the expression may or may not match.
*/
public static boolean prunePartitionNames(List<String> partColumnNames,
    List<PrimitiveTypeInfo> partColumnTypeInfos, ExprNodeGenericFuncDesc prunerExpr,
    String defaultPartitionName, List<String> partNames) throws HiveException, MetaException {
  // Prepare the expression to filter on the columns.
  ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> handle =
      PartExprEvalUtils.prepareExpr(prunerExpr, partColumnNames, partColumnTypeInfos);
  // Filter the name list. Removing elements one by one can be slow on e.g. ArrayList,
  // so create a new list and copy it back at the end if we don't have a linked list.
  boolean inPlace = partNames instanceof AbstractSequentialList<?>;
  List<String> partNamesSeq = inPlace ? partNames : new LinkedList<String>(partNames);
  // Reusable list of string values to pass to the evaluator, one slot per partition column.
  ArrayList<String> values = new ArrayList<String>(partColumnNames.size());
  for (int i = 0; i < partColumnNames.size(); ++i) {
    values.add(null);
  }
  boolean hasUnknownPartitions = false;
  Iterator<String> partIter = partNamesSeq.iterator();
  while (partIter.hasNext()) {
    String partName = partIter.next();
    Warehouse.makeValsFromName(partName, values);
    ArrayList<Object> convertedValues = new ArrayList<Object>(values.size());
    for (int i = 0; i < values.size(); i++) {
      String partitionValue = values.get(i);
      PrimitiveTypeInfo typeInfo = partColumnTypeInfos.get(i);
      if (partitionValue.equals(defaultPartitionName)) {
        convertedValues.add(new ExprNodeConstantDefaultDesc(typeInfo, defaultPartitionName));
      } else {
        Object o = ObjectInspectorConverters.getConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo))
            .convert(partitionValue);
        convertedValues.add(o);
      }
    }
    // Evaluate the expression tree.
    Boolean isNeeded = (Boolean) PartExprEvalUtils.evaluateExprOnPart(handle, convertedValues);
    boolean isUnknown = (isNeeded == null);
    if (!isUnknown && !isNeeded) {
      partIter.remove();
      continue;
    }
    if (isUnknown && values.contains(defaultPartitionName)) {
      // Note that the predicate only contains the partition-column parts of the original predicate.
      if (LOG.isDebugEnabled()) {
        LOG.debug("skipping default/bad partition: " + partName);
      }
      partIter.remove();
      continue;
    }
    hasUnknownPartitions |= isUnknown;
    if (LOG.isDebugEnabled()) {
      LOG.debug("retained " + (isUnknown ? "unknown " : "") + "partition: " + partName);
    }
  }
  if (!inPlace) {
    partNames.clear();
    partNames.addAll(partNamesSeq);
  }
  return hasUnknownPartitions;
}
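Warehouse.makeValsFromName fills the pre-sized values list in place from an encoded partition name, which is why the loop above can reuse one list across all partitions. A minimal sketch of that step, assuming the usual key=value/key=value name encoding and a made-up two-column partition:

import java.util.ArrayList;
import org.apache.hadoop.hive.metastore.Warehouse;

public class MakeValsSketch {
  public static void main(String[] args) throws Exception {
    // One null slot per partition column, mirroring prunePartitionNames above.
    ArrayList<String> values = new ArrayList<String>();
    values.add(null);
    values.add(null);
    // A partition name as stored by the metastore (hypothetical example).
    Warehouse.makeValsFromName("ds=2008-04-08/hr=11", values);
    System.out.println(values);  // [2008-04-08, 11]
  }
}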
use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
the class DynamicPartitionPruner method prunePartitionSingleSource.
@VisibleForTesting
protected void prunePartitionSingleSource(String source, SourceInfo si) throws HiveException {
  if (si.skipPruning.get()) {
    // In this case we've determined that there's too much data
    // to prune dynamically.
    LOG.info("Skip pruning on " + source + ", column " + si.columnName);
    return;
  }
  Set<Object> values = si.values;
  String columnName = si.columnName;
  if (LOG.isDebugEnabled()) {
    StringBuilder sb = new StringBuilder("Pruning ");
    sb.append(columnName);
    sb.append(" with ");
    for (Object value : values) {
      sb.append(value == null ? null : value.toString());
      sb.append(", ");
    }
    LOG.debug(sb.toString());
  }
  ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
      TypeInfoFactory.getPrimitiveTypeInfo(si.columnType));
  Converter converter = ObjectInspectorConverters.getConverter(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
  StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
      Collections.singletonList(columnName), Collections.singletonList(oi));
  @SuppressWarnings("rawtypes")
  ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(si.partKey);
  eval.initialize(soi);
  applyFilterToPartitions(converter, eval, columnName, values);
}
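The method builds a one-column struct view so the same ExprNodeEvaluator machinery can run over bare partition values; applyFilterToPartitions (not shown here) can then evaluate eval against each candidate value by wrapping it in a one-element row matching that struct inspector. A minimal sketch of the wrap-and-evaluate step, with a hypothetical string column ds and a bare column reference standing in for si.partKey:

import java.util.Collections;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class SingleColumnEvalSketch {
  public static void main(String[] args) throws Exception {
    // struct<ds:string> over writable values, as in prunePartitionSingleSource.
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.getPrimitiveTypeInfo("string"));
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
        Collections.singletonList("ds"), Collections.singletonList(oi));
    // si.partKey would be the real partition-key expression; a column
    // reference stands in for it in this sketch.
    ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(
        new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "ds", null, true));
    eval.initialize(soi);
    Object row = Collections.singletonList(new Text("2017-01-01"));  // one candidate value
    System.out.println(eval.evaluate(row));  // 2017-01-01
  }
}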