
Example 16 with ExprNodeEvaluator

Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.

From class MatchPath, method execute.

@Override
public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException {
    while (pItr.hasNext()) {
        Object iRow = pItr.next();
        SymbolFunctionResult syFnRes = SymbolFunction.match(syFn, iRow, pItr);
        if (syFnRes.matches) {
            // Number of rows covered by the match; the partition iterator has
            // already advanced one past iRow, hence the -1.
            int sz = syFnRes.nextRow - (pItr.getIndex() - 1);
            Object selectListInput = MatchPath.getSelectListInput(iRow, tableDef.getInput().getOutputShape().getOI(), pItr, sz);
            ArrayList<Object> oRow = new ArrayList<Object>();
            for (ExprNodeEvaluator resExprEval : resultExprInfo.resultExprEvals) {
                oRow.add(resExprEval.evaluate(selectListInput));
            }
            outP.append(oRow);
        }
    }
}
Also used : ArrayList(java.util.ArrayList) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator)
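The examples on this page all share the same evaluator lifecycle: obtain an ExprNodeEvaluator from ExprNodeEvaluatorFactory, initialize it once against an ObjectInspector describing the input row shape, then call evaluate() per row. A minimal sketch of that lifecycle, assuming a single hypothetical string column "col0" (the column name and sample row are invented for illustration; Arrays comes from java.util, the rest from the Hive packages listed in the Also-used lines, and HiveException handling is omitted):

// Minimal evaluator lifecycle: factory -> initialize -> evaluate.
// "col0" and the sample row are hypothetical; the APIs are Hive's.
ExprNodeDesc colExpr = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col0", null, false);
StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("col0"),
        Arrays.asList((ObjectInspector) PrimitiveObjectInspectorFactory.javaStringObjectInspector));
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(colExpr);
ObjectInspector resultOI = eval.initialize(rowOI); // must be called before evaluate()
Object result = eval.evaluate(Arrays.asList("hello")); // returns "hello"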

Example 17 with ExprNodeEvaluator

Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.

From class PartExprEvalUtils, method evalExprWithPart.

/**
   * Evaluates an expression against a partition's column values.
   *
   * @param expr the expression to evaluate
   * @param p the partition supplying the column values
   * @param vcs the virtual columns visible to the expression, may be null
   * @param rowObjectInspector inspector for the non-partition portion of the row
   * @return the value returned by the expression, unwrapped to a Java object
   * @throws HiveException
   */
public static synchronized Object evalExprWithPart(ExprNodeDesc expr, Partition p, List<VirtualColumn> vcs, StructObjectInspector rowObjectInspector) throws HiveException {
    LinkedHashMap<String, String> partSpec = p.getSpec();
    Properties partProps = p.getSchema();
    String pcolTypes = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
    String[] partKeyTypes = pcolTypes.trim().split(":");
    if (partSpec.size() != partKeyTypes.length) {
        throw new HiveException("Internal error: partition spec size " + partSpec.size() + " doesn't match partition key definition size " + partKeyTypes.length);
    }
    boolean hasVC = vcs != null && !vcs.isEmpty();
    // Row layout: [0] non-partition columns, [1] partition values, and [2]
    // virtual columns when present. Only slot [1] is populated below, since
    // the expression evaluated here references partition columns only.
    Object[] rowWithPart = new Object[hasVC ? 3 : 2];
    // Create the row object
    ArrayList<String> partNames = new ArrayList<String>();
    ArrayList<Object> partValues = new ArrayList<Object>();
    ArrayList<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
    int i = 0;
    for (Map.Entry<String, String> entry : partSpec.entrySet()) {
        partNames.add(entry.getKey());
        ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i++]));
        partValues.add(ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi).convert(entry.getValue()));
        partObjectInspectors.add(oi);
    }
    StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
    rowWithPart[1] = partValues;
    ArrayList<StructObjectInspector> ois = new ArrayList<StructObjectInspector>(2);
    ois.add(rowObjectInspector);
    ois.add(partObjectInspector);
    if (hasVC) {
        ois.add(VirtualColumn.getVCSObjectInspector(vcs));
    }
    StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois);
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr);
    ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector);
    Object evaluateResultO = evaluator.evaluate(rowWithPart);
    return ((PrimitiveObjectInspector) evaluateResultOI).getPrimitiveJavaObject(evaluateResultO);
}
Also used : StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ArrayList(java.util.ArrayList) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) Properties(java.util.Properties) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map)
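The subtle step in the loop above is the conversion: partition values arrive from the metastore as strings and must be converted to the writable type declared for each partition column before the evaluator sees them. That step in isolation (the "int" type and the value "2024" are illustrative):

// String partition value -> typed writable, via ObjectInspectorConverters.
ObjectInspector intOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.getPrimitiveTypeInfo("int"));
Object converted = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, intOI).convert("2024");
// converted is an org.apache.hadoop.io.IntWritable holding 2024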

Example 18 with ExprNodeEvaluator

Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.

From class PartExprEvalUtils, method prepareExpr.

public static synchronized ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> prepareExpr(ExprNodeGenericFuncDesc expr, List<String> partColumnNames, List<PrimitiveTypeInfo> partColumnTypeInfos) throws HiveException {
    // Create the row object
    List<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
    for (int i = 0; i < partColumnNames.size(); i++) {
        partObjectInspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(partColumnTypeInfos.get(i)));
    }
    StructObjectInspector objectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partColumnNames, partObjectInspectors);
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr);
    ObjectInspector evaluateResultOI = evaluator.initialize(objectInspector);
    return ObjectPair.create((PrimitiveObjectInspector) evaluateResultOI, evaluator);
}
Also used : StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ArrayList(java.util.ArrayList) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator)
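The returned pair is a prepared handle: the evaluator is already initialized against the partition-column shape, and the inspector unwraps its results. Example 19 below consumes it through PartExprEvalUtils.evaluateExprOnPart; a sketch of that call, assuming prunerExpr, partColumnNames, and partColumnTypeInfos are in scope and describe a single string partition column (HiveException handling omitted):

ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> handle =
        PartExprEvalUtils.prepareExpr(prunerExpr, partColumnNames, partColumnTypeInfos);
// One partition's column values, in declaration order.
Object matches = PartExprEvalUtils.evaluateExprOnPart(handle, Arrays.asList("2024-01-01"));
// matches is Boolean.TRUE, Boolean.FALSE, or null when the result is unknown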

Example 19 with ExprNodeEvaluator

Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.

From class PartitionPruner, method prunePartitionNames.

/**
   * Prunes partition names to see if they match the prune expression.
   * @param partColumnNames name of partition columns
   * @param partColumnTypeInfos types of partition columns
   * @param prunerExpr The expression to match.
   * @param defaultPartitionName name of default partition
   * @param partNames Partition names to filter. The list is modified in place.
   * @return Whether the list has any partitions for which the expression may or may not match.
   */
public static boolean prunePartitionNames(List<String> partColumnNames, List<PrimitiveTypeInfo> partColumnTypeInfos, ExprNodeGenericFuncDesc prunerExpr, String defaultPartitionName, List<String> partNames) throws HiveException, MetaException {
    // Prepare the expression to filter on the columns.
    ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> handle = PartExprEvalUtils.prepareExpr(prunerExpr, partColumnNames, partColumnTypeInfos);
    // Filter the name list. Removing elements one by one is slow on array-backed
    // lists such as ArrayList, so copy into a LinkedList unless the input already
    // supports cheap in-place removal.
    boolean inPlace = partNames instanceof AbstractSequentialList<?>;
    List<String> partNamesSeq = inPlace ? partNames : new LinkedList<String>(partNames);
    // Preallocated list of values to pass to the evaluator, reused per name.
    ArrayList<String> values = new ArrayList<String>(partColumnNames.size());
    for (int i = 0; i < partColumnNames.size(); ++i) {
        values.add(null);
    }
    boolean hasUnknownPartitions = false;
    Iterator<String> partIter = partNamesSeq.iterator();
    while (partIter.hasNext()) {
        String partName = partIter.next();
        Warehouse.makeValsFromName(partName, values);
        ArrayList<Object> convertedValues = new ArrayList<Object>(values.size());
        for (int i = 0; i < values.size(); i++) {
            String partitionValue = values.get(i);
            PrimitiveTypeInfo typeInfo = partColumnTypeInfos.get(i);
            if (partitionValue.equals(defaultPartitionName)) {
                convertedValues.add(new ExprNodeConstantDefaultDesc(typeInfo, defaultPartitionName));
            } else {
                Object o = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo)).convert(partitionValue);
                convertedValues.add(o);
            }
        }
        // Evaluate the expression tree.
        Boolean isNeeded = (Boolean) PartExprEvalUtils.evaluateExprOnPart(handle, convertedValues);
        boolean isUnknown = (isNeeded == null);
        if (!isUnknown && !isNeeded) {
            partIter.remove();
            continue;
        }
        if (isUnknown && values.contains(defaultPartitionName)) {
            // Note that the predicate only contains the partition-column parts of the original predicate.
            if (LOG.isDebugEnabled()) {
                LOG.debug("skipping default/bad partition: " + partName);
            }
            partIter.remove();
            continue;
        }
        hasUnknownPartitions |= isUnknown;
        if (LOG.isDebugEnabled()) {
            LOG.debug("retained " + (isUnknown ? "unknown " : "") + "partition: " + partName);
        }
    }
    if (!inPlace) {
        partNames.clear();
        partNames.addAll(partNamesSeq);
    }
    return hasUnknownPartitions;
}
Also used : ExprNodeConstantDefaultDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) ArrayList(java.util.ArrayList) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) AbstractSequentialList(java.util.AbstractSequentialList) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
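One helper in the loop above is worth isolating: Warehouse.makeValsFromName splits an encoded partition name back into its column values, filling the preallocated list in place. A small sketch with an invented two-column name (MetaException handling omitted):

ArrayList<String> vals = new ArrayList<String>(2);
vals.add(null);
vals.add(null);
// "ds=2024-01-01/hr=12" is a made-up partition name for illustration.
Warehouse.makeValsFromName("ds=2024-01-01/hr=12", vals);
// vals is now ["2024-01-01", "12"]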

Example 20 with ExprNodeEvaluator

Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.

From class DynamicPartitionPruner, method prunePartitionSingleSource.

@VisibleForTesting
protected void prunePartitionSingleSource(String source, SourceInfo si) throws HiveException {
    if (si.skipPruning.get()) {
        // in this case we've determined that there's too much data
        // to prune dynamically.
        LOG.info("Skip pruning on " + source + ", column " + si.columnName);
        return;
    }
    Set<Object> values = si.values;
    String columnName = si.columnName;
    if (LOG.isDebugEnabled()) {
        StringBuilder sb = new StringBuilder("Pruning ");
        sb.append(columnName);
        sb.append(" with ");
        for (Object value : values) {
            sb.append(value == null ? null : value.toString());
            sb.append(", ");
        }
        LOG.debug(sb.toString());
    }
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(si.columnType));
    Converter converter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(Collections.singletonList(columnName), Collections.singletonList(oi));
    @SuppressWarnings("rawtypes") ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(si.partKey);
    eval.initialize(soi);
    applyFilterToPartitions(converter, eval, columnName, values);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
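applyFilterToPartitions is not shown on this page, but the pieces built above are enough to see the intended inner step: each candidate partition value is converted to the column's writable type and fed to the evaluator as a one-field row. A hedged sketch of how that pair can be applied to a single value (the value itself is invented; Collections is java.util):

Object typedValue = converter.convert("2024-01-01"); // string -> column's writable type
Object res = eval.evaluate(Collections.singletonList(typedValue));
// res is interpreted through the evaluator's output ObjectInspector to decide
// whether the partition survives the filter.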

Aggregations

ExprNodeEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) 30
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) 27
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) 18
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc) 13
ArrayList (java.util.ArrayList) 10
WindowingException (com.sap.hadoop.windowing.WindowingException) 8
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 8
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) 7
ColumnDef (com.sap.hadoop.windowing.query2.definition.ColumnDef) 3
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode) 3
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) 3
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) 3
WhereDef (com.sap.hadoop.windowing.query2.definition.WhereDef) 2
List (java.util.List) 2
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) 2
VectorExpressionWriter (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter) 2
VectorExpressionWriterFactory (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory) 2
Converter (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) 2
VisibleForTesting (com.google.common.annotations.VisibleForTesting) 1
GenericUDFLeadLag (com.sap.hadoop.windowing.functions2.GenericUDFLeadLag) 1