
Example 1 with DecimalColDivideDecimalScalar

Use of org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColDivideDecimalScalar in the Apache Hive project.

From the class Vectorizer, method fixDecimalDataTypePhysicalVariations:

private static VectorExpression fixDecimalDataTypePhysicalVariations(final VectorExpression parent, final VectorExpression[] children, final VectorizationContext vContext) throws HiveException {
    if (children == null || children.length == 0) {
        return parent;
    }
    for (int i = 0; i < children.length; i++) {
        VectorExpression child = children[i];
        VectorExpression newChild = fixDecimalDataTypePhysicalVariations(child, child.getChildExpressions(), vContext);
        if (child.getClass() == newChild.getClass() && child != newChild) {
            children[i] = newChild;
        }
    }
    if (parent.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.NONE && !(parent instanceof ConvertDecimal64ToDecimal)) {
        boolean inputArgsChanged = false;
        DataTypePhysicalVariation[] dataTypePhysicalVariations = parent.getInputDataTypePhysicalVariations();
        for (int i = 0; i < children.length; i++) {
            // found at least one child with a mismatching DECIMAL_64 output; wrap it in a conversion
            if (children[i].getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.DECIMAL_64) {
                children[i] = vContext.wrapWithDecimal64ToDecimalConversion(children[i]);
                inputArgsChanged = true;
                dataTypePhysicalVariations[i] = DataTypePhysicalVariation.NONE;
            }
        }
        // fix up the input column numbers and output column numbers
        if (inputArgsChanged) {
            if (parent instanceof VectorUDFAdaptor) {
                VectorUDFAdaptor parentAdaptor = (VectorUDFAdaptor) parent;
                VectorUDFArgDesc[] argDescs = parentAdaptor.getArgDescs();
                for (int i = 0; i < argDescs.length; ++i) {
                    if (argDescs[i].getColumnNum() != children[i].getOutputColumnNum()) {
                        argDescs[i].setColumnNum(children[i].getOutputColumnNum());
                        break;
                    }
                }
            } else {
                Object[] arguments;
                int argumentCount = children.length + (parent.getOutputColumnNum() == -1 ? 0 : 1);
                // Need to handle it as a special case to avoid instantiation failure.
                if (parent instanceof VectorCoalesce) {
                    arguments = new Object[2];
                    arguments[0] = new int[children.length];
                    for (int i = 0; i < children.length; i++) {
                        VectorExpression vce = children[i];
                        ((int[]) arguments[0])[i] = vce.getOutputColumnNum();
                    }
                    arguments[1] = parent.getOutputColumnNum();
                } else {
                    if (parent instanceof DecimalColDivideDecimalScalar) {
                        arguments = new Object[argumentCount + 1];
                        arguments[children.length] = ((DecimalColDivideDecimalScalar) parent).getValue();
                    } else {
                        arguments = new Object[argumentCount];
                    }
                    for (int i = 0; i < children.length; i++) {
                        VectorExpression vce = children[i];
                        arguments[i] = vce.getOutputColumnNum();
                    }
                }
                // retain output column number from parent
                if (parent.getOutputColumnNum() != -1) {
                    arguments[arguments.length - 1] = parent.getOutputColumnNum();
                }
                // re-instantiate the parent expression with new arguments
                VectorExpression newParent = vContext.instantiateExpression(parent.getClass(), parent.getOutputTypeInfo(), parent.getOutputDataTypePhysicalVariation(), arguments);
                newParent.setOutputTypeInfo(parent.getOutputTypeInfo());
                newParent.setOutputDataTypePhysicalVariation(parent.getOutputDataTypePhysicalVariation());
                newParent.setInputTypeInfos(parent.getInputTypeInfos());
                newParent.setInputDataTypePhysicalVariations(dataTypePhysicalVariations);
                newParent.setChildExpressions(parent.getChildExpressions());
                return newParent;
            }
        }
    }
    return parent;
}
Also used:
ConvertDecimal64ToDecimal (org.apache.hadoop.hive.ql.exec.vector.expressions.ConvertDecimal64ToDecimal)
DecimalColDivideDecimalScalar (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColDivideDecimalScalar)
VectorCoalesce (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorCoalesce)
VectorUDFAdaptor (org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor)
VectorUDFArgDesc (org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFArgDesc)
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation)
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression)
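
The fix-up above turns on one condition: a parent whose output is plain DECIMAL (DataTypePhysicalVariation.NONE) should not consume DECIMAL_64 children directly. Purely as an illustration, and not part of the Hive source, a standalone helper that counts such mismatches might look like the sketch below (the method name is invented, the ConvertDecimal64ToDecimal exclusion from the real method is omitted, and the getters are the same ones called in the code above).

import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;

// Hypothetical helper, for illustration only: counts the children whose DECIMAL_64
// output would have to be wrapped before the parent expression is re-instantiated.
static int countDecimal64Mismatches(VectorExpression parent, VectorExpression[] children) {
    if (children == null
            || parent.getOutputDataTypePhysicalVariation() != DataTypePhysicalVariation.NONE) {
        return 0;
    }
    int mismatches = 0;
    for (VectorExpression child : children) {
        if (child.getOutputDataTypePhysicalVariation() == DataTypePhysicalVariation.DECIMAL_64) {
            mismatches++;
        }
    }
    return mismatches;
}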

Example 2 with DecimalColDivideDecimalScalar

Use of org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColDivideDecimalScalar in the Apache Hive project.

From the class TestVectorArithmeticExpressions, method testDecimalColDivideDecimalScalar:

/* Test decimal column to decimal scalar division. This is used to cover all the
 * cases used in the source code template ColumnDivideScalarDecimal.txt.
 * The template is used for division and modulo.
 */
@Test
public void testDecimalColDivideDecimalScalar() throws HiveException {
    VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
    HiveDecimal d = HiveDecimal.create("2.00");
    VectorExpression expr = new DecimalColDivideDecimalScalar(0, d, 2);
    // test without nulls
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.6")));
    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.65")));
    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
    // test null propagation
    b = getVectorizedRowBatch3DecimalCols();
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    r = (DecimalColumnVector) b.cols[2];
    in.noNulls = false;
    in.isNull[0] = true;
    expr.evaluate(b);
    assertTrue(!r.noNulls);
    assertTrue(r.isNull[0]);
    // test repeating case, no nulls
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    in.isRepeating = true;
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.isRepeating);
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.6")));
    // test repeating case for null value
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    in.isRepeating = true;
    in.isNull[0] = true;
    in.noNulls = false;
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.isRepeating);
    assertTrue(!r.noNulls);
    assertTrue(r.isNull[0]);
    // test that zero-divide produces null for all output values
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    expr = new DecimalColDivideDecimalScalar(0, HiveDecimal.create("0"), 2);
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertFalse(r.noNulls);
    assertTrue(r.isNull[0]);
    assertTrue(r.isRepeating);
}
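
The test relies on a batch factory, getVectorizedRowBatch3DecimalCols(), that is not reproduced on this page. A minimal sketch of what such a factory could look like is given below, assuming three decimal(18, 2) columns whose first column holds 1.20, -3.30 and 0 (values consistent with the expected quotients 0.6, -1.65 and 0 when dividing by 2.00); the method name and column layout are assumptions, not the actual Hive test helper.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Hypothetical stand-in for getVectorizedRowBatch3DecimalCols(): a three-row batch
// with three decimal columns; column 0 is the dividend, column 2 receives the result.
private static VectorizedRowBatch makeThreeDecimalColumnBatch() {
    VectorizedRowBatch b = new VectorizedRowBatch(3);
    b.cols[0] = new DecimalColumnVector(18, 2);
    b.cols[1] = new DecimalColumnVector(18, 2);
    b.cols[2] = new DecimalColumnVector(18, 2);
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    in.vector[0].set(HiveDecimal.create("1.20"));   // 1.20 / 2.00 = 0.6
    in.vector[1].set(HiveDecimal.create("-3.30"));  // -3.30 / 2.00 = -1.65
    in.vector[2].set(HiveDecimal.create("0"));      // 0 / 2.00 = 0
    b.size = 3;
    return b;
}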
Also used:
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch)
TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch)
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector)
DecimalColDivideDecimalScalar (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColDivideDecimalScalar)
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)
Test (org.junit.Test)
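
The comment on the test notes that the ColumnDivideScalarDecimal.txt template also generates the modulo expression. The sketch below shows how that modulo sibling could be exercised against the same kind of batch; it assumes the generated class DecimalColModuloDecimalScalar takes the same (column, scalar, output column) constructor arguments as DecimalColDivideDecimalScalar, and it reuses the hypothetical batch factory from the previous sketch.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColModuloDecimalScalar;
import org.apache.hadoop.hive.ql.metadata.HiveException;

// Sketch only: drive the modulo expression generated from the same template, reusing the
// hypothetical batch factory sketched above. No expected values are asserted because the
// remainders are not taken from the Hive test suite.
static void exerciseDecimalColModuloDecimalScalar() throws HiveException {
    VectorizedRowBatch b = makeThreeDecimalColumnBatch();
    VectorExpression expr = new DecimalColModuloDecimalScalar(0, HiveDecimal.create("2.00"), 2);
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
    System.out.println(r.vector[0] + ", " + r.vector[1] + ", " + r.vector[2]);
}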

Aggregations

DecimalColDivideDecimalScalar (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColDivideDecimalScalar): 2 usages
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 1 usage
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 1 usage
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 1 usage
TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch): 1 usage
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 1 usage
ConvertDecimal64ToDecimal (org.apache.hadoop.hive.ql.exec.vector.expressions.ConvertDecimal64ToDecimal): 1 usage
VectorCoalesce (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorCoalesce): 1 usage
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression): 1 usage
VectorUDFAdaptor (org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor): 1 usage
VectorUDFArgDesc (org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFArgDesc): 1 usage
Test (org.junit.Test): 1 usage