Search in sources :

Example 36 with DecimalColumnVector

use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.

From the class VectorHashKeyWrapperBatch, method evaluateBatchGroupingSets.

/**
 * Evaluates a batch while a grouping set is in effect.
 *
 * <p>For every key column: if the grouping set forces the key to NULL
 * (per {@code groupingSetsOverrideIsNulls}), a null value is assigned to the
 * wrapper of every row; otherwise the key's already-evaluated column vector is
 * copied into the wrappers. Finally each wrapper's hash code is computed.
 *
 * @param batch the row batch whose key expressions have been evaluated
 * @param groupingSetsOverrideIsNulls indexed by key position; {@code true}
 *        forces that key to NULL for the current grouping set
 * @throws HiveException on evaluation failure
 */
public void evaluateBatchGroupingSets(VectorizedRowBatch batch, boolean[] groupingSetsOverrideIsNulls) throws HiveException {
    final int size = batch.size;
    for (int row = 0; row < size; ++row) {
        vectorHashKeyWrappers[row].clearIsNull();
    }
    for (int k = 0; k < longIndices.length; ++k) {
        final int keyIndex = longIndices[k];
        if (groupingSetsOverrideIsNulls[keyIndex]) {
            for (int row = 0; row < size; ++row) {
                vectorHashKeyWrappers[row].assignNullLong(keyIndex, k);
            }
        } else {
            final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
            evaluateLongColumnVector(batch, (LongColumnVector) batch.cols[columnIndex], keyIndex, k);
        }
    }
    for (int k = 0; k < doubleIndices.length; ++k) {
        final int keyIndex = doubleIndices[k];
        if (groupingSetsOverrideIsNulls[keyIndex]) {
            for (int row = 0; row < size; ++row) {
                vectorHashKeyWrappers[row].assignNullDouble(keyIndex, k);
            }
        } else {
            final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
            evaluateDoubleColumnVector(batch, (DoubleColumnVector) batch.cols[columnIndex], keyIndex, k);
        }
    }
    for (int k = 0; k < stringIndices.length; ++k) {
        final int keyIndex = stringIndices[k];
        if (groupingSetsOverrideIsNulls[keyIndex]) {
            for (int row = 0; row < size; ++row) {
                vectorHashKeyWrappers[row].assignNullString(keyIndex, k);
            }
        } else {
            final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
            evaluateStringColumnVector(batch, (BytesColumnVector) batch.cols[columnIndex], keyIndex, k);
        }
    }
    for (int k = 0; k < decimalIndices.length; ++k) {
        final int keyIndex = decimalIndices[k];
        if (groupingSetsOverrideIsNulls[keyIndex]) {
            for (int row = 0; row < size; ++row) {
                vectorHashKeyWrappers[row].assignNullDecimal(keyIndex, k);
            }
        } else {
            final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
            evaluateDecimalColumnVector(batch, (DecimalColumnVector) batch.cols[columnIndex], keyIndex, k);
        }
    }
    for (int k = 0; k < timestampIndices.length; ++k) {
        final int keyIndex = timestampIndices[k];
        if (groupingSetsOverrideIsNulls[keyIndex]) {
            for (int row = 0; row < size; ++row) {
                vectorHashKeyWrappers[row].assignNullTimestamp(keyIndex, k);
            }
        } else {
            final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
            evaluateTimestampColumnVector(batch, (TimestampColumnVector) batch.cols[columnIndex], keyIndex, k);
        }
    }
    for (int k = 0; k < intervalDayTimeIndices.length; ++k) {
        final int keyIndex = intervalDayTimeIndices[k];
        if (groupingSetsOverrideIsNulls[keyIndex]) {
            for (int row = 0; row < size; ++row) {
                vectorHashKeyWrappers[row].assignNullIntervalDayTime(keyIndex, k);
            }
        } else {
            final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
            evaluateIntervalDayTimeColumnVector(batch, (IntervalDayTimeColumnVector) batch.cols[columnIndex], keyIndex, k);
        }
    }
    for (int row = 0; row < size; ++row) {
        vectorHashKeyWrappers[row].setHashKey();
    }
}
Also used : TimestampColumnVector(org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector) DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector) DoubleColumnVector(org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector) IntervalDayTimeColumnVector(org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector) BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector)

Example 37 with DecimalColumnVector

use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.

From the class VectorHashKeyWrapperBatch, method evaluateBatch.

/**
 * Processes a batch without grouping sets:
 * <ul>
 * <li>Evaluates each key vector expression.</li>
 * <li>Copies out each key's primitive values into the key wrappers</li>
 * <li>computes the hashcode of the key wrappers</li>
 * </ul>
 * @param batch the row batch whose key expressions have been evaluated
 * @throws HiveException on evaluation failure
 */
public void evaluateBatch(VectorizedRowBatch batch) throws HiveException {
    if (keyCount == 0) {
        // With no keys, every wrapper is the shared EmptyVectorHashKeyWrapper.
        return;
    }
    final int size = batch.size;
    for (int row = 0; row < size; ++row) {
        vectorHashKeyWrappers[row].clearIsNull();
    }
    // Copy each typed key column into the wrappers, one primitive family at a time.
    for (int k = 0; k < longIndices.length; ++k) {
        final int keyIndex = longIndices[k];
        final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
        evaluateLongColumnVector(batch, (LongColumnVector) batch.cols[columnIndex], keyIndex, k);
    }
    for (int k = 0; k < doubleIndices.length; ++k) {
        final int keyIndex = doubleIndices[k];
        final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
        evaluateDoubleColumnVector(batch, (DoubleColumnVector) batch.cols[columnIndex], keyIndex, k);
    }
    for (int k = 0; k < stringIndices.length; ++k) {
        final int keyIndex = stringIndices[k];
        final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
        evaluateStringColumnVector(batch, (BytesColumnVector) batch.cols[columnIndex], keyIndex, k);
    }
    for (int k = 0; k < decimalIndices.length; ++k) {
        final int keyIndex = decimalIndices[k];
        final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
        evaluateDecimalColumnVector(batch, (DecimalColumnVector) batch.cols[columnIndex], keyIndex, k);
    }
    for (int k = 0; k < timestampIndices.length; ++k) {
        final int keyIndex = timestampIndices[k];
        final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
        evaluateTimestampColumnVector(batch, (TimestampColumnVector) batch.cols[columnIndex], keyIndex, k);
    }
    for (int k = 0; k < intervalDayTimeIndices.length; ++k) {
        final int keyIndex = intervalDayTimeIndices[k];
        final int columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
        evaluateIntervalDayTimeColumnVector(batch, (IntervalDayTimeColumnVector) batch.cols[columnIndex], keyIndex, k);
    }
    for (int row = 0; row < size; ++row) {
        vectorHashKeyWrappers[row].setHashKey();
    }
}
Also used : TimestampColumnVector(org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector) DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector) DoubleColumnVector(org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector) IntervalDayTimeColumnVector(org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector) BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector)

Example 38 with DecimalColumnVector

use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.

From the class BatchToRowReader, method nextDecimal.

/**
 * Extracts the decimal value at {@code row} from a decimal column vector,
 * reusing {@code previous} as the output object when it is exactly a
 * HiveDecimalWritable.
 *
 * @param vector a DecimalColumnVector or Decimal64ColumnVector
 * @param row the row to read; ignored (row 0 is read) when the vector repeats
 * @param previous a candidate object to reuse, or null
 * @return the populated writable, or null when the slot is null
 */
public static HiveDecimalWritable nextDecimal(ColumnVector vector, int row, Object previous) {
    // A repeating vector stores its single value at slot 0.
    final int slot = vector.isRepeating ? 0 : row;
    if (!vector.noNulls && vector.isNull[slot]) {
        return null;
    }
    final HiveDecimalWritable result;
    // Reuse only an exact HiveDecimalWritable (not a subclass), matching the
    // original class-identity check.
    if (previous != null && previous.getClass() == HiveDecimalWritable.class) {
        result = (HiveDecimalWritable) previous;
    } else {
        result = new HiveDecimalWritable();
    }
    if (vector instanceof Decimal64ColumnVector) {
        final Decimal64ColumnVector d64 = (Decimal64ColumnVector) vector;
        result.deserialize64(d64.vector[slot], d64.scale);
    } else {
        result.set(((DecimalColumnVector) vector).vector[slot]);
    }
    return result;
}
Also used : Decimal64ColumnVector(org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector) DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)

Example 39 with DecimalColumnVector

use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.

From the class TestVectorExpressionWriters, method testWriterDecimal.

/**
 * Verifies the decimal VectorExpressionWriter: every non-null slot of a
 * generated DecimalColumnVector must round-trip to the expected writable,
 * and a null result must correspond to a null slot (slot 2 is forced null).
 *
 * @param type the decimal type under test
 * @throws HiveException on writer failure
 */
private void testWriterDecimal(DecimalTypeInfo type) throws HiveException {
    // Fixed seed keeps the generated column deterministic across runs.
    final DecimalColumnVector column =
        VectorizedRowGroupGenUtil.generateDecimalColumnVector(type, true, false, this.vectorSize, new Random(10));
    column.isNull[2] = true;
    final VectorExpressionWriter writer = getWriter(type);
    for (int row = 0; row < vectorSize; row++) {
        final Writable actual = (Writable) writer.writeValue(column, row);
        if (actual == null) {
            // A null result is only acceptable for a null slot.
            Assert.assertTrue(column.isNull[row]);
        } else {
            final Writable expected = getWritableValue(type, column.vector[row].getHiveDecimal());
            Assert.assertEquals(expected, actual);
        }
    }
}
Also used : DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector) Random(java.util.Random) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable)

Example 40 with DecimalColumnVector

use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.

From the class TestVectorFilterExpressions, method getVectorizedRowBatch2DecimalCol.

/**
 * Builds a 3-row test batch with two decimal(18,2) columns:
 * col0 = {1.20, -3.30, 0} and col1 = {-1.00, -3.30, 10.00}.
 *
 * @return the populated batch with {@code size} set to 3
 */
private VectorizedRowBatch getVectorizedRowBatch2DecimalCol() {
    final VectorizedRowBatch batch = new VectorizedRowBatch(2);
    // One inner array per column; same literals as the original fixture.
    final String[][] columnValues = {
        { "1.20", "-3.30", "0" },
        { "-1.00", "-3.30", "10.00" }
    };
    for (int col = 0; col < columnValues.length; col++) {
        final DecimalColumnVector cv = new DecimalColumnVector(18, 2);
        for (int row = 0; row < columnValues[col].length; row++) {
            cv.vector[row].set(HiveDecimal.create(columnValues[col][row]));
        }
        batch.cols[col] = cv;
    }
    batch.size = 3;
    return batch;
}
Also used : VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector)

Aggregations

DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector)108 VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch)38 LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector)28 Test (org.junit.Test)28 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)27 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)25 DoubleColumnVector (org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector)25 BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector)23 TimestampColumnVector (org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector)18 TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch)16 ColumnVector (org.apache.hadoop.hive.ql.exec.vector.ColumnVector)14 IntervalDayTimeColumnVector (org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector)7 Timestamp (java.sql.Timestamp)5 Random (java.util.Random)4 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)4 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)3 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)3 IOException (java.io.IOException)2 DateColumnVector (org.apache.hadoop.hive.ql.exec.vector.DateColumnVector)2 Decimal64ColumnVector (org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector)2