use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
the class VectorHashKeyWrapperBatch method evaluateBatchGroupingSets.
public void evaluateBatchGroupingSets(VectorizedRowBatch batch,
    boolean[] groupingSetsOverrideIsNulls) throws HiveException {
  for (int i = 0; i < batch.size; ++i) {
    vectorHashKeyWrappers[i].clearIsNull();
  }
  int keyIndex;
  int columnIndex;
  for (int i = 0; i < longIndices.length; ++i) {
    keyIndex = longIndices[i];
    if (groupingSetsOverrideIsNulls[keyIndex]) {
      final int batchSize = batch.size;
      for (int r = 0; r < batchSize; ++r) {
        vectorHashKeyWrappers[r].assignNullLong(keyIndex, i);
      }
      continue;
    }
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    LongColumnVector columnVector = (LongColumnVector) batch.cols[columnIndex];
    evaluateLongColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < doubleIndices.length; ++i) {
    keyIndex = doubleIndices[i];
    if (groupingSetsOverrideIsNulls[keyIndex]) {
      final int batchSize = batch.size;
      for (int r = 0; r < batchSize; ++r) {
        vectorHashKeyWrappers[r].assignNullDouble(keyIndex, i);
      }
      continue;
    }
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    DoubleColumnVector columnVector = (DoubleColumnVector) batch.cols[columnIndex];
    evaluateDoubleColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < stringIndices.length; ++i) {
    keyIndex = stringIndices[i];
    if (groupingSetsOverrideIsNulls[keyIndex]) {
      final int batchSize = batch.size;
      for (int r = 0; r < batchSize; ++r) {
        vectorHashKeyWrappers[r].assignNullString(keyIndex, i);
      }
      continue;
    }
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    BytesColumnVector columnVector = (BytesColumnVector) batch.cols[columnIndex];
    evaluateStringColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < decimalIndices.length; ++i) {
    keyIndex = decimalIndices[i];
    if (groupingSetsOverrideIsNulls[keyIndex]) {
      final int batchSize = batch.size;
      for (int r = 0; r < batchSize; ++r) {
        vectorHashKeyWrappers[r].assignNullDecimal(keyIndex, i);
      }
      continue;
    }
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    DecimalColumnVector columnVector = (DecimalColumnVector) batch.cols[columnIndex];
    evaluateDecimalColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < timestampIndices.length; ++i) {
    keyIndex = timestampIndices[i];
    if (groupingSetsOverrideIsNulls[keyIndex]) {
      final int batchSize = batch.size;
      for (int r = 0; r < batchSize; ++r) {
        vectorHashKeyWrappers[r].assignNullTimestamp(keyIndex, i);
      }
      continue;
    }
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    TimestampColumnVector columnVector = (TimestampColumnVector) batch.cols[columnIndex];
    evaluateTimestampColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < intervalDayTimeIndices.length; ++i) {
    keyIndex = intervalDayTimeIndices[i];
    if (groupingSetsOverrideIsNulls[keyIndex]) {
      final int batchSize = batch.size;
      for (int r = 0; r < batchSize; ++r) {
        vectorHashKeyWrappers[r].assignNullIntervalDayTime(keyIndex, i);
      }
      continue;
    }
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    IntervalDayTimeColumnVector columnVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
    evaluateIntervalDayTimeColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < batch.size; ++i) {
    vectorHashKeyWrappers[i].setHashKey();
  }
}
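The grouping-sets variant above forces selected keys to NULL for the whole batch instead of evaluating them, one grouping set at a time. Below is a minimal, self-contained sketch (not Hive code; the class name, bitmask convention, and helper are assumptions for illustration) of how a caller could derive the groupingSetsOverrideIsNulls flags from a grouping-set bitmask before invoking the method:

// Hypothetical helper, not part of Hive: key k is overridden to NULL when
// bit k of the grouping-set mask is clear.
public class GroupingSetNullFlags {
  static boolean[] overrideFlags(int keyCount, long groupingSetMask) {
    boolean[] overrideIsNull = new boolean[keyCount];
    for (int k = 0; k < keyCount; k++) {
      overrideIsNull[k] = (groupingSetMask & (1L << k)) == 0;
    }
    return overrideIsNull;
  }

  public static void main(String[] args) {
    // GROUPING SETS ((k0, k1), (k0), ()) over two keys:
    // mask 0b11 -> [false, false], 0b01 -> [false, true], 0b00 -> [true, true]
    for (long mask : new long[] {0b11, 0b01, 0b00}) {
      System.out.println(java.util.Arrays.toString(overrideFlags(2, mask)));
    }
  }
}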
use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
the class VectorHashKeyWrapperBatch method evaluateBatch.
/**
 * Processes a batch:
 * <ul>
 *   <li>Evaluates each key vector expression.</li>
 *   <li>Copies each key's primitive values into the key wrappers.</li>
 *   <li>Computes the hash code of each key wrapper.</li>
 * </ul>
 * @param batch the batch to process
 * @throws HiveException
 */
public void evaluateBatch(VectorizedRowBatch batch) throws HiveException {
  if (keyCount == 0) {
    // all keywrappers must be EmptyVectorHashKeyWrapper
    return;
  }
  for (int i = 0; i < batch.size; ++i) {
    vectorHashKeyWrappers[i].clearIsNull();
  }
  int keyIndex;
  int columnIndex;
  for (int i = 0; i < longIndices.length; ++i) {
    keyIndex = longIndices[i];
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    LongColumnVector columnVector = (LongColumnVector) batch.cols[columnIndex];
    evaluateLongColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < doubleIndices.length; ++i) {
    keyIndex = doubleIndices[i];
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    DoubleColumnVector columnVector = (DoubleColumnVector) batch.cols[columnIndex];
    evaluateDoubleColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < stringIndices.length; ++i) {
    keyIndex = stringIndices[i];
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    BytesColumnVector columnVector = (BytesColumnVector) batch.cols[columnIndex];
    evaluateStringColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < decimalIndices.length; ++i) {
    keyIndex = decimalIndices[i];
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    DecimalColumnVector columnVector = (DecimalColumnVector) batch.cols[columnIndex];
    evaluateDecimalColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < timestampIndices.length; ++i) {
    keyIndex = timestampIndices[i];
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    TimestampColumnVector columnVector = (TimestampColumnVector) batch.cols[columnIndex];
    evaluateTimestampColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < intervalDayTimeIndices.length; ++i) {
    keyIndex = intervalDayTimeIndices[i];
    columnIndex = keyExpressions[keyIndex].getOutputColumnNum();
    IntervalDayTimeColumnVector columnVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
    evaluateIntervalDayTimeColumnVector(batch, columnVector, keyIndex, i);
  }
  for (int i = 0; i < batch.size; ++i) {
    vectorHashKeyWrappers[i].setHashKey();
  }
}
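Both methods rely on per-type index arrays (longIndices, doubleIndices, decimalIndices, and so on) that are computed once when the key wrapper batch is compiled, so each per-batch pass walks a single, type-homogeneous set of columns. The following is an illustrative, non-Hive sketch of that one-time partitioning; the Family enum and partition helper are hypothetical names, though the families mirror the vector types used above:

import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;

enum Family { LONG, DOUBLE, BYTES, DECIMAL, TIMESTAMP, INTERVAL_DAY_TIME }

class KeyPartition {
  // Bucket key positions by column family so each per-batch pass can iterate
  // one dense index array instead of re-checking every key's type per row.
  static Map<Family, int[]> partition(Family[] keyTypes) {
    Map<Family, List<Integer>> buckets = new EnumMap<>(Family.class);
    for (int k = 0; k < keyTypes.length; k++) {
      buckets.computeIfAbsent(keyTypes[k], f -> new ArrayList<>()).add(k);
    }
    Map<Family, int[]> out = new EnumMap<>(Family.class);
    buckets.forEach((f, list) ->
        out.put(f, list.stream().mapToInt(Integer::intValue).toArray()));
    return out;
  }
}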
use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
the class BatchToRowReader method nextDecimal.
public static HiveDecimalWritable nextDecimal(ColumnVector vector, int row, Object previous) {
  // A repeating vector stores a single value for all rows in slot 0.
  if (vector.isRepeating) {
    row = 0;
  }
  if (vector.noNulls || !vector.isNull[row]) {
    HiveDecimalWritable result;
    // Reuse the caller's previous writable when possible to avoid per-row allocation.
    if (previous == null || previous.getClass() != HiveDecimalWritable.class) {
      result = new HiveDecimalWritable();
    } else {
      result = (HiveDecimalWritable) previous;
    }
    if (vector instanceof Decimal64ColumnVector) {
      long value = ((Decimal64ColumnVector) vector).vector[row];
      result.deserialize64(value, ((Decimal64ColumnVector) vector).scale);
    } else {
      result.set(((DecimalColumnVector) vector).vector[row]);
    }
    return result;
  } else {
    return null;
  }
}
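A hedged usage sketch of the reader above: the previous argument enables object reuse across rows, so a caller can hold one HiveDecimalWritable and pass it back in on every call. The setup is illustrative and assumes Hive's vector and storage-api classes on the classpath:

// Illustrative call site (not taken from Hive): read three rows, reusing one writable.
DecimalColumnVector dcv = new DecimalColumnVector(18, 2);  // precision 18, scale 2
dcv.vector[0].set(HiveDecimal.create("12.34"));
Object reused = null;
for (int r = 0; r < 3; r++) {
  reused = BatchToRowReader.nextDecimal(dcv, r, reused);   // same writable each iteration
  System.out.println(reused);
}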
use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
the class TestVectorExpressionWriters method testWriterDecimal.
private void testWriterDecimal(DecimalTypeInfo type) throws HiveException {
  DecimalColumnVector dcv = VectorizedRowGroupGenUtil.generateDecimalColumnVector(
      type, true, false, this.vectorSize, new Random(10));
  dcv.isNull[2] = true;
  VectorExpressionWriter vew = getWriter(type);
  for (int i = 0; i < vectorSize; i++) {
    Writable w = (Writable) vew.writeValue(dcv, i);
    if (w != null) {
      Writable expected = getWritableValue(type, dcv.vector[i].getHiveDecimal());
      Assert.assertEquals(expected, w);
    } else {
      Assert.assertTrue(dcv.isNull[i]);
    }
  }
}
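One detail this test leans on: under the ColumnVector null convention, isNull[i] is only honored when noNulls is false (the generator above, called with nulls enabled, presumably clears noNulls). A minimal sketch of that convention in isolation, with illustrative values:

// Minimal sketch (not Hive test code) of the null-marking convention.
DecimalColumnVector dcv = new DecimalColumnVector(18, 2);
dcv.noNulls = false;   // readers only consult isNull[] when this is false
dcv.isNull[2] = true;  // mark row 2 as NULL
for (int i = 0; i < 4; i++) {
  boolean isNull = !dcv.noNulls && dcv.isNull[i];
  System.out.println(isNull ? "NULL" : dcv.vector[i].getHiveDecimal().toString());
}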
use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in project hive by apache.
the class TestVectorFilterExpressions method getVectorizedRowBatch2DecimalCol.
private VectorizedRowBatch getVectorizedRowBatch2DecimalCol() {
  VectorizedRowBatch b = new VectorizedRowBatch(2);
  DecimalColumnVector v0, v1;
  b.cols[0] = v0 = new DecimalColumnVector(18, 2);
  v0.vector[0].set(HiveDecimal.create("1.20"));
  v0.vector[1].set(HiveDecimal.create("-3.30"));
  v0.vector[2].set(HiveDecimal.create("0"));
  b.cols[1] = v1 = new DecimalColumnVector(18, 2);
  v1.vector[0].set(HiveDecimal.create("-1.00"));
  v1.vector[1].set(HiveDecimal.create("-3.30"));
  v1.vector[2].set(HiveDecimal.create("10.00"));
  b.size = 3;
  return b;
}
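A hedged example of consuming this fixture: a hand-rolled selected-vector filter keeping rows where column 0 exceeds column 1, mimicking the batch.selected protocol that Hive's generated decimal filter expressions use. The comparison loop is illustrative, not Hive's implementation:

VectorizedRowBatch b = getVectorizedRowBatch2DecimalCol();
DecimalColumnVector c0 = (DecimalColumnVector) b.cols[0];
DecimalColumnVector c1 = (DecimalColumnVector) b.cols[1];
int newSize = 0;
for (int i = 0; i < b.size; i++) {
  if (c0.vector[i].compareTo(c1.vector[i]) > 0) {
    b.selected[newSize++] = i;  // keep row i
  }
}
b.size = newSize;
b.selectedInUse = true;         // downstream reads only the selected rows
// With the values above, only row 0 survives (1.20 > -1.00).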