Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in the Apache Hive project.
The class FuncTimestampToDecimal, method evaluate:
@Override
public void evaluate(VectorizedRowBatch batch) {
  if (childExpressions != null) {
    super.evaluateChildren(batch);
  }
  TimestampColumnVector inputColVector = (TimestampColumnVector) batch.cols[inputColumn];
  int[] sel = batch.selected;
  int n = batch.size;
  DecimalColumnVector outputColVector = (DecimalColumnVector) batch.cols[outputColumnNum];
  boolean[] inputIsNull = inputColVector.isNull;
  boolean[] outputIsNull = outputColVector.isNull;
  if (n == 0) {
    // Nothing to do
    return;
  }
  // We do not need to do a column reset since we are carefully changing the output.
  outputColVector.isRepeating = false;
  if (inputColVector.isRepeating) {
    if (inputColVector.noNulls || !inputIsNull[0]) {
      // Set isNull before the call in case it changes its mind.
      outputIsNull[0] = false;
      func(outputColVector, inputColVector, 0);
    } else {
      outputIsNull[0] = true;
      outputColVector.noNulls = false;
    }
    outputColVector.isRepeating = true;
    return;
  }
  if (inputColVector.noNulls) {
    if (batch.selectedInUse) {
      if (!outputColVector.noNulls) {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          // Set isNull before the call in case it changes its mind.
          outputIsNull[i] = false;
          func(outputColVector, inputColVector, i);
        }
      } else {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          func(outputColVector, inputColVector, i);
        }
      }
    } else {
      if (!outputColVector.noNulls) {
        // Assume it is almost always a performance win to fill all of isNull so we can
        // safely reset noNulls.
        Arrays.fill(outputIsNull, false);
        outputColVector.noNulls = true;
      }
      for (int i = 0; i != n; i++) {
        func(outputColVector, inputColVector, i);
      }
    }
  } else /* there are NULLs in the inputColVector */ {
    // Carefully handle NULLs...
    outputColVector.noNulls = false;
    if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {
        int i = sel[j];
        outputColVector.isNull[i] = inputColVector.isNull[i];
        if (!inputColVector.isNull[i]) {
          func(outputColVector, inputColVector, i);
        }
      }
    } else {
      System.arraycopy(inputColVector.isNull, 0, outputColVector.isNull, 0, n);
      for (int i = 0; i != n; i++) {
        if (!inputColVector.isNull[i]) {
          func(outputColVector, inputColVector, i);
        }
      }
    }
  }
}
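The abstract func() call does the actual per-element conversion in a concrete subclass. As a rough, hypothetical sketch of that work (the helper class, method name, and exact conversion are assumptions, not Hive's actual code), a timestamp element can be turned into a seconds-since-epoch decimal like this:

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

// Hypothetical per-element conversion mirroring what func() is expected to do.
final class TimestampToDecimalSketch {
  static void setFromTimestamp(DecimalColumnVector out, TimestampColumnVector in, int i) {
    long seconds = Math.floorDiv(in.getTime(i), 1000L); // getTime() returns epoch milliseconds
    int nanos = in.getNanos(i);                         // sub-second nanoseconds, 0..999999999
    BigDecimal value = BigDecimal.valueOf(seconds).add(BigDecimal.valueOf(nanos, 9));
    // set() copies the value and enforces the output column's precision/scale.
    out.set(i, HiveDecimal.create(value));
  }
}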
Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in the Apache Hive project.
The class CastDecimalToDecimal, method evaluate:
/**
 * Cast decimal(p1, s1) to decimal(p2, s2).
 *
 * The source precision and scale (p1, s1) are recorded in the input vector and the
 * target precision and scale (p2, s2) in the output vector.
 */
@Override
public void evaluate(VectorizedRowBatch batch) {
  if (childExpressions != null) {
    super.evaluateChildren(batch);
  }
  DecimalColumnVector inputColVector = (DecimalColumnVector) batch.cols[inputColumn];
  int[] sel = batch.selected;
  int n = batch.size;
  DecimalColumnVector outputColVector = (DecimalColumnVector) batch.cols[outputColumnNum];
  boolean[] outputIsNull = outputColVector.isNull;
  if (n == 0) {
    // Nothing to do
    return;
  }
  // We do not need to do a column reset since we are carefully changing the output.
  outputColVector.isRepeating = false;
  if (inputColVector.isRepeating) {
    outputColVector.isRepeating = true;
    if (inputColVector.noNulls || !inputColVector.isNull[0]) {
      // Set isNull before the call in case it changes its mind.
      outputColVector.isNull[0] = false;
      convert(outputColVector, inputColVector, 0);
    } else {
      outputColVector.isNull[0] = true;
      outputColVector.noNulls = false;
    }
    return;
  }
  if (inputColVector.noNulls) {
    if (batch.selectedInUse) {
      if (!outputColVector.noNulls) {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          // Set isNull before the call in case it changes its mind.
          outputIsNull[i] = false;
          convert(outputColVector, inputColVector, i);
        }
      } else {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          convert(outputColVector, inputColVector, i);
        }
      }
    } else {
      if (!outputColVector.noNulls) {
        // Assume it is almost always a performance win to fill all of isNull so we can
        // safely reset noNulls.
        Arrays.fill(outputIsNull, false);
        outputColVector.noNulls = true;
      }
      for (int i = 0; i != n; i++) {
        convert(outputColVector, inputColVector, i);
      }
    }
  } else /* there are NULLs in the inputColVector */ {
    if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {
        int i = sel[j];
        if (!inputColVector.isNull[i]) {
          // Set isNull before the call in case it changes its mind.
          outputColVector.isNull[i] = false;
          convert(outputColVector, inputColVector, i);
        } else {
          outputColVector.isNull[i] = true;
          outputColVector.noNulls = false;
        }
      }
    } else {
      for (int i = 0; i != n; i++) {
        if (!inputColVector.isNull[i]) {
          // Set isNull before the call in case it changes its mind.
          outputColVector.isNull[i] = false;
          convert(outputColVector, inputColVector, i);
        } else {
          outputColVector.isNull[i] = true;
          outputColVector.noNulls = false;
        }
      }
    }
  }
}
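The convert() call re-scales a single element to the output type. A minimal sketch, assuming DecimalColumnVector.set() enforces the target precision/scale and nulls the element when the value does not fit (the helper class below is illustrative, not Hive's code):

import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

// Hypothetical per-element rescale mirroring what convert() is expected to do.
final class DecimalRescaleSketch {
  static void rescale(DecimalColumnVector out, DecimalColumnVector in, int i) {
    // Copy the value; set() applies out.precision/out.scale and marks the
    // element null if the value cannot be represented in the target type.
    out.set(i, in.vector[i]);
  }
}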
Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in the Apache Hive project.
The class CastStringToDecimal, method evaluate:
@Override
public void evaluate(VectorizedRowBatch batch) {
  if (childExpressions != null) {
    super.evaluateChildren(batch);
  }
  BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[inputColumn];
  int[] sel = batch.selected;
  int n = batch.size;
  DecimalColumnVector outputColVector = (DecimalColumnVector) batch.cols[outputColumnNum];
  boolean[] inputIsNull = inputColVector.isNull;
  boolean[] outputIsNull = outputColVector.isNull;
  if (n == 0) {
    // Nothing to do
    return;
  }
  // We do not need to do a column reset since we are carefully changing the output.
  outputColVector.isRepeating = false;
  if (inputColVector.isRepeating) {
    if (inputColVector.noNulls || !inputIsNull[0]) {
      // Set isNull before the call in case it changes its mind.
      outputIsNull[0] = false;
      func(outputColVector, inputColVector, 0);
    } else {
      outputIsNull[0] = true;
      outputColVector.noNulls = false;
    }
    outputColVector.isRepeating = true;
    return;
  }
  if (inputColVector.noNulls) {
    if (batch.selectedInUse) {
      if (!outputColVector.noNulls) {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          // Set isNull before the call in case it changes its mind.
          outputIsNull[i] = false;
          func(outputColVector, inputColVector, i);
        }
      } else {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          func(outputColVector, inputColVector, i);
        }
      }
    } else {
      if (!outputColVector.noNulls) {
        // Assume it is almost always a performance win to fill all of isNull so we can
        // safely reset noNulls.
        Arrays.fill(outputIsNull, false);
        outputColVector.noNulls = true;
      }
      for (int i = 0; i != n; i++) {
        func(outputColVector, inputColVector, i);
      }
    }
  } else /* there are NULLs in the inputColVector */ {
    if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {
        int i = sel[j];
        if (!inputColVector.isNull[i]) {
          // Set isNull before the call in case it changes its mind.
          outputColVector.isNull[i] = false;
          func(outputColVector, inputColVector, i);
        } else {
          outputColVector.isNull[i] = true;
          outputColVector.noNulls = false;
        }
      }
    } else {
      System.arraycopy(inputColVector.isNull, 0, outputColVector.isNull, 0, n);
      for (int i = 0; i != n; i++) {
        if (!inputColVector.isNull[i]) {
          // Set isNull before the call in case it changes its mind.
          outputColVector.isNull[i] = false;
          func(outputColVector, inputColVector, i);
        } else {
          outputColVector.isNull[i] = true;
          outputColVector.noNulls = false;
        }
      }
    }
  }
}
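Here func() must parse the UTF-8 byte range for row i and null the output element when the text is not a valid decimal. A minimal sketch, under the assumption that HiveDecimal.create(String) returns null for unparseable input (the helper class is illustrative, not Hive's code):

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

// Hypothetical per-element parse mirroring what func() is expected to do.
final class StringToDecimalSketch {
  static void parse(DecimalColumnVector out, BytesColumnVector in, int i) {
    String s = new String(in.vector[i], in.start[i], in.length[i], StandardCharsets.UTF_8);
    HiveDecimal d = HiveDecimal.create(s);   // assumed to return null on bad input
    if (d == null) {
      out.isNull[i] = true;
      out.noNulls = false;
    } else {
      out.set(i, d);                         // enforces the column's precision/scale
    }
  }
}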
Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in the Apache Hive project.
The class DecimalColumnInList, method evaluate:
@Override
public void evaluate(VectorizedRowBatch batch) {
  if (childExpressions != null) {
    super.evaluateChildren(batch);
  }
  DecimalColumnVector inputColumnVector = (DecimalColumnVector) batch.cols[inputColumn];
  LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumnNum];
  int[] sel = batch.selected;
  boolean[] inputIsNull = inputColumnVector.isNull;
  boolean[] outputIsNull = outputColVector.isNull;
  int n = batch.size;
  HiveDecimalWritable[] vector = inputColumnVector.vector;
  long[] outputVector = outputColVector.vector;
  // Return immediately if the batch is empty.
  if (n == 0) {
    return;
  }
  // We do not need to do a column reset since we are carefully changing the output.
  outputColVector.isRepeating = false;
  if (inputColumnVector.isRepeating) {
    if (inputColumnVector.noNulls || !inputIsNull[0]) {
      outputIsNull[0] = false;
      outputVector[0] = inSet.contains(vector[0]) ? 1 : 0;
    } else {
      outputIsNull[0] = true;
      outputColVector.noNulls = false;
    }
    outputColVector.isRepeating = true;
    return;
  }
  if (inputColumnVector.noNulls) {
    if (batch.selectedInUse) {
      if (!outputColVector.noNulls) {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          // Set isNull before the assignment in case it changes its mind.
          outputIsNull[i] = false;
          outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
        }
      } else {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
        }
      }
    } else {
      if (!outputColVector.noNulls) {
        // Assume it is almost always a performance win to fill all of isNull so we can
        // safely reset noNulls.
        Arrays.fill(outputIsNull, false);
        outputColVector.noNulls = true;
      }
      for (int i = 0; i != n; i++) {
        outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
      }
    }
  } else /* there are NULLs in the inputColVector */ {
    // Carefully handle NULLs...
    outputColVector.noNulls = false;
    if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {
        int i = sel[j];
        outputIsNull[i] = inputIsNull[i];
        if (!inputIsNull[i]) {
          outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
        }
      }
    } else {
      System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
      for (int i = 0; i != n; i++) {
        if (!inputIsNull[i]) {
          outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
        }
      }
    }
  }
}
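The IN-list test above is a HashSet lookup, so inSet has to hold the same writable type as the column's vector entries for equals()/hashCode() to match. A small illustrative sketch of building such a set from the IN-list constants (the class and method names are assumptions, not Hive's setup code):

import java.util.HashSet;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

// Hypothetical construction of the lookup set consulted by inSet.contains().
final class InListSetSketch {
  static HashSet<HiveDecimalWritable> buildInSet(HiveDecimal... constants) {
    HashSet<HiveDecimalWritable> set = new HashSet<>();
    for (HiveDecimal c : constants) {
      set.add(new HiveDecimalWritable(c));   // wrap each constant in the writable type
    }
    return set;
  }
}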
Use of org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector in the Apache Hive project.
The class DecimalToStringUnaryUDF, method evaluate:
@Override
public void evaluate(VectorizedRowBatch batch) {
  if (childExpressions != null) {
    super.evaluateChildren(batch);
  }
  DecimalColumnVector inputColVector = (DecimalColumnVector) batch.cols[inputColumn];
  int[] sel = batch.selected;
  int n = batch.size;
  BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumnNum];
  outputColVector.initBuffer();
  boolean[] inputIsNull = inputColVector.isNull;
  boolean[] outputIsNull = outputColVector.isNull;
  if (n == 0) {
    // Nothing to do
    return;
  }
  // We do not need to do a column reset since we are carefully changing the output.
  outputColVector.isRepeating = false;
  if (inputColVector.isRepeating) {
    if (inputColVector.noNulls || !inputIsNull[0]) {
      // Set isNull before the call in case it changes its mind.
      outputIsNull[0] = false;
      func(outputColVector, inputColVector, 0);
    } else {
      outputIsNull[0] = true;
      outputColVector.noNulls = false;
    }
    outputColVector.isRepeating = true;
    return;
  }
  if (inputColVector.noNulls) {
    if (batch.selectedInUse) {
      if (!outputColVector.noNulls) {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          // Set isNull before the call in case it changes its mind.
          outputIsNull[i] = false;
          func(outputColVector, inputColVector, i);
        }
      } else {
        for (int j = 0; j != n; j++) {
          final int i = sel[j];
          func(outputColVector, inputColVector, i);
        }
      }
    } else {
      if (!outputColVector.noNulls) {
        // Assume it is almost always a performance win to fill all of isNull so we can
        // safely reset noNulls.
        Arrays.fill(outputIsNull, false);
        outputColVector.noNulls = true;
      }
      for (int i = 0; i != n; i++) {
        func(outputColVector, inputColVector, i);
      }
    }
  } else /* there are NULLs in the inputColVector */ {
    // Carefully handle NULLs...
    outputColVector.noNulls = false;
    if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {
        int i = sel[j];
        outputColVector.isNull[i] = inputColVector.isNull[i];
        if (!inputColVector.isNull[i]) {
          func(outputColVector, inputColVector, i);
        }
      }
    } else {
      System.arraycopy(inputColVector.isNull, 0, outputColVector.isNull, 0, n);
      for (int i = 0; i != n; i++) {
        if (!inputColVector.isNull[i]) {
          func(outputColVector, inputColVector, i);
        }
      }
    }
  }
}
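Subclasses supply func(), which formats one decimal element as text and copies the bytes into the output buffer that initBuffer() prepared. A minimal sketch of that per-element step (the helper class is illustrative, not Hive's CastDecimalToString):

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

// Hypothetical per-element formatting mirroring what a func() override would do.
final class DecimalToStringSketch {
  static void format(BytesColumnVector out, DecimalColumnVector in, int i) {
    byte[] utf8 = in.vector[i].getHiveDecimal().toString().getBytes(StandardCharsets.UTF_8);
    out.setVal(i, utf8, 0, utf8.length);   // setVal() copies into the vector's shared buffer
  }
}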