Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From the class TestVectorTypeCasts, method testCastDecimalToTimestamp.
@Test
public void testCastDecimalToTimestamp() throws HiveException {
  double[] doubleValues = new double[500];
  VectorizedRowBatch b = getBatchDecimalTimestamp(doubleValues);
  VectorExpression expr = new CastDecimalToTimestamp(0, 1);
  expr.evaluate(b);
  TimestampColumnVector r = (TimestampColumnVector) b.cols[1];
  for (int i = 0; i < doubleValues.length; i++) {
    // Read the cast result and convert it back to a double (seconds) for comparison.
    Timestamp timestamp = r.asScratchTimestamp(i);
    double asDouble = TimestampUtils.getDouble(timestamp);
    double expectedDouble = doubleValues[i];
    if (expectedDouble != asDouble) {
      assertTrue(false);
    }
  }
}
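The helper getBatchDecimalTimestamp is not shown here. Below is a minimal sketch, assuming column 0 holds decimal "seconds since the epoch" inputs and column 1 is the target timestamp column; the class name, precision/scale, and random seed are illustrative and not the project's actual helper.

// Hypothetical batch builder in the spirit of getBatchDecimalTimestamp.
// Names, precision/scale and the seed are assumptions, not Hive's test code.
import java.util.Random;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class DecimalTimestampBatchSketch {

  static VectorizedRowBatch buildDecimalTimestampBatch(double[] doubleValues) {
    VectorizedRowBatch batch = new VectorizedRowBatch(2, doubleValues.length);
    DecimalColumnVector decimalCol = new DecimalColumnVector(doubleValues.length, 38, 9);
    Random rand = new Random(12345);
    for (int i = 0; i < doubleValues.length; i++) {
      // Seconds since the epoch with a fractional part, remembered for later assertions.
      doubleValues[i] = rand.nextInt(60 * 60 * 24 * 365) + rand.nextDouble();
      decimalCol.set(i, HiveDecimal.create(String.valueOf(doubleValues[i])));
    }
    batch.cols[0] = decimalCol;                                      // cast input
    batch.cols[1] = new TimestampColumnVector(doubleValues.length);  // cast output
    batch.size = doubleValues.length;
    return batch;
  }
}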
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From the class TestVectorUDFDatetimeLegacyHybridCalendar, method getVectorizedRowBatch.
/**
 * Input array is used to fill the entire specified size of the vector row batch.
 */
private VectorizedRowBatch getVectorizedRowBatch(java.sql.Timestamp[] inputs, int size) {
  VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
  TimestampColumnVector inputCol = new TimestampColumnVector(size);
  for (int i = 0; i < size; i++) {
    inputCol.set(i, inputs[i % inputs.length]);
  }
  batch.cols[0] = inputCol;
  batch.cols[1] = new TimestampColumnVector(size);
  batch.size = size;
  return batch;
}
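A minimal, hypothetical caller for the helper above, assumed to run inside the same test class so the private method is visible; the timestamps and batch size are made up for illustration.

// Hypothetical usage of the helper above (values are illustrative).
java.sql.Timestamp[] inputs = {
    java.sql.Timestamp.valueOf("0601-03-07 17:03:00"),
    java.sql.Timestamp.valueOf("2015-11-29 14:08:00")
};
VectorizedRowBatch batch = getVectorizedRowBatch(inputs, 4);
TimestampColumnVector in = (TimestampColumnVector) batch.cols[0];
for (int i = 0; i < batch.size; i++) {
  // Inputs repeat cyclically: rows 0 and 2 hold the first timestamp, rows 1 and 3 the second.
  System.out.println(in.asScratchTimestamp(i));
}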
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From the class VectorUDFDateAddColScalar, method evaluateTimestamp.
protected long evaluateTimestamp(ColumnVector columnVector, int index) {
  TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
  // Convert to date value (in days)
  long days = DateWritableV2.millisToDays(tcv.getTime(index));
  if (isPositive) {
    days += numDays;
  } else {
    days -= numDays;
  }
  return days;
}
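As a standalone illustration of the same conversion, the sketch below reads a timestamp's epoch milliseconds from a TimestampColumnVector, turns them into epoch days with DateWritableV2.millisToDays, and shifts by a day count; the timestamp and the numDays constant are invented for the example.

// Standalone sketch of the date_add/date_sub arithmetic used above.
import java.sql.Timestamp;

import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateAddSketch {
  public static void main(String[] args) {
    TimestampColumnVector tcv = new TimestampColumnVector(1);
    tcv.set(0, Timestamp.valueOf("2020-01-10 23:15:00"));
    long numDays = 5;
    // Epoch milliseconds -> epoch days, then shift.
    long days = DateWritableV2.millisToDays(tcv.getTime(0));
    long added = days + numDays;      // date_add semantics
    long subtracted = days - numDays; // date_sub semantics
    System.out.println(added + " / " + subtracted);
  }
}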
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From the class VectorUDFDateDiffColCol, method toDateArray.
private LongColumnVector toDateArray(VectorizedRowBatch batch, TypeInfo typeInfo,
    ColumnVector inputColVector, LongColumnVector dateVector) {
  PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
  int size = batch.size;
  if (primitiveCategory == PrimitiveCategory.DATE) {
    return (LongColumnVector) inputColVector;
  }
  if (size > dateVector.vector.length) {
    if (dateVector1 == dateVector) {
      dateVector1 = new LongColumnVector(size * 2);
      dateVector = dateVector1;
    } else {
      dateVector2 = new LongColumnVector(size * 2);
      dateVector = dateVector2;
    }
  }
  switch (primitiveCategory) {
    case TIMESTAMP:
      TimestampColumnVector tcv = (TimestampColumnVector) inputColVector;
      copySelected(tcv, batch.selectedInUse, batch.selected, batch.size, dateVector);
      return dateVector;
    case STRING:
    case CHAR:
    case VARCHAR:
      BytesColumnVector bcv = (BytesColumnVector) inputColVector;
      copySelected(bcv, batch.selectedInUse, batch.selected, batch.size, dateVector);
      return dateVector;
    default:
      throw new Error("Unsupported input type " + primitiveCategory.name());
  }
}
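copySelected itself is not shown above. The sketch below is a conceptual stand-in for its TIMESTAMP branch, not the actual Hive implementation: each selected timestamp is written into the long output vector as epoch days, and nulls are propagated.

// Conceptual stand-in for copySelected(TimestampColumnVector, ...): store each
// selected row's timestamp as epoch days in the long output vector.
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class CopySelectedSketch {

  static void timestampsToEpochDays(TimestampColumnVector input, boolean selectedInUse,
      int[] selected, int size, LongColumnVector output) {
    for (int j = 0; j < size; j++) {
      int i = selectedInUse ? selected[j] : j;
      if (input.noNulls || !input.isNull[i]) {
        output.vector[i] = DateWritableV2.millisToDays(input.getTime(i));
        output.isNull[i] = false;
      } else {
        output.isNull[i] = true;
        output.noNulls = false;
      }
    }
  }
}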
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From the class VectorUDFDateDiffScalarCol, method evaluateTimestamp.
protected int evaluateTimestamp(ColumnVector columnVector, int index) {
  TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
  date.setTime(tcv.getTime(index));
  return baseDate - DateWritableV2.dateToDays(date);
}
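The same arithmetic written as a standalone sketch: the scalar side is converted to epoch days once (here via DateWritableV2.millisToDays instead of the date/dateToDays pair above), and the column timestamp's days are subtracted from it. All values, including the local baseDate, are invented for the example.

// Standalone illustration of datediff(scalar, column) as a difference in epoch days.
import java.sql.Timestamp;

import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateDiffSketch {
  public static void main(String[] args) {
    TimestampColumnVector tcv = new TimestampColumnVector(1);
    tcv.set(0, Timestamp.valueOf("2020-01-01 08:00:00"));
    // Stand-in for the expression's precomputed scalar, in epoch days.
    int baseDate = DateWritableV2.millisToDays(
        Timestamp.valueOf("2020-01-11 00:00:00").getTime());
    int diff = baseDate - DateWritableV2.millisToDays(tcv.getTime(0));
    System.out.println(diff); // prints 10 (days between the two dates)
  }
}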