Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From class TestVectorExpressionWriters, method testSetterTimestamp:
private void testSetterTimestamp(TypeInfo type) throws HiveException {
  Timestamp[] timestampValues = new Timestamp[vectorSize];
  TimestampColumnVector tcv = VectorizedRowGroupGenUtil.generateTimestampColumnVector(
      true, false, vectorSize, new Random(10), timestampValues);
  // Force one entry to null so the null path is exercised.
  tcv.isNull[3] = true;
  Object[] values = new Object[this.vectorSize];
  VectorExpressionWriter vew = getWriter(type);
  for (int i = 0; i < vectorSize; i++) {
    // setValue() should be able to handle null input
    values[i] = null;
    values[i] = vew.setValue(values[i], tcv, i);
    if (values[i] != null) {
      Writable expected = getWritableValue(type, timestampValues[i]);
      TimestampWritable t1 = (TimestampWritable) expected;
      TimestampWritable t2 = (TimestampWritable) values[i];
      Assert.assertTrue(t1.equals(t2));
    } else {
      // A null result must correspond to a null entry in the column vector.
      Assert.assertTrue(tcv.isNull[i]);
    }
  }
}
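For readers unfamiliar with the vector layout: TimestampColumnVector stores each row as a millisecond/nanosecond pair, with nulls tracked through the noNulls flag and isNull array. The following is a minimal, hypothetical sketch (the class and method names are invented for illustration) of reading such a vector back null-safely, using only accessors that appear in the snippets on this page plus getNanos():

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

public class TimestampVectorReadSketch {
  // Print every row of a TimestampColumnVector, honoring nulls and repetition.
  static void dump(TimestampColumnVector tcv, int size) {
    for (int i = 0; i < size; i++) {
      // An isRepeating vector stores a single value at index 0 for all rows.
      int row = tcv.isRepeating ? 0 : i;
      if (tcv.noNulls || !tcv.isNull[row]) {
        Timestamp ts = new Timestamp(tcv.getTime(row));
        ts.setNanos(tcv.getNanos(row));
        System.out.println(i + ": " + ts);
      } else {
        System.out.println(i + ": NULL");
      }
    }
  }
}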
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From class TestVectorFilterExpressions, method testFilterTimestampBetween:
@Test
public void testFilterTimestampBetween() {
  VectorizedRowBatch vrb = new VectorizedRowBatch(1);
  vrb.cols[0] = new TimestampColumnVector();
  TimestampColumnVector lcv0 = (TimestampColumnVector) vrb.cols[0];
  // the epoch
  Timestamp startTS = new Timestamp(0);
  Timestamp endTS = Timestamp.valueOf("2013-11-05 00:00:00.000000000");
  Timestamp ts0 = Timestamp.valueOf("1963-11-06 00:00:00.000");
  lcv0.set(0, ts0);
  Timestamp ts1 = Timestamp.valueOf("1983-11-06 00:00:00.000");
  lcv0.set(1, ts1);
  Timestamp ts2 = Timestamp.valueOf("2099-11-06 00:00:00.000");
  lcv0.set(2, ts2);
  vrb.size = 3;
  VectorExpression expr1 = new FilterTimestampColumnBetween(0, startTS, endTS);
  expr1.evaluate(vrb);
  // Only ts1 (1983) lies within [epoch, 2013-11-05], so one row survives
  // and the selection vector points at logical row 1.
  assertEquals(1, vrb.size);
  assertEquals(true, vrb.selectedInUse);
  assertEquals(1, vrb.selected[0]);
}
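The test shows the contract of a vectorized filter: it shrinks batch.size and, when rows are dropped, switches the batch to indirect addressing through selected. A minimal sketch of how downstream code would iterate the surviving rows (the helper and class names are invented for illustration):

import java.util.function.IntConsumer;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class SelectedRowsSketch {
  // Visit every live row, whether or not a selection vector is in use.
  static void forEachSelectedRow(VectorizedRowBatch batch, IntConsumer body) {
    if (batch.selectedInUse) {
      for (int j = 0; j < batch.size; j++) {
        body.accept(batch.selected[j]);  // indirect: selected[] holds live row indices
      }
    } else {
      for (int i = 0; i < batch.size; i++) {
        body.accept(i);  // dense: rows 0..size-1 are all live
      }
    }
  }
}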
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From class VectorUDFDateDiffColCol, method toDateArray:
private LongColumnVector toDateArray(VectorizedRowBatch batch, Type colType,
    ColumnVector inputColVector, LongColumnVector dateVector) {
  int size = batch.size;
  if (colType == Type.DATE) {
    // Dates are already stored as epoch days in a LongColumnVector; no conversion needed.
    return (LongColumnVector) inputColVector;
  }
  // Grow the matching scratch vector if the batch no longer fits.
  if (size > dateVector.vector.length) {
    if (dateVector1 == dateVector) {
      dateVector1 = new LongColumnVector(size * 2);
      dateVector = dateVector1;
    } else {
      dateVector2 = new LongColumnVector(size * 2);
      dateVector = dateVector2;
    }
  }
  switch (colType) {
    case TIMESTAMP:
      TimestampColumnVector tcv = (TimestampColumnVector) inputColVector;
      copySelected(tcv, batch.selectedInUse, batch.selected, batch.size, dateVector);
      return dateVector;
    case STRING:
    case CHAR:
    case VARCHAR:
      BytesColumnVector bcv = (BytesColumnVector) inputColVector;
      copySelected(bcv, batch.selectedInUse, batch.selected, batch.size, dateVector);
      return dateVector;
    default:
      throw new Error("Unsupported input type " + colType.name());
  }
}
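copySelected() is a private helper of VectorUDFDateDiffColCol; the timestamp overload reduces each row to whole days since the epoch so both sides of the datediff can be compared as longs. A simplified, hypothetical version of that conversion might look like the following (null and isRepeating bookkeeping is abbreviated, and DateWritable.millisToDays is assumed as the millis-to-days helper):

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.serde2.io.DateWritable;

public class TimestampToDaysSketch {
  // Convert timestamp rows to epoch days, honoring an optional selection vector.
  static void timestampsToEpochDays(TimestampColumnVector input, boolean selectedInUse,
      int[] selected, int size, LongColumnVector output) {
    output.noNulls = input.noNulls;
    output.isRepeating = input.isRepeating;
    if (input.isRepeating) {
      // One value (or one null) stands for the whole batch.
      output.isNull[0] = input.isNull[0];
      if (input.noNulls || !input.isNull[0]) {
        output.vector[0] = DateWritable.millisToDays(input.getTime(0));
      }
      return;
    }
    for (int j = 0; j < size; j++) {
      int i = selectedInUse ? selected[j] : j;
      output.isNull[i] = input.isNull[i];
      if (input.noNulls || !input.isNull[i]) {
        output.vector[i] = DateWritable.millisToDays(input.getTime(i));
      }
    }
  }
}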
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From class VectorUDFDateDiffColScalar, method evaluateTimestamp:
protected int evaluateTimestamp(ColumnVector columnVector, int index) {
  TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
  // Load the row's millisecond value into the reusable scratch Date, then
  // diff its epoch-day number against the precomputed scalar (baseDate, in days).
  date.setTime(tcv.getTime(index));
  return DateWritable.dateToDays(date) - baseDate;
}
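The arithmetic is plain day subtraction: both the column value and the scalar are reduced to whole days since the epoch. A small, self-contained sketch of the same computation (the values and the baseDate assignment are invented for illustration):

import java.sql.Date;
import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.io.DateWritable;

public class DateDiffArithmeticSketch {
  public static void main(String[] args) {
    Date scratch = new Date(0);  // reusable scratch object, as in evaluateTimestamp()
    Timestamp ts = Timestamp.valueOf("1970-01-11 13:45:00");
    scratch.setTime(ts.getTime());
    int baseDate = 0;  // assume the scalar side resolved to the epoch day
    // dateToDays() truncates to whole (local) days, discarding the 13:45 part.
    System.out.println(DateWritable.dateToDays(scratch) - baseDate);  // prints 10
  }
}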
Use of org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector in project hive by apache.
From class VectorUDAFVarPopTimestamp, method aggregateInputSelection:
@Override
public void aggregateInputSelection(VectorAggregationBufferRow[] aggregationBufferSets,
    int aggregateIndex, VectorizedRowBatch batch) throws HiveException {
  inputExpression.evaluate(batch);
  TimestampColumnVector inputColVector =
      (TimestampColumnVector) batch.cols[this.inputExpression.getOutputColumn()];
  int batchSize = batch.size;
  if (batchSize == 0) {
    return;
  }
  // Dispatch on (isRepeating, selectedInUse, noNulls) so each specialized
  // iterate*() loop runs without per-row branching.
  if (inputColVector.isRepeating) {
    if (inputColVector.noNulls || !inputColVector.isNull[0]) {
      iterateRepeatingNoNullsWithAggregationSelection(aggregationBufferSets, aggregateIndex,
          inputColVector.getDouble(0), batchSize);
    }
  } else if (!batch.selectedInUse && inputColVector.noNulls) {
    iterateNoSelectionNoNullsWithAggregationSelection(aggregationBufferSets, aggregateIndex,
        inputColVector, batchSize);
  } else if (!batch.selectedInUse) {
    iterateNoSelectionHasNullsWithAggregationSelection(aggregationBufferSets, aggregateIndex,
        inputColVector, batchSize, inputColVector.isNull);
  } else if (inputColVector.noNulls) {
    iterateSelectionNoNullsWithAggregationSelection(aggregationBufferSets, aggregateIndex,
        inputColVector, batchSize, batch.selected);
  } else {
    iterateSelectionHasNullsWithAggregationSelection(aggregationBufferSets, aggregateIndex,
        inputColVector, batchSize, inputColVector.isNull, batch.selected);
  }
}
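The same five-way split recurs across Hive's vectorized aggregates. Specialized to something simpler than variance, the pattern looks like this hypothetical sum (class and method names are invented; getDouble() returns the timestamp as a double, as used above):

import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class TimestampSumSketch {
  // Sum a timestamp column as doubles, mirroring the dispatch in aggregateInputSelection().
  static double sumAsDouble(VectorizedRowBatch batch, TimestampColumnVector v) {
    int n = batch.size;
    if (n == 0) {
      return 0d;
    }
    if (v.isRepeating) {
      // One value (or one null) represents the whole batch.
      return (v.noNulls || !v.isNull[0]) ? v.getDouble(0) * n : 0d;
    }
    double sum = 0d;
    if (!batch.selectedInUse && v.noNulls) {
      for (int i = 0; i < n; i++) sum += v.getDouble(i);
    } else if (!batch.selectedInUse) {
      for (int i = 0; i < n; i++) if (!v.isNull[i]) sum += v.getDouble(i);
    } else if (v.noNulls) {
      for (int j = 0; j < n; j++) sum += v.getDouble(batch.selected[j]);
    } else {
      for (int j = 0; j < n; j++) {
        int i = batch.selected[j];
        if (!v.isNull[i]) sum += v.getDouble(i);
      }
    }
    return sum;
  }
}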