Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project.
The class BatchToRowReader, method nextTimestamp.
public static TimestampWritable nextTimestamp(ColumnVector vector, int row, Object previous) {
  if (vector.isRepeating) {
    // A repeating vector stores its single value in slot 0.
    row = 0;
  }
  if (vector.noNulls || !vector.isNull[row]) {
    TimestampWritable result;
    if (previous == null || previous.getClass() != TimestampWritable.class) {
      result = new TimestampWritable();
    } else {
      // Reuse the previously returned writable to avoid allocating one per row.
      result = (TimestampWritable) previous;
    }
    TimestampColumnVector tcv = (TimestampColumnVector) vector;
    result.setInternal(tcv.time[row], tcv.nanos[row]);
    return result;
  } else {
    return null;
  }
}
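For context, a minimal sketch (not part of the Hive source) of how this reader method pairs with a populated TimestampColumnVector: set() fills the time[] (milliseconds) and nanos[] arrays that setInternal() consumes, and passing a previously returned writable lets nextTimestamp() reuse it instead of allocating. The helper name and the sample value are illustrative assumptions; it is assumed to live where BatchToRowReader.nextTimestamp is visible.

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

// Illustrative sketch only.
static void readOneTimestamp() {
  TimestampColumnVector tcv = new TimestampColumnVector(1024);
  tcv.set(0, Timestamp.valueOf("2017-01-01 12:34:56.789"));
  TimestampWritable reused = new TimestampWritable();
  // Returns (and overwrites) the reused instance rather than allocating a new one.
  TimestampWritable w = nextTimestamp(tcv, 0, reused);
  System.out.println(w.getTimestamp()); // 2017-01-01 12:34:56.789
}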
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project.
The class GenericUDFCurrentTimestamp, method copyToNewInstance.
@Override
public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
  super.copyToNewInstance(newInstance);
  // Need to preserve currentTimestamp
  GenericUDFCurrentTimestamp other = (GenericUDFCurrentTimestamp) newInstance;
  if (this.currentTimestamp != null) {
    other.currentTimestamp = new TimestampWritable(this.currentTimestamp);
  }
}
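A minimal sketch, not from the Hive source, of the copy semantics this relies on: the TimestampWritable(TimestampWritable) constructor used above produces an independent copy, so the cloned UDF's currentTimestamp is unaffected by later changes to the original. The sample values are illustrative assumptions.

import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

// Illustrative sketch only.
static void copyIsIndependent() {
  TimestampWritable original = new TimestampWritable(Timestamp.valueOf("2017-06-05 09:06:30"));
  TimestampWritable copy = new TimestampWritable(original);
  original.set(Timestamp.valueOf("1970-01-01 00:00:00"));
  System.out.println(copy.getTimestamp()); // still 2017-06-05 09:06:30
}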
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project.
The class GenericUDF, method getTimestampValue.
protected Timestamp getTimestampValue(DeferredObject[] arguments, int i,
    Converter[] converters) throws HiveException {
  Object obj;
  if ((obj = arguments[i].get()) == null) {
    return null;
  }
  Object writableValue = converters[i].convert(obj);
  // if string can not be parsed converter will return null
  if (writableValue == null) {
    return null;
  }
  Timestamp ts = ((TimestampWritable) writableValue).getTimestamp();
  return ts;
}
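A minimal sketch, not from the Hive source, of the kind of converter this method expects in converters[i]: one built via ObjectInspectorConverters that turns a string argument into a TimestampWritable, returning null when the string cannot be parsed (which the method above propagates as a null Timestamp). The inspector choices and sample inputs are illustrative assumptions.

import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Illustrative sketch only.
static void convertStringArgument() {
  Converter toTimestamp = ObjectInspectorConverters.getConverter(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector,
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  TimestampWritable ok = (TimestampWritable) toTimestamp.convert("2017-03-15 10:20:30");
  Object bad = toTimestamp.convert("not a timestamp"); // null: unparseable string
}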
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project.
The class TestVectorExpressionWriters, method testSetterTimestamp.
private void testSetterTimestamp(TypeInfo type) throws HiveException {
  Timestamp[] timestampValues = new Timestamp[vectorSize];
  TimestampColumnVector tcv = VectorizedRowGroupGenUtil.generateTimestampColumnVector(
      true, false, vectorSize, new Random(10), timestampValues);
  tcv.isNull[3] = true;
  Object[] values = new Object[this.vectorSize];
  VectorExpressionWriter vew = getWriter(type);
  for (int i = 0; i < vectorSize; i++) {
    // setValue() should be able to handle null input
    values[i] = null;
    values[i] = vew.setValue(values[i], tcv, i);
    if (values[i] != null) {
      Writable expected = getWritableValue(type, timestampValues[i]);
      TimestampWritable t1 = (TimestampWritable) expected;
      TimestampWritable t2 = (TimestampWritable) values[i];
      Assert.assertTrue(t1.equals(t2));
    } else {
      Assert.assertTrue(tcv.isNull[i]);
    }
  }
}
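A minimal sketch, not from the Hive test code, of the equality contract the assertion above depends on: two TimestampWritables built from the same java.sql.Timestamp compare equal by wrapped value, nanoseconds included. The sample timestamp is an illustrative assumption.

import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

// Illustrative sketch only.
static void writableEquality() {
  Timestamp ts = Timestamp.valueOf("2015-08-20 01:02:03.123456789");
  TimestampWritable a = new TimestampWritable(ts);
  TimestampWritable b = new TimestampWritable(ts);
  System.out.println(a.equals(b)); // true: equality is by wrapped timestamp value
}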
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project.
The class TestVectorDateExpressions, method compareToUDFDayOfMonthDate.
private void compareToUDFDayOfMonthDate(long t, int y) {
  UDFDayOfMonth udf = new UDFDayOfMonth();
  TimestampWritable tsw = toTimestampWritable(t);
  IntWritable res = udf.evaluate(tsw);
  Assert.assertEquals(res.get(), y);
}
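A minimal sketch, not from the Hive test, showing the same evaluate() call against a fixed timestamp instead of one produced by the test's toTimestampWritable helper; the sample date is an illustrative assumption.

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;

// Illustrative sketch only.
static void dayOfMonthExample() {
  UDFDayOfMonth udf = new UDFDayOfMonth();
  TimestampWritable tsw = new TimestampWritable(Timestamp.valueOf("2009-07-30 12:58:59"));
  IntWritable day = udf.evaluate(tsw);
  System.out.println(day.get()); // 30
}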