Usage of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project:
class TestVectorTimestampExpressions, method compareToUDFMonthLong.
/**
 * Asserts that {@link UDFMonth} extracts the expected month from the given timestamp.
 *
 * @param t the input timestamp
 * @param y the expected month value
 */
private void compareToUDFMonthLong(Timestamp t, int y) {
  UDFMonth udf = new UDFMonth();
  TimestampWritable tsw = new TimestampWritable(t);
  IntWritable res = udf.evaluate(tsw);
  // JUnit convention: expected value first, actual second (fixes reversed arguments,
  // which would produce a misleading failure message).
  Assert.assertEquals(y, res.get());
}
Usage of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project:
class TestVectorTimestampExpressions, method compareToUDFMinuteLong.
/**
 * Asserts that {@link UDFMinute} extracts the expected minute from the given timestamp.
 *
 * @param t the input timestamp
 * @param y the expected minute value
 */
private void compareToUDFMinuteLong(Timestamp t, int y) {
  UDFMinute udf = new UDFMinute();
  TimestampWritable tsw = new TimestampWritable(t);
  IntWritable res = udf.evaluate(tsw);
  // JUnit convention: expected value first, actual second (fixes reversed arguments,
  // which would produce a misleading failure message).
  Assert.assertEquals(y, res.get());
}
Usage of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project:
class TestVectorTypeCasts, method getBatchTimestampDecimal.
/**
 * Builds a two-column batch whose first column holds random timestamps and whose
 * second column is an empty decimal output column. The decimal equivalent of each
 * generated timestamp is written into {@code hiveDecimalValues} as the expected result.
 *
 * @param hiveDecimalValues output array; filled with the decimal form of each timestamp
 * @return the populated batch, sized to {@code hiveDecimalValues.length}
 */
private VectorizedRowBatch getBatchTimestampDecimal(HiveDecimal[] hiveDecimalValues) {
  final int rowCount = hiveDecimalValues.length;
  Random rand = new Random(994);
  VectorizedRowBatch batch = new VectorizedRowBatch(2);
  TimestampColumnVector tsCol = new TimestampColumnVector(rowCount);
  batch.cols[0] = tsCol;
  batch.cols[1] = new DecimalColumnVector(rowCount, HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
  for (int row = 0; row < rowCount; row++) {
    // Pick one of four nanosecond patterns; each branch draws from rand in the
    // same order as before so the generated data is unchanged.
    int nanos = 0;
    int pattern = rand.nextInt(4);
    if (pattern == 1) {
      // Full nanosecond range.
      nanos = rand.nextInt((int) NANOSECONDS_PER_SECOND);
    } else if (pattern == 2) {
      // Millisecond precision only.
      nanos = rand.nextInt((int) MILLISECONDS_PER_SECOND) * (int) NANOSECONDS_PER_MILLISSECOND;
    } else if (pattern == 3) {
      // Sub-millisecond precision only.
      nanos = rand.nextInt((int) NANOSECONDS_PER_MILLISSECOND);
    }
    // pattern == 0: no fractional nanoseconds.
    Timestamp ts = new Timestamp(RandomTypeUtil.randomMillis(rand));
    ts.setNanos(nanos);
    hiveDecimalValues[row] = new TimestampWritable(ts).getHiveDecimal();
    tsCol.set(row, ts);
  }
  batch.size = rowCount;
  return batch;
}
Usage of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project:
class TestVectorTypeCasts, method getBatchDecimalTimestamp.
/**
 * Builds a two-column batch whose first column holds random decimal values derived
 * from random timestamps and whose second column is an empty timestamp output column.
 * The double representation of each timestamp is written into {@code doubleValues}
 * as the expected result.
 *
 * @param doubleValues output array; filled with the double form of each timestamp
 * @return the populated batch, sized to {@code doubleValues.length}
 */
private VectorizedRowBatch getBatchDecimalTimestamp(double[] doubleValues) {
  final int rowCount = doubleValues.length;
  VectorizedRowBatch batch = new VectorizedRowBatch(2);
  DecimalColumnVector decCol = new DecimalColumnVector(rowCount, HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
  batch.cols[0] = decCol;
  batch.cols[1] = new TimestampColumnVector(rowCount);
  decCol.noNulls = true;
  Random rand = new Random(94830);
  for (int row = 0; row < rowCount; row++) {
    // Draw millis then nanos, matching the original order of random draws.
    Timestamp ts = new Timestamp(RandomTypeUtil.randomMillis(rand));
    ts.setNanos(RandomTypeUtil.randomNanos(rand));
    double asDouble = new TimestampWritable(ts).getDouble();
    doubleValues[row] = asDouble;
    decCol.set(row, HiveDecimal.create(new BigDecimal(asDouble)));
  }
  batch.size = rowCount;
  return batch;
}
Usage of org.apache.hadoop.hive.serde2.io.TimestampWritable in the Apache Hive project:
class TestGenericUDFOPPlus, method testIntervalDayTimePlusTimestamp.
/**
 * Verifies that adding an interval-day-time value to a timestamp through
 * {@link GenericUDFOPPlus} yields a timestamp result with the expected value.
 */
@Test
public void testIntervalDayTimePlusTimestamp() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  HiveIntervalDayTimeWritable left =
      new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
  TimestampWritable right =
      new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
  };
  DeferredObject[] args = {
      new DeferredJavaObject(left),
      new DeferredJavaObject(right)
  };
  // Initialize resolves the result inspector; the UDF must report timestamp type.
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  // Evaluate and compare against the expected shifted timestamp.
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
}
End of aggregated usage examples.