Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project: method compareToUDFSecondLong of class TestVectorTimestampExpressions.
/**
 * Checks that the row-mode {@link UDFSecond} UDF extracts the expected seconds-of-minute
 * value from the given timestamp.
 *
 * @param t the input timestamp to evaluate
 * @param y the expected second-of-minute value
 * @throws HiveException if UDF initialization or evaluation fails
 */
private void compareToUDFSecondLong(Timestamp t, int y) throws HiveException {
  UDFSecond udf = new UDFSecond();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  // Convert java.sql.Timestamp to Hive's Timestamp representation.
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  // JUnit convention: expected value first, actual value second.
  Assert.assertEquals(y, res.get());
}
Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project: method compareToUDFDayOfMonthLong of class TestVectorTimestampExpressions.
/**
 * Checks that the row-mode {@link UDFDayOfMonth} UDF extracts the expected day-of-month
 * value from the given timestamp.
 *
 * @param t the input timestamp to evaluate
 * @param y the expected day-of-month value
 * @throws HiveException if UDF initialization or evaluation fails
 */
private void compareToUDFDayOfMonthLong(Timestamp t, int y) throws HiveException {
  UDFDayOfMonth udf = new UDFDayOfMonth();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  // Convert java.sql.Timestamp to Hive's Timestamp representation.
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  // JUnit convention: expected value first, actual value second.
  Assert.assertEquals(y, res.get());
}
Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project: method compareToUDFHourLong of class TestVectorTimestampExpressions.
/**
 * Checks that the row-mode {@link UDFHour} UDF extracts the expected hour-of-day
 * value from the given timestamp.
 *
 * @param t the input timestamp to evaluate
 * @param y the expected hour-of-day value
 * @throws HiveException if UDF initialization or evaluation fails
 */
private void compareToUDFHourLong(Timestamp t, int y) throws HiveException {
  UDFHour udf = new UDFHour();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  // Convert java.sql.Timestamp to Hive's Timestamp representation.
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  // JUnit convention: expected value first, actual value second.
  Assert.assertEquals(y, res.get());
}
Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project: method compareToUDFWeekOfYearLong of class TestVectorTimestampExpressions.
/**
 * Checks that the row-mode {@link UDFWeekOfYear} UDF extracts the expected week-of-year
 * value from the given timestamp. Unlike the GenericUDF-based checks in this class,
 * UDFWeekOfYear uses the legacy UDF evaluate(...) signature and needs no initialization.
 *
 * @param t the input timestamp to evaluate
 * @param y the expected week-of-year value
 */
private void compareToUDFWeekOfYearLong(Timestamp t, int y) {
  UDFWeekOfYear udf = new UDFWeekOfYear();
  // Convert java.sql.Timestamp to Hive's Timestamp representation.
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = udf.evaluate(tsw);
  // JUnit convention: expected value first, actual value second.
  Assert.assertEquals(y, res.get());
}
Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project: method getBatchTimestampDecimal of class TestVectorTypeCasts.
/**
 * Builds a two-column {@link VectorizedRowBatch} of random timestamps (column 0) paired
 * with their decimal representations (column 1). The supplied {@code hiveDecimalValues}
 * array is filled in-place with the decimal value of each generated timestamp.
 *
 * @param hiveDecimalValues output array; its length determines the batch size
 * @return the populated batch, with {@code size} set to the array length
 */
private VectorizedRowBatch getBatchTimestampDecimal(HiveDecimal[] hiveDecimalValues) {
  // Fixed seed keeps the generated batch reproducible across runs.
  Random rand = new Random(994);
  VectorizedRowBatch batch = new VectorizedRowBatch(2);
  TimestampColumnVector timestampCol = new TimestampColumnVector(hiveDecimalValues.length);
  batch.cols[0] = timestampCol;
  batch.cols[1] = new DecimalColumnVector(hiveDecimalValues.length,
      HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
  for (int row = 0; row < hiveDecimalValues.length; row++) {
    // Randomly pick one of four fractional-second precisions for this row.
    int variant = rand.nextInt(4);
    int nanos;
    if (variant == 1) {
      // Full nanosecond precision.
      nanos = rand.nextInt((int) NANOSECONDS_PER_SECOND);
    } else if (variant == 2) {
      // Millisecond precision only.
      nanos = rand.nextInt((int) MILLISECONDS_PER_SECOND) * (int) NANOSECONDS_PER_MILLISSECOND;
    } else if (variant == 3) {
      // Sub-millisecond precision only.
      nanos = rand.nextInt((int) NANOSECONDS_PER_MILLISSECOND);
    } else {
      // variant == 0: whole seconds, no fractional part.
      nanos = 0;
    }
    Timestamp ts = new Timestamp(RandomTypeUtil.randomMillis(rand));
    // setNanos replaces the entire fractional-second part of the timestamp.
    ts.setNanos(nanos);
    TimestampWritableV2 writable = new TimestampWritableV2(
        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
    hiveDecimalValues[row] = writable.getHiveDecimal();
    timestampCol.set(row, ts);
  }
  batch.size = hiveDecimalValues.length;
  return batch;
}
End of aggregated TimestampWritableV2 usage examples.