Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
From class TestValueBoundaryScanner, method testTimestampIsDistanceGreater:
@Test
public void testTimestampIsDistanceGreater() {
  PTFExpressionDef expression = new PTFExpressionDef();
  expression.setOI(PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  TimestampValueBoundaryScanner scanner =
      new TimestampValueBoundaryScanner(null, null, new OrderExpressionDef(expression), false);

  // Timestamp at 1000 seconds past the epoch (1,000,000 ms).
  Timestamp thousandSeconds = new Timestamp();
  thousandSeconds.setTimeInMillis(1000000);
  TimestampWritableV2 first = new TimestampWritableV2(thousandSeconds);
  TimestampWritableV2 second = new TimestampWritableV2(thousandSeconds);
  // A default-constructed writable represents the epoch (0 seconds).
  TimestampWritableV2 epoch = new TimestampWritableV2();

  // Equal timestamps: the distance is zero, so it is never greater than 0.
  Assert.assertFalse(scanner.isDistanceGreater(first, second, 0));
  Assert.assertFalse(scanner.isDistanceGreater(second, first, 0));

  // Null comparison: true only when exactly one operand is null.
  Assert.assertTrue(scanner.isDistanceGreater(first, null, 100));
  Assert.assertTrue(scanner.isDistanceGreater(second, null, 100));
  Assert.assertFalse(scanner.isDistanceGreater(null, null, 100));

  // Distance between 1000s and the epoch is exactly 1000 seconds.
  Assert.assertTrue(scanner.isDistanceGreater(first, epoch, 999));   // 1000 > 999
  Assert.assertFalse(scanner.isDistanceGreater(first, epoch, 1000)); // 1000 == 1000
  Assert.assertFalse(scanner.isDistanceGreater(first, epoch, 1001)); // 1000 < 1001
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
From class TestValueBoundaryScanner, method testTimestampEquals:
@Test
public void testTimestampEquals() {
  PTFExpressionDef expression = new PTFExpressionDef();
  expression.setOI(PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  TimestampValueBoundaryScanner scanner =
      new TimestampValueBoundaryScanner(null, null, new OrderExpressionDef(expression), false);

  // Two writables wrapping the same 1000 ms timestamp.
  Timestamp oneSecond = new Timestamp();
  oneSecond.setTimeInMillis(1000);
  TimestampWritableV2 first = new TimestampWritableV2(oneSecond);
  TimestampWritableV2 second = new TimestampWritableV2(oneSecond);
  // A default-constructed writable (represents the epoch).
  TimestampWritableV2 empty = new TimestampWritableV2();

  // Symmetric equality for identical timestamps.
  Assert.assertTrue(scanner.isEqual(first, second));
  Assert.assertTrue(scanner.isEqual(second, first));

  // An empty writable equals another epoch-valued writable...
  Assert.assertTrue(scanner.isEqual(empty, new TimestampWritableV2(new Timestamp())));
  // ...but not a non-epoch value.
  Assert.assertFalse(scanner.isEqual(empty, first));

  // Null handling: unequal when only one side is null, equal when both are.
  Assert.assertFalse(scanner.isEqual(null, second));
  Assert.assertFalse(scanner.isEqual(first, null));
  Assert.assertTrue(scanner.isEqual(null, null));
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
From class TestGenericUDFToUnixTimestamp, method testTimestamp:
@Test
public void testTimestamp() throws HiveException {
  GenericUDFToUnixTimeStamp udf = new GenericUDFToUnixTimeStamp();
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
  };
  udf.initialize(inputOIs);

  // Epoch in the system default zone; expect the corresponding epoch second.
  Timestamp epoch = Timestamp.valueOf("1970-01-01 00:00:00");
  TimestampTZ epochTz = TimestampTZUtil.convert(epoch, ZoneId.systemDefault());
  runAndVerify(udf, new TimestampWritableV2(epoch), new LongWritable(epochTz.getEpochSecond()));

  // An arbitrary later timestamp, converted the same way.
  Timestamp later = Timestamp.valueOf("2001-02-03 01:02:03");
  TimestampTZ laterTz = TimestampTZUtil.convert(later, ZoneId.systemDefault());
  runAndVerify(udf, new TimestampWritableV2(later), new LongWritable(laterTz.getEpochSecond()));

  // A null input must yield a null result.
  runAndVerify(udf, null, null);
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
From class BatchToRowReader, method nextTimestamp:
/**
 * Reads the timestamp at {@code row} from a vectorized column, returning null for
 * a null entry. Reuses {@code previous} as the output writable when it is exactly
 * a {@link TimestampWritableV2}; otherwise allocates a fresh one.
 */
public static TimestampWritableV2 nextTimestamp(ColumnVector vector, int row, Object previous) {
  // A repeating vector stores its single shared value at index 0.
  if (vector.isRepeating) {
    row = 0;
  }
  // Null entry in the vector -> null result.
  if (!vector.noNulls && vector.isNull[row]) {
    return null;
  }
  // Reuse only when the previous object is exactly TimestampWritableV2
  // (an exact-class check, deliberately stricter than instanceof).
  TimestampWritableV2 result =
      (previous != null && previous.getClass() == TimestampWritableV2.class)
          ? (TimestampWritableV2) previous
          : new TimestampWritableV2();
  TimestampColumnVector timestamps = (TimestampColumnVector) vector;
  result.setInternal(timestamps.time[row], timestamps.nanos[row]);
  return result;
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
From class GenericUDF, method getTimestampValue:
/**
 * Extracts the i-th deferred argument as a {@link Timestamp}, converting it with
 * the matching converter. Returns null when the argument itself is null or when
 * the converter cannot produce a timestamp (e.g. an unparseable string).
 */
protected Timestamp getTimestampValue(DeferredObject[] arguments, int i, Converter[] converters) throws HiveException {
  Object argument = arguments[i].get();
  if (argument == null) {
    return null;
  }
  // The converter returns null when the value (e.g. a string) cannot be parsed.
  Object converted = converters[i].convert(argument);
  if (converted == null) {
    return null;
  }
  return ((TimestampWritableV2) converted).getTimestamp();
}
Aggregations