Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
The class TestGenericUDFOPPlus, method testTimestampPlusIntervalYearMonth.
@Test
public void testTimestampPlusIntervalYearMonth() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  // 2001-11-15 plus an interval of 2 years 2 months ("2-2") should yield 2004-01-15.
  TimestampWritable left = new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
  HiveIntervalYearMonthWritable right = new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp());
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
The class TestGenericUDFOPPlus, method testTimestampPlusIntervalDayTime.
@Test
public void testTimestampPlusIntervalDayTime() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  // 2001-01-01 00:00:00 plus 1 day 02:03:04.567 should yield 2001-01-02 02:03:04.567.
  TimestampWritable left = new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
  HiveIntervalDayTimeWritable right = new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
The class TestGenericUDFOPMinus, method testTimestampMinusIntervalDayTime.
@Test
public void testTimestampMinusIntervalDayTime() throws Exception {
  GenericUDFOPMinus udf = new GenericUDFOPMinus();
  // 2001-01-02 02:03:04.567 minus 1 day 02:03:04.567 should yield 2001-01-01 00:00:00.
  TimestampWritable left = new TimestampWritable(Timestamp.valueOf("2001-01-02 2:3:4.567"));
  HiveIntervalDayTimeWritable right = new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-01-01 00:00:00"), res.getTimestamp());
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
The class ObjectInspectorUtils, method compare.
/**
 * Compare two objects with their respective ObjectInspectors.
 * If nullValueOpt is MAXVALUE, treat null as the maximum value;
 * if nullValueOpt is MINVALUE, treat null as the minimum value.
 */
public static int compare(Object o1, ObjectInspector oi1, Object o2, ObjectInspector oi2,
    MapEqualComparer mapEqualComparer, NullValueOption nullValueOpt) {
  if (oi1.getCategory() != oi2.getCategory()) {
    return oi1.getCategory().compareTo(oi2.getCategory());
  }
  int nullCmpRtn = -1;
  switch (nullValueOpt) {
    case MAXVALUE:
      nullCmpRtn = 1;
      break;
    case MINVALUE:
      nullCmpRtn = -1;
      break;
  }
  if (o1 == null) {
    return o2 == null ? 0 : nullCmpRtn;
  } else if (o2 == null) {
    return -nullCmpRtn;
  }
  switch (oi1.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi1 = ((PrimitiveObjectInspector) oi1);
      PrimitiveObjectInspector poi2 = ((PrimitiveObjectInspector) oi2);
      if (poi1.getPrimitiveCategory() != poi2.getPrimitiveCategory()) {
        return poi1.getPrimitiveCategory().compareTo(poi2.getPrimitiveCategory());
      }
      switch (poi1.getPrimitiveCategory()) {
        case VOID:
          return 0;
        case BOOLEAN: {
          int v1 = ((BooleanObjectInspector) poi1).get(o1) ? 1 : 0;
          int v2 = ((BooleanObjectInspector) poi2).get(o2) ? 1 : 0;
          return v1 - v2;
        }
        case BYTE: {
          int v1 = ((ByteObjectInspector) poi1).get(o1);
          int v2 = ((ByteObjectInspector) poi2).get(o2);
          return v1 - v2;
        }
        case SHORT: {
          int v1 = ((ShortObjectInspector) poi1).get(o1);
          int v2 = ((ShortObjectInspector) poi2).get(o2);
          return v1 - v2;
        }
        case INT: {
          int v1 = ((IntObjectInspector) poi1).get(o1);
          int v2 = ((IntObjectInspector) poi2).get(o2);
          return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
        }
        case LONG: {
          long v1 = ((LongObjectInspector) poi1).get(o1);
          long v2 = ((LongObjectInspector) poi2).get(o2);
          return v1 > v2 ? 1 : (v1 < v2 ? -1 : 0);
        }
        case FLOAT: {
          float v1 = ((FloatObjectInspector) poi1).get(o1);
          float v2 = ((FloatObjectInspector) poi2).get(o2);
          // The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
          if (v1 == 0.0f && v2 == 0.0f) {
            return 0;
          } else {
            // Float.compare() treats -0.0 and 0.0 as different
            return Float.compare(v1, v2);
          }
        }
        case DOUBLE: {
          double v1 = ((DoubleObjectInspector) poi1).get(o1);
          double v2 = ((DoubleObjectInspector) poi2).get(o2);
          // The IEEE 754 floating point spec specifies that signed -0.0 and 0.0 should be treated as equal.
          if (v1 == 0.0d && v2 == 0.0d) {
            return 0;
          } else {
            // Double.compare() treats -0.0 and 0.0 as different
            return Double.compare(v1, v2);
          }
        }
        case STRING: {
          if (poi1.preferWritable() || poi2.preferWritable()) {
            Text t1 = (Text) poi1.getPrimitiveWritableObject(o1);
            Text t2 = (Text) poi2.getPrimitiveWritableObject(o2);
            return t1 == null ? (t2 == null ? 0 : -1) : (t2 == null ? 1 : t1.compareTo(t2));
          } else {
            String s1 = (String) poi1.getPrimitiveJavaObject(o1);
            String s2 = (String) poi2.getPrimitiveJavaObject(o2);
            return s1 == null ? (s2 == null ? 0 : -1) : (s2 == null ? 1 : s1.compareTo(s2));
          }
        }
        case CHAR: {
          HiveCharWritable t1 = ((HiveCharObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveCharWritable t2 = ((HiveCharObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return t1.compareTo(t2);
        }
        case VARCHAR: {
          HiveVarcharWritable t1 = ((HiveVarcharObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveVarcharWritable t2 = ((HiveVarcharObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return t1.compareTo(t2);
        }
        case BINARY: {
          BytesWritable bw1 = ((BinaryObjectInspector) poi1).getPrimitiveWritableObject(o1);
          BytesWritable bw2 = ((BinaryObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return bw1.compareTo(bw2);
        }
        case DATE: {
          DateWritable d1 = ((DateObjectInspector) poi1).getPrimitiveWritableObject(o1);
          DateWritable d2 = ((DateObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return d1.compareTo(d2);
        }
        case TIMESTAMP: {
          TimestampWritable t1 = ((TimestampObjectInspector) poi1).getPrimitiveWritableObject(o1);
          TimestampWritable t2 = ((TimestampObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return t1.compareTo(t2);
        }
        case INTERVAL_YEAR_MONTH: {
          HiveIntervalYearMonthWritable i1 = ((HiveIntervalYearMonthObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveIntervalYearMonthWritable i2 = ((HiveIntervalYearMonthObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return i1.compareTo(i2);
        }
        case INTERVAL_DAY_TIME: {
          HiveIntervalDayTimeWritable i1 = ((HiveIntervalDayTimeObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveIntervalDayTimeWritable i2 = ((HiveIntervalDayTimeObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return i1.compareTo(i2);
        }
        case DECIMAL: {
          HiveDecimalWritable t1 = ((HiveDecimalObjectInspector) poi1).getPrimitiveWritableObject(o1);
          HiveDecimalWritable t2 = ((HiveDecimalObjectInspector) poi2).getPrimitiveWritableObject(o2);
          return t1.compareTo(t2);
        }
        default: {
          throw new RuntimeException("Unknown type: " + poi1.getPrimitiveCategory());
        }
      }
    }
    case STRUCT: {
      StructObjectInspector soi1 = (StructObjectInspector) oi1;
      StructObjectInspector soi2 = (StructObjectInspector) oi2;
      List<? extends StructField> fields1 = soi1.getAllStructFieldRefs();
      List<? extends StructField> fields2 = soi2.getAllStructFieldRefs();
      int minimum = Math.min(fields1.size(), fields2.size());
      for (int i = 0; i < minimum; i++) {
        int r = compare(soi1.getStructFieldData(o1, fields1.get(i)), fields1.get(i).getFieldObjectInspector(),
            soi2.getStructFieldData(o2, fields2.get(i)), fields2.get(i).getFieldObjectInspector(),
            mapEqualComparer, nullValueOpt);
        if (r != 0) {
          return r;
        }
      }
      return fields1.size() - fields2.size();
    }
    case LIST: {
      ListObjectInspector loi1 = (ListObjectInspector) oi1;
      ListObjectInspector loi2 = (ListObjectInspector) oi2;
      int minimum = Math.min(loi1.getListLength(o1), loi2.getListLength(o2));
      for (int i = 0; i < minimum; i++) {
        int r = compare(loi1.getListElement(o1, i), loi1.getListElementObjectInspector(),
            loi2.getListElement(o2, i), loi2.getListElementObjectInspector(),
            mapEqualComparer, nullValueOpt);
        if (r != 0) {
          return r;
        }
      }
      return loi1.getListLength(o1) - loi2.getListLength(o2);
    }
    case MAP: {
      if (mapEqualComparer == null) {
        throw new RuntimeException("Compare on map type not supported!");
      } else {
        return mapEqualComparer.compare(o1, (MapObjectInspector) oi1, o2, (MapObjectInspector) oi2);
      }
    }
    case UNION: {
      UnionObjectInspector uoi1 = (UnionObjectInspector) oi1;
      UnionObjectInspector uoi2 = (UnionObjectInspector) oi2;
      byte tag1 = uoi1.getTag(o1);
      byte tag2 = uoi2.getTag(o2);
      if (tag1 != tag2) {
        return tag1 - tag2;
      }
      return compare(uoi1.getField(o1), uoi1.getObjectInspectors().get(tag1),
          uoi2.getField(o2), uoi2.getObjectInspectors().get(tag2),
          mapEqualComparer, nullValueOpt);
    }
    default:
      throw new RuntimeException("Compare on unknown type: " + oi1.getCategory());
  }
}
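A minimal usage sketch of the overload above (not part of the original listing): it reuses the writable timestamp inspector from the earlier tests, assumes NullValueOption is the enum referenced in the method signature, and passes a null MapEqualComparer, which is fine here because no MAP values are compared.

// Sketch only; assumes an import for NullValueOption as used in compare(...) above.
ObjectInspector tsOI = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
TimestampWritable a = new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
TimestampWritable b = new TimestampWritable(Timestamp.valueOf("2001-01-02 00:00:00"));

// Both sides non-null: falls through to TimestampWritable.compareTo, so the result is negative.
int cmp = ObjectInspectorUtils.compare(a, tsOI, b, tsOI, null, NullValueOption.MINVALUE);

// One side null: MINVALUE sorts null before any non-null value (negative result),
// MAXVALUE sorts it after (positive result).
int nullFirst = ObjectInspectorUtils.compare(null, tsOI, b, tsOI, null, NullValueOption.MINVALUE);
int nullLast = ObjectInspectorUtils.compare(null, tsOI, b, tsOI, null, NullValueOption.MAXVALUE);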
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
The class TestVectorDateExpressions, method compareToUDFUnixTimeStampDate.
private void compareToUDFUnixTimeStampDate(long t, long y) {
  // toTimestampWritable and getLongWritable are helpers defined elsewhere in
  // TestVectorDateExpressions; y is the expected unix_timestamp value for t.
  TimestampWritable tsw = toTimestampWritable(t);
  LongWritable res = getLongWritable(tsw);
  if (res.get() != y) {
    System.out.printf("%d vs %d for %d, %d\n", res.get(), y, t, tsw.getTimestamp().getTime() / 1000);
  }
  Assert.assertEquals(res.get(), y);
}
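The toTimestampWritable helper itself is not shown in this listing. As a rough, hypothetical stand-in only (not the project's actual implementation), a conversion from a days-since-epoch value to a TimestampWritable could look like the following; the real helper may additionally adjust for the local time zone.

// Hypothetical illustration, not the Hive test's real helper.
// Requires java.util.concurrent.TimeUnit; assumes the argument counts days since the UNIX epoch.
private TimestampWritable toTimestampWritable(long daysSinceEpoch) {
  Timestamp ts = new Timestamp(TimeUnit.DAYS.toMillis(daysSinceEpoch));
  return new TimestampWritable(ts);
}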