Usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
Class GenericUDFFromUtcTimestamp, method evaluate:
/**
 * Converts the timestamp in argument 0 between UTC and the time zone named by
 * argument 1. The direction depends on {@code invert()}: when it returns true the
 * conversion runs toward UTC, otherwise away from UTC.
 *
 * @param arguments arg 0 = timestamp-convertible value, arg 1 = time-zone name
 * @return the converted Timestamp, or null if any input is null or unconvertible
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object tsArg = arguments[0].get();
  if (tsArg == null) {
    return null;
  }
  Object tzArg = arguments[1].get();
  if (tzArg == null) {
    return null;
  }
  Object convertedTs = timestampConverter.convert(tsArg);
  if (convertedTs == null) {
    return null;
  }
  Timestamp inputTs = ((TimestampWritableV2) convertedTs).getTimestamp();
  TimeZone namedZone = TimeZone.getTimeZone(textConverter.convert(tzArg).toString());
  // invert() selects the direction: true => convert into UTC, false => out of UTC.
  TimeZone fromTz = invert() ? namedZone : tzUTC;
  TimeZone toTz = invert() ? tzUTC : namedZone;
  // inputTs carries wall-clock fields in the local timezone; reinterpret those
  // same fields as a point in time in fromTz.
  TimestampTZ fromTs = TimestampTZUtil.parse(inputTs.toString(), fromTz.toZoneId());
  if (fromTs == null) {
    return null;
  }
  // Shift that instant into toTz and rebuild a zone-less Timestamp from its
  // local date-time representation.
  return Timestamp.valueOf(
      fromTs.getZonedDateTime().withZoneSameInstant(toTz.toZoneId()).toLocalDateTime().toString());
}
Usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
Class GenericUDFDateAdd, method evaluate:
/**
 * Adds a signed number of days to the date-like first argument.
 *
 * @param arguments arg 0 = STRING/TIMESTAMP/DATE value, arg 1 = integral day count
 * @return a DateWritableV2 holding the shifted date, or null on null/unparseable input
 * @throws UDFArgumentException if arg 0 has an unsupported type
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object dateArg = arguments[0].get();
  if (dateArg == null) {
    return null;
  }
  Object daysObj = daysConverter.convert(arguments[1].get());
  if (daysObj == null) {
    return null;
  }
  // The converter may hand back any of the narrow integral writables.
  final int numDays;
  if (daysObj instanceof ByteWritable) {
    numDays = ((ByteWritable) daysObj).get();
  } else if (daysObj instanceof ShortWritable) {
    numDays = ((ShortWritable) daysObj).get();
  } else if (daysObj instanceof IntWritable) {
    numDays = ((IntWritable) daysObj).get();
  } else {
    return null;
  }
  // Normalize the first argument into 'output' as an epoch-day count.
  switch (inputType1) {
    case STRING:
      String dateStr = dateConverter.convert(dateArg).toString();
      if (!DateParser.parseDate(dateStr, dateVal)) {
        return null;
      }
      output.set(dateVal);
      break;
    case TIMESTAMP:
      Timestamp ts = ((TimestampWritableV2) dateConverter.convert(dateArg)).getTimestamp();
      output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
      break;
    case DATE:
      output.set(((DateWritableV2) dateConverter.convert(dateArg)).getDays());
      break;
    default:
      throw new UDFArgumentException("DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
  }
  // signModifier flips the delta so the same code serves add and subtract variants.
  output.set(output.getDays() + signModifier * numDays);
  return output;
}
Usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
Class GenericUDFDatetimeLegacyHybridCalendar, method evaluate:
/**
 * Adjusts a DATE or TIMESTAMP value between calendar systems by formatting its
 * epoch-millis through {@code formatter} and re-parsing the result.
 *
 * @param arguments arg 0 = DATE or TIMESTAMP value
 * @return the adjusted writable (reused instance), or null for null input
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object value = arguments[0].get();
  if (value == null) {
    return null;
  }
  value = converter.convert(value);
  switch (resultOI.getPrimitiveCategory()) {
    case DATE: {
      long epochMillis = ((DateWritableV2) value).get().toEpochMilli();
      dateWritable.set(Date.valueOf(formatter.format(new java.sql.Date(epochMillis))));
      return dateWritable;
    }
    case TIMESTAMP: {
      Timestamp original = ((TimestampWritableV2) value).getTimestamp();
      Timestamp adjusted =
          Timestamp.valueOf(formatter.format(new java.sql.Timestamp(original.toEpochMilli())));
      // Formatting drops sub-millisecond precision; restore the original nanos.
      adjusted.setNanos(original.getNanos());
      timestampWritable.set(adjusted);
      return timestampWritable;
    }
    default:
      // Should never happen: initialize() restricts the input categories.
      throw new IllegalStateException("Unexpected type in evaluating datetime_legacy_hybrid_calendar: " + inputOI.getPrimitiveCategory());
  }
}
Usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
Class GenericUDFCurrentTimestamp, method initialize:
/**
 * Validates that CURRENT_TIMESTAMP is called with no arguments and caches the
 * session's query timestamp (rendered in the session-local time zone) so every
 * evaluation within the query observes the same value.
 *
 * @throws UDFArgumentLengthException if any arguments are supplied
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 0) {
    throw new UDFArgumentLengthException("The function CURRENT_TIMESTAMP does not take any arguments, but found " + arguments.length);
  }
  if (currentTimestamp == null) {
    // Compute once per query; subsequent calls reuse the cached writable.
    SessionState session = SessionState.get();
    ZonedDateTime zoned =
        session.getQueryCurrentTimestamp().atZone(session.getConf().getLocalTimeZone());
    currentTimestamp =
        new TimestampWritableV2(Timestamp.valueOf(zoned.toLocalDateTime().toString()));
  }
  return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
}
Usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project.
Class GenericUDFDate, method evaluate:
/**
 * TO_DATE: extracts the date portion of the first argument.
 *
 * @param arguments arg 0 = STRING/TIMESTAMP/TIMESTAMPLOCALTZ/DATE value
 * @return a DateWritableV2 with the extracted date (reused instance), or null
 *         for null input or an unparseable string
 * @throws UDFArgumentException for VOID or any other unsupported input type
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0].get() == null) {
    return null;
  }
  switch (inputType) {
    case VOID:
      throw new UDFArgumentException("TO_DATE() received non-null object of VOID type");
    case STRING:
      String dateStr = textConverter.convert(arguments[0].get()).toString();
      if (!DateParser.parseDate(dateStr, date)) {
        return null;
      }
      output.set(date);
      break;
    case TIMESTAMP:
      Timestamp ts =
          ((TimestampWritableV2) timestampConverter.convert(arguments[0].get())).getTimestamp();
      output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
      break;
    case TIMESTAMPLOCALTZ: // the converter already projects local-tz timestamps to a date
    case DATE:
      output.set((DateWritableV2) dateWritableConverter.convert(arguments[0].get()));
      break;
    default:
      throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
  }
  return output;
}
Aggregations