Usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project.
From the class GenericUDFDatetimeLegacyHybridCalendar, the method evaluate:
/**
 * Evaluates the UDF: converts the incoming DATE or TIMESTAMP value through the
 * legacy formatter so that dates rendered under the hybrid Julian/Gregorian
 * calendar map onto the proleptic Gregorian calendar.
 *
 * @param arguments deferred UDF arguments; arguments[0] holds the datetime input
 * @return the adjusted {@code dateWritable} or {@code timestampWritable}, or null for null input
 * @throws HiveException if argument evaluation fails
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object value = arguments[0].get();
  if (value == null) {
    return null;
  }
  value = converter.convert(value);
  switch (resultOI.getPrimitiveCategory()) {
    case DATE: {
      Date hybridDate = ((DateWritableV2) value).get();
      java.sql.Date legacyDate = new java.sql.Date(hybridDate.toEpochMilli());
      dateWritable.set(Date.valueOf(formatter.format(legacyDate)));
      return dateWritable;
    }
    case TIMESTAMP: {
      Timestamp hybridTs = ((TimestampWritableV2) value).getTimestamp();
      java.sql.Timestamp legacyTs = new java.sql.Timestamp(hybridTs.toEpochMilli());
      Timestamp shifted = Timestamp.valueOf(formatter.format(legacyTs));
      // Formatting only carries second precision; restore the original nanos.
      shifted.setNanos(hybridTs.getNanos());
      timestampWritable.set(shifted);
      return timestampWritable;
    }
    default:
      // Should never happen.
      throw new IllegalStateException("Unexpected type in evaluating datetime_legacy_hybrid_calendar: " + inputOI.getPrimitiveCategory());
  }
}
Usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project.
From the class TruncDateFromTimestamp, the method truncDate:
/**
 * Reads the timestamp at row {@code i} from the input column vector, converts
 * it to a Date via its epoch-millisecond value, and delegates to processDate
 * to write the truncated result into the output column.
 */
protected void truncDate(ColumnVector inV, BytesColumnVector outV, int i) {
  long epochMillis = ((TimestampColumnVector) inV).getTime(i);
  processDate(outV, i, Date.ofEpochMilli(epochMillis));
}
Usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project.
From the class VectorUDFDatetimeLegacyHybridCalendarDate, the method func:
/**
 * Adjusts the epoch-day value at row {@code i} from the hybrid
 * Julian/Gregorian calendar to the proleptic Gregorian calendar by
 * round-tripping it through the legacy thread-local SimpleDateFormat.
 */
protected void func(LongColumnVector outputColVector, LongColumnVector inputColVector, int i) {
  // Interpret the stored long as days since epoch and get its millisecond value.
  long epochMillis = Date.ofEpochDay((int) inputColVector.vector[i]).toEpochMilli();
  // Formatting with the legacy formatter performs the calendar shift.
  String reformatted = SIMPLE_DATE_FORMAT_THREAD_LOCAL.get().format(new java.sql.Date(epochMillis));
  outputColVector.vector[i] = Date.valueOf(reformatted).toEpochDay();
}
Usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project.
From the class GenericUDFTrunc, the method evaluateDate:
/**
 * Evaluates trunc() for date-typed first arguments: parses the input
 * (STRING, TIMESTAMP, or DATE) into a Date, truncates it to the unit named by
 * the second argument, and returns the result as text.
 *
 * @param arguments exactly two deferred arguments: the date value and the format/unit string
 * @return the writable text output holding the truncated date, or null if either
 *         argument is null or the date string cannot be parsed
 * @throws UDFArgumentLengthException if not exactly 2 arguments were supplied
 * @throws UDFArgumentTypeException if the first argument is not STRING/TIMESTAMP/DATE
 */
private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLengthException, HiveException, UDFArgumentTypeException, UDFArgumentException {
if (arguments.length != 2) {
throw new UDFArgumentLengthException("trunc() requires 2 argument, got " + arguments.length);
}
// SQL semantics: any null argument yields a null result.
if (arguments[0].get() == null || arguments[1].get() == null) {
return null;
}
// textConverter2 is null when the format argument is a constant resolved at
// initialize time; otherwise re-read the unit string on every call.
if (textConverter2 != null) {
fmtInput = textConverter2.convert(arguments[1].get()).toString();
}
Date d;
switch(inputType1) {
case STRING:
String dateString = textConverter1.convert(arguments[0].get()).toString();
d = DateParser.parseDate(dateString);
// Unparseable date strings produce null rather than an error.
if (d == null) {
return null;
}
break;
case TIMESTAMP:
Timestamp ts = ((TimestampWritableV2) timestampConverter.convert(arguments[0].get())).getTimestamp();
d = Date.ofEpochMilli(ts.toEpochMilli());
break;
case DATE:
DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
d = dw.get();
break;
default:
throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
}
// NOTE(review): evalDate appears to truncate d per fmtInput and store the
// result in the `date` field read below — confirm; a null return signals an
// unsupported/invalid format unit.
if (evalDate(d) == null) {
return null;
}
output.set(date.toString());
return output;
}
Usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project.
From the class DateTimeMath, the method add:
/**
 * Adds a whole number of days to a date.
 *
 * @param dt the starting date, may be null
 * @param interval the number of days to add (may be negative)
 * @return a new Date shifted by {@code interval} days, or null when {@code dt} is null
 */
public Date add(Date dt, int interval) {
  if (dt == null) {
    return null;
  }
  // Work in epoch days; the input date is left untouched.
  Date shifted = new Date();
  shifted.setTimeInDays(dt.toEpochDay() + interval);
  return shifted;
}
Aggregations