Use of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: class GenericUDFCastFormat, method convert.
/**
 * Converts {@code o} from the input type to the output type, going through a formatted
 * string representation where applicable.
 *
 * Flow: (1) unwrap the raw value according to the input category; (2) if the input is a
 * DATE/TIMESTAMP, render it to a string via {@code formatter}; (3) build the output
 * writable, parsing the string back into a DATE/TIMESTAMP when required.
 *
 * NOTE(review): for STRING/CHAR/VARCHAR inputs {@code formattedOutput} stays null, so the
 * STRING/CHAR/VARCHAR output branches would NPE for such inputs — presumably initialize()
 * rejects string-to-string combinations; confirm before relying on it.
 *
 * @param o the deferred input value (in the input OI's representation)
 * @return the converted value in the output OI's writable representation, or null when the
 *         formatter cannot format/parse the value
 * @throws HiveException if the input or output primitive category is unsupported
 */
private Object convert(Object o) throws HiveException {
Object input;
switch(inputOI.getPrimitiveCategory()) {
case STRING:
input = ((StringObjectInspector) inputOI).getPrimitiveJavaObject(o);
break;
case CHAR:
// getStrippedValue() drops the CHAR type's blank padding before formatting/parsing.
input = ((HiveCharObjectInspector) inputOI).getPrimitiveJavaObject(o).getStrippedValue();
break;
case VARCHAR:
input = ((HiveVarcharObjectInspector) inputOI).getPrimitiveJavaObject(o).toString();
break;
case TIMESTAMP:
input = ((TimestampObjectInspector) inputOI).getPrimitiveWritableObject(o).getTimestamp();
break;
case DATE:
input = ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get();
break;
default:
throw new HiveException("Input type " + inputOI.getPrimitiveCategory() + " not valid");
}
// format here
// Only temporal inputs are rendered through the formatter; string-family inputs keep
// their raw value in 'input' and are parsed directly in the output switch below.
Object formattedOutput = null;
if (inputOI.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE || inputOI.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP) {
if (inputOI.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE) {
try {
formattedOutput = formatter.format((Date) input);
} catch (IllegalArgumentException e) {
// Unformattable value maps to SQL NULL rather than a query failure.
return null;
}
} else {
try {
formattedOutput = formatter.format((Timestamp) input);
} catch (IllegalArgumentException e) {
return null;
}
}
if (formattedOutput == null) {
return null;
}
}
// parse and create Writables
switch(outputOI.getPrimitiveCategory()) {
case STRING:
return new Text((String) formattedOutput);
case CHAR:
// -1 length: defer truncation/padding to the settable OI's declared type parameters.
return ((SettableHiveCharObjectInspector) outputOI).create(new HiveChar((String) formattedOutput, -1));
case VARCHAR:
return ((SettableHiveVarcharObjectInspector) outputOI).create(new HiveVarchar((String) formattedOutput, -1));
case TIMESTAMP:
try {
// Note: parses the raw 'input' string, not 'formattedOutput' — string-family inputs
// are parsed directly with the cast format.
Timestamp t = formatter.parseTimestamp((String) input);
if (t == null) {
return null;
}
return ((SettableTimestampObjectInspector) outputOI).create(t);
} catch (IllegalArgumentException e) {
return null;
}
case DATE:
try {
Date d = formatter.parseDate((String) input);
if (d == null) {
return null;
}
return ((SettableDateObjectInspector) outputOI).create(d);
} catch (IllegalArgumentException e) {
return null;
}
default:
throw new HiveException("Output type " + outputOI.getPrimitiveCategory() + " not valid");
}
}
Use of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: class GenericUDFCurrentDate, method initialize.
/**
 * Validates that CURRENT_DATE takes no arguments and caches the query's current date
 * (computed once per query from the session's current timestamp in the session-local
 * time zone) in {@code currentDate}.
 *
 * @param arguments must be empty; CURRENT_DATE is zero-argument
 * @return the writable date object inspector
 * @throws UDFArgumentException if any arguments are supplied
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 0) {
    throw new UDFArgumentLengthException("The function CURRENT_DATE does not take any arguments, but found " + arguments.length);
  }
  if (currentDate == null) {
    SessionState ss = SessionState.get();
    ZonedDateTime dateTime = ss.getQueryCurrentTimestamp().atZone(ss.getConf().getLocalTimeZone());
    // Use toLocalDate() instead of dateTime.toString().substring(0, 10): the substring
    // relies on the lexical layout of ZonedDateTime.toString() (offset/zone suffix,
    // '+'-prefixed extended years), whereas toLocalDate().toString() is the ISO local
    // date itself.
    Date dateVal = Date.valueOf(dateTime.toLocalDate().toString());
    currentDate = new DateWritableV2(dateVal);
  }
  return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
}
Use of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: class GenericUDFLastDay, method evaluate.
/**
 * Evaluates the UDF for one row: resolves argument 0 to a Date and returns the
 * reusable {@code output} writable holding the result, or null for a SQL NULL input.
 *
 * @param arguments the deferred UDF arguments; index 0 is the date operand
 * @return the shared output writable, or null when the input is null
 * @throws HiveException if argument extraction fails
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  final Date inputDate = getDateValue(arguments, 0, converters);
  // NULL in, NULL out.
  if (inputDate == null) {
    return null;
  }
  // lastDay(...) updates the shared 'date' field; its string form is written into
  // the reusable 'output' writable to avoid per-row allocation.
  lastDay(inputDate);
  output.set(date.toString());
  return output;
}
Use of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: class DateTimeMath, method add.
/**
 * Adds a year-month interval to a date.
 *
 * @param interval the interval to add; may be null
 * @param dt the date operand; may be null
 * @return a new Date holding the sum, or null if either operand is null
 */
public Date add(HiveIntervalYearMonth interval, Date dt) {
  // SQL NULL semantics: any null operand yields a null result.
  if (interval == null || dt == null) {
    return null;
  }
  // Delegate to the in-place overload, writing into a fresh result object.
  Date result = new Date();
  add(interval, dt, result);
  return result;
}
Use of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: class DateTimeMath, method subtract.
/**
 * Subtracts a year-month interval from a date.
 *
 * @param left the date operand; may be null
 * @param right the interval to subtract; may be null
 * @return a new Date holding the difference, or null if either operand is null
 */
public Date subtract(Date left, HiveIntervalYearMonth right) {
  // SQL NULL semantics: any null operand yields a null result.
  if (left == null || right == null) {
    return null;
  }
  // Delegate to the in-place overload, writing into a fresh result object.
  Date result = new Date();
  subtract(left, right, result);
  return result;
}
Aggregations