Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache: class TestDateTimeMath, method checkTimestampIntervalYearMonthArithmetic.
/**
 * Applies {@code left <op> right} via DateTimeMath and asserts the result.
 *
 * @param left          timestamp operand as a string, or null for a null operand
 * @param operationType either '+' or '-'
 * @param right         year-month interval operand as a string, or null
 * @param expected      expected result timestamp as a string, or null
 * @throws IllegalArgumentException if operationType is not '+' or '-'
 */
private static void checkTimestampIntervalYearMonthArithmetic(String left, char operationType, String right, String expected) throws Exception {
  // Null input strings become null operands so null-propagation is exercised too.
  Timestamp leftTs = (left == null) ? null : Timestamp.valueOf(left);
  HiveIntervalYearMonth rightInterval = (right == null) ? null : HiveIntervalYearMonth.valueOf(right);
  Timestamp expectedResult = (expected == null) ? null : Timestamp.valueOf(expected);

  DateTimeMath dtm = new DateTimeMath();
  Timestamp testResult;
  if (operationType == '+') {
    testResult = dtm.add(leftTs, rightInterval);
  } else if (operationType == '-') {
    testResult = dtm.subtract(leftTs, rightInterval);
  } else {
    throw new IllegalArgumentException("Invalid operation " + operationType);
  }
  assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval), expectedResult, testResult);
}
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache: class GenericUDF, method getTimestampValue.
/**
 * Converts the i-th deferred argument to a Timestamp using the matching converter.
 *
 * @param arguments  the UDF's deferred arguments
 * @param i          index of the argument to convert
 * @param converters per-argument converters producing TimestampWritableV2
 * @return the converted Timestamp, or null if the argument is null or unparseable
 * @throws HiveException if evaluating the deferred argument fails
 */
protected Timestamp getTimestampValue(DeferredObject[] arguments, int i, Converter[] converters) throws HiveException {
  Object obj = arguments[i].get();
  if (obj == null) {
    return null;
  }
  // The converter signals an unparseable string by returning null.
  Object writableValue = converters[i].convert(obj);
  if (writableValue == null) {
    return null;
  }
  return ((TimestampWritableV2) writableValue).getTimestamp();
}
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache: class GenericUDFAddMonths, method evaluate.
/**
 * Evaluates add_months(date/timestamp, numMonths).
 *
 * <p>Accepts both short dates and full timestamps; for a timestamp input the
 * time-of-day part is preserved, not truncated. Returns null when either the
 * month count or the date/timestamp argument is null or unparseable.
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  // The month count is either the compile-time constant or argument 1.
  Integer months = isNumMonthsConst ? numMonthsConst : getIntValue(arguments, 1, tsConverters);
  if (months == null) {
    return null;
  }
  int numMonths = months.intValue();

  // Try argument 0 as a full timestamp first, then fall back to a short date.
  Timestamp ts = getTimestampValue(arguments, 0, tsConverters);
  if (ts != null) {
    addMonth(ts, numMonths);
  } else {
    Date date = getDateValue(arguments, 0, dtConverters);
    if (date == null) {
      return null;
    }
    addMonth(date, numMonths);
  }

  // addMonth leaves its result in the shared calendar field.
  output.set(formatter.format(calendar.getTime()));
  return output;
}
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache: class GenericUDFMonthsBetween, method evaluate.
/**
 * Evaluates months_between(arg0, arg1).
 *
 * <p>Both arguments may be short dates or full timestamps; the time part of a
 * timestamp is kept, not truncated. Returns null if either argument is null
 * or unparseable.
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Timestamp ts1 = resolveTimestamp(arguments, 0);
  if (ts1 == null) {
    return null;
  }
  Timestamp ts2 = resolveTimestamp(arguments, 1);
  if (ts2 == null) {
    return null;
  }
  cal1.setTimeInMillis(ts1.toEpochMilli());
  cal2.setTimeInMillis(ts2.toEpochMilli());

  int monthDiff = (cal1.get(YEAR) - cal2.get(YEAR)) * 12 + (cal1.get(MONTH) - cal2.get(MONTH));
  // A whole-month answer is returned when both dates fall on the same day of
  // the month, or both are the last day of their respective months.
  boolean sameDayOfMonth = cal1.get(DATE) == cal2.get(DATE);
  boolean bothMonthEnds = cal1.get(DATE) == cal1.getActualMaximum(DATE)
      && cal2.get(DATE) == cal2.getActualMaximum(DATE);
  if (sameDayOfMonth || bothMonthEnds) {
    output.set(monthDiff);
    return output;
  }

  int sec1 = getDayPartInSec(cal1);
  int sec2 = getDayPartInSec(cal2);
  // 1 sec is 0.000000373 months (1/2678400); a month is taken as 31 days.
  // No adjustment is made for leap seconds.
  double monthsBetween = monthDiff + (sec1 - sec2) / 2678400D;
  if (isRoundOffNeeded) {
    // Round to 8 decimal places, half up.
    monthsBetween = BigDecimal.valueOf(monthsBetween).setScale(8, ROUND_HALF_UP).doubleValue();
  }
  output.set(monthsBetween);
  return output;
}

/**
 * Resolves argument {@code i} to a Timestamp: tries a full timestamp first,
 * then falls back to a short date converted at midnight. Returns null when
 * the argument is null or unparseable in both forms.
 */
private Timestamp resolveTimestamp(DeferredObject[] arguments, int i) throws HiveException {
  Timestamp ts = getTimestampValue(arguments, i, tsConverters);
  if (ts != null) {
    return ts;
  }
  Date date = getDateValue(arguments, i, dtConverters);
  return (date == null) ? null : Timestamp.ofEpochMilli(date.toEpochMilli());
}
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache: class TestParquetTimestampsHive2Compatibility, method testWriteHive4ReadHive4UsingLegacyConversion.
/**
 * Verifies that a timestamp written with the Hive4 writer round-trips
 * unchanged through the Hive4 reader when legacy conversion is enabled.
 */
@ParameterizedTest(name = "{0}")
@MethodSource("generateTimestamps")
void testWriteHive4ReadHive4UsingLegacyConversion(String timestampString) {
  final String zoneId = "US/Pacific";
  NanoTime nanoTime = writeHive4(timestampString, zoneId, true);
  Timestamp roundTripped = readHive4(nanoTime, zoneId, true);
  assertEquals(timestampString, roundTripped.toString());
}
Aggregations