Search in sources :

Example 76 with Timestamp

Use of org.apache.hadoop.hive.common.type.Timestamp in the Apache Hive project.

Defined in class TestDateTimeMath, method checkTimestampIntervalYearMonthArithmetic.

/**
 * Verifies Timestamp +/- HiveIntervalYearMonth arithmetic against an expected result.
 *
 * @param left          timestamp operand as a string, or null for a SQL NULL operand
 * @param operationType '+' or '-'; any other character is rejected
 * @param right         year-month interval operand as a string, or null for SQL NULL
 * @param expected      expected timestamp result as a string, or null when NULL is expected
 * @throws Exception if parsing or the arithmetic call fails
 */
private static void checkTimestampIntervalYearMonthArithmetic(String left, char operationType, String right, String expected) throws Exception {
    // Parse each operand; a null input string stands for a SQL NULL value.
    Timestamp tsOperand = (left == null) ? null : Timestamp.valueOf(left);
    HiveIntervalYearMonth intervalOperand = (right == null) ? null : HiveIntervalYearMonth.valueOf(right);
    Timestamp expectedTs = (expected == null) ? null : Timestamp.valueOf(expected);
    DateTimeMath math = new DateTimeMath();
    Timestamp actual;
    if (operationType == '+') {
        actual = math.add(tsOperand, intervalOperand);
    } else if (operationType == '-') {
        actual = math.subtract(tsOperand, intervalOperand);
    } else {
        throw new IllegalArgumentException("Invalid operation " + operationType);
    }
    assertEquals(String.format("%s %s %s", tsOperand, operationType, intervalOperand), expectedTs, actual);
}
Also used : DateTimeMath(org.apache.hadoop.hive.ql.util.DateTimeMath) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) Timestamp(org.apache.hadoop.hive.common.type.Timestamp)

Example 77 with Timestamp

use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

Defined in class GenericUDF, method getTimestampValue.

/**
 * Extracts a Hive Timestamp from the i-th deferred argument using the matching converter.
 *
 * @param arguments  deferred UDF arguments
 * @param i          index of the argument to read
 * @param converters per-argument converters producing TimestampWritableV2 values
 * @return the converted timestamp, or null when the argument is SQL NULL or unparseable
 * @throws HiveException if evaluating the deferred argument fails
 */
protected Timestamp getTimestampValue(DeferredObject[] arguments, int i, Converter[] converters) throws HiveException {
    Object raw = arguments[i].get();
    if (raw == null) {
        return null;
    }
    Object converted = converters[i].convert(raw);
    // A string the converter cannot parse comes back as null.
    if (converted == null) {
        return null;
    }
    return ((TimestampWritableV2) converted).getTimestamp();
}
Also used : Timestamp(org.apache.hadoop.hive.common.type.Timestamp) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2)

Example 78 with Timestamp

use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

Defined in class GenericUDFAddMonths, method evaluate.

@Override
/**
 * Adds N months to the first argument (a timestamp or a date) and returns the
 * formatted result, or null when either operand is null or unparseable.
 *
 * @param arguments argument 0 is the date/timestamp, argument 1 the month count
 * @return the formatted result writable, or null
 * @throws HiveException if evaluating a deferred argument fails
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // The month count may have been folded to a constant at init time;
    // otherwise read it from argument 1 on every call.
    Integer months = isNumMonthsConst ? numMonthsConst : getIntValue(arguments, 1, tsConverters);
    if (months == null) {
        return null;
    }
    int monthDelta = months.intValue();
    // Both short-date and full-timestamp inputs are accepted; the time part
    // of a timestamp must not be dropped.
    Timestamp ts = getTimestampValue(arguments, 0, tsConverters);
    if (ts != null) {
        addMonth(ts, monthDelta);
    } else {
        Date dt = getDateValue(arguments, 0, dtConverters);
        if (dt == null) {
            return null;
        }
        addMonth(dt, monthDelta);
    }
    // NOTE(review): the result is read from the shared `calendar` field, which
    // addMonth(...) is presumed to populate — confirm against that helper.
    output.set(formatter.format(calendar.getTime()));
    return output;
}
Also used : Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Date(org.apache.hadoop.hive.common.type.Date)

Example 79 with Timestamp

use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

Defined in class GenericUDFMonthsBetween, method evaluate.

/**
 * Computes the number of months between two date/timestamp arguments.
 * Returns a whole-month count when both operands share the day-of-month or
 * both fall on the last day of their month; otherwise a fractional value
 * that includes the day/time difference. Returns null if either operand is null.
 *
 * @param arguments argument 0 and argument 1 are the two dates/timestamps
 * @return the result writable, or null
 * @throws HiveException if evaluating a deferred argument fails
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Both short-date and full-timestamp inputs are accepted; the time part
    // of a timestamp contributes to the fractional result.
    Timestamp ts1 = getTimestampValue(arguments, 0, tsConverters);
    if (ts1 == null) {
        Date d = getDateValue(arguments, 0, dtConverters);
        if (d == null) {
            return null;
        }
        ts1 = Timestamp.ofEpochMilli(d.toEpochMilli());
    }
    Timestamp ts2 = getTimestampValue(arguments, 1, tsConverters);
    if (ts2 == null) {
        Date d = getDateValue(arguments, 1, dtConverters);
        if (d == null) {
            return null;
        }
        ts2 = Timestamp.ofEpochMilli(d.toEpochMilli());
    }
    cal1.setTimeInMillis(ts1.toEpochMilli());
    cal2.setTimeInMillis(ts2.toEpochMilli());
    int wholeMonths = (cal1.get(YEAR) - cal2.get(YEAR)) * 12 + (cal1.get(MONTH) - cal2.get(MONTH));
    // Day/time parts are skipped when both dates share the day-of-month or
    // both are the last day of their respective months.
    boolean sameDayOfMonth = cal1.get(DATE) == cal2.get(DATE);
    boolean bothMonthEnds = cal1.get(DATE) == cal1.getActualMaximum(DATE) && cal2.get(DATE) == cal2.getActualMaximum(DATE);
    if (sameDayOfMonth || bothMonthEnds) {
        output.set(wholeMonths);
        return output;
    }
    // 1 second is 1/2678400 of a month (a month is taken as 31 days);
    // no adjustment is made for leap seconds.
    double monthsBetween = wholeMonths + (getDayPartInSec(cal1) - getDayPartInSec(cal2)) / 2678400D;
    if (isRoundOffNeeded) {
        // Round the double result to 8 decimal places.
        monthsBetween = BigDecimal.valueOf(monthsBetween).setScale(8, ROUND_HALF_UP).doubleValue();
    }
    output.set(monthsBetween);
    return output;
}
Also used : Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Date(org.apache.hadoop.hive.common.type.Date)

Example 80 with Timestamp

use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

Defined in class TestParquetTimestampsHive2Compatibility, method testWriteHive4ReadHive4UsingLegacyConversion.

/**
 * Tests that timestamps written with Hive4 APIs round-trip unchanged through
 * the Hive4 reader when legacy conversion is enabled.
 *
 * @param timestampString the timestamp under test, supplied by generateTimestamps
 */
@ParameterizedTest(name = "{0}")
@MethodSource("generateTimestamps")
void testWriteHive4ReadHive4UsingLegacyConversion(String timestampString) {
    final String zone = "US/Pacific";
    NanoTime written = writeHive4(timestampString, zone, true);
    Timestamp roundTripped = readHive4(written, zone, true);
    assertEquals(timestampString, roundTripped.toString());
}
Also used : NanoTime(org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) MethodSource(org.junit.jupiter.params.provider.MethodSource)

Aggregations

Timestamp (org.apache.hadoop.hive.common.type.Timestamp)116 Test (org.junit.Test)36 TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2)32 Date (org.apache.hadoop.hive.common.type.Date)27 BytesWritable (org.apache.hadoop.io.BytesWritable)25 LongWritable (org.apache.hadoop.io.LongWritable)25 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)24 Text (org.apache.hadoop.io.Text)22 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)21 IntWritable (org.apache.hadoop.io.IntWritable)21 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)20 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)20 BooleanWritable (org.apache.hadoop.io.BooleanWritable)19 FloatWritable (org.apache.hadoop.io.FloatWritable)19 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)18 ArrayList (java.util.ArrayList)17 DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2)17 HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime)16 List (java.util.List)15 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)12