
Example 16 with TimestampWritableV2

Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.

From the class GenericUDFFromUtcTimestamp, the evaluate method:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object o0 = arguments[0].get();
    if (o0 == null) {
        return null;
    }
    Object o1 = arguments[1].get();
    if (o1 == null) {
        return null;
    }
    Object converted_o0 = timestampConverter.convert(o0);
    if (converted_o0 == null) {
        return null;
    }
    Timestamp inputTs = ((TimestampWritableV2) converted_o0).getTimestamp();
    String tzStr = textConverter.convert(o1).toString();
    TimeZone timezone = TimeZone.getTimeZone(tzStr);
    TimeZone fromTz;
    TimeZone toTz;
    if (invert()) {
        fromTz = timezone;
        toTz = tzUTC;
    } else {
        fromTz = tzUTC;
        toTz = timezone;
    }
    // inputTs is the year/month/day/hour/minute/second in the local timezone.
    // For this UDF we want it in the timezone represented by fromTz
    TimestampTZ fromTs = TimestampTZUtil.parse(inputTs.toString(), fromTz.toZoneId());
    if (fromTs == null) {
        return null;
    }
    // Now output this timestamp's millis value to the equivalent toTz.
    Timestamp result = Timestamp.valueOf(fromTs.getZonedDateTime().withZoneSameInstant(toTz.toZoneId()).toLocalDateTime().toString());
    return result;
}
Also used: TimestampTZ (org.apache.hadoop.hive.common.type.TimestampTZ), TimeZone (java.util.TimeZone), Timestamp (org.apache.hadoop.hive.common.type.Timestamp), TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2)
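
The following is not part of the Hive sources; it is a minimal test-style sketch of how the evaluate method above is typically driven. The Hive classes and object inspectors are real, but the sketch class name and the input values are illustrative assumptions.

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

// Illustrative sketch class name; values chosen only to make the conversion easy to check.
public class FromUtcTimestampSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFFromUtcTimestamp udf = new GenericUDFFromUtcTimestamp();
        // Argument 0: the timestamp (interpreted as UTC), argument 1: the target time zone name.
        ObjectInspector[] argOIs = {
            PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
            PrimitiveObjectInspectorFactory.writableStringObjectInspector
        };
        udf.initialize(argOIs);
        DeferredObject[] input = {
            new DeferredJavaObject(new TimestampWritableV2(Timestamp.valueOf("2021-03-01 08:00:00"))),
            new DeferredJavaObject(new Text("America/Los_Angeles"))
        };
        // 08:00 UTC is 00:00 in America/Los_Angeles (UTC-8 before the 2021 DST switch).
        TimestampWritableV2 result = (TimestampWritableV2) udf.evaluate(input);
        System.out.println(result); // expected: 2021-03-01 00:00:00
    }
}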

Example 17 with TimestampWritableV2

Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.

From the class GenericUDFDateAdd, the evaluate method:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0].get() == null) {
        return null;
    }
    Object daysWritableObject = daysConverter.convert(arguments[1].get());
    if (daysWritableObject == null) {
        return null;
    }
    int toBeAdded;
    if (daysWritableObject instanceof ByteWritable) {
        toBeAdded = ((ByteWritable) daysWritableObject).get();
    } else if (daysWritableObject instanceof ShortWritable) {
        toBeAdded = ((ShortWritable) daysWritableObject).get();
    } else if (daysWritableObject instanceof IntWritable) {
        toBeAdded = ((IntWritable) daysWritableObject).get();
    } else {
        return null;
    }
    // Convert the first param into a DateWritableV2 value
    switch(inputType1) {
        case STRING:
            String dateString = dateConverter.convert(arguments[0].get()).toString();
            if (DateParser.parseDate(dateString, dateVal)) {
                output.set(dateVal);
            } else {
                return null;
            }
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampWritableV2) dateConverter.convert(arguments[0].get())).getTimestamp();
            output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
            break;
        case DATE:
            DateWritableV2 dw = (DateWritableV2) dateConverter.convert(arguments[0].get());
            output.set(dw.getDays());
            break;
        default:
            throw new UDFArgumentException("DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
    }
    int newDays = output.getDays() + (signModifier * toBeAdded);
    output.set(newDays);
    return output;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), Timestamp (org.apache.hadoop.hive.common.type.Timestamp), TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2), IntWritable (org.apache.hadoop.io.IntWritable)
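
Again a sketch rather than Hive source: the snippet below drives the method above through the STRING branch of the switch, with an IntWritable supplying the day offset. The sketch class name and input values are assumptions.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

// Illustrative sketch: date_add('2021-12-30', 3) should cross the year boundary.
public class DateAddSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFDateAdd udf = new GenericUDFDateAdd();
        ObjectInspector[] argOIs = {
            PrimitiveObjectInspectorFactory.writableStringObjectInspector,
            PrimitiveObjectInspectorFactory.writableIntObjectInspector
        };
        udf.initialize(argOIs);
        DeferredObject[] input = {
            new DeferredJavaObject(new Text("2021-12-30")), // STRING case: parsed by DateParser
            new DeferredJavaObject(new IntWritable(3))      // IntWritable branch of toBeAdded
        };
        DateWritableV2 result = (DateWritableV2) udf.evaluate(input);
        System.out.println(result); // expected: 2022-01-02
    }
}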

Example 18 with TimestampWritableV2

Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.

From the class GenericUDFDatetimeLegacyHybridCalendar, the evaluate method:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object input = arguments[0].get();
    if (input == null) {
        return null;
    }
    input = converter.convert(input);
    switch(resultOI.getPrimitiveCategory()) {
        case DATE:
            Date date = ((DateWritableV2) input).get();
            java.sql.Date oldDate = new java.sql.Date(date.toEpochMilli());
            dateWritable.set(Date.valueOf(formatter.format(oldDate)));
            return dateWritable;
        case TIMESTAMP:
            Timestamp timestamp = ((TimestampWritableV2) input).getTimestamp();
            Timestamp adjustedTimestamp = Timestamp.valueOf(formatter.format(new java.sql.Timestamp(timestamp.toEpochMilli())));
            adjustedTimestamp.setNanos(timestamp.getNanos());
            timestampWritable.set(adjustedTimestamp);
            return timestampWritable;
        default:
            // Should never happen.
            throw new IllegalStateException("Unexpected type in evaluating datetime_legacy_hybrid_calendar: " + inputOI.getPrimitiveCategory());
    }
}
Also used: DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2), Timestamp (org.apache.hadoop.hive.common.type.Timestamp), VectorUDFDatetimeLegacyHybridCalendarTimestamp (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDatetimeLegacyHybridCalendarTimestamp), TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2), Date (org.apache.hadoop.hive.common.type.Date), VectorUDFDatetimeLegacyHybridCalendarDate (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDatetimeLegacyHybridCalendarDate)
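
The sketch below (not Hive source; the class name and input value are assumptions) feeds a pre-Gregorian-cutover timestamp through the method above. Values on or after 1582-10-15 pass through unchanged, while older values shift because the proleptic and hybrid Julian/Gregorian calendars disagree there; the exact shifted value is best taken from the real Hive tests rather than asserted here.

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDatetimeLegacyHybridCalendar;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Illustrative sketch class name; the input timestamp is an arbitrary pre-cutover value.
public class DatetimeLegacyHybridCalendarSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFDatetimeLegacyHybridCalendar udf = new GenericUDFDatetimeLegacyHybridCalendar();
        ObjectInspector[] argOIs = {
            PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
        };
        udf.initialize(argOIs);
        DeferredObject[] input = {
            new DeferredJavaObject(new TimestampWritableV2(Timestamp.valueOf("1500-03-10 00:00:00")))
        };
        // The TIMESTAMP branch above reformats the value with the legacy calendar and keeps the nanos.
        TimestampWritableV2 result = (TimestampWritableV2) udf.evaluate(input);
        System.out.println(result); // shifted wall-clock value for this pre-1582 date
    }
}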

Example 19 with TimestampWritableV2

Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.

From the class GenericUDFCurrentTimestamp, the initialize method:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 0) {
        throw new UDFArgumentLengthException("The function CURRENT_TIMESTAMP does not take any arguments, but found " + arguments.length);
    }
    if (currentTimestamp == null) {
        SessionState ss = SessionState.get();
        ZonedDateTime dateTime = ss.getQueryCurrentTimestamp().atZone(ss.getConf().getLocalTimeZone());
        currentTimestamp = new TimestampWritableV2(Timestamp.valueOf(dateTime.toLocalDateTime().toString()));
    }
    return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
}
Also used: SessionState (org.apache.hadoop.hive.ql.session.SessionState), ZonedDateTime (java.time.ZonedDateTime), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2)
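
Since initialize() above depends on a live SessionState, the sketch below uses plain java.time (no Hive dependencies) to illustrate the same conversion: the query start instant is rendered in the session time zone, and the resulting local wall-clock value is what gets wrapped in the cached TimestampWritableV2. The zone and variable names are illustrative stand-ins.

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class CurrentTimestampSketch {
    public static void main(String[] args) {
        Instant queryStart = Instant.now();                    // stands in for ss.getQueryCurrentTimestamp()
        ZoneId sessionZone = ZoneId.of("America/Los_Angeles"); // stands in for ss.getConf().getLocalTimeZone()
        ZonedDateTime dateTime = queryStart.atZone(sessionZone);
        // The UDF caches this local wall-clock value, so every row of the query sees the same CURRENT_TIMESTAMP.
        System.out.println(dateTime.toLocalDateTime());
    }
}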

Example 20 with TimestampWritableV2

Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.

From the class GenericUDFDate, the evaluate method:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0].get() == null) {
        return null;
    }
    switch(inputType) {
        case VOID:
            throw new UDFArgumentException("TO_DATE() received non-null object of VOID type");
        case STRING:
            String dateString = textConverter.convert(arguments[0].get()).toString();
            if (DateParser.parseDate(dateString, date)) {
                output.set(date);
            } else {
                return null;
            }
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampWritableV2) timestampConverter.convert(arguments[0].get())).getTimestamp();
            output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
            break;
        case TIMESTAMPLOCALTZ:
        case DATE:
            DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
            output.set(dw);
            break;
        default:
            throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
    }
    return output;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2), VectorUDFDateString (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateString), VectorUDFDateTimestamp (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateTimestamp), Timestamp (org.apache.hadoop.hive.common.type.Timestamp), TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2)
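
One more sketch (not Hive source; the class name and values are assumptions), this time exercising the TIMESTAMP branch of the switch above, which truncates the value via DateWritableV2.millisToDays:

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Illustrative sketch: to_date(timestamp '2021-07-15 13:45:00') drops the time-of-day part.
public class ToDateSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFDate udf = new GenericUDFDate();
        ObjectInspector[] argOIs = {
            PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
        };
        udf.initialize(argOIs);
        DeferredObject[] input = {
            new DeferredJavaObject(new TimestampWritableV2(Timestamp.valueOf("2021-07-15 13:45:00")))
        };
        DateWritableV2 result = (DateWritableV2) udf.evaluate(input);
        System.out.println(result); // expected: 2021-07-15
    }
}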

Aggregations

TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2): 99
IntWritable (org.apache.hadoop.io.IntWritable): 44
Test (org.junit.Test): 42
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 36
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 36
BytesWritable (org.apache.hadoop.io.BytesWritable): 31
Text (org.apache.hadoop.io.Text): 31
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 30
LongWritable (org.apache.hadoop.io.LongWritable): 30
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 26
FloatWritable (org.apache.hadoop.io.FloatWritable): 26
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 25
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 21
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 21
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 21
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 20
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 20
HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable): 19
HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 18
ArrayList (java.util.ArrayList): 16