
Example 51 with Date

use of org.apache.hadoop.hive.common.type.Date in project hive by apache.

the class MultiValueBoundaryScanner, method isDistanceGreater.

@Override
public boolean isDistanceGreater(Object v1, Object v2, int amt) {
    Date l1 = PrimitiveObjectInspectorUtils.getDate(v1, (PrimitiveObjectInspector) expressionDef.getOI());
    Date l2 = PrimitiveObjectInspectorUtils.getDate(v2, (PrimitiveObjectInspector) expressionDef.getOI());
    if (l1 != null && l2 != null) {
        // Compare the difference between the two dates, converted to seconds, against amt days expressed in seconds
        return (double) (l1.toEpochMilli() - l2.toEpochMilli()) / 1000 > (long) amt * 24 * 3600;
    }
    // True if only one date is null
    return l1 != l2;
}
Also used: Date(org.apache.hadoop.hive.common.type.Date)
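
For context, here is a minimal standalone sketch (not part of the Hive source) of the same comparison: two dates count as more than amt days apart when their epoch-millisecond difference, converted to seconds, exceeds amt * 24 * 3600. It assumes only Date.valueOf and toEpochMilli, both of which appear in the examples on this page.

import org.apache.hadoop.hive.common.type.Date;

public class DateDistanceSketch {

    // Hypothetical helper mirroring the non-null branch of isDistanceGreater above.
    static boolean isDistanceGreater(Date d1, Date d2, int amt) {
        return (double) (d1.toEpochMilli() - d2.toEpochMilli()) / 1000 > (long) amt * 24 * 3600;
    }

    public static void main(String[] args) {
        // The two dates are exactly 5 days apart.
        Date newer = Date.valueOf("2015-03-25");
        Date older = Date.valueOf("2015-03-20");
        System.out.println(isDistanceGreater(newer, older, 4)); // true: 5 days > 4 days
        System.out.println(isDistanceGreater(newer, older, 5)); // false: 5 days is not greater than 5 days
    }
}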

Example 52 with Date

use of org.apache.hadoop.hive.common.type.Date in project hive by apache.

the class TestGenericUDFGreatest, method testGreatestDate.

@Test
public void testGreatestDate() throws HiveException {
    GenericUDFGreatest udf = new GenericUDFGreatest();
    ObjectInspector[] arguments = new ObjectInspector[3];
    for (int i = 0; i < arguments.length; i++) {
        arguments[i] = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    }
    udf.initialize(arguments);
    Date d1 = Date.valueOf("2015-03-20");
    Date d2 = Date.valueOf("2015-03-21");
    Date d3 = Date.valueOf("2014-03-20");
    runAndVerify(new Date[] { d1, d2, d3 }, d2, udf);
    runAndVerify(new Date[] { null, d2, d3 }, null, udf);
    runAndVerify(new Date[] { d1, null, d3 }, null, udf);
    runAndVerify(new Date[] { d1, d2, null }, null, udf);
    runAndVerify(new Date[] { null, null, null }, null, udf);
}
Also used: ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), Date(org.apache.hadoop.hive.common.type.Date), Test(org.junit.Test)
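
The helper runAndVerify is not shown here. As a hedged sketch (not taken from the Hive test suite), the initialized UDF could be exercised directly like this, assuming the standard GenericUDF contract of initialize followed by evaluate over DeferredJavaObject arguments, with each Date wrapped in a DateWritableV2 to match the writable date object inspector used above.

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFGreatest;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class GreatestDateSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFGreatest udf = new GenericUDFGreatest();
        ObjectInspector dateOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
        udf.initialize(new ObjectInspector[] { dateOI, dateOI, dateOI });

        // Same inputs as the first runAndVerify call above.
        DeferredObject[] row = new DeferredObject[] {
            new DeferredJavaObject(new DateWritableV2(Date.valueOf("2015-03-20"))),
            new DeferredJavaObject(new DateWritableV2(Date.valueOf("2015-03-21"))),
            new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-03-20")))
        };
        // Expected to print the writable for 2015-03-21, the greatest of the three dates.
        System.out.println(udf.evaluate(row));
    }
}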

Example 53 with Date

use of org.apache.hadoop.hive.common.type.Date in project hive by apache.

the class VectorUDFDateAddColScalar, method evaluateString.

protected void evaluateString(ColumnVector columnVector, LongColumnVector outputVector, int i) {
    BytesColumnVector bcv = (BytesColumnVector) columnVector;
    text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
    Date hDate = DateParser.parseDate(text.toString());
    if (hDate == null) {
        outputVector.noNulls = false;
        outputVector.isNull[i] = true;
        return;
    }
    long days = DateWritableV2.millisToDays(hDate.toEpochMilli());
    if (isPositive) {
        days += numDays;
    } else {
        days -= numDays;
    }
    outputVector.vector[i] = days;
}
Also used: BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector), Date(org.apache.hadoop.hive.common.type.Date)
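
Stripped of the vectorization plumbing, the arithmetic is: parse the string into a Date, convert its epoch milliseconds to days since the epoch, then add or subtract numDays. A minimal sketch of just that step (not the vectorized code path; Date.valueOf is used here instead of DateParser purely for brevity):

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateAddScalarSketch {

    // Hypothetical helper: the result of date_add/date_sub expressed as days since the epoch.
    static long addDays(String dateString, long numDays, boolean isPositive) {
        Date d = Date.valueOf(dateString);
        long days = DateWritableV2.millisToDays(d.toEpochMilli());
        return isPositive ? days + numDays : days - numDays;
    }

    public static void main(String[] args) {
        // 1970-01-11 is 10 days after the epoch, so adding 5 days yields 15.
        System.out.println(addDays("1970-01-11", 5, true));  // 15
        System.out.println(addDays("1970-01-11", 5, false)); // 5
    }
}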

Example 54 with Date

use of org.apache.hadoop.hive.common.type.Date in project hive by apache.

the class VectorUDFDateAddColCol, method evaluateRepeatedString.

protected void evaluateRepeatedString(BytesColumnVector inputColumnVector1, long[] vector2, LongColumnVector outputVector, boolean selectedInUse, int[] selected, int n) {
    // For a repeating input vector, entry 0 stands for every row in the batch.
    if (inputColumnVector1.isNull[0]) {
        outputVector.noNulls = false;
        outputVector.isNull[0] = true;
        outputVector.isRepeating = true;
        return;
    }
    text.set(inputColumnVector1.vector[0], inputColumnVector1.start[0], inputColumnVector1.length[0]);
    Date date = DateParser.parseDate(text.toString());
    if (date == null) {
        outputVector.noNulls = false;
        outputVector.isNull[0] = true;
        outputVector.isRepeating = true;
        return;
    }
    long days = DateWritableV2.millisToDays(date.toEpochMilli());
    evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n);
}
Also used: Date(org.apache.hadoop.hive.common.type.Date)
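
The reason only index 0 is inspected is the isRepeating convention: when a ColumnVector is marked repeating, entry 0 stands for every row, so a null or unparsable value there makes the whole output column a repeating null. A small standalone sketch of that null-propagation pattern (not the Hive code itself; it assumes only the public isNull, noNulls, and isRepeating fields shown in the snippet above):

import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

public class RepeatingNullSketch {
    public static void main(String[] args) {
        BytesColumnVector input = new BytesColumnVector();
        input.isRepeating = true;
        input.noNulls = false;
        input.isNull[0] = true; // one flag covers all rows of the batch

        LongColumnVector output = new LongColumnVector();
        // Mirror of the early-return branch in evaluateRepeatedString above.
        if (input.isRepeating && input.isNull[0]) {
            output.noNulls = false;
            output.isNull[0] = true;
            output.isRepeating = true;
        }
        System.out.println(output.isRepeating && output.isNull[0]); // true
    }
}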

Example 55 with Date

use of org.apache.hadoop.hive.common.type.Date in project hive by apache.

the class VectorUDFDateAddColCol, method evaluateString.

protected void evaluateString(BytesColumnVector inputColumnVector1, LongColumnVector outputVector, int index, long numDays) {
    if (inputColumnVector1.isNull[index]) {
        outputVector.noNulls = false;
        outputVector.isNull[index] = true;
    } else {
        text.set(inputColumnVector1.vector[index], inputColumnVector1.start[index], inputColumnVector1.length[index]);
        Date hDate = DateParser.parseDate(text.toString());
        if (hDate == null) {
            outputVector.noNulls = false;
            outputVector.isNull[index] = true;
            return;
        }
        long days = DateWritableV2.millisToDays(hDate.toEpochMilli());
        if (isPositive) {
            days += numDays;
        } else {
            days -= numDays;
        }
        outputVector.vector[index] = days;
    }
}
Also used: Date(org.apache.hadoop.hive.common.type.Date)

Aggregations

Date (org.apache.hadoop.hive.common.type.Date): 71
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 26
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 21
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 18
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 18
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 17
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 15
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 14
Text (org.apache.hadoop.io.Text): 14
Test (org.junit.Test): 14
HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth): 13
BytesWritable (org.apache.hadoop.io.BytesWritable): 12
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 11
TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2): 11
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 11
List (java.util.List): 10
LongWritable (org.apache.hadoop.io.LongWritable): 10
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 9
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 9
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 9