Example usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: the isDistanceGreater method of the MultiValueBoundaryScanner class.
@Override
public boolean isDistanceGreater(Object v1, Object v2, int amt) {
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) expressionDef.getOI();
  Date first = PrimitiveObjectInspectorUtils.getDate(v1, oi);
  Date second = PrimitiveObjectInspectorUtils.getDate(v2, oi);
  if (first == null || second == null) {
    // Distance is considered "greater" only when exactly one of the two dates is null.
    return first != second;
  }
  // Compare the difference in seconds against amt expressed as days-in-seconds.
  double diffSeconds = (first.toEpochMilli() - second.toEpochMilli()) / 1000.0;
  return diffSeconds > (long) amt * 24 * 3600;
}
Example usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: the testGreatestDate method of the TestGenericUDFGreatest class.
@Test
public void testGreatestDate() throws HiveException {
  GenericUDFGreatest udf = new GenericUDFGreatest();
  // All three arguments are writable-date object inspectors.
  ObjectInspector[] arguments = new ObjectInspector[3];
  int slot = 0;
  while (slot < arguments.length) {
    arguments[slot++] = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  }
  udf.initialize(arguments);
  Date earlier = Date.valueOf("2015-03-20");
  Date latest = Date.valueOf("2015-03-21");
  Date earliest = Date.valueOf("2014-03-20");
  // With no nulls, GREATEST picks the maximum date.
  runAndVerify(new Date[] { earlier, latest, earliest }, latest, udf);
  // Any null argument makes the result null.
  runAndVerify(new Date[] { null, latest, earliest }, null, udf);
  runAndVerify(new Date[] { earlier, null, earliest }, null, udf);
  runAndVerify(new Date[] { earlier, latest, null }, null, udf);
  runAndVerify(new Date[] { null, null, null }, null, udf);
}
Example usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: the evaluateString method of the VectorUDFDateAddColScalar class.
protected void evaluateString(ColumnVector columnVector, LongColumnVector outputVector, int i) {
  BytesColumnVector bytesCol = (BytesColumnVector) columnVector;
  text.set(bytesCol.vector[i], bytesCol.start[i], bytesCol.length[i]);
  Date parsed = DateParser.parseDate(text.toString());
  if (parsed == null) {
    // Unparseable date string: mark this output slot null.
    outputVector.noNulls = false;
    outputVector.isNull[i] = true;
  } else {
    // Convert to epoch days, then shift by the scalar day count.
    long epochDays = DateWritableV2.millisToDays(parsed.toEpochMilli());
    outputVector.vector[i] = isPositive ? epochDays + numDays : epochDays - numDays;
  }
}
Example usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: the evaluateRepeatedString method of the VectorUDFDateAddColCol class.
protected void evaluateRepeatedString(BytesColumnVector inputColumnVector1, long[] vector2, LongColumnVector outputVector, boolean selectedInUse, int[] selected, int n) {
  Date parsed = null;
  if (!inputColumnVector1.isNull[0]) {
    text.set(inputColumnVector1.vector[0], inputColumnVector1.start[0], inputColumnVector1.length[0]);
    parsed = DateParser.parseDate(text.toString());
  }
  if (parsed == null) {
    // Null input or unparseable date: the entire repeated output is null.
    outputVector.noNulls = false;
    outputVector.isNull[0] = true;
    outputVector.isRepeating = true;
    return;
  }
  long epochDays = DateWritableV2.millisToDays(parsed.toEpochMilli());
  evaluateRepeatedCommon(epochDays, vector2, outputVector, selectedInUse, selected, n);
}
Example usage of org.apache.hadoop.hive.common.type.Date in the Apache Hive project: the evaluateString method of the VectorUDFDateAddColCol class.
protected void evaluateString(BytesColumnVector inputColumnVector1, LongColumnVector outputVector, int index, long numDays) {
  if (inputColumnVector1.isNull[index]) {
    // Null input propagates straight to the output slot.
    outputVector.noNulls = false;
    outputVector.isNull[index] = true;
    return;
  }
  text.set(inputColumnVector1.vector[index], inputColumnVector1.start[index], inputColumnVector1.length[index]);
  Date parsed = DateParser.parseDate(text.toString());
  if (parsed == null) {
    // Unparseable date string: mark this output slot null.
    outputVector.noNulls = false;
    outputVector.isNull[index] = true;
    return;
  }
  // Convert to epoch days, then shift by the per-row day count.
  long epochDays = DateWritableV2.millisToDays(parsed.toEpochMilli());
  outputVector.vector[index] = isPositive ? epochDays + numDays : epochDays - numDays;
}
Aggregations