Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

The class GenericUDFDatetimeLegacyHybridCalendar, method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object input = arguments[0].get();
  if (input == null) {
    return null;
  }
  input = converter.convert(input);
  switch (resultOI.getPrimitiveCategory()) {
    case DATE:
      Date date = ((DateWritableV2) input).get();
      // Render the proleptic date through the legacy formatter and parse it back.
      java.sql.Date oldDate = new java.sql.Date(date.toEpochMilli());
      dateWritable.set(Date.valueOf(formatter.format(oldDate)));
      return dateWritable;
    case TIMESTAMP:
      Timestamp timestamp = ((TimestampWritableV2) input).getTimestamp();
      Timestamp adjustedTimestamp =
          Timestamp.valueOf(formatter.format(new java.sql.Timestamp(timestamp.toEpochMilli())));
      // The formatter drops sub-millisecond precision; restore the original nanos.
      adjustedTimestamp.setNanos(timestamp.getNanos());
      timestampWritable.set(adjustedTimestamp);
      return timestampWritable;
    default:
      // Should never happen.
      throw new IllegalStateException(
          "Unexpected type in evaluating datetime_legacy_hybrid_calendar: "
              + inputOI.getPrimitiveCategory());
  }
}
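The conversion relies on a formatter whose calendar renders pre-1582 instants under the legacy hybrid Julian/Gregorian rules, while Hive's Date and Timestamp types use the proleptic Gregorian calendar. A minimal standalone sketch of that calendar difference follows; it is not Hive's actual formatter setup, and the pattern, time zone, and cutover handling here are assumptions:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;

public class HybridCalendarSketch {
  public static void main(String[] args) throws Exception {
    TimeZone utc = TimeZone.getTimeZone("UTC");

    // Proleptic Gregorian: push the Julian-to-Gregorian cutover to the far past.
    GregorianCalendar prolepticCal = new GregorianCalendar(utc);
    prolepticCal.setGregorianChange(new Date(Long.MIN_VALUE));
    SimpleDateFormat proleptic = new SimpleDateFormat("yyyy-MM-dd");
    proleptic.setCalendar(prolepticCal);

    // Hybrid Julian/Gregorian: java.util's default, cutover at 1582-10-15.
    SimpleDateFormat hybrid = new SimpleDateFormat("yyyy-MM-dd");
    hybrid.setTimeZone(utc);

    Date d = proleptic.parse("1582-10-04");
    // The same instant renders ten days earlier under the hybrid calendar.
    System.out.println(hybrid.format(d)); // 1582-09-24
  }
}

Parsing 1582-10-04 prolepticly and re-rendering it under the hybrid calendar shifts the printed date back ten days, which is the kind of adjustment this UDF applies.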
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

The class GenericUDFDate, method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0].get() == null) {
    return null;
  }
  switch (inputType) {
    case VOID:
      throw new UDFArgumentException("TO_DATE() received non-null object of VOID type");
    case STRING:
      String dateString = textConverter.convert(arguments[0].get()).toString();
      if (DateParser.parseDate(dateString, date)) {
        output.set(date);
      } else {
        // Unparseable strings yield NULL rather than an error.
        return null;
      }
      break;
    case TIMESTAMP:
      Timestamp ts =
          ((TimestampWritableV2) timestampConverter.convert(arguments[0].get())).getTimestamp();
      output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
      break;
    case TIMESTAMPLOCALTZ:
    case DATE:
      // TIMESTAMPLOCALTZ falls through and shares the DATE handling.
      DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
      output.set(dw);
      break;
    default:
      throw new UDFArgumentException(
          "TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
  }
  return output;
}
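In the TIMESTAMP branch, DateWritableV2.millisToDays collapses an instant to a day number. A minimal sketch of the arithmetic such a helper is expected to perform (an assumption, not Hive's actual implementation: days counted from the Unix epoch, rounding toward negative infinity):

// Hypothetical re-implementation for illustration; Hive's real logic
// lives in DateWritableV2 and may additionally account for time zones.
static int millisToDays(long epochMillis) {
  return (int) Math.floorDiv(epochMillis, 86_400_000L); // ms per day
}

Floor division matters for pre-epoch instants: millisToDays(-1L) yields -1 (1969-12-31), whereas truncating division would put it on day 0.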
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

The class TestUtilities, method testSerializeTimestamp:

@Test
public void testSerializeTimestamp() {
  Timestamp ts = Timestamp.ofEpochMilli(1374554702000L, 123456);
  ExprNodeConstantDesc constant = new ExprNodeConstantDesc(ts);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
  children.add(constant);
  ExprNodeGenericFuncDesc desc = new ExprNodeGenericFuncDesc(
      TypeInfoFactory.timestampTypeInfo, new GenericUDFFromUtcTimestamp(), children);
  // Round-trip: serializing then deserializing the expression must preserve
  // its string form.
  assertEquals(desc.getExprString(),
      SerializationUtilities.deserializeExpression(
          SerializationUtilities.serializeExpression(desc)).getExprString());
}
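The constant under test combines a millisecond epoch value with a separate nanosecond field. A short sketch of the assumed ofEpochMilli(millis, nanos) semantics, in which the nanos argument replaces the fractional second rather than adding to it:

// Assumed semantics: nanos replaces the sub-second part of the millis value.
Timestamp ts = Timestamp.ofEpochMilli(1374554702000L, 123456);
assert ts.toEpochSecond() == 1374554702L; // whole seconds from the millis value
assert ts.getNanos() == 123456;           // fraction is exactly 0.000123456 s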
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

The class TestVectorMathFunctions, method getVectorizedRowBatchTimestampInLongOut:

public static VectorizedRowBatch getVectorizedRowBatchTimestampInLongOut(long[] longValues) {
  Random r = new Random(345);
  VectorizedRowBatch batch = new VectorizedRowBatch(2);
  TimestampColumnVector inV;
  LongColumnVector outV;
  inV = new TimestampColumnVector(longValues.length);
  outV = new LongColumnVector(longValues.length);
  for (int i = 0; i < longValues.length; i++) {
    Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
    longValues[i] = TimestampWritableV2.getLong(randTimestamp);
    // Each row gets its own random timestamp.
    inV.set(i, randTimestamp.toSqlTimestamp());
  }
  batch.cols[0] = inV;
  batch.cols[1] = outV;
  batch.size = longValues.length;
  return batch;
}
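A hypothetical driver for the helper above; the getTime/getNanos accessors on TimestampColumnVector and the meaning of the recorded long values (epoch seconds) are assumptions:

long[] longValues = new long[128];
VectorizedRowBatch batch =
    TestVectorMathFunctions.getVectorizedRowBatchTimestampInLongOut(longValues);
TimestampColumnVector in = (TimestampColumnVector) batch.cols[0];
// TimestampColumnVector keeps epoch millis and nanos-of-second side by side.
System.out.println("row 0: " + in.getTime(0) + " ms, " + in.getNanos(0) + " ns");
System.out.println("recorded long (assumed epoch seconds): " + longValues[0]);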
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

The class TestVectorMathFunctions, method getVectorizedRowBatchTimestampInStringOut:

public static VectorizedRowBatch getVectorizedRowBatchTimestampInStringOut(
    long[] epochSecondValues, int[] nanoValues) {
  Random r = new Random(345);
  VectorizedRowBatch batch = new VectorizedRowBatch(2);
  batch.size = epochSecondValues.length;
  TimestampColumnVector inV;
  BytesColumnVector outV;
  inV = new TimestampColumnVector(batch.size);
  outV = new BytesColumnVector(batch.size);
  for (int i = 0; i < batch.size; i++) {
    Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
    // Record the expected (epochSecond, nanos) decomposition for verification.
    epochSecondValues[i] = randTimestamp.toEpochSecond();
    nanoValues[i] = randTimestamp.getNanos();
    inV.set(i, randTimestamp.toSqlTimestamp());
  }
  batch.cols[0] = inV;
  batch.cols[1] = outV;
  return batch;
}
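The helper records each random instant as an (epochSecond, nanos) pair, which identifies the instant losslessly. A minimal round-trip sketch, assuming a Timestamp.ofEpochSecond(seconds, nanos) factory with the obvious meaning:

Timestamp original = RandomTypeUtil.getRandTimestamp(new Random(345));
long seconds = original.toEpochSecond();
int nanos = original.getNanos();
Timestamp roundTrip = Timestamp.ofEpochSecond(seconds, nanos); // assumed factory
assert roundTrip.equals(original); // the pair reconstructs the same instant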