Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project Hive by Apache.
The class TestGenericUDFOPPlus, method testDatePlusIntervalDayTime.
@Test
public void testDatePlusIntervalDayTime() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  DateWritableV2 left = new DateWritableV2(Date.valueOf("2001-01-01"));
  HiveIntervalDayTimeWritable right = new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableDateObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  // Date + day-time interval = timestamp
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
}
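As background for these snippets, DateWritableV2 stores its value as a day offset from the Unix epoch rather than as milliseconds. Below is a minimal, self-contained sketch of that round trip; it assumes the getDays() accessor, which is not shown in the snippets on this page.

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateWritableV2RoundTrip {
  public static void main(String[] args) {
    // Wrap a Hive Date; internally the writable keeps an int count of days since 1970-01-01.
    DateWritableV2 writable = new DateWritableV2(Date.valueOf("2001-01-01"));
    int days = writable.getDays();   // epoch-day offset (11323 for 2001-01-01)
    Date restored = writable.get();  // materialize the Date again
    System.out.println(days + " -> " + restored);
  }
}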
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project Hive by Apache.
The class TestGenericUDFOPPlus, method testDatePlusIntervalYearMonth.
@Test
public void testDatePlusIntervalYearMonth() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  DateWritableV2 left = new DateWritableV2(Date.valueOf("2001-06-15"));
  HiveIntervalYearMonthWritable right = new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableDateObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  // Date + year-month interval = date
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
  DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
  Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
}
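Both tests wire up the UDF the same way: build the argument ObjectInspectors, call initialize to resolve the result type, then call evaluate on DeferredJavaObject-wrapped writables. A hedged helper that factors out that pattern (hypothetical name evalPlus, not part of the original test class) could look like this:

// Hypothetical helper illustrating the shared initialize/evaluate pattern used by these tests.
private static Object evalPlus(Object left, ObjectInspector leftOI,
                               Object right, ObjectInspector rightOI) throws HiveException {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  ObjectInspector[] inputOIs = { leftOI, rightOI };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  udf.initialize(inputOIs);  // resolves the result ObjectInspector from the argument types
  return udf.evaluate(args); // returns the result writable (TimestampWritableV2, DateWritableV2, ...)
}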
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project Hive by Apache.
The class DynamicValueVectorExpression, method initValue.
private void initValue() {
  Object val = dynamicValue.getValue();
  if (val == null) {
    isNullValue = true;
  } else {
    PrimitiveObjectInspector poi = dynamicValue.getObjectInspector();
    byte[] bytesVal;
    switch (poi.getPrimitiveCategory()) {
      case BOOLEAN:
      case BYTE:
      case SHORT:
      case INT:
      case LONG:
        longValue = PrimitiveObjectInspectorUtils.getLong(val, poi);
        break;
      case FLOAT:
      case DOUBLE:
        doubleValue = PrimitiveObjectInspectorUtils.getDouble(val, poi);
        break;
      case STRING:
      case CHAR:
      case VARCHAR:
        bytesVal = PrimitiveObjectInspectorUtils.getString(val, poi).getBytes();
        setBytesValue(bytesVal);
        break;
      case BINARY:
        bytesVal = PrimitiveObjectInspectorUtils.getBinary(val, poi).copyBytes();
        setBytesValue(bytesVal);
        break;
      case DECIMAL:
        decimalValue = PrimitiveObjectInspectorUtils.getHiveDecimal(val, poi);
        break;
      case DATE:
        longValue = DateWritableV2.dateToDays(PrimitiveObjectInspectorUtils.getDate(val, poi));
        break;
      case TIMESTAMP:
        timestampValue = PrimitiveObjectInspectorUtils.getTimestamp(val, poi).toSqlTimestamp();
        break;
      case INTERVAL_YEAR_MONTH:
        longValue = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(val, poi).getTotalMonths();
        break;
      case INTERVAL_DAY_TIME:
        intervalDayTimeValue = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(val, poi);
        break;
      default:
        throw new IllegalStateException("Unsupported type " + poi.getPrimitiveCategory());
    }
  }
  initialized = true;
}
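For the DATE branch specifically, the date is flattened to an epoch-day value via DateWritableV2.dateToDays so it can ride in the same longValue slot as the integer types. A short sketch of that conversion in isolation (assuming DateWritableV2 also exposes an int-days constructor, which is not shown above):

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateToDaysSketch {
  public static void main(String[] args) {
    // Flatten a Date to the epoch-day value used by the vectorized long column ...
    long days = DateWritableV2.dateToDays(Date.valueOf("2001-01-01"));
    // ... and rebuild the Date from that day count.
    Date restored = new DateWritableV2((int) days).get();
    System.out.println(days + " -> " + restored); // 11323 -> 2001-01-01
  }
}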
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project Hive by Apache.
The class TestOrcFile, method createOrcDateFile.
/**
 * Generate an ORC file with a range of dates and times.
 */
public void createOrcDateFile(Path file, int minYear, int maxYear) throws IOException {
  List<OrcProto.Type> types = new ArrayList<OrcProto.Type>();
  types.add(OrcProto.Type.newBuilder()
      .setKind(OrcProto.Type.Kind.STRUCT)
      .addFieldNames("time").addFieldNames("date")
      .addSubtypes(1).addSubtypes(2)
      .build());
  types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.TIMESTAMP).build());
  types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.DATE).build());
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = OrcStruct.createObjectInspector(0, types);
  }
  Writer writer = OrcFile.createWriter(file,
      OrcFile.writerOptions(conf)
          .inspector(inspector)
          .stripeSize(100000)
          .bufferSize(10000)
          .blockPadding(false)
          .setProlepticGregorian(true));
  OrcStruct row = new OrcStruct(2);
  for (int year = minYear; year < maxYear; ++year) {
    for (int ms = 1000; ms < 2000; ++ms) {
      row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf(year + "-05-05 12:34:56." + ms)));
      row.setFieldValue(1, new DateWritableV2(Date.of(year - 1900, 11, 25)));
      writer.addRow(row);
    }
  }
  writer.close();
  Reader reader = OrcFile.createReader(file, OrcFile.readerOptions(conf));
  RecordReader rows = reader.rows();
  for (int year = minYear; year < maxYear; ++year) {
    for (int ms = 1000; ms < 2000; ++ms) {
      row = (OrcStruct) rows.next(row);
      assertEquals(new TimestampWritableV2(Timestamp.valueOf(year + "-05-05 12:34:56." + ms)), row.getFieldValue(0));
      assertEquals(new DateWritableV2(Date.of(year - 1900, 11, 25)), row.getFieldValue(1));
    }
  }
  rows.close();
}
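A call site for this helper only needs a target file path and a year range; the path and years below are illustrative, not taken from the original test.

// Hypothetical invocation: write dates/timestamps for 2000-2010 and verify them on read-back.
createOrcDateFile(new Path(workDir, "orc-file-dates.orc"), 2000, 2010);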
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project Hive by Apache.
The class TestGenericUDFMonthsBetween, method runTestDt.
protected void runTestDt(String dt1, String dt2, Double expDiff, GenericUDFMonthsBetween udf) throws HiveException {
  DateWritableV2 dtWr1 = dt1 == null ? null : new DateWritableV2(Date.valueOf(dt1));
  DateWritableV2 dtWr2 = dt2 == null ? null : new DateWritableV2(Date.valueOf(dt2));
  DeferredJavaObject valueObj1 = new DeferredJavaObject(dtWr1);
  DeferredJavaObject valueObj2 = new DeferredJavaObject(dtWr2);
  DeferredObject[] args = new DeferredObject[] { valueObj1, valueObj2 };
  DoubleWritable output = (DoubleWritable) udf.evaluate(args);
  if (expDiff == null) {
    assertNull("months_between() test for NULL DATE failed", output);
  } else {
    assertNotNull("months_between() test for NOT NULL DATE failed", output);
    assertEquals("months_between() test for DATE failed", expDiff, output.get(), 0.00000001D);
  }
}
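Callers first initialize the UDF with two DATE ObjectInspectors, then pass concrete date strings together with the expected month difference (or null for the NULL case). A hedged example invocation, with values chosen here for illustration rather than copied from the original test:

// Hypothetical usage of runTestDt with two DATE arguments.
GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween();
udf.initialize(new ObjectInspector[] {
    PrimitiveObjectInspectorFactory.writableDateObjectInspector,
    PrimitiveObjectInspectorFactory.writableDateObjectInspector });
runTestDt("2003-04-23", "2003-03-23", 1.0, udf); // dates exactly one calendar month apart
runTestDt(null, "2003-03-23", null, udf);        // a NULL date yields a NULL result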