Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class TestParquetTimestampUtils, method verifyInt64TimestampValue.
private void verifyInt64TimestampValue(String tsString, LogicalTypeAnnotation.TimeUnit timeUnit, boolean legal) {
  Timestamp ts = truncateTimestampString(tsString, timeUnit);
  String truncatedTsString = ts.toString();
  Long int64Value = ParquetTimestampUtils.getInt64(ts, timeUnit);
  if (legal) {
    // A representable value must survive the INT64 round trip unchanged.
    Timestamp tsFetched = ParquetTimestampUtils.getTimestamp(int64Value, timeUnit, false);
    Assert.assertEquals(truncatedTsString, tsFetched.toString());
  } else {
    // A value that cannot be represented in this time unit yields no INT64 at all.
    Assert.assertEquals(null, int64Value);
  }
}
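As a hedged illustration, a driver like the following could exercise the helper across the three Parquet INT64 time units; the test name and timestamp literals are hypothetical and not taken from the Hive test class.

@Test
public void testInt64RoundTripSketch() {
  // Hypothetical invocations: with "legal" == true the helper round-trips the value through
  // ParquetTimestampUtils.getInt64/getTimestamp; with false it expects getInt64 to return null.
  verifyInt64TimestampValue("2020-02-03 16:32:41.123", LogicalTypeAnnotation.TimeUnit.MILLIS, true);
  verifyInt64TimestampValue("2020-02-03 16:32:41.123456", LogicalTypeAnnotation.TimeUnit.MICROS, true);
  verifyInt64TimestampValue("2020-02-03 16:32:41.123456789", LogicalTypeAnnotation.TimeUnit.NANOS, true);
}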
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class TestParquetTimestampUtils, method testTimezoneless.
@Test
public void testTimezoneless() {
  Timestamp ts1 = Timestamp.valueOf("2011-01-01 00:30:30.111111111");
  NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, ZoneOffset.UTC, false);
  Assert.assertEquals(nt1.getJulianDay(), 2455563);
  Assert.assertEquals(nt1.getTimeOfDayNanos(), 1830111111111L);
  Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, ZoneOffset.UTC);
  Assert.assertEquals(ts1Fetched.toString(), ts1.toString());

  Timestamp ts2 = Timestamp.valueOf("2011-02-02 08:30:30.222222222");
  NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, ZoneOffset.UTC, false);
  Assert.assertEquals(nt2.getJulianDay(), 2455595);
  Assert.assertEquals(nt2.getTimeOfDayNanos(), 30630222222222L);
  Timestamp ts2Fetched = NanoTimeUtils.getTimestamp(nt2, ZoneOffset.UTC);
  Assert.assertEquals(ts2Fetched.toString(), ts2.toString());
}
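For reference, the asserted time-of-day value follows from plain arithmetic on the wall-clock time. The following standalone sketch (not part of the Hive test) reproduces the constant expected for ts1:

long secondsPastMidnight = 30 * 60 + 30;                    // 00:30:30 -> 1830 seconds
long timeOfDayNanos = secondsPastMidnight * 1_000_000_000L  // 1_830_000_000_000
    + 111_111_111L;                                         // fractional second
// timeOfDayNanos == 1_830_111_111_111L, the value asserted for nt1.getTimeOfDayNanos()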
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class TestParquetTimestampUtils, method verifyTsString.
private void verifyTsString(String tsString, boolean local) {
  Timestamp ts = Timestamp.valueOf(tsString);
  ZoneId sourceZone = local ? ZoneOffset.UTC : TimeZone.getDefault().toZoneId();
  NanoTime nt = NanoTimeUtils.getNanoTime(ts, sourceZone, false);
  ZoneId targetZone = local ? ZoneOffset.UTC : TimeZone.getDefault().toZoneId();
  Timestamp tsFetched = NanoTimeUtils.getTimestamp(nt, targetZone);
  Assert.assertEquals(tsString, tsFetched.toString());
}
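A hypothetical caller (not from the Hive test) could exercise both branches; note that, per the ternaries above, local == true routes both conversions through UTC, while local == false uses the JVM default zone for both.

verifyTsString("2011-01-01 00:30:30.111111111", true);   // getNanoTime and getTimestamp both use ZoneOffset.UTC
verifyTsString("2011-01-01 00:30:30.111111111", false);  // both use TimeZone.getDefault().toZoneId()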
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class TestETypeConverter, method testGetBigIntConverter.
@Test
public void testGetBigIntConverter() {
  Timestamp timestamp = Timestamp.valueOf("1998-10-03 09:58:31.231");
  NanoTime nanoTime = NanoTimeUtils.getNanoTime(timestamp, ZoneOffset.UTC, false);
  PrimitiveType primitiveType = Types.optional(PrimitiveTypeName.INT96).named("value");
  Writable writable = getWritableFromBinaryConverter(createHiveTypeInfo("bigint"), primitiveType, nanoTime.toBinary());
  // Retrieve as BigInt
  LongWritable longWritable = (LongWritable) writable;
  assertEquals(nanoTime.getTimeOfDayNanos(), longWritable.get());
}
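The Binary passed to the converter is the 12-byte INT96 encoding of the NanoTime (8 bytes of time-of-day nanos followed by 4 bytes of Julian day, little-endian). A minimal sketch of that round trip, assuming NanoTime exposes a fromBinary counterpart to the toBinary call used above:

Binary binary = nanoTime.toBinary();              // 12-byte INT96 representation
NanoTime decoded = NanoTime.fromBinary(binary);   // assumed static factory on NanoTime
assertEquals(nanoTime.getJulianDay(), decoded.getJulianDay());
assertEquals(nanoTime.getTimeOfDayNanos(), decoded.getTimeOfDayNanos());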
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class TestParquetTimestampsHive2Compatibility, method readHive2.
/**
 * Converts the specified nano time to a java.sql.Timestamp using Hive2 legacy code.
 */
private static java.sql.Timestamp readHive2(NanoTime nt) {
  // The current Hive parquet timestamp implementation stores timestamps in UTC, but other components do not.
  // If the file was written by the current Hive implementation itself, we need to do the reverse conversion;
  // otherwise the conversion is skipped.
  int julianDay = nt.getJulianDay();
  long nanosOfDay = nt.getTimeOfDayNanos();
  long remainder = nanosOfDay;
  julianDay += remainder / NANOS_PER_DAY;
  remainder %= NANOS_PER_DAY;
  if (remainder < 0) {
    remainder += NANOS_PER_DAY;
    julianDay--;
  }
  JulianDate jDateTime = JulianDate.of((double) julianDay);
  Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone(ZoneId.of("GMT")));
  calendar.set(Calendar.YEAR, jDateTime.toLocalDateTime().getYear());
  // Calendar.MONTH is 0-based, while getMonthValue() is 1-based, hence the -1.
  calendar.set(Calendar.MONTH, jDateTime.toLocalDateTime().getMonthValue() - 1);
  calendar.set(Calendar.DAY_OF_MONTH, jDateTime.toLocalDateTime().getDayOfMonth());
  int hour = (int) (remainder / NANOS_PER_HOUR);
  remainder = remainder % NANOS_PER_HOUR;
  int minutes = (int) (remainder / NANOS_PER_MINUTE);
  remainder = remainder % NANOS_PER_MINUTE;
  int seconds = (int) (remainder / NANOS_PER_SECOND);
  long nanos = remainder % NANOS_PER_SECOND;
  calendar.set(Calendar.HOUR_OF_DAY, hour);
  calendar.set(Calendar.MINUTE, minutes);
  calendar.set(Calendar.SECOND, seconds);
  java.sql.Timestamp ts = new java.sql.Timestamp(calendar.getTimeInMillis());
  ts.setNanos((int) nanos);
  return ts;
}
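The NANOS_PER_* constants used above are defined elsewhere in the test class; a sketch of plausible definitions follows (the values are standard, but the exact declarations here are an assumption):

private static final long NANOS_PER_SECOND = 1_000_000_000L;
private static final long NANOS_PER_MINUTE = 60L * NANOS_PER_SECOND;
private static final long NANOS_PER_HOUR = 60L * NANOS_PER_MINUTE;
private static final long NANOS_PER_DAY = 24L * NANOS_PER_HOUR;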