Usage of org.apache.hadoop.hive.common.type.Timestamp in the Apache Hive project: class TestParquetTimestampUtils, method testTimezone.
/**
 * Verifies timestamp conversion between GMT and US/Pacific: the resulting
 * time-of-day nanos must match either the PDT or the PST offset (depending
 * on the JVM's daylight-savings rules for 1968-05-23), and the Julian day
 * must roll over to the next day in GMT.
 */
@Test
public void testTimezone() {
  Calendar cal = Calendar.getInstance();
  cal.set(Calendar.YEAR, 1968);
  cal.set(Calendar.MONTH, Calendar.MAY);
  cal.set(Calendar.DAY_OF_MONTH, 23);
  cal.set(Calendar.HOUR_OF_DAY, 17);
  cal.set(Calendar.MINUTE, 1);
  cal.set(Calendar.SECOND, 1);
  cal.setTimeZone(TimeZone.getTimeZone("US/Pacific"));
  Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1);
  ts = TimestampTZUtil.convertTimestampToZone(ts, GMT, US_PACIFIC);
  /*
   * 17:00 PDT = 00:00 GMT (daylight savings):
   * (0*60*60 + 1*60 + 1) * 1e9 + 1 = 61000000001, or
   *
   * 17:00 PST = 01:00 GMT (no daylight savings):
   * (1*60*60 + 1*60 + 1) * 1e9 + 1 = 3661000000001
   */
  NanoTime nt = NanoTimeUtils.getNanoTime(ts, US_PACIFIC, false);
  long timeOfDayNanos = nt.getTimeOfDayNanos();
  Assert.assertTrue(timeOfDayNanos == 61000000001L || timeOfDayNanos == 3661000000001L);
  // In both cases this is the next day in GMT.
  // JUnit convention: expected value first, actual second.
  Assert.assertEquals(2440001, nt.getJulianDay());
}
Usage of org.apache.hadoop.hive.common.type.Timestamp in the Apache Hive project: class TestParquetTimestampUtils, method testInt64ExactValue.
// /////////// INT64/LogicalTypes Timestamp tests /////////////
/**
 * Checks the INT64 logical-type timestamp conversion for the MILLIS, MICROS
 * and NANOS time units against hand-computed epoch values: a basic value
 * with a trailing nanosecond, truncation to each unit, the maximum
 * nanosecond precision of a day, and a pre-epoch (negative) date.
 */
@Test
public void testInt64ExactValue() {
  // Basic value carrying a single trailing nanosecond.
  assertInt64Conversions(Timestamp.valueOf("2011-01-01 00:30:00.000000001"),
      1293841800_000L, 1293841800000_000L, 1293841800000_000_001L);
  // Correct truncation/scaling of a 5-digit fraction to each time unit.
  assertInt64Conversions(Timestamp.valueOf("2011-01-01 00:30:00.55555"),
      1293841800_555L, 1293841800_555_550L, 1293841800_555_550_000L);
  // Maximum nanosecond value within a second.
  assertInt64Conversions(Timestamp.valueOf("2018-12-31 23:59:59.999999999"),
      1546300799_999L, 1546300799_999_999L, 1546300799_999_999_999L);
  // Pre-epoch date: epoch values are negative.
  assertInt64Conversions(Timestamp.valueOf("1968-01-31 00:30:00.000000001"),
      -60564600_000L, -60564600000_000L, -60564599999_999_999L);
}

/** Asserts the INT64 representation of {@code ts} for each Parquet time unit. */
private static void assertInt64Conversions(Timestamp ts, long expectedMillis,
    long expectedMicros, long expectedNanos) {
  Assert.assertEquals(expectedMillis,
      ParquetTimestampUtils.getInt64(ts, LogicalTypeAnnotation.TimeUnit.MILLIS));
  Assert.assertEquals(expectedMicros,
      ParquetTimestampUtils.getInt64(ts, LogicalTypeAnnotation.TimeUnit.MICROS));
  Assert.assertEquals(expectedNanos,
      ParquetTimestampUtils.getInt64(ts, LogicalTypeAnnotation.TimeUnit.NANOS));
}
Usage of org.apache.hadoop.hive.common.type.Timestamp in the Apache Hive project: class TestParquetTimestampUtils, method testNanos.
/**
 * Verifies NanoTime time-of-day nanosecond computation in GMT for: a simple
 * early-morning time, the last nanosecond of a day, the difference between
 * two timestamps, and round-trips through denormalized Julian-day/nano
 * pairs (borrowing or carrying whole days).
 */
@Test
public void testNanos() {
  // Case 1: 01:01:01.000000001
  Calendar cal = Calendar.getInstance();
  cal.set(Calendar.YEAR, 1968);
  cal.set(Calendar.MONTH, Calendar.MAY);
  cal.set(Calendar.DAY_OF_MONTH, 23);
  cal.set(Calendar.HOUR_OF_DAY, 1);
  cal.set(Calendar.MINUTE, 1);
  cal.set(Calendar.SECOND, 1);
  cal.setTimeZone(TimeZone.getTimeZone("GMT"));
  Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1);
  // (1*60*60 + 1*60 + 1) * 1e9 + 1
  NanoTime nt = NanoTimeUtils.getNanoTime(ts, GMT, false);
  // JUnit convention: expected value first, actual second.
  Assert.assertEquals(3661000000001L, nt.getTimeOfDayNanos());
  // Case 2: 23:59:59.999999999 — last nanosecond of the day.
  cal = Calendar.getInstance();
  cal.set(Calendar.YEAR, 1968);
  cal.set(Calendar.MONTH, Calendar.MAY);
  cal.set(Calendar.DAY_OF_MONTH, 23);
  cal.set(Calendar.HOUR_OF_DAY, 23);
  cal.set(Calendar.MINUTE, 59);
  cal.set(Calendar.SECOND, 59);
  cal.setTimeZone(TimeZone.getTimeZone("GMT"));
  ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 999999999);
  // (23*60*60 + 59*60 + 59) * 1e9 + 999999999
  nt = NanoTimeUtils.getNanoTime(ts, GMT, false);
  Assert.assertEquals(86399999999999L, nt.getTimeOfDayNanos());
  // Case 3: verify the difference between two same-day timestamps.
  Calendar cal2 = Calendar.getInstance();
  cal2.set(Calendar.YEAR, 1968);
  cal2.set(Calendar.MONTH, Calendar.MAY);
  cal2.set(Calendar.DAY_OF_MONTH, 23);
  cal2.set(Calendar.HOUR_OF_DAY, 0);
  cal2.set(Calendar.MINUTE, 10);
  cal2.set(Calendar.SECOND, 0);
  cal2.setTimeZone(TimeZone.getTimeZone("GMT"));
  Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis(), 10);
  Calendar cal1 = Calendar.getInstance();
  cal1.set(Calendar.YEAR, 1968);
  cal1.set(Calendar.MONTH, Calendar.MAY);
  cal1.set(Calendar.DAY_OF_MONTH, 23);
  cal1.set(Calendar.HOUR_OF_DAY, 0);
  cal1.set(Calendar.MINUTE, 0);
  cal1.set(Calendar.SECOND, 0);
  cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
  Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis(), 1);
  NanoTime n2 = NanoTimeUtils.getNanoTime(ts2, GMT, false);
  NanoTime n1 = NanoTimeUtils.getNanoTime(ts1, GMT, false);
  // 10 minutes and 9 nanos: 600 * 1e9 + 9
  Assert.assertEquals(600000000009L, n2.getTimeOfDayNanos() - n1.getTimeOfDayNanos());
  // Denormalized representations must decode to the same timestamp:
  // borrow one day from the Julian day, add it back as nanos...
  NanoTime n3 = new NanoTime(n1.getJulianDay() - 1, n1.getTimeOfDayNanos() + TimeUnit.DAYS.toNanos(1));
  Assert.assertEquals(ts1, NanoTimeUtils.getTimestamp(n3, GMT, false));
  // ...and carry three days the other way.
  n3 = new NanoTime(n1.getJulianDay() + 3, n1.getTimeOfDayNanos() - TimeUnit.DAYS.toNanos(3));
  Assert.assertEquals(ts1, NanoTimeUtils.getTimestamp(n3, GMT, false));
}
Usage of org.apache.hadoop.hive.common.type.Timestamp in the Apache Hive project: class TestETypeConverter, method testGetInt64MicrosTimestampConverter.
/**
 * Converts a timestamp through the INT64/MICROS primitive converter and
 * checks that both the millisecond epoch value and the nanosecond field
 * survive the round trip.
 */
@Test
public void testGetInt64MicrosTimestampConverter() throws Exception {
  Timestamp expected = Timestamp.valueOf("2018-07-15 15:12:20.112233");
  PrimitiveType int64MicrosType = createInt64TimestampType(false, TimeUnit.MICROS);
  // Epoch microseconds: whole seconds scaled up, plus the sub-second part.
  long wholeSecondMicros = expected.toEpochSecond() * 1000000;
  long subSecondMicros = expected.getNanos() / 1000;
  Writable converted =
      getWritableFromPrimitiveConverter(null, int64MicrosType, wholeSecondMicros + subSecondMicros);
  TimestampWritableV2 actual = (TimestampWritableV2) converted;
  assertEquals(expected.toEpochMilli(), actual.getTimestamp().toEpochMilli());
  assertEquals(expected.getNanos(), actual.getNanos());
}
Usage of org.apache.hadoop.hive.common.type.Timestamp in the Apache Hive project: class TestETypeConverter, method testGetTimestampProlepticConverter.
/**
 * Encodes a proleptic (pre-Gregorian-cutover, year 1572) timestamp as an
 * INT96 NanoTime binary and checks that decoding through the binary
 * converter preserves the nanosecond field.
 */
@Test
public void testGetTimestampProlepticConverter() throws Exception {
  Timestamp expected = Timestamp.valueOf("1572-06-15 15:12:20.0");
  NanoTime encoded = NanoTimeUtils.getNanoTime(expected, ZoneOffset.UTC, false);
  PrimitiveType int96Type = Types.optional(PrimitiveTypeName.INT96).named("value");
  Writable converted = getWritableFromBinaryConverter(null, int96Type, encoded.toBinary());
  TimestampWritableV2 actual = (TimestampWritableV2) converted;
  assertEquals(expected.getNanos(), actual.getNanos());
}
Aggregations