Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
From class VectorizedColumnReaderTestBase, method timestampRead:
protected void timestampRead(boolean isDictionaryEncoding) throws InterruptedException, HiveException, IOException {
  conf.set(IOConstants.COLUMNS, "int96_field");
  conf.set(IOConstants.COLUMNS_TYPES, "timestamp");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  VectorizedParquetRecordReader reader =
      createTestParquetReader("message test { required int96 int96_field;}", conf);
  VectorizedRowBatch previous = reader.createValue();
  try {
    int c = 0;
    while (reader.next(NullWritable.get(), previous)) {
      TimestampColumnVector vector = (TimestampColumnVector) previous.cols[0];
      assertTrue(vector.noNulls);
      for (int i = 0; i < vector.nanos.length; i++) {
        if (c == nElements) {
          break;
        }
        // Row c was written as c milliseconds past the epoch; with dictionary
        // encoding the writer cycles through UNIQUE_NUM distinct values.
        Timestamp expected = new Timestamp();
        if (isDictionaryEncoding) {
          expected.setTimeInMillis(c % UNIQUE_NUM);
        } else {
          expected.setTimeInMillis(c);
        }
        assertEquals("Not the same time at " + c, expected.toEpochMilli(), vector.getTime(i));
        assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i));
        assertFalse(vector.isNull[i]);
        c++;
      }
    }
    assertEquals(nElements, c);
  } finally {
    reader.close();
  }
}
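The assertions above hinge on how Hive's Timestamp splits a value into whole epoch milliseconds (compared via toEpochMilli/getTime) and a fractional-second nanosecond component (compared via getNanos). A minimal standalone sketch of that split; illustrative only, not part of the Hive test suite, and the class name is made up:

import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampSplitSketch {
  public static void main(String[] args) {
    Timestamp ts = new Timestamp();
    ts.setTimeInMillis(1234);               // 1.234 seconds past the epoch
    System.out.println(ts.toEpochMilli());  // prints 1234
    System.out.println(ts.getNanos());      // prints 234000000, the fractional second in nanos
  }
}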
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
From class VectorizedColumnReaderTestBase, method getNanoTime:
protected static NanoTime getNanoTime(int index) {
  // Build a Timestamp at 'index' milliseconds past the epoch, then convert it to
  // Parquet's INT96 NanoTime form in the JVM default time zone; the final boolean
  // selects the legacy conversion path (disabled here).
  Timestamp ts = new Timestamp();
  ts.setTimeInMillis(index);
  return NanoTimeUtils.getNanoTime(ts, TimeZone.getDefault().toZoneId(), false);
}
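A hedged round-trip check of this helper, assuming NanoTimeUtils also exposes the inverse getTimestamp(NanoTime, ZoneId, boolean) with matching zone and legacy arguments, as in current Hive:

NanoTime nt = getNanoTime(42);
// Converting back with the same zone and legacy flag should recover the instant.
Timestamp roundTripped = NanoTimeUtils.getTimestamp(nt, TimeZone.getDefault().toZoneId(), false);
assertEquals(42L, roundTripped.toEpochMilli());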
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
From class VectorizedColumnReaderTestBase, method stringReadTimestamp:
protected void stringReadTimestamp(boolean isDictionaryEncoding) throws InterruptedException, HiveException, IOException {
  conf.set(IOConstants.COLUMNS, "int96_field");
  conf.set(IOConstants.COLUMNS_TYPES, "string");
  conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
  conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
  VectorizedParquetRecordReader reader =
      createTestParquetReader("message test { required int96 int96_field;}", conf);
  VectorizedRowBatch previous = reader.createValue();
  try {
    int c = 0;
    while (reader.next(NullWritable.get(), previous)) {
      BytesColumnVector vector = (BytesColumnVector) previous.cols[0];
      assertTrue(vector.noNulls);
      for (int i = 0; i < vector.vector.length; i++) {
        if (c == nElements) {
          break;
        }
        Timestamp expected = new Timestamp();
        if (isDictionaryEncoding) {
          expected.setTimeInMillis(c % UNIQUE_NUM);
        } else {
          expected.setTimeInMillis(c);
        }
        // Each entry is a byte slice [start, start + length) of the backing array;
        // the INT96 value read as a string must match Timestamp.toString().
        String actual = new String(Arrays.copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i]));
        assertEquals("Not the same time at " + c, expected.toString(), actual);
        assertFalse(vector.isNull[i]);
        c++;
      }
    }
    assertEquals(nElements, c);
  } finally {
    reader.close();
  }
}
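For reference, the expected strings come from Hive Timestamp.toString(), which renders the JDBC-style "yyyy-MM-dd HH:mm:ss[.fffffffff]" form. A tiny illustrative example of what the comparison above sees:

Timestamp ts = new Timestamp();
ts.setTimeInMillis(1);
System.out.println(ts);  // 1970-01-01 00:00:00.001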
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
From class TestETypeConverter, method testGetTimestampConverter:
@Test
public void testGetTimestampConverter() throws Exception {
  Timestamp timestamp = Timestamp.valueOf("2018-06-15 15:12:20.0");
  NanoTime nanoTime = NanoTimeUtils.getNanoTime(timestamp, ZoneOffset.UTC, false);
  PrimitiveType primitiveType = Types.optional(PrimitiveTypeName.INT96).named("value");
  // Run the INT96 binary converter and check that the nanosecond component survives.
  Writable writable = getWritableFromBinaryConverter(null, primitiveType, nanoTime.toBinary());
  TimestampWritableV2 timestampWritable = (TimestampWritableV2) writable;
  assertEquals(timestamp.getNanos(), timestampWritable.getNanos());
}
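The INT96 binary the converter consumes is NanoTime's 12-byte (Julian day, nanos-of-day) pair. A short sketch of inspecting it through NanoTime's public accessors:

NanoTime nt = NanoTimeUtils.getNanoTime(
    Timestamp.valueOf("2018-06-15 15:12:20.0"), ZoneOffset.UTC, false);
int julianDay = nt.getJulianDay();        // days since the Julian calendar epoch
long nanosOfDay = nt.getTimeOfDayNanos(); // nanoseconds elapsed within that day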
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
From class TestETypeConverter, method testGetInt64NanosAdjustedToUTCTimestampConverter:
@Test
public void testGetInt64NanosAdjustedToUTCTimestampConverter() throws Exception {
  ZoneId zone = ZoneId.systemDefault();
  Timestamp timestamp = Timestamp.valueOf("2018-07-15 15:12:20.11223344");
  PrimitiveType primitiveType = createInt64TimestampType(true, TimeUnit.NANOS);
  // Encode the timestamp as a single INT64: whole seconds scaled to nanoseconds,
  // plus the fractional-second nanos.
  long time = timestamp.toEpochSecond() * 1_000_000_000L + timestamp.getNanos();
  Writable writable = getWritableFromPrimitiveConverter(null, primitiveType, time);
  TimestampWritableV2 timestampWritable = (TimestampWritableV2) writable;
  // The type is adjusted to UTC, so shift the expected value into the local zone
  // before comparing.
  timestamp = Timestamp.ofEpochSecond(timestamp.toEpochSecond(), timestamp.getNanos(), zone);
  assertEquals(timestamp.toEpochMilli(), timestampWritable.getTimestamp().toEpochMilli());
  assertEquals(timestamp.getNanos(), timestampWritable.getNanos());
}
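createInt64TimestampType is a helper local to this test and not shown here; a hedged sketch of the Parquet type it plausibly builds, written against parquet-mr's public Types and LogicalTypeAnnotation API:

PrimitiveType int64NanosUtc = Types.optional(PrimitiveTypeName.INT64)
    .as(LogicalTypeAnnotation.timestampType(/* isAdjustedToUTC= */ true,
        LogicalTypeAnnotation.TimeUnit.NANOS))
    .named("value");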