Example 26 with Timestamp

Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

From class VectorizedColumnReaderTestBase, method timestampRead:

protected void timestampRead(boolean isDictionaryEncoding) throws InterruptedException, HiveException, IOException {
    conf.set(IOConstants.COLUMNS, "int96_field");
    conf.set(IOConstants.COLUMNS_TYPES, "timestamp");
    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
    VectorizedParquetRecordReader reader = createTestParquetReader("message test { required " + "int96 int96_field;}", conf);
    VectorizedRowBatch previous = reader.createValue();
    try {
        int c = 0;
        while (reader.next(NullWritable.get(), previous)) {
            TimestampColumnVector vector = (TimestampColumnVector) previous.cols[0];
            assertTrue(vector.noNulls);
            for (int i = 0; i < vector.nanos.length; i++) {
                if (c == nElements) {
                    break;
                }
                Timestamp expected = new Timestamp();
                if (isDictionaryEncoding) {
                    expected.setTimeInMillis(c % UNIQUE_NUM);
                } else {
                    expected.setTimeInMillis(c);
                }
                assertEquals("Not the same time at " + c, expected.toEpochMilli(), vector.getTime(i));
                assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i));
                assertFalse(vector.isNull[i]);
                c++;
            }
        }
        assertEquals(nElements, c);
    } finally {
        reader.close();
    }
}
Also used : VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) TimestampColumnVector(org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector) VectorizedParquetRecordReader(org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader) Timestamp(org.apache.hadoop.hive.common.type.Timestamp)
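The expected values in the loop above come from treating the row index c as an epoch-millisecond value. Below is a minimal standalone sketch (the class name ExpectedTimestampSketch is mine, not part of the Hive test suite) of how such an index maps to the epoch-milli/nanos pair that is compared against the TimestampColumnVector:

import org.apache.hadoop.hive.common.type.Timestamp;

public class ExpectedTimestampSketch {
    public static void main(String[] args) {
        for (long c = 0; c < 5; c++) {
            // Same construction as the test: index interpreted as epoch milliseconds.
            Timestamp expected = new Timestamp();
            expected.setTimeInMillis(c);
            // e.g. c = 3 -> toEpochMilli() = 3, getNanos() = 3_000_000
            System.out.println(c + " -> millis=" + expected.toEpochMilli()
                    + ", nanos=" + expected.getNanos());
        }
    }
}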

Example 27 with Timestamp

Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

From class VectorizedColumnReaderTestBase, method getNanoTime:

protected static NanoTime getNanoTime(int index) {
    Timestamp ts = new Timestamp();
    ts.setTimeInMillis(index);
    return NanoTimeUtils.getNanoTime(ts, TimeZone.getDefault().toZoneId(), false);
}
Also used : Timestamp(org.apache.hadoop.hive.common.type.Timestamp)
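A hedged usage sketch of this helper: converting an index-derived Timestamp into the 12-byte INT96 NanoTime value that the read tests later decode. Only NanoTimeUtils.getNanoTime and NanoTime.toBinary are confirmed by the examples here; the class name NanoTimeSketch and the length check are my additions.

import java.util.TimeZone;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
import org.apache.parquet.io.api.Binary;

public class NanoTimeSketch {
    public static void main(String[] args) {
        // Index 42 interpreted as epoch milliseconds, as in the tests above.
        Timestamp ts = new Timestamp();
        ts.setTimeInMillis(42);
        // Same call as getNanoTime: default zone, no legacy conversion.
        NanoTime nt = NanoTimeUtils.getNanoTime(ts, TimeZone.getDefault().toZoneId(), false);
        Binary int96 = nt.toBinary();
        // INT96 is a 12-byte physical type (8 bytes time-of-day nanos + 4 bytes Julian day).
        System.out.println("INT96 length = " + int96.length());
    }
}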

Example 28 with Timestamp

Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

From class VectorizedColumnReaderTestBase, method stringReadTimestamp:

protected void stringReadTimestamp(boolean isDictionaryEncoding) throws InterruptedException, HiveException, IOException {
    conf.set(IOConstants.COLUMNS, "int96_field");
    conf.set(IOConstants.COLUMNS_TYPES, "string");
    conf.setBoolean(ColumnProjectionUtils.READ_ALL_COLUMNS, false);
    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, "0");
    VectorizedParquetRecordReader reader = createTestParquetReader("message test { required " + "int96 int96_field;}", conf);
    VectorizedRowBatch previous = reader.createValue();
    try {
        int c = 0;
        while (reader.next(NullWritable.get(), previous)) {
            BytesColumnVector vector = (BytesColumnVector) previous.cols[0];
            assertTrue(vector.noNulls);
            for (int i = 0; i < vector.vector.length; i++) {
                if (c == nElements) {
                    break;
                }
                Timestamp expected = new Timestamp();
                if (isDictionaryEncoding) {
                    expected.setTimeInMillis(c % UNIQUE_NUM);
                } else {
                    expected.setTimeInMillis(c);
                }
                String actual = new String(Arrays.copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i]));
                assertEquals("Not the same time at " + c, expected.toString(), actual);
                assertFalse(vector.isNull[i]);
                c++;
            }
        }
        assertEquals(nElements, c);
    } finally {
        reader.close();
    }
}
Also used : VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) VectorizedParquetRecordReader(org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader) BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) Timestamp(org.apache.hadoop.hive.common.type.Timestamp)
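The string extraction inside the loop can be read as a small helper: each row of a BytesColumnVector is a slice of vector[i] delimited by start[i] and length[i]. A sketch follows; the helper name bytesVectorToString, the explicit UTF-8 charset, and the sample literal are my additions (the test itself uses the platform default charset).

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;

public class BytesVectorSketch {

    // Extracts row i of a BytesColumnVector as a String.
    static String bytesVectorToString(BytesColumnVector vector, int i) {
        byte[] slice = Arrays.copyOfRange(vector.vector[i], vector.start[i],
                vector.start[i] + vector.length[i]);
        return new String(slice, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        BytesColumnVector vector = new BytesColumnVector();
        byte[] bytes = "1970-01-01 00:00:00.042".getBytes(StandardCharsets.UTF_8);
        // setRef points row 0 at the byte array without copying into the shared buffer.
        vector.setRef(0, bytes, 0, bytes.length);
        System.out.println(bytesVectorToString(vector, 0));
    }
}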

Example 29 with Timestamp

Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

From class TestETypeConverter, method testGetTimestampConverter:

@Test
public void testGetTimestampConverter() throws Exception {
    Timestamp timestamp = Timestamp.valueOf("2018-06-15 15:12:20.0");
    NanoTime nanoTime = NanoTimeUtils.getNanoTime(timestamp, ZoneOffset.UTC, false);
    PrimitiveType primitiveType = Types.optional(PrimitiveTypeName.INT96).named("value");
    Writable writable = getWritableFromBinaryConverter(null, primitiveType, nanoTime.toBinary());
    TimestampWritableV2 timestampWritable = (TimestampWritableV2) writable;
    assertEquals(timestamp.getNanos(), timestampWritable.getNanos());
}
Also used : NanoTime(org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime) Writable(org.apache.hadoop.io.Writable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) PrimitiveType(org.apache.parquet.schema.PrimitiveType) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) Test(org.junit.Test)
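A minimal sketch of the invariant this test asserts after the INT96 round trip: wrapping a Hive Timestamp in a TimestampWritableV2 preserves the nanosecond field. It assumes the TimestampWritableV2(Timestamp) constructor; the class name TimestampWritableSketch is mine.

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

public class TimestampWritableSketch {
    public static void main(String[] args) {
        Timestamp timestamp = Timestamp.valueOf("2018-06-15 15:12:20.0");
        TimestampWritableV2 writable = new TimestampWritableV2(timestamp);
        // Both report the same fractional-second value (0 nanos for this literal).
        System.out.println(timestamp.getNanos() + " == " + writable.getNanos());
    }
}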

Example 30 with Timestamp

Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.

From class TestETypeConverter, method testGetInt64NanosAdjustedToUTCTimestampConverter:

@Test
public void testGetInt64NanosAdjustedToUTCTimestampConverter() throws Exception {
    ZoneId zone = ZoneId.systemDefault();
    Timestamp timestamp = Timestamp.valueOf("2018-07-15 15:12:20.11223344");
    PrimitiveType primitiveType = createInt64TimestampType(true, TimeUnit.NANOS);
    long time = timestamp.toEpochSecond() * 1000000000 + timestamp.getNanos();
    Writable writable = getWritableFromPrimitiveConverter(null, primitiveType, time);
    TimestampWritableV2 timestampWritable = (TimestampWritableV2) writable;
    timestamp = Timestamp.ofEpochSecond(timestamp.toEpochSecond(), timestamp.getNanos(), zone);
    assertEquals(timestamp.toEpochMilli(), timestampWritable.getTimestamp().toEpochMilli());
    assertEquals(timestamp.getNanos(), timestampWritable.getNanos());
}
Also used : ZoneId(java.time.ZoneId) Writable(org.apache.hadoop.io.Writable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) PrimitiveType(org.apache.parquet.schema.PrimitiveType) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) Test(org.junit.Test)
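A worked sketch of the INT64(NANOS) value the test builds by hand: the physical long is the epoch second scaled to nanoseconds plus the sub-second nanos of the Timestamp (the class name Int64NanosSketch is mine).

import org.apache.hadoop.hive.common.type.Timestamp;

public class Int64NanosSketch {
    public static void main(String[] args) {
        Timestamp timestamp = Timestamp.valueOf("2018-07-15 15:12:20.11223344");
        // ".11223344" seconds -> 112_233_440 ns in the fractional part.
        long nanosOfSecond = timestamp.getNanos();
        // Same arithmetic as the test: seconds * 10^9 + fractional nanos.
        long int64Nanos = timestamp.toEpochSecond() * 1_000_000_000L + nanosOfSecond;
        System.out.println("INT64 nanos value = " + int64Nanos);
    }
}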

Aggregations

Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 116
Test (org.junit.Test): 36
TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2): 32
Date (org.apache.hadoop.hive.common.type.Date): 27
BytesWritable (org.apache.hadoop.io.BytesWritable): 25
LongWritable (org.apache.hadoop.io.LongWritable): 25
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 24
Text (org.apache.hadoop.io.Text): 22
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 21
IntWritable (org.apache.hadoop.io.IntWritable): 21
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 20
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 20
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 19
FloatWritable (org.apache.hadoop.io.FloatWritable): 19
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 18
ArrayList (java.util.ArrayList): 17
DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2): 17
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 16
List (java.util.List): 15
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 12