Search in sources :

Example 11 with TypeInfoFactory.timestampTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.

From the class TestVectorHashKeyWrapperBatch, method testVectorHashKeyWrapperBatch.

// Specific test for HIVE-18744 --
// Tests Timestamp assignment: a null entry must not cause the Timestamp
// objects of later non-null entries to be replaced with ZERO_TIMESTAMP.
@Test
public void testVectorHashKeyWrapperBatch() throws HiveException {
    // One timestamp key column, read through an identity expression.
    VectorExpression[] keyExpressions = new VectorExpression[] { new IdentityExpression(0) };
    TypeInfo[] typeInfos = new TypeInfo[] { TypeInfoFactory.timestampTypeInfo };
    VectorHashKeyWrapperBatch vhkwb =
        VectorHashKeyWrapperBatch.compileKeyWrapperBatch(keyExpressions, typeInfos);
    VectorizedRowBatch batch = new VectorizedRowBatch(1);
    batch.selectedInUse = false;
    batch.size = 10;
    // DEFAULT_SIZE is a static constant; access it via the class, not an instance.
    TimestampColumnVector timestampColVector =
        new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    batch.cols[0] = timestampColVector;
    timestampColVector.reset();
    // Cause Timestamp object to be replaced (in buggy code) with ZERO_TIMESTAMP.
    timestampColVector.noNulls = false;
    timestampColVector.isNull[0] = true;
    // Reuse a single mutable scratch Timestamp for all writes; the column
    // vector must copy the value rather than retain the object itself.
    Timestamp scratch = new Timestamp(2039);
    Timestamp ts0 = new Timestamp(2039);
    scratch.setTime(ts0.getTime());
    scratch.setNanos(ts0.getNanos());
    timestampColVector.set(1, scratch);
    Timestamp ts1 = new Timestamp(33222);
    scratch.setTime(ts1.getTime());
    scratch.setNanos(ts1.getNanos());
    timestampColVector.set(2, scratch);
    batch.size = 3;
    vhkwb.evaluateBatch(batch);
    VectorHashKeyWrapper[] vhkwArray = vhkwb.getVectorHashKeyWrappers();
    // Row 0 is null; rows 1 and 2 must round-trip their distinct timestamps.
    VectorHashKeyWrapper vhk = vhkwArray[0];
    assertTrue(vhk.isNull(0));
    vhk = vhkwArray[1];
    assertFalse(vhk.isNull(0));
    // JUnit's assertEquals takes the expected value first, then the actual.
    assertEquals(ts0, vhk.getTimestamp(0));
    vhk = vhkwArray[2];
    assertFalse(vhk.isNull(0));
    assertEquals(ts1, vhk.getTimestamp(0));
}
Also used : VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) IdentityExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) Timestamp(java.sql.Timestamp) Test(org.junit.Test)

Example 12 with TypeInfoFactory.timestampTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.

From the class TestVectorGenericDateExpressions, method testDateAddScalarCol.

@Test
public void testDateAddScalarCol() throws HiveException {
    // Exercise date_add(scalar, col) for every supported scalar type.
    for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
        testDateAddScalarCol(scalarType1, true);
    }
    // An unparseable string scalar ("error") must produce a null output row.
    VectorExpression udf = new VectorUDFDateAddScalarCol("error".getBytes(utf8), 0, 1);
    udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
    udf.transientInit();
    VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
    batch.cols[0] = new LongColumnVector(1);
    batch.cols[1] = new LongColumnVector(1);
    udf.evaluate(batch);
    // assertTrue is clearer than assertEquals(actual, true) and gives a
    // correct failure message (assertEquals expects the expected value first).
    Assert.assertTrue(batch.cols[1].isNull[0]);
}
Also used : VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) TestVectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector) Test(org.junit.Test)

Example 13 with TypeInfoFactory.timestampTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.

From the class TestVectorGenericDateExpressions, method testDateDiffScalarCol.

@Test
public void testDateDiffScalarCol() throws HiveException {
    // Cross-product of scalar and column types: datediff(scalar, col) must
    // work for every combination, with and without nulls in the column.
    for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
        for (PrimitiveCategory colType2 : dateTimestampStringTypes) {
            LongColumnVector date2 = newRandomLongColumnVector(10000, size);
            LongColumnVector output = new LongColumnVector(size);
            ColumnVector col2 = castTo(date2, colType2);
            VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
            batch.cols[0] = col2;
            batch.cols[1] = output;
            long scalar1 = newRandom(1000);
            validateDateDiff(batch, scalar1, scalarType1, colType2, date2);
            // Repeat with random nulls injected into the input column.
            TestVectorizedRowBatch.addRandomNulls(date2);
            batch.cols[0] = castTo(date2, colType2);
            validateDateDiff(batch, scalar1, scalarType1, colType2, date2);
        }
    }
    VectorExpression udf;
    byte[] bytes = "error".getBytes(utf8);
    VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
    // Case 1: valid timestamp scalar, unparseable string column -> null output.
    udf = new VectorUDFDateDiffScalarCol(new Timestamp(0), 0, 1);
    udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.stringTypeInfo });
    udf.transientInit();
    batch.cols[0] = new BytesColumnVector(1);
    batch.cols[1] = new LongColumnVector(1);
    BytesColumnVector bcv = (BytesColumnVector) batch.cols[0];
    bcv.vector[0] = bytes;
    bcv.start[0] = 0;
    bcv.length[0] = bytes.length;
    udf.evaluate(batch);
    // assertTrue instead of assertEquals(actual, true): clearer intent and
    // a correct failure message (assertEquals takes the expected value first).
    Assert.assertTrue(batch.cols[1].isNull[0]);
    // Case 2: unparseable string scalar, long column -> null output.
    udf = new VectorUDFDateDiffScalarCol(bytes, 0, 1);
    udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
    udf.transientInit();
    batch.cols[0] = new LongColumnVector(1);
    batch.cols[1] = new LongColumnVector(1);
    udf.evaluate(batch);
    Assert.assertTrue(batch.cols[1].isNull[0]);
}
Also used : VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) TestVectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch) BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Timestamp(java.sql.Timestamp) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector) TimestampColumnVector(org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector) BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector) ColumnVector(org.apache.hadoop.hive.ql.exec.vector.ColumnVector) Test(org.junit.Test)

Example 14 with TypeInfoFactory.timestampTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.

From the class TestETypeConverter, method testTimestampInt96ConverterGMT.

@Test
public void testTimestampInt96ConverterGMT() {
    // Pin the INT96 write zone to GMT, then verify that a nano-time written
    // with a GMT calendar round-trips back to the original timestamp.
    parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "GMT");
    PrimitiveConverter converter =
        getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
    Calendar gmtCalendar = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
    converter.addBinary(NanoTimeUtils.getNanoTime(ts, gmtCalendar).toBinary());
    parent.assertWritableValue(new TimestampWritable(ts));
}
Also used : PrimitiveConverter(org.apache.parquet.io.api.PrimitiveConverter) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) Test(org.junit.Test)

Example 15 with TypeInfoFactory.timestampTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.

From the class TestETypeConverter, method testTimestampInt96ConverterLocal.

@Test
public void testTimestampInt96ConverterLocal() {
    // No write-zone property is set here, so the default (local time) applies;
    // a nano-time written with the default calendar must round-trip.
    PrimitiveConverter converter =
        getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
    Calendar localCalendar = Calendar.getInstance();
    converter.addBinary(NanoTimeUtils.getNanoTime(ts, localCalendar).toBinary());
    parent.assertWritableValue(new TimestampWritable(ts));
}
Also used : PrimitiveConverter(org.apache.parquet.io.api.PrimitiveConverter) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) Test(org.junit.Test)

Aggregations

Test (org.junit.Test)24 TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable)13 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)12 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)11 LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector)9 TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch)9 VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch)9 DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject)9 DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject)9 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)9 BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector)7 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)7 HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable)6 PrimitiveConverter (org.apache.parquet.io.api.PrimitiveConverter)4 Timestamp (java.sql.Timestamp)3 ArrayList (java.util.ArrayList)3 DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable)3 HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable)3 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)3 List (java.util.List)2