Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
From the class TestVectorHashKeyWrapperBatch, method testVectorHashKeyWrapperBatch:
// Specific test for HIVE-18744 -- tests Timestamp assignment.
@Test
public void testVectorHashKeyWrapperBatch() throws HiveException {

    VectorExpression[] keyExpressions = new VectorExpression[] { new IdentityExpression(0) };
    TypeInfo[] typeInfos = new TypeInfo[] { TypeInfoFactory.timestampTypeInfo };
    VectorHashKeyWrapperBatch vhkwb =
        VectorHashKeyWrapperBatch.compileKeyWrapperBatch(keyExpressions, typeInfos);

    VectorizedRowBatch batch = new VectorizedRowBatch(1);
    batch.selectedInUse = false;
    batch.size = 10;

    TimestampColumnVector timestampColVector =
        new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    batch.cols[0] = timestampColVector;
    timestampColVector.reset();

    // Cause the Timestamp object to be replaced (in the buggy code) with ZERO_TIMESTAMP.
    timestampColVector.noNulls = false;
    timestampColVector.isNull[0] = true;

    // Reuse one scratch Timestamp object for both non-null rows.
    Timestamp scratch = new Timestamp(2039);
    Timestamp ts0 = new Timestamp(2039);
    scratch.setTime(ts0.getTime());
    scratch.setNanos(ts0.getNanos());
    timestampColVector.set(1, scratch);

    Timestamp ts1 = new Timestamp(33222);
    scratch.setTime(ts1.getTime());
    scratch.setNanos(ts1.getNanos());
    timestampColVector.set(2, scratch);

    batch.size = 3;

    vhkwb.evaluateBatch(batch);
    VectorHashKeyWrapper[] vhkwArray = vhkwb.getVectorHashKeyWrappers();

    // Row 0 was marked null; rows 1 and 2 must carry their own timestamp values.
    VectorHashKeyWrapper vhk = vhkwArray[0];
    assertTrue(vhk.isNull(0));

    vhk = vhkwArray[1];
    assertFalse(vhk.isNull(0));
    assertEquals(vhk.getTimestamp(0), ts0);

    vhk = vhkwArray[2];
    assertFalse(vhk.isNull(0));
    assertEquals(vhk.getTimestamp(0), ts1);
}
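The test above reuses a single scratch Timestamp for both non-null rows, which is only safe if TimestampColumnVector.set copies the millis/nanos out of its argument rather than keeping a reference. A minimal sketch of that assumption, using the vector's getTime accessor:

    TimestampColumnVector tcv = new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    Timestamp scratch = new Timestamp(0);

    scratch.setTime(2039L);
    tcv.set(1, scratch);       // millis/nanos copied into the vector's internal arrays

    scratch.setTime(33222L);   // mutating the scratch object must not disturb row 1
    tcv.set(2, scratch);

    assert tcv.getTime(1) == 2039L;   // row 1 keeps its original value
    assert tcv.getTime(2) == 33222L;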
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
From the class TestVectorGenericDateExpressions, method testDateAddScalarCol:
@Test
public void testDateAddScalarCol() throws HiveException {
    for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
        testDateAddScalarCol(scalarType1, true);
    }

    // An unparseable scalar date ("error") must produce a null output row.
    VectorExpression udf = new VectorUDFDateAddScalarCol("error".getBytes(utf8), 0, 1);
    udf.setInputTypeInfos(new TypeInfo[] {
        TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
    udf.transientInit();

    VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
    batch.cols[0] = new LongColumnVector(1);
    batch.cols[1] = new LongColumnVector(1);
    udf.evaluate(batch);
    Assert.assertEquals(batch.cols[1].isNull[0], true);
}
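For contrast with the error case, here is a hedged sketch of the non-error path, assuming a parseable scalar date and an int "days" column; the type choices (stringTypeInfo + intTypeInfo) and the epoch-day output encoding are assumptions, while the column layout mirrors the test above (cols[0] input, cols[1] output):

    // Sketch only: date_add('2018-01-01', 5) through the vectorized scalar-column UDF.
    VectorExpression add = new VectorUDFDateAddScalarCol("2018-01-01".getBytes(utf8), 0, 1);
    add.setInputTypeInfos(new TypeInfo[] {
        TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo });   // assumed types
    add.transientInit();

    VectorizedRowBatch b = new VectorizedRowBatch(2, 1);
    LongColumnVector days = new LongColumnVector(1);
    days.vector[0] = 5;                  // days to add
    b.cols[0] = days;
    b.cols[1] = new LongColumnVector(1);
    b.size = 1;
    add.evaluate(b);
    // Expect a non-null value representing 2018-01-06 in b.cols[1].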
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
From the class TestVectorGenericDateExpressions, method testDateDiffScalarCol:
@Test
public void testDateDiffScalarCol() throws HiveException {
    for (PrimitiveCategory scalarType1 : dateTimestampStringTypes) {
        for (PrimitiveCategory colType2 : dateTimestampStringTypes) {
            LongColumnVector date2 = newRandomLongColumnVector(10000, size);
            LongColumnVector output = new LongColumnVector(size);
            ColumnVector col2 = castTo(date2, colType2);

            VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
            batch.cols[0] = col2;
            batch.cols[1] = output;

            long scalar1 = newRandom(1000);
            validateDateDiff(batch, scalar1, scalarType1, colType2, date2);

            // Re-validate with random nulls sprinkled into the column.
            TestVectorizedRowBatch.addRandomNulls(date2);
            batch.cols[0] = castTo(date2, colType2);
            validateDateDiff(batch, scalar1, scalarType1, colType2, date2);
        }
    }

    // Error cases: an unparseable "error" string on either side must produce null output.
    VectorExpression udf;
    byte[] bytes = "error".getBytes(utf8);
    VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);

    // Timestamp scalar, unparseable string column.
    udf = new VectorUDFDateDiffScalarCol(new Timestamp(0), 0, 1);
    udf.setInputTypeInfos(new TypeInfo[] {
        TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.stringTypeInfo });
    udf.transientInit();
    batch.cols[0] = new BytesColumnVector(1);
    batch.cols[1] = new LongColumnVector(1);
    BytesColumnVector bcv = (BytesColumnVector) batch.cols[0];
    bcv.vector[0] = bytes;
    bcv.start[0] = 0;
    bcv.length[0] = bytes.length;
    udf.evaluate(batch);
    Assert.assertEquals(batch.cols[1].isNull[0], true);

    // Unparseable string scalar, timestamp column.
    udf = new VectorUDFDateDiffScalarCol(bytes, 0, 1);
    udf.setInputTypeInfos(new TypeInfo[] {
        TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
    udf.transientInit();
    batch.cols[0] = new LongColumnVector(1);
    batch.cols[1] = new LongColumnVector(1);
    udf.evaluate(batch);
    Assert.assertEquals(batch.cols[1].isNull[0], true);
}
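A hedged sketch of a successful datediff, assuming datediff(scalar, col) returns the scalar date minus the column date in whole days, and that a DATE column stores epoch days in a LongColumnVector (both assumptions; dateTypeInfo is the assumed column type):

    VectorExpression diff = new VectorUDFDateDiffScalarCol(new Timestamp(0), 0, 1);  // scalar = the epoch
    diff.setInputTypeInfos(new TypeInfo[] {
        TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.dateTypeInfo });
    diff.transientInit();

    VectorizedRowBatch b = new VectorizedRowBatch(2, 1);
    LongColumnVector dates = new LongColumnVector(1);
    dates.vector[0] = 10;                // ten days after the epoch
    b.cols[0] = dates;
    b.cols[1] = new LongColumnVector(1);
    b.size = 1;
    diff.evaluate(b);
    // Expect -10 in ((LongColumnVector) b.cols[1]).vector[0].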
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
From the class TestETypeConverter, method testTimestampInt96ConverterGMT:
@Test
public void testTimestampInt96ConverterGMT() {
    PrimitiveConverter converter;

    // With the write zone set to GMT, the converter must read back an INT96
    // nanotime that was written using a GMT calendar.
    parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "GMT");
    converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
    converter.addBinary(NanoTimeUtils.getNanoTime(ts,
        Calendar.getInstance(TimeZone.getTimeZone("GMT"))).toBinary());
    parent.assertWritableValue(new TimestampWritable(ts));
}
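The assertion relies on NanoTimeUtils round-tripping cleanly: converting a Timestamp to an INT96 NanoTime and back under the same calendar should reproduce the original value. A minimal sketch, assuming the Calendar-based overloads used above (getTimestamp with a Calendar argument is an assumption):

    Calendar gmt = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
    Timestamp original = Timestamp.valueOf("2017-06-15 10:30:00.123456789");
    NanoTime nano = NanoTimeUtils.getNanoTime(original, gmt);          // Timestamp -> INT96
    Timestamp roundTripped = NanoTimeUtils.getTimestamp(nano, gmt);    // INT96 -> Timestamp
    // roundTripped should equal original when the same calendar is used both ways.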
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
From the class TestETypeConverter, method testTimestampInt96ConverterLocal:
@Test
public void testTimestampInt96ConverterLocal() {
    PrimitiveConverter converter;

    // No write-zone property set: the default time zone should be local time.
    converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
    converter.addBinary(NanoTimeUtils.getNanoTime(ts, Calendar.getInstance()).toBinary());
    parent.assertWritableValue(new TimestampWritable(ts));
}
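Taken together, the two converter tests suggest the design choice behind the property: INT96 timestamp conversion defaults to the reader's local time zone, and ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY exists so that files written under a different zone (such as GMT) can still be read back to the original timestamps.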