Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
From the class TestVectorDateExpressions, the method compareToUDFWeekOfYearDate:
/**
 * Checks that the row-mode UDFWeekOfYear UDF produces the expected week-of-year
 * value for the given input.
 *
 * @param t raw value converted to a TimestampWritable via toTimestampWritable
 * @param y expected week-of-year result
 */
private void compareToUDFWeekOfYearDate(long t, int y) {
UDFWeekOfYear udf = new UDFWeekOfYear();
TimestampWritable tsw = toTimestampWritable(t);
IntWritable res = udf.evaluate(tsw);
// JUnit convention is assertEquals(expected, actual); the original had the
// arguments swapped, which yields misleading failure messages.
Assert.assertEquals(y, res.get());
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
From the class TestVectorExpressionWriters, the method testWriterTimestamp:
/**
 * Exercises the VectorExpressionWriter for timestamp columns: generates a
 * timestamp column vector, writes each row back out through the writer, and
 * verifies that non-null rows round-trip to the expected TimestampWritable
 * while null rows are reported as null.
 *
 * @param type the timestamp TypeInfo used to look up the writer and build expected values
 * @throws HiveException if the writer cannot be obtained or fails to write a value
 */
private void testWriterTimestamp(TypeInfo type) throws HiveException {
Timestamp[] timestampValues = new Timestamp[vectorSize];
TimestampColumnVector tcv = VectorizedRowGroupGenUtil.generateTimestampColumnVector(true, false, vectorSize, new Random(10), timestampValues);
// Force at least one null entry so the null branch below is exercised.
tcv.isNull[3] = true;
VectorExpressionWriter vew = getWriter(type);
for (int i = 0; i < vectorSize; i++) {
Writable w = (Writable) vew.writeValue(tcv, i);
if (w != null) {
Writable expected = getWritableValue(type, timestampValues[i]);
// assertEquals reports both values on failure, unlike the original
// assertTrue(t1.equals(t2)) which only says "expected true".
Assert.assertEquals((TimestampWritable) expected, (TimestampWritable) w);
} else {
// A null writable must correspond to a null vector entry.
Assert.assertTrue(tcv.isNull[i]);
}
}
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
From the class TestVectorTimestampExpressions, the method compareToUDFYearLong:
/**
 * Checks that the row-mode UDFYear UDF extracts the expected year from the
 * given timestamp.
 *
 * @param t timestamp under test
 * @param y expected year
 */
private void compareToUDFYearLong(Timestamp t, int y) {
UDFYear udf = new UDFYear();
TimestampWritable tsw = new TimestampWritable(t);
IntWritable res = udf.evaluate(tsw);
// The original printed a debug line via System.out.printf and then asserted
// with swapped (actual, expected) arguments. Folding the diagnostic into the
// assertion message keeps stdout clean and fixes the expected-first ordering.
Assert.assertEquals(String.format("year mismatch for %s (epoch seconds %d)", t, tsw.getTimestamp().getTime() / 1000), y, res.get());
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
From the class TestVectorizedORCReader, the method checkVectorizedReader:
/**
 * Cross-checks the vectorized ORC reader against the row-by-row ORC reader:
 * for each batch read through the vectorized path, the same rows are read
 * through the row path and every column value is compared, honoring the
 * column-vector null and repeating flags. Finally the expected per-column
 * isRepeating/noNulls flags are asserted for the test file's schema.
 *
 * @throws Exception on any reader failure
 */
private void checkVectorizedReader() throws Exception {
Reader vreader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
RecordReaderImpl vrr = (RecordReaderImpl) vreader.rows();
RecordReaderImpl rr = (RecordReaderImpl) reader.rows();
VectorizedRowBatch batch = reader.getSchema().createRowBatch();
OrcStruct row = null;
// Check Vectorized ORC reader against ORC row reader
while (vrr.nextBatch(batch)) {
for (int i = 0; i < batch.size; i++) {
row = (OrcStruct) rr.next(row);
for (int j = 0; j < batch.cols.length; j++) {
Object a = (row.getFieldValue(j));
ColumnVector cv = batch.cols[j];
// if the value is repeating, use row 0
int rowId = cv.isRepeating ? 0 : i;
// make sure the null flag agrees
if (a == null) {
Assert.assertEquals(true, !cv.noNulls && cv.isNull[rowId]);
} else if (a instanceof BooleanWritable) {
// Boolean values are stored as 1's and 0's, so convert and compare
Long temp = (long) (((BooleanWritable) a).get() ? 1 : 0);
long b = ((LongColumnVector) cv).vector[rowId];
Assert.assertEquals(temp.toString(), Long.toString(b));
} else if (a instanceof TimestampWritable) {
// Timestamps are stored as long, so convert and compare
TimestampWritable t = ((TimestampWritable) a);
TimestampColumnVector tcv = ((TimestampColumnVector) cv);
Assert.assertEquals(t.getTimestamp(), tcv.asScratchTimestamp(rowId));
} else if (a instanceof DateWritable) {
// Dates are stored as long, so convert and compare
DateWritable adt = (DateWritable) a;
long b = ((LongColumnVector) cv).vector[rowId];
Assert.assertEquals(adt.get().getTime(), DateWritable.daysToMillis((int) b));
} else if (a instanceof HiveDecimalWritable) {
// Decimals are stored as HiveDecimalWritable, so compare directly.
HiveDecimalWritable dec = (HiveDecimalWritable) a;
// BUG FIX: the original indexed vector[i] here, ignoring the repeating
// flag that every other branch honors via rowId.
HiveDecimalWritable b = ((DecimalColumnVector) cv).vector[rowId];
Assert.assertEquals(dec, b);
} else if (a instanceof DoubleWritable) {
double b = ((DoubleColumnVector) cv).vector[rowId];
assertEquals(a.toString(), Double.toString(b));
} else if (a instanceof Text) {
BytesColumnVector bcv = (BytesColumnVector) cv;
Text b = new Text();
b.set(bcv.vector[rowId], bcv.start[rowId], bcv.length[rowId]);
assertEquals(a, b);
} else if (a instanceof IntWritable || a instanceof LongWritable || a instanceof ByteWritable || a instanceof ShortWritable) {
// All integer-family writables map to LongColumnVector; compare as text.
assertEquals(a.toString(), Long.toString(((LongColumnVector) cv).vector[rowId]));
} else {
// Fail loudly on any column type this comparison does not yet cover.
assertEquals("huh", a.getClass().getName());
}
}
}
// Check repeating
Assert.assertEquals(false, batch.cols[0].isRepeating);
Assert.assertEquals(false, batch.cols[1].isRepeating);
Assert.assertEquals(false, batch.cols[2].isRepeating);
Assert.assertEquals(true, batch.cols[3].isRepeating);
Assert.assertEquals(false, batch.cols[4].isRepeating);
Assert.assertEquals(false, batch.cols[5].isRepeating);
Assert.assertEquals(false, batch.cols[6].isRepeating);
Assert.assertEquals(false, batch.cols[7].isRepeating);
Assert.assertEquals(false, batch.cols[8].isRepeating);
Assert.assertEquals(false, batch.cols[9].isRepeating);
// Check non null
Assert.assertEquals(false, batch.cols[0].noNulls);
Assert.assertEquals(false, batch.cols[1].noNulls);
Assert.assertEquals(true, batch.cols[2].noNulls);
Assert.assertEquals(true, batch.cols[3].noNulls);
Assert.assertEquals(false, batch.cols[4].noNulls);
Assert.assertEquals(false, batch.cols[5].noNulls);
Assert.assertEquals(false, batch.cols[6].noNulls);
Assert.assertEquals(false, batch.cols[7].noNulls);
Assert.assertEquals(false, batch.cols[8].noNulls);
Assert.assertEquals(false, batch.cols[9].noNulls);
}
// The row reader must also be exhausted once the vectorized reader is done.
Assert.assertEquals(false, rr.nextBatch(batch));
}
Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.
From the class TestOrcFile, the method testTimestamp:
/**
 * Writes a list of timestamps with varying nanosecond precision to an ORC
 * file (format version 0.11) and reads them back, verifying the nanosecond
 * component survives the round trip and that the file schema has the
 * expected single-column shape.
 *
 * @throws Exception on any writer/reader failure
 */
@Test
public void testTimestamp() throws Exception {
ObjectInspector inspector;
synchronized (TestOrcFile.class) {
inspector = ObjectInspectorFactory.getReflectionObjectInspector(Timestamp.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
}
Writer writer = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000).bufferSize(10000).version(OrcFile.Version.V_0_11));
List<Timestamp> tslist = Lists.newArrayList();
tslist.add(Timestamp.valueOf("2037-01-01 00:00:00.000999"));
tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.000000222"));
tslist.add(Timestamp.valueOf("1999-01-01 00:00:00.999999999"));
tslist.add(Timestamp.valueOf("1995-01-01 00:00:00.688888888"));
tslist.add(Timestamp.valueOf("2002-01-01 00:00:00.1"));
tslist.add(Timestamp.valueOf("2010-03-02 00:00:00.000009001"));
tslist.add(Timestamp.valueOf("2005-01-01 00:00:00.000002229"));
tslist.add(Timestamp.valueOf("2006-01-01 00:00:00.900203003"));
tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.800000007"));
tslist.add(Timestamp.valueOf("1996-08-02 00:00:00.723100809"));
tslist.add(Timestamp.valueOf("1998-11-02 00:00:00.857340643"));
tslist.add(Timestamp.valueOf("2008-10-02 00:00:00"));
for (Timestamp ts : tslist) {
writer.addRow(ts);
}
writer.close();
Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
RecordReader rows = reader.rows();
int idx = 0;
while (rows.hasNext()) {
Object row = rows.next(null);
Timestamp expectedTs = tslist.get(idx++);
// Direct assertEquals reports both values on failure; the original
// "if (mismatch) assertTrue(false)" pattern hid the offending values.
assertEquals(expectedTs.getNanos(), ((TimestampWritable) row).getNanos());
}
// Guard against a silently-empty reader: every written row must be read back.
assertEquals(tslist.size(), idx);
// Single primitive column: the schema's maximum column id is 0.
assertEquals(0, writer.getSchema().getMaximumId());
boolean[] expected = new boolean[] { false };
boolean[] included = OrcUtils.includeColumns("", writer.getSchema());
assertEquals(true, Arrays.equals(expected, included));
}
Aggregations