use of org.apache.hadoop.io.IntWritable in project hive by apache.
the class TestVectorTimestampExpressions method compareToUDFYearLong.
private void compareToUDFYearLong(Timestamp t, int y) {
  UDFYear udf = new UDFYear();
  TimestampWritable tsw = new TimestampWritable(t);
  IntWritable res = udf.evaluate(tsw);
  if (res.get() != y) {
    System.out.printf("%d vs %d for %s, %d\n", res.get(), y, t.toString(), tsw.getTimestamp().getTime() / 1000);
  }
  Assert.assertEquals(res.get(), y);
}
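As context, a minimal usage sketch for this helper; the timestamps and expected years below are illustrative and not taken from the original test:

// Illustrative only: exercise compareToUDFYearLong around the epoch boundary.
// UDFYear returns the year as an IntWritable, which the helper above compares to y.
compareToUDFYearLong(Timestamp.valueOf("1969-12-31 23:59:59"), 1969);
compareToUDFYearLong(Timestamp.valueOf("1970-01-01 00:00:01"), 1970);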
use of org.apache.hadoop.io.IntWritable in project hive by apache.
the class TestVectorizedORCReader method checkVectorizedReader.
private void checkVectorizedReader() throws Exception {
  Reader vreader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
  RecordReaderImpl vrr = (RecordReaderImpl) vreader.rows();
  RecordReaderImpl rr = (RecordReaderImpl) reader.rows();
  VectorizedRowBatch batch = reader.getSchema().createRowBatch();
  OrcStruct row = null;
  // Check the vectorized ORC reader against the row-by-row ORC reader
  while (vrr.nextBatch(batch)) {
    for (int i = 0; i < batch.size; i++) {
      row = (OrcStruct) rr.next(row);
      for (int j = 0; j < batch.cols.length; j++) {
        Object a = (row.getFieldValue(j));
        ColumnVector cv = batch.cols[j];
        // if the value is repeating, use row 0
        int rowId = cv.isRepeating ? 0 : i;
        // make sure the null flag agrees
        if (a == null) {
          Assert.assertEquals(true, !cv.noNulls && cv.isNull[rowId]);
        } else if (a instanceof BooleanWritable) {
          // Boolean values are stored as 1's and 0's, so convert and compare
          Long temp = (long) (((BooleanWritable) a).get() ? 1 : 0);
          long b = ((LongColumnVector) cv).vector[rowId];
          Assert.assertEquals(temp.toString(), Long.toString(b));
        } else if (a instanceof TimestampWritable) {
          // Timestamps are stored in a TimestampColumnVector, so compare them directly
          TimestampWritable t = ((TimestampWritable) a);
          TimestampColumnVector tcv = ((TimestampColumnVector) cv);
          Assert.assertEquals(t.getTimestamp(), tcv.asScratchTimestamp(rowId));
        } else if (a instanceof DateWritable) {
          // Dates are stored as long, so convert and compare
          DateWritable adt = (DateWritable) a;
          long b = ((LongColumnVector) cv).vector[rowId];
          Assert.assertEquals(adt.get().getTime(), DateWritable.daysToMillis((int) b));
        } else if (a instanceof HiveDecimalWritable) {
          // Decimals are stored in a DecimalColumnVector, so compare them directly
          HiveDecimalWritable dec = (HiveDecimalWritable) a;
          HiveDecimalWritable b = ((DecimalColumnVector) cv).vector[rowId];
          Assert.assertEquals(dec, b);
        } else if (a instanceof DoubleWritable) {
          double b = ((DoubleColumnVector) cv).vector[rowId];
          assertEquals(a.toString(), Double.toString(b));
        } else if (a instanceof Text) {
          BytesColumnVector bcv = (BytesColumnVector) cv;
          Text b = new Text();
          b.set(bcv.vector[rowId], bcv.start[rowId], bcv.length[rowId]);
          assertEquals(a, b);
        } else if (a instanceof IntWritable || a instanceof LongWritable || a instanceof ByteWritable || a instanceof ShortWritable) {
          assertEquals(a.toString(), Long.toString(((LongColumnVector) cv).vector[rowId]));
        } else {
          assertEquals("huh", a.getClass().getName());
        }
      }
    }
    // Check repeating
    Assert.assertEquals(false, batch.cols[0].isRepeating);
    Assert.assertEquals(false, batch.cols[1].isRepeating);
    Assert.assertEquals(false, batch.cols[2].isRepeating);
    Assert.assertEquals(true, batch.cols[3].isRepeating);
    Assert.assertEquals(false, batch.cols[4].isRepeating);
    Assert.assertEquals(false, batch.cols[5].isRepeating);
    Assert.assertEquals(false, batch.cols[6].isRepeating);
    Assert.assertEquals(false, batch.cols[7].isRepeating);
    Assert.assertEquals(false, batch.cols[8].isRepeating);
    Assert.assertEquals(false, batch.cols[9].isRepeating);
    // Check noNulls
    Assert.assertEquals(false, batch.cols[0].noNulls);
    Assert.assertEquals(false, batch.cols[1].noNulls);
    Assert.assertEquals(true, batch.cols[2].noNulls);
    Assert.assertEquals(true, batch.cols[3].noNulls);
    Assert.assertEquals(false, batch.cols[4].noNulls);
    Assert.assertEquals(false, batch.cols[5].noNulls);
    Assert.assertEquals(false, batch.cols[6].noNulls);
    Assert.assertEquals(false, batch.cols[7].noNulls);
    Assert.assertEquals(false, batch.cols[8].noNulls);
    Assert.assertEquals(false, batch.cols[9].noNulls);
  }
  Assert.assertEquals(false, rr.nextBatch(batch));
}
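The per-row comparison above leans on two ColumnVector conventions: when isRepeating is set, only entry 0 is populated, and isNull is only meaningful when noNulls is false. A small helper capturing that lookup might look like the sketch below; the helper name is illustrative and not part of the test:

// Illustrative helper: resolve the effective long value for a row, honoring the
// isRepeating and noNulls flags that the loop above checks by hand. Returns null
// for entries flagged as SQL NULL.
private static Long longValueAt(LongColumnVector cv, int row) {
  int rowId = cv.isRepeating ? 0 : row;   // a repeating vector stores one value at index 0
  if (!cv.noNulls && cv.isNull[rowId]) {  // isNull is only consulted when nulls may be present
    return null;
  }
  return cv.vector[rowId];
}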
use of org.apache.hadoop.io.IntWritable in project hive by apache.
the class TestArrayCompatibility method testThriftPrimitiveInList.
@Test
public void testThriftPrimitiveInList() throws Exception {
  Path test = writeDirect("ThriftPrimitiveInList",
      Types.buildMessage()
          .requiredGroup().as(LIST)
              .repeated(INT32).named("list_of_ints_tuple")
          .named("list_of_ints")
          .named("ThriftPrimitiveInList"),
      new DirectWriter() {
        @Override
        public void write(RecordConsumer rc) {
          rc.startMessage();
          rc.startField("list_of_ints", 0);
          rc.startGroup();
          rc.startField("list_of_ints_tuple", 0);
          rc.addInteger(34);
          rc.addInteger(35);
          rc.addInteger(36);
          rc.endField("list_of_ints_tuple", 0);
          rc.endGroup();
          rc.endField("list_of_ints", 0);
          rc.endMessage();
        }
      });
  ArrayWritable expected = list(new IntWritable(34), new IntWritable(35), new IntWritable(36));
  List<ArrayWritable> records = read(test);
  Assert.assertEquals("Should have only one record", 1, records.size());
  assertEquals("Should match expected record", expected, records.get(0));
}
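The list(...) and read(...) calls come from the test's base class rather than this snippet. A hedged sketch of what such a list(...) helper plausibly does; the extra wrapping layer is an assumption based on how Hive's Parquet support nests array fields inside a record:

// Illustrative sketch of a list(...) helper: the record is an ArrayWritable of
// fields, and the array field itself is a nested ArrayWritable of its elements,
// so the IntWritable values get one extra wrapping layer.
private static ArrayWritable list(Writable... elements) {
  return new ArrayWritable(Writable.class,
      new Writable[] { new ArrayWritable(Writable.class, elements) });
}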
use of org.apache.hadoop.io.IntWritable in project hive by apache.
the class TestArrayCompatibility method testAvroPrimitiveInList.
@Test
public void testAvroPrimitiveInList() throws Exception {
  Path test = writeDirect("AvroPrimitiveInList",
      Types.buildMessage()
          .requiredGroup().as(LIST)
              .repeated(INT32).named("array")
          .named("list_of_ints")
          .named("AvroPrimitiveInList"),
      new DirectWriter() {
        @Override
        public void write(RecordConsumer rc) {
          rc.startMessage();
          rc.startField("list_of_ints", 0);
          rc.startGroup();
          rc.startField("array", 0);
          rc.addInteger(34);
          rc.addInteger(35);
          rc.addInteger(36);
          rc.endField("array", 0);
          rc.endGroup();
          rc.endField("list_of_ints", 0);
          rc.endMessage();
        }
      });
  ArrayWritable expected = list(new IntWritable(34), new IntWritable(35), new IntWritable(36));
  List<ArrayWritable> records = read(test);
  Assert.assertEquals("Should have only one record", 1, records.size());
  assertEquals("Should match expected record", expected, records.get(0));
}
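Note that ArrayWritable does not override equals, so the lowercase assertEquals here is presumably a deep-comparison helper from the test's base class. A hedged sketch of checking the same result by unpacking the nested ArrayWritable by hand, assuming the record layout mirrors the expected value built by list(...):

// Illustrative only: unpack the single record and compare the IntWritable
// elements directly instead of relying on a deep-equality helper.
ArrayWritable record = records.get(0);
ArrayWritable listField = (ArrayWritable) record.get()[0];
Writable[] elements = listField.get();
Assert.assertEquals(3, elements.length);
Assert.assertEquals(34, ((IntWritable) elements[0]).get());
Assert.assertEquals(36, ((IntWritable) elements[2]).get());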
use of org.apache.hadoop.io.IntWritable in project hive by apache.
the class TestArrayCompatibility method testUnannotatedListOfPrimitives.
@Test
public void testUnannotatedListOfPrimitives() throws Exception {
  MessageType fileSchema = Types.buildMessage()
      .repeated(INT32).named("list_of_ints")
      .named("UnannotatedListOfPrimitives");
  Path test = writeDirect("UnannotatedListOfPrimitives", fileSchema, new DirectWriter() {
    @Override
    public void write(RecordConsumer rc) {
      rc.startMessage();
      rc.startField("list_of_ints", 0);
      rc.addInteger(34);
      rc.addInteger(35);
      rc.addInteger(36);
      rc.endField("list_of_ints", 0);
      rc.endMessage();
    }
  });
  ArrayWritable expected = list(new IntWritable(34), new IntWritable(35), new IntWritable(36));
  List<ArrayWritable> records = read(test);
  Assert.assertEquals("Should have only one record", 1, records.size());
  assertEquals("Should match expected record", expected, records.get(0));
}
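For contrast with the two-level Thrift/Avro layouts and the bare repeated field above, a hedged sketch of how the same list could be declared in the standard three-level Parquet LIST layout, built with the same Types API; the "list" and "element" names follow the parquet-format LIST specification, not these tests:

// Illustrative only: the modern three-level LIST layout for the same data.
MessageType standardLayout = Types.buildMessage()
    .requiredGroup().as(LIST)
        .repeatedGroup()
            .required(INT32).named("element")
        .named("list")
    .named("list_of_ints")
    .named("StandardPrimitiveInList");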