Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From class TestStandardParquetHiveMapInspector, method testRegularMap.
@Test
public void testRegularMap() {
  final Writable[] entry1 = new Writable[] { new IntWritable(0), new IntWritable(1) };
  final Writable[] entry2 = new Writable[] { new IntWritable(2), new IntWritable(3) };
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[] {
      new ArrayWritable(Writable.class, entry1),
      new ArrayWritable(Writable.class, entry2) });
  assertEquals("Wrong result of inspection", new IntWritable(1),
      inspector.getMapValueElement(map, new IntWritable(0)));
  assertEquals("Wrong result of inspection", new IntWritable(3),
      inspector.getMapValueElement(map, new IntWritable(2)));
  assertNull("Wrong result of inspection",
      inspector.getMapValueElement(map, new ShortWritable((short) 0)));
  assertNull("Wrong result of inspection",
      inspector.getMapValueElement(map, new ShortWritable((short) 2)));
}
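The two assertNull calls pass because key comparison is type-sensitive: a ShortWritable never equals an IntWritable, even for the same numeric value. As a rough illustration of why, here is a minimal sketch of key lookup over this nested ArrayWritable layout; it is an assumption for clarity, not the actual StandardParquetHiveMapInspector implementation, and the lookup helper name is hypothetical.

// Hypothetical sketch: resolve a map value against the { key, value }
// entry layout built in the test above. Not copied from Hive source.
private static Writable lookup(ArrayWritable map, Writable key) {
  for (Writable entry : map.get()) {
    final Writable[] keyValue = ((ArrayWritable) entry).get();
    if (key.equals(keyValue[0])) {
      return keyValue[1]; // each entry is laid out as { key, value }
    }
  }
  // no equal key: e.g. new ShortWritable((short) 0) does not
  // equal new IntWritable(0), so the test's short-keyed lookups return null
  return null;
}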
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From class AbstractTestParquetDirect, method read.
public static List<ArrayWritable> read(Path parquetFile) throws IOException {
  List<ArrayWritable> records = new ArrayList<ArrayWritable>();
  RecordReader<NullWritable, ArrayWritable> reader = new MapredParquetInputFormat()
      .getRecordReader(
          new FileSplit(parquetFile, 0, fileLength(parquetFile), (String[]) null),
          new JobConf(), null);
  NullWritable alwaysNull = reader.createKey();
  ArrayWritable record = reader.createValue();
  while (reader.next(alwaysNull, record)) {
    records.add(record);
    // a new value so the last isn't clobbered
    record = reader.createValue();
  }
  return records;
}
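The fresh createValue() call per iteration matters because Hadoop record readers reuse the value object across next() calls; without it, every list slot would end up pointing at the same, last-read record. A hypothetical caller of this helper (the file path is a made-up placeholder) just iterates the returned records:

// Hypothetical usage; "/tmp/example.parquet" is a placeholder path
// and this assumes a java.util.Arrays import.
List<ArrayWritable> records = read(new Path("/tmp/example.parquet"));
for (ArrayWritable record : records) {
  System.out.println(Arrays.toString(record.get()));
}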
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From class TestArrayCompatibility, method testNewOptionalGroupInList.
@Test
public void testNewOptionalGroupInList() throws Exception {
  Path test = writeDirect("NewOptionalGroupInList",
      Types.buildMessage()
          .optionalGroup().as(LIST)
              .repeatedGroup()
                  .optionalGroup()
                      .required(DOUBLE).named("latitude")
                      .required(DOUBLE).named("longitude")
                  .named("element")
              .named("list")
          .named("locations")
          .named("NewOptionalGroupInList"),
      new DirectWriter() {
        @Override
        public void write(RecordConsumer rc) {
          rc.startMessage();
          rc.startField("locations", 0);
          rc.startGroup();
          // start writing array contents
          rc.startField("list", 0);
          // write a non-null element
          rc.startGroup(); // array level
          rc.startField("element", 0);
          rc.startGroup();
          rc.startField("latitude", 0);
          rc.addDouble(0.0);
          rc.endField("latitude", 0);
          rc.startField("longitude", 1);
          rc.addDouble(0.0);
          rc.endField("longitude", 1);
          rc.endGroup();
          rc.endField("element", 0);
          rc.endGroup(); // array level
          // write a null element (element field is omitted)
          rc.startGroup(); // array level
          rc.endGroup(); // array level
          // write a second non-null element
          rc.startGroup(); // array level
          rc.startField("element", 0);
          rc.startGroup();
          rc.startField("latitude", 0);
          rc.addDouble(0.0);
          rc.endField("latitude", 0);
          rc.startField("longitude", 1);
          rc.addDouble(180.0);
          rc.endField("longitude", 1);
          rc.endGroup();
          rc.endField("element", 0);
          rc.endGroup(); // array level
          // finished writing array contents
          rc.endField("list", 0);
          rc.endGroup();
          rc.endField("locations", 0);
          rc.endMessage();
        }
      });
  ArrayWritable expected = list(
      record(new DoubleWritable(0.0), new DoubleWritable(0.0)),
      null,
      record(new DoubleWritable(0.0), new DoubleWritable(180.0)));
  List<ArrayWritable> records = read(test);
  Assert.assertEquals("Should have only one record", 1, records.size());
  assertEquals("Should match expected record", expected, records.get(0));
}
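The expected value is assembled with record(...) and list(...) helpers from AbstractTestParquetDirect. A plausible shape for them, inferred only from how they are used in these tests (the actual Hive definitions may differ), is:

// Sketch only: assumed helper definitions, not copied from Hive source.
// Both wrap values in the nested ArrayWritable structure that the Hive
// Parquet reader produces.
public static ArrayWritable record(Writable... fields) {
  return new ArrayWritable(Writable.class, fields);
}

public static ArrayWritable list(Writable... elements) {
  // a LIST-annotated group deserializes as a single-field record whose
  // one field is the array of elements, hence the extra wrapping level
  return record(new ArrayWritable(Writable.class, elements));
}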
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From class TestArrayCompatibility, method testThriftPrimitiveInList.
@Test
public void testThriftPrimitiveInList() throws Exception {
  Path test = writeDirect("ThriftPrimitiveInList",
      Types.buildMessage()
          .requiredGroup().as(LIST)
              .repeated(INT32).named("list_of_ints_tuple")
          .named("list_of_ints")
          .named("ThriftPrimitiveInList"),
      new DirectWriter() {
        @Override
        public void write(RecordConsumer rc) {
          rc.startMessage();
          rc.startField("list_of_ints", 0);
          rc.startGroup();
          rc.startField("list_of_ints_tuple", 0);
          rc.addInteger(34);
          rc.addInteger(35);
          rc.addInteger(36);
          rc.endField("list_of_ints_tuple", 0);
          rc.endGroup();
          rc.endField("list_of_ints", 0);
          rc.endMessage();
        }
      });
  ArrayWritable expected = list(new IntWritable(34), new IntWritable(35), new IntWritable(36));
  List<ArrayWritable> records = read(test);
  Assert.assertEquals("Should have only one record", 1, records.size());
  assertEquals("Should match expected record", expected, records.get(0));
}
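The builder chain above corresponds to the schema text below, shown parsed with Parquet's MessageTypeParser for readability. In this legacy Thrift-style layout the repeated field is the list element itself rather than a wrapper group, which is why the elements can be written back to back with no per-element startGroup/endGroup.

// Equivalent schema, assuming org.apache.parquet.schema imports
// (MessageType, MessageTypeParser).
MessageType schema = MessageTypeParser.parseMessageType(
    "message ThriftPrimitiveInList {\n"
    + "  required group list_of_ints (LIST) {\n"
    + "    repeated int32 list_of_ints_tuple;\n"
    + "  }\n"
    + "}");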
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From class TestArrayCompatibility, method testThriftSingleFieldGroupInList.
@Test
public void testThriftSingleFieldGroupInList() throws Exception {
  // this tests the case where older data has an ambiguous structure, but the
  // correct interpretation can be determined from the repeated name
  Path test = writeDirect("ThriftSingleFieldGroupInList",
      Types.buildMessage()
          .optionalGroup().as(LIST)
              .repeatedGroup()
                  .required(INT64).named("count")
              .named("single_element_groups_tuple")
          .named("single_element_groups")
          .named("ThriftSingleFieldGroupInList"),
      new DirectWriter() {
        @Override
        public void write(RecordConsumer rc) {
          rc.startMessage();
          rc.startField("single_element_groups", 0);
          rc.startGroup();
          // start writing array contents
          rc.startField("single_element_groups_tuple", 0);
          rc.startGroup();
          rc.startField("count", 0);
          rc.addLong(1234L);
          rc.endField("count", 0);
          rc.endGroup();
          rc.startGroup();
          rc.startField("count", 0);
          rc.addLong(2345L);
          rc.endField("count", 0);
          rc.endGroup();
          // finished writing array contents
          rc.endField("single_element_groups_tuple", 0);
          rc.endGroup();
          rc.endField("single_element_groups", 0);
          rc.endMessage();
        }
      });
  ArrayWritable expected = list(record(new LongWritable(1234L)), record(new LongWritable(2345L)));
  List<ArrayWritable> records = read(test);
  Assert.assertEquals("Should have only one record", 1, records.size());
  assertEquals("Should match expected record", expected, records.get(0));
}
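For reference, the same schema rendered as Parquet schema text. A repeated group with a single field is normally ambiguous (is the group the element, or a wrapper around the element?), but here the legacy "_tuple" suffix on the repeated name signals that the group itself is the element, so the expected value is a list of single-field records rather than a list of longs.

// Equivalent schema, assuming org.apache.parquet.schema imports
// (MessageType, MessageTypeParser).
MessageType schema = MessageTypeParser.parseMessageType(
    "message ThriftSingleFieldGroupInList {\n"
    + "  optional group single_element_groups (LIST) {\n"
    + "    repeated group single_element_groups_tuple {\n"
    + "      required int64 count;\n"
    + "    }\n"
    + "  }\n"
    + "}");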