Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
Class TestMapStructures, method testMapWithComplexKey:
@Test
public void testMapWithComplexKey() throws Exception {
  Path test = writeDirect("MapWithComplexKey",
      Types.buildMessage()
          .optionalGroup().as(MAP)
              .repeatedGroup()
                  .requiredGroup()
                      .required(INT32).named("x")
                      .required(INT32).named("y")
                      .named("key")
                  .optional(DOUBLE).named("value")
                  .named("key_value")
              .named("matrix")
          .named("MapWithComplexKey"),
      new TestArrayCompatibility.DirectWriter() {
        @Override
        public void write(RecordConsumer rc) {
          rc.startMessage();
          rc.startField("matrix", 0);
          rc.startGroup();
          rc.startField("key_value", 0);
          rc.startGroup();
          rc.startField("key", 0);
          rc.startGroup();
          rc.startField("x", 0);
          rc.addInteger(7);
          rc.endField("x", 0);
          rc.startField("y", 1);
          rc.addInteger(22);
          rc.endField("y", 1);
          rc.endGroup();
          rc.endField("key", 0);
          rc.startField("value", 1);
          rc.addDouble(3.14);
          rc.endField("value", 1);
          rc.endGroup();
          rc.endField("key_value", 0);
          rc.endGroup();
          rc.endField("matrix", 0);
          rc.endMessage();
        }
      });
  ArrayWritable expected = list(
      record(record(new IntWritable(7), new IntWritable(22)),
          new DoubleWritable(3.14)));

  List<ArrayWritable> records = read(test);
  Assert.assertEquals("Should have only one record", 1, records.size());
  assertEquals("Should match expected record", expected, records.get(0));

  deserialize(records.get(0),
      Arrays.asList("matrix"),
      Arrays.asList("map<struct<x:int,y:int>,bigint>"));
}
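The expected value above is built with the record() and list() helpers from the test's parent class. As a rough guide to the nesting they produce, here is a hypothetical sketch of such helpers, assuming list() merely adds the extra ArrayWritable wrapper that the Parquet record reader emits around a repeated group; the class name WritableHelpers and the exact wrapping are assumptions, not the Hive source.

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Writable;

public final class WritableHelpers {

  private WritableHelpers() {
  }

  // A struct or a single map entry: its fields packed into one ArrayWritable.
  public static ArrayWritable record(Writable... fields) {
    return new ArrayWritable(Writable.class, fields);
  }

  // A repeated group (list or map): the entry records wrapped in one more
  // ArrayWritable layer, mirroring how the Parquet-Hive reader nests
  // repeated groups (assumption).
  public static ArrayWritable list(Writable... entries) {
    return new ArrayWritable(Writable.class, new Writable[] { record(entries) });
  }
}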
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
Class TestMapStructures, method testStringMapRequiredPrimitive:
@Test
public void testStringMapRequiredPrimitive() throws Exception {
  Path test = writeDirect("StringMapRequiredPrimitive",
      Types.buildMessage()
          .optionalGroup().as(MAP)
              .repeatedGroup()
                  .required(BINARY).as(UTF8).named("key")
                  .required(INT32).named("value")
                  .named("key_value")
              .named("votes")
          .named("StringMapRequiredPrimitive"),
      new TestArrayCompatibility.DirectWriter() {
        @Override
        public void write(RecordConsumer rc) {
          rc.startMessage();
          rc.startField("votes", 0);
          rc.startGroup();
          rc.startField("key_value", 0);
          rc.startGroup();
          rc.startField("key", 0);
          rc.addBinary(Binary.fromString("lettuce"));
          rc.endField("key", 0);
          rc.startField("value", 1);
          rc.addInteger(34);
          rc.endField("value", 1);
          rc.endGroup();
          rc.startGroup();
          rc.startField("key", 0);
          rc.addBinary(Binary.fromString("cabbage"));
          rc.endField("key", 0);
          rc.startField("value", 1);
          rc.addInteger(18);
          rc.endField("value", 1);
          rc.endGroup();
          rc.endField("key_value", 0);
          rc.endGroup();
          rc.endField("votes", 0);
          rc.endMessage();
        }
      });
  ArrayWritable expected = list(
      record(new Text("lettuce"), new IntWritable(34)),
      record(new Text("cabbage"), new IntWritable(18)));

  List<ArrayWritable> records = read(test);
  Assert.assertEquals("Should have only one record", 1, records.size());
  assertEquals("Should match expected record", expected, records.get(0));

  deserialize(records.get(0),
      Arrays.asList("votes"),
      Arrays.asList("map<string,int>"));
}
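To make the shape of the expected value concrete, the sketch below walks a map encoded this way and prints its entries. It assumes the nesting described above (one wrapper ArrayWritable around the key/value entry records); the class and method names are illustrative only, not part of the Hive test.

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapEntryWalker {

  // Would print "lettuce -> 34" and "cabbage -> 18" for the record read back above.
  public static void printVotes(ArrayWritable votes) {
    // Unwrap the repeated group (assumed single wrapper layer).
    Writable[] entries = ((ArrayWritable) votes.get()[0]).get();
    for (Writable entry : entries) {
      // Each entry is a (key, value) record.
      Writable[] kv = ((ArrayWritable) entry).get();
      Text key = (Text) kv[0];
      IntWritable value = (IntWritable) kv[1];
      System.out.println(key + " -> " + value.get());
    }
  }
}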
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
Class TestDeepParquetHiveMapInspector, method testEmptyContainer:
@Test
public void testEmptyContainer() {
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new ArrayWritable[0]);
  assertNull("Should be null", inspector.getMapValueElement(map, new ShortWritable((short) 0)));
}
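The assertion relies on the inspector returning null when the map container has no entries. The following is a simplified sketch of that lookup contract, not the actual DeepParquetHiveMapInspector code: with zero entries there is nothing to compare the key against, so the result is null.

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Writable;

public class MapLookupSketch {

  // Returns the value paired with the given key, or null if the container is
  // empty or the key is absent. Entries are assumed to be (key, value) records.
  public static Writable lookup(ArrayWritable map, Writable key) {
    if (map == null || map.get() == null || map.get().length == 0) {
      return null; // empty container, as in the test above
    }
    for (Writable entry : map.get()) {
      Writable[] kv = ((ArrayWritable) entry).get();
      if (key.equals(kv[0])) {
        return kv[1];
      }
    }
    return null; // key not present
  }
}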
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
Class TestParquetHiveArrayInspector, method testRegularList:
@Test
public void testRegularList() {
  final ArrayWritable list = new ArrayWritable(Writable.class,
      new Writable[] { new IntWritable(3), new IntWritable(5), new IntWritable(1) });

  final List<Writable> expected = new ArrayList<Writable>();
  expected.add(new IntWritable(3));
  expected.add(new IntWritable(5));
  expected.add(new IntWritable(1));

  assertEquals("Wrong size", 3, inspector.getListLength(list));
  assertEquals("Wrong result of inspection", expected, inspector.getList(list));

  for (int i = 0; i < expected.size(); ++i) {
    assertEquals("Wrong result of inspection", expected.get(i), inspector.getListElement(list, i));
  }

  assertNull("Should be null", inspector.getListElement(list, 3));
}
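The last assertion depends on an out-of-range index yielding null rather than an exception. Below is a minimal sketch of that bounds behavior, assuming the list is backed directly by the ArrayWritable's array; it illustrates the contract the test checks and is not the inspector's source.

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Writable;

public class ListElementSketch {

  // Returns the element at the given index, or null when the index is out of range.
  public static Writable elementAt(ArrayWritable list, int index) {
    if (list == null) {
      return null;
    }
    Writable[] elements = list.get();
    if (index < 0 || index >= elements.length) {
      return null; // e.g. index 3 in the three-element list above
    }
    return elements[index];
  }
}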
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
Class TestParquetHiveArrayInspector, method testEmptyContainer:
@Test
public void testEmptyContainer() {
  final ArrayWritable list = new ArrayWritable(ArrayWritable.class, new ArrayWritable[0]);
  assertEquals("Wrong size", 0, inspector.getListLength(list));
  assertNotNull("Should not be null", inspector.getList(list));
  assertNull("Should be null", inspector.getListElement(list, 0));
}