Search in sources:

Example 51 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.

The class TestMapStructures, method testMapWithComplexKey.

/**
 * Verifies that a Parquet map whose key is a complex (struct) type round-trips
 * into Hive writables: the map {struct(x=7, y=22) -> 3.14} is written directly
 * with a RecordConsumer and read back as nested ArrayWritables.
 *
 * Schema: optional group `matrix` (MAP) of repeated `key_value` pairs, where
 * `key` is a required group {x:int32, y:int32} and `value` is an optional DOUBLE.
 */
@Test
public void testMapWithComplexKey() throws Exception {
    Path test = writeDirect("MapWithComplexKey", Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().requiredGroup().required(INT32).named("x").required(INT32).named("y").named("key").optional(DOUBLE).named("value").named("key_value").named("matrix").named("MapWithComplexKey"), new TestArrayCompatibility.DirectWriter() {

        @Override
        public void write(RecordConsumer rc) {
            // Emit exactly one message containing one key/value pair; the
            // start/end calls must mirror the schema's group nesting.
            rc.startMessage();
            rc.startField("matrix", 0);
            rc.startGroup();
            rc.startField("key_value", 0);
            rc.startGroup();
            rc.startField("key", 0);
            rc.startGroup();
            rc.startField("x", 0);
            rc.addInteger(7);
            rc.endField("x", 0);
            rc.startField("y", 1);
            rc.addInteger(22);
            rc.endField("y", 1);
            rc.endGroup();
            rc.endField("key", 0);
            rc.startField("value", 1);
            rc.addDouble(3.14);
            rc.endField("value", 1);
            rc.endGroup();
            rc.endField("key_value", 0);
            rc.endGroup();
            rc.endField("matrix", 0);
            rc.endMessage();
        }
    });
    ArrayWritable expected = list(record(record(new IntWritable(7), new IntWritable(22)), new DoubleWritable(3.14)));
    List<ArrayWritable> records = read(test);
    Assert.assertEquals("Should have only one record", 1, records.size());
    assertEquals("Should match expected record", expected, records.get(0));
    // FIX: the value column is declared DOUBLE and written with addDouble, and
    // the expected writable is a DoubleWritable, so the Hive value type must be
    // `double` (was `bigint`, which mismatched the Parquet schema above).
    deserialize(records.get(0), Arrays.asList("matrix"), Arrays.asList("map<struct<x:int,y:int>,double>"));
}
Also used : Path(org.apache.hadoop.fs.Path) ArrayWritable(org.apache.hadoop.io.ArrayWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) RecordConsumer(org.apache.parquet.io.api.RecordConsumer) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)

Example 52 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.

The class TestMapStructures, method testStringMapRequiredPrimitive.

/**
 * Verifies that a Parquet map with a required string key and required int
 * value round-trips into Hive writables: {"lettuce" -> 34, "cabbage" -> 18}
 * is written directly with a RecordConsumer and read back as ArrayWritables.
 *
 * Schema: optional group `votes` (MAP) of repeated `key_value` pairs, where
 * `key` is a required BINARY (UTF8) and `value` is a required INT32.
 */
@Test
public void testStringMapRequiredPrimitive() throws Exception {
    Path test = writeDirect("StringMapRequiredPrimitive", Types.buildMessage().optionalGroup().as(MAP).repeatedGroup().required(BINARY).as(UTF8).named("key").required(INT32).named("value").named("key_value").named("votes").named("StringMapRequiredPrimitive"), new TestArrayCompatibility.DirectWriter() {

        @Override
        public void write(RecordConsumer rc) {
            // Emit one message with two key/value entries; each entry is its
            // own repeated `key_value` group inside the single `votes` group.
            rc.startMessage();
            rc.startField("votes", 0);
            rc.startGroup();
            rc.startField("key_value", 0);
            // First entry: "lettuce" -> 34
            rc.startGroup();
            rc.startField("key", 0);
            rc.addBinary(Binary.fromString("lettuce"));
            rc.endField("key", 0);
            rc.startField("value", 1);
            rc.addInteger(34);
            rc.endField("value", 1);
            rc.endGroup();
            // Second entry: "cabbage" -> 18
            rc.startGroup();
            rc.startField("key", 0);
            rc.addBinary(Binary.fromString("cabbage"));
            rc.endField("key", 0);
            rc.startField("value", 1);
            rc.addInteger(18);
            rc.endField("value", 1);
            rc.endGroup();
            rc.endField("key_value", 0);
            rc.endGroup();
            rc.endField("votes", 0);
            rc.endMessage();
        }
    });
    ArrayWritable expected = list(record(new Text("lettuce"), new IntWritable(34)), record(new Text("cabbage"), new IntWritable(18)));
    List<ArrayWritable> records = read(test);
    Assert.assertEquals("Should have only one record", 1, records.size());
    assertEquals("Should match expected record", expected, records.get(0));
    // The Hive type string matches the Parquet schema: string key, int value.
    deserialize(records.get(0), Arrays.asList("votes"), Arrays.asList("map<string,int>"));
}
Also used : Path(org.apache.hadoop.fs.Path) ArrayWritable(org.apache.hadoop.io.ArrayWritable) Text(org.apache.hadoop.io.Text) RecordConsumer(org.apache.parquet.io.api.RecordConsumer) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)

Example 53 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.

The class TestDeepParquetHiveMapInspector, method testEmptyContainer.

/**
 * Looking up any key in a map backed by an empty ArrayWritable must return null.
 */
@Test
public void testEmptyContainer() {
    final ArrayWritable emptyMap = new ArrayWritable(ArrayWritable.class, new ArrayWritable[0]);
    assertNull("Should be null", inspector.getMapValueElement(emptyMap, new ShortWritable((short) 0)));
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Test(org.junit.Test)

Example 54 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.

The class TestParquetHiveArrayInspector, method testRegularList.

/**
 * A three-element int list backed by an ArrayWritable is inspected correctly:
 * length, full-list retrieval, per-index element access, and null past the end.
 */
@Test
public void testRegularList() {
    final ArrayWritable backing = new ArrayWritable(Writable.class,
            new Writable[] { new IntWritable(3), new IntWritable(5), new IntWritable(1) });
    final List<Writable> expected = new ArrayList<Writable>();
    for (int value : new int[] { 3, 5, 1 }) {
        expected.add(new IntWritable(value));
    }
    assertEquals("Wrong size", 3, inspector.getListLength(backing));
    assertEquals("Wrong result of inspection", expected, inspector.getList(backing));
    int index = 0;
    for (Writable element : expected) {
        assertEquals("Wrong result of inspection", element, inspector.getListElement(backing, index));
        index++;
    }
    // One past the last valid index must yield null rather than throw.
    assertNull("Should be null", inspector.getListElement(backing, 3));
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) ArrayList(java.util.ArrayList) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)

Example 55 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.

The class TestParquetHiveArrayInspector, method testEmptyContainer.

/**
 * An empty ArrayWritable inspected as a list has length 0, a non-null (empty)
 * list view, and null for any element lookup.
 */
@Test
public void testEmptyContainer() {
    final ArrayWritable empty = new ArrayWritable(ArrayWritable.class, new ArrayWritable[0]);
    assertEquals("Wrong size", 0, inspector.getListLength(empty));
    assertNotNull("Should not be null", inspector.getList(empty));
    assertNull("Should be null", inspector.getListElement(empty, 0));
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) Test(org.junit.Test)

Aggregations

ArrayWritable (org.apache.hadoop.io.ArrayWritable)72 Test (org.junit.Test)41 IntWritable (org.apache.hadoop.io.IntWritable)31 Writable (org.apache.hadoop.io.Writable)29 Path (org.apache.hadoop.fs.Path)18 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)18 LongWritable (org.apache.hadoop.io.LongWritable)18 RecordConsumer (org.apache.parquet.io.api.RecordConsumer)18 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)15 ArrayList (java.util.ArrayList)13 BytesWritable (org.apache.hadoop.io.BytesWritable)10 List (java.util.List)9 BooleanWritable (org.apache.hadoop.io.BooleanWritable)8 FloatWritable (org.apache.hadoop.io.FloatWritable)8 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)6 NullWritable (org.apache.hadoop.io.NullWritable)6 Text (org.apache.hadoop.io.Text)6 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)5 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)5 MapWritable (org.apache.hadoop.io.MapWritable)5