Use of org.apache.hadoop.hive.ql.io.parquet.writable.BinaryWritable in project parquet-mr by Apache.
From the class TestParquetSerDe, method testParquetHiveSerDe:
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
import org.apache.hadoop.hive.ql.io.parquet.writable.BinaryWritable;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;
import parquet.io.api.Binary;

public void testParquetHiveSerDe() throws Throwable {
  try {
    // Create and initialize the SerDe
    System.out.println("test: testParquetHiveSerDe");
    final ParquetHiveSerDe serDe = new ParquetHiveSerDe();
    final Configuration conf = new Configuration();
    final Properties tbl = createProperties();
    serDe.initialize(conf, tbl);

    // Build a row with eight fields: primitives, a string, a map and a
    // list. String values are wrapped in BinaryWritable.
    final Writable[] arr = new Writable[8];
    arr[0] = new ByteWritable((byte) 123);
    arr[1] = new ShortWritable((short) 456);
    arr[2] = new IntWritable(789);
    arr[3] = new LongWritable(1000L);
    arr[4] = new DoubleWritable(5.3);
    arr[5] = new BinaryWritable(Binary.fromString("hive and hadoop and parquet. Big family."));

    // A map field is an ArrayWritable of key/value pairs, wrapped in a
    // single-element container ArrayWritable.
    final Writable[] mapContainer = new Writable[1];
    final Writable[] map = new Writable[3];
    for (int i = 0; i < 3; ++i) {
      final Writable[] pair = new Writable[2];
      pair[0] = new BinaryWritable(Binary.fromString("key_" + i));
      pair[1] = new IntWritable(i);
      map[i] = new ArrayWritable(Writable.class, pair);
    }
    mapContainer[0] = new ArrayWritable(Writable.class, map);
    arr[6] = new ArrayWritable(Writable.class, mapContainer);

    // A list field uses the same single-element container wrapping.
    final Writable[] arrayContainer = new Writable[1];
    final Writable[] array = new Writable[5];
    for (int i = 0; i < 5; ++i) {
      array[i] = new BinaryWritable(Binary.fromString("elem_" + i));
    }
    arrayContainer[0] = new ArrayWritable(Writable.class, array);
    arr[7] = new ArrayWritable(Writable.class, arrayContainer);
    final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);

    // Round-trip the row through the SerDe and verify it survives
    deserializeAndSerializeLazySimple(serDe, arrWritable);
    System.out.println("test: testParquetHiveSerDe - OK");
  } catch (final Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
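
The snippet relies on two helpers, createProperties() and deserializeAndSerializeLazySimple(), defined elsewhere in TestParquetSerDe and not shown on this page. Below is a minimal sketch of what they could look like, not the project's actual code: "columns" and "columns.types" are the standard Hive SerDe table properties, but the column names, the cast of serialize()'s result to ArrayWritable, and the assertion are assumptions chosen to line up with the eight fields built above.

// Additional imports assumed by this sketch:
//   org.apache.hadoop.hive.serde2.SerDeException
//   org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector
//   static org.junit.Assert.assertEquals

private Properties createProperties() {
  final Properties tbl = new Properties();
  // Hypothetical column names; the types must match the Writables above.
  tbl.setProperty("columns", "abyte,ashort,aint,along,adouble,astring,amap,alist");
  tbl.setProperty("columns.types",
      "tinyint:smallint:int:bigint:double:string:map<string,int>:array<string>");
  return tbl;
}

private void deserializeAndSerializeLazySimple(final ParquetHiveSerDe serDe,
    final ArrayWritable t) throws SerDeException {
  final StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
  // Deserialize the Writable row, then serialize it back through the inspector.
  final Object row = serDe.deserialize(t);
  // Assumes this SerDe serializes back to an ArrayWritable.
  final ArrayWritable serialized = (ArrayWritable) serDe.serialize(row, oi);
  // A round-trip should at least preserve the number of top-level fields.
  assertEquals(t.get().length, serialized.get().length);
}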