Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
The example below is taken from the class TestLazySimpleSerDe, method testLazySimpleSerDe.
/**
* Test the LazySimpleSerDe class.
*/
public void testLazySimpleSerDe() throws Throwable {
  try {
    // Create the SerDe
    LazySimpleSerDe serDe = new LazySimpleSerDe();
    Configuration conf = new Configuration();
    Properties tbl = new Properties();
    // serialization.format "9" selects ASCII 9 (tab) as the field delimiter
    tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
    tbl.setProperty("columns",
        "abyte,ashort,aint,along,adouble,astring,anullint,anullstring,aba");
    tbl.setProperty("columns.types",
        "tinyint:smallint:int:bigint:double:string:int:string:binary");
    // the literal string "NULL" is treated as the null sentinel
    tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);

    // Data: the raw input row ends with one raw byte for the binary column...
    Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\tNULL\t");
    t.append(new byte[] { (byte) Integer.parseInt("10111111", 2) }, 0, 1);
    // ...while the expected serialized form carries that byte Base64-encoded
    StringBuilder sb = new StringBuilder(
        "123\t456\t789\t1000\t5.3\thive and hadoop\t1\tNULL\t");
    String s = sb.append(new String(Base64.encodeBase64(
        new byte[] { (byte) Integer.parseInt("10111111", 2) }))).toString();
    Object[] expectedFieldsData = {
        new ByteWritable((byte) 123), new ShortWritable((short) 456),
        new IntWritable(789), new LongWritable(1000),
        new DoubleWritable(5.3), new Text("hive and hadoop"),
        new IntWritable(1), null,
        new BytesWritable(new byte[] { (byte) Integer.parseInt("10111111", 2) }) };

    // Test
    deserializeAndSerialize(serDe, t, s, expectedFieldsData);
  } catch (Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
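
The deserializeAndSerialize helper is defined elsewhere in the test class and is not shown above. Below is a minimal sketch of what such a helper plausibly does, built only on the public SerDe API (getObjectInspector, deserialize, serialize); the exact field-by-field comparison and assertion messages are assumptions, not the verbatim Hive source.

// A plausible reconstruction of the helper, not the exact Hive source:
// round-trips a row through the SerDe and checks both directions.
// Assumes imports for java.util.List, org.apache.hadoop.io.Text,
// org.apache.hadoop.hive.serde2.* and JUnit's assertEquals.
private void deserializeAndSerialize(LazySimpleSerDe serDe, Text t, String s,
    Object[] expectedFieldsData) throws SerDeException {
  // Inspect the row structure declared by the SerDe
  StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
  List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
  assertEquals(expectedFieldsData.length, fieldRefs.size());

  // Deserialize the raw Text and compare each field's value
  Object row = serDe.deserialize(t);
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
    if (fieldData != null) {
      // Lazy fields wrap their value; unwrap to the Writable for comparison
      fieldData = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
    }
    assertEquals("Field " + i, expectedFieldsData[i], fieldData);
  }

  // Serialize the deserialized row back and compare with the expected string
  Text serializedText = (Text) serDe.serialize(row, oi);
  assertEquals(s, serializedText.toString());
}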