Use of org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe in project hive by apache.
From the class TestStatsSerde, method deserializeAndSerializeColumnar: it verifies that the SerDe's raw-data-size statistic is correct after both deserialization and serialization of a columnar row.
private void deserializeAndSerializeColumnar(ColumnarSerDe serDe, BytesRefArrayWritable t,
    String[] data) throws SerDeException {
  // Get the row structure
  StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();

  // Deserialize
  Object row = serDe.deserialize(t);

  // Expected raw data size is the total length of the original string fields
  int size = 0;
  for (int i = 0; i < data.length; i++) {
    size += data[i].length();
  }
  assertEquals("serialized size correct after deserialization", size,
      serDe.getSerDeStats().getRawDataSize());
  assertNotSame(0, size);

  BytesRefArrayWritable serializedData = (BytesRefArrayWritable) serDe.serialize(row, oi);

  // After serialization, raw data size should equal the total length of the serialized fields
  size = 0;
  for (int i = 0; i < serializedData.size(); i++) {
    size += serializedData.get(i).getLength();
  }
  assertEquals("serialized size correct after serialization", size,
      serDe.getSerDeStats().getRawDataSize());
  assertNotSame(0, size);
}
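For context, a caller first initializes the SerDe from table properties and packs each field's raw bytes into a BytesRefArrayWritable before invoking the helper. Below is a minimal usage sketch; the two-column schema ("aint,astring") and the sample row are illustrative assumptions, not the exact values used in TestStatsSerde:

// Usage sketch (assumed schema; invoked from within TestStatsSerde, since the helper is private).
// Requires: java.util.Properties, java.nio.charset.StandardCharsets,
// org.apache.hadoop.conf.Configuration, org.apache.hadoop.hive.serde2.SerDeUtils,
// org.apache.hadoop.hive.serde2.columnar.{ColumnarSerDe, BytesRefArrayWritable, BytesRefWritable}.
ColumnarSerDe serDe = new ColumnarSerDe();
Configuration conf = new Configuration();
Properties tbl = new Properties();
tbl.setProperty("columns", "aint,astring");       // assumed column names
tbl.setProperty("columns.types", "int:string");   // assumed column types
SerDeUtils.initializeSerDe(serDe, conf, tbl, null);

String[] data = { "123", "hive and hadoop" };     // assumed sample row
BytesRefArrayWritable braw = new BytesRefArrayWritable(data.length);
for (int i = 0; i < data.length; i++) {
  byte[] bytes = data[i].getBytes(StandardCharsets.UTF_8);
  braw.set(i, new BytesRefWritable(bytes, 0, bytes.length));
}
deserializeAndSerializeColumnar(serDe, braw, data);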
Use of org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe in project hive by apache.
From the class TestRCFile, method main: a debugging entry point that initializes a ColumnarSerDe from table properties, parses command-line arguments, and runs write and read tests against an RCFile.
/** For debugging and testing. */
public static void main(String[] args) throws Exception {
  int count = 10000;
  boolean create = true;
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.getLocal(conf);
  Path file = null;
  // the SerDe part is from TestLazySimpleSerDe
  AbstractSerDe serDe = new ColumnarSerDe();
  // Create the SerDe
  Properties tbl = createProperties();
  SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
  String usage = "Usage: RCFile " + "[-count N]" + " file";
  if (args.length == 0) {
    System.err.println(usage);
    System.exit(-1);
  }
  try {
    for (int i = 0; i < args.length; ++i) {
      // parse command line
      if (args[i] == null) {
        continue;
      } else if (args[i].equals("-count")) {
        count = Integer.parseInt(args[++i]);
      } else {
        // file is required parameter
        file = new Path(args[i]);
      }
    }
    if (file == null) {
      System.err.println(usage);
      System.exit(-1);
    }
    LOG.info("count = " + count);
    LOG.info("create = " + create);
    LOG.info("file = " + file);
    TestRCFile test = new TestRCFile();
    // test.performanceTest();
    test.testSimpleReadAndWrite();
    byte[][] bytesArray = new byte[][] {
        "123".getBytes("UTF-8"), "456".getBytes("UTF-8"), "789".getBytes("UTF-8"),
        "1000".getBytes("UTF-8"), "5.3".getBytes("UTF-8"),
        "hive and hadoop".getBytes("UTF-8"), new byte[0], "NULL".getBytes("UTF-8") };
    test.writeTest(fs, count, file, bytesArray);
    test.fullyReadTest(fs, count, file);
    test.partialReadTest(fs, count, file);
    System.out.println("Finished.");
  } finally {
    fs.close();
  }
}
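The createProperties() helper referenced above is not shown in this snippet. A plausible sketch follows, assuming an eight-column schema that matches the eight fields written via bytesArray; the actual properties set in TestRCFile may differ:

// Hypothetical sketch of the createProperties() helper; column names here are assumptions.
private static Properties createProperties() {
  Properties tbl = new Properties();
  // Column names and types must line up with the eight fields in bytesArray
  tbl.setProperty("columns",
      "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
  tbl.setProperty("columns.types",
      "tinyint:smallint:int:bigint:double:string:int:string");
  // Treat the literal string "NULL" as a null value
  tbl.setProperty("serialization.null.format", "NULL");
  return tbl;
}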