Use of org.apache.hadoop.io.ArrayWritable in project akela by mozilla-metrics.
From class MultiScanTableMapReduceUtil, the method convertScanArrayToString:
/**
 * Converts an array of Scan objects into a Base64-encoded string.
 * @param scans the scans to serialize
 * @return the serialized scans as a Base64 string
 * @throws IOException if writing to the in-memory stream fails
 */
public static String convertScanArrayToString(final Scan[] scans) throws IOException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final DataOutputStream dos = new DataOutputStream(baos);
    ArrayWritable aw = new ArrayWritable(Scan.class, scans);
    aw.write(dos);
    return Base64.encodeBytes(baos.toByteArray());
}
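A typical use of this helper is to stash the encoded scans in the job configuration so that each mapper can recover them. A minimal sketch, assuming a hypothetical property key (not necessarily the one akela uses):

// Hypothetical job-setup helper; the property key is illustrative only.
public static void setScans(final Configuration conf, final Scan[] scans) throws IOException {
    conf.set("hbase.mapreduce.multiscans", convertScanArrayToString(scans));
}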
Use of org.apache.hadoop.io.ArrayWritable in project akela by mozilla-metrics.
From class MultiScanTableMapReduceUtil, the method convertStringToScanArray:
/**
 * Converts a Base64-encoded scans string back into a Scan array.
 * @param base64 the string produced by convertScanArrayToString
 * @return the deserialized Scan array
 * @throws IOException if reading from the in-memory stream fails
 */
public static Scan[] convertStringToScanArray(final String base64) throws IOException {
    final DataInputStream dis = new DataInputStream(new ByteArrayInputStream(Base64.decode(base64)));
    ArrayWritable aw = new ArrayWritable(Scan.class);
    aw.readFields(dis);
    Writable[] writables = aw.get();
    Scan[] scans = new Scan[writables.length];
    for (int i = 0; i < writables.length; i++) {
        scans[i] = (Scan) writables[i];
    }
    return scans;
}
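Together the two methods give a lossless round trip. Note that ArrayWritable requires its element class to implement Writable, so this sketch assumes the Writable-era HBase Scan that akela was built against (HBase 0.9x; later Scan versions dropped Writable):

// Round-trip sketch, assuming a Scan that implements Writable.
public static void roundTripExample() throws IOException {
    Scan scan = new Scan();
    scan.setCaching(500);
    String encoded = convertScanArrayToString(new Scan[] { scan });
    Scan[] decoded = convertStringToScanArray(encoded);
    // decoded[0] carries the same caching setting as the original scan
}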
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From class ArrayWritableObjectInspector, the method getStructFieldData:
@Override
public Object getStructFieldData(final Object data, final StructField fieldRef) {
    if (data == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final ArrayWritable arr = (ArrayWritable) data;
        final StructFieldImpl structField = (StructFieldImpl) fieldRef;
        int index = isRoot ? structField.getIndex() : structField.adjustedIndex;
        if (index < arr.get().length) {
            return arr.get()[index];
        } else {
            return null;
        }
    }
    // data is something other than an ArrayWritable; fall back to List handling
    if (data instanceof List) {
        return ((List) data).get(((StructFieldImpl) fieldRef).getIndex());
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
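A minimal sketch of how a caller might resolve one column through this inspector. The inspector instance and the "id" column name are hypothetical, but getStructFieldRef is the standard StructObjectInspector lookup:

// Row with two columns; the inspector maps field names to positions.
ArrayWritable row = new ArrayWritable(Writable.class,
        new Writable[] { new IntWritable(42), new Text("hello") });
StructField idField = inspector.getStructFieldRef("id"); // hypothetical column name
Object value = inspector.getStructFieldData(row, idField); // the IntWritable at that field's index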
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From class AbstractTestParquetDirect, the method read:
public static List<ArrayWritable> read(Path parquetFile) throws IOException {
    List<ArrayWritable> records = new ArrayList<ArrayWritable>();
    RecordReader<NullWritable, ArrayWritable> reader = new MapredParquetInputFormat().getRecordReader(
            new FileSplit(parquetFile, 0, fileLength(parquetFile), (String[]) null),
            new JobConf(), null);
    NullWritable alwaysNull = reader.createKey();
    ArrayWritable record = reader.createValue();
    while (reader.next(alwaysNull, record)) {
        records.add(record);
        // create a fresh value so the record just added isn't clobbered
        record = reader.createValue();
    }
    return records;
}
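A hedged usage sketch of this helper in a test body; the file path is illustrative only:

Path parquetFile = new Path("/tmp/direct-test.parquet"); // illustrative path
List<ArrayWritable> rows = read(parquetFile);
for (ArrayWritable row : rows) {
    System.out.println(Arrays.toString(row.get()));
}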
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From class TestParquetSerDe, the method testParquetHiveSerDe:
public void testParquetHiveSerDe() throws Throwable {
    try {
        // Create the SerDe
        System.out.println("test: testParquetHiveSerDe");
        final ParquetHiveSerDe serDe = new ParquetHiveSerDe();
        final Configuration conf = new Configuration();
        final Properties tbl = createProperties();
        SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
        // Data
        final Writable[] arr = new Writable[9];
        // primitive types
        arr[0] = new ByteWritable((byte) 123);
        arr[1] = new ShortWritable((short) 456);
        arr[2] = new IntWritable(789);
        arr[3] = new LongWritable(1000L);
        arr[4] = new DoubleWritable(5.3);
        arr[5] = new BytesWritable("hive and hadoop and parquet. Big family.".getBytes("UTF-8"));
        arr[6] = new BytesWritable("parquetSerde binary".getBytes("UTF-8"));
        // a map<string,int>: each entry is a two-element (key, value) ArrayWritable
        final Writable[] map = new Writable[3];
        for (int i = 0; i < 3; ++i) {
            final Writable[] pair = new Writable[2];
            pair[0] = new BytesWritable(("key_" + i).getBytes("UTF-8"));
            pair[1] = new IntWritable(i);
            map[i] = new ArrayWritable(Writable.class, pair);
        }
        arr[7] = new ArrayWritable(Writable.class, map);
        // an array<string> of five elements
        final Writable[] array = new Writable[5];
        for (int i = 0; i < 5; ++i) {
            array[i] = new BytesWritable(("elem_" + i).getBytes("UTF-8"));
        }
        arr[8] = new ArrayWritable(Writable.class, array);
        final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);
        // Test
        deserializeAndSerializeLazySimple(serDe, arrWritable);
        System.out.println("test: testParquetHiveSerDe - OK");
    } catch (final Throwable e) {
        e.printStackTrace();
        throw e;
    }
}
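The nested layout above shows how these Parquet bindings represent complex types with nothing but ArrayWritable: a map arrives as an ArrayWritable of two-element (key, value) ArrayWritables. A sketch of unpacking arr[7] under that convention:

ArrayWritable mapWritable = (ArrayWritable) arr[7];
for (Writable entry : mapWritable.get()) {
    Writable[] pair = ((ArrayWritable) entry).get();
    BytesWritable key = (BytesWritable) pair[0]; // e.g. "key_0"
    IntWritable value = (IntWritable) pair[1];   // e.g. 0
}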