Use of org.apache.hadoop.io.Writable in project hive by apache.
The class HCatSplit, method readFields.
/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
 */
@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput input) throws IOException {
  String partitionInfoString = WritableUtils.readString(input);
  partitionInfo = (PartInfo) HCatUtil.deserialize(partitionInfoString);
  String baseSplitClassName = WritableUtils.readString(input);
  org.apache.hadoop.mapred.InputSplit split;
  try {
    Class<? extends org.apache.hadoop.mapred.InputSplit> splitClass =
        (Class<? extends org.apache.hadoop.mapred.InputSplit>) JavaUtils.loadClass(baseSplitClassName);
    // Class.forName().newInstance() does not work if the underlying
    // InputSplit has package visibility
    Constructor<? extends org.apache.hadoop.mapred.InputSplit> constructor =
        splitClass.getDeclaredConstructor(new Class[] {});
    constructor.setAccessible(true);
    split = constructor.newInstance();
    // read baseSplit from input
    ((Writable) split).readFields(input);
    this.baseMapRedSplit = split;
  } catch (Exception e) {
    throw new IOException("Exception from " + baseSplitClassName, e);
  }
}
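The reflective instantiation above is the usual workaround for InputSplit implementations whose no-arg constructor is not public. A minimal, self-contained sketch of the same pattern, not taken from the Hive source (the helper name and parameters are illustrative):

import java.lang.reflect.Constructor;

import org.apache.hadoop.io.Writable;

// Sketch: instantiate a Writable by class name even when its no-arg
// constructor has package (default) visibility, then populate it from a
// DataInput. getDeclaredConstructor + setAccessible succeeds where a plain
// newInstance() would throw IllegalAccessException.
public static Writable instantiateWritable(String className, java.io.DataInput input)
    throws Exception {
  Class<? extends Writable> clazz = Class.forName(className).asSubclass(Writable.class);
  Constructor<? extends Writable> ctor = clazz.getDeclaredConstructor();
  ctor.setAccessible(true);
  Writable w = ctor.newInstance();
  w.readFields(input);
  return w;
}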
Use of org.apache.hadoop.io.Writable in project hive by apache.
The class HCatSplit, method write.
/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
 */
@Override
public void write(DataOutput output) throws IOException {
  String partitionInfoString = HCatUtil.serialize(partitionInfo);
  // write partitionInfo into output
  WritableUtils.writeString(output, partitionInfoString);
  WritableUtils.writeString(output, baseMapRedSplit.getClass().getName());
  Writable baseSplitWritable = (Writable) baseMapRedSplit;
  // write baseSplit into output
  baseSplitWritable.write(output);
}
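Taken together, write and readFields form the standard Writable round trip: the split serializes its PartInfo, the concrete class name of the wrapped split, and then the wrapped split's own bytes. A minimal sketch of such a round trip through an in-memory stream, using Text as a stand-in Writable (HCatSplit construction details are elided; the helper names below are assumptions for illustration):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Sketch: serialize any Writable into a byte buffer with write(DataOutput),
// then rebuild an equivalent object with readFields(DataInput).
public static byte[] toBytes(Writable writable) throws IOException {
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  writable.write(new DataOutputStream(bos));
  return bos.toByteArray();
}

public static void fromBytes(byte[] bytes, Writable target) throws IOException {
  target.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
}

// Usage: Text copy = new Text(); fromBytes(toBytes(new Text("abc")), copy);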
Use of org.apache.hadoop.io.Writable in project hive by apache.
The class TestLazyHBaseObject, method testLazyHBaseRow3.
/**
* Test the LazyHBaseRow class with a one-to-one/onto mapping between Hive columns and
* HBase column family/column qualifier pairs. The column types are primitive and fields
* are stored in binary format in HBase.
* @throws SerDeException
*/
public void testLazyHBaseRow3() throws SerDeException {
  List<TypeInfo> fieldTypeInfos = TypeInfoUtils.getTypeInfosFromTypeString(
      "string,int,tinyint,smallint,bigint,float,double,string,boolean");
  List<String> fieldNames = Arrays.asList(new String[] {
      "key", "c_int", "c_byte", "c_short", "c_long", "c_float", "c_double", "c_string", "c_bool" });
  Text nullSequence = new Text("\\N");
  String hbaseColumnsMapping = ":key#str,cf-int:cq-int#bin,cf-byte:cq-byte#bin,"
      + "cf-short:cq-short#bin,cf-long:cq-long#bin,cf-float:cq-float#bin,cf-double:cq-double#bin,"
      + "cf-string:cq-string#str,cf-bool:cq-bool#bin";
  ColumnMappings columnMappings = null;
  try {
    columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
  } catch (SerDeException e) {
    fail(e.toString());
  }
  ColumnMapping[] columnsMapping = columnMappings.getColumnsMapping();
  for (int i = 0; i < columnsMapping.length; i++) {
    ColumnMapping colMap = columnsMapping[i];
    if (i == 0 || i == 7) {
      colMap.binaryStorage.add(false);
    } else {
      colMap.binaryStorage.add(true);
    }
  }
  ObjectInspector oi = LazyFactory.createLazyStructInspector(fieldNames, fieldTypeInfos,
      new byte[] { ' ', ':', '=' }, nullSequence, false, false, (byte) 0);
  LazyHBaseRow o = new LazyHBaseRow((LazySimpleStructObjectInspector) oi, columnMappings);
  byte[] rowKey = "row-key".getBytes();
  List<KeyValue> kvs = new ArrayList<KeyValue>();
  byte[] value;
  for (int i = 1; i < columnsMapping.length; i++) {
    switch (i) {
    case 1:
      value = Bytes.toBytes(1);
      break;
    case 2:
      value = new byte[] { (byte) 1 };
      break;
    case 3:
      value = Bytes.toBytes((short) 1);
      break;
    case 4:
      value = Bytes.toBytes((long) 1);
      break;
    case 5:
      value = Bytes.toBytes(1.0F);
      break;
    case 6:
      value = Bytes.toBytes(1.0);
      break;
    case 7:
      value = "Hadoop, Hive, with HBase storage handler.".getBytes();
      break;
    case 8:
      value = Bytes.toBytes(true);
      break;
    default:
      throw new RuntimeException("Not expected: " + i);
    }
    ColumnMapping colMap = columnsMapping[i];
    kvs.add(new KeyValue(rowKey, colMap.familyNameBytes, colMap.qualifierNameBytes, value));
  }
  Collections.sort(kvs, KeyValue.COMPARATOR);
  Result result = new Result(kvs);
  o.init(result);
  List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = ((StructObjectInspector) oi).getStructFieldData(o, fieldRefs.get(i));
    assert (fieldData != null);
    assert (fieldData instanceof LazyPrimitive<?, ?>);
    Writable writable = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
    switch (i) {
    case 0:
      Text text = new Text("row-key");
      assertEquals(text, writable);
      break;
    case 1:
      IntWritable iw = new IntWritable(1);
      assertEquals(iw, writable);
      break;
    case 2:
      ByteWritable bw = new ByteWritable((byte) 1);
      assertEquals(bw, writable);
      break;
    case 3:
      ShortWritable sw = new ShortWritable((short) 1);
      assertEquals(sw, writable);
      break;
    case 4:
      LongWritable lw = new LongWritable(1);
      assertEquals(lw, writable);
      break;
    case 5:
      FloatWritable fw = new FloatWritable(1.0F);
      assertEquals(fw, writable);
      break;
    case 6:
      DoubleWritable dw = new DoubleWritable(1.0);
      assertEquals(dw, writable);
      break;
    case 7:
      Text t = new Text("Hadoop, Hive, with HBase storage handler.");
      assertEquals(t, writable);
      break;
    case 8:
      BooleanWritable boolWritable = new BooleanWritable(true);
      assertEquals(boolWritable, writable);
      break;
    default:
      fail("Error: Unanticipated value in deserializing fields for HBaseSerDe.");
      break;
    }
  }
}
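Most columns in this test use HBase's binary storage (the #bin suffix in the columns mapping), so the cell values are the plain big-endian encodings produced by org.apache.hadoop.hbase.util.Bytes. A minimal sketch of that encoding round trip, independent of the SerDe machinery (assumes only the HBase client library on the classpath; the method name is illustrative):

import org.apache.hadoop.hbase.util.Bytes;

// Sketch: the binary cell values written above decode back with the
// matching Bytes.toXxx call.
public static void binaryEncodingRoundTrip() {
  byte[] intCell = Bytes.toBytes(1);       // 4 bytes, as stored for c_int
  byte[] longCell = Bytes.toBytes(1L);     // 8 bytes, as stored for c_long
  byte[] boolCell = Bytes.toBytes(true);   // 1 byte, as stored for c_bool

  assert Bytes.toInt(intCell) == 1;
  assert Bytes.toLong(longCell) == 1L;
  assert Bytes.toBoolean(boolCell);
}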
Use of org.apache.hadoop.io.Writable in project hive by apache.
The class TypedBytesWritableInput, method readMap.
public MapWritable readMap(MapWritable mw) throws IOException {
  if (mw == null) {
    mw = new MapWritable();
  }
  int length = in.readMapHeader();
  for (int i = 0; i < length; i++) {
    Writable key = read();
    Writable value = read();
    mw.put(key, value);
  }
  return mw;
}
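readMap fills a standard org.apache.hadoop.io.MapWritable, so its result is consumed with the ordinary MapWritable API. A minimal sketch of building and reading such a map directly (the typed-bytes stream itself is not reproduced here; the keys and values are illustrative):

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Sketch: a MapWritable holds Writable keys and values, which is exactly
// the shape readMap produces from the typed-bytes stream.
public static void mapWritableExample() {
  MapWritable mw = new MapWritable();
  mw.put(new Text("count"), new IntWritable(42));

  Writable value = mw.get(new Text("count"));   // IntWritable(42)
  System.out.println(value);
}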
Use of org.apache.hadoop.io.Writable in project hive by apache.
The class TypedBytesWritableInput, method readArray.
public ArrayWritable readArray(ArrayWritable aw) throws IOException {
  if (aw == null) {
    aw = new ArrayWritable(TypedBytesWritable.class);
  } else if (!aw.getValueClass().equals(TypedBytesWritable.class)) {
    throw new RuntimeException("value class has to be TypedBytesWritable");
  }
  int length = in.readVectorHeader();
  Writable[] writables = new Writable[length];
  for (int i = 0; i < length; i++) {
    writables[i] = new TypedBytesWritable(in.readRaw());
  }
  aw.set(writables);
  return aw;
}
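The value-class check matters because an ArrayWritable is bound to a single element class that it uses when deserializing its contents. A minimal sketch of that usage with stock Hadoop types, where Text stands in for TypedBytesWritable (which is specific to the typed-bytes support):

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Sketch: an ArrayWritable is constructed with its element class;
// set() stores the elements and get() returns them as Writable[].
public static void arrayWritableExample() {
  ArrayWritable aw = new ArrayWritable(Text.class);
  aw.set(new Writable[] { new Text("a"), new Text("b") });

  for (Writable w : aw.get()) {
    System.out.println(w);   // prints "a" then "b"
  }
}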