Use of org.apache.hadoop.hive.serde2.io.ShortWritable in project parquet-mr by apache.
The class TestDeepParquetHiveMapInspector, method testNullContainer:
@Test
public void testNullContainer() {
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class, null);
  assertNull("Should be null", inspector.getMapValueElement(map, new ShortWritable((short) 0)));
}
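For readers unfamiliar with the type: ShortWritable is a small, mutable Writable box around a primitive short, which is why the test can hand a fresh new ShortWritable((short) 0) to the inspector as a map key. A minimal sketch of its behavior, independent of the test above:

// ShortWritable wraps a primitive short behind Hadoop's Writable contract.
ShortWritable key = new ShortWritable((short) 0);
short v = key.get();     // 0
key.set((short) 42);     // writables are mutable and reusable across records
assert key.get() == 42;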
Use of org.apache.hadoop.hive.serde2.io.ShortWritable in project presto by prestodb.
The class OrcTester, method decodeRecordReaderValue:
private static Object decodeRecordReaderValue(Type type, Object actualValue) {
    // Lazy ORC values must be materialized before they can be inspected.
    if (actualValue instanceof OrcLazyObject) {
        try {
            actualValue = ((OrcLazyObject) actualValue).materialize();
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
    // Unwrap Hadoop writables into Java primitives or Presto SQL wrapper types.
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    }
    else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    }
    else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    }
    else if (actualValue instanceof DateWritable) {
        actualValue = new SqlDate(((DateWritable) actualValue).getDays());
    }
    else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    }
    else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    }
    else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    }
    else if (actualValue instanceof HiveCharWritable) {
        actualValue = ((HiveCharWritable) actualValue).getPaddedValue().toString();
    }
    else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    }
    else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    }
    else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // the writable messes with the scale, so rescale the value to the Presto type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    }
    else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    }
    else if (actualValue instanceof TimestampWritable) {
        TimestampWritable timestamp = (TimestampWritable) actualValue;
        actualValue = sqlTimestampOf((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), SESSION);
    }
    // Structs, lists, and maps are decoded recursively.
    else if (actualValue instanceof OrcStruct) {
        List<Object> fields = new ArrayList<>();
        OrcStruct structObject = (OrcStruct) actualValue;
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(OrcUtil.getFieldValue(structObject, fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    }
    else if (actualValue instanceof com.facebook.hive.orc.OrcStruct) {
        List<Object> fields = new ArrayList<>();
        com.facebook.hive.orc.OrcStruct structObject = (com.facebook.hive.orc.OrcStruct) actualValue;
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(structObject.getFieldValue(fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    }
    else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, (List<?>) actualValue);
    }
    else if (actualValue instanceof Map) {
        actualValue = decodeRecordReaderMap(type, (Map<?, ?>) actualValue);
    }
    return actualValue;
}
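The only non-trivial branch above is the decimal one: a HiveDecimalWritable may carry a scale different from the Presto DecimalType, so the unscaled BigInteger is rescaled before being wrapped in SqlDecimal. A minimal sketch of the rescaling idea, using a hypothetical helper rather than Presto's actual rescale implementation:

// Illustrative only: shift an unscaled decimal value from one scale to another
// by multiplying or dividing by the appropriate power of ten.
static BigInteger rescaleSketch(BigInteger unscaled, int fromScale, int toScale) {
    if (toScale >= fromScale) {
        return unscaled.multiply(BigInteger.TEN.pow(toScale - fromScale));
    }
    return unscaled.divide(BigInteger.TEN.pow(fromScale - toScale)); // truncates
}
// e.g. rescaleSketch(BigInteger.valueOf(1230), 3, 2) == 123  (1.230 -> 1.23)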
Use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.
The class TestHBaseSerDe, method testHBaseSerDeWithHiveMapToHBaseColumnFamily:
@Test
public void testHBaseSerDeWithHiveMapToHBaseColumnFamily() throws SerDeException {
  byte[] cfint = "cf-int".getBytes();
  byte[] cfbyte = "cf-byte".getBytes();
  byte[] cfshort = "cf-short".getBytes();
  byte[] cflong = "cf-long".getBytes();
  byte[] cffloat = "cf-float".getBytes();
  byte[] cfdouble = "cf-double".getBytes();
  byte[] cfbool = "cf-bool".getBytes();
  byte[][] columnFamilies = new byte[][] { cfint, cfbyte, cfshort, cflong, cffloat, cfdouble, cfbool };
  byte[][] rowKeys = new byte[][] {
      Integer.toString(1).getBytes(),
      Integer.toString(Integer.MIN_VALUE).getBytes(),
      Integer.toString(Integer.MAX_VALUE).getBytes() };
  // One row of binary qualifier/value bytes per row key: 1, MIN_VALUE, MAX_VALUE for each type.
  byte[][][] columnQualifiersAndValues = new byte[][][] {
      { Bytes.toBytes(1), new byte[] { 1 }, Bytes.toBytes((short) 1), Bytes.toBytes((long) 1),
        Bytes.toBytes(1.0F), Bytes.toBytes(1.0), Bytes.toBytes(true) },
      { Bytes.toBytes(Integer.MIN_VALUE), new byte[] { Byte.MIN_VALUE }, Bytes.toBytes(Short.MIN_VALUE),
        Bytes.toBytes(Long.MIN_VALUE), Bytes.toBytes(Float.MIN_VALUE), Bytes.toBytes(Double.MIN_VALUE),
        Bytes.toBytes(false) },
      { Bytes.toBytes(Integer.MAX_VALUE), new byte[] { Byte.MAX_VALUE }, Bytes.toBytes(Short.MAX_VALUE),
        Bytes.toBytes(Long.MAX_VALUE), Bytes.toBytes(Float.MAX_VALUE), Bytes.toBytes(Double.MAX_VALUE),
        Bytes.toBytes(true) } };
  List<Cell> kvs = new ArrayList<Cell>();
  Result[] r = new Result[] { null, null, null };
  Put[] p = new Put[] { null, null, null };
  // Build one HBase Result and one Put per row key.
  for (int i = 0; i < r.length; i++) {
    kvs.clear();
    p[i] = new Put(rowKeys[i]);
    for (int j = 0; j < columnQualifiersAndValues[i].length; j++) {
      kvs.add(new KeyValue(rowKeys[i], columnFamilies[j], columnQualifiersAndValues[i][j], columnQualifiersAndValues[i][j]));
      p[i].addColumn(columnFamilies[j], columnQualifiersAndValues[i][j], columnQualifiersAndValues[i][j]);
    }
    r[i] = Result.create(kvs);
  }
  Object[][] expectedData = {
      { new Text(Integer.toString(1)), new IntWritable(1), new ByteWritable((byte) 1),
        new ShortWritable((short) 1), new LongWritable(1), new FloatWritable(1.0F),
        new DoubleWritable(1.0), new BooleanWritable(true) },
      { new Text(Integer.toString(Integer.MIN_VALUE)), new IntWritable(Integer.MIN_VALUE),
        new ByteWritable(Byte.MIN_VALUE), new ShortWritable(Short.MIN_VALUE), new LongWritable(Long.MIN_VALUE),
        new FloatWritable(Float.MIN_VALUE), new DoubleWritable(Double.MIN_VALUE), new BooleanWritable(false) },
      { new Text(Integer.toString(Integer.MAX_VALUE)), new IntWritable(Integer.MAX_VALUE),
        new ByteWritable(Byte.MAX_VALUE), new ShortWritable(Short.MAX_VALUE), new LongWritable(Long.MAX_VALUE),
        new FloatWritable(Float.MAX_VALUE), new DoubleWritable(Double.MAX_VALUE), new BooleanWritable(true) } };
  HBaseSerDe hbaseSerDe = new HBaseSerDe();
  Configuration conf = new Configuration();
  Properties tbl = createPropertiesForHiveMapHBaseColumnFamily();
  hbaseSerDe.initialize(conf, tbl, null);
  deserializeAndSerializeHiveMapHBaseColumnFamily(hbaseSerDe, r, p, expectedData, rowKeys, columnFamilies, columnQualifiersAndValues);
  // Repeat with an alternate set of table properties.
  hbaseSerDe = new HBaseSerDe();
  conf = new Configuration();
  tbl = createPropertiesForHiveMapHBaseColumnFamilyII();
  hbaseSerDe.initialize(conf, tbl, null);
  deserializeAndSerializeHiveMapHBaseColumnFamily(hbaseSerDe, r, p, expectedData, rowKeys, columnFamilies, columnQualifiersAndValues);
}
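The createPropertiesForHiveMapHBaseColumnFamily helpers are defined elsewhere in the test class. A hedged sketch of the kind of table properties such a helper typically builds; the column names, types, and mapping string below are assumptions, not the helper's actual values:

// Sketch only: each Hive MAP column is mapped to a whole HBase column family (e.g. "cf-int:").
Properties tbl = new Properties();
tbl.setProperty(serdeConstants.LIST_COLUMNS, "key,map_int,map_byte,map_short,map_long,map_float,map_double,map_bool");
tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,map<int,int>,map<tinyint,tinyint>,map<smallint,smallint>,"
    + "map<bigint,bigint>,map<float,float>,map<double,double>,map<boolean,boolean>");
tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING,
    ":key,cf-int:#b:b,cf-byte:#b:b,cf-short:#b:b,cf-long:#b:b,cf-float:#b:b,cf-double:#b:b,cf-bool:#b:b");

The "#b:b" storage suffix requests binary encoding for map keys and values, which matches the Bytes.toBytes(...) payloads built above; without it the SerDe would expect string-encoded cells.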
Use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.
The class TestHBaseSerDe, method testHBaseSerDeWithHiveMapToHBaseColumnFamilyII:
@Test
public void testHBaseSerDeWithHiveMapToHBaseColumnFamilyII() throws SerDeException {
  byte[] cfbyte = "cf-byte".getBytes();
  byte[] cfshort = "cf-short".getBytes();
  byte[] cfint = "cf-int".getBytes();
  byte[] cflong = "cf-long".getBytes();
  byte[] cffloat = "cf-float".getBytes();
  byte[] cfdouble = "cf-double".getBytes();
  byte[] cfstring = "cf-string".getBytes();
  byte[] cfbool = "cf-bool".getBytes();
  byte[][] columnFamilies = new byte[][] { cfbyte, cfshort, cfint, cflong, cffloat, cfdouble, cfstring, cfbool };
  byte[] rowKey = Bytes.toBytes("row-key");
  byte[][] columnQualifiersAndValues = new byte[][] {
      Bytes.toBytes("123"), Bytes.toBytes("456"), Bytes.toBytes("789"), Bytes.toBytes("1000"),
      Bytes.toBytes("-0.01"), Bytes.toBytes("5.3"), Bytes.toBytes("Hive"), Bytes.toBytes("true") };
  Put p = new Put(rowKey);
  List<Cell> kvs = new ArrayList<Cell>();
  for (int j = 0; j < columnQualifiersAndValues.length; j++) {
    kvs.add(new KeyValue(rowKey, columnFamilies[j], columnQualifiersAndValues[j], columnQualifiersAndValues[j]));
    p.addColumn(columnFamilies[j], columnQualifiersAndValues[j], columnQualifiersAndValues[j]);
  }
  Result r = Result.create(kvs);
  Object[] expectedData = {
      new Text("row-key"), new ByteWritable((byte) 123), new ShortWritable((short) 456),
      new IntWritable(789), new LongWritable(1000), new FloatWritable(-0.01F), new DoubleWritable(5.3),
      new Text("Hive"), new BooleanWritable(true) };
  HBaseSerDe hbaseSerDe = new HBaseSerDe();
  Configuration conf = new Configuration();
  Properties tbl = createPropertiesForHiveMapHBaseColumnFamilyII_I();
  hbaseSerDe.initialize(conf, tbl, null);
  deserializeAndSerializeHiveMapHBaseColumnFamilyII(hbaseSerDe, r, p, expectedData, columnFamilies, columnQualifiersAndValues);
  hbaseSerDe = new HBaseSerDe();
  conf = new Configuration();
  tbl = createPropertiesForHiveMapHBaseColumnFamilyII_II();
  hbaseSerDe.initialize(conf, tbl, null);
  deserializeAndSerializeHiveMapHBaseColumnFamilyII(hbaseSerDe, r, p, expectedData, columnFamilies, columnQualifiersAndValues);
}
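Note the contrast with the previous test: here every cell holds a string-encoded value, so the SerDe must parse text rather than copy bits. The difference is visible directly in the byte arrays produced by HBase's Bytes utility:

byte[] binary = Bytes.toBytes((short) 456); // 2 bytes, big-endian: {0x01, 0xC8}
byte[] text = Bytes.toBytes("456");         // 3 bytes, UTF-8 text: {'4', '5', '6'}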
Use of org.apache.hadoop.hive.serde2.io.ShortWritable in project hive by apache.
The class TestHBaseSerDe, method testHBaseSerDeI:
/**
 * Test the default behavior of the Lazy family of objects and object inspectors.
 */
@Test
public void testHBaseSerDeI() throws SerDeException {
  byte[] cfa = "cola".getBytes();
  byte[] cfb = "colb".getBytes();
  byte[] cfc = "colc".getBytes();
  byte[] qualByte = "byte".getBytes();
  byte[] qualShort = "short".getBytes();
  byte[] qualInt = "int".getBytes();
  byte[] qualLong = "long".getBytes();
  byte[] qualFloat = "float".getBytes();
  byte[] qualDouble = "double".getBytes();
  byte[] qualString = "string".getBytes();
  byte[] qualBool = "boolean".getBytes();
  byte[] rowKey = Bytes.toBytes("test-row1");
  // Data
  List<Cell> kvs = new ArrayList<Cell>();
  kvs.add(new KeyValue(rowKey, cfa, qualByte, Bytes.toBytes("123")));
  kvs.add(new KeyValue(rowKey, cfb, qualShort, Bytes.toBytes("456")));
  kvs.add(new KeyValue(rowKey, cfc, qualInt, Bytes.toBytes("789")));
  kvs.add(new KeyValue(rowKey, cfa, qualLong, Bytes.toBytes("1000")));
  kvs.add(new KeyValue(rowKey, cfb, qualFloat, Bytes.toBytes("-0.01")));
  kvs.add(new KeyValue(rowKey, cfc, qualDouble, Bytes.toBytes("5.3")));
  kvs.add(new KeyValue(rowKey, cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive")));
  kvs.add(new KeyValue(rowKey, cfb, qualBool, Bytes.toBytes("true")));
  Collections.sort(kvs, KeyValue.COMPARATOR);
  Result r = Result.create(kvs);
  Put p = new Put(rowKey);
  p.addColumn(cfa, qualByte, Bytes.toBytes("123"));
  p.addColumn(cfb, qualShort, Bytes.toBytes("456"));
  p.addColumn(cfc, qualInt, Bytes.toBytes("789"));
  p.addColumn(cfa, qualLong, Bytes.toBytes("1000"));
  p.addColumn(cfb, qualFloat, Bytes.toBytes("-0.01"));
  p.addColumn(cfc, qualDouble, Bytes.toBytes("5.3"));
  p.addColumn(cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive"));
  p.addColumn(cfb, qualBool, Bytes.toBytes("true"));
  Object[] expectedFieldsData = {
      new Text("test-row1"), new ByteWritable((byte) 123), new ShortWritable((short) 456),
      new IntWritable(789), new LongWritable(1000), new FloatWritable(-0.01F), new DoubleWritable(5.3),
      new Text("Hadoop, HBase, and Hive"), new BooleanWritable(true) };
  // Create, initialize, and test the SerDe
  HBaseSerDe serDe = new HBaseSerDe();
  Configuration conf = new Configuration();
  Properties tbl = createPropertiesI_I();
  serDe.initialize(conf, tbl, null);
  deserializeAndSerialize(serDe, r, p, expectedFieldsData);
  serDe = new HBaseSerDe();
  conf = new Configuration();
  tbl = createPropertiesI_II();
  serDe.initialize(conf, tbl, null);
  deserializeAndSerialize(serDe, r, p, expectedFieldsData);
  serDe = new HBaseSerDe();
  conf = new Configuration();
  tbl = createPropertiesI_III();
  serDe.initialize(conf, tbl, null);
  deserializeAndSerialize(serDe, r, p, expectedFieldsData);
  serDe = new HBaseSerDe();
  conf = new Configuration();
  tbl = createPropertiesI_IV();
  serDe.initialize(conf, tbl, null);
  deserializeAndSerialize(serDe, r, p, expectedFieldsData);
}
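deserializeAndSerialize is a helper defined elsewhere in TestHBaseSerDe. A hedged sketch of the round trip it plausibly performs; the lazy-value unwrapping and the final comparison are assumptions:

// Sketch only: HBase Result -> Hive row -> HBase Put.
StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
Object row = serDe.deserialize(new ResultWritable(r));        // read path
List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
for (int i = 0; i < fieldRefs.size(); i++) {
  Object fieldData = soi.getStructFieldData(row, fieldRefs.get(i));
  if (fieldData != null) {
    fieldData = ((LazyPrimitive<?, ?>) fieldData).getWritableObject(); // unwrap lazy value
  }
  assertEquals("Field " + i, expectedFieldsData[i], fieldData);
}
Writable serialized = serDe.serialize(row, soi);              // write path, yields a PutWritable
// (the real helper then compares the serialized Put against p, cell by cell)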