Use of org.apache.hadoop.io.FloatWritable in project druid by druid-io.
From the class OrcStructConverterTest, method testConvertRootFieldWithStructOfNonNullPrimitivesReturningValuesAsTheyAre:
@Test
public void testConvertRootFieldWithStructOfNonNullPrimitivesReturningValuesAsTheyAre() {
final TypeDescription structType = TypeDescription.createStruct();
structType.addField("int", TypeDescription.createInt());
structType.addField("float", TypeDescription.createFloat());
final OrcStruct orcStruct = new OrcStruct(structType);
orcStruct.setFieldValue("int", new IntWritable(10));
orcStruct.setFieldValue("float", new FloatWritable(10.f));
final Map<String, Object> expectedResult = new HashMap<>();
expectedResult.put("int", ((IntWritable) orcStruct.getFieldValue("int")).get());
expectedResult.put("float", ((FloatWritable) orcStruct.getFieldValue("float")).get());
final OrcStructConverter converter = new OrcStructConverter(false);
assertConversion(converter, structType, expectedResult, orcStruct);
}
Use of org.apache.hadoop.io.FloatWritable in project druid by druid-io.
From the class OrcStructConverterTest, method testConvertRootFieldWithMapOfNonNullPrimitivesReturningValuesAsTheyAre:
@Test
public void testConvertRootFieldWithMapOfNonNullPrimitivesReturningValuesAsTheyAre() {
final TypeDescription mapType = TypeDescription.createMap(TypeDescription.createInt(), TypeDescription.createFloat());
final OrcMap<IntWritable, FloatWritable> map = new OrcMap<>(mapType);
for (int i = 0; i < 3; i++) {
map.put(new IntWritable(i * 10), new FloatWritable(i / 10.f));
}
final Map<Integer, Float> expectedResult = new HashMap<>();
for (Entry<IntWritable, FloatWritable> entry : map.entrySet()) {
expectedResult.put(entry.getKey().get(), entry.getValue().get());
}
final OrcStructConverter converter = new OrcStructConverter(false);
assertConversion(converter, mapType, expectedResult, map);
}
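Both tests delegate to an assertConversion helper that is not shown in these snippets. Below is a minimal sketch of what such a helper could look like, under two assumptions that the snippet does not confirm: that OrcStructConverter exposes a convertRootField(OrcStruct, String) method, and that the helper wraps the value under test in a single-field root struct. Imports are omitted, following the style of the snippets above.
private static void assertConversion(OrcStructConverter converter, TypeDescription fieldType, Object expectedValueAfterConversion, WritableComparable<?> actualValueInOrc) {
// Hypothetical helper: wrap the value in a one-field root struct and compare the converted field value.
final String fieldName = "field";
final TypeDescription schema = TypeDescription.createStruct().addField(fieldName, fieldType);
final OrcStruct root = new OrcStruct(schema);
root.setFieldValue(fieldName, actualValueInOrc);
Assert.assertEquals(expectedValueAfterConversion, converter.convertRootField(root, fieldName));
}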
Use of org.apache.hadoop.io.FloatWritable in project hive by apache.
From the class VectorExtractRow, method extractRowColumn:
public Object extractRowColumn(ColumnVector colVector, TypeInfo typeInfo, ObjectInspector objectInspector, int batchIndex) {
if (colVector == null) {
// The planner will not include unneeded columns for reading, but other parts of execution may ask for them.
return null;
}
final int adjustedIndex = (colVector.isRepeating ? 0 : batchIndex);
if (!colVector.noNulls && colVector.isNull[adjustedIndex]) {
return null;
}
final Category category = typeInfo.getCategory();
switch(category) {
case PRIMITIVE:
{
final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
final PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
final Writable primitiveWritable = VectorizedBatchUtil.getPrimitiveWritable(primitiveCategory);
switch(primitiveCategory) {
case VOID:
return null;
case BOOLEAN:
((BooleanWritable) primitiveWritable).set(((LongColumnVector) colVector).vector[adjustedIndex] == 0 ? false : true);
return primitiveWritable;
case BYTE:
((ByteWritable) primitiveWritable).set((byte) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case SHORT:
((ShortWritable) primitiveWritable).set((short) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case INT:
((IntWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case LONG:
((LongWritable) primitiveWritable).set(((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case TIMESTAMP:
// Convert from java.sql.Timestamp (used by vectorization) to the serializable org.apache.hadoop.hive.common.type.Timestamp.
java.sql.Timestamp ts = ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex);
Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
((TimestampWritableV2) primitiveWritable).set(serializableTS);
return primitiveWritable;
case DATE:
((DateWritableV2) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case FLOAT:
((FloatWritable) primitiveWritable).set((float) ((DoubleColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case DOUBLE:
((DoubleWritable) primitiveWritable).set(((DoubleColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case BINARY:
{
final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
final byte[] bytes = bytesColVector.vector[adjustedIndex];
final int start = bytesColVector.start[adjustedIndex];
final int length = bytesColVector.length[adjustedIndex];
BytesWritable bytesWritable = (BytesWritable) primitiveWritable;
if (bytes == null || length == 0) {
if (length > 0) {
nullBytesReadError(primitiveCategory, batchIndex);
}
bytesWritable.set(EMPTY_BYTES, 0, 0);
} else {
bytesWritable.set(bytes, start, length);
}
return primitiveWritable;
}
case STRING:
{
final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
final byte[] bytes = bytesColVector.vector[adjustedIndex];
final int start = bytesColVector.start[adjustedIndex];
final int length = bytesColVector.length[adjustedIndex];
if (bytes == null || length == 0) {
if (length > 0) {
nullBytesReadError(primitiveCategory, batchIndex);
}
((Text) primitiveWritable).set(EMPTY_BYTES, 0, 0);
} else {
// Use org.apache.hadoop.io.Text as our helper to go from byte[] to String.
((Text) primitiveWritable).set(bytes, start, length);
}
return primitiveWritable;
}
case VARCHAR:
{
final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
final byte[] bytes = bytesColVector.vector[adjustedIndex];
final int start = bytesColVector.start[adjustedIndex];
final int length = bytesColVector.length[adjustedIndex];
final HiveVarcharWritable hiveVarcharWritable = (HiveVarcharWritable) primitiveWritable;
if (bytes == null || length == 0) {
if (length > 0) {
nullBytesReadError(primitiveCategory, batchIndex);
}
hiveVarcharWritable.set(EMPTY_STRING, -1);
} else {
final int adjustedLength = StringExpr.truncate(bytes, start, length, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
if (adjustedLength == 0) {
hiveVarcharWritable.set(EMPTY_STRING, -1);
} else {
hiveVarcharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), -1);
}
}
return primitiveWritable;
}
case CHAR:
{
final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
final byte[] bytes = bytesColVector.vector[adjustedIndex];
final int start = bytesColVector.start[adjustedIndex];
final int length = bytesColVector.length[adjustedIndex];
final HiveCharWritable hiveCharWritable = (HiveCharWritable) primitiveWritable;
final int maxLength = ((CharTypeInfo) primitiveTypeInfo).getLength();
if (bytes == null || length == 0) {
if (length > 0) {
nullBytesReadError(primitiveCategory, batchIndex);
}
hiveCharWritable.set(EMPTY_STRING, maxLength);
} else {
final int adjustedLength = StringExpr.rightTrimAndTruncate(bytes, start, length, ((CharTypeInfo) primitiveTypeInfo).getLength());
if (adjustedLength == 0) {
hiveCharWritable.set(EMPTY_STRING, maxLength);
} else {
hiveCharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), maxLength);
}
}
return primitiveWritable;
}
case DECIMAL:
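// DECIMAL values may arrive either as a Decimal64ColumnVector (a scaled long per row) or as a DecimalColumnVector (a HiveDecimalWritable per row); both are handled below.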
if (colVector instanceof Decimal64ColumnVector) {
Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) colVector;
((HiveDecimalWritable) primitiveWritable).deserialize64(dec64ColVector.vector[adjustedIndex], dec64ColVector.scale);
} else {
// The HiveDecimalWritable set method will quickly copy the deserialized decimal writable fields.
((HiveDecimalWritable) primitiveWritable).set(((DecimalColumnVector) colVector).vector[adjustedIndex]);
}
return primitiveWritable;
case INTERVAL_YEAR_MONTH:
((HiveIntervalYearMonthWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case INTERVAL_DAY_TIME:
((HiveIntervalDayTimeWritable) primitiveWritable).set(((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(adjustedIndex));
return primitiveWritable;
default:
throw new RuntimeException("Primitive category " + primitiveCategory.name() + " not supported");
}
}
case LIST:
{
final ListColumnVector listColumnVector = (ListColumnVector) colVector;
final ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
final ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
final int offset = (int) listColumnVector.offsets[adjustedIndex];
final int size = (int) listColumnVector.lengths[adjustedIndex];
final List list = new ArrayList();
for (int i = 0; i < size; i++) {
list.add(extractRowColumn(listColumnVector.child, listTypeInfo.getListElementTypeInfo(), listObjectInspector.getListElementObjectInspector(), offset + i));
}
return list;
}
case MAP:
{
final MapColumnVector mapColumnVector = (MapColumnVector) colVector;
final MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
final MapObjectInspector mapObjectInspector = (MapObjectInspector) objectInspector;
final int offset = (int) mapColumnVector.offsets[adjustedIndex];
final int size = (int) mapColumnVector.lengths[adjustedIndex];
final Map<Object, Object> map = new LinkedHashMap<Object, Object>();
for (int i = 0; i < size; i++) {
final Object key = extractRowColumn(mapColumnVector.keys, mapTypeInfo.getMapKeyTypeInfo(), mapObjectInspector.getMapKeyObjectInspector(), offset + i);
final Object value = extractRowColumn(mapColumnVector.values, mapTypeInfo.getMapValueTypeInfo(), mapObjectInspector.getMapValueObjectInspector(), offset + i);
map.put(key, value);
}
return map;
}
case STRUCT:
{
final StructColumnVector structColumnVector = (StructColumnVector) colVector;
final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
final StandardStructObjectInspector structInspector = (StandardStructObjectInspector) objectInspector;
final List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
final int size = fieldTypeInfos.size();
final List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
final Object struct = structInspector.create();
for (int i = 0; i < size; i++) {
final StructField structField = structFields.get(i);
final TypeInfo fieldTypeInfo = fieldTypeInfos.get(i);
final Object value = extractRowColumn(structColumnVector.fields[i], fieldTypeInfo, structField.getFieldObjectInspector(), adjustedIndex);
structInspector.setStructFieldData(struct, structField, value);
}
return struct;
}
case UNION:
{
final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
final UnionObjectInspector unionInspector = (UnionObjectInspector) objectInspector;
final List<ObjectInspector> unionInspectors = unionInspector.getObjectInspectors();
final UnionColumnVector unionColumnVector = (UnionColumnVector) colVector;
final byte tag = (byte) unionColumnVector.tags[adjustedIndex];
final Object object = extractRowColumn(unionColumnVector.fields[tag], objectTypeInfos.get(tag), unionInspectors.get(tag), adjustedIndex);
final StandardUnion standardUnion = new StandardUnion();
standardUnion.setTag(tag);
standardUnion.setObject(object);
return standardUnion;
}
default:
throw new RuntimeException("Category " + category.name() + " not supported");
}
}
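To make the primitive path concrete: the FLOAT branch above reads the value out of a DoubleColumnVector and returns it wrapped in a FloatWritable. The following is only an illustrative sketch (not taken from the Hive sources) of calling the public extractRowColumn overload shown above; imports are omitted as in the snippets, and the object inspector argument is not used on the primitive path but a matching writable inspector is passed for completeness.
// Sketch only: exercise the FLOAT branch for one batch slot.
DoubleColumnVector floatCol = new DoubleColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
floatCol.vector[0] = 10.25d; // FLOAT is carried as a double inside the batch
VectorExtractRow extractRow = new VectorExtractRow();
Object result = extractRow.extractRowColumn(floatCol, TypeInfoFactory.floatTypeInfo, PrimitiveObjectInspectorFactory.writableFloatObjectInspector, 0);
// result is a FloatWritable wrapping 10.25f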
Use of org.apache.hadoop.io.FloatWritable in project hive by apache.
From the class VectorAssignRow, method assignRowColumn:
private void assignRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, Object object) {
if (object == null) {
assignNullRowColumn(columnVector, batchIndex, targetTypeInfo);
return;
}
switch(targetTypeInfo.getCategory()) {
case PRIMITIVE:
{
final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
switch(targetPrimitiveCategory) {
case VOID:
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
case BOOLEAN:
if (object instanceof Boolean) {
((LongColumnVector) columnVector).vector[batchIndex] = (((Boolean) object) ? 1 : 0);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = (((BooleanWritable) object).get() ? 1 : 0);
}
break;
case BYTE:
if (object instanceof Byte) {
((LongColumnVector) columnVector).vector[batchIndex] = ((Byte) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((ByteWritable) object).get();
}
break;
case SHORT:
if (object instanceof Short) {
((LongColumnVector) columnVector).vector[batchIndex] = ((Short) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((ShortWritable) object).get();
}
break;
case INT:
if (object instanceof Integer) {
((LongColumnVector) columnVector).vector[batchIndex] = ((Integer) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((IntWritable) object).get();
}
break;
case LONG:
if (object instanceof Long) {
((LongColumnVector) columnVector).vector[batchIndex] = ((Long) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((LongWritable) object).get();
}
break;
case TIMESTAMP:
if (object instanceof Timestamp) {
((TimestampColumnVector) columnVector).set(batchIndex, ((Timestamp) object).toSqlTimestamp());
} else {
((TimestampColumnVector) columnVector).set(batchIndex, ((TimestampWritableV2) object).getTimestamp().toSqlTimestamp());
}
break;
case DATE:
if (object instanceof Date) {
((LongColumnVector) columnVector).vector[batchIndex] = DateWritableV2.dateToDays((Date) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((DateWritableV2) object).getDays();
}
break;
case FLOAT:
if (object instanceof Float) {
((DoubleColumnVector) columnVector).vector[batchIndex] = ((Float) object);
} else {
((DoubleColumnVector) columnVector).vector[batchIndex] = ((FloatWritable) object).get();
}
break;
case DOUBLE:
if (object instanceof Double) {
((DoubleColumnVector) columnVector).vector[batchIndex] = ((Double) object);
} else {
((DoubleColumnVector) columnVector).vector[batchIndex] = ((DoubleWritable) object).get();
}
break;
case BINARY:
{
if (object instanceof byte[]) {
byte[] bytes = (byte[]) object;
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
} else {
BytesWritable bw = (BytesWritable) object;
((BytesColumnVector) columnVector).setVal(batchIndex, bw.getBytes(), 0, bw.getLength());
}
}
break;
case STRING:
{
if (object instanceof String) {
String string = (String) object;
byte[] bytes = string.getBytes();
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
} else {
Text tw = (Text) object;
((BytesColumnVector) columnVector).setVal(batchIndex, tw.getBytes(), 0, tw.getLength());
}
}
break;
case VARCHAR:
{
// UNDONE: Performance problem with conversion to String, then bytes...
// We store VARCHAR type stripped of pads.
HiveVarchar hiveVarchar;
if (object instanceof HiveVarchar) {
hiveVarchar = (HiveVarchar) object;
} else {
hiveVarchar = ((HiveVarcharWritable) object).getHiveVarchar();
}
// TODO: HIVE-13624 Do we need maxLength checking?
byte[] bytes = hiveVarchar.getValue().getBytes();
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
}
break;
case CHAR:
{
// UNDONE: Performance problem with conversion to String, then bytes...
// We store CHAR type stripped of pads.
HiveChar hiveChar;
if (object instanceof HiveChar) {
hiveChar = (HiveChar) object;
} else {
hiveChar = ((HiveCharWritable) object).getHiveChar();
}
// TODO: HIVE-13624 Do we need maxLength checking?
// We store CHAR in vector row batch with padding stripped.
byte[] bytes = hiveChar.getStrippedValue().getBytes();
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
}
break;
case DECIMAL:
if (columnVector instanceof DecimalColumnVector) {
if (object instanceof HiveDecimal) {
((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimal) object);
} else {
((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimalWritable) object);
}
} else {
if (object instanceof HiveDecimal) {
((Decimal64ColumnVector) columnVector).set(batchIndex, (HiveDecimal) object);
} else {
((Decimal64ColumnVector) columnVector).set(batchIndex, (HiveDecimalWritable) object);
}
}
break;
case INTERVAL_YEAR_MONTH:
if (object instanceof HiveIntervalYearMonth) {
((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonth) object).getTotalMonths();
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth().getTotalMonths();
}
break;
case INTERVAL_DAY_TIME:
if (object instanceof HiveIntervalDayTime) {
((IntervalDayTimeColumnVector) columnVector).set(batchIndex, (HiveIntervalDayTime) object);
} else {
((IntervalDayTimeColumnVector) columnVector).set(batchIndex, ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime());
}
break;
default:
throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
}
}
break;
case LIST:
{
final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
final ListTypeInfo listTypeInfo = (ListTypeInfo) targetTypeInfo;
final TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
final List list = (List) object;
final int size = list.size();
final int childCount = listColumnVector.childCount;
listColumnVector.offsets[batchIndex] = childCount;
listColumnVector.lengths[batchIndex] = size;
listColumnVector.childCount = childCount + size;
listColumnVector.child.ensureSize(childCount + size, true);
for (int i = 0; i < size; i++) {
assignRowColumn(listColumnVector.child, childCount + i, elementTypeInfo, list.get(i));
}
}
break;
case MAP:
{
final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
final Map<Object, Object> map = (Map<Object, Object>) object;
final int size = map.size();
int childCount = mapColumnVector.childCount;
mapColumnVector.offsets[batchIndex] = childCount;
mapColumnVector.lengths[batchIndex] = size;
mapColumnVector.keys.ensureSize(childCount + size, true);
mapColumnVector.values.ensureSize(childCount + size, true);
for (Map.Entry<Object, Object> entry : map.entrySet()) {
assignRowColumn(mapColumnVector.keys, childCount, mapTypeInfo.getMapKeyTypeInfo(), entry.getKey());
assignRowColumn(mapColumnVector.values, childCount, mapTypeInfo.getMapValueTypeInfo(), entry.getValue());
childCount++;
}
mapColumnVector.childCount = childCount;
}
break;
case STRUCT:
{
final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
final List<TypeInfo> targetFieldTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
final int size = targetFieldTypeInfos.size();
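// A struct value may be passed either as a List of field values or as an Object[]; both forms are handled below.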
if (object instanceof List) {
final List struct = (List) object;
for (int i = 0; i < size; i++) {
assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), struct.get(i));
}
} else {
final Object[] array = (Object[]) object;
for (int i = 0; i < size; i++) {
assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), array[i]);
}
}
}
break;
case UNION:
{
final StandardUnion union = (StandardUnion) object;
final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
final byte tag = union.getTag();
unionColumnVector.tags[batchIndex] = tag;
assignRowColumn(unionColumnVector.fields[tag], batchIndex, objectTypeInfos.get(tag), union.getObject());
}
break;
default:
throw new RuntimeException("Category " + targetTypeInfo.getCategory().name() + " not supported");
}
/*
* We always set the null flag to false when there is a value.
*/
columnVector.isNull[batchIndex] = false;
}
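For concreteness, here is a conceptual sketch of what the FLOAT branch above reduces to when a FloatWritable is assigned into one batch slot. This is an assumption-level illustration of the branch logic, not the public VectorAssignRow API, and imports are omitted as in the snippets.
// Sketch only: mirrors the FLOAT case of assignRowColumn for a single row.
DoubleColumnVector col = new DoubleColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
Object object = new FloatWritable(3.5f);
int batchIndex = 5;
if (object instanceof Float) {
col.vector[batchIndex] = (Float) object;
} else {
col.vector[batchIndex] = ((FloatWritable) object).get();
}
// A non-null assignment always clears the null flag for that row.
col.isNull[batchIndex] = false;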
Use of org.apache.hadoop.io.FloatWritable in project hive by apache.
From the class TestHBaseSerDe, method testHBaseSerDeWithHiveMapToHBaseColumnFamily:
@Test
public void testHBaseSerDeWithHiveMapToHBaseColumnFamily() throws SerDeException {
byte[] cfint = "cf-int".getBytes();
byte[] cfbyte = "cf-byte".getBytes();
byte[] cfshort = "cf-short".getBytes();
byte[] cflong = "cf-long".getBytes();
byte[] cffloat = "cf-float".getBytes();
byte[] cfdouble = "cf-double".getBytes();
byte[] cfbool = "cf-bool".getBytes();
byte[][] columnFamilies = new byte[][] { cfint, cfbyte, cfshort, cflong, cffloat, cfdouble, cfbool };
byte[][] rowKeys = new byte[][] { Integer.toString(1).getBytes(), Integer.toString(Integer.MIN_VALUE).getBytes(), Integer.toString(Integer.MAX_VALUE).getBytes() };
byte[][][] columnQualifiersAndValues = new byte[][][] {
{ Bytes.toBytes(1), new byte[] { 1 }, Bytes.toBytes((short) 1), Bytes.toBytes((long) 1), Bytes.toBytes(1.0F), Bytes.toBytes(1.0), Bytes.toBytes(true) },
{ Bytes.toBytes(Integer.MIN_VALUE), new byte[] { Byte.MIN_VALUE }, Bytes.toBytes(Short.MIN_VALUE), Bytes.toBytes(Long.MIN_VALUE), Bytes.toBytes(Float.MIN_VALUE), Bytes.toBytes(Double.MIN_VALUE), Bytes.toBytes(false) },
{ Bytes.toBytes(Integer.MAX_VALUE), new byte[] { Byte.MAX_VALUE }, Bytes.toBytes(Short.MAX_VALUE), Bytes.toBytes(Long.MAX_VALUE), Bytes.toBytes(Float.MAX_VALUE), Bytes.toBytes(Double.MAX_VALUE), Bytes.toBytes(true) } };
List<Cell> kvs = new ArrayList<Cell>();
Result[] r = new Result[] { null, null, null };
Put[] p = new Put[] { null, null, null };
for (int i = 0; i < r.length; i++) {
kvs.clear();
p[i] = new Put(rowKeys[i]);
for (int j = 0; j < columnQualifiersAndValues[i].length; j++) {
kvs.add(new KeyValue(rowKeys[i], columnFamilies[j], columnQualifiersAndValues[i][j], columnQualifiersAndValues[i][j]));
p[i].addColumn(columnFamilies[j], columnQualifiersAndValues[i][j], columnQualifiersAndValues[i][j]);
}
r[i] = Result.create(kvs);
}
Object[][] expectedData = {
{ new Text(Integer.toString(1)), new IntWritable(1), new ByteWritable((byte) 1), new ShortWritable((short) 1), new LongWritable(1), new FloatWritable(1.0F), new DoubleWritable(1.0), new BooleanWritable(true) },
{ new Text(Integer.toString(Integer.MIN_VALUE)), new IntWritable(Integer.MIN_VALUE), new ByteWritable(Byte.MIN_VALUE), new ShortWritable(Short.MIN_VALUE), new LongWritable(Long.MIN_VALUE), new FloatWritable(Float.MIN_VALUE), new DoubleWritable(Double.MIN_VALUE), new BooleanWritable(false) },
{ new Text(Integer.toString(Integer.MAX_VALUE)), new IntWritable(Integer.MAX_VALUE), new ByteWritable(Byte.MAX_VALUE), new ShortWritable(Short.MAX_VALUE), new LongWritable(Long.MAX_VALUE), new FloatWritable(Float.MAX_VALUE), new DoubleWritable(Double.MAX_VALUE), new BooleanWritable(true) } };
HBaseSerDe hbaseSerDe = new HBaseSerDe();
Configuration conf = new Configuration();
Properties tbl = createPropertiesForHiveMapHBaseColumnFamily();
hbaseSerDe.initialize(conf, tbl, null);
deserializeAndSerializeHiveMapHBaseColumnFamily(hbaseSerDe, r, p, expectedData, rowKeys, columnFamilies, columnQualifiersAndValues);
hbaseSerDe = new HBaseSerDe();
conf = new Configuration();
tbl = createPropertiesForHiveMapHBaseColumnFamilyII();
hbaseSerDe.initialize(conf, tbl, null);
deserializeAndSerializeHiveMapHBaseColumnFamily(hbaseSerDe, r, p, expectedData, rowKeys, columnFamilies, columnQualifiersAndValues);
}
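The float column family in this test relies on HBase's Bytes encoding. The following small round-trip sketch (illustrative only, imports omitted) shows the expected mapping from the stored bytes to the FloatWritable the SerDe is expected to surface, using org.apache.hadoop.hbase.util.Bytes.
// Bytes encodes a float as 4 big-endian IEEE-754 bytes.
byte[] encoded = Bytes.toBytes(1.0F);
float decoded = Bytes.toFloat(encoded); // 1.0f
FloatWritable expected = new FloatWritable(decoded); // matches the FloatWritable in expectedData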