Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache:
the class TestVectorFilterExpressions, method testFilterDecimalColLessScalar.
/**
 * Spot check col < scalar for decimal.
 */
@Test
public void testFilterDecimalColLessScalar() {
  VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
  HiveDecimal scalar = HiveDecimal.create("0");
  VectorExpression expr = new FilterDecimalColLessDecimalScalar(0, scalar);
  expr.evaluate(b);
  // check that the right row(s) are selected
  assertTrue(b.selectedInUse);
  assertEquals(1, b.selected[0]);
  assertEquals(1, b.size);
}
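For context, a minimal sketch of what a batch fixture like getVectorizedRowBatch1DecimalCol could look like. The column values and scale here are assumptions, chosen so that exactly one row (index 1) satisfies col < 0 and the assertions above hold; the real helper lives elsewhere in TestVectorFilterExpressions.

// Hypothetical fixture: one decimal column, three rows.
private VectorizedRowBatch getVectorizedRowBatch1DecimalCol() {
  DecimalColumnVector dcv = new DecimalColumnVector(3, 18, 2);  // 3 rows, decimal(18,2)
  dcv.set(0, HiveDecimal.create("1.20"));   // >= 0, not selected
  dcv.set(1, HiveDecimal.create("-3.30"));  // the only value < 0, so selected[0] == 1
  dcv.set(2, HiveDecimal.create("0"));      // == 0, not selected by a strict <
  VectorizedRowBatch b = new VectorizedRowBatch(1);  // one column
  b.cols[0] = dcv;
  b.size = 3;
  return b;
}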
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache:
the class GenericUDFFormatNumber, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object arg0;
  Object arg1;
  if ((arg0 = arguments[0].get()) == null || (arg1 = arguments[1].get()) == null) {
    return null;
  }
  if (!dType.equals(PrimitiveCategory.STRING)) {
    int dValue = ((IntObjectInspector) argumentOIs[1]).get(arg1);
    if (dValue < 0) {
      throw new HiveException("Argument 2 of function FORMAT_NUMBER must be >= 0, but \""
          + dValue + "\" was found");
    }
    if (dValue != lastDValue) {
      // construct a new DecimalFormat only if dValue has changed
      pattern.delete(0, pattern.length());
      pattern.append("#,###,###,###,###,###,##0");
      // decimal places
      if (dValue > 0) {
        pattern.append(".");
        for (int i = 0; i < dValue; i++) {
          pattern.append("0");
        }
      }
      DecimalFormat dFormat = new DecimalFormat(pattern.toString());
      lastDValue = dValue;
      numberFormat.applyPattern(dFormat.toPattern());
    }
  }
  double xDoubleValue = 0.0;
  float xFloatValue = 0.0f;
  HiveDecimal xDecimalValue = null;
  int xIntValue = 0;
  long xLongValue = 0L;
  PrimitiveObjectInspector xObjectInspector = (PrimitiveObjectInspector) argumentOIs[0];
  switch (xObjectInspector.getPrimitiveCategory()) {
    case VOID:
      // fall through
    case DOUBLE:
      xDoubleValue = ((DoubleObjectInspector) argumentOIs[0]).get(arg0);
      resultText.set(numberFormat.format(xDoubleValue));
      break;
    case FLOAT:
      xFloatValue = ((FloatObjectInspector) argumentOIs[0]).get(arg0);
      resultText.set(numberFormat.format(xFloatValue));
      break;
    case DECIMAL:
      xDecimalValue = ((HiveDecimalObjectInspector) argumentOIs[0]).getPrimitiveJavaObject(arg0);
      resultText.set(numberFormat.format(xDecimalValue.bigDecimalValue()));
      break;
    case BYTE:
    case SHORT:
    case INT:
      xIntValue = ((IntObjectInspector) argumentOIs[0]).get(arg0);
      resultText.set(numberFormat.format(xIntValue));
      break;
    case LONG:
      xLongValue = ((LongObjectInspector) argumentOIs[0]).get(arg0);
      resultText.set(numberFormat.format(xLongValue));
      break;
    default:
      throw new HiveException("Argument 1 of function FORMAT_NUMBER must be \""
          + serdeConstants.TINYINT_TYPE_NAME + "\" or \"" + serdeConstants.SMALLINT_TYPE_NAME
          + "\" or \"" + serdeConstants.INT_TYPE_NAME + "\" or \"" + serdeConstants.BIGINT_TYPE_NAME
          + "\" or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\" or \"" + serdeConstants.FLOAT_TYPE_NAME
          + "\" or \"" + serdeConstants.DECIMAL_TYPE_NAME + "\", but \""
          + argumentOIs[0].getTypeName() + "\" was found.");
  }
  return resultText;
}
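To make the pattern construction above concrete, a small standalone sketch using only java.text.DecimalFormat (no Hive types; the input values are arbitrary examples):

import java.text.DecimalFormat;

public class FormatNumberPatternDemo {
  public static void main(String[] args) {
    // The pattern the UDF assembles for FORMAT_NUMBER(x, 2):
    // a grouped integer part plus one "0" per requested decimal place.
    DecimalFormat f = new DecimalFormat("#,###,###,###,###,###,##0.00");
    System.out.println(f.format(12345.678));  // 12,345.68 (HALF_EVEN rounding by default)
    System.out.println(f.format(0));          // 0.00
  }
}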
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache:
the class TestOrcFile, method testUnionAndTimestamp.
/**
 * We test union, timestamp, and decimal separately since we need to make the
 * object inspector manually. (The Hive reflection-based object inspector
 * doesn't handle them properly.)
 */
@Test
public void testUnionAndTimestamp() throws Exception {
  List<OrcProto.Type> types = new ArrayList<OrcProto.Type>();
  types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRUCT)
      .addFieldNames("time").addFieldNames("union").addFieldNames("decimal")
      .addSubtypes(1).addSubtypes(2).addSubtypes(5).build());
  types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.TIMESTAMP).build());
  types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.UNION)
      .addSubtypes(3).addSubtypes(4).build());
  types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.INT).build());
  types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.STRING).build());
  types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.DECIMAL).build());
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = OrcStruct.createObjectInspector(0, types);
  }
  HiveDecimal maxValue = HiveDecimal.create("10000000000000000000");
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf)
          .inspector(inspector)
          .stripeSize(1000)
          .compress(CompressionKind.NONE)
          .batchSize(1000)
          .bufferSize(100)
          .blockPadding(false));
  OrcStruct row = new OrcStruct(3);
  OrcUnion union = new OrcUnion();
  row.setFieldValue(1, union);
  row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")));
  HiveDecimal value = HiveDecimal.create("12345678.6547456");
  row.setFieldValue(2, new HiveDecimalWritable(value));
  union.set((byte) 0, new IntWritable(42));
  writer.addRow(row);
  row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
  union.set((byte) 1, new Text("hello"));
  value = HiveDecimal.create("-5643.234");
  row.setFieldValue(2, new HiveDecimalWritable(value));
  writer.addRow(row);
  row.setFieldValue(0, null);
  row.setFieldValue(1, null);
  row.setFieldValue(2, null);
  writer.addRow(row);
  row.setFieldValue(1, union);
  union.set((byte) 0, null);
  writer.addRow(row);
  union.set((byte) 1, null);
  writer.addRow(row);
  union.set((byte) 0, new IntWritable(200000));
  row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")));
  value = HiveDecimal.create("10000000000000000000");
  row.setFieldValue(2, new HiveDecimalWritable(value));
  writer.addRow(row);
  Random rand = new Random(42);
  for (int i = 1970; i < 2038; ++i) {
    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)));
    if ((i & 1) == 0) {
      union.set((byte) 0, new IntWritable(i * i));
    } else {
      union.set((byte) 1, new Text(Integer.toString(i * i)));
    }
    value = HiveDecimal.create(new BigInteger(64, rand), rand.nextInt(18));
    row.setFieldValue(2, new HiveDecimalWritable(value));
    if (maxValue.compareTo(value) < 0) {
      maxValue = value;
    }
    writer.addRow(row);
  }
  // let's add a lot of constant rows to test the RLE
  row.setFieldValue(0, null);
  union.set((byte) 0, new IntWritable(1732050807));
  row.setFieldValue(2, null);
  for (int i = 0; i < 5000; ++i) {
    writer.addRow(row);
  }
  union.set((byte) 0, new IntWritable(0));
  writer.addRow(row);
  union.set((byte) 0, new IntWritable(10));
  writer.addRow(row);
  union.set((byte) 0, new IntWritable(138));
  writer.addRow(row);
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  TypeDescription schema = writer.getSchema();
  assertEquals(5, schema.getMaximumId());
  boolean[] expected = new boolean[] { false, false, false, false, false, false };
  boolean[] included = OrcUtils.includeColumns("", schema);
  assertEquals(true, Arrays.equals(expected, included));
  expected = new boolean[] { false, true, false, false, false, true };
  included = OrcUtils.includeColumns("time,decimal", schema);
  assertEquals(true, Arrays.equals(expected, included));
  expected = new boolean[] { false, false, true, true, true, false };
  included = OrcUtils.includeColumns("union", schema);
  assertEquals(true, Arrays.equals(expected, included));
  assertEquals(false, reader.getMetadataKeys().iterator().hasNext());
  assertEquals(5077, reader.getNumberOfRows());
  DecimalColumnStatistics stats = (DecimalColumnStatistics) reader.getStatistics()[5];
  assertEquals(71, stats.getNumberOfValues());
  assertEquals(HiveDecimal.create("-5643.234"), stats.getMinimum());
  assertEquals(maxValue, stats.getMaximum());
  // TODO: fix this
  // assertEquals(null, stats.getSum());
  int stripeCount = 0;
  int rowCount = 0;
  long currentOffset = -1;
  for (StripeInformation stripe : reader.getStripes()) {
    stripeCount += 1;
    rowCount += stripe.getNumberOfRows();
    if (currentOffset < 0) {
      currentOffset = stripe.getOffset() + stripe.getLength();
    } else {
      assertEquals(currentOffset, stripe.getOffset());
      currentOffset += stripe.getLength();
    }
  }
  assertEquals(reader.getNumberOfRows(), rowCount);
  assertEquals(2, stripeCount);
  assertEquals(reader.getContentLength(), currentOffset);
  RecordReader rows = reader.rows();
  assertEquals(0, rows.getRowNumber());
  assertEquals(0.0, rows.getProgress(), 0.000001);
  assertEquals(true, rows.hasNext());
  row = (OrcStruct) rows.next(null);
  assertEquals(1, rows.getRowNumber());
  inspector = reader.getObjectInspector();
  assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>",
      inspector.getTypeName());
  assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")),
      row.getFieldValue(0));
  union = (OrcUnion) row.getFieldValue(1);
  assertEquals(0, union.getTag());
  assertEquals(new IntWritable(42), union.getObject());
  assertEquals(new HiveDecimalWritable(HiveDecimal.create("12345678.6547456")),
      row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(2, rows.getRowNumber());
  assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
      row.getFieldValue(0));
  assertEquals(1, union.getTag());
  assertEquals(new Text("hello"), union.getObject());
  assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")), row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(null, row.getFieldValue(0));
  assertEquals(null, row.getFieldValue(1));
  assertEquals(null, row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(null, row.getFieldValue(0));
  union = (OrcUnion) row.getFieldValue(1);
  assertEquals(0, union.getTag());
  assertEquals(null, union.getObject());
  assertEquals(null, row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(null, row.getFieldValue(0));
  assertEquals(1, union.getTag());
  assertEquals(null, union.getObject());
  assertEquals(null, row.getFieldValue(2));
  row = (OrcStruct) rows.next(row);
  assertEquals(new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")),
      row.getFieldValue(0));
  assertEquals(new IntWritable(200000), union.getObject());
  assertEquals(new HiveDecimalWritable(HiveDecimal.create("10000000000000000000")),
      row.getFieldValue(2));
  rand = new Random(42);
  for (int i = 1970; i < 2038; ++i) {
    row = (OrcStruct) rows.next(row);
    assertEquals(new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
        row.getFieldValue(0));
    if ((i & 1) == 0) {
      assertEquals(0, union.getTag());
      assertEquals(new IntWritable(i * i), union.getObject());
    } else {
      assertEquals(1, union.getTag());
      assertEquals(new Text(Integer.toString(i * i)), union.getObject());
    }
    assertEquals(new HiveDecimalWritable(HiveDecimal.create(new BigInteger(64, rand),
        rand.nextInt(18))), row.getFieldValue(2));
  }
  for (int i = 0; i < 5000; ++i) {
    row = (OrcStruct) rows.next(row);
    assertEquals(new IntWritable(1732050807), union.getObject());
  }
  row = (OrcStruct) rows.next(row);
  assertEquals(new IntWritable(0), union.getObject());
  row = (OrcStruct) rows.next(row);
  assertEquals(new IntWritable(10), union.getObject());
  row = (OrcStruct) rows.next(row);
  assertEquals(new IntWritable(138), union.getObject());
  assertEquals(false, rows.hasNext());
  assertEquals(1.0, rows.getProgress(), 0.00001);
  assertEquals(reader.getNumberOfRows(), rows.getRowNumber());
  rows.seekToRow(1);
  row = (OrcStruct) rows.next(row);
  assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
      row.getFieldValue(0));
  assertEquals(1, union.getTag());
  assertEquals(new Text("hello"), union.getObject());
  assertEquals(new HiveDecimalWritable(HiveDecimal.create("-5643.234")), row.getFieldValue(2));
  rows.close();
}
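The include arrays above follow ORC's pre-order column numbering, where id 0 is the root struct and a compound column covers all of its children. A sketch of the same numbering via TypeDescription (assuming the org.apache.orc TypeDescription API that this test already relies on):

// Ids: 0 = root struct, 1 = time, 2 = union, 3 = union/int, 4 = union/string, 5 = decimal
TypeDescription schema = TypeDescription.fromString(
    "struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>");
System.out.println(schema.getMaximumId());  // 5
// Selecting "union" includes ids 2, 3, and 4, which is why the expected
// array for includeColumns("union", schema) is {false, false, true, true, true, false}.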
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache:
the class TestVectorizedMapColumnReader, method writeMapData.
protected static void writeMapData(ParquetWriter<Group> writer, boolean isDictionaryEncoding,
    int elementNum) throws IOException {
  SimpleGroupFactory f = new SimpleGroupFactory(schema);
  int mapMaxSize = 4;
  int mapElementIndex = 0;
  for (int i = 0; i < elementNum; i++) {
    boolean isNull = isNull(i);
    Group group = f.newGroup();
    int mapSize = i % mapMaxSize + 1;
    if (!isNull) {
      // map_field exercises a multi-level map definition
      Group multipleLevelGroup = group.addGroup("map_field");
      for (int j = 0; j < mapSize; j++) {
        int intValForMap = getIntValue(isDictionaryEncoding, mapElementIndex);
        long longValForMap = getLongValue(isDictionaryEncoding, mapElementIndex);
        double doubleValForMap = getDoubleValue(isDictionaryEncoding, mapElementIndex);
        float floatValForMap = getFloatValue(isDictionaryEncoding, mapElementIndex);
        Binary binaryValForMap = getBinaryValue(isDictionaryEncoding, mapElementIndex);
        HiveDecimal hd = getDecimal(isDictionaryEncoding, mapElementIndex).setScale(2);
        HiveDecimalWritable hdw = new HiveDecimalWritable(hd);
        Binary decimalValForMap = Binary.fromConstantByteArray(hdw.getInternalStorage());
        group.addGroup("map_int32").append("key", intValForMap).append("value", intValForMap);
        group.addGroup("map_int64").append("key", longValForMap).append("value", longValForMap);
        group.addGroup("map_double").append("key", doubleValForMap).append("value", doubleValForMap);
        group.addGroup("map_float").append("key", floatValForMap).append("value", floatValForMap);
        group.addGroup("map_binary").append("key", binaryValForMap).append("value", binaryValForMap);
        group.addGroup("map_decimal").append("key", decimalValForMap).append("value", decimalValForMap);
        multipleLevelGroup.addGroup("map").append("key", binaryValForMap).append("value", binaryValForMap);
        mapElementIndex++;
      }
    }
    writer.write(group);
  }
  writer.close();
}
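The decimal key/value above is encoded as the unscaled value's two's-complement bytes (HiveDecimalWritable.getInternalStorage()) with the scale fixed at 2 by the writer. A minimal round-trip sketch of that convention (the value is an arbitrary example; the real reader recovers the scale from the Parquet schema rather than hard-coding it):

// Encode: serialize the unscaled value to bytes.
HiveDecimal hd = HiveDecimal.create("1234.56");                  // scale 2
byte[] bytes = new HiveDecimalWritable(hd).getInternalStorage();
// Decode: rebuild the decimal from the bytes plus the known scale.
HiveDecimalWritable decoded = new HiveDecimalWritable(bytes, 2);
System.out.println(decoded.getHiveDecimal());                    // 1234.56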
Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache:
the class BinarySortableSerDe, method deserialize.
static Object deserialize(InputByteBuffer buffer, TypeInfo type, boolean invert, byte nullMarker,
    byte notNullMarker, Object reuse) throws IOException {
  // Is this field a null?
  byte isNull = buffer.read(invert);
  if (isNull == nullMarker) {
    return null;
  }
  assert (isNull == notNullMarker);
  switch (type.getCategory()) {
    case PRIMITIVE: {
      PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
      switch (ptype.getPrimitiveCategory()) {
        case VOID: {
          return null;
        }
        case BOOLEAN: {
          BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
          byte b = buffer.read(invert);
          assert (b == 1 || b == 2);
          r.set(b == 2);
          return r;
        }
        case BYTE: {
          ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
          r.set((byte) (buffer.read(invert) ^ 0x80));
          return r;
        }
        case SHORT: {
          ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
          int v = buffer.read(invert) ^ 0x80;
          v = (v << 8) + (buffer.read(invert) & 0xff);
          r.set((short) v);
          return r;
        }
        case INT: {
          IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
          r.set(deserializeInt(buffer, invert));
          return r;
        }
        case LONG: {
          LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
          r.set(deserializeLong(buffer, invert));
          return r;
        }
        case FLOAT: {
          FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
          int v = 0;
          for (int i = 0; i < 4; i++) {
            v = (v << 8) + (buffer.read(invert) & 0xff);
          }
          if ((v & (1 << 31)) == 0) {
            // negative number, flip all bits
            v = ~v;
          } else {
            // positive number, flip the first bit
            v = v ^ (1 << 31);
          }
          r.set(Float.intBitsToFloat(v));
          return r;
        }
        case DOUBLE: {
          DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
          long v = 0;
          for (int i = 0; i < 8; i++) {
            v = (v << 8) + (buffer.read(invert) & 0xff);
          }
          if ((v & (1L << 63)) == 0) {
            // negative number, flip all bits
            v = ~v;
          } else {
            // positive number, flip the first bit
            v = v ^ (1L << 63);
          }
          r.set(Double.longBitsToDouble(v));
          return r;
        }
        case STRING: {
          Text r = reuse == null ? new Text() : (Text) reuse;
          return deserializeText(buffer, invert, r);
        }
        case CHAR: {
          HiveCharWritable r = reuse == null ? new HiveCharWritable() : (HiveCharWritable) reuse;
          // Use the internal Text member to read the value.
          deserializeText(buffer, invert, r.getTextValue());
          r.enforceMaxLength(getCharacterMaxLength(type));
          return r;
        }
        case VARCHAR: {
          HiveVarcharWritable r = reuse == null ? new HiveVarcharWritable() : (HiveVarcharWritable) reuse;
          // Use HiveVarchar's internal Text member to read the value.
          deserializeText(buffer, invert, r.getTextValue());
          // If we cached helper data for deserialization we could avoid
          // calling getCharacterMaxLength() on every deserialize call.
          r.enforceMaxLength(getCharacterMaxLength(type));
          return r;
        }
        case BINARY: {
          BytesWritable bw = new BytesWritable();
          // Get the actual length first.
          int start = buffer.tell();
          int length = 0;
          do {
            byte b = buffer.read(invert);
            if (b == 0) {
              // end of string
              break;
            }
            if (b == 1) {
              // the last char is an escape char; read the actual char
              buffer.read(invert);
            }
            length++;
          } while (true);
          if (length == buffer.tell() - start) {
            // No escaping happened, so we are already done.
            bw.set(buffer.getData(), start, length);
          } else {
            // Escaping happened; we need to copy byte-by-byte.
            // 1. Set the length first.
            bw.set(buffer.getData(), start, length);
            // 2. Reset the pointer.
            buffer.seek(start);
            // 3. Copy the data.
            byte[] rdata = bw.getBytes();
            for (int i = 0; i < length; i++) {
              byte b = buffer.read(invert);
              if (b == 1) {
                // The last char is an escape char; read the actual char.
                // The serialization format escapes \0 to \1 and \1 to \2
                // to make sure the string is null-terminated.
                b = (byte) (buffer.read(invert) - 1);
              }
              rdata[i] = b;
            }
            // 4. Read the null terminator.
            byte b = buffer.read(invert);
            assert (b == 0);
          }
          return bw;
        }
        case DATE: {
          DateWritable d = reuse == null ? new DateWritable() : (DateWritable) reuse;
          d.set(deserializeInt(buffer, invert));
          return d;
        }
        case TIMESTAMP: {
          TimestampWritable t = reuse == null ? new TimestampWritable() : (TimestampWritable) reuse;
          byte[] bytes = new byte[TimestampWritable.BINARY_SORTABLE_LENGTH];
          for (int i = 0; i < bytes.length; i++) {
            bytes[i] = buffer.read(invert);
          }
          t.setBinarySortable(bytes, 0);
          return t;
        }
        case TIMESTAMPLOCALTZ: {
          TimestampLocalTZWritable tstz = reuse == null ? new TimestampLocalTZWritable()
              : (TimestampLocalTZWritable) reuse;
          byte[] data = new byte[TimestampLocalTZWritable.BINARY_SORTABLE_LENGTH];
          for (int i = 0; i < data.length; i++) {
            data[i] = buffer.read(invert);
          }
          // Across the MR process boundary the time zone is normalized and stored
          // in the type; it is not carried in the data for each row.
          tstz.fromBinarySortable(data, 0, ((TimestampLocalTZTypeInfo) type).timeZone());
          return tstz;
        }
        case INTERVAL_YEAR_MONTH: {
          HiveIntervalYearMonthWritable i = reuse == null ? new HiveIntervalYearMonthWritable()
              : (HiveIntervalYearMonthWritable) reuse;
          i.set(deserializeInt(buffer, invert));
          return i;
        }
        case INTERVAL_DAY_TIME: {
          HiveIntervalDayTimeWritable i = reuse == null ? new HiveIntervalDayTimeWritable()
              : (HiveIntervalDayTimeWritable) reuse;
          long totalSecs = deserializeLong(buffer, invert);
          int nanos = deserializeInt(buffer, invert);
          i.set(totalSecs, nanos);
          return i;
        }
        case DECIMAL: {
          // See the serialization of decimal (below) for an explanation of the format.
          HiveDecimalWritable bdw = reuse == null ? new HiveDecimalWritable() : (HiveDecimalWritable) reuse;
          int b = buffer.read(invert) - 1;
          assert (b == 1 || b == -1 || b == 0);
          boolean positive = b != -1;
          int factor = buffer.read(invert) ^ 0x80;
          for (int i = 0; i < 3; i++) {
            factor = (factor << 8) + (buffer.read(invert) & 0xff);
          }
          if (!positive) {
            factor = -factor;
          }
          int start = buffer.tell();
          int length = 0;
          do {
            b = buffer.read(positive ? invert : !invert);
            assert (b != 1);
            if (b == 0) {
              // end of digits
              break;
            }
            length++;
          } while (true);
          final byte[] decimalBuffer = new byte[length];
          buffer.seek(start);
          for (int i = 0; i < length; ++i) {
            decimalBuffer[i] = buffer.read(positive ? invert : !invert);
          }
          // read the null byte again
          buffer.read(positive ? invert : !invert);
          String digits = new String(decimalBuffer, 0, length, decimalCharSet);
          BigInteger bi = new BigInteger(digits);
          HiveDecimal bd = HiveDecimal.create(bi).scaleByPowerOfTen(factor - length);
          if (!positive) {
            bd = bd.negate();
          }
          bdw.set(bd);
          return bdw;
        }
        default: {
          throw new RuntimeException("Unrecognized type: " + ptype.getPrimitiveCategory());
        }
      }
    }
    case LIST: {
      ListTypeInfo ltype = (ListTypeInfo) type;
      TypeInfo etype = ltype.getListElementTypeInfo();
      // Create the list if needed.
      ArrayList<Object> r = reuse == null ? new ArrayList<Object>() : (ArrayList<Object>) reuse;
      // Read the list.
      int size = 0;
      while (true) {
        int more = buffer.read(invert);
        if (more == 0) {
          // \0 terminates the list
          break;
        }
        // \1 is followed by each element
        assert (more == 1);
        if (size == r.size()) {
          r.add(null);
        }
        r.set(size, deserialize(buffer, etype, invert, nullMarker, notNullMarker, r.get(size)));
        size++;
      }
      // Remove additional elements if the list is reused.
      while (r.size() > size) {
        r.remove(r.size() - 1);
      }
      return r;
    }
    case MAP: {
      MapTypeInfo mtype = (MapTypeInfo) type;
      TypeInfo ktype = mtype.getMapKeyTypeInfo();
      TypeInfo vtype = mtype.getMapValueTypeInfo();
      // Create the map if needed.
      Map<Object, Object> r;
      if (reuse == null) {
        r = new HashMap<Object, Object>();
      } else {
        r = (HashMap<Object, Object>) reuse;
        r.clear();
      }
      while (true) {
        int more = buffer.read(invert);
        if (more == 0) {
          // \0 terminates the map
          break;
        }
        // \1 is followed by each key and then each value
        assert (more == 1);
        Object k = deserialize(buffer, ktype, invert, nullMarker, notNullMarker, null);
        Object v = deserialize(buffer, vtype, invert, nullMarker, notNullMarker, null);
        r.put(k, v);
      }
      return r;
    }
    case STRUCT: {
      StructTypeInfo stype = (StructTypeInfo) type;
      List<TypeInfo> fieldTypes = stype.getAllStructFieldTypeInfos();
      int size = fieldTypes.size();
      // Create the struct if needed.
      ArrayList<Object> r = reuse == null ? new ArrayList<Object>(size) : (ArrayList<Object>) reuse;
      assert (r.size() <= size);
      // Pad the struct to its full size.
      while (r.size() < size) {
        r.add(null);
      }
      // Read the fields one by one.
      for (int eid = 0; eid < size; eid++) {
        r.set(eid, deserialize(buffer, fieldTypes.get(eid), invert, nullMarker, notNullMarker,
            r.get(eid)));
      }
      return r;
    }
    case UNION: {
      UnionTypeInfo utype = (UnionTypeInfo) type;
      StandardUnion r = reuse == null ? new StandardUnion() : (StandardUnion) reuse;
      // Read the tag.
      byte tag = buffer.read(invert);
      r.setTag(tag);
      r.setObject(deserialize(buffer, utype.getAllUnionObjectTypeInfos().get(tag), invert,
          nullMarker, notNullMarker, null));
      return r;
    }
    default: {
      throw new RuntimeException("Unrecognized type: " + type.getCategory());
    }
  }
}
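To see what the DECIMAL branch computes, a short worked example of the reconstruction arithmetic (the sign, factor, and digit string are assumed inputs, as if already read from the stream): the serialized form carries a sign, a base-10 exponent ("factor"), and the significant digits, and the value is digits * 10^(factor - length).

int factor = 3;                    // the decimal point sits 3 digits into the number
String digits = "23456";           // significant digits, most significant first
int length = digits.length();      // 5
BigInteger bi = new BigInteger(digits);
// 23456 * 10^(3 - 5) = 234.56
HiveDecimal bd = HiveDecimal.create(bi).scaleByPowerOfTen(factor - length);
System.out.println(bd);            // 234.56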