Use of org.apache.hadoop.hive.serde2.io.ShortWritable in the Apache Hive project: class TestTeradataBinarySerdeGeneral, method testDeserializeAndSerialize.
@Test
public void testDeserializeAndSerialize() throws Exception {
BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode("00004e6f762020202020201b006120646179203d2031312f31312f31312020202020202020203435ec10000000000000c5feffff" + "7707010000000000002a40ef2b3dab0d14e6531c8908a72700000007b20100313931312d31312d31312031393a32303a32312e34" + "33333230301b00746573743a20202020202020343333322020202020202020333135"));
List<Object> row = (List<Object>) serde.deserialize(in);
Assert.assertEquals("Nov", ((HiveCharWritable) row.get(0)).toString());
Assert.assertEquals("a day = 11/11/11 45", ((HiveVarcharWritable) row.get(1)).toString());
Assert.assertEquals(4332L, ((LongWritable) row.get(2)).get());
Assert.assertEquals(-315, ((IntWritable) row.get(3)).get());
Assert.assertEquals((short) 1911, ((ShortWritable) row.get(4)).get());
Assert.assertEquals((byte) 1, ((ByteWritable) row.get(5)).get());
Assert.assertEquals((double) 13, ((DoubleWritable) row.get(6)).get(), 0);
Assert.assertEquals(30, ((HiveDecimalWritable) row.get(7)).getScale());
Assert.assertEquals((double) 3.141592653589793238462643383279, ((HiveDecimalWritable) row.get(7)).getHiveDecimal().doubleValue(), 0);
Assert.assertEquals("1911-11-11", ((DateWritableV2) row.get(8)).toString());
Assert.assertEquals("1911-11-11 19:20:21.4332", ((TimestampWritableV2) row.get(9)).toString());
Assert.assertEquals(27, ((BytesWritable) row.get(10)).getLength());
BytesWritable res = (BytesWritable) serde.serialize(row, serde.getObjectInspector());
Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes()));
}
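For context, here is a minimal standalone sketch of the ShortWritable round trip this SerDe test ultimately relies on. The class name and the value 1911 are illustrative, not taken from the Hive test: a SMALLINT value is wrapped in a ShortWritable, written through the Writable interface, and read back unchanged.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class ShortWritableRoundTrip {
  public static void main(String[] args) throws Exception {
    // Wrap a SMALLINT value and write it through the Writable interface.
    ShortWritable written = new ShortWritable((short) 1911);
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    written.write(new DataOutputStream(buffer));
    // Read the bytes back into a fresh instance and compare.
    ShortWritable read = new ShortWritable();
    read.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
    System.out.println(read.get() == written.get());
  }
}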
Use of org.apache.hadoop.hive.serde2.io.ShortWritable in the Apache Hive project: class TestVectorizedORCReader, method checkVectorizedReader.
private void checkVectorizedReader() throws Exception {
Reader vreader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf));
RecordReaderImpl vrr = (RecordReaderImpl) vreader.rows();
RecordReaderImpl rr = (RecordReaderImpl) reader.rows();
VectorizedRowBatch batch = reader.getSchema().createRowBatchV2();
OrcStruct row = null;
long lastRowNumber = -1;
// Check Vectorized ORC reader against ORC row reader
while (vrr.nextBatch(batch)) {
Assert.assertEquals(lastRowNumber + 1, vrr.getRowNumber());
for (int i = 0; i < batch.size; i++) {
Assert.assertEquals(rr.getRowNumber(), vrr.getRowNumber() + i);
lastRowNumber = rr.getRowNumber();
row = (OrcStruct) rr.next(row);
for (int j = 0; j < batch.cols.length; j++) {
Object a = (row.getFieldValue(j));
ColumnVector cv = batch.cols[j];
// if the value is repeating, use row 0
int rowId = cv.isRepeating ? 0 : i;
// make sure the null flag agrees
if (a == null) {
Assert.assertEquals(true, !cv.noNulls && cv.isNull[rowId]);
} else if (a instanceof BooleanWritable) {
// Boolean values are stored as 1's and 0's, so convert and compare
Long temp = (long) (((BooleanWritable) a).get() ? 1 : 0);
long b = ((LongColumnVector) cv).vector[rowId];
Assert.assertEquals(temp.toString(), Long.toString(b));
} else if (a instanceof TimestampWritableV2) {
// Timestamps are stored as long, so convert and compare
TimestampWritableV2 t = ((TimestampWritableV2) a);
TimestampColumnVector tcv = ((TimestampColumnVector) cv);
java.sql.Timestamp ts = tcv.asScratchTimestamp(rowId);
Assert.assertEquals(t.getTimestamp(), Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
} else if (a instanceof DateWritableV2) {
// Dates are stored as long, so convert and compare
DateWritableV2 adt = (DateWritableV2) a;
long b = ((LongColumnVector) cv).vector[rowId];
Assert.assertEquals(adt.get().toEpochMilli(), DateWritableV2.daysToMillis((int) b));
} else if (a instanceof HiveDecimalWritable) {
// Decimals are stored as BigInteger, so convert and compare
HiveDecimalWritable dec = (HiveDecimalWritable) a;
HiveDecimalWritable b = ((DecimalColumnVector) cv).vector[i];
Assert.assertEquals(dec, b);
} else if (a instanceof DoubleWritable) {
double b = ((DoubleColumnVector) cv).vector[rowId];
assertEquals(a.toString(), Double.toString(b));
} else if (a instanceof Text) {
BytesColumnVector bcv = (BytesColumnVector) cv;
Text b = new Text();
b.set(bcv.vector[rowId], bcv.start[rowId], bcv.length[rowId]);
assertEquals(a, b);
} else if (a instanceof IntWritable || a instanceof LongWritable || a instanceof ByteWritable || a instanceof ShortWritable) {
assertEquals(a.toString(), Long.toString(((LongColumnVector) cv).vector[rowId]));
} else {
assertEquals("huh", a.getClass().getName());
}
}
}
// Check repeating
Assert.assertEquals(false, batch.cols[0].isRepeating);
Assert.assertEquals(false, batch.cols[1].isRepeating);
Assert.assertEquals(false, batch.cols[2].isRepeating);
Assert.assertEquals(true, batch.cols[3].isRepeating);
Assert.assertEquals(false, batch.cols[4].isRepeating);
Assert.assertEquals(false, batch.cols[5].isRepeating);
Assert.assertEquals(false, batch.cols[6].isRepeating);
Assert.assertEquals(false, batch.cols[7].isRepeating);
Assert.assertEquals(false, batch.cols[8].isRepeating);
Assert.assertEquals(false, batch.cols[9].isRepeating);
// Check non null
Assert.assertEquals(false, batch.cols[0].noNulls);
Assert.assertEquals(false, batch.cols[1].noNulls);
Assert.assertEquals(true, batch.cols[2].noNulls);
Assert.assertEquals(true, batch.cols[3].noNulls);
Assert.assertEquals(false, batch.cols[4].noNulls);
Assert.assertEquals(false, batch.cols[5].noNulls);
Assert.assertEquals(false, batch.cols[6].noNulls);
Assert.assertEquals(false, batch.cols[7].noNulls);
Assert.assertEquals(false, batch.cols[8].noNulls);
Assert.assertEquals(false, batch.cols[9].noNulls);
}
Assert.assertEquals(false, rr.nextBatch(batch));
}
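As a side note on the SHORT branch above: the following is a minimal sketch (column size and value are illustrative) of how a row-level ShortWritable corresponds to a widened slot in a LongColumnVector, which is exactly the comparison the assertEquals on the string forms performs.

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class ShortColumnVectorSketch {
  public static void main(String[] args) {
    // Row-by-row reading yields the SMALLINT as a ShortWritable...
    ShortWritable rowValue = new ShortWritable((short) 42);
    // ...while the vectorized reader stores the same value widened to long.
    LongColumnVector vectorColumn = new LongColumnVector(1024);
    vectorColumn.vector[0] = rowValue.get();
    // The test compares the two representations through their string forms.
    System.out.println(rowValue.toString().equals(Long.toString(vectorColumn.vector[0])));
  }
}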
Use of org.apache.hadoop.hive.serde2.io.ShortWritable in the Apache Hive project: class TestParquetSerDe, method testParquetHiveSerDe.
@Test
public void testParquetHiveSerDe() throws Throwable {
try {
// Create the SerDe
System.out.println("test: testParquetHiveSerDe");
final ParquetHiveSerDe serDe = new ParquetHiveSerDe();
final Configuration conf = new Configuration();
final Properties tbl = createProperties();
serDe.initialize(conf, tbl, null);
// Data
final Writable[] arr = new Writable[9];
// primitive types
arr[0] = new ByteWritable((byte) 123);
arr[1] = new ShortWritable((short) 456);
arr[2] = new IntWritable(789);
arr[3] = new LongWritable(1000L);
arr[4] = new DoubleWritable((double) 5.3);
arr[5] = new BytesWritable("hive and hadoop and parquet. Big family.".getBytes("UTF-8"));
arr[6] = new BytesWritable("parquetSerde binary".getBytes("UTF-8"));
final Writable[] map = new Writable[3];
for (int i = 0; i < 3; ++i) {
final Writable[] pair = new Writable[2];
pair[0] = new BytesWritable(("key_" + i).getBytes("UTF-8"));
pair[1] = new IntWritable(i);
map[i] = new ArrayWritable(Writable.class, pair);
}
arr[7] = new ArrayWritable(Writable.class, map);
final Writable[] array = new Writable[5];
for (int i = 0; i < 5; ++i) {
array[i] = new BytesWritable(("elem_" + i).getBytes("UTF-8"));
}
arr[8] = new ArrayWritable(Writable.class, array);
final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);
// Test
deserializeAndSerializeLazySimple(serDe, arrWritable);
System.out.println("test: testParquetHiveSerDe - OK");
} catch (final Throwable e) {
e.printStackTrace();
throw e;
}
}
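The createProperties() helper is not shown in this snippet. Purely as an illustration of the shape of such a configuration (the column names and types below are hypothetical, not the ones used by TestParquetSerDe), SerDe table properties typically look roughly like this; note the smallint column that backs a ShortWritable.

import java.util.Properties;

public class ParquetSerDePropertiesSketch {
  // Hypothetical column layout; the real createProperties() in TestParquetSerDe
  // defines its own names and types.
  static Properties createProperties() {
    final Properties tbl = new Properties();
    tbl.setProperty("columns", "abyte,ashort,aint,along,adouble,astring,abinary,amap,alist");
    tbl.setProperty("columns.types",
        "tinyint:smallint:int:bigint:double:string:binary:map<string,int>:array<string>");
    return tbl;
  }
}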
Use of org.apache.hadoop.hive.serde2.io.ShortWritable in the Apache Hive project: class VerifyFastRow, method verifyDeserializeRead.
public static void verifyDeserializeRead(DeserializeRead deserializeRead, PrimitiveTypeInfo primitiveTypeInfo, Writable writable) throws IOException {
boolean isNull;
isNull = !deserializeRead.readNextField();
if (isNull) {
if (writable != null) {
TestCase.fail(deserializeRead.getClass().getName() + " field reports null but object is not null " + "(class " + writable.getClass().getName() + ", " + writable.toString() + ")");
}
return;
} else if (writable == null) {
TestCase.fail("Field report not null but object is null");
}
switch(primitiveTypeInfo.getPrimitiveCategory()) {
case BOOLEAN:
{
boolean value = deserializeRead.currentBoolean;
if (!(writable instanceof BooleanWritable)) {
TestCase.fail("Boolean expected writable not Boolean");
}
boolean expected = ((BooleanWritable) writable).get();
if (value != expected) {
TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
}
}
break;
case BYTE:
{
byte value = deserializeRead.currentByte;
if (!(writable instanceof ByteWritable)) {
TestCase.fail("Byte expected writable not Byte");
}
byte expected = ((ByteWritable) writable).get();
if (value != expected) {
TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
}
}
break;
case SHORT:
{
short value = deserializeRead.currentShort;
if (!(writable instanceof ShortWritable)) {
TestCase.fail("Short expected writable not Short");
}
short expected = ((ShortWritable) writable).get();
if (value != expected) {
TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
}
}
break;
case INT:
{
int value = deserializeRead.currentInt;
if (!(writable instanceof IntWritable)) {
TestCase.fail("Integer expected writable not Integer");
}
int expected = ((IntWritable) writable).get();
if (value != expected) {
TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
}
}
break;
case LONG:
{
long value = deserializeRead.currentLong;
if (!(writable instanceof LongWritable)) {
TestCase.fail("Long expected writable not Long");
}
long expected = ((LongWritable) writable).get();
if (value != expected) {
TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
}
}
break;
case FLOAT:
{
float value = deserializeRead.currentFloat;
if (!(writable instanceof FloatWritable)) {
TestCase.fail("Float expected writable not Float");
}
float expected = ((FloatWritable) writable).get();
if (value != expected) {
TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
}
}
break;
case DOUBLE:
{
double value = deserializeRead.currentDouble;
if (!(writable instanceof DoubleWritable)) {
TestCase.fail("Double expected writable not Double");
}
double expected = ((DoubleWritable) writable).get();
if (value != expected) {
TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
}
}
break;
case STRING:
{
byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
Text text = new Text(stringBytes);
String string = text.toString();
String expected = ((Text) writable).toString();
if (!string.equals(expected)) {
TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
}
}
break;
case CHAR:
{
byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
Text text = new Text(stringBytes);
String string = text.toString();
HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
HiveChar expected = ((HiveCharWritable) writable).getHiveChar();
if (!hiveChar.equals(expected)) {
TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
}
}
break;
case VARCHAR:
{
byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
Text text = new Text(stringBytes);
String string = text.toString();
HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
HiveVarchar expected = ((HiveVarcharWritable) writable).getHiveVarchar();
if (!hiveVarchar.equals(expected)) {
TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
}
}
break;
case DECIMAL:
{
HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
if (value == null) {
TestCase.fail("Decimal field evaluated to NULL");
}
HiveDecimal expected = ((HiveDecimalWritable) writable).getHiveDecimal();
if (!value.equals(expected)) {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
int precision = decimalTypeInfo.getPrecision();
int scale = decimalTypeInfo.getScale();
TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
}
}
break;
case DATE:
{
Date value = deserializeRead.currentDateWritable.get();
Date expected = ((DateWritable) writable).get();
if (!value.equals(expected)) {
TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
}
}
break;
case TIMESTAMP:
{
Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
Timestamp expected = ((TimestampWritable) writable).getTimestamp();
if (!value.equals(expected)) {
TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
}
}
break;
case INTERVAL_YEAR_MONTH:
{
HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) writable).getHiveIntervalYearMonth();
if (!value.equals(expected)) {
TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
}
}
break;
case INTERVAL_DAY_TIME:
{
HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) writable).getHiveIntervalDayTime();
if (!value.equals(expected)) {
TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
}
}
break;
case BINARY:
{
byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
BytesWritable bytesWritable = (BytesWritable) writable;
byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
if (byteArray.length != expected.length) {
TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
}
for (int b = 0; b < byteArray.length; b++) {
if (byteArray[b] != expected[b]) {
TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
}
}
}
break;
default:
throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
}
}
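Restated outside the switch for clarity, the SHORT branch boils down to the following check. This is a standalone sketch with illustrative values, not part of VerifyFastRow.

import org.apache.hadoop.hive.serde2.io.ShortWritable;

public class ShortFieldCheckSketch {
  // Compare the primitive short produced by the reader against the expected writable.
  static void checkShortField(short actual, ShortWritable expectedWritable) {
    short expected = expectedWritable.get();
    if (actual != expected) {
      throw new AssertionError("Short field mismatch (expected " + expected + " found " + actual + ")");
    }
  }

  public static void main(String[] args) {
    checkShortField((short) 456, new ShortWritable((short) 456));
  }
}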
Use of org.apache.hadoop.hive.serde2.io.ShortWritable in the Apache Hive project: class VectorColumnAssignFactory, method buildObjectAssign.
public static VectorColumnAssign buildObjectAssign(VectorizedRowBatch outputBatch, int outColIndex, PrimitiveCategory category) throws HiveException {
VectorColumnAssign outVCA = null;
ColumnVector destCol = outputBatch.cols[outColIndex];
if (destCol == null) {
switch(category) {
case VOID:
outVCA = new VectorLongColumnAssign() {
// This is a dummy assigner
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
// This is no-op, there is no column to assign to and val is expected to be null
assert (val == null);
}
};
break;
default:
throw new HiveException("Incompatible (null) vector column and primitive category " + category);
}
} else if (destCol instanceof LongColumnVector) {
switch(category) {
case BOOLEAN:
outVCA = new VectorLongColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
BooleanWritable bw = (BooleanWritable) val;
assignLong(bw.get() ? 1 : 0, destIndex);
}
}
}.init(outputBatch, (LongColumnVector) destCol);
break;
case BYTE:
outVCA = new VectorLongColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
ByteWritable bw = (ByteWritable) val;
assignLong(bw.get(), destIndex);
}
}
}.init(outputBatch, (LongColumnVector) destCol);
break;
case SHORT:
outVCA = new VectorLongColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
ShortWritable bw = (ShortWritable) val;
assignLong(bw.get(), destIndex);
}
}
}.init(outputBatch, (LongColumnVector) destCol);
break;
case INT:
outVCA = new VectorLongColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
IntWritable bw = (IntWritable) val;
assignLong(bw.get(), destIndex);
}
}
}.init(outputBatch, (LongColumnVector) destCol);
break;
case LONG:
outVCA = new VectorLongColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
LongWritable bw = (LongWritable) val;
assignLong(bw.get(), destIndex);
}
}
}.init(outputBatch, (LongColumnVector) destCol);
break;
case TIMESTAMP:
outVCA = new VectorTimestampColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
assignTimestamp((TimestampWritable) val, destIndex);
}
}
}.init(outputBatch, (TimestampColumnVector) destCol);
break;
case DATE:
outVCA = new VectorLongColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
DateWritable bw = (DateWritable) val;
assignLong(bw.getDays(), destIndex);
}
}
}.init(outputBatch, (LongColumnVector) destCol);
break;
case INTERVAL_YEAR_MONTH:
outVCA = new VectorLongColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
HiveIntervalYearMonthWritable bw = (HiveIntervalYearMonthWritable) val;
assignLong(bw.getHiveIntervalYearMonth().getTotalMonths(), destIndex);
}
}
}.init(outputBatch, (LongColumnVector) destCol);
break;
case INTERVAL_DAY_TIME:
outVCA = new VectorIntervalDayTimeColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
HiveIntervalDayTimeWritable bw = (HiveIntervalDayTimeWritable) val;
assignIntervalDayTime(bw.getHiveIntervalDayTime(), destIndex);
}
}
}.init(outputBatch, (IntervalDayTimeColumnVector) destCol);
break;
default:
throw new HiveException("Incompatible Long vector column and primitive category " + category);
}
} else if (destCol instanceof DoubleColumnVector) {
switch(category) {
case DOUBLE:
outVCA = new VectorDoubleColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
DoubleWritable bw = (DoubleWritable) val;
assignDouble(bw.get(), destIndex);
}
}
}.init(outputBatch, (DoubleColumnVector) destCol);
break;
case FLOAT:
outVCA = new VectorDoubleColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
FloatWritable bw = (FloatWritable) val;
assignDouble(bw.get(), destIndex);
}
}
}.init(outputBatch, (DoubleColumnVector) destCol);
break;
default:
throw new HiveException("Incompatible Double vector column and primitive category " + category);
}
} else if (destCol instanceof BytesColumnVector) {
switch(category) {
case BINARY:
outVCA = new VectorBytesColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
BytesWritable bw = (BytesWritable) val;
byte[] bytes = bw.getBytes();
assignBytes(bytes, 0, bw.getLength(), destIndex);
}
}
}.init(outputBatch, (BytesColumnVector) destCol);
break;
case STRING:
outVCA = new VectorBytesColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
Text bw = (Text) val;
byte[] bytes = bw.getBytes();
assignBytes(bytes, 0, bw.getLength(), destIndex);
}
}
}.init(outputBatch, (BytesColumnVector) destCol);
break;
case VARCHAR:
outVCA = new VectorBytesColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
// We store VARCHAR type stripped of pads.
HiveVarchar hiveVarchar;
if (val instanceof HiveVarchar) {
hiveVarchar = (HiveVarchar) val;
} else {
hiveVarchar = ((HiveVarcharWritable) val).getHiveVarchar();
}
byte[] bytes = hiveVarchar.getValue().getBytes();
assignBytes(bytes, 0, bytes.length, destIndex);
}
}
}.init(outputBatch, (BytesColumnVector) destCol);
break;
case CHAR:
outVCA = new VectorBytesColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
// We store CHAR type stripped of pads.
HiveChar hiveChar;
if (val instanceof HiveChar) {
hiveChar = (HiveChar) val;
} else {
hiveChar = ((HiveCharWritable) val).getHiveChar();
}
byte[] bytes = hiveChar.getStrippedValue().getBytes();
assignBytes(bytes, 0, bytes.length, destIndex);
}
}
}.init(outputBatch, (BytesColumnVector) destCol);
break;
default:
throw new HiveException("Incompatible Bytes vector column and primitive category " + category);
}
} else if (destCol instanceof DecimalColumnVector) {
switch(category) {
case DECIMAL:
outVCA = new VectorDecimalColumnAssign() {
@Override
public void assignObjectValue(Object val, int destIndex) throws HiveException {
if (val == null) {
assignNull(destIndex);
} else {
if (val instanceof HiveDecimal) {
assignDecimal((HiveDecimal) val, destIndex);
} else {
assignDecimal((HiveDecimalWritable) val, destIndex);
}
}
}
}.init(outputBatch, (DecimalColumnVector) destCol);
break;
default:
throw new HiveException("Incompatible Decimal vector column and primitive category " + category);
}
} else {
throw new HiveException("Unknown vector column type " + destCol.getClass().getName());
}
return outVCA;
}
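A minimal usage sketch of the SHORT path through this factory (the one-column batch layout and the value 7 are illustrative): for a smallint column the destination vector is a LongColumnVector, and the returned assigner widens each ShortWritable into it.

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssign;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssignFactory;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

public class ShortColumnAssignSketch {
  public static void main(String[] args) throws HiveException {
    // One-column batch whose only column is the long-backed vector a smallint maps to.
    VectorizedRowBatch batch = new VectorizedRowBatch(1);
    batch.cols[0] = new LongColumnVector();
    // The factory picks the SHORT branch shown above and returns a long assigner.
    VectorColumnAssign assigner =
        VectorColumnAssignFactory.buildObjectAssign(batch, 0, PrimitiveCategory.SHORT);
    assigner.assignObjectValue(new ShortWritable((short) 7), 0);
    System.out.println(((LongColumnVector) batch.cols[0]).vector[0]);
  }
}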