Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
Class GenericUDFOPDTIMinus, method initialize:
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentException(opName + " requires two arguments.");
  }
  PrimitiveObjectInspector resultOI = null;
  for (int i = 0; i < 2; i++) {
    Category category = arguments[i].getCategory();
    if (category != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i, "The " + GenericUDFUtils.getOrdinal(i + 1)
          + " argument of " + opName + " is expected to be a "
          + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
          + category.toString().toLowerCase() + " was found");
    }
  }
  inputOIs = new PrimitiveObjectInspector[] {
      (PrimitiveObjectInspector) arguments[0],
      (PrimitiveObjectInspector) arguments[1] };
  PrimitiveObjectInspector leftOI = inputOIs[0];
  PrimitiveObjectInspector rightOI = inputOIs[1];
  // Supported operand combinations, per the branches below:
  //   IntervalYearMonth - IntervalYearMonth = IntervalYearMonth
  //   Date - IntervalYearMonth = Date, Timestamp - IntervalYearMonth = Timestamp
  //   IntervalDayTime - IntervalDayTime = IntervalDayTime
  //   Date/Timestamp - IntervalDayTime = Timestamp
  //   Date/Timestamp - Date/Timestamp = IntervalDayTime (operands reversible)
  if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    minusOpType = OperationType.INTERVALYM_MINUS_INTERVALYM;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalYearMonthTypeInfo);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    minusOpType = OperationType.DATE_MINUS_INTERVALYM;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.dateTypeInfo);
  } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALYM;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    minusOpType = OperationType.INTERVALDT_MINUS_INTERVALDT;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalDayTimeTypeInfo);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALDT;
    dtArg1Idx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
    // A date left operand is converted to timestamp; resultOI is the timestamp inspector here.
    dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.DATE)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.TIMESTAMP)
      || checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.DATE)) {
    // Operands converted to timestamp, result as interval day-time
    minusOpType = OperationType.TIMESTAMP_MINUS_TIMESTAMP;
    dtArg1Idx = 0;
    dtArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalDayTimeTypeInfo);
    // Each operand gets its own converter to the timestamp inspector
    // (note the second converter reads the right operand's inspector).
    dt1Converter = ObjectInspectorConverters.getConverter(leftOI,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    dt2Converter = ObjectInspectorConverters.getConverter(rightOI,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  } else {
    // Unsupported types - error
    List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
    argTypeInfos.add(leftOI.getTypeInfo());
    argTypeInfos.add(rightOI.getTypeInfo());
    throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
  }
  return resultOI;
}
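To see which branch a given operand pair selects, a minimal hypothetical driver can call initialize directly with writable inspectors; the MinusInitDemo harness below is illustrative (not from the Hive source), but the API calls are standard. Timestamp - Date lands in the TIMESTAMP_MINUS_TIMESTAMP branch, so the returned inspector should report interval_day_time.

// Hypothetical driver for GenericUDFOPDTIMinus.initialize (not from the Hive source).
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDTIMinus;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class MinusInitDemo {
  public static void main(String[] args) throws Exception {
    GenericUDFOPDTIMinus udf = new GenericUDFOPDTIMinus();
    ObjectInspector[] operands = {
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,  // left operand
        PrimitiveObjectInspectorFactory.writableDateObjectInspector        // right operand
    };
    // Timestamp - Date matches the last checkArgs disjunct of the
    // TIMESTAMP_MINUS_TIMESTAMP branch above.
    ObjectInspector resultOI = udf.initialize(operands);
    System.out.println(resultOI.getTypeName());  // expected: interval_day_time
  }
}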
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
Class GenericUDFFloorCeilBase, method initialize:
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentException(opName + " requires one argument.");
  }
  Category category = arguments[0].getCategory();
  if (category != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1)
        + " argument of " + opName + " is expected to be a "
        + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
        + category.toString().toLowerCase() + " was found");
  }
  inputOI = (PrimitiveObjectInspector) arguments[0];
  if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())) {
    throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1)
        + " argument of " + opName + " is expected to be a numeric type, but "
        + inputOI.getTypeName() + " was found");
  }
  PrimitiveTypeInfo resultTypeInfo = null;
  PrimitiveTypeInfo inputTypeInfo = inputOI.getTypeInfo();
  if (inputTypeInfo instanceof DecimalTypeInfo) {
    DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inputTypeInfo;
    resultTypeInfo = TypeInfoFactory.getDecimalTypeInfo(
        decTypeInfo.precision() - decTypeInfo.scale() + 1, 0);
    ObjectInspector decimalOI =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo);
    converter = ObjectInspectorConverters.getConverter(inputOI, decimalOI);
  } else {
    resultTypeInfo = TypeInfoFactory.longTypeInfo;
    ObjectInspector doubleObjectInspector =
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    converter = ObjectInspectorConverters.getConverter(inputOI, doubleObjectInspector);
  }
  resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(resultTypeInfo);
  return resultOI;
}
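For the decimal branch, the result type is decimal(p - s + 1, 0): flooring or ceiling a decimal(p, s) drops the fractional digits but may carry into one extra integer digit. A hedged sketch using GenericUDFFloor, Hive's concrete floor subclass; the FloorInitDemo harness itself is hypothetical.

// Hypothetical driver for the decimal branch of GenericUDFFloorCeilBase.initialize.
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFloor;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class FloorInitDemo {
  public static void main(String[] args) throws Exception {
    GenericUDFFloor floor = new GenericUDFFloor();
    // decimal(10,2) input: 8 integer digits plus 1 carry digit => decimal(9,0) result.
    ObjectInspector decimalOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(10, 2));
    ObjectInspector resultOI = floor.initialize(new ObjectInspector[] { decimalOI });
    System.out.println(resultOI.getTypeName());  // expected: decimal(9,0)
  }
}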
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
Class HCatTypeCheckHive, method getJavaObject:
private Object getJavaObject(Object o, ObjectInspector oi, List<Category> categories) {
  if (categories != null) {
    categories.add(oi.getCategory());
  }
  if (oi.getCategory() == ObjectInspector.Category.LIST) {
    List<?> l = ((ListObjectInspector) oi).getList(o);
    List<Object> result = new ArrayList<Object>();
    ObjectInspector elemOI = ((ListObjectInspector) oi).getListElementObjectInspector();
    for (Object lo : l) {
      result.add(getJavaObject(lo, elemOI, categories));
    }
    return result;
  } else if (oi.getCategory() == ObjectInspector.Category.MAP) {
    Map<?, ?> m = ((MapObjectInspector) oi).getMap(o);
    Map<String, String> result = new HashMap<String, String>();
    ObjectInspector koi = ((MapObjectInspector) oi).getMapKeyObjectInspector();
    ObjectInspector voi = ((MapObjectInspector) oi).getMapValueObjectInspector();
    for (Entry<?, ?> e : m.entrySet()) {
      result.put((String) getJavaObject(e.getKey(), koi, null),
          (String) getJavaObject(e.getValue(), voi, null));
    }
    return result;
  } else if (oi.getCategory() == ObjectInspector.Category.STRUCT) {
    List<Object> s = ((StructObjectInspector) oi).getStructFieldsDataAsList(o);
    List<? extends StructField> sf = ((StructObjectInspector) oi).getAllStructFieldRefs();
    List<Object> result = new ArrayList<Object>();
    for (int i = 0; i < s.size(); i++) {
      result.add(getJavaObject(s.get(i), sf.get(i).getFieldObjectInspector(), categories));
    }
    return result;
  } else if (oi.getCategory() == ObjectInspector.Category.PRIMITIVE) {
    return ((PrimitiveObjectInspector) oi).getPrimitiveJavaObject(o);
  }
  throw new RuntimeException("Unexpected ObjectInspector category: " + oi.getCategory());
}
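getJavaObject is private, but the LIST branch relies only on the standard ListObjectInspector contract. A standalone hypothetical demo (ListUnwrapDemo, not part of HCatTypeCheckHive) of the same unwrapping for a list of strings:

// Hypothetical demo mirroring the LIST branch of getJavaObject.
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ListUnwrapDemo {
  public static void main(String[] args) {
    ListObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    List<String> data = Arrays.asList("a", "b");
    // getList returns the backing list; elements are unwrapped one by one,
    // exactly as the recursive LIST case does above.
    PrimitiveObjectInspector elemOI =
        (PrimitiveObjectInspector) listOI.getListElementObjectInspector();
    for (Object elem : listOI.getList(data)) {
      System.out.println(elemOI.getPrimitiveJavaObject(elem));  // prints a, then b
    }
  }
}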
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
Class VectorAssignRow, method assignConvertRowColumn:
/**
 * Convert a row's column object and then assign it to the ColumnVector at batchIndex
 * in the VectorizedRowBatch.
 *
 * Public so VectorDeserializeRow can use this method to convert a row's column object.
 *
 * @param batch the VectorizedRowBatch to assign into
 * @param batchIndex the row index within the batch
 * @param logicalColumnIndex the logical (projected) column index
 * @param object the row column object whose type is the VectorAssignRow.initConversion
 *        source data type
 */
public void assignConvertRowColumn(VectorizedRowBatch batch, int batchIndex,
    int logicalColumnIndex, Object object) {
  Preconditions.checkState(isConvert[logicalColumnIndex]);
  Category targetCategory = targetCategories[logicalColumnIndex];
  if (targetCategory == null) {
    /*
     * This is a column that we don't want (i.e. not included) -- we are done.
     */
    return;
  }
  final int projectionColumnNum = projectionColumnNums[logicalColumnIndex];
  if (object == null) {
    VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
    return;
  }
  try {
    switch (targetCategory) {
    case PRIMITIVE:
      PrimitiveCategory targetPrimitiveCategory = targetPrimitiveCategories[logicalColumnIndex];
      switch (targetPrimitiveCategory) {
      case VOID:
        VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
        return;
      case BOOLEAN:
        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
            (PrimitiveObjectInspectorUtils.getBoolean(object,
                convertSourcePrimitiveObjectInspectors[logicalColumnIndex]) ? 1 : 0);
        break;
      case BYTE:
        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
            PrimitiveObjectInspectorUtils.getByte(object,
                convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
        break;
      case SHORT:
        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
            PrimitiveObjectInspectorUtils.getShort(object,
                convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
        break;
      case INT:
        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
            PrimitiveObjectInspectorUtils.getInt(object,
                convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
        break;
      case LONG:
        ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
            PrimitiveObjectInspectorUtils.getLong(object,
                convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
        break;
      case TIMESTAMP:
        {
          Timestamp timestamp = PrimitiveObjectInspectorUtils.getTimestamp(object,
              convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (timestamp == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          ((TimestampColumnVector) batch.cols[projectionColumnNum]).set(batchIndex, timestamp);
        }
        break;
      case DATE:
        {
          Date date = PrimitiveObjectInspectorUtils.getDate(object,
              convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (date == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          DateWritable dateWritable = (DateWritable) convertTargetWritables[logicalColumnIndex];
          dateWritable.set(date);
          ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
              dateWritable.getDays();
        }
        break;
      case FLOAT:
        ((DoubleColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
            PrimitiveObjectInspectorUtils.getFloat(object,
                convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
        break;
      case DOUBLE:
        ((DoubleColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
            PrimitiveObjectInspectorUtils.getDouble(object,
                convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
        break;
      case BINARY:
        {
          BytesWritable bytesWritable = PrimitiveObjectInspectorUtils.getBinary(object,
              convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (bytesWritable == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal(batchIndex,
              bytesWritable.getBytes(), 0, bytesWritable.getLength());
        }
        break;
      case STRING:
        {
          String string = PrimitiveObjectInspectorUtils.getString(object,
              convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (string == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          Text text = (Text) convertTargetWritables[logicalColumnIndex];
          text.set(string);
          ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal(batchIndex,
              text.getBytes(), 0, text.getLength());
        }
        break;
      case VARCHAR:
        {
          // UNDONE: Performance problem with conversion to String, then bytes...
          HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(object,
              convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (hiveVarchar == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          // TODO: Do we need maxLength checking?
          byte[] bytes = hiveVarchar.getValue().getBytes();
          ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal(batchIndex,
              bytes, 0, bytes.length);
        }
        break;
      case CHAR:
        {
          // UNDONE: Performance problem with conversion to String, then bytes...
          HiveChar hiveChar = PrimitiveObjectInspectorUtils.getHiveChar(object,
              convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (hiveChar == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          // We store CHAR in vector row batch with padding stripped.
          // TODO: Do we need maxLength checking?
          byte[] bytes = hiveChar.getStrippedValue().getBytes();
          ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal(batchIndex,
              bytes, 0, bytes.length);
        }
        break;
      case DECIMAL:
        {
          HiveDecimal hiveDecimal = PrimitiveObjectInspectorUtils.getHiveDecimal(object,
              convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (hiveDecimal == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          ((DecimalColumnVector) batch.cols[projectionColumnNum]).set(batchIndex, hiveDecimal);
        }
        break;
      case INTERVAL_YEAR_MONTH:
        {
          HiveIntervalYearMonth intervalYearMonth =
              PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(object,
                  convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (intervalYearMonth == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
              intervalYearMonth.getTotalMonths();
        }
        break;
      case INTERVAL_DAY_TIME:
        {
          HiveIntervalDayTime intervalDayTime =
              PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(object,
                  convertSourcePrimitiveObjectInspectors[logicalColumnIndex]);
          if (intervalDayTime == null) {
            VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
            return;
          }
          ((IntervalDayTimeColumnVector) batch.cols[projectionColumnNum]).set(batchIndex,
              intervalDayTime);
        }
        break;
      default:
        throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name()
            + " not supported");
      }
      break;
    default:
      throw new RuntimeException("Category " + targetCategory.name() + " not supported");
    }
  } catch (NumberFormatException e) {
    // Some of the conversion methods throw this exception on numeric parsing errors.
    VectorizedBatchUtil.setNullColIsNullValue(batch.cols[projectionColumnNum], batchIndex);
    return;
  }
  // We always set the null flag to false when there is a value.
  batch.cols[projectionColumnNum].isNull[batchIndex] = false;
}
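In the DATE case above, the value lands in a LongColumnVector: the date is reduced to days since the epoch via DateWritable. A small hypothetical demo (EpochDayDemo, assuming the pre-3.0 DateWritable used in this snippet) of that encoding:

// Hypothetical demo of the epoch-day encoding used by the DATE case above.
import java.sql.Date;
import org.apache.hadoop.hive.serde2.io.DateWritable;

public class EpochDayDemo {
  public static void main(String[] args) {
    DateWritable dw = new DateWritable(Date.valueOf("1970-01-11"));
    // Ten days after 1970-01-01: this is the long value written into
    // the LongColumnVector slot for a DATE column.
    System.out.println(dw.getDays());  // 10
  }
}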
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category in project hive by apache.
Class VectorColumnAssignFactory, method buildObjectAssign:
public static VectorColumnAssign buildObjectAssign(VectorizedRowBatch outputBatch,
    int outColIndex, PrimitiveCategory category) throws HiveException {
  VectorColumnAssign outVCA = null;
  ColumnVector destCol = outputBatch.cols[outColIndex];
  if (destCol == null) {
    switch (category) {
    case VOID:
      outVCA = new VectorLongColumnAssign() {
        // This is a dummy assigner
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          // This is a no-op; there is no column to assign to and val is expected to be null
          assert (val == null);
        }
      };
      break;
    default:
      throw new HiveException("Incompatible (null) vector column and primitive category "
          + category);
    }
  } else if (destCol instanceof LongColumnVector) {
    switch (category) {
    case BOOLEAN:
      outVCA = new VectorLongColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            BooleanWritable bw = (BooleanWritable) val;
            assignLong(bw.get() ? 1 : 0, destIndex);
          }
        }
      }.init(outputBatch, (LongColumnVector) destCol);
      break;
    case BYTE:
      outVCA = new VectorLongColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            ByteWritable bw = (ByteWritable) val;
            assignLong(bw.get(), destIndex);
          }
        }
      }.init(outputBatch, (LongColumnVector) destCol);
      break;
    case SHORT:
      outVCA = new VectorLongColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            ShortWritable bw = (ShortWritable) val;
            assignLong(bw.get(), destIndex);
          }
        }
      }.init(outputBatch, (LongColumnVector) destCol);
      break;
    case INT:
      outVCA = new VectorLongColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            IntWritable bw = (IntWritable) val;
            assignLong(bw.get(), destIndex);
          }
        }
      }.init(outputBatch, (LongColumnVector) destCol);
      break;
    case LONG:
      outVCA = new VectorLongColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            LongWritable bw = (LongWritable) val;
            assignLong(bw.get(), destIndex);
          }
        }
      }.init(outputBatch, (LongColumnVector) destCol);
      break;
    case TIMESTAMP:
      outVCA = new VectorTimestampColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            assignTimestamp((TimestampWritable) val, destIndex);
          }
        }
      }.init(outputBatch, (TimestampColumnVector) destCol);
      break;
    case DATE:
      outVCA = new VectorLongColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            DateWritable bw = (DateWritable) val;
            assignLong(bw.getDays(), destIndex);
          }
        }
      }.init(outputBatch, (LongColumnVector) destCol);
      break;
    case INTERVAL_YEAR_MONTH:
      outVCA = new VectorLongColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            HiveIntervalYearMonthWritable bw = (HiveIntervalYearMonthWritable) val;
            assignLong(bw.getHiveIntervalYearMonth().getTotalMonths(), destIndex);
          }
        }
      }.init(outputBatch, (LongColumnVector) destCol);
      break;
    case INTERVAL_DAY_TIME:
      outVCA = new VectorIntervalDayTimeColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            HiveIntervalDayTimeWritable bw = (HiveIntervalDayTimeWritable) val;
            assignIntervalDayTime(bw.getHiveIntervalDayTime(), destIndex);
          }
        }
      }.init(outputBatch, (IntervalDayTimeColumnVector) destCol);
      break;
    default:
      throw new HiveException("Incompatible Long vector column and primitive category "
          + category);
    }
  } else if (destCol instanceof DoubleColumnVector) {
    switch (category) {
    case DOUBLE:
      outVCA = new VectorDoubleColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            DoubleWritable bw = (DoubleWritable) val;
            assignDouble(bw.get(), destIndex);
          }
        }
      }.init(outputBatch, (DoubleColumnVector) destCol);
      break;
    case FLOAT:
      outVCA = new VectorDoubleColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            FloatWritable bw = (FloatWritable) val;
            assignDouble(bw.get(), destIndex);
          }
        }
      }.init(outputBatch, (DoubleColumnVector) destCol);
      break;
    default:
      throw new HiveException("Incompatible Double vector column and primitive category "
          + category);
    }
  } else if (destCol instanceof BytesColumnVector) {
    switch (category) {
    case BINARY:
      outVCA = new VectorBytesColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            BytesWritable bw = (BytesWritable) val;
            byte[] bytes = bw.getBytes();
            assignBytes(bytes, 0, bw.getLength(), destIndex);
          }
        }
      }.init(outputBatch, (BytesColumnVector) destCol);
      break;
    case STRING:
      outVCA = new VectorBytesColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            Text bw = (Text) val;
            byte[] bytes = bw.getBytes();
            assignBytes(bytes, 0, bw.getLength(), destIndex);
          }
        }
      }.init(outputBatch, (BytesColumnVector) destCol);
      break;
    case VARCHAR:
      outVCA = new VectorBytesColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            // We store VARCHAR type stripped of pads.
            HiveVarchar hiveVarchar;
            if (val instanceof HiveVarchar) {
              hiveVarchar = (HiveVarchar) val;
            } else {
              hiveVarchar = ((HiveVarcharWritable) val).getHiveVarchar();
            }
            byte[] bytes = hiveVarchar.getValue().getBytes();
            assignBytes(bytes, 0, bytes.length, destIndex);
          }
        }
      }.init(outputBatch, (BytesColumnVector) destCol);
      break;
    case CHAR:
      outVCA = new VectorBytesColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            // We store CHAR type stripped of pads.
            HiveChar hiveChar;
            if (val instanceof HiveChar) {
              hiveChar = (HiveChar) val;
            } else {
              hiveChar = ((HiveCharWritable) val).getHiveChar();
            }
            byte[] bytes = hiveChar.getStrippedValue().getBytes();
            assignBytes(bytes, 0, bytes.length, destIndex);
          }
        }
      }.init(outputBatch, (BytesColumnVector) destCol);
      break;
    default:
      throw new HiveException("Incompatible Bytes vector column and primitive category "
          + category);
    }
  } else if (destCol instanceof DecimalColumnVector) {
    switch (category) {
    case DECIMAL:
      outVCA = new VectorDecimalColumnAssign() {
        @Override
        public void assignObjectValue(Object val, int destIndex) throws HiveException {
          if (val == null) {
            assignNull(destIndex);
          } else {
            if (val instanceof HiveDecimal) {
              assignDecimal((HiveDecimal) val, destIndex);
            } else {
              assignDecimal((HiveDecimalWritable) val, destIndex);
            }
          }
        }
      }.init(outputBatch, (DecimalColumnVector) destCol);
      break;
    default:
      throw new HiveException("Incompatible Decimal vector column and primitive category "
          + category);
    }
  } else {
    throw new HiveException("Unknown vector column type " + destCol.getClass().getName());
  }
  return outVCA;
}
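A hedged usage sketch (the AssignDemo harness is hypothetical; it assumes the caller installs a LongColumnVector for the INT column, since VectorizedRowBatch does not populate its cols array itself): allocate a batch, build the assigner, and push writables in; nulls route through assignNull.

// Hypothetical driver for buildObjectAssign (not from the Hive source).
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssign;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssignFactory;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.io.IntWritable;

public class AssignDemo {
  public static void main(String[] args) throws Exception {
    VectorizedRowBatch batch = new VectorizedRowBatch(1);
    batch.cols[0] = new LongColumnVector();  // INT values live in a LongColumnVector
    VectorColumnAssign vca =
        VectorColumnAssignFactory.buildObjectAssign(batch, 0, PrimitiveCategory.INT);
    vca.assignObjectValue(new IntWritable(42), 0);  // row 0 <- 42
    vca.assignObjectValue(null, 1);                 // row 1 <- NULL (sets isNull, noNulls=false)
    System.out.println(((LongColumnVector) batch.cols[0]).vector[0]);  // 42
  }
}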