Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
The class VectorExtractRow, method extractRowColumn.
public Object extractRowColumn(ColumnVector colVector, TypeInfo typeInfo, ObjectInspector objectInspector, int batchIndex) {
if (colVector == null) {
// The planner will not include unneeded columns for reading, but other parts of execution may ask for them.
return null;
}
final int adjustedIndex = (colVector.isRepeating ? 0 : batchIndex);
if (!colVector.noNulls && colVector.isNull[adjustedIndex]) {
return null;
}
final Category category = typeInfo.getCategory();
switch(category) {
case PRIMITIVE:
{
final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
final PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
final Writable primitiveWritable = VectorizedBatchUtil.getPrimitiveWritable(primitiveCategory);
switch(primitiveCategory) {
case VOID:
return null;
case BOOLEAN:
((BooleanWritable) primitiveWritable).set(((LongColumnVector) colVector).vector[adjustedIndex] != 0);
return primitiveWritable;
case BYTE:
((ByteWritable) primitiveWritable).set((byte) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case SHORT:
((ShortWritable) primitiveWritable).set((short) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case INT:
((IntWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case LONG:
((LongWritable) primitiveWritable).set(((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case TIMESTAMP:
// Convert from the java.sql.Timestamp used by vectorization to the serializable org.apache.hadoop.hive.common.type.Timestamp.
java.sql.Timestamp ts = ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex);
Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
((TimestampWritableV2) primitiveWritable).set(serializableTS);
return primitiveWritable;
case DATE:
((DateWritableV2) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case FLOAT:
((FloatWritable) primitiveWritable).set((float) ((DoubleColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case DOUBLE:
((DoubleWritable) primitiveWritable).set(((DoubleColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case BINARY:
{
final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
final byte[] bytes = bytesColVector.vector[adjustedIndex];
final int start = bytesColVector.start[adjustedIndex];
final int length = bytesColVector.length[adjustedIndex];
BytesWritable bytesWritable = (BytesWritable) primitiveWritable;
if (bytes == null || length == 0) {
if (length > 0) {
nullBytesReadError(primitiveCategory, batchIndex);
}
bytesWritable.set(EMPTY_BYTES, 0, 0);
} else {
bytesWritable.set(bytes, start, length);
}
return primitiveWritable;
}
case STRING:
{
final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
final byte[] bytes = bytesColVector.vector[adjustedIndex];
final int start = bytesColVector.start[adjustedIndex];
final int length = bytesColVector.length[adjustedIndex];
if (bytes == null || length == 0) {
if (length > 0) {
nullBytesReadError(primitiveCategory, batchIndex);
}
((Text) primitiveWritable).set(EMPTY_BYTES, 0, 0);
} else {
// Use org.apache.hadoop.io.Text as our helper to go from byte[] to String.
((Text) primitiveWritable).set(bytes, start, length);
}
return primitiveWritable;
}
case VARCHAR:
{
final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
final byte[] bytes = bytesColVector.vector[adjustedIndex];
final int start = bytesColVector.start[adjustedIndex];
final int length = bytesColVector.length[adjustedIndex];
final HiveVarcharWritable hiveVarcharWritable = (HiveVarcharWritable) primitiveWritable;
if (bytes == null || length == 0) {
if (length > 0) {
nullBytesReadError(primitiveCategory, batchIndex);
}
hiveVarcharWritable.set(EMPTY_STRING, -1);
} else {
final int adjustedLength = StringExpr.truncate(bytes, start, length, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
if (adjustedLength == 0) {
hiveVarcharWritable.set(EMPTY_STRING, -1);
} else {
hiveVarcharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), -1);
}
}
return primitiveWritable;
}
case CHAR:
{
final BytesColumnVector bytesColVector = ((BytesColumnVector) colVector);
final byte[] bytes = bytesColVector.vector[adjustedIndex];
final int start = bytesColVector.start[adjustedIndex];
final int length = bytesColVector.length[adjustedIndex];
final HiveCharWritable hiveCharWritable = (HiveCharWritable) primitiveWritable;
final int maxLength = ((CharTypeInfo) primitiveTypeInfo).getLength();
if (bytes == null || length == 0) {
if (length > 0) {
nullBytesReadError(primitiveCategory, batchIndex);
}
hiveCharWritable.set(EMPTY_STRING, maxLength);
} else {
final int adjustedLength = StringExpr.rightTrimAndTruncate(bytes, start, length, ((CharTypeInfo) primitiveTypeInfo).getLength());
if (adjustedLength == 0) {
hiveCharWritable.set(EMPTY_STRING, maxLength);
} else {
hiveCharWritable.set(new String(bytes, start, adjustedLength, Charsets.UTF_8), maxLength);
}
}
return primitiveWritable;
}
case DECIMAL:
if (colVector instanceof Decimal64ColumnVector) {
Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) colVector;
((HiveDecimalWritable) primitiveWritable).deserialize64(dec64ColVector.vector[adjustedIndex], dec64ColVector.scale);
} else {
// The HiveDecimalWritable set method will quickly copy the deserialized decimal writable fields.
((HiveDecimalWritable) primitiveWritable).set(((DecimalColumnVector) colVector).vector[adjustedIndex]);
}
return primitiveWritable;
case INTERVAL_YEAR_MONTH:
((HiveIntervalYearMonthWritable) primitiveWritable).set((int) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case INTERVAL_DAY_TIME:
((HiveIntervalDayTimeWritable) primitiveWritable).set(((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(adjustedIndex));
return primitiveWritable;
default:
throw new RuntimeException("Primitive category " + primitiveCategory.name() + " not supported");
}
}
case LIST:
{
final ListColumnVector listColumnVector = (ListColumnVector) colVector;
final ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
final ListObjectInspector listObjectInspector = (ListObjectInspector) objectInspector;
final int offset = (int) listColumnVector.offsets[adjustedIndex];
final int size = (int) listColumnVector.lengths[adjustedIndex];
final List list = new ArrayList();
for (int i = 0; i < size; i++) {
list.add(extractRowColumn(listColumnVector.child, listTypeInfo.getListElementTypeInfo(), listObjectInspector.getListElementObjectInspector(), offset + i));
}
return list;
}
case MAP:
{
final MapColumnVector mapColumnVector = (MapColumnVector) colVector;
final MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
final MapObjectInspector mapObjectInspector = (MapObjectInspector) objectInspector;
final int offset = (int) mapColumnVector.offsets[adjustedIndex];
final int size = (int) mapColumnVector.lengths[adjustedIndex];
final Map<Object, Object> map = new LinkedHashMap<Object, Object>();
for (int i = 0; i < size; i++) {
final Object key = extractRowColumn(mapColumnVector.keys, mapTypeInfo.getMapKeyTypeInfo(), mapObjectInspector.getMapKeyObjectInspector(), offset + i);
final Object value = extractRowColumn(mapColumnVector.values, mapTypeInfo.getMapValueTypeInfo(), mapObjectInspector.getMapValueObjectInspector(), offset + i);
map.put(key, value);
}
return map;
}
case STRUCT:
{
final StructColumnVector structColumnVector = (StructColumnVector) colVector;
final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
final StandardStructObjectInspector structInspector = (StandardStructObjectInspector) objectInspector;
final List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
final int size = fieldTypeInfos.size();
final List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
final Object struct = structInspector.create();
for (int i = 0; i < size; i++) {
final StructField structField = structFields.get(i);
final TypeInfo fieldTypeInfo = fieldTypeInfos.get(i);
final Object value = extractRowColumn(structColumnVector.fields[i], fieldTypeInfo, structField.getFieldObjectInspector(), adjustedIndex);
structInspector.setStructFieldData(struct, structField, value);
}
return struct;
}
case UNION:
{
final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
final UnionObjectInspector unionInspector = (UnionObjectInspector) objectInspector;
final List<ObjectInspector> unionInspectors = unionInspector.getObjectInspectors();
final UnionColumnVector unionColumnVector = (UnionColumnVector) colVector;
final byte tag = (byte) unionColumnVector.tags[adjustedIndex];
final Object object = extractRowColumn(unionColumnVector.fields[tag], objectTypeInfos.get(tag), unionInspectors.get(tag), adjustedIndex);
final StandardUnion standardUnion = new StandardUnion();
standardUnion.setTag(tag);
standardUnion.setObject(object);
return standardUnion;
}
default:
throw new RuntimeException("Category " + category.name() + " not supported");
}
}
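For orientation, a minimal sketch of how this method might be driven over a whole batch. The extractRow instance, the batch contents, and the single TIMESTAMP column layout are assumptions for illustration, not part of the Hive snippet above; imports are elided as in the surrounding snippets.

// Minimal sketch, assuming extractRow is an initialized VectorExtractRow and
// batch is a populated VectorizedRowBatch whose column 0 is a TIMESTAMP column.
TimestampColumnVector tsColVector = (TimestampColumnVector) batch.cols[0];
TypeInfo tsTypeInfo = TypeInfoFactory.timestampTypeInfo;
ObjectInspector tsInspector =
    PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
for (int i = 0; i < batch.size; i++) {
  // Honor the selected-rows vector the same way other batch consumers do.
  int batchIndex = batch.selectedInUse ? batch.selected[i] : i;
  Object value = extractRow.extractRowColumn(tsColVector, tsTypeInfo, tsInspector, batchIndex);
  // value is a TimestampWritableV2, or null when the row is SQL NULL.
}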
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
The class TestIcebergTimestampObjectInspectorHive3, method testIcebergTimestampObjectInspector.
@Test
public void testIcebergTimestampObjectInspector() {
IcebergTimestampObjectInspectorHive3 oi = IcebergTimestampObjectInspectorHive3.get();
Assert.assertEquals(ObjectInspector.Category.PRIMITIVE, oi.getCategory());
Assert.assertEquals(PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP, oi.getPrimitiveCategory());
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo.getTypeName(), oi.getTypeName());
Assert.assertEquals(Timestamp.class, oi.getJavaPrimitiveClass());
Assert.assertEquals(TimestampWritableV2.class, oi.getPrimitiveWritableClass());
Assert.assertNull(oi.copyObject(null));
Assert.assertNull(oi.getPrimitiveJavaObject(null));
Assert.assertNull(oi.getPrimitiveWritableObject(null));
Assert.assertNull(oi.convert(null));
long epochMilli = 1601471970000L;
LocalDateTime local = LocalDateTime.ofInstant(Instant.ofEpochMilli(epochMilli), ZoneId.of("UTC")).plusNanos(34000);
Timestamp ts = Timestamp.ofEpochMilli(epochMilli);
ts.setNanos(34000);
Assert.assertEquals(ts, oi.getPrimitiveJavaObject(local));
Assert.assertEquals(new TimestampWritableV2(ts), oi.getPrimitiveWritableObject(local));
Timestamp copy = (Timestamp) oi.copyObject(ts);
Assert.assertEquals(ts, copy);
Assert.assertNotSame(ts, copy);
Assert.assertFalse(oi.preferWritable());
Assert.assertEquals(local, oi.convert(ts));
}
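A small sketch of the fractional-second behavior this test depends on, using only Timestamp calls the test itself makes (ofEpochMilli, setNanos, getNanos); the toEpochMilli observation is an assumption stated in the comments.

// Sketch: ofEpochMilli on a whole-second value leaves zero nanos, and setNanos
// then supplies the entire fractional second (34000 ns = 34 microseconds here).
long epochMilli = 1601471970000L;                 // falls on a whole second
Timestamp ts = Timestamp.ofEpochMilli(epochMilli);
ts.setNanos(34000);
// ts.getNanos() now reports 34000, matching local.plusNanos(34000) above,
// and ts.toEpochMilli() should still be 1601471970000L (34000 ns rounds to 0 ms).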
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
The class VectorAssignRow, method assignRowColumn.
private void assignRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, Object object) {
if (object == null) {
assignNullRowColumn(columnVector, batchIndex, targetTypeInfo);
return;
}
switch(targetTypeInfo.getCategory()) {
case PRIMITIVE:
{
final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
switch(targetPrimitiveCategory) {
case VOID:
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
case BOOLEAN:
if (object instanceof Boolean) {
((LongColumnVector) columnVector).vector[batchIndex] = (((Boolean) object) ? 1 : 0);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = (((BooleanWritable) object).get() ? 1 : 0);
}
break;
case BYTE:
if (object instanceof Byte) {
((LongColumnVector) columnVector).vector[batchIndex] = ((Byte) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((ByteWritable) object).get();
}
break;
case SHORT:
if (object instanceof Short) {
((LongColumnVector) columnVector).vector[batchIndex] = ((Short) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((ShortWritable) object).get();
}
break;
case INT:
if (object instanceof Integer) {
((LongColumnVector) columnVector).vector[batchIndex] = ((Integer) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((IntWritable) object).get();
}
break;
case LONG:
if (object instanceof Long) {
((LongColumnVector) columnVector).vector[batchIndex] = ((Long) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((LongWritable) object).get();
}
break;
case TIMESTAMP:
if (object instanceof Timestamp) {
((TimestampColumnVector) columnVector).set(batchIndex, ((Timestamp) object).toSqlTimestamp());
} else {
((TimestampColumnVector) columnVector).set(batchIndex, ((TimestampWritableV2) object).getTimestamp().toSqlTimestamp());
}
break;
case DATE:
if (object instanceof Date) {
((LongColumnVector) columnVector).vector[batchIndex] = DateWritableV2.dateToDays((Date) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((DateWritableV2) object).getDays();
}
break;
case FLOAT:
if (object instanceof Float) {
((DoubleColumnVector) columnVector).vector[batchIndex] = ((Float) object);
} else {
((DoubleColumnVector) columnVector).vector[batchIndex] = ((FloatWritable) object).get();
}
break;
case DOUBLE:
if (object instanceof Double) {
((DoubleColumnVector) columnVector).vector[batchIndex] = ((Double) object);
} else {
((DoubleColumnVector) columnVector).vector[batchIndex] = ((DoubleWritable) object).get();
}
break;
case BINARY:
{
if (object instanceof byte[]) {
byte[] bytes = (byte[]) object;
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
} else {
BytesWritable bw = (BytesWritable) object;
((BytesColumnVector) columnVector).setVal(batchIndex, bw.getBytes(), 0, bw.getLength());
}
}
break;
case STRING:
{
if (object instanceof String) {
String string = (String) object;
byte[] bytes = string.getBytes();
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
} else {
Text tw = (Text) object;
((BytesColumnVector) columnVector).setVal(batchIndex, tw.getBytes(), 0, tw.getLength());
}
}
break;
case VARCHAR:
{
// UNDONE: Performance problem with conversion to String, then bytes...
// We store VARCHAR type stripped of pads.
HiveVarchar hiveVarchar;
if (object instanceof HiveVarchar) {
hiveVarchar = (HiveVarchar) object;
} else {
hiveVarchar = ((HiveVarcharWritable) object).getHiveVarchar();
}
// TODO: HIVE-13624 Do we need maxLength checking?
byte[] bytes = hiveVarchar.getValue().getBytes();
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
}
break;
case CHAR:
{
// UNDONE: Performance problem with conversion to String, then bytes...
// We store CHAR type stripped of pads.
HiveChar hiveChar;
if (object instanceof HiveChar) {
hiveChar = (HiveChar) object;
} else {
hiveChar = ((HiveCharWritable) object).getHiveChar();
}
// TODO: HIVE-13624 Do we need maxLength checking?
// We store CHAR in vector row batch with padding stripped.
byte[] bytes = hiveChar.getStrippedValue().getBytes();
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
}
break;
case DECIMAL:
if (columnVector instanceof DecimalColumnVector) {
if (object instanceof HiveDecimal) {
((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimal) object);
} else {
((DecimalColumnVector) columnVector).set(batchIndex, (HiveDecimalWritable) object);
}
} else {
if (object instanceof HiveDecimal) {
((Decimal64ColumnVector) columnVector).set(batchIndex, (HiveDecimal) object);
} else {
((Decimal64ColumnVector) columnVector).set(batchIndex, (HiveDecimalWritable) object);
}
}
break;
case INTERVAL_YEAR_MONTH:
if (object instanceof HiveIntervalYearMonth) {
((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonth) object).getTotalMonths();
} else {
((LongColumnVector) columnVector).vector[batchIndex] = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth().getTotalMonths();
}
break;
case INTERVAL_DAY_TIME:
if (object instanceof HiveIntervalDayTime) {
((IntervalDayTimeColumnVector) columnVector).set(batchIndex, (HiveIntervalDayTime) object);
} else {
((IntervalDayTimeColumnVector) columnVector).set(batchIndex, ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime());
}
break;
default:
throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
}
}
break;
case LIST:
{
final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
final ListTypeInfo listTypeInfo = (ListTypeInfo) targetTypeInfo;
final TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
final List list = (List) object;
final int size = list.size();
final int childCount = listColumnVector.childCount;
listColumnVector.offsets[batchIndex] = childCount;
listColumnVector.lengths[batchIndex] = size;
listColumnVector.childCount = childCount + size;
listColumnVector.child.ensureSize(childCount + size, true);
for (int i = 0; i < size; i++) {
assignRowColumn(listColumnVector.child, childCount + i, elementTypeInfo, list.get(i));
}
}
break;
case MAP:
{
final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
final Map<Object, Object> map = (Map<Object, Object>) object;
final int size = map.size();
int childCount = mapColumnVector.childCount;
mapColumnVector.offsets[batchIndex] = childCount;
mapColumnVector.lengths[batchIndex] = size;
mapColumnVector.keys.ensureSize(childCount + size, true);
mapColumnVector.values.ensureSize(childCount + size, true);
for (Map.Entry<Object, Object> entry : map.entrySet()) {
assignRowColumn(mapColumnVector.keys, childCount, mapTypeInfo.getMapKeyTypeInfo(), entry.getKey());
assignRowColumn(mapColumnVector.values, childCount, mapTypeInfo.getMapValueTypeInfo(), entry.getValue());
childCount++;
}
mapColumnVector.childCount = childCount;
}
break;
case STRUCT:
{
final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
final List<TypeInfo> targetFieldTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
final int size = targetFieldTypeInfos.size();
if (object instanceof List) {
final List struct = (List) object;
for (int i = 0; i < size; i++) {
assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), struct.get(i));
}
} else {
final Object[] array = (Object[]) object;
for (int i = 0; i < size; i++) {
assignRowColumn(structColumnVector.fields[i], batchIndex, targetFieldTypeInfos.get(i), array[i]);
}
}
}
break;
case UNION:
{
final StandardUnion union = (StandardUnion) object;
final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
final List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
final byte tag = union.getTag();
unionColumnVector.tags[batchIndex] = tag;
assignRowColumn(unionColumnVector.fields[tag], batchIndex, objectTypeInfos.get(tag), union.getObject());
}
break;
default:
throw new RuntimeException("Category " + targetTypeInfo.getCategory().name() + " not supported");
}
/*
* We always set the null flag to false when there is a value.
*/
columnVector.isNull[batchIndex] = false;
}
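A minimal usage sketch of the method above. It assumes a public overload assignRowColumn(batch, batchIndex, logicalColumnIndex, object) that dispatches into this private method, plus an init(TypeInfo[]) overload, so treat the entry points as illustrative; batch is a pre-built VectorizedRowBatch.

// Sketch: fill one row of a single-column TIMESTAMP batch. Per the instanceof
// checks above, either the Hive Timestamp or its TimestampWritableV2 is accepted.
VectorAssignRow assignRow = new VectorAssignRow();
assignRow.init(new TypeInfo[] { TypeInfoFactory.timestampTypeInfo });  // assumed overload
int batchIndex = batch.size++;
assignRow.assignRowColumn(batch, batchIndex, 0, Timestamp.ofEpochMilli(0L));
// Equivalent with the writable form:
// assignRow.assignRowColumn(batch, batchIndex, 0,
//     new TimestampWritableV2(Timestamp.ofEpochMilli(0L)));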
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
The class VectorAssignRow, method assignConvertRowColumn.
private void assignConvertRowColumn(ColumnVector columnVector, int batchIndex, TypeInfo targetTypeInfo, ObjectInspector sourceObjectInspector, Writable convertTargetWritable, Object object) {
final Category targetCategory = targetTypeInfo.getCategory();
if (targetCategory == null) {
/*
* This is a column that we don't want (i.e. not included) -- we are done.
*/
return;
}
if (object == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
try {
switch(targetCategory) {
case PRIMITIVE:
final PrimitiveObjectInspector sourcePrimitiveOI = (PrimitiveObjectInspector) sourceObjectInspector;
final PrimitiveCategory targetPrimitiveCategory = ((PrimitiveTypeInfo) targetTypeInfo).getPrimitiveCategory();
switch(targetPrimitiveCategory) {
case VOID:
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
case BOOLEAN:
((LongColumnVector) columnVector).vector[batchIndex] = (PrimitiveObjectInspectorUtils.getBoolean(object, sourcePrimitiveOI) ? 1 : 0);
break;
case BYTE:
((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getByte(object, sourcePrimitiveOI);
break;
case SHORT:
((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getShort(object, sourcePrimitiveOI);
break;
case INT:
((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getInt(object, sourcePrimitiveOI);
break;
case LONG:
((LongColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getLong(object, sourcePrimitiveOI);
break;
case TIMESTAMP:
{
final Timestamp timestamp = PrimitiveObjectInspectorUtils.getTimestamp(object, sourcePrimitiveOI);
if (timestamp == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
((TimestampColumnVector) columnVector).set(batchIndex, timestamp.toSqlTimestamp());
}
break;
case DATE:
{
final Date date = PrimitiveObjectInspectorUtils.getDate(object, sourcePrimitiveOI);
if (date == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
DateWritableV2 dateWritable = (DateWritableV2) convertTargetWritable;
if (dateWritable == null) {
dateWritable = new DateWritableV2();
}
dateWritable.set(date);
((LongColumnVector) columnVector).vector[batchIndex] = dateWritable.getDays();
}
break;
case FLOAT:
((DoubleColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getFloat(object, sourcePrimitiveOI);
break;
case DOUBLE:
((DoubleColumnVector) columnVector).vector[batchIndex] = PrimitiveObjectInspectorUtils.getDouble(object, sourcePrimitiveOI);
break;
case BINARY:
{
final BytesWritable bytesWritable = PrimitiveObjectInspectorUtils.getBinary(object, sourcePrimitiveOI);
if (bytesWritable == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
((BytesColumnVector) columnVector).setVal(batchIndex, bytesWritable.getBytes(), 0, bytesWritable.getLength());
}
break;
case STRING:
{
final String string = PrimitiveObjectInspectorUtils.getString(object, sourcePrimitiveOI);
if (string == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
Text text = (Text) convertTargetWritable;
if (text == null) {
text = new Text();
}
text.set(string);
((BytesColumnVector) columnVector).setVal(batchIndex, text.getBytes(), 0, text.getLength());
}
break;
case VARCHAR:
{
// UNDONE: Performance problem with conversion to String, then bytes...
final HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(object, sourcePrimitiveOI);
if (hiveVarchar == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
// TODO: Do we need maxLength checking?
byte[] bytes = hiveVarchar.getValue().getBytes();
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
}
break;
case CHAR:
{
// UNDONE: Performance problem with conversion to String, then bytes...
final HiveChar hiveChar = PrimitiveObjectInspectorUtils.getHiveChar(object, sourcePrimitiveOI);
if (hiveChar == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
// We store CHAR in vector row batch with padding stripped.
// TODO: Do we need maxLength checking?
final byte[] bytes = hiveChar.getStrippedValue().getBytes();
((BytesColumnVector) columnVector).setVal(batchIndex, bytes, 0, bytes.length);
}
break;
case DECIMAL:
{
final HiveDecimal hiveDecimal = PrimitiveObjectInspectorUtils.getHiveDecimal(object, sourcePrimitiveOI);
if (hiveDecimal == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
if (columnVector instanceof Decimal64ColumnVector) {
Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) columnVector;
dec64ColVector.set(batchIndex, hiveDecimal);
if (dec64ColVector.isNull[batchIndex]) {
return;
}
} else {
((DecimalColumnVector) columnVector).set(batchIndex, hiveDecimal);
}
}
break;
case INTERVAL_YEAR_MONTH:
{
final HiveIntervalYearMonth intervalYearMonth = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(object, sourcePrimitiveOI);
if (intervalYearMonth == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
((LongColumnVector) columnVector).vector[batchIndex] = intervalYearMonth.getTotalMonths();
}
break;
case INTERVAL_DAY_TIME:
{
final HiveIntervalDayTime intervalDayTime = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(object, sourcePrimitiveOI);
if (intervalDayTime == null) {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
((IntervalDayTimeColumnVector) columnVector).set(batchIndex, intervalDayTime);
}
break;
default:
throw new RuntimeException("Primitive category " + targetPrimitiveCategory.name() + " not supported");
}
break;
case LIST:
{
final ListColumnVector listColumnVector = (ListColumnVector) columnVector;
final ListObjectInspector sourceListOI = (ListObjectInspector) sourceObjectInspector;
final ObjectInspector sourceElementOI = sourceListOI.getListElementObjectInspector();
final int size = sourceListOI.getListLength(object);
final TypeInfo targetElementTypeInfo = ((ListTypeInfo) targetTypeInfo).getListElementTypeInfo();
listColumnVector.offsets[batchIndex] = listColumnVector.childCount;
listColumnVector.childCount += size;
listColumnVector.ensureSize(listColumnVector.childCount, true);
listColumnVector.lengths[batchIndex] = size;
for (int i = 0; i < size; i++) {
final Object element = sourceListOI.getListElement(object, i);
final int offset = (int) (listColumnVector.offsets[batchIndex] + i);
assignConvertRowColumn(listColumnVector.child, offset, targetElementTypeInfo, sourceElementOI, null, element);
}
}
break;
case MAP:
{
final MapColumnVector mapColumnVector = (MapColumnVector) columnVector;
final MapObjectInspector mapObjectInspector = (MapObjectInspector) sourceObjectInspector;
final MapTypeInfo mapTypeInfo = (MapTypeInfo) targetTypeInfo;
final Map<?, ?> map = mapObjectInspector.getMap(object);
for (Map.Entry<?, ?> entry : map.entrySet()) {
assignConvertRowColumn(mapColumnVector.keys, batchIndex, mapTypeInfo.getMapKeyTypeInfo(), mapObjectInspector.getMapKeyObjectInspector(), null, entry.getKey());
assignConvertRowColumn(mapColumnVector.values, batchIndex, mapTypeInfo.getMapValueTypeInfo(), mapObjectInspector.getMapValueObjectInspector(), null, entry.getValue());
}
}
break;
case STRUCT:
{
final StructColumnVector structColumnVector = (StructColumnVector) columnVector;
final StructObjectInspector sourceStructOI = (StructObjectInspector) sourceObjectInspector;
final List<? extends StructField> sourceFields = sourceStructOI.getAllStructFieldRefs();
final StructTypeInfo targetStructTypeInfo = (StructTypeInfo) targetTypeInfo;
final List<TypeInfo> targetTypeInfos = targetStructTypeInfo.getAllStructFieldTypeInfos();
final int size = targetTypeInfos.size();
for (int i = 0; i < size; i++) {
if (i < sourceFields.size()) {
final StructField sourceStructField = sourceFields.get(i);
final ObjectInspector sourceFieldOI = sourceStructField.getFieldObjectInspector();
final Object sourceData = sourceStructOI.getStructFieldData(object, sourceStructField);
assignConvertRowColumn(structColumnVector.fields[i], batchIndex, targetTypeInfos.get(i), sourceFieldOI, null, sourceData);
} else {
final ColumnVector fieldColumnVector = structColumnVector.fields[i];
VectorizedBatchUtil.setNullColIsNullValue(fieldColumnVector, batchIndex);
}
}
}
break;
case UNION:
{
final UnionColumnVector unionColumnVector = (UnionColumnVector) columnVector;
final UnionObjectInspector unionObjectInspector = (UnionObjectInspector) sourceObjectInspector;
final UnionTypeInfo unionTypeInfo = (UnionTypeInfo) targetTypeInfo;
final int tag = unionObjectInspector.getTag(object);
assignConvertRowColumn(unionColumnVector.fields[tag], batchIndex, unionTypeInfo.getAllUnionObjectTypeInfos().get(tag), unionObjectInspector.getObjectInspectors().get(tag), null, unionObjectInspector.getField(object));
}
break;
default:
throw new RuntimeException("Category " + targetCategory.name() + " not supported");
}
} catch (NumberFormatException e) {
// Some of the conversion methods throw this exception on numeric parsing errors.
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
// We always set the null flag to false when there is a value.
columnVector.isNull[batchIndex] = false;
}
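To make the TIMESTAMP conversion arm concrete, a hedged sketch of the same getTimestamp call with a STRING source. Here timestampColVector and batchIndex stand in for the method's columnVector and batchIndex parameters, and the null-on-parse-failure behavior is inferred from the null check above rather than guaranteed.

// Sketch: convert a string-typed source value into a TimestampColumnVector entry.
PrimitiveObjectInspector sourceOI =
    PrimitiveObjectInspectorFactory.javaStringObjectInspector;
Timestamp timestamp =
    PrimitiveObjectInspectorUtils.getTimestamp("2020-09-30 12:39:30", sourceOI);
if (timestamp == null) {
  // Unparseable input: mirror the method above and mark the row as NULL.
  VectorizedBatchUtil.setNullColIsNullValue(timestampColVector, batchIndex);
} else {
  timestampColVector.set(batchIndex, timestamp.toSqlTimestamp());
}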
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
The class TestHiveAccumuloTypes, method testBinaryTypes.
@Test
public void testBinaryTypes() throws Exception {
final String tableName = test.getMethodName(), user = "root", pass = "";
MockInstance mockInstance = new MockInstance(test.getMethodName());
Connector conn = mockInstance.getConnector(user, new PasswordToken(pass));
HiveAccumuloTableInputFormat inputformat = new HiveAccumuloTableInputFormat();
JobConf conf = new JobConf();
conf.set(AccumuloSerDeParameters.TABLE_NAME, tableName);
conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
conf.set(AccumuloSerDeParameters.USER_NAME, user);
conf.set(AccumuloSerDeParameters.USER_PASS, pass);
// not used for mock, but required by input format
conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, AccumuloHiveConstants.ROWID + ",cf:string,cf:boolean,cf:tinyint,cf:smallint,cf:int,cf:bigint" + ",cf:float,cf:double,cf:decimal,cf:date,cf:timestamp,cf:char,cf:varchar");
conf.set(serdeConstants.LIST_COLUMNS, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
conf.set(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, "binary");
conn.tableOperations().create(tableName);
BatchWriterConfig writerConf = new BatchWriterConfig();
BatchWriter writer = conn.createBatchWriter(tableName, writerConf);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream out = new DataOutputStream(baos);
String cf = "cf";
byte[] cfBytes = cf.getBytes();
Mutation m = new Mutation("row1");
// string
String stringValue = "string";
JavaStringObjectInspector stringOI = (JavaStringObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
LazyUtils.writePrimitiveUTF8(baos, stringOI.create(stringValue), stringOI, false, (byte) 0, null);
m.put(cfBytes, "string".getBytes(), baos.toByteArray());
// boolean
boolean booleanValue = true;
baos.reset();
JavaBooleanObjectInspector booleanOI = (JavaBooleanObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
LazyUtils.writePrimitive(baos, booleanOI.create(booleanValue), booleanOI);
m.put(cfBytes, "boolean".getBytes(), baos.toByteArray());
// tinyint
byte tinyintValue = -127;
baos.reset();
JavaByteObjectInspector byteOI = (JavaByteObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, tinyintValue, byteOI);
m.put(cfBytes, "tinyint".getBytes(), baos.toByteArray());
// smallint
short smallintValue = Short.MAX_VALUE;
baos.reset();
JavaShortObjectInspector shortOI = (JavaShortObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, smallintValue, shortOI);
m.put(cfBytes, "smallint".getBytes(), baos.toByteArray());
// int
int intValue = Integer.MAX_VALUE;
baos.reset();
JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
LazyUtils.writePrimitive(baos, intValue, intOI);
m.put(cfBytes, "int".getBytes(), baos.toByteArray());
// bigint
long bigintValue = Long.MAX_VALUE;
baos.reset();
JavaLongObjectInspector longOI = (JavaLongObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, bigintValue, longOI);
m.put(cfBytes, "bigint".getBytes(), baos.toByteArray());
// float
float floatValue = Float.MAX_VALUE;
baos.reset();
JavaFloatObjectInspector floatOI = (JavaFloatObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
LazyUtils.writePrimitive(baos, floatValue, floatOI);
m.put(cfBytes, "float".getBytes(), baos.toByteArray());
// double
double doubleValue = Double.MAX_VALUE;
baos.reset();
JavaDoubleObjectInspector doubleOI = (JavaDoubleObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
LazyUtils.writePrimitive(baos, doubleValue, doubleOI);
m.put(cfBytes, "double".getBytes(), baos.toByteArray());
// decimal
baos.reset();
HiveDecimal decimalValue = HiveDecimal.create(65536L);
HiveDecimalWritable decimalWritable = new HiveDecimalWritable(decimalValue);
decimalWritable.write(out);
m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
// date
baos.reset();
Date now = Date.ofEpochMilli(System.currentTimeMillis());
DateWritableV2 dateWritable = new DateWritableV2(now);
Date dateValue = dateWritable.get();
dateWritable.write(out);
m.put(cfBytes, "date".getBytes(), baos.toByteArray());
// timestamp
baos.reset();
Timestamp timestampValue = Timestamp.ofEpochMilli(System.currentTimeMillis());
ByteStream.Output output = new ByteStream.Output();
TimestampWritableV2 timestampWritable = new TimestampWritableV2(timestampValue);
timestampWritable.write(new DataOutputStream(output));
output.close();
m.put(cfBytes, "timestamp".getBytes(), output.toByteArray());
// char
baos.reset();
HiveChar charValue = new HiveChar("char", 4);
JavaHiveCharObjectInspector charOI = (JavaHiveCharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new CharTypeInfo(4));
LazyUtils.writePrimitiveUTF8(baos, charOI.create(charValue), charOI, false, (byte) 0, null);
m.put(cfBytes, "char".getBytes(), baos.toByteArray());
baos.reset();
HiveVarchar varcharValue = new HiveVarchar("varchar", 7);
JavaHiveVarcharObjectInspector varcharOI = (JavaHiveVarcharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new VarcharTypeInfo(7));
LazyUtils.writePrimitiveUTF8(baos, varcharOI.create(varcharValue), varcharOI, false, (byte) 0, null);
m.put(cfBytes, "varchar".getBytes(), baos.toByteArray());
writer.addMutation(m);
writer.close();
for (Entry<Key, Value> e : conn.createScanner(tableName, new Authorizations())) {
System.out.println(e);
}
// Create the RecordReader
FileInputFormat.addInputPath(conf, new Path("unused"));
InputSplit[] splits = inputformat.getSplits(conf, 0);
assertEquals(1, splits.length);
RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
Text key = reader.createKey();
AccumuloHiveRow value = reader.createValue();
reader.next(key, value);
Assert.assertEquals(13, value.getTuples().size());
ByteArrayRef byteRef = new ByteArrayRef();
// string
Text cfText = new Text(cf), cqHolder = new Text();
cqHolder.set("string");
byte[] valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyStringObjectInspector lazyStringOI = LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(false, (byte) 0);
LazyString lazyString = (LazyString) LazyFactory.createLazyObject(lazyStringOI);
lazyString.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(stringValue, lazyString.getWritableObject().toString());
// boolean
cqHolder.set("boolean");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyBooleanObjectInspector lazyBooleanOI = (LazyBooleanObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
LazyBoolean lazyBoolean = (LazyBoolean) LazyFactory.createLazyPrimitiveBinaryClass(lazyBooleanOI);
lazyBoolean.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(booleanValue, lazyBoolean.getWritableObject().get());
// tinyint
cqHolder.set("tinyint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyByteObjectInspector lazyByteOI = (LazyByteObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
LazyByte lazyByte = (LazyByte) LazyFactory.createLazyPrimitiveBinaryClass(lazyByteOI);
lazyByte.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(tinyintValue, lazyByte.getWritableObject().get());
// smallint
cqHolder.set("smallint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyShortObjectInspector lazyShortOI = (LazyShortObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
LazyShort lazyShort = (LazyShort) LazyFactory.createLazyPrimitiveBinaryClass(lazyShortOI);
lazyShort.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(smallintValue, lazyShort.getWritableObject().get());
// int
cqHolder.set("int");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
lazyInt.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(intValue, lazyInt.getWritableObject().get());
// bigint
cqHolder.set("bigint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyLongObjectInspector lazyLongOI = (LazyLongObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
LazyLong lazyLong = (LazyLong) LazyFactory.createLazyPrimitiveBinaryClass(lazyLongOI);
lazyLong.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(bigintValue, lazyLong.getWritableObject().get());
// float
cqHolder.set("float");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyFloatObjectInspector lazyFloatOI = (LazyFloatObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
LazyFloat lazyFloat = (LazyFloat) LazyFactory.createLazyPrimitiveBinaryClass(lazyFloatOI);
lazyFloat.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(floatValue, lazyFloat.getWritableObject().get(), 0);
// double
cqHolder.set("double");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyDoubleObjectInspector lazyDoubleOI = (LazyDoubleObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
LazyDouble lazyDouble = (LazyDouble) LazyFactory.createLazyPrimitiveBinaryClass(lazyDoubleOI);
lazyDouble.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(doubleValue, lazyDouble.getWritableObject().get(), 0);
// decimal
cqHolder.set("decimal");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
ByteArrayInputStream bais = new ByteArrayInputStream(valueBytes);
DataInputStream in = new DataInputStream(bais);
decimalWritable.readFields(in);
Assert.assertEquals(decimalValue, decimalWritable.getHiveDecimal());
// date
cqHolder.set("date");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
bais = new ByteArrayInputStream(valueBytes);
in = new DataInputStream(bais);
dateWritable.readFields(in);
Assert.assertEquals(dateValue, dateWritable.get());
// timestamp
cqHolder.set("timestamp");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
bais = new ByteArrayInputStream(valueBytes);
in = new DataInputStream(bais);
timestampWritable.readFields(in);
Assert.assertEquals(timestampValue, timestampWritable.getTimestamp());
// char
cqHolder.set("char");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyHiveCharObjectInspector lazyCharOI = (LazyHiveCharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new CharTypeInfo(4));
LazyHiveChar lazyChar = (LazyHiveChar) LazyFactory.createLazyObject(lazyCharOI);
lazyChar.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(charValue, lazyChar.getWritableObject().getHiveChar());
// varchar
cqHolder.set("varchar");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyHiveVarcharObjectInspector lazyVarcharOI = (LazyHiveVarcharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new VarcharTypeInfo(7));
LazyHiveVarchar lazyVarchar = (LazyHiveVarchar) LazyFactory.createLazyObject(lazyVarcharOI);
lazyVarchar.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(varcharValue.toString(), lazyVarchar.getWritableObject().getHiveVarchar().toString());
}
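The timestamp leg of this test reduces to the following round trip; a minimal sketch using only calls the test itself makes (TimestampWritableV2.write/readFields over Hive's ByteStream), with the helper name roundTrip being illustrative.

// Sketch: a Hive Timestamp survives TimestampWritableV2 serialization intact.
static Timestamp roundTrip(Timestamp original) throws IOException {
  ByteStream.Output output = new ByteStream.Output();
  new TimestampWritableV2(original).write(new DataOutputStream(output));
  TimestampWritableV2 read = new TimestampWritableV2();
  read.readFields(new DataInputStream(new ByteArrayInputStream(output.toByteArray())));
  return read.getTimestamp();
}
// roundTrip(Timestamp.ofEpochMilli(System.currentTimeMillis())) equals its input,
// which is exactly what the timestamp assertion above verifies.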