Use of org.apache.hadoop.hive.serde2.thrift.test.Complex in project presto by prestodb.
Class TestHiveFileFormats, method testRcBinaryPageSource:
@Test(dataProvider = "rowCount")
public void testRcBinaryPageSource(int rowCount) throws Exception {
    // RCBinary does not support complex types as map keys and interprets empty VARCHAR as null
    List<TestColumn> testColumns = TEST_COLUMNS.stream()
            .filter(testColumn -> !testColumn.getName().equals("t_empty_varchar"))
            .collect(toList());
    TestingConnectorSession session = new TestingConnectorSession(
            new HiveSessionProperties(new HiveClientConfig().setRcfileOptimizedReaderEnabled(true)).getSessionProperties());
    assertThatFileFormat(RCBINARY)
            .withColumns(testColumns)
            .withRowsCount(rowCount)
            .withSession(session)
            .isReadableByPageSource(new RcFilePageSourceFactory(TYPE_MANAGER, HDFS_ENVIRONMENT));
}
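The same stream-filter pattern extends when a format cannot read several columns; a minimal sketch with a hypothetical excludeColumns helper (TestColumn and TEST_COLUMNS are the test's own fixtures):

import java.util.List;
import java.util.Set;
import static java.util.stream.Collectors.toList;

// Hypothetical helper: drop every test column whose name is in the excluded set.
static List<TestColumn> excludeColumns(List<TestColumn> columns, Set<String> excludedNames) {
    return columns.stream()
            .filter(column -> !excludedNames.contains(column.getName()))
            .collect(toList());
}

// Usage mirroring the test above:
// List<TestColumn> testColumns = excludeColumns(TEST_COLUMNS, Set.of("t_empty_varchar"));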
Use of org.apache.hadoop.hive.serde2.thrift.test.Complex in project hive by apache.
Class MetadataTypedColumnsetSerDe, method serialize:
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    if (objInspector.getCategory() != Category.STRUCT) {
        throw new SerDeException(getClass().toString()
                + " can only serialize struct types, but we got: " + objInspector.getTypeName());
    }
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < fields.size(); i++) {
        if (i > 0) {
            sb.append(separator);
        }
        Object column = soi.getStructFieldData(obj, fields.get(i));
        if (fields.get(i).getFieldObjectInspector().getCategory() == Category.PRIMITIVE) {
            // For primitive objects, serialize to a plain string
            sb.append(column == null ? nullString : column.toString());
        } else {
            // For complex objects, serialize to JSON format
            sb.append(SerDeUtils.getJSONString(column, fields.get(i).getFieldObjectInspector()));
        }
    }
    serializeCache.set(sb.toString());
    return serializeCache;
}
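A worked example makes the row format above concrete; a sketch assuming separator "," and nullString "\N" (in this SerDe both actually come from table properties, with Ctrl-A, \001, as the usual separator default):

// Struct {1, null, ["x", "y"]} run through the loop above:
String separator = ",";    // assumed for readability
String nullString = "\\N"; // assumed null marker
StringBuilder sb = new StringBuilder();
sb.append("1");                               // primitive INT: column.toString()
sb.append(separator).append(nullString);      // null primitive: nullString
sb.append(separator).append("[\"x\",\"y\"]"); // LIST<STRING>: SerDeUtils.getJSONString output
assert sb.toString().equals("1,\\N,[\"x\",\"y\"]");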
Use of org.apache.hadoop.hive.serde2.thrift.test.Complex in project hive by apache.
Class VectorDeserializeRow, method initSourceEntry:
/*
 * Initialize one column's source deserialization-related arrays.
 */
private void initSourceEntry(int logicalColumnIndex, int projectionColumnNum, TypeInfo sourceTypeInfo) {
    isConvert[logicalColumnIndex] = false;
    projectionColumnNums[logicalColumnIndex] = projectionColumnNum;
    Category sourceCategory = sourceTypeInfo.getCategory();
    sourceCategories[logicalColumnIndex] = sourceCategory;
    if (sourceCategory == Category.PRIMITIVE) {
        PrimitiveTypeInfo sourcePrimitiveTypeInfo = (PrimitiveTypeInfo) sourceTypeInfo;
        PrimitiveCategory sourcePrimitiveCategory = sourcePrimitiveTypeInfo.getPrimitiveCategory();
        sourcePrimitiveCategories[logicalColumnIndex] = sourcePrimitiveCategory;
        switch (sourcePrimitiveCategory) {
            case CHAR:
                maxLengths[logicalColumnIndex] = ((CharTypeInfo) sourcePrimitiveTypeInfo).getLength();
                break;
            case VARCHAR:
                maxLengths[logicalColumnIndex] = ((VarcharTypeInfo) sourcePrimitiveTypeInfo).getLength();
                break;
            default:
                // No additional data type specific setting.
                break;
        }
    } else {
        // We don't currently support complex types.
        Preconditions.checkState(false);
    }
}
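The CHAR and VARCHAR branches record the declared maximum length so the vectorized reader can apply it later; a short sketch of where such a TypeInfo comes from, using Hive's TypeInfoUtils (independent of this class):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

TypeInfo sourceTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString("varchar(50)");
int maxLength = ((VarcharTypeInfo) sourceTypeInfo).getLength(); // 50; what maxLengths[i] records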
Use of org.apache.hadoop.hive.serde2.thrift.test.Complex in project hive by apache.
Class GenericUDFStruct, method initialize:
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    int numFields = arguments.length;
    ret = new Object[numFields];
    ArrayList<String> fname = new ArrayList<String>(numFields);
    for (int f = 1; f <= numFields; f++) {
        fname.add("col" + f);
    }
    boolean constantStruct = true;
    for (int i = 0; i < arguments.length; i++) {
        ObjectInspector oi = arguments[i];
        constantStruct &= (oi.getCategory() == Category.PRIMITIVE) && (oi instanceof ConstantObjectInspector);
        if (constantStruct) {
            // nested complex types trigger Kryo issue #216 in plan deserialization
            ret[i] = ((ConstantObjectInspector) oi).getWritableConstantValue();
        }
    }
    if (constantStruct) {
        return ObjectInspectorFactory.getStandardConstantStructObjectInspector(fname, Arrays.asList(arguments), Arrays.asList(ret));
    } else {
        return ObjectInspectorFactory.getStandardStructObjectInspector(fname, Arrays.asList(arguments));
    }
}
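Field names are always generated as col1..colN, and the struct is only constant-folded when every argument is a constant primitive. The accumulated condition, restated as a standalone predicate (hypothetical name, same semantics):

import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;

// True only when all arguments are constant primitives; nested complex
// constants are avoided because of Kryo issue #216 (see comment above).
static boolean isConstantPrimitiveStruct(ObjectInspector[] arguments) {
    for (ObjectInspector oi : arguments) {
        if (oi.getCategory() != Category.PRIMITIVE || !(oi instanceof ConstantObjectInspector)) {
            return false;
        }
    }
    return true;
}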
Use of org.apache.hadoop.hive.serde2.thrift.test.Complex in project hive by apache.
Class TypedBytesSerDe, method serializeField:
private void serializeField(Object o, ObjectInspector oi, Object reuse) throws IOException {
    switch (oi.getCategory()) {
        case PRIMITIVE: {
            PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
            switch (poi.getPrimitiveCategory()) {
                case VOID: {
                    return;
                }
                case BOOLEAN: {
                    BooleanObjectInspector boi = (BooleanObjectInspector) poi;
                    BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
                    r.set(boi.get(o));
                    tbOut.write(r);
                    return;
                }
                case BYTE: {
                    ByteObjectInspector boi = (ByteObjectInspector) poi;
                    ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
                    r.set(boi.get(o));
                    tbOut.write(r);
                    return;
                }
                case SHORT: {
                    ShortObjectInspector spoi = (ShortObjectInspector) poi;
                    ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
                    r.set(spoi.get(o));
                    tbOut.write(r);
                    return;
                }
                case INT: {
                    IntObjectInspector ioi = (IntObjectInspector) poi;
                    IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
                    r.set(ioi.get(o));
                    tbOut.write(r);
                    return;
                }
                case LONG: {
                    LongObjectInspector loi = (LongObjectInspector) poi;
                    LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
                    r.set(loi.get(o));
                    tbOut.write(r);
                    return;
                }
                case FLOAT: {
                    FloatObjectInspector foi = (FloatObjectInspector) poi;
                    FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
                    r.set(foi.get(o));
                    tbOut.write(r);
                    return;
                }
                case DOUBLE: {
                    DoubleObjectInspector doi = (DoubleObjectInspector) poi;
                    DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
                    r.set(doi.get(o));
                    tbOut.write(r);
                    return;
                }
                case STRING: {
                    StringObjectInspector soi = (StringObjectInspector) poi;
                    Text t = soi.getPrimitiveWritableObject(o);
                    tbOut.write(t);
                    return;
                }
                default: {
                    throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
                }
            }
        }
        case LIST:
        case MAP:
        case STRUCT: {
            // For complex objects, serialize to JSON format
            String s = SerDeUtils.getJSONString(o, oi);
            Text t = reuse == null ? new Text() : (Text) reuse;
            // convert to Text and write it
            t.set(s);
            tbOut.write(t);
            // prevent fall-through into the default case
            return;
        }
        default: {
            throw new RuntimeException("Unrecognized type: " + oi.getCategory());
        }
    }
}
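Every primitive branch applies Hadoop's Writable-reuse idiom: reuse the caller-supplied object when one exists, otherwise allocate. The pattern in isolation, as a sketch with a hypothetical helper name:

import org.apache.hadoop.io.IntWritable;

// Hypothetical helper isolating the reuse idiom from the INT branch above.
static IntWritable toIntWritable(int value, Object reuse) {
    IntWritable w = reuse == null ? new IntWritable() : (IntWritable) reuse;
    w.set(value); // overwrite in place instead of allocating per call
    return w;
}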