Search in sources:

Example 86 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project phoenix by apache.

Class PhoenixSerDe, method initialize().

/**
 * Initializes the SerDe from the job configuration and table properties:
 * stores the table properties, builds the lazy SerDe parameters and the
 * object inspector, and — only when an in/out work mode is configured —
 * creates the serializer and the row wrapper.
 *
 * @param conf job configuration
 * @param tbl  table properties
 * @throws SerDeException if building the SerDe parameters or inspector fails
 */
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    tableProperties = tbl;
    if (LOG.isDebugEnabled()) {
        LOG.debug("SerDe initialize : " + tbl.getProperty("name"));
    }
    serdeParams = new LazySerDeParameters(conf, tbl, getClass().getName());
    objectInspector = createLazyPhoenixInspector(conf, tbl);
    String workMode = tbl.getProperty(PhoenixStorageHandlerConstants.IN_OUT_WORK);
    // The serializer and row wrapper are only needed when a work mode is set.
    if (workMode != null) {
        serializer = new PhoenixSerializer(conf, tbl);
        row = new PhoenixRow(serdeParams.getColumnNames());
    }
}
Also used : LazySerDeParameters(org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters)

Example 87 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project phoenix by apache.

Class PhoenixSerDe, method createLazyPhoenixInspector().

/**
 * Builds a lazy struct ObjectInspector for the table's columns, using the
 * column names and column types declared in the table properties.
 *
 * @param conf job configuration (unused directly; kept for signature parity)
 * @param tbl  table properties holding the column list and type string
 * @return a lazy simple-struct ObjectInspector over all declared columns
 * @throws SerDeException if the type string cannot be parsed
 */
private ObjectInspector createLazyPhoenixInspector(Configuration conf, Properties tbl) throws SerDeException {
    List<String> names = Arrays.asList(
            tbl.getProperty(serdeConstants.LIST_COLUMNS).split(PhoenixStorageHandlerConstants.COMMA));
    List<TypeInfo> types =
            TypeInfoUtils.getTypeInfosFromTypeString(tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES));
    // One inspector per declared column type, in declaration order.
    List<ObjectInspector> inspectors = Lists.newArrayListWithExpectedSize(types.size());
    for (int i = 0; i < types.size(); i++) {
        inspectors.add(PhoenixObjectInspectorFactory.createObjectInspector(types.get(i), serdeParams));
    }
    return LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(
            names, inspectors, null, serdeParams.getSeparators()[0], serdeParams,
            ObjectInspectorOptions.JAVA);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)

Example 88 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project carbondata by apache.

Class CarbonHiveRecordReader, method createArray().

/**
 * Converts a Hive list value into a nested {@link ArrayWritable}: the inner
 * ArrayWritable holds the converted elements and is wrapped in an outer
 * single-element ArrayWritable.
 *
 * <p>Null elements produced by {@code createObject} are silently dropped,
 * matching the original behavior.
 *
 * @param obj       the raw list value as produced by the reader
 * @param inspector inspector describing the list and its element type
 * @return the wrapped array, or {@code null} if no element converted
 * @throws SerDeException if element conversion fails
 */
private ArrayWritable createArray(Object obj, ListObjectInspector inspector) throws SerDeException {
    List<?> sourceArray = inspector.getList(obj);
    ObjectInspector subInspector = inspector.getListElementObjectInspector();
    // Generified list + enhanced-for replace the raw List/Iterator of the original.
    List<Writable> converted = new ArrayList<Writable>();
    if (sourceArray != null) {
        for (Object element : sourceArray) {
            Writable writable = createObject(element, subInspector);
            if (writable != null) {
                converted.add(writable);
            }
        }
    }
    if (!converted.isEmpty()) {
        // Inner array is typed by the first element's concrete Writable class.
        ArrayWritable subArray = new ArrayWritable(converted.get(0).getClass(),
                converted.toArray(new Writable[converted.size()]));
        return new ArrayWritable(Writable.class, new Writable[] { subArray });
    }
    return null;
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ArrayWritable(org.apache.hadoop.io.ArrayWritable) ArrayList(java.util.ArrayList) Iterator(java.util.Iterator) ChunkRowIterator(org.apache.carbondata.core.scan.result.iterator.ChunkRowIterator) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) ArrayList(java.util.ArrayList) List(java.util.List)

Example 89 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project carbondata by apache.

Class CarbonHiveSerDe, method initialize().

/**
 * Initializes the SerDe from the table properties. Reads the declared column
 * names and types, optionally narrows them to the projected column ids from
 * {@code hive.io.file.readcolumn.ids}, and builds the row object inspector.
 *
 * @param configuration job configuration (asserted non-null)
 * @param tbl           table properties holding column names and types
 * @throws SerDeException declared for interface compatibility
 */
@Override
public void initialize(@Nullable Configuration configuration, Properties tbl) throws SerDeException {
    // Get column names and sort order
    assert configuration != null;
    final String colIds = configuration.get("hive.io.file.readcolumn.ids");
    final String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
    final String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);

    // Null-guard added: a missing property now yields an empty list instead of an NPE.
    final List<String> columnNames =
        (columnNameProperty == null || columnNameProperty.length() == 0)
            ? new ArrayList<String>()
            : Arrays.asList(columnNameProperty.split(","));
    final List<TypeInfo> columnTypes =
        (columnTypeProperty == null || columnTypeProperty.length() == 0)
            ? new ArrayList<TypeInfo>()
            : TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);

    final TypeInfo rowTypeInfo;
    if (colIds != null) {
        // Projection push-down: keep only the requested column ids.
        final List<String> reqColNames = new ArrayList<String>();
        final List<TypeInfo> reqColTypes = new ArrayList<TypeInfo>();
        for (String anArrayColId : colIds.split(",")) {
            // Parse each id once (the original parsed it twice per column).
            final int colId = Integer.parseInt(anArrayColId);
            reqColNames.add(columnNames.get(colId));
            reqColTypes.add(columnTypes.get(colId));
        }
        rowTypeInfo = TypeInfoFactory.getStructTypeInfo(reqColNames, reqColTypes);
    } else {
        rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
        // Stats part
        // NOTE(review): stats are reset only on this no-projection path, exactly as
        // in the original — confirm whether the colIds branch should also reset them.
        serializedSize = 0;
        deserializedSize = 0;
        status = LAST_OPERATION.UNKNOWN;
    }
    // Create row related objects (single construction site for both branches).
    this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
}
Also used : ArrayList(java.util.ArrayList) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)

Example 90 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project carbondata by apache.

Class CarbonHiveSerDe, method createArray().

/**
 * Converts a Hive list value into a nested {@link ArrayWritable}: the inner
 * ArrayWritable holds the converted elements and is wrapped in an outer
 * single-element ArrayWritable.
 *
 * <p>Null elements produced by {@code createObject} are silently dropped,
 * matching the original behavior.
 *
 * @param obj       the raw list value to serialize
 * @param inspector inspector describing the list and its element type
 * @return the wrapped array, or {@code null} if no element converted
 * @throws SerDeException if element conversion fails
 */
private ArrayWritable createArray(Object obj, ListObjectInspector inspector) throws SerDeException {
    List<?> sourceArray = inspector.getList(obj);
    ObjectInspector subInspector = inspector.getListElementObjectInspector();
    // Generified list + enhanced-for replace the raw List/Iterator of the original.
    List<Writable> converted = new ArrayList<Writable>();
    if (sourceArray != null) {
        for (Object element : sourceArray) {
            Writable writable = createObject(element, subInspector);
            if (writable != null) {
                converted.add(writable);
            }
        }
    }
    if (!converted.isEmpty()) {
        // Inner array is typed by the first element's concrete Writable class.
        ArrayWritable subArray = new ArrayWritable(converted.get(0).getClass(),
                converted.toArray(new Writable[converted.size()]));
        return new ArrayWritable(Writable.class, new Writable[] { subArray });
    }
    return null;
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) ArrayWritable(org.apache.hadoop.io.ArrayWritable) ArrayList(java.util.ArrayList) Iterator(java.util.Iterator) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) ArrayList(java.util.ArrayList) List(java.util.List)

Aggregations

SerDeException (org.apache.hadoop.hive.serde2.SerDeException)124 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)108 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)100 ArrayList (java.util.ArrayList)98 Properties (java.util.Properties)59 Test (org.junit.Test)59 Configuration (org.apache.hadoop.conf.Configuration)52 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)52 Text (org.apache.hadoop.io.Text)50 IOException (java.io.IOException)37 ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector)33 Schema (org.apache.avro.Schema)31 StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField)31 MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector)28 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)28 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)24 Put (org.apache.hadoop.hbase.client.Put)22 LazySerDeParameters (org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters)22 IntWritable (org.apache.hadoop.io.IntWritable)22 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)21