Use of org.apache.hadoop.hive.serde2.SerDeException in project phoenix by apache.
Class PhoenixSerDe, method initialize.
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    tableProperties = tbl;
    if (LOG.isDebugEnabled()) {
        LOG.debug("SerDe initialize : " + tbl.getProperty("name"));
    }
    serdeParams = new LazySerDeParameters(conf, tbl, getClass().getName());
    objectInspector = createLazyPhoenixInspector(conf, tbl);
    String inOutWork = tbl.getProperty(PhoenixStorageHandlerConstants.IN_OUT_WORK);
    if (inOutWork == null) {
        return;
    }
    // The serializer and row wrapper are only created when the storage handler
    // has flagged the table for input/output work.
    serializer = new PhoenixSerializer(conf, tbl);
    row = new PhoenixRow(serdeParams.getColumnNames());
}
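For context, Hive normally drives this initialize call itself; the following is only a minimal sketch of the table properties the method expects (the table name, column list, and types are illustrative values, not taken from the Phoenix source, and direct instantiation of PhoenixSerDe here is for illustration only):

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.phoenix.hive.PhoenixSerDe;

public class PhoenixSerDeInitSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Properties tbl = new Properties();
        // Hive passes the table name plus comma-separated column names and types.
        tbl.setProperty("name", "default.phoenix_table");
        tbl.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
        tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
        // Without the IN_OUT_WORK property, initialize returns before creating
        // the serializer and row wrapper, as in the snippet above.
        PhoenixSerDe serDe = new PhoenixSerDe();
        serDe.initialize(conf, tbl);
    }
}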
Use of org.apache.hadoop.hive.serde2.SerDeException in project phoenix by apache.
Class PhoenixSerDe, method createLazyPhoenixInspector.
private ObjectInspector createLazyPhoenixInspector(Configuration conf, Properties tbl)
        throws SerDeException {
    List<String> columnNameList = Arrays.asList(tbl.getProperty(serdeConstants.LIST_COLUMNS)
            .split(PhoenixStorageHandlerConstants.COMMA));
    List<TypeInfo> columnTypeList = TypeInfoUtils.getTypeInfosFromTypeString(
            tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES));
    List<ObjectInspector> columnObjectInspectors =
            Lists.newArrayListWithExpectedSize(columnTypeList.size());
    for (TypeInfo typeInfo : columnTypeList) {
        columnObjectInspectors.add(
                PhoenixObjectInspectorFactory.createObjectInspector(typeInfo, serdeParams));
    }
    return LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(columnNameList,
            columnObjectInspectors, null, serdeParams.getSeparators()[0], serdeParams,
            ObjectInspectorOptions.JAVA);
}
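The returned inspector is a Hive StructObjectInspector, so it can be interrogated with the usual struct-field API. A small hedged sketch of walking its fields (the class and method names here are made up for illustration):

import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class InspectorDumpSketch {
    // Prints each field name and the Hive type name its sub-inspector reports.
    static void describe(StructObjectInspector inspector) {
        for (StructField field : inspector.getAllStructFieldRefs()) {
            System.out.println(field.getFieldName() + " : "
                    + field.getFieldObjectInspector().getTypeName());
        }
    }
}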
Use of org.apache.hadoop.hive.serde2.SerDeException in project carbondata by apache.
Class CarbonHiveRecordReader, method createArray.
private ArrayWritable createArray(Object obj, ListObjectInspector inspector) throws SerDeException {
    List sourceArray = inspector.getList(obj);
    ObjectInspector subInspector = inspector.getListElementObjectInspector();
    List array = new ArrayList();
    Iterator iterator;
    if (sourceArray != null) {
        for (iterator = sourceArray.iterator(); iterator.hasNext(); ) {
            Object curObj = iterator.next();
            Writable newObj = createObject(curObj, subInspector);
            if (newObj != null) {
                array.add(newObj);
            }
        }
    }
    if (array.size() > 0) {
        // Wrap the converted elements in an inner ArrayWritable, then wrap that again
        // so the result is a single-element outer ArrayWritable.
        ArrayWritable subArray = new ArrayWritable(((Writable) array.get(0)).getClass(),
                (Writable[]) array.toArray(new Writable[array.size()]));
        return new ArrayWritable(Writable.class, new Writable[] { subArray });
    }
    return null;
}
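Note the shape of the result: the converted elements go into an inner ArrayWritable, which is then wrapped as the single element of an outer ArrayWritable. A standalone sketch of that double wrapping (IntWritable is used purely for illustration):

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

public class ArrayWrappingSketch {
    public static void main(String[] args) {
        // Inner array holds the converted list elements.
        ArrayWritable inner = new ArrayWritable(IntWritable.class,
                new Writable[] { new IntWritable(1), new IntWritable(2) });
        // Outer array wraps the inner one as its only element, mirroring createArray.
        ArrayWritable outer = new ArrayWritable(Writable.class, new Writable[] { inner });
        System.out.println(outer.get().length);  // 1
    }
}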
Use of org.apache.hadoop.hive.serde2.SerDeException in project carbondata by apache.
Class CarbonHiveSerDe, method initialize.
@Override
public void initialize(@Nullable Configuration configuration, Properties tbl) throws SerDeException {
    final TypeInfo rowTypeInfo;
    final List<String> columnNames;
    final List<String> reqColNames;
    final List<TypeInfo> columnTypes;
    // Get column names and sort order
    assert configuration != null;
    final String colIds = configuration.get("hive.io.file.readcolumn.ids");
    final String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
    final String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
    if (columnNameProperty.length() == 0) {
        columnNames = new ArrayList<String>();
    } else {
        columnNames = Arrays.asList(columnNameProperty.split(","));
    }
    if (columnTypeProperty.length() == 0) {
        columnTypes = new ArrayList<TypeInfo>();
    } else {
        columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    }
    if (colIds != null) {
        reqColNames = new ArrayList<String>();
        String[] arraySelectedColId = colIds.split(",");
        List<TypeInfo> reqColTypes = new ArrayList<TypeInfo>();
        for (String anArrayColId : arraySelectedColId) {
            reqColNames.add(columnNames.get(Integer.parseInt(anArrayColId)));
            reqColTypes.add(columnTypes.get(Integer.parseInt(anArrayColId)));
        }
        // Create row related objects
        rowTypeInfo = TypeInfoFactory.getStructTypeInfo(reqColNames, reqColTypes);
        this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
    } else {
        // Create row related objects
        rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
        this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
        // Stats part
        serializedSize = 0;
        deserializedSize = 0;
        status = LAST_OPERATION.UNKNOWN;
    }
}
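The interesting branch is the column-projection one: hive.io.file.readcolumn.ids is a comma-separated list of column positions, which is used to prune the full name/type lists. A self-contained sketch of just that pruning step (the sample columns and class name are illustrative):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ColumnProjectionSketch {
    public static void main(String[] args) {
        List<String> columnNames = Arrays.asList("id", "name", "salary");
        List<String> columnTypes = Arrays.asList("int", "string", "double");
        // Hive supplies the projected column positions as a comma-separated string.
        String colIds = "0,2";
        List<String> reqColNames = new ArrayList<String>();
        List<String> reqColTypes = new ArrayList<String>();
        for (String colId : colIds.split(",")) {
            int idx = Integer.parseInt(colId);
            reqColNames.add(columnNames.get(idx));
            reqColTypes.add(columnTypes.get(idx));
        }
        System.out.println(reqColNames);  // [id, salary]
        System.out.println(reqColTypes);  // [int, double]
    }
}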
Use of org.apache.hadoop.hive.serde2.SerDeException in project carbondata by apache.
Class CarbonHiveSerDe, method createArray.
private ArrayWritable createArray(Object obj, ListObjectInspector inspector) throws SerDeException {
    List sourceArray = inspector.getList(obj);
    ObjectInspector subInspector = inspector.getListElementObjectInspector();
    List array = new ArrayList();
    Iterator iterator;
    if (sourceArray != null) {
        for (iterator = sourceArray.iterator(); iterator.hasNext(); ) {
            Object curObj = iterator.next();
            Writable newObj = createObject(curObj, subInspector);
            if (newObj != null) {
                array.add(newObj);
            }
        }
    }
    if (array.size() > 0) {
        ArrayWritable subArray = new ArrayWritable(((Writable) array.get(0)).getClass(),
                (Writable[]) array.toArray(new Writable[array.size()]));
        return new ArrayWritable(Writable.class, new Writable[] { subArray });
    }
    return null;
}
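For exercising a conversion like this outside Hive, a ListObjectInspector can be obtained from Hive's standard inspector factories. A minimal sketch (the int element type and class name are illustrative):

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ListInspectorSketch {
    public static void main(String[] args) {
        // A standard list-of-int inspector, the kind of ListObjectInspector createArray receives.
        ListObjectInspector inspector = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        List<?> elements = inspector.getList(Arrays.asList(1, 2, 3));
        System.out.println(elements);  // [1, 2, 3]
    }
}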