use of org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector in project phoenix by apache.
the class PhoenixSerializer method serialize.
public Writable serialize(Object values, ObjectInspector objInspector, DmlType dmlType) throws SerDeException {
    pResultWritable.clear();
    final StructObjectInspector structInspector = (StructObjectInspector) objInspector;
    final List<? extends StructField> fieldList = structInspector.getAllStructFieldRefs();
    if (LOG.isTraceEnabled()) {
        LOG.trace("FieldList : " + fieldList + " values(" + values.getClass() + ") : " + values);
    }
    int fieldCount = columnCount;
    if (dmlType == DmlType.UPDATE || dmlType == DmlType.DELETE) {
        fieldCount++;
    }
    for (int i = 0; i < fieldCount; i++) {
        if (fieldList.size() <= i) {
            break;
        }
        StructField structField = fieldList.get(i);
        if (LOG.isTraceEnabled()) {
            LOG.trace("structField[" + i + "] : " + structField);
        }
        if (structField != null) {
            Object fieldValue = structInspector.getStructFieldData(values, structField);
            ObjectInspector fieldOI = structField.getFieldObjectInspector();
            String fieldName = structField.getFieldName();
            if (LOG.isTraceEnabled()) {
                LOG.trace("Field " + fieldName + "[" + i + "] : " + fieldValue + ", " + fieldOI);
            }
            Object value = null;
            switch (fieldOI.getCategory()) {
                case PRIMITIVE:
                    value = ((PrimitiveObjectInspector) fieldOI).getPrimitiveJavaObject(fieldValue);
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Field " + fieldName + "[" + i + "] : " + value + "(" + value.getClass() + ")");
                    }
                    if (value instanceof HiveDecimal) {
                        value = ((HiveDecimal) value).bigDecimalValue();
                    } else if (value instanceof HiveChar) {
                        value = ((HiveChar) value).getValue().trim();
                    }
                    pResultWritable.add(value);
                    break;
                case LIST:
                    // Arrays are not supported in INSERT statements yet.
                    break;
                case STRUCT:
                    if (dmlType == DmlType.DELETE) {
                        // For UPDATE/DELETE, the first value is a
                        // struct<transactionid:bigint, bucketid:int, rowid:bigint, primaryKey:binary>.
                        List<Object> fieldValueList = ((StandardStructObjectInspector) fieldOI).getStructFieldsDataAsList(fieldValue);
                        // Convert the binary primary key into a map of column name to value.
                        @SuppressWarnings("unchecked")
                        Map<String, Object> primaryKeyMap = (Map<String, Object>) PhoenixStorageHandlerUtil.toMap(((BytesWritable) fieldValueList.get(3)).getBytes());
                        for (Object pkValue : primaryKeyMap.values()) {
                            pResultWritable.add(pkValue);
                        }
                    }
                    break;
                default:
                    throw new SerDeException("Phoenix Unsupported column type: " + fieldOI.getCategory());
            }
        }
    }
    return pResultWritable;
}
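
The DELETE branch above leans on StandardStructObjectInspector.getStructFieldsDataAsList to pull every field of the key struct out in one call. The following minimal sketch is not from the Phoenix codebase; the struct shape (id, name) and the row values are invented for illustration. It shows how a StandardStructObjectInspector built from field names and matching inspectors reads a "standard" struct value, which is simply a List (or Object[]) in field order.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StandardStructOIDemo {
    public static void main(String[] args) {
        // Field names and matching inspectors for a struct<id:bigint, name:string>.
        List<String> fieldNames = Arrays.asList("id", "name");
        List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
                PrimitiveObjectInspectorFactory.javaLongObjectInspector,
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        StandardStructObjectInspector rowOI =
                ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);

        // A "standard" struct value is simply a List (or Object[]) in field order.
        List<Object> row = Arrays.<Object>asList(42L, "phoenix");

        // Read all fields at once, as the serializer above does for the DELETE key struct.
        List<Object> allValues = rowOI.getStructFieldsDataAsList(row);
        System.out.println(allValues); // [42, phoenix]

        // Or read a single field through its StructField reference.
        StructField nameRef = rowOI.getStructFieldRef("name");
        System.out.println(rowOI.getStructFieldData(row, nameRef)); // phoenix
    }
}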
use of org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector in project hive by apache.
the class SemanticAnalyzer method genUDTFPlan.
private Operator genUDTFPlan(GenericUDTF genericUDTF, String outputTableAlias, ArrayList<String> colAliases, QB qb, Operator input, boolean outerLV) throws SemanticException {
    // No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY
    QBParseInfo qbp = qb.getParseInfo();
    if (!qbp.getDestToGroupBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_GROUP_BY.getMsg());
    }
    if (!qbp.getDestToDistributeBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_DISTRIBUTE_BY.getMsg());
    }
    if (!qbp.getDestToSortBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_SORT_BY.getMsg());
    }
    if (!qbp.getDestToClusterBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_CLUSTER_BY.getMsg());
    }
    if (!qbp.getAliasToLateralViews().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_LATERAL_VIEW.getMsg());
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Table alias: " + outputTableAlias + " Col aliases: " + colAliases);
    }
    // Use the RowResolver from the input operator to generate an input
    // ObjectInspector that can be used to initialize the UDTF. Then the
    // resulting output ObjectInspector can be used to make the RowResolver
    // for the UDTF operator.
    RowResolver selectRR = opParseCtx.get(input).getRowResolver();
    ArrayList<ColumnInfo> inputCols = selectRR.getColumnInfos();
    // Create the object inspector for the input columns and initialize the UDTF.
    ArrayList<String> colNames = new ArrayList<String>();
    ObjectInspector[] colOIs = new ObjectInspector[inputCols.size()];
    for (int i = 0; i < inputCols.size(); i++) {
        colNames.add(inputCols.get(i).getInternalName());
        colOIs[i] = inputCols.get(i).getObjectInspector();
    }
    StandardStructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(colNames, Arrays.asList(colOIs));
    StructObjectInspector outputOI = genericUDTF.initialize(rowOI);
    int numUdtfCols = outputOI.getAllStructFieldRefs().size();
    if (colAliases.isEmpty()) {
        // The user did not specify alias names; infer them from the output ObjectInspector.
        for (StructField field : outputOI.getAllStructFieldRefs()) {
            colAliases.add(field.getFieldName());
        }
    }
    // Make sure that the number of column aliases in the AS clause matches
    // the number of columns output by the UDTF.
    int numSuppliedAliases = colAliases.size();
    if (numUdtfCols != numSuppliedAliases) {
        throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH.getMsg("expected " + numUdtfCols + " aliases " + "but got " + numSuppliedAliases));
    }
    // Generate the output column infos / row resolver using internal names.
    ArrayList<ColumnInfo> udtfCols = new ArrayList<ColumnInfo>();
    Iterator<String> colAliasesIter = colAliases.iterator();
    for (StructField sf : outputOI.getAllStructFieldRefs()) {
        String colAlias = colAliasesIter.next();
        assert (colAlias != null);
        // Since the UDTF operator feeds into an LVJ operator that will rename
        // all the internal names, we can just use the field name from the UDTF's OI
        // as the internal name.
        ColumnInfo col = new ColumnInfo(sf.getFieldName(), TypeInfoUtils.getTypeInfoFromObjectInspector(sf.getFieldObjectInspector()), outputTableAlias, false);
        udtfCols.add(col);
    }
    // Create the row resolver for this operator from the output columns.
    RowResolver out_rwsch = new RowResolver();
    for (int i = 0; i < udtfCols.size(); i++) {
        out_rwsch.put(outputTableAlias, colAliases.get(i), udtfCols.get(i));
    }
    // Add the UDTFOperator to the operator DAG.
    Operator<?> udtf = putOpInsertMap(OperatorFactory.getAndMakeChild(new UDTFDesc(genericUDTF, outerLV), new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
    return udtf;
}
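
The two central lines of genUDTFPlan are the construction of a StandardStructObjectInspector for the input row and the call to GenericUDTF.initialize, which hands back the UDTF's output schema. The sketch below is a standalone approximation, not taken from SemanticAnalyzer: it uses Hive's built-in explode() UDTF as a stand-in and a single invented array<string> input column, only to show the same handshake and how the output column names can then be read from the returned inspector.

import java.util.Arrays;

import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class UdtfInitSketch {
    public static void main(String[] args) throws Exception {
        // Input row shape: a single column "col0" of type array<string>,
        // which is what explode() expects as its argument.
        ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        StandardStructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("col0"), Arrays.asList(listOI));

        // Initialize the UDTF against the input row inspector, as genUDTFPlan does,
        // and read back the output schema it produces.
        GenericUDTF explode = new GenericUDTFExplode();
        StructObjectInspector outputOI = explode.initialize(rowOI);
        for (StructField sf : outputOI.getAllStructFieldRefs()) {
            System.out.println(sf.getFieldName() + " : "
                    + sf.getFieldObjectInspector().getTypeName());
        }
        // Prints something like: col : string
    }
}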
use of org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector in project metacat by Netflix.
the class HiveTypeConverter method toMetacatType.
@Override
public Type toMetacatType(final String type) {
    // Hack to fix Presto's "varchar" type arriving with no length, which Hive requires.
    final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("varchar".equals(type.toLowerCase()) ? serdeConstants.STRING_TYPE_NAME : type);
    ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
    // The standard struct object inspector lower-cases field names; wrap it so the
    // struct fields keep their original case.
    if (typeInfo.getCategory().equals(ObjectInspector.Category.STRUCT)) {
        final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
        final StandardStructObjectInspector objectInspector = (StandardStructObjectInspector) oi;
        oi = new HiveTypeConverter.SameCaseStandardStructObjectInspector(structTypeInfo.getAllStructFieldNames(), objectInspector);
    }
    return getCanonicalType(oi);
}
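
The SameCaseStandardStructObjectInspector wrapper is needed because the inspector returned by TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo lower-cases struct field names, while the parsed StructTypeInfo keeps the names as written. The short sketch below is not from the metacat codebase and the struct type string is invented; it only makes the lower-casing behavior visible.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StructFieldCaseSketch {
    public static void main(String[] args) {
        // Parse a struct type whose field names are mixed case.
        TypeInfo typeInfo =
                TypeInfoUtils.getTypeInfoFromTypeString("struct<userId:bigint,userName:string>");
        ObjectInspector oi =
                TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);

        // For a struct type this is a StandardStructObjectInspector, whose
        // StructField references carry lower-cased names.
        StandardStructObjectInspector structOI = (StandardStructObjectInspector) oi;
        for (StructField sf : structOI.getAllStructFieldRefs()) {
            System.out.println(sf.getFieldName()); // expected: userid, username
        }
    }
}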