use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
the class HBaseRowSerializer method serializeField.
private void serializeField(Object value, StructField field, ColumnMapping colMap, Put put) throws IOException {
  if (value == null) {
    // a null object, we do not serialize it
    return;
  }
  // Get the field objectInspector and the field object.
  ObjectInspector foi = field.getFieldObjectInspector();
  // If the field corresponds to a column family in HBase
  if (colMap.qualifierName == null) {
    MapObjectInspector moi = (MapObjectInspector) foi;
    Map<?, ?> map = moi.getMap(value);
    if (map == null) {
      return;
    }
    ObjectInspector koi = moi.getMapKeyObjectInspector();
    ObjectInspector voi = moi.getMapValueObjectInspector();
    for (Map.Entry<?, ?> entry : map.entrySet()) {
      // Get the Key
      // Map keys are required to be primitive and may be serialized in binary format
      byte[] columnQualifierBytes = serialize(entry.getKey(), koi, 3, colMap.binaryStorage.get(0));
      if (columnQualifierBytes == null) {
        continue;
      }
      // Map values may be serialized in binary format when they are primitive and binary
      // serialization is the option selected
      byte[] bytes = serialize(entry.getValue(), voi, 3, colMap.binaryStorage.get(1));
      if (bytes == null) {
        continue;
      }
      put.add(colMap.familyNameBytes, columnQualifierBytes, bytes);
    }
  } else {
    byte[] bytes;
    // If the field is not a primitive but the column mapping declares a primitive,
    // serialize the data to a JSON string. Otherwise serialize the data in the
    // delimited way.
    if (!foi.getCategory().equals(ObjectInspector.Category.PRIMITIVE)
        && colMap.isCategory(ObjectInspector.Category.PRIMITIVE)) {
      // we always serialize the String type using the escaped algorithm for LazyString
      bytes = serialize(SerDeUtils.getJSONString(value, foi),
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, 1, false);
    } else {
      // use the serialization option switch to write primitive values as either a variable
      // length UTF8 string or a fixed width bytes if serializing in binary format
      bytes = serialize(value, foi, 1, colMap.binaryStorage.get(0));
    }
    if (bytes == null) {
      return;
    }
    put.add(colMap.familyNameBytes, colMap.qualifierNameBytes, bytes);
  }
}
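For context, a minimal standalone sketch of the map-walking pattern in the column-family branch above. HBaseRowSerializer.serialize() is a private helper, so this sketch substitutes plain UTF-8 encoding through PrimitiveObjectInspectorUtils; the MapFieldWalk class and the sample data are hypothetical, not part of the Hive source.

import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

public class MapFieldWalk {
  public static void main(String[] args) {
    // A standard map<string,string> inspector built from javaStringObjectInspector.
    MapObjectInspector moi = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);

    Map<String, String> mapField = new LinkedHashMap<>();
    mapField.put("qual1", "value1");
    mapField.put("qual2", "value2");

    PrimitiveObjectInspector koi = (PrimitiveObjectInspector) moi.getMapKeyObjectInspector();
    PrimitiveObjectInspector voi = (PrimitiveObjectInspector) moi.getMapValueObjectInspector();
    for (Map.Entry<?, ?> entry : moi.getMap(mapField).entrySet()) {
      // Stand-in for the private serialize() helper: read each key/value through
      // its inspector and encode it as UTF-8, i.e. the non-binary storage option.
      byte[] qualifier = PrimitiveObjectInspectorUtils.getString(entry.getKey(), koi)
          .getBytes(StandardCharsets.UTF_8);
      byte[] value = PrimitiveObjectInspectorUtils.getString(entry.getValue(), voi)
          .getBytes(StandardCharsets.UTF_8);
      System.out.println(new String(qualifier, StandardCharsets.UTF_8)
          + " -> " + new String(value, StandardCharsets.UTF_8));
    }
  }
}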
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
the class DDLTask method alterTableAlterPart.
/**
 * Alter the type of a partition column in a table.
 *
 * @param db
 *          Database containing the table whose partition column is altered.
 * @param alterPartitionDesc
 *          describes the partition column type change.
 * @return 0 when execution succeeds; a value above 0 if it fails.
 * @throws HiveException
 */
private int alterTableAlterPart(Hive db, AlterTableAlterPartDesc alterPartitionDesc) throws HiveException {
  Table tbl = db.getTable(alterPartitionDesc.getTableName(), true);
  String tabName = alterPartitionDesc.getTableName();
  // This is checked by DDLSemanticAnalyzer
  assert (tbl.isPartitioned());
  List<FieldSchema> newPartitionKeys = new ArrayList<FieldSchema>();
  // Check if the existing partition values can be type casted to the new column type
  // with a non null value before trying to alter the partition column type.
  try {
    Set<Partition> partitions = db.getAllPartitionsOf(tbl);
    int colIndex = -1;
    int i = 0;
    for (FieldSchema col : tbl.getTTable().getPartitionKeys()) {
      if (col.getName().compareTo(alterPartitionDesc.getPartKeySpec().getName()) == 0) {
        colIndex = i;
        break;
      }
      i++;
    }
    if (colIndex == -1) {
      throw new HiveException("Cannot find partition column " + alterPartitionDesc.getPartKeySpec().getName());
    }
    TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString(alterPartitionDesc.getPartKeySpec().getType());
    ObjectInspector outputOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType);
    Converter converter = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);
    // For all the existing partitions, check if the value can be type casted to a non-null object
    for (Partition part : partitions) {
      if (part.getName().equals(conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME))) {
        continue;
      }
      try {
        String value = part.getValues().get(colIndex);
        Object convertedValue = converter.convert(value);
        if (convertedValue == null) {
          throw new HiveException("Converting from " + TypeInfoFactory.stringTypeInfo + " to " + expectedType
              + " for value : " + value + " resulted in NULL object");
        }
      } catch (Exception e) {
        throw new HiveException("Exception while converting " + TypeInfoFactory.stringTypeInfo + " to "
            + expectedType + " for value : " + part.getValues().get(colIndex));
      }
    }
  } catch (Exception e) {
    throw new HiveException("Exception while checking type conversion of existing partition values to "
        + alterPartitionDesc.getPartKeySpec() + " : " + e.getMessage());
  }
  for (FieldSchema col : tbl.getTTable().getPartitionKeys()) {
    if (col.getName().compareTo(alterPartitionDesc.getPartKeySpec().getName()) == 0) {
      newPartitionKeys.add(alterPartitionDesc.getPartKeySpec());
    } else {
      newPartitionKeys.add(col);
    }
  }
  tbl.getTTable().setPartitionKeys(newPartitionKeys);
  try {
    db.alterTable(tabName, tbl, null);
  } catch (InvalidOperationException e) {
    throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to alter " + tabName);
  }
  work.getInputs().add(new ReadEntity(tbl));
  // We've already locked the table as the input, don't relock it as the output.
  addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
  return 0;
}
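The heart of the check above is the Converter built from javaStringObjectInspector to the target type's writable inspector. A minimal sketch of that behavior, assuming an int target type (the PartitionValueCheck class and sample values are hypothetical): a string that cannot be cast converts to null, which the method above turns into a HiveException.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class PartitionValueCheck {
  public static void main(String[] args) {
    // Target type the partition column is being altered to.
    TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString("int");
    ObjectInspector outputOI =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType);
    Converter converter = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, outputOI);

    // "2023" casts cleanly to int; "abc" converts to null, which the
    // DDLTask code above treats as a failed type check.
    for (String value : new String[] {"2023", "abc"}) {
      System.out.println(value + " -> " + converter.convert(value));
    }
  }
}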
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
the class FetchOperator method createPartValue.
private Object[] createPartValue(PartitionDesc partDesc, StructObjectInspector partOI) {
  Map<String, String> partSpec = partDesc.getPartSpec();
  List<? extends StructField> fields = partOI.getAllStructFieldRefs();
  Object[] partValues = new Object[fields.size()];
  for (int i = 0; i < partValues.length; i++) {
    StructField field = fields.get(i);
    String value = partSpec.get(field.getFieldName());
    ObjectInspector oi = field.getFieldObjectInspector();
    partValues[i] = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi).convert(value);
  }
  return partValues;
}
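A minimal sketch of the same string-to-typed-value conversion outside of FetchOperator, assuming a hypothetical two-column partition spec (date ds, int hr) in place of PartitionDesc.getPartSpec(); the PartValueSketch class is illustrative, not part of Hive.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartValueSketch {
  public static void main(String[] args) {
    // Partition specs always carry string values, whatever the declared column type.
    Map<String, String> partSpec = new LinkedHashMap<>();
    partSpec.put("ds", "2024-01-01");
    partSpec.put("hr", "7");

    List<String> names = Arrays.asList("ds", "hr");
    List<ObjectInspector> ois = Arrays.asList(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.dateTypeInfo),
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.intTypeInfo));

    Object[] partValues = new Object[names.size()];
    for (int i = 0; i < partValues.length; i++) {
      // Same converter pattern as createPartValue: string in, typed writable out.
      partValues[i] = ObjectInspectorConverters.getConverter(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, ois.get(i))
          .convert(partSpec.get(names.get(i)));
    }
    System.out.println(Arrays.toString(partValues)); // e.g. [2024-01-01, 7]
  }
}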
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
the class SparkDynamicPartitionPruner method prunePartitionSingleSource.
private void prunePartitionSingleSource(SourceInfo info, MapWork work) throws HiveException {
  Set<Object> values = info.values;
  String columnName = info.columnName;
  ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
      TypeInfoFactory.getPrimitiveTypeInfo(info.fieldInspector.getTypeName()));
  ObjectInspectorConverters.Converter converter = ObjectInspectorConverters.getConverter(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
  StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
      Collections.singletonList(columnName), Collections.singletonList(oi));
  @SuppressWarnings("rawtypes")
  ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(info.partKey);
  eval.initialize(soi);
  applyFilterToPartitions(work, converter, eval, columnName, values);
}
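The struct inspector built here has a single field, so the partition-key expression is evaluated against one-column rows. A minimal sketch of that evaluator setup, substituting a bare column reference for info.partKey; the SingleColumnEval class and column name are hypothetical.

import java.util.Collections;

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class SingleColumnEval {
  public static void main(String[] args) throws Exception {
    String columnName = "part_col";
    ObjectInspector oi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // One-column struct inspector, mirroring the singletonList calls above.
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
        Collections.singletonList(columnName), Collections.singletonList(oi));
    // A bare column reference stands in for info.partKey.
    ExprNodeColumnDesc partKey =
        new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName, null, true);
    @SuppressWarnings("rawtypes")
    ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(partKey);
    eval.initialize(soi);
    // Standard struct rows may be Object arrays; evaluate one single-field row.
    System.out.println(eval.evaluate(new Object[] {"2024-01-01"}));
  }
}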
use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
the class VectorizedRowBatchCtx method getPartitionValues.
public static void getPartitionValues(VectorizedRowBatchCtx vrbCtx, PartitionDesc partDesc, Object[] partitionValues) {
  LinkedHashMap<String, String> partSpec = partDesc.getPartSpec();
  for (int i = 0; i < vrbCtx.partitionColumnCount; i++) {
    Object objectValue;
    if (partSpec == null) {
      // For a partition-less table, initialize the partition value to null.
      // We can have a partition-less table even if we have partition keys
      // when there is only one partition selected and the partition key is not
      // part of the projection/include list.
      objectValue = null;
    } else {
      String key = vrbCtx.rowColumnNames[vrbCtx.dataColumnCount + i];
      // Create a standard Java object inspector for the partition column type.
      TypeInfo partColTypeInfo = vrbCtx.rowColumnTypeInfos[vrbCtx.dataColumnCount + i];
      ObjectInspector objectInspector = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(partColTypeInfo);
      objectValue = ObjectInspectorConverters.getConverter(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector, objectInspector).convert(partSpec.get(key));
      if (partColTypeInfo instanceof CharTypeInfo) {
        objectValue = ((HiveChar) objectValue).getStrippedValue();
      }
    }
    partitionValues[i] = objectValue;
  }
}
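A minimal sketch of the CharTypeInfo special case above, assuming a hypothetical char(10) partition column (the CharPartitionValue class and sample value are illustrative): the converter pads the value to the declared length, and getStrippedValue() removes the trailing blanks.

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class CharPartitionValue {
  public static void main(String[] args) {
    // A char(10) partition column: conversion pads to the declared length.
    TypeInfo charType = TypeInfoUtils.getTypeInfoFromTypeString("char(10)");
    ObjectInspector javaOI =
        TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(charType);

    HiveChar hc = (HiveChar) ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, javaOI).convert("us");

    System.out.println("[" + hc.getValue() + "]");         // padded, e.g. [us        ]
    System.out.println("[" + hc.getStrippedValue() + "]"); // stripped: [us]
  }
}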