Usage of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive (Apache). Example 1: class PartExprEvalUtils, method prepareExpr.
/**
 * Builds the machinery needed to evaluate a partition-pruning expression:
 * a struct object inspector over the partition columns and an initialized
 * expression evaluator.
 *
 * @param expr                the partition filter expression to compile
 * @param partColumnNames     names of the partition columns, in order
 * @param partColumnTypeInfos primitive type info for each partition column
 * @return a pair of (result object inspector, initialized evaluator)
 * @throws HiveException if evaluator initialization fails
 */
public static synchronized ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> prepareExpr(ExprNodeGenericFuncDesc expr, List<String> partColumnNames, List<PrimitiveTypeInfo> partColumnTypeInfos) throws HiveException {
  // One Java-primitive object inspector per partition column.
  List<ObjectInspector> columnInspectors = new ArrayList<ObjectInspector>();
  for (int col = 0; col < partColumnNames.size(); col++) {
    columnInspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(partColumnTypeInfos.get(col)));
  }
  // Row shape: a struct of the partition columns.
  StructObjectInspector rowInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partColumnNames, columnInspectors);
  // Compile the expression and bind it to that row shape.
  ExprNodeEvaluator exprEvaluator = ExprNodeEvaluatorFactory.get(expr);
  ObjectInspector resultInspector = exprEvaluator.initialize(rowInspector);
  return ObjectPair.create((PrimitiveObjectInspector) resultInspector, exprEvaluator);
}
use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class GenericUDFUtils method deriveInType.
// Based on update() above.
/**
 * Derives a single common type from the types of the given child
 * expressions, ignoring VOID (NULL-literal) children.
 *
 * @param children expression nodes whose types are to be reconciled
 * @return the common type, or {@code null} if no common type exists
 */
public static TypeInfo deriveInType(List<ExprNodeDesc> children) {
  TypeInfo derived = null;
  for (ExprNodeDesc child : children) {
    TypeInfo childType = child.getTypeInfo();
    // VOID children (NULL literals) carry no type information; skip them.
    boolean isVoid = childType.getCategory() == Category.PRIMITIVE
        && ((PrimitiveTypeInfo) childType).getPrimitiveCategory() == PrimitiveCategory.VOID;
    if (isVoid) {
      continue;
    }
    if (derived == null) {
      // First non-void child seeds the running type.
      derived = childType;
    } else if (derived != childType) {
      // Reconcile the running type with this child's type.
      TypeInfo common = FunctionRegistry.getCommonClass(derived, childType);
      if (common == null) {
        // Irreconcilable types: no common input type exists.
        return null;
      }
      derived = updateCommonTypeForDecimal(common, childType, derived);
    }
  }
  return derived;
}
use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class TypedBytesSerDe method deserializeField.
/**
 * Deserializes a single field from a typed-bytes stream into a Hadoop
 * Writable, reusing {@code reuse} when it is non-null and of the right
 * Writable class. Only primitive categories are supported; complex
 * categories (LIST/MAP/STRUCT) throw.
 *
 * NOTE: {@code in.readType()} consumes the type marker from the stream,
 * so the subsequent read* call must match it; do not reorder these reads.
 *
 * @param in    the typed-bytes input positioned at the field's type marker
 * @param type  the expected Hive type of the field
 * @param reuse a Writable of the matching class to fill in, or null to allocate
 * @return the populated Writable, or null for a recorded null / VOID field
 * @throws IOException if the underlying stream read fails
 */
static Object deserializeField(TypedBytesWritableInput in, TypeInfo type, Object reuse) throws IOException {
// read the type
Class<? extends Writable> writableType = in.readType();
// NOTE(review): this tests writableType.isAssignableFrom(NullWritable.class),
// i.e. "NullWritable is-a writableType" — since NullWritable is final, this
// only matches writableType == NullWritable.class (or Writable/Object supertypes).
// Presumably the intent is an exact NullWritable marker; confirm against
// TypedBytesWritableInput.readType()'s possible return values before changing.
if (writableType != null && writableType.isAssignableFrom(NullWritable.class)) {
// indicates that the recorded value is null
return null;
}
switch(type.getCategory()) {
case PRIMITIVE:
{
PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
// Each case allocates a fresh Writable only when no reusable one was
// passed in, then delegates the actual byte decoding to the matching
// TypedBytesWritableInput.readX(...) method.
switch(ptype.getPrimitiveCategory()) {
case VOID:
{
return null;
}
case BOOLEAN:
{
BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
r = in.readBoolean(r);
return r;
}
case BYTE:
{
ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
r = in.readByte(r);
return r;
}
case SHORT:
{
ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
r = in.readShort(r);
return r;
}
case INT:
{
IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
r = in.readInt(r);
return r;
}
case LONG:
{
LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
r = in.readLong(r);
return r;
}
case FLOAT:
{
FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
r = in.readFloat(r);
return r;
}
case DOUBLE:
{
DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
r = in.readDouble(r);
return r;
}
case STRING:
{
Text r = reuse == null ? new Text() : (Text) reuse;
r = in.readText(r);
return r;
}
default:
{
// DATE/TIMESTAMP/DECIMAL/BINARY etc. are not handled here.
throw new RuntimeException("Unrecognized type: " + ptype.getPrimitiveCategory());
}
}
}
// Currently, deserialization of complex types is not supported
case LIST:
case MAP:
case STRUCT:
default:
{
throw new RuntimeException("Unsupported category: " + type.getCategory());
}
}
}
use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class TestDruidSerDe method serializeObject.
/**
 * Serializes {@code rowObject} through the Druid SerDe using an object
 * inspector built from the table properties plus the appended timestamp
 * granularity column, then asserts the result matches the expected
 * DRUID_WRITABLE entry by entry.
 */
private static void serializeObject(Properties properties, DruidSerDe serDe, Object[] rowObject, DruidWritable druidWritable) throws SerDeException {
  // Build OI with timestamp granularity column
  final List<String> columnNames = new ArrayList<>(Utilities.getColumnNames(properties));
  columnNames.add(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME);
  final List<PrimitiveTypeInfo> columnTypes = new ArrayList<>(
      Lists.transform(Utilities.getColumnTypes(properties), type -> TypeInfoFactory.getPrimitiveTypeInfo(type)));
  columnTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("timestamp"));
  List<ObjectInspector> inspectors = new ArrayList<>(
      Lists.transform(columnTypes, type -> PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type)));
  ObjectInspector inspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
  // Serialize
  DruidWritable writable = (DruidWritable) serDe.serialize(rowObject, inspector);
  // Check result: same entry count, then per-key value equality.
  assertEquals(DRUID_WRITABLE.getValue().size(), writable.getValue().size());
  for (Entry<String, Object> e : DRUID_WRITABLE.getValue().entrySet()) {
    assertEquals(e.getValue(), writable.getValue().get(e.getKey()));
  }
}
use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
the class HBaseStorageHandler method decomposePredicate.
/**
 * Splits a filter predicate into the portion that can be pushed down to
 * HBase (range/equality conditions on the row key or timestamp) and the
 * residual portion Hive must still evaluate.
 *
 * Supported pushdown shapes per column: a single comparison (size 1) or a
 * closed range formed by one lower and one upper bound (size 2). Anything
 * else — including "=" combined with another operator, or mixed column and
 * constant types — is routed to the residual predicate.
 *
 * @param jobConf    job configuration (unused here; kept for interface parity)
 * @param hBaseSerDe provides the key and timestamp column mappings
 * @param predicate  the full filter predicate to decompose
 * @return the decomposed (pushed, residual) predicate pair
 */
public static DecomposedPredicate decomposePredicate(JobConf jobConf, HBaseSerDe hBaseSerDe, ExprNodeDesc predicate) {
ColumnMapping keyMapping = hBaseSerDe.getHBaseSerdeParam().getKeyColumnMapping();
ColumnMapping tsMapping = hBaseSerDe.getHBaseSerdeParam().getTimestampColumnMapping();
// Analyzer accepts conditions on the key column (and timestamp column, if mapped).
IndexPredicateAnalyzer analyzer = HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(keyMapping.columnName, keyMapping.isComparable(), tsMapping == null ? null : tsMapping.columnName);
List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
ExprNodeGenericFuncDesc pushedPredicate = null;
// analyzePredicate fills `conditions` with pushable atoms and returns what's left.
ExprNodeGenericFuncDesc residualPredicate = (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, conditions);
// Conditions are grouped per column; each group is accepted or rejected as a whole.
for (List<IndexSearchCondition> searchConditions : HiveHBaseInputFormatUtil.decompose(conditions).values()) {
int scSize = searchConditions.size();
if (scSize < 1 || 2 < scSize) {
// Either there was nothing which could be pushed down (size = 0),
// there were complex predicates which we don't support yet.
// Currently supported are one of the form:
// 1. key < 20 (size = 1)
// 2. key = 20 (size = 1)
// 3. key < 20 and key > 10 (size = 2)
// Add to residual
residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
continue;
}
if (scSize == 2 && (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName()) || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqual.class.getName()))) {
// If one of the predicates is =, then any other predicate with it is illegal.
// Add to residual
residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
continue;
}
boolean sameType = sameTypeIndexSearchConditions(searchConditions);
if (!sameType) {
// If type for column and constant are different, we currently do not support pushing them
residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
continue;
}
TypeInfo typeInfo = searchConditions.get(0).getColumnDesc().getTypeInfo();
if (typeInfo.getCategory() == Category.PRIMITIVE && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) == PrimitiveGrouping.NUMERIC_GROUP) {
// NOTE(review): original comment truncated to "would be returned." —
// presumably: numeric keys require a fully closed range (or exact
// equality), otherwise wrong rows would be returned by the scan. Confirm.
if (scSize == 2) {
boolean lowerBound = false;
boolean upperBound = false;
// First condition: <= / < sets the range's lower end; anything else is an upper bound.
if (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqualOrLessThan.class.getName()) || searchConditions.get(0).getComparisonOp().equals(GenericUDFOPLessThan.class.getName())) {
lowerBound = true;
} else {
upperBound = true;
}
// Second condition: >= / > sets the range's upper end; anything else is a lower bound.
if (searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqualOrGreaterThan.class.getName()) || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPGreaterThan.class.getName())) {
upperBound = true;
} else {
lowerBound = true;
}
// Both ends must be present for a valid closed range.
if (!upperBound || !lowerBound) {
// Not valid range, add to residual
residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
continue;
}
} else {
// scSize == 1
// A single condition on a numeric key is only pushable if it is equality.
if (!searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())) {
// Not valid range, add to residual
residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
continue;
}
}
}
// This one can be pushed
pushedPredicate = extractStorageHandlerCondition(analyzer, searchConditions, pushedPredicate);
}
DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
decomposedPredicate.pushedPredicate = pushedPredicate;
decomposedPredicate.residualPredicate = residualPredicate;
return decomposedPredicate;
}
Aggregations (end of extracted PrimitiveTypeInfo usage examples).