Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
The class HBaseStorageHandler, method decomposePredicate.
public static DecomposedPredicate decomposePredicate(JobConf jobConf, HBaseSerDe hBaseSerDe, ExprNodeDesc predicate) {
  ColumnMapping keyMapping = hBaseSerDe.getHBaseSerdeParam().getKeyColumnMapping();
  ColumnMapping tsMapping = hBaseSerDe.getHBaseSerdeParam().getTimestampColumnMapping();
  IndexPredicateAnalyzer analyzer = HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(keyMapping.columnName,
      keyMapping.isComparable(), tsMapping == null ? null : tsMapping.columnName);
  List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
  ExprNodeGenericFuncDesc pushedPredicate = null;
  ExprNodeGenericFuncDesc residualPredicate =
      (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, conditions);
  for (List<IndexSearchCondition> searchConditions : HiveHBaseInputFormatUtil.decompose(conditions).values()) {
    int scSize = searchConditions.size();
    if (scSize < 1 || scSize > 2) {
      // Either there was nothing which could be pushed down (size = 0), or
      // there were complex predicates which we don't support yet.
      // Currently supported are predicates of one of these forms:
      // 1. key < 20 (size = 1)
      // 2. key = 20 (size = 1)
      // 3. key < 20 and key > 10 (size = 2)
      // Add to residual
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    if (scSize == 2 && (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())
        || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqual.class.getName()))) {
      // If one of the predicates is =, then combining any other predicate with it is illegal.
      // Add to residual
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    boolean sameType = sameTypeIndexSearchConditions(searchConditions);
    if (!sameType) {
      // If the types of the column and the constant differ, we currently do not support pushing the predicate
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    TypeInfo typeInfo = searchConditions.get(0).getColumnDesc().getTypeInfo();
    if (typeInfo.getCategory() == Category.PRIMITIVE && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
        ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) == PrimitiveGrouping.NUMERIC_GROUP) {
      // For numeric keys, only a range bounded on both sides (scSize == 2) or an
      // equality (scSize == 1) can be pushed; otherwise incorrect rows would be returned.
      if (scSize == 2) {
        boolean lowerBound = false;
        boolean upperBound = false;
        if (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqualOrLessThan.class.getName())
            || searchConditions.get(0).getComparisonOp().equals(GenericUDFOPLessThan.class.getName())) {
          lowerBound = true;
        } else {
          upperBound = true;
        }
        if (searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqualOrGreaterThan.class.getName())
            || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPGreaterThan.class.getName())) {
          upperBound = true;
        } else {
          lowerBound = true;
        }
        if (!upperBound || !lowerBound) {
          // Not a valid range, add to residual
          residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
          continue;
        }
      } else {
        // scSize == 1
        if (!searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())) {
          // Not a valid range, add to residual
          residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
          continue;
        }
      }
    }
    // This one can be pushed
    pushedPredicate = extractStorageHandlerCondition(analyzer, searchConditions, pushedPredicate);
  }
  DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
  decomposedPredicate.pushedPredicate = pushedPredicate;
  decomposedPredicate.residualPredicate = residualPredicate;
  return decomposedPredicate;
}
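The lowerBound/upperBound bookkeeping above is the easiest part to get wrong, so here is a minimal standalone sketch of just that check. The RangeCheckSketch class and its string constants are hypothetical stand-ins for the GenericUDFOP* class names, not part of Hive:

  public class RangeCheckSketch {
    // Hypothetical stand-ins for the comparison-op class names compared above.
    static final String LT = "GenericUDFOPLessThan";
    static final String LE = "GenericUDFOPEqualOrLessThan";
    static final String GT = "GenericUDFOPGreaterThan";
    static final String GE = "GenericUDFOPEqualOrGreaterThan";

    // Mirrors the flag logic in decomposePredicate: the pair of operators is
    // pushable only when each one closes a different side of the range.
    static boolean isClosedRange(String firstOp, String secondOp) {
      boolean lowerBound = false;
      boolean upperBound = false;
      if (firstOp.equals(LE) || firstOp.equals(LT)) {
        lowerBound = true;
      } else {
        upperBound = true;
      }
      if (secondOp.equals(GE) || secondOp.equals(GT)) {
        upperBound = true;
      } else {
        lowerBound = true;
      }
      return lowerBound && upperBound;
    }

    public static void main(String[] args) {
      System.out.println(isClosedRange(LT, GT)); // true: key < 20 and key > 10
      System.out.println(isClosedRange(LT, LE)); // false: both operators bound the same side
    }
  }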
Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
The class ColumnarStorageBench, method getPrimitiveWritable.
private Writable getPrimitiveWritable(final PrimitiveTypeInfo typeInfo) {
  Random rand = new Random();
  switch (typeInfo.getPrimitiveCategory()) {
    case INT:
      return new IntWritable(rand.nextInt());
    case DOUBLE:
      return new DoubleWritable(rand.nextDouble());
    case BOOLEAN:
      return new BooleanWritable(rand.nextBoolean());
    case CHAR:
    case VARCHAR:
    case STRING:
      byte[] b = new byte[30];
      rand.nextBytes(b);
      return new BytesWritable(b);
    default:
      throw new IllegalArgumentException("Invalid primitive type: " + typeInfo.getTypeName());
  }
}
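As a quick usage sketch, a hypothetical helper in the same ColumnarStorageBench class could exercise each branch of the switch; the PrimitiveTypeInfo constants come from TypeInfoFactory in the same serde2.typeinfo package:

  import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
  import org.apache.hadoop.io.Writable;

  // Hypothetical driver: prints one sample Writable per supported category.
  private void printSampleWritables() {
    PrimitiveTypeInfo[] supported = { TypeInfoFactory.intTypeInfo, TypeInfoFactory.doubleTypeInfo,
        TypeInfoFactory.booleanTypeInfo, TypeInfoFactory.stringTypeInfo };
    for (PrimitiveTypeInfo typeInfo : supported) {
      Writable value = getPrimitiveWritable(typeInfo);
      System.out.println(typeInfo.getTypeName() + " -> " + value.getClass().getSimpleName());
    }
  }

Note that CHAR, VARCHAR, and STRING all map to a 30-byte BytesWritable here; the benchmark only needs representative payloads, not valid character data.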
Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
The class HiveOpConverter, method genInputSelectForUnion.
private Operator<? extends OperatorDesc> genInputSelectForUnion(Operator<? extends OperatorDesc> origInputOp,
    ArrayList<ColumnInfo> uColumnInfo) throws SemanticException {
  Iterator<ColumnInfo> oIter = origInputOp.getSchema().getSignature().iterator();
  Iterator<ColumnInfo> uIter = uColumnInfo.iterator();
  List<ExprNodeDesc> columns = new ArrayList<ExprNodeDesc>();
  List<String> colName = new ArrayList<String>();
  Map<String, ExprNodeDesc> columnExprMap = new HashMap<String, ExprNodeDesc>();
  boolean needSelectOp = false;
  while (oIter.hasNext()) {
    ColumnInfo oInfo = oIter.next();
    ColumnInfo uInfo = uIter.next();
    if (!oInfo.isSameColumnForRR(uInfo)) {
      needSelectOp = true;
    }
    ExprNodeDesc column = new ExprNodeColumnDesc(oInfo.getType(), oInfo.getInternalName(),
        oInfo.getTabAlias(), oInfo.getIsVirtualCol(), oInfo.isSkewedCol());
    if (!oInfo.getType().equals(uInfo.getType())) {
      column = ParseUtils.createConversionCast(column, (PrimitiveTypeInfo) uInfo.getType());
    }
    columns.add(column);
    colName.add(uInfo.getInternalName());
    columnExprMap.put(uInfo.getInternalName(), column);
  }
  if (needSelectOp) {
    return OperatorFactory.getAndMakeChild(new SelectDesc(columns, colName), new RowSchema(uColumnInfo),
        columnExprMap, origInputOp);
  } else {
    return origInputOp;
  }
}
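The step worth isolating is the conversion cast that reconciles a branch's column type with the union schema. A minimal sketch, assuming a hypothetical int column _col0 under alias t that the union schema declares as bigint (widenToBigint is an illustrative helper, not Hive code):

  import org.apache.hadoop.hive.ql.parse.ParseUtils;
  import org.apache.hadoop.hive.ql.parse.SemanticException;
  import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
  import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

  private static ExprNodeDesc widenToBigint() throws SemanticException {
    // One union branch produces an int column ...
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "_col0", "t", false);
    // ... while the union schema says bigint, so wrap the column in a conversion
    // cast, just as genInputSelectForUnion does when the two types differ.
    return ParseUtils.createConversionCast(column, TypeInfoFactory.longTypeInfo);
  }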
Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
The class GenericUDFBaseNwayCompare, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 2) {
    throw new UDFArgumentLengthException(getFuncName() + " requires at least 2 arguments, got "
        + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentException(getFuncName() + " only takes primitive types, got "
        + arguments[0].getTypeName());
  }
  argumentOIs = arguments;
  converters = new Converter[arguments.length];
  TypeInfo commonInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]);
  for (int i = 1; i < arguments.length; i++) {
    PrimitiveTypeInfo currInfo = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[i]);
    commonInfo = FunctionRegistry.getCommonClassForComparison(commonInfo, currInfo);
  }
  resultOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
      (commonInfo == null) ? TypeInfoFactory.doubleTypeInfo : commonInfo);
  for (int i = 0; i < arguments.length; i++) {
    converters[i] = ObjectInspectorConverters.getConverter(arguments[i], resultOI);
  }
  return resultOI;
}
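The loop folds all argument types pairwise through FunctionRegistry.getCommonClassForComparison. A sketch of how that converges for int, bigint, and double arguments; the annotated results are what Hive's numeric comparison rules suggest, not output we have verified:

  import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

  // Fold pairwise, exactly as the initialize() loop above does.
  TypeInfo common = TypeInfoFactory.intTypeInfo;
  common = FunctionRegistry.getCommonClassForComparison(common, TypeInfoFactory.longTypeInfo);   // expected: bigint
  common = FunctionRegistry.getCommonClassForComparison(common, TypeInfoFactory.doubleTypeInfo); // expected: double
  // Every argument then gets a converter to this common type (double here).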
Use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.
The class GenericUDFBaseUnary, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentException(opName + " requires one argument.");
  }
  Category category = arguments[0].getCategory();
  if (category != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1) + " argument of " + opName
        + " is expected to be a " + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
        + category.toString().toLowerCase() + " was found");
  }
  inputOI = (PrimitiveObjectInspector) arguments[0];
  if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())
      && (inputOI.getTypeInfo() != TypeInfoFactory.intervalDayTimeTypeInfo)
      && (inputOI.getTypeInfo() != TypeInfoFactory.intervalYearMonthTypeInfo)) {
    throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1) + " argument of " + opName
        + " is expected to be a numeric or interval type, but " + inputOI.getTypeName() + " was found");
  }
  PrimitiveTypeInfo resultTypeInfo = deriveResultTypeInfo(inputOI.getTypeInfo());
  resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(resultTypeInfo);
  converter = ObjectInspectorConverters.getConverter(inputOI, resultOI);
  return resultOI;
}
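To see this initialize() end to end, here is a hypothetical driver using GenericUDFOPNegative, one of the concrete subclasses; negating an int keeps the int type, so the returned inspector should be the writable int inspector:

  import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
  import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
  import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative;
  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
  import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

  public static void main(String[] args) throws UDFArgumentException {
    GenericUDF negative = new GenericUDFOPNegative();
    // Initialize unary minus with a single int inspector, as the type checks above require.
    ObjectInspector[] inputs = { PrimitiveObjectInspectorFactory.writableIntObjectInspector };
    ObjectInspector result = negative.initialize(inputs);
    System.out.println(result.getTypeName()); // expected: int
  }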