Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
From the class TestKuduPredicateHandler, the method testMixedPredicates:
@Test
public void testMixedPredicates() throws Exception {
  for (ColumnSchema col : SCHEMA.getColumns()) {
    // Skip the "null" and "default" test columns, and skip binary columns
    // because binary predicates are not supported. (HIVE-11370)
    if (col.getName().equals("null") || col.getName().equals("default") || col.getName().equals("binary")) {
      continue;
    }
    PrimitiveTypeInfo typeInfo = toHiveType(col.getType(), col.getTypeAttributes());
    ExprNodeDesc colExpr = new ExprNodeColumnDesc(typeInfo, col.getName(), null, false);
    ExprNodeDesc constExpr = new ExprNodeConstantDesc(typeInfo, ROW.getObject(col.getName()));
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(colExpr);
    children.add(constExpr);
    ExprNodeGenericFuncDesc supportedPredicateExpr =
        new ExprNodeGenericFuncDesc(typeInfo, new GenericUDFOPEqualOrGreaterThan(), children);
    ExprNodeGenericFuncDesc unsupportedPredicateExpr =
        new ExprNodeGenericFuncDesc(typeInfo, new GenericUDFOPUnsupported(), children);
    List<ExprNodeDesc> andChildren = Lists.newArrayList();
    andChildren.add(supportedPredicateExpr);
    andChildren.add(unsupportedPredicateExpr);
    ExprNodeGenericFuncDesc andPredicateExpr =
        new ExprNodeGenericFuncDesc(typeInfo, new GenericUDFOPAnd(), andChildren);
    // Verify KuduPredicateHandler.decompose splits the conjunction into a
    // pushed predicate and a residual predicate.
    HiveStoragePredicateHandler.DecomposedPredicate decompose =
        KuduPredicateHandler.decompose(andPredicateExpr, SCHEMA);
    assertNotNull(decompose);
    assertNotNull(decompose.pushedPredicate);
    assertNotNull(decompose.residualPredicate);
    List<KuduPredicate> predicates = expressionToPredicates(decompose.pushedPredicate);
    assertEquals(1, predicates.size());
    scanWithPredicates(predicates);
  }
}
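
To make the decomposition concrete, here is a minimal sketch of follow-up assertions one could add; these are not in the original test and assume decompose hands the two conjuncts back unchanged:

// Hypothetical follow-up assertions, assuming the variables above are in scope.
ExprNodeGenericFuncDesc pushed = decompose.pushedPredicate;
ExprNodeGenericFuncDesc residual = decompose.residualPredicate;
// The supported >= comparison is expected to be pushed down to Kudu...
assertTrue(pushed.getGenericUDF() instanceof GenericUDFOPEqualOrGreaterThan);
// ...while the UDF Kudu cannot evaluate stays behind as the residual.
assertTrue(residual.getGenericUDF() instanceof GenericUDFOPUnsupported);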
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
From the class TestKuduPredicateHandler, the method testNotComparisonPredicates:
@Test
public void testNotComparisonPredicates() throws Exception {
  for (ColumnSchema col : SCHEMA.getColumns()) {
    // Skip the "null" and "default" test columns, and skip binary columns
    // because binary predicates are not supported. (HIVE-11370)
    if (col.getName().equals("null") || col.getName().equals("default") || col.getName().equals("binary")) {
      continue;
    }
    PrimitiveTypeInfo typeInfo = toHiveType(col.getType(), col.getTypeAttributes());
    ExprNodeDesc colExpr = new ExprNodeColumnDesc(typeInfo, col.getName(), null, false);
    ExprNodeDesc constExpr = new ExprNodeConstantDesc(typeInfo, ROW.getObject(col.getName()));
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(colExpr);
    children.add(constExpr);
    for (GenericUDF udf : COMPARISON_UDFS) {
      ExprNodeGenericFuncDesc childExpr = new ExprNodeGenericFuncDesc(typeInfo, udf, children);
      List<ExprNodeDesc> notChildren = Lists.newArrayList();
      notChildren.add(childExpr);
      ExprNodeGenericFuncDesc predicateExpr =
          new ExprNodeGenericFuncDesc(typeInfo, new GenericUDFOPNot(), notChildren);
      // Verify KuduPredicateHandler.decompose. NOT expressions are not decomposed;
      // see the note in KuduPredicateHandler.newAnalyzer.
      HiveStoragePredicateHandler.DecomposedPredicate decompose =
          KuduPredicateHandler.decompose(predicateExpr, SCHEMA);
      assertNull(decompose);
      List<KuduPredicate> predicates = expressionToPredicates(predicateExpr);
      if (udf instanceof GenericUDFOPEqual) {
        // NOT(=) is !=, and Kudu doesn't support != predicates.
        assertTrue(predicates.isEmpty());
      } else {
        assertEquals(1, predicates.size());
        scanWithPredicates(predicates);
      }
    }
  }
}
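
For reference, a hedged reconstruction of the COMPARISON_UDFS fixture iterated above; the real list is defined elsewhere in TestKuduPredicateHandler, so the exact contents here are an assumption consistent with the assertions in the test:

// Hypothetical reconstruction of the COMPARISON_UDFS fixture.
private static final List<GenericUDF> COMPARISON_UDFS = Arrays.asList(
    new GenericUDFOPEqual(),               // =,  NOT(=) is !=, which Kudu has no predicate for
    new GenericUDFOPLessThan(),            // <,  NOT(<) is >=
    new GenericUDFOPEqualOrLessThan(),     // <=, NOT(<=) is >
    new GenericUDFOPGreaterThan(),         // >,  NOT(>) is <=
    new GenericUDFOPEqualOrGreaterThan()); // >=, NOT(>=) is <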
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
From the class MsckAnalyzer, the method analyzeInternal:
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() == 0) {
    throw new SemanticException("MSCK command must have arguments");
  }
  ctx.setResFile(ctx.getLocalTmpPath());
  boolean repair = root.getChild(0).getType() == HiveParser.KW_REPAIR;
  int offset = repair ? 1 : 0;
  String tableName = getUnescapedName((ASTNode) root.getChild(0 + offset));
  boolean addPartitions = true;
  boolean dropPartitions = false;
  if (root.getChildCount() > 1 + offset) {
    addPartitions = isMsckAddPartition(root.getChild(1 + offset).getType());
    dropPartitions = isMsckDropPartition(root.getChild(1 + offset).getType());
  }
  Table table = getTable(tableName);
  Map<Integer, List<ExprNodeGenericFuncDesc>> partitionSpecs =
      ParseUtils.getFullPartitionSpecs(root, table, conf, false);
  byte[] filterExp = null;
  if (partitionSpecs != null && !partitionSpecs.isEmpty()) {
    // The expression proxy class needs to be PartitionExpressionForMetastore since we intend
    // to use its filterPartitionsByExpr for partition pruning down the line.
    // Bail out early if expressionProxyClass is not configured properly.
    String expressionProxyClass = conf.get(MetastoreConf.ConfVars.EXPRESSION_PROXY_CLASS.getVarname());
    if (!PartitionExpressionForMetastore.class.getCanonicalName().equals(expressionProxyClass)) {
      throw new SemanticException("Invalid expression proxy class. The config metastore.expression.proxy needs "
          + "to be set to org.apache.hadoop.hive.ql.optimizer.ppr.PartitionExpressionForMetastore");
    }
    // Fetch the first value of the partitionSpecs map, since it will always have exactly one key/value pair.
    filterExp = SerializationUtilities.serializeExpressionToKryo(
        (ExprNodeGenericFuncDesc) ((List) partitionSpecs.values().toArray()[0]).get(0));
  }
  if (repair && AcidUtils.isTransactionalTable(table)) {
    outputs.add(new WriteEntity(table, WriteType.DDL_EXCLUSIVE));
  } else {
    outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_SHARED));
  }
  MsckDesc desc = new MsckDesc(tableName, filterExp, ctx.getResFile(), repair, addPartitions, dropPartitions);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
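
Because of the bail-out above, metastore.expression.proxy must be set before MSCK with a partition filter will pass analysis. A minimal sketch of the corresponding configuration, using the same constants the analyzer reads:

// Sketch: configure the expression proxy class the analyzer insists on.
HiveConf conf = new HiveConf();
conf.set(MetastoreConf.ConfVars.EXPRESSION_PROXY_CLASS.getVarname(),
    PartitionExpressionForMetastore.class.getCanonicalName());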
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
From the class ShowPartitionAnalyzer, the method replaceDefaultPartNameAndCastType:
private ExprNodeDesc replaceDefaultPartNameAndCastType(ExprNodeDesc nodeDesc,
    Map<String, String> colTypes, String defaultPartName) throws SemanticException {
  if (!(nodeDesc instanceof ExprNodeGenericFuncDesc)) {
    return nodeDesc;
  }
  ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) nodeDesc;
  // Recurse into AND/OR nodes and rewrite each branch.
  if (FunctionRegistry.isOpAnd(funcDesc) || FunctionRegistry.isOpOr(funcDesc)) {
    List<ExprNodeDesc> newChildren = new ArrayList<ExprNodeDesc>();
    for (ExprNodeDesc child : funcDesc.getChildren()) {
      newChildren.add(replaceDefaultPartNameAndCastType(child, colTypes, defaultPartName));
    }
    funcDesc.setChildren(newChildren);
    return funcDesc;
  }
  // Locate the column and constant operands, fixing the column's type info if it
  // doesn't match the declared partition column type.
  List<ExprNodeDesc> children = funcDesc.getChildren();
  int colIdx = -1, constIdx = -1;
  for (int i = 0; i < children.size(); i++) {
    ExprNodeDesc child = children.get(i);
    if (child instanceof ExprNodeColumnDesc) {
      String col = ((ExprNodeColumnDesc) child).getColumn().toLowerCase();
      String type = colTypes.get(col);
      if (!type.equals(child.getTypeString())) {
        child.setTypeInfo(TypeInfoFactory.getPrimitiveTypeInfo(type));
      }
      colIdx = i;
    } else if (child instanceof ExprNodeConstantDesc) {
      constIdx = i;
    }
  }
  if (funcDesc.getGenericUDF() instanceof GenericUDFBaseCompare && children.size() == 2
      && colIdx > -1 && constIdx > -1) {
    ExprNodeConstantDesc constantDesc = (ExprNodeConstantDesc) children.get(constIdx);
    ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) children.get(colIdx);
    Object val = constantDesc.getValue();
    boolean isDefaultPartitionName = defaultPartName.equals(val);
    String type = colTypes.get(columnDesc.getColumn().toLowerCase());
    PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
    if (!isDefaultPartitionName) {
      // Cast the constant to the partition column's type if they differ.
      if (!constantDesc.getTypeString().equals(type)) {
        Object converted = ObjectInspectorConverters.getConverter(
            TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(constantDesc.getTypeInfo()),
            TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti)).convert(val);
        if (converted == null) {
          throw new SemanticException("Cannot convert to " + type + " from "
              + constantDesc.getTypeString() + ", value: " + val);
        }
        ExprNodeConstantDesc newConstantDesc = new ExprNodeConstantDesc(pti, converted);
        children.set(constIdx, newConstantDesc);
      }
    } else {
      // Comparisons against the default partition name become null checks.
      GenericUDF originalOp = funcDesc.getGenericUDF();
      String fnName;
      if (FunctionRegistry.isEq(originalOp)) {
        fnName = "isnull";
      } else if (FunctionRegistry.isNeq(originalOp)) {
        fnName = "isnotnull";
      } else {
        throw new SemanticException("Only '=' and '!=' are allowed for the default partition, function: "
            + originalOp.getUdfName());
      }
      funcDesc = PartitionUtils.makeUnaryPredicate(fnName, columnDesc);
    }
  }
  return funcDesc;
}
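
To illustrate the rewrite, a minimal sketch of an input this method would transform, assuming a string partition column named p; the column name and types here are illustrative, not taken from the source:

// 'p = <defaultPartName>' built by hand; per the fnName mapping above, the
// method is expected to rewrite it into the unary predicate isnull(p).
ExprNodeColumnDesc pCol = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "p", null, true);
ExprNodeConstantDesc defVal = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, defaultPartName);
ExprNodeGenericFuncDesc eq = new ExprNodeGenericFuncDesc(
    TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(), Lists.newArrayList(pCol, defVal));
ExprNodeDesc rewritten = replaceDefaultPartNameAndCastType(
    eq, Collections.singletonMap("p", "string"), defaultPartName);
// rewritten is now isnull(p); a != comparison would instead yield isnotnull(p).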
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
From the class VectorSelectOperatorBench, the method setup:
@Setup
public void setup(Blackhole bh) throws HiveException {
  HiveConf hconf = new HiveConf();
  List<String> columns = new ArrayList<String>();
  columns.add("a");
  columns.add("b");
  columns.add("c");
  VectorizationContext vc = new VectorizationContext("name", columns);
  selDesc = new SelectDesc(false);
  List<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
  ExprNodeColumnDesc colDesc1 = new ExprNodeColumnDesc(Long.class, "a", "table", false);
  ExprNodeColumnDesc colDesc2 = new ExprNodeColumnDesc(Long.class, "b", "table", false);
  ExprNodeColumnDesc colDesc3 = new ExprNodeColumnDesc(Long.class, "c", "table", false);
  // Build the expression a + b by hand.
  ExprNodeGenericFuncDesc plusDesc = new ExprNodeGenericFuncDesc();
  GenericUDF gudf = new GenericUDFOPPlus();
  plusDesc.setGenericUDF(gudf);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(colDesc1);
  children.add(colDesc2);
  plusDesc.setChildren(children);
  plusDesc.setTypeInfo(TypeInfoFactory.longTypeInfo);
  // Project: SELECT a + b AS _col0, c AS _col1.
  colList.add(plusDesc);
  colList.add(colDesc3);
  selDesc.setColList(colList);
  List<String> outputColNames = new ArrayList<String>();
  outputColNames.add("_col0");
  outputColNames.add("_col1");
  selDesc.setOutputColumnNames(outputColNames);
  VectorSelectDesc vectorSelectDesc = new VectorSelectDesc();
  selDesc.setVectorDesc(vectorSelectDesc);
  // Compile each select expression into its vectorized form.
  List<ExprNodeDesc> selectColList = selDesc.getColList();
  VectorExpression[] vectorSelectExprs = new VectorExpression[selectColList.size()];
  for (int i = 0; i < selectColList.size(); i++) {
    ExprNodeDesc expr = selectColList.get(i);
    VectorExpression ve = vc.getVectorExpression(expr);
    vectorSelectExprs[i] = ve;
  }
  vectorSelectDesc.setSelectExpressions(vectorSelectExprs);
  vectorSelectDesc.setProjectedOutputColumns(new int[] { 3, 2 });
  CompilationOpContext opContext = new CompilationOpContext();
  vso = new VectorSelectOperator(opContext, selDesc, vc, vectorSelectDesc);
  // Add two blackhole children to trigger vectorForward.
  child = new ArrayList<>();
  child.add(new BlackholeOperator(opContext, bh));
  child.add(new BlackholeOperator(opContext, bh));
  vso.initialize(hconf, null);
  vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(VectorizedRowBatch.DEFAULT_SIZE, 4, 17);
}
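
For completeness, a hedged sketch of the kind of measured method this setup feeds; the real @Benchmark bodies live elsewhere in VectorSelectOperatorBench, and this sketch assumes only the vso and vrg fields built above:

// Hypothetical benchmark body consuming the operator and batch from setup().
@Benchmark
public void processBatch() throws HiveException {
  // Push one vectorized batch through the select operator and its blackhole children.
  vso.process(vrg, 0);
}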