Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache:
the class HiveOpConverter, method genReduceSinkAndBacktrackSelect.
private static SelectOperator genReduceSinkAndBacktrackSelect(Operator<?> input,
    ExprNodeDesc[] keys, int tag, ArrayList<ExprNodeDesc> partitionCols, String order,
    String nullOrder, int numReducers, Operation acidOperation, HiveConf hiveConf,
    List<String> keepColNames) throws SemanticException {
  // 1. Generate RS operator
  // 1.1 Prune the tableNames, only counting the tableNames that are not empty strings,
  // since an empty string in a table alias is only allowed for virtual columns.
  String tableAlias = null;
  Set<String> tableNames = input.getSchema().getTableNames();
  for (String tableName : tableNames) {
    if (tableName != null) {
      if (tableName.length() == 0) {
        if (tableAlias == null) {
          tableAlias = tableName;
        }
      } else {
        if (tableAlias == null || tableAlias.length() == 0) {
          tableAlias = tableName;
        } else {
          if (!tableName.equals(tableAlias)) {
            throw new SemanticException("In CBO return path, genReduceSinkAndBacktrackSelect "
                + "is expecting only one tableAlias but there is more than one");
          }
        }
      }
    }
  }
  if (tableAlias == null) {
    throw new SemanticException("In CBO return path, genReduceSinkAndBacktrackSelect "
        + "is expecting only one tableAlias but there is none");
  }
  // 1.2 Now generate RS operator
  ReduceSinkOperator rsOp = genReduceSink(input, tableAlias, keys, tag, partitionCols,
      order, nullOrder, numReducers, acidOperation, hiveConf);
  // 2. Generate backtrack Select operator
  Map<String, ExprNodeDesc> descriptors = buildBacktrackFromReduceSink(keepColNames,
      rsOp.getConf().getOutputKeyColumnNames(), rsOp.getConf().getOutputValueColumnNames(),
      rsOp.getValueIndex(), input);
  SelectDesc selectDesc = new SelectDesc(new ArrayList<ExprNodeDesc>(descriptors.values()),
      new ArrayList<String>(descriptors.keySet()));
  ArrayList<ColumnInfo> cinfoLst = createColInfosSubset(input, keepColNames);
  SelectOperator selectOp = (SelectOperator) OperatorFactory.getAndMakeChild(selectDesc,
      new RowSchema(cinfoLst), rsOp);
  selectOp.setColumnExprMap(descriptors);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Generated " + selectOp + " with row schema: [" + selectOp.getSchema() + "]");
  }
  return selectOp;
}
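For orientation, a caller on the CBO return path might invoke this helper roughly as follows. Everything in the sketch is an illustrative assumption (the variable names, the per-key order strings, and the non-ACID operation are not taken from the Hive source):

// Hypothetical call site (sketch only; joinInput, joinKeys, partCols, conf and
// keepCols are illustrative placeholders, not names from the Hive code base):
SelectOperator backtracked = genReduceSinkAndBacktrackSelect(
    joinInput,            // upstream operator whose columns are needed downstream
    joinKeys,             // ExprNodeDesc[] used as the ReduceSink keys
    0,                    // tag identifying this input branch
    partCols,             // partition columns for the shuffle
    "++",                 // assumed Hive convention: one '+'/'-' per key, here both ascending
    "aa",                 // assumed Hive convention: 'a' = nulls first, 'z' = nulls last
    -1,                   // -1 lets Hive infer the number of reducers
    Operation.NOT_ACID,   // assumed non-ACID write semantics
    conf,                 // HiveConf
    keepCols);            // column names the backtrack SELECT must re-expose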
Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache:
the class ListBucketingPrunerUtils, method evaluateOrNode.
private static Boolean evaluateOrNode(final ExprNodeDesc node, final List<String> skewedCols,
    final List<String> cell, final List<List<String>> uniqSkewedValues)
    throws SemanticException {
  List<ExprNodeDesc> children = ((ExprNodeGenericFuncDesc) node).getChildren();
  if ((children == null) || (children.size() != 2)) {
    throw new SemanticException("GenericUDFOPOr should have 2 ExprNodeDesc. Node name : "
        + node.getName());
  }
  ExprNodeDesc left = children.get(0);
  ExprNodeDesc right = children.get(1);
  return orBoolOperand(recursiveExpr(left, skewedCols, cell, uniqSkewedValues),
      recursiveExpr(right, skewedCols, cell, uniqSkewedValues));
}
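Here recursiveExpr evaluates each child against the skewed-column cell, and orBoolOperand (not shown) combines the two results under three-valued logic, where a null Boolean means "unknown": the pruner cannot decide the predicate from the skewed values alone. A minimal sketch of such an OR combiner, assuming Kleene semantics rather than quoting the actual Hive implementation:

// Sketch of a three-valued OR, where null means "unknown"
// (assumption: the real orBoolOperand follows Kleene semantics):
private static Boolean orBoolOperand(Boolean left, Boolean right) {
  if (Boolean.TRUE.equals(left) || Boolean.TRUE.equals(right)) {
    return Boolean.TRUE;   // true OR anything is true
  }
  if (left == null || right == null) {
    return null;           // unknown OR false stays unknown
  }
  return Boolean.FALSE;    // both sides are known false
}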
Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache:
the class ListBucketingPrunerUtils, method evaluateAndNode.
private static Boolean evaluateAndNode(final ExprNodeDesc node, final List<String> skewedCols,
    final List<String> cell, final List<List<String>> uniqSkewedValues)
    throws SemanticException {
  List<ExprNodeDesc> children = ((ExprNodeGenericFuncDesc) node).getChildren();
  if ((children == null) || (children.size() != 2)) {
    throw new SemanticException("GenericUDFOPAnd should have 2 ExprNodeDesc. Node name : "
        + node.getName());
  }
  ExprNodeDesc left = children.get(0);
  ExprNodeDesc right = children.get(1);
  return andBoolOperand(recursiveExpr(left, skewedCols, cell, uniqSkewedValues),
      recursiveExpr(right, skewedCols, cell, uniqSkewedValues));
}
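The AND case mirrors the OR case above; under the same hedged Kleene-semantics assumption, the operand combiner could look like this:

// Sketch of the matching three-valued AND (same assumption as the OR sketch):
private static Boolean andBoolOperand(Boolean left, Boolean right) {
  if (Boolean.FALSE.equals(left) || Boolean.FALSE.equals(right)) {
    return Boolean.FALSE;  // false AND anything is false
  }
  if (left == null || right == null) {
    return null;           // unknown AND true stays unknown
  }
  return Boolean.TRUE;     // both sides are known true
}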
Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache:
the class ListBucketingPrunerUtils, method evaluateNotNode.
private static Boolean evaluateNotNode(final ExprNodeDesc node, final List<String> skewedCols,
    final List<String> cell, final List<List<String>> uniqSkewedValues)
    throws SemanticException {
  List<ExprNodeDesc> children = ((ExprNodeGenericFuncDesc) node).getChildren();
  if ((children == null) || (children.size() != 1)) {
    throw new SemanticException("GenericUDFOPNot should have 1 ExprNodeDesc. Node name : "
        + node.getName());
  }
  ExprNodeDesc child = children.get(0);
  return notBoolOperand(recursiveExpr(child, skewedCols, cell, uniqSkewedValues));
}
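Completing the set, negation under three-valued logic simply preserves "unknown"; again a sketch under the same assumption, not the actual Hive code:

// Sketch of a three-valued NOT:
private static Boolean notBoolOperand(Boolean operand) {
  if (operand == null) {
    return null;           // NOT unknown is still unknown
  }
  return operand ? Boolean.FALSE : Boolean.TRUE;
}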
Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache:
the class AbstractJoinTaskDispatcher, method dispatch.
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
    throws SemanticException {
  if (nodeOutputs == null || nodeOutputs.length == 0) {
    throw new SemanticException("No Dispatch Context");
  }
  TaskGraphWalkerContext walkerCtx = (TaskGraphWalkerContext) nodeOutputs[0];
  Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
  // skip anything that is neither a map-reduce task nor a conditional task
  if (currTask.isMapRedTask()) {
    if (currTask instanceof ConditionalTask) {
      // get the list of tasks
      List<Task<? extends Serializable>> taskList = ((ConditionalTask) currTask).getListTasks();
      for (Task<? extends Serializable> tsk : taskList) {
        if (tsk.isMapRedTask()) {
          Task<? extends Serializable> newTask = this.processCurrentTask((MapRedTask) tsk,
              ((ConditionalTask) currTask), physicalContext.getContext());
          walkerCtx.addToDispatchList(newTask);
        }
      }
    } else {
      Task<? extends Serializable> newTask = this.processCurrentTask((MapRedTask) currTask,
          null, physicalContext.getContext());
      walkerCtx.addToDispatchList(newTask);
    }
  }
  return null;
}
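This method is a callback in Hive's graph-walker framework: the walker visits each task node and hands it to the dispatcher, which here rewrites eligible map-reduce join tasks and queues the results via walkerCtx. For reference, the contract being overridden has this shape, inferred from the signature above (in Hive it lives in org.apache.hadoop.hive.ql.lib):

// Shape of the dispatcher callback, inferred from the overridden signature above;
// Node, Stack, and SemanticException are the same types used in the snippet.
public interface Dispatcher {
  // Called once per visited node; here nodeOutputs carries the
  // TaskGraphWalkerContext in its first slot, as dispatch above expects.
  Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException;
}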