Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
The class ListBucketingPrunerUtils, method evaluateOrNode:
private static Boolean evaluateOrNode(final ExprNodeDesc node, final List<String> skewedCols,
    final List<String> cell, final List<List<String>> uniqSkewedValues) throws SemanticException {
  // An OR node must have exactly two children; evaluate each side
  // recursively against the skewed-value cell and combine the results.
  List<ExprNodeDesc> children = ((ExprNodeGenericFuncDesc) node).getChildren();
  if ((children == null) || (children.size() != 2)) {
    throw new SemanticException("GenericUDFOPOr should have 2 ExprNodeDesc. Node name : " + node.getName());
  }
  ExprNodeDesc left = children.get(0);
  ExprNodeDesc right = children.get(1);
  return orBoolOperand(recursiveExpr(left, skewedCols, cell, uniqSkewedValues),
      recursiveExpr(right, skewedCols, cell, uniqSkewedValues));
}
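The helper orBoolOperand is not shown on this page. Judging from the nullable Boolean return type, the pruner works in three-valued logic where null stands for "unknown" (the skewed values cannot decide the predicate). A minimal sketch of such an OR under that assumption; the method name comes from the call above, but this body is illustrative, not the Hive source:

// Sketch of a three-valued OR, assuming null means "unknown".
// true absorbs everything; otherwise any unknown operand makes the
// result unknown; only false OR false yields false.
private static Boolean orBoolOperand(Boolean left, Boolean right) {
  if (Boolean.TRUE.equals(left) || Boolean.TRUE.equals(right)) {
    return Boolean.TRUE;
  }
  if (left == null || right == null) {
    return null;
  }
  return Boolean.FALSE;
}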
Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
The class ListBucketingPrunerUtils, method evaluateAndNode:
private static Boolean evaluateAndNode(final ExprNodeDesc node, final List<String> skewedCols,
    final List<String> cell, final List<List<String>> uniqSkewedValues) throws SemanticException {
  // An AND node must have exactly two children; evaluate each side
  // recursively against the skewed-value cell and combine the results.
  List<ExprNodeDesc> children = ((ExprNodeGenericFuncDesc) node).getChildren();
  if ((children == null) || (children.size() != 2)) {
    throw new SemanticException("GenericUDFOPAnd should have 2 ExprNodeDesc. Node name : " + node.getName());
  }
  ExprNodeDesc left = children.get(0);
  ExprNodeDesc right = children.get(1);
  return andBoolOperand(recursiveExpr(left, skewedCols, cell, uniqSkewedValues),
      recursiveExpr(right, skewedCols, cell, uniqSkewedValues));
}
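The same null-as-unknown assumption gives a sketch of the AND helper, where false is absorbing and unknown propagates otherwise (again illustrative, not the Hive source):

// Sketch of a three-valued AND, assuming null means "unknown".
private static Boolean andBoolOperand(Boolean left, Boolean right) {
  if (Boolean.FALSE.equals(left) || Boolean.FALSE.equals(right)) {
    return Boolean.FALSE;
  }
  if (left == null || right == null) {
    return null;
  }
  return Boolean.TRUE;
}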
Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
The class ListBucketingPrunerUtils, method evaluateNotNode:
private static Boolean evaluateNotNode(final ExprNodeDesc node, final List<String> skewedCols,
    final List<String> cell, final List<List<String>> uniqSkewedValues) throws SemanticException {
  // A NOT node has exactly one child; negate its recursive evaluation.
  List<ExprNodeDesc> children = ((ExprNodeGenericFuncDesc) node).getChildren();
  if ((children == null) || (children.size() != 1)) {
    throw new SemanticException("GenericUDFOPNot should have 1 ExprNodeDesc. Node name : " + node.getName());
  }
  ExprNodeDesc child = children.get(0);
  return notBoolOperand(recursiveExpr(child, skewedCols, cell, uniqSkewedValues));
}
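NOT simply flips a known value and leaves unknown alone; a sketch under the same assumption:

// Sketch of a three-valued NOT, assuming null means "unknown".
private static Boolean notBoolOperand(Boolean operand) {
  if (operand == null) {
    return null;
  }
  return operand.booleanValue() ? Boolean.FALSE : Boolean.TRUE;
}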
Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
The class AbstractJoinTaskDispatcher, method dispatch:
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
  if (nodeOutputs == null || nodeOutputs.length == 0) {
    throw new SemanticException("No Dispatch Context");
  }
  TaskGraphWalkerContext walkerCtx = (TaskGraphWalkerContext) nodeOutputs[0];
  Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
  // only map-reduce tasks are of interest; anything else falls through and is skipped
  if (currTask.isMapRedTask()) {
    if (currTask instanceof ConditionalTask) {
      // process each map-reduce task listed under the conditional task
      List<Task<? extends Serializable>> taskList = ((ConditionalTask) currTask).getListTasks();
      for (Task<? extends Serializable> tsk : taskList) {
        if (tsk.isMapRedTask()) {
          Task<? extends Serializable> newTask =
              this.processCurrentTask((MapRedTask) tsk, ((ConditionalTask) currTask), physicalContext.getContext());
          walkerCtx.addToDispatchList(newTask);
        }
      }
    } else {
      Task<? extends Serializable> newTask =
          this.processCurrentTask((MapRedTask) currTask, null, physicalContext.getContext());
      walkerCtx.addToDispatchList(newTask);
    }
  }
  return null;
}
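A dispatcher like this is driven by a task-graph walker; Hive's physical resolvers (CommonJoinResolver, for example) follow roughly the pattern below. The sketch assumes a PhysicalContext named pctx is in scope; the walker supplies its TaskGraphWalkerContext as the first element of nodeOutputs, which is why dispatch() casts nodeOutputs[0] above.

// Wire a concrete AbstractJoinTaskDispatcher into a TaskGraphWalker
// and walk the plan starting from its root tasks.
Dispatcher disp = new CommonJoinTaskDispatcher(pctx);
TaskGraphWalker walker = new TaskGraphWalker(disp);
ArrayList<Node> topNodes = new ArrayList<Node>();
topNodes.addAll(pctx.getRootTasks());
walker.startWalking(topNodes, null);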
Use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
The class AbstractJoinTaskDispatcher, method getTotalKnownInputSize:
public long getTotalKnownInputSize(Context context, MapWork currWork,
    Map<Path, ArrayList<String>> pathToAliases, HashMap<String, Long> aliasToSize) throws SemanticException {
  try {
    // Go over all the input paths and calculate a known total size,
    // plus a known size for each input alias.
    // The return value is deliberately discarded: the call's useful
    // side effect is caching a ContentSummary per input path in the
    // context, which context.getCS(path) retrieves below.
    Utilities.getInputSummary(context, currWork, null).getLength();
    // Build the alias-to-size mapping. If one table is chosen as the
    // big table, this tells us the total size of the remaining tables,
    // which become the small tables.
    long aliasTotalKnownInputSize = 0L;
    for (Map.Entry<Path, ArrayList<String>> entry : pathToAliases.entrySet()) {
      Path path = entry.getKey();
      List<String> aliasList = entry.getValue();
      ContentSummary cs = context.getCS(path);
      if (cs != null) {
        long size = cs.getLength();
        for (String alias : aliasList) {
          aliasTotalKnownInputSize += size;
          Long es = aliasToSize.get(alias);
          if (es == null) {
            es = 0L;
          }
          es += size;
          aliasToSize.put(alias, es);
        }
      }
    }
    return aliasTotalKnownInputSize;
  } catch (Exception e) {
    e.printStackTrace();
    throw new SemanticException("Generate Map Join Task Error: " + e.getMessage());
  }
}
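The per-alias accumulation above (get, default to zero, add, put) can be written more compactly on Java 8+ with Map.merge; a behavior-equivalent alternative for the inner loop, assuming the same variables:

// Same accumulation as above: add this path's size to every alias
// that reads the path, creating the map entry on first sight.
for (String alias : aliasList) {
  aliasTotalKnownInputSize += size;
  aliasToSize.merge(alias, size, Long::sum);
}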