Example usage of org.apache.hadoop.hive.ql.parse.ASTNode in the Apache Hive project: the getPSpec method of the ExprNodeConverter class.
/**
 * Translates the PARTITION BY / ORDER BY portion of a Calcite {@link RexWindow}
 * into Hive's {@code PartitioningSpec} representation.
 *
 * @param window the Calcite window definition to translate
 * @return a PartitioningSpec carrying the partition and/or order expressions
 *         (either part is omitted when the window has no corresponding keys)
 */
private PartitioningSpec getPSpec(RexWindow window) {
  PartitioningSpec result = new PartitioningSpec();
  Schema schema = new Schema(tabAlias, inputRowType.getFieldList());
  if (window.partitionKeys != null && !window.partitionKeys.isEmpty()) {
    PartitionSpec partSpec = new PartitionSpec();
    for (RexNode partKey : window.partitionKeys) {
      PartitionExpression partExpr = new PartitionExpression();
      // Convert the Calcite expression back into a Hive AST fragment.
      partExpr.setExpression(partKey.accept(new RexVisitor(schema)));
      partSpec.addExpression(partExpr);
    }
    result.setPartSpec(partSpec);
  }
  if (window.orderKeys != null && !window.orderKeys.isEmpty()) {
    OrderSpec orderSpec = new OrderSpec();
    for (RexFieldCollation collation : window.orderKeys) {
      boolean ascending = collation.getDirection() == RelFieldCollation.Direction.ASCENDING;
      OrderExpression orderExpr = new OrderExpression();
      orderExpr.setOrder(ascending ? Order.ASC : Order.DESC);
      // Explicit NULLS FIRST/LAST wins; otherwise the default follows the
      // sort direction (ASC -> NULLS FIRST, DESC -> NULLS LAST).
      if (collation.right.contains(SqlKind.NULLS_FIRST)) {
        orderExpr.setNullOrder(NullOrder.NULLS_FIRST);
      } else if (collation.right.contains(SqlKind.NULLS_LAST)) {
        orderExpr.setNullOrder(NullOrder.NULLS_LAST);
      } else {
        orderExpr.setNullOrder(ascending ? NullOrder.NULLS_FIRST : NullOrder.NULLS_LAST);
      }
      orderExpr.setExpression(collation.left.accept(new RexVisitor(schema)));
      orderSpec.addExpression(orderExpr);
    }
    result.setOrderSpec(orderSpec);
  }
  return result;
}
Example usage of org.apache.hadoop.hive.ql.parse.ASTNode in the Apache Hive project: the testLookupFunctionOnDemand method of the TestExprProcessorGetFuncExpr class.
/**
 * Verifies that a UDF not yet registered locally ("myupper") is resolved
 * on demand when building the function expression node.
 */
@Test
public void testLookupFunctionOnDemand() throws Exception {
  TypeCheckProcFactory.DefaultExprProcessor processor =
      ExprNodeTypeCheck.getExprNodeDefaultExprProcessor();
  // Build a TOK_FUNCTION AST by hand: myupper('test').
  ASTNode funcNode = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION, "TOK_FUNCTION"));
  funcNode.addChild(new ASTNode(new CommonToken(HiveParser.Identifier, "myupper")));
  funcNode.addChild(new ASTNode(new CommonToken(HiveParser.StringLiteral, "test")));
  List<ExprNodeDesc> args = new ArrayList<>();
  args.add(new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "test"));
  // getXpathOrFuncExprNodeDesc cannot access from outside package
  ExprNodeDesc result = (ExprNodeDesc) processor.getXpathOrFuncExprNodeDesc(
      funcNode, true, args, new TypeCheckCtx(null));
  Assert.assertNotNull(result);
  // The resolved UDF must be attached to the produced generic-function descriptor.
  Assert.assertNotNull(((ExprNodeGenericFuncDesc) result).getGenericUDF());
}
Example usage of org.apache.hadoop.hive.ql.parse.ASTNode in the Apache Hive project: the getDestNamePrefix method of the Context class.
/**
 * Determines the destination-clause name prefix (insert/update/delete) for the
 * destination node of a query block.
 * The prefix is always relative to a given ASTNode.
 * We need this so that FileSinkOperatorS corresponding to different branches of a multi-insert
 * statement which represents a SQL Merge statement get marked correctly with
 * {@link org.apache.hadoop.hive.ql.io.AcidUtils.Operation}. See usages
 * of {@link #getDestNamePrefix(ASTNode, QB)} and
 * {@link org.apache.hadoop.hive.ql.parse.SemanticAnalyzer#updating(String)} and
 * {@link org.apache.hadoop.hive.ql.parse.SemanticAnalyzer#deleting(String)}.
 *
 * @param curNode the destination node (TOK_DESTINATION or TOK_INSERT_INTO) of the clause; must not be null
 * @param queryBlock the query block the clause belongs to
 * @return the prefix identifying which kind of clause this destination represents
 */
public DestClausePrefix getDestNamePrefix(ASTNode curNode, QB queryBlock) {
assert curNode != null : "must supply curNode";
if (queryBlock.isInsideView() || queryBlock.getParseInfo().getIsSubQ()) {
/**
 * Views get inlined in the logical plan but not in the AST
 * {@link org.apache.hadoop.hive.ql.parse.SemanticAnalyzer#replaceViewReferenceWithDefinition(QB, Table, String, String)}
 * Since here we only care to identify clauses representing Update/Delete which are not
 * possible inside a view/subquery, we can immediately return the default {@link DestClausePrefix#INSERT}
 */
return DestClausePrefix.INSERT;
}
if (curNode.getType() != HiveParser.TOK_INSERT_INTO) {
// select statement
assert curNode.getType() == HiveParser.TOK_DESTINATION;
if (operation == Operation.OTHER) {
// not an 'interesting' op
return DestClausePrefix.INSERT;
}
// if it is an 'interesting' op but it's a select it must be a sub-query or a derived table
// it doesn't require a special Acid code path - the rest of the code here is to ensure
// the tree structure is what we expect
boolean thisIsInASubquery = false;
// Walk up the AST from the destination node looking for a subquery or set-op
// ancestor. NOTE: curNode is deliberately reused as the walk cursor here.
parentLoop: while (curNode.getParent() != null) {
curNode = (ASTNode) curNode.getParent();
// All of the following cases fall through to the same handling on purpose.
switch(curNode.getType()) {
case HiveParser.TOK_SUBQUERY_EXPR:
// this is a real subquery (foo IN (select ...))
case HiveParser.TOK_SUBQUERY:
// strictly speaking SetOps should have a TOK_SUBQUERY parent so next 6 items are redundant
case HiveParser.TOK_UNIONALL:
case HiveParser.TOK_UNIONDISTINCT:
case HiveParser.TOK_EXCEPTALL:
case HiveParser.TOK_EXCEPTDISTINCT:
case HiveParser.TOK_INTERSECTALL:
case HiveParser.TOK_INTERSECTDISTINCT:
thisIsInASubquery = true;
break parentLoop;
}
}
if (!thisIsInASubquery) {
// An 'interesting' op with a plain TOK_DESTINATION outside any subquery/set-op
// violates the expected tree shape - fail loudly rather than mislabel the sink.
throw new IllegalStateException("Expected '" + getMatchedText(curNode) + "' to be in sub-query or set operation.");
}
return DestClausePrefix.INSERT;
}
switch(operation) {
case OTHER:
return DestClausePrefix.INSERT;
case UPDATE:
return DestClausePrefix.UPDATE;
case DELETE:
return DestClausePrefix.DELETE;
case MERGE:
/* This is the structure expected here
HiveParser.TOK_QUERY;
HiveParser.TOK_FROM
HiveParser.TOK_INSERT;
HiveParser.TOK_INSERT_INTO;
HiveParser.TOK_INSERT;
HiveParser.TOK_INSERT_INTO;
.....*/
ASTNode insert = (ASTNode) curNode.getParent();
assert insert != null && insert.getType() == HiveParser.TOK_INSERT;
ASTNode query = (ASTNode) insert.getParent();
assert query != null && query.getType() == HiveParser.TOK_QUERY;
// Find which insert branch of the multi-insert this node is, then map the
// branch index to its WHEN-clause prefix recorded during merge rewriting.
for (int childIdx = 1; childIdx < query.getChildCount(); childIdx++) {
// 1st child is TOK_FROM
assert query.getChild(childIdx).getType() == HiveParser.TOK_INSERT;
if (insert == query.getChild(childIdx)) {
DestClausePrefix prefix = insertBranchToNamePrefix.get(childIdx);
if (prefix == null) {
throw new IllegalStateException("Found a node w/o branch mapping: '" + getMatchedText(insert) + "'");
}
return prefix;
}
}
throw new IllegalStateException("Could not locate '" + getMatchedText(insert) + "'");
default:
throw new IllegalStateException("Unexpected operation: " + operation);
}
}
Example usage of org.apache.hadoop.hive.ql.parse.ASTNode in the Apache Hive project: the testTxnCommitRollback method of the TestTransactionStatement class.
/**
 * Checks that COMMIT/ROLLBACK (with or without the optional WORK keyword)
 * parse to the expected single-token ASTs.
 */
@Test
public void testTxnCommitRollback() throws ParseException {
  String mismatchMsg = "AST doesn't match";
  Assert.assertEquals(mismatchMsg, "tok_commit", parse("COMMIT").toStringTree());
  Assert.assertEquals(mismatchMsg, "tok_commit", parse("COMMIT WORK").toStringTree());
  Assert.assertEquals(mismatchMsg, "tok_rollback", parse("ROLLBACK").toStringTree());
  Assert.assertEquals(mismatchMsg, "tok_rollback", parse("ROLLBACK WORK").toStringTree());
}
Example usage of org.apache.hadoop.hive.ql.parse.ASTNode in the Apache Hive project: the testAutoCommit method of the TestTransactionStatement class.
/**
 * Checks that SET AUTOCOMMIT TRUE/FALSE parse to the expected two-node ASTs.
 */
@Test
public void testAutoCommit() throws ParseException {
  String mismatchMsg = "AST doesn't match";
  Assert.assertEquals(mismatchMsg, "(tok_set_autocommit tok_true)",
      parse("SET AUTOCOMMIT TRUE").toStringTree());
  Assert.assertEquals(mismatchMsg, "(tok_set_autocommit tok_false)",
      parse("SET AUTOCOMMIT FALSE").toStringTree());
}
Aggregations