Use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani:
in the class TranslateUtils, method buildASTNode.
/**
 * Builds an {@link ASTNode} for a bare column reference: a TABLEORCOL node
 * wrapping an Identifier token carrying the given column name.
 *
 * @param colName text to place in the Identifier token
 * @return the constructed subtree, cast to {@link ASTNode}
 */
public static ASTNode buildASTNode(String colName) {
    TreeWizard wizard = new TreeWizard(adaptor, Windowing2Parser.tokenNames);
    String treePattern = sprintf("(TABLEORCOL Identifier[%s])", colName);
    return (ASTNode) wizard.create(treePattern);
}
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani:
in the class WhereTranslation, method translate.
/**
 * Translates the WHERE clause of the query spec into a {@link WhereDef}
 * (AST, expression node, evaluator, and ObjectInspector) and attaches it to
 * the query definition. A no-op when the query has no WHERE clause.
 *
 * @param qDef the query definition being translated
 * @throws WindowingException if the WHERE expression cannot be coerced to a
 *         boolean value
 */
public static void translate(QueryDef qDef) throws WindowingException {
    ASTNode whereAST = qDef.getSpec().getWhereExpr();
    if (whereAST == null) {
        return;
    }
    QueryTranslationInfo transInfo = qDef.getTranslationInfo();
    WhereDef whereDef = new WhereDef();
    whereDef.setExpression(whereAST);
    InputInfo inputInfo = transInfo.getInputInfo(qDef.getInput());
    ExprNodeDesc whereNode = TranslateUtils.buildExprNode(whereAST, inputInfo.getTypeCheckCtx());
    ExprNodeEvaluator whereEval = WindowingExprNodeEvaluatorFactory.get(transInfo, whereNode);
    ObjectInspector whereOI = TranslateUtils.initExprNodeEvaluator(qDef, whereNode, whereEval, inputInfo);
    // The converter itself is discarded: this call is purely a probe that the
    // expression's type is convertible to boolean.
    try {
        ObjectInspectorConverters.getConverter(whereOI, PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    } catch (Throwable t) {
        throw new WindowingException("Where Expr must be convertible to a boolean value", t);
    }
    whereDef.setExprNode(whereNode);
    whereDef.setExprEvaluator(whereEval);
    whereDef.setOI(whereOI);
    qDef.setWhere(whereDef);
}
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project incubator-atlas by apache:
in the class HiveASTRewriter, method rewrite.
/**
 * Parses the given query, rewrites its AST in place via {@code rewrite(ASTNode)},
 * and renders the rewritten SQL text.
 *
 * @param sourceQry the original query text
 * @return the rewritten query text
 * @throws RewriteException if the query cannot be parsed
 */
public String rewrite(String sourceQry) throws RewriteException {
    try {
        ParseDriver driver = new ParseDriver();
        ASTNode ast = driver.parse(sourceQry, queryContext, true);
        ast = ParseUtils.findRootNonNullToken(ast);
        this.rwCtx = new RewriteContext(sourceQry, ast, queryContext.getTokenRewriteStream());
        rewrite(ast);
        return toSQL();
    } catch (ParseException e) {
        LOG.error("Could not parse the query {} ", sourceQry, e);
        throw new RewriteException("Could not parse query : ", e);
    }
}
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache:
in the class CreateDatabaseHook, method preAnalyze.
/**
 * Pre-analysis hook for CREATE DATABASE. Records the database name and, when
 * IF NOT EXISTS is present and the database already exists, returns early so
 * the statement becomes a no-op.
 *
 * @param context hook context supplying the Hive DB handle
 * @param ast the CREATE DATABASE statement's AST
 * @return the (unmodified) AST
 * @throws SemanticException if the Hive DB handle or metastore lookup fails
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
    Hive db;
    try {
        db = context.getHive();
    } catch (HiveException e) {
        throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }
    // Child 0 is always the database name; remaining children are options.
    databaseName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast.getChild(0));
    int childCount = ast.getChildCount();
    for (int i = 1; i < childCount; i++) {
        ASTNode option = (ASTNode) ast.getChild(i);
        if (option.getToken().getType() == HiveParser.TOK_IFNOTEXISTS) {
            try {
                List<String> matches = db.getDatabasesByPattern(databaseName);
                if (matches != null && !matches.isEmpty()) {
                    // Database already exists: IF NOT EXISTS makes this a no-op.
                    return ast;
                }
            } catch (HiveException e) {
                throw new SemanticException(e);
            }
        }
    }
    return ast;
}
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache:
in the class CreateTableHook, method preAnalyze.
/**
 * Pre-analysis hook for CREATE TABLE. Validates HCatalog restrictions:
 * rejects CTAS, requires string-typed partition columns, short-circuits on
 * IF NOT EXISTS when the table already exists or when a storage handler is
 * specified, and finally checks that a complete STORED AS specification is
 * present (unless CREATE TABLE LIKE is used).
 *
 * @param context hook context supplying the Hive DB handle and configuration
 * @param ast the CREATE TABLE statement's AST
 * @return the (unmodified) AST
 * @throws SemanticException on any unsupported construct or metastore failure
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
    Hive db;
    try {
        db = context.getHive();
    } catch (HiveException e) {
        throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }
    // Child 0 is always the table name; remaining children are options.
    int childCount = ast.getChildCount();
    tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast.getChild(0));
    boolean isLikeTable = false;
    StorageFormat storageFormat = new StorageFormat(context.getConf());
    for (int i = 1; i < childCount; i++) {
        ASTNode option = (ASTNode) ast.getChild(i);
        if (storageFormat.fillStorageFormat(option)) {
            // A custom storage handler supplies its own formats; nothing to check.
            if (org.apache.commons.lang3.StringUtils.isNotEmpty(storageFormat.getStorageHandler())) {
                return ast;
            }
            continue;
        }
        int tokenType = option.getToken().getType();
        if (tokenType == HiveParser.TOK_QUERY) {
            // CTAS
            throw new SemanticException("Operation not supported. Create table as " + "Select is not a valid operation.");
        } else if (tokenType == HiveParser.TOK_LIKETABLE) {
            isLikeTable = true;
        } else if (tokenType == HiveParser.TOK_IFNOTEXISTS) {
            try {
                List<String> matches = db.getTablesByPattern(tableName);
                if (matches != null && !matches.isEmpty()) {
                    // Table already exists: IF NOT EXISTS makes this a no-op.
                    return ast;
                }
            } catch (HiveException e) {
                throw new SemanticException(e);
            }
        } else if (tokenType == HiveParser.TOK_TABLEPARTCOLS) {
            List<FieldSchema> partitionColumns = BaseSemanticAnalyzer.getColumns(option, false, context.getConf());
            for (FieldSchema col : partitionColumns) {
                if (!col.getType().equalsIgnoreCase("string")) {
                    throw new SemanticException("Operation not supported. HCatalog only " + "supports partition columns of type string. " + "For column: " + col.getName() + " Found type: " + col.getType());
                }
            }
        }
        // TOK_ALTERTABLE_BUCKETS and any other token types are intentionally ignored.
    }
    if (!isLikeTable && (storageFormat.getInputFormat() == null || storageFormat.getOutputFormat() == null)) {
        throw new SemanticException("STORED AS specification is either incomplete or incorrect.");
    }
    return ast;
}
Aggregations