use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
the class ConstraintsUtils method validateCheckConstraint.
public static void validateCheckConstraint(List<FieldSchema> columns, List<SQLCheckConstraint> checkConstraints, Configuration conf)
    throws SemanticException {
  // create a ColumnInfo per column and register it in a RowResolver
  RowResolver rr = new RowResolver();
  for (FieldSchema column : columns) {
    ColumnInfo ci = new ColumnInfo(column.getName(), TypeInfoUtils.getTypeInfoFromTypeString(column.getType()), null, false);
    rr.put(null, column.getName(), ci);
  }
  TypeCheckCtx typeCheckCtx = new TypeCheckCtx(rr);
  // TypeCheckProcFactory expects the TypeCheckCtx to have an unparse translator
  UnparseTranslator unparseTranslator = new UnparseTranslator(conf);
  typeCheckCtx.setUnparseTranslator(unparseTranslator);
  for (SQLCheckConstraint cc : checkConstraints) {
    try {
      ParseDriver parseDriver = new ParseDriver();
      ASTNode checkExprAST = parseDriver.parseExpression(cc.getCheck_expression());
      validateCheckExprAST(checkExprAST);
      Map<ASTNode, ExprNodeDesc> genExprs = ExprNodeTypeCheck.genExprNode(checkExprAST, typeCheckCtx);
      ExprNodeDesc checkExpr = genExprs.get(checkExprAST);
      if (checkExpr == null) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type for CHECK constraint: ") + cc.getCheck_expression());
      }
      // compare type names with equals(); reference equality (!=) on strings is unreliable
      if (!serdeConstants.BOOLEAN_TYPE_NAME.equals(checkExpr.getTypeInfo().getTypeName())) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Only boolean type is supported for CHECK constraint: ") + cc.getCheck_expression() + ". Found: " + checkExpr.getTypeInfo().getTypeName());
      }
      validateCheckExpr(checkExpr);
    } catch (Exception e) {
      throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid CHECK constraint expression: ") + cc.getCheck_expression() + ". " + e.getMessage(), e);
    }
  }
}
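The method above is the core TypeCheckCtx recipe: wrap the column schema in a RowResolver, parse the constraint text, and type-check the resulting AST. A minimal sketch of driving the validator, assuming ConstraintsUtils is importable (it lives under org.apache.hadoop.hive.ql.ddl.table.constraint in recent Hive) and that SQLCheckConstraint's Thrift-generated setter mirrors the getter used above; the column name price is illustrative:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
import org.apache.hadoop.hive.ql.ddl.table.constraint.ConstraintsUtils; // assumed package, see lead-in
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class CheckConstraintExample {
  public static void main(String[] args) throws SemanticException {
    // one double column; the third FieldSchema argument is the (unused) comment
    List<FieldSchema> columns = Arrays.asList(new FieldSchema("price", "double", null));
    SQLCheckConstraint cc = new SQLCheckConstraint();
    cc.setCheck_expression("price > 0"); // must type-check to boolean, per the validation above
    ConstraintsUtils.validateCheckConstraint(columns, Arrays.asList(cc), new Configuration());
  }
}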
use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
the class CalcitePlanner method genRexNode.
RexNode genRexNode(ASTNode expr, RowResolver input, RowResolver outerRR, Map<ASTNode, QBSubQueryParseInfo> subqueryToRelNode,
    boolean useCaching, RexBuilder rexBuilder) throws SemanticException {
  TypeCheckCtx tcCtx = new TypeCheckCtx(input, rexBuilder, useCaching, false);
  tcCtx.setOuterRR(outerRR);
  tcCtx.setSubqueryToRelNode(subqueryToRelNode);
  return genRexNode(expr, input, tcCtx);
}
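Passing a RexBuilder into the TypeCheckCtx is what switches expression generation from Hive ExprNodeDesc trees to Calcite RexNodes. A minimal sketch of that wiring, assuming Calcite's stock JavaTypeFactoryImpl stands in for the planner's own type factory; the helper name rexTypeCheckCtx is hypothetical:

import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rex.RexBuilder;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx;

public class RexCtxExample {
  // hypothetical helper: builds a TypeCheckCtx configured for RexNode output
  static TypeCheckCtx rexTypeCheckCtx(RowResolver input, RowResolver outerRR) {
    RexBuilder rexBuilder = new RexBuilder(new JavaTypeFactoryImpl());
    // useCaching = true; the final flag mirrors the 'false' passed above
    TypeCheckCtx tcCtx = new TypeCheckCtx(input, rexBuilder, true, false);
    tcCtx.setOuterRR(outerRR); // columns of an enclosing query, null when not correlated
    return tcCtx;
  }
}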
use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
the class ExecuteStatementAnalyzer method getConstant.
/**
 * Creates a constant expression to replace the given dynamic parameter expression.
 * @param dynamicExpr expression node representing the dynamic parameter
 * @param typeInfo type info used to create the constant expression from the AST node
 * @param parameterMap map from parameter index to AST node
 * @return the constant expression bound to the parameter
 */
private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo, Map<Integer, ASTNode> parameterMap)
    throws SemanticException {
  Preconditions.checkArgument(parameterMap.containsKey(dynamicExpr.getIndex()), "Parameter index not found");
  ASTNode paramNode = parameterMap.get(dynamicExpr.getIndex());
  // a null RowResolver is fine here: the parameter is a literal, not a column reference
  TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
  ExprNodeDesc node = ExprNodeTypeCheck.genExprNode(paramNode, typeCheckCtx).get(paramNode);
  Preconditions.checkArgument(node instanceof ExprNodeConstantDesc, "Invalid expression created");
  return (ExprNodeConstantDesc) node;
}
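Because the parameter is a literal, no RowResolver is needed: the type-check pass folds the AST straight to a constant. A standalone sketch of the same fold, assuming ParseDriver.parseExpression returns the expression AST as it does in the snippets above:

import java.util.Map;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.type.ExprNodeTypeCheck;
import org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

public class ConstantFoldExample {
  public static void main(String[] args) throws Exception {
    ASTNode literal = new ParseDriver().parseExpression("42");
    // no RowResolver needed: the AST contains no column references
    Map<ASTNode, ExprNodeDesc> out = ExprNodeTypeCheck.genExprNode(literal, new TypeCheckCtx(null));
    ExprNodeConstantDesc constant = (ExprNodeConstantDesc) out.get(literal);
    System.out.println(constant.getTypeInfo().getTypeName() + ": " + constant.getValue());
  }
}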
use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
the class ConstraintsUtils method getDefaultValue.
/**
 * Validate the default value expression from the AST and return its original text.
 * @param node AST node corresponding to the default value
 * @param typeChild AST node corresponding to the column type
 * @param tokenStream token rewrite stream used to recover the original expression text
 * @return the default value as a string, exactly as written in the DDL
 */
private static String getDefaultValue(ASTNode node, ASTNode typeChild, TokenRewriteStream tokenStream) throws SemanticException {
  // first create an expression from the default value AST
  TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
  ExprNodeDesc defaultValExpr = ExprNodeTypeCheck.genExprNode(node, typeCheckCtx).get(node);
  if (defaultValExpr == null) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value!"));
  }
  // get the default value text to be stored in the metastore
  String defaultValueText = tokenStream.toOriginalString(node.getTokenStartIndex(), node.getTokenStopIndex());
  if (defaultValueText.length() > DEFAULT_MAX_LEN) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + ". Maximum character length allowed is " + DEFAULT_MAX_LEN + "."));
  }
  // make sure the default value expression type is exactly the same as the column's type
  TypeInfo defaultValTypeInfo = defaultValExpr.getTypeInfo();
  TypeInfo colTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(BaseSemanticAnalyzer.getTypeStringFromAST(typeChild));
  if (!defaultValTypeInfo.equals(colTypeInfo)) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type: " + defaultValTypeInfo.getTypeName() + " for default value: " + defaultValueText + ". Please make sure that the type is compatible with column type: " + colTypeInfo.getTypeName()));
  }
  // throw an error if the default value isn't one Hive allows
  if (!isDefaultValueAllowed(defaultValExpr)) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + ". DEFAULT only allows constant or function expressions"));
  }
  return defaultValueText;
}
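The equals() comparison makes the match exact: no implicit widening or narrowing is accepted. A small sketch of that type check in isolation, assuming Hive types a quoted literal as string:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.type.ExprNodeTypeCheck;
import org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class DefaultValueTypeExample {
  public static void main(String[] args) throws Exception {
    ASTNode defaultAst = new ParseDriver().parseExpression("'n/a'");
    ExprNodeDesc defaultExpr = ExprNodeTypeCheck.genExprNode(defaultAst, new TypeCheckCtx(null)).get(defaultAst);
    TypeInfo colType = TypeInfoUtils.getTypeInfoFromTypeString("string");
    // exact match required: a string default on a varchar(10) column would fail here
    System.out.println(defaultExpr.getTypeInfo().equals(colType)); // true
  }
}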
use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
the class ShowPartitionAnalyzer method getShowPartitionsFilter.
@VisibleForTesting
ExprNodeDesc getShowPartitionsFilter(Table table, ASTNode command) throws SemanticException {
  ExprNodeDesc showFilter = null;
  for (int childIndex = 0; childIndex < command.getChildCount(); childIndex++) {
    ASTNode astChild = (ASTNode) command.getChild(childIndex);
    if (astChild.getType() == HiveParser.TOK_WHERE) {
      // resolve every partition column as a string; the real types are restored by cast below
      RowResolver rwsch = new RowResolver();
      Map<String, String> colTypes = new HashMap<String, String>();
      for (FieldSchema fs : table.getPartCols()) {
        rwsch.put(table.getTableName(), fs.getName(), new ColumnInfo(fs.getName(), TypeInfoFactory.stringTypeInfo, null, true));
        colTypes.put(fs.getName().toLowerCase(), fs.getType());
      }
      TypeCheckCtx tcCtx = new TypeCheckCtx(rwsch);
      ASTNode conds = (ASTNode) astChild.getChild(0);
      Map<ASTNode, ExprNodeDesc> nodeOutputs = ExprNodeTypeCheck.genExprNode(conds, tcCtx);
      ExprNodeDesc target = nodeOutputs.get(conds);
      if (!(target instanceof ExprNodeGenericFuncDesc) || !target.getTypeInfo().equals(TypeInfoFactory.booleanTypeInfo)) {
        String errorMsg = tcCtx.getError() != null ? ". " + tcCtx.getError() : "";
        throw new SemanticException("Not a filter expr: " + (target == null ? "null" : target.getExprString()) + errorMsg);
      }
      showFilter = replaceDefaultPartNameAndCastType(target, colTypes, HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME));
    }
  }
  return showFilter;
}
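Partition values arrive as directory-name strings, so the analyzer resolves every partition column as string and only casts back afterwards. A compact sketch of the same resolve-and-type-check step, using a null table alias for unqualified lookup as in the first snippet; the column name ds is illustrative:

import java.util.Map;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.type.ExprNodeTypeCheck;
import org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartitionFilterExample {
  public static void main(String[] args) throws Exception {
    RowResolver rr = new RowResolver();
    // partition column 'ds' exposed as string, matching the analyzer above
    rr.put(null, "ds", new ColumnInfo("ds", TypeInfoFactory.stringTypeInfo, null, true));
    ASTNode cond = new ParseDriver().parseExpression("ds >= '2024-01-01'");
    Map<ASTNode, ExprNodeDesc> out = ExprNodeTypeCheck.genExprNode(cond, new TypeCheckCtx(rr));
    System.out.println(out.get(cond).getExprString()); // boolean comparison over the string column
  }
}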