Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
The class PTFTranslator, method setupShapeForNoop.
private ShapeDetails setupShapeForNoop(ShapeDetails inpShape, StructObjectInspector OI,
    List<String> columnNames, RowResolver rr) throws SemanticException {
  ShapeDetails shp = new ShapeDetails();
  shp.setRr(rr);
  shp.setOI(inpShape.getOI());
  shp.setSerde(inpShape.getSerde());
  shp.setSerdeClassName(inpShape.getSerde().getClass().getName());
  shp.setSerdeProps(inpShape.getSerdeProps());
  shp.setColumnNames(columnNames);
  TypeCheckCtx tCtx = new TypeCheckCtx(rr);
  tCtx.setUnparseTranslator(unparseT);
  shp.setTypeCheckCtx(tCtx);
  return shp;
}
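For context, the TypeCheckCtx attached here is what later drives expression translation against this shape's RowResolver. A minimal sketch of that pattern, assuming a getTypeCheckCtx accessor on ShapeDetails and an expression AST named exprAST in scope (both illustrative, not from this method):
// Minimal sketch, not from the Hive source: translate an expression AST
// against the shape's columns via its stored TypeCheckCtx.
TypeCheckCtx ctx = shp.getTypeCheckCtx();
Map<ASTNode, ExprNodeDesc> nodeToDesc = ExprNodeTypeCheck.genExprNode(exprAST, ctx);
ExprNodeDesc translated = nodeToDesc.get(exprAST);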
Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
The class BaseSemanticAnalyzer, method getPartExprNodeDesc.
private static boolean getPartExprNodeDesc(ASTNode astNode, HiveConf conf,
    Map<ASTNode, ExprNodeDesc> astExprNodeMap) throws SemanticException {
  if (astNode == null) {
    return true;
  } else if ((astNode.getChildren() == null) || (astNode.getChildren().size() == 0)) {
    // A leaf TOK_PARTVAL carries no value, so the spec is not fully static.
    return astNode.getType() != HiveParser.TOK_PARTVAL;
  }
  TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
  String defaultPartitionName = HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME);
  boolean result = true;
  for (Node childNode : astNode.getChildren()) {
    ASTNode childASTNode = (ASTNode) childNode;
    if (childASTNode.getType() != HiveParser.TOK_PARTVAL) {
      result = getPartExprNodeDesc(childASTNode, conf, astExprNodeMap) && result;
    } else {
      // A TOK_PARTVAL with only the column-name child (no value) is a dynamic partition.
      boolean isDynamicPart = childASTNode.getChildren().size() <= 1;
      result = !isDynamicPart && result;
      if (!isDynamicPart) {
        ASTNode partVal = (ASTNode) childASTNode.getChildren().get(1);
        if (!defaultPartitionName.equalsIgnoreCase(unescapeSQLString(partVal.getText()))) {
          astExprNodeMap.put((ASTNode) childASTNode.getChildren().get(0),
              ExprNodeTypeCheck.genExprNode(partVal, typeCheckCtx).get(partVal));
        }
      }
    }
  }
  return result;
}
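A hedged caller sketch (the partSpecAst variable is illustrative): the method returns false as soon as any TOK_PARTVAL lacks a value child, which signals a dynamic partition spec:
// Illustrative only: collect constant expressions for the static partition values.
Map<ASTNode, ExprNodeDesc> partExprs = new HashMap<>();
boolean allStatic = getPartExprNodeDesc(partSpecAst, conf, partExprs);
if (!allStatic) {
  // At least one TOK_PARTVAL had no value child, i.e. a dynamic partition spec.
}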
Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
The class ConstraintsUtils, method getDefaultValue.
/**
 * Validate and get the default value from the AST.
 * @param node AST node corresponding to the default value
 * @param typeChild AST node corresponding to the column type
 * @param tokenStream token stream used to recover the original default value text
 * @return the default value as a string
 */
private static String getDefaultValue(ASTNode node, ASTNode typeChild, TokenRewriteStream tokenStream)
    throws SemanticException {
  // first create expression from defaultValueAST
  TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
  ExprNodeDesc defaultValExpr = ExprNodeTypeCheck.genExprNode(node, typeCheckCtx).get(node);
  if (defaultValExpr == null) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value!"));
  }
  // get the default value text to be stored in the metastore
  String defaultValueText = tokenStream.toOriginalString(node.getTokenStartIndex(), node.getTokenStopIndex());
  if (defaultValueText.length() > DEFAULT_MAX_LEN) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText
        + ". Maximum character length allowed is " + DEFAULT_MAX_LEN + "."));
  }
  // Make sure the default value expression type is exactly the same as the column's type.
  TypeInfo defaultValTypeInfo = defaultValExpr.getTypeInfo();
  TypeInfo colTypeInfo =
      TypeInfoUtils.getTypeInfoFromTypeString(BaseSemanticAnalyzer.getTypeStringFromAST(typeChild));
  if (!defaultValTypeInfo.equals(colTypeInfo)) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type: " + defaultValTypeInfo.getTypeName()
        + " for default value: " + defaultValueText + ". Please make sure that the type is compatible with column type: "
        + colTypeInfo.getTypeName()));
  }
  // throw an error if the default value isn't one Hive allows
  if (!isDefaultValueAllowed(defaultValExpr)) {
    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText
        + ". DEFAULT only allows constant or function expressions"));
  }
  return defaultValueText;
}
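Note that the TypeInfo comparison above is an exact equality check, not an assignability check. A small hedged illustration of what that implies for parameterized types:
// Illustrative: TypeInfo.equals is exact, so e.g. varchar(10) and varchar(20)
// are different types and would fail the default-value type check above.
TypeInfo t1 = TypeInfoUtils.getTypeInfoFromTypeString("varchar(10)");
TypeInfo t2 = TypeInfoUtils.getTypeInfoFromTypeString("varchar(20)");
assert !t1.equals(t2);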
Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
The class PTFTranslator, method setupShape.
private ShapeDetails setupShape(StructObjectInspector OI, List<String> columnNames, RowResolver rr)
    throws SemanticException {
  Map<String, String> serdePropsMap = new LinkedHashMap<String, String>();
  AbstractSerDe serde = null;
  ShapeDetails shp = new ShapeDetails();
  try {
    serde = PTFTranslator.createLazyBinarySerDe(hCfg, OI, serdePropsMap);
    StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serde, OI);
    shp.setOI(outOI);
  } catch (SerDeException se) {
    throw new SemanticException(se);
  }
  shp.setRr(rr);
  shp.setSerde(serde);
  shp.setSerdeClassName(serde.getClass().getName());
  shp.setSerdeProps(serdePropsMap);
  shp.setColumnNames(columnNames);
  TypeCheckCtx tCtx = new TypeCheckCtx(rr);
  tCtx.setUnparseTranslator(unparseT);
  shp.setTypeCheckCtx(tCtx);
  return shp;
}
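Both setupShape variants end with identical TypeCheckCtx wiring; a hypothetical refactoring sketch of that shared tail (the helper name is not in the Hive source):
// Hypothetical helper, not in Apache Hive: factors out the tail shared by
// setupShape and setupShapeForNoop.
private TypeCheckCtx makeShapeTypeCheckCtx(RowResolver rr) {
  TypeCheckCtx tCtx = new TypeCheckCtx(rr);
  tCtx.setUnparseTranslator(unparseT);
  return tCtx;
}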
Use of org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx in project hive by apache.
The class ParseUtils, method getFullPartitionSpecs.
/**
 * Get the partition specs from the tree. Each returned expression captures the full
 * specification, including the comparison operator.
 *
 * @return Map of partition filter expressions keyed by prefix length. Most of the time the
 *         prefix length will be the same for all partition specs, so the expressions can
 *         simply be OR'd together.
 */
public static Map<Integer, List<ExprNodeGenericFuncDesc>> getFullPartitionSpecs(CommonTree ast, Table table,
    Configuration conf, boolean canGroupExprs) throws SemanticException {
  String defaultPartitionName = HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME);
  Map<String, String> colTypes = new HashMap<>();
  for (FieldSchema fs : table.getPartitionKeys()) {
    colTypes.put(fs.getName().toLowerCase(), fs.getType());
  }
  Map<Integer, List<ExprNodeGenericFuncDesc>> result = new HashMap<>();
  for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
    Tree partSpecTree = ast.getChild(childIndex);
    if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) {
      continue;
    }
    ExprNodeGenericFuncDesc expr = null;
    Set<String> names = new HashSet<>(partSpecTree.getChildCount());
    for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
      CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
      assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
      String key = stripIdentifierQuotes(partSpecSingleKey.getChild(0).getText()).toLowerCase();
      String operator = partSpecSingleKey.getChild(1).getText();
      ASTNode partValNode = (ASTNode) partSpecSingleKey.getChild(2);
      TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
      ExprNodeConstantDesc valExpr =
          (ExprNodeConstantDesc) ExprNodeTypeCheck.genExprNode(partValNode, typeCheckCtx).get(partValNode);
      Object val = valExpr.getValue();
      boolean isDefaultPartitionName = val.equals(defaultPartitionName);
      String type = colTypes.get(key);
      if (type == null) {
        throw new SemanticException("Column " + key + " is not a partition key");
      }
      PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
      // Create the corresponding Hive expression to filter on partition columns.
      if (!isDefaultPartitionName) {
        if (!valExpr.getTypeString().equals(type)) {
          ObjectInspectorConverters.Converter converter = ObjectInspectorConverters.getConverter(
              TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(valExpr.getTypeInfo()),
              TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti));
          val = converter.convert(valExpr.getValue());
        }
      }
      ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
      ExprNodeGenericFuncDesc op;
      if (!isDefaultPartitionName) {
        op = PartitionUtils.makeBinaryPredicate(operator, column, new ExprNodeConstantDesc(pti, val));
      } else {
        GenericUDF originalOp = FunctionRegistry.getFunctionInfo(operator).getGenericUDF();
        String fnName;
        if (FunctionRegistry.isEq(originalOp)) {
          fnName = "isnull";
        } else if (FunctionRegistry.isNeq(originalOp)) {
          fnName = "isnotnull";
        } else {
          throw new SemanticException("Cannot use " + operator
              + " in a default partition spec; only '=' and '!=' are allowed.");
        }
        op = PartitionUtils.makeUnaryPredicate(fnName, column);
      }
      // If it's a multi-expr filter (e.g. a='5', b='2012-01-02'), AND it with the previous exprs.
      expr = (expr == null) ? op : PartitionUtils.makeBinaryPredicate("and", expr, op);
      names.add(key);
    }
    if (expr == null) {
      continue;
    }
    // We got the expr for one full partition spec. Determine the prefix length.
    int prefixLength = calculatePartPrefix(table, names);
    List<ExprNodeGenericFuncDesc> orExpr = result.get(prefixLength);
    // If there is no filter for this prefix length yet, create a new one; in most cases
    // there will only be one.
    if (orExpr == null) {
      result.put(prefixLength, Lists.newArrayList(expr));
    } else if (canGroupExprs) {
      orExpr.set(0, PartitionUtils.makeBinaryPredicate("or", expr, orExpr.get(0)));
    } else {
      orExpr.add(expr);
    }
  }
  return result;
}
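A hedged consumer sketch (assuming ast, table, and conf are in scope): each map entry groups the OR-able filter expressions for one partition-prefix length:
// Illustrative only: iterate the filters produced above.
Map<Integer, List<ExprNodeGenericFuncDesc>> specs =
    ParseUtils.getFullPartitionSpecs(ast, table, conf, true);
for (Map.Entry<Integer, List<ExprNodeGenericFuncDesc>> entry : specs.entrySet()) {
  int prefixLength = entry.getKey();
  for (ExprNodeGenericFuncDesc filter : entry.getValue()) {
    // e.g. hand each filter to the metastore client to resolve matching partitions
  }
}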