Example 6 with ParseException

Use of org.apache.hadoop.hive.ql.parse.ParseException in project hive by apache.

The class GenericUDTFGetSQLSchema, method process:

@Override
public void process(Object[] arguments) throws HiveException {
    String query = stringOI.getPrimitiveJavaObject(arguments[0]);
    LOG.debug("Getting schema for Query: {}", query);
    HiveConf conf = new HiveConf(SessionState.get().getConf());
    List<FieldSchema> fieldSchemas = null;
    try {
        fieldSchemas = ParseUtils.parseQueryAndGetSchema(conf, query);
    } catch (ParseException e) {
        throw new HiveException(e);
    }
    if (fieldSchemas != null) {
        for (FieldSchema fieldSchema : fieldSchemas) {
            nameTypePair[0] = fieldSchema.getName().getBytes(StandardCharsets.UTF_8);
            nameTypePair[1] = fieldSchema.getType().getBytes(StandardCharsets.UTF_8);
            forward(nameTypePair);
        }
    }
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), HiveConf (org.apache.hadoop.hive.conf.HiveConf), ParseException (org.apache.hadoop.hive.ql.parse.ParseException)
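
Outside the UDTF, the same pattern reduces to a few lines. The sketch below is illustrative rather than taken from the Hive sources: QuerySchemaPrinter is a made-up class name, and it assumes the caller supplies a HiveConf that is initialized well enough for ParseUtils.parseQueryAndGetSchema (the UDTF above derives one from the active SessionState).

import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.ParseUtils;

public class QuerySchemaPrinter {

    // Parses the query, derives its result schema, and prints each column as
    // "name : type". Error handling mirrors the UDTF above: ParseException is
    // wrapped in a HiveException.
    public static void printSchema(HiveConf conf, String query) throws HiveException {
        List<FieldSchema> fieldSchemas;
        try {
            fieldSchemas = ParseUtils.parseQueryAndGetSchema(conf, query);
        } catch (ParseException e) {
            throw new HiveException(e);
        }
        if (fieldSchemas != null) {
            for (FieldSchema fieldSchema : fieldSchemas) {
                System.out.println(fieldSchema.getName() + " : " + fieldSchema.getType());
            }
        }
    }
}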

Example 7 with ParseException

Use of org.apache.hadoop.hive.ql.parse.ParseException in project hive by apache.

The class ExpressionFactory, method fromString:

public static Expression fromString(final String expression) {
    if (expression == null || expression.isEmpty()) {
        return null;
    }
    ParseDriver driver = new ParseDriver();
    ASTNode node = null;
    try {
        node = driver.parseTriggerExpression(expression);
    } catch (ParseException e) {
        throw new IllegalArgumentException("Invalid expression: " + expression, e);
    }
    if (node.getChildCount() == 2 && node.getChild(1).getType() == HiveParser.EOF) {
        node = (ASTNode) node.getChild(0);
    }
    if (node.getType() != HiveParser.TOK_TRIGGER_EXPRESSION) {
        throw new IllegalArgumentException("Expected trigger expression, got: " + node.toStringTree());
    }
    if (node.getChildCount() != 3) {
        throw new IllegalArgumentException("Only single > condition supported: " + expression);
    }
    // only a single '>' predicate is supported for now; a full expression tree
    // will be needed when multiple conditions are required. HIVE-17622
    if (node.getChild(1).getType() != HiveParser.GREATERTHAN) {
        throw new IllegalArgumentException("Invalid predicate in expression");
    }
    final String counterName = node.getChild(0).getText();
    final String counterValueStr = PlanUtils.stripQuotes(node.getChild(2).getText().toLowerCase());
    if (counterName.isEmpty()) {
        throw new IllegalArgumentException("Counter name cannot be empty!");
    }
    // look for matches in file system counters
    long counterValue;
    for (FileSystemCounterLimit.FSCounter fsCounter : FileSystemCounterLimit.FSCounter.values()) {
        if (counterName.toUpperCase().endsWith(fsCounter.name())) {
            try {
                counterValue = getCounterValue(counterValueStr, new Validator.SizeValidator());
                if (counterValue < 0) {
                    throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
                }
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
            }
            // this is file system counter, valid and create counter
            FileSystemCounterLimit fsCounterLimit = FileSystemCounterLimit.fromName(counterName, counterValue);
            return createExpression(fsCounterLimit);
        }
    }
    // look for matches in time based counters
    for (TimeCounterLimit.TimeCounter timeCounter : TimeCounterLimit.TimeCounter.values()) {
        if (counterName.equalsIgnoreCase(timeCounter.name())) {
            try {
                counterValue = getCounterValue(counterValueStr, new Validator.TimeValidator(TimeUnit.MILLISECONDS));
                if (counterValue < 0) {
                    throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
                }
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
            }
            TimeCounterLimit timeCounterLimit = new TimeCounterLimit(TimeCounterLimit.TimeCounter.valueOf(counterName.toUpperCase()), counterValue);
            return createExpression(timeCounterLimit);
        }
    }
    // look for matches in vertex specific counters
    for (VertexCounterLimit.VertexCounter vertexCounter : VertexCounterLimit.VertexCounter.values()) {
        if (counterName.equalsIgnoreCase(vertexCounter.name())) {
            try {
                counterValue = getCounterValue(counterValueStr, null);
                if (counterValue < 0) {
                    throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
                }
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
            }
            VertexCounterLimit vertexCounterLimit = new VertexCounterLimit(VertexCounterLimit.VertexCounter.valueOf(counterName.toUpperCase()), counterValue);
            return createExpression(vertexCounterLimit);
        }
    }
    // if nothing matches, try creating a custom counter
    try {
        counterValue = getCounterValue(counterValueStr, null);
        if (counterValue < 0) {
            throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
        }
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
    }
    CustomCounterLimit customCounterLimit = new CustomCounterLimit(counterName, counterValue);
    return createExpression(customCounterLimit);
}
Also used: ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver), ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), ParseException (org.apache.hadoop.hive.ql.parse.ParseException)
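
A hedged usage sketch of fromString follows. It is not taken from the Hive sources: it assumes that BYTES_READ is among the FileSystemCounterLimit.FSCounter names and ELAPSED_TIME among the TimeCounterLimit.TimeCounter names, that the workload-management classes live in org.apache.hadoop.hive.ql.wm, and it uses plain numeric limits to sidestep whatever size or time suffixes the validators of a given Hive release accept.

import org.apache.hadoop.hive.ql.wm.Expression;
import org.apache.hadoop.hive.ql.wm.ExpressionFactory;

public class TriggerExpressionDemo {

    public static void main(String[] args) {
        // Counter name ending in an FSCounter name: becomes a FileSystemCounterLimit.
        Expression bytesRead = ExpressionFactory.fromString("BYTES_READ > 1073741824");

        // Time-based counter: the value is validated as milliseconds.
        Expression elapsed = ExpressionFactory.fromString("ELAPSED_TIME > 60000");

        // Any other counter name falls through to a CustomCounterLimit.
        Expression custom = ExpressionFactory.fromString("MY_COUNTER > 100");

        System.out.println(bytesRead + " | " + elapsed + " | " + custom);

        // Only a single '>' predicate is accepted; other operators are rejected
        // with an IllegalArgumentException.
        try {
            ExpressionFactory.fromString("BYTES_READ < 1073741824");
        } catch (IllegalArgumentException expected) {
            System.out.println("Rejected: " + expected.getMessage());
        }
    }
}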

Example 8 with ParseException

Use of org.apache.hadoop.hive.ql.parse.ParseException in project incubator-atlas by apache.

The class HiveASTRewriter, method rewrite:

public String rewrite(String sourceQry) throws RewriteException {
    String result = sourceQry;
    ASTNode tree = null;
    try {
        ParseDriver pd = new ParseDriver();
        tree = pd.parse(sourceQry, queryContext, true);
        tree = ParseUtils.findRootNonNullToken(tree);
        this.rwCtx = new RewriteContext(sourceQry, tree, queryContext.getTokenRewriteStream());
        rewrite(tree);
        result = toSQL();
    } catch (ParseException e) {
        LOG.error("Could not parse the query {} ", sourceQry, e);
        throw new RewriteException("Could not parse query : ", e);
    }
    return result;
}
Also used: ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver), ParseException (org.apache.hadoop.hive.ql.parse.ParseException)
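
Stripped of the rewriting machinery, the parse-and-catch pattern looks like the sketch below. It is illustrative only: QueryAstDumper is a made-up name, and it assumes the single-argument ParseDriver.parse(String) overload and the ParseUtils.findRootNonNullToken helper exposed by the older Hive releases that incubator-atlas builds against.

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.ParseUtils;

public class QueryAstDumper {

    // Parses a HiveQL statement and returns its AST in Lisp-style string form,
    // turning any ParseException into an unchecked IllegalArgumentException.
    public static String dumpTree(String query) {
        try {
            ASTNode tree = new ParseDriver().parse(query);
            // Skip the synthetic wrapper tokens the parser places at the root.
            tree = ParseUtils.findRootNonNullToken(tree);
            return tree.toStringTree();
        } catch (ParseException e) {
            throw new IllegalArgumentException("Could not parse query: " + query, e);
        }
    }

    public static void main(String[] args) {
        System.out.println(dumpTree("SELECT id, name FROM t WHERE id > 1"));
    }
}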

Aggregations

ParseException (org.apache.hadoop.hive.ql.parse.ParseException): 8 usages
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 5 usages
IOException (java.io.IOException): 3 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3 usages
ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver): 3 usages
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 2 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2 usages
HiveTxnManager (org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager): 2 usages
BaseSemanticAnalyzer (org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer): 2 usages
ParseContext (org.apache.hadoop.hive.ql.parse.ParseContext): 2 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 2 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 1 usage
File (java.io.File): 1 usage
FileWriter (java.io.FileWriter): 1 usage
HashMap (java.util.HashMap): 1 usage
LinkedHashMap (java.util.LinkedHashMap): 1 usage
Map (java.util.Map): 1 usage
Tree (org.antlr.runtime.tree.Tree): 1 usage
Configurable (org.apache.hadoop.conf.Configurable): 1 usage
HiveVariableSource (org.apache.hadoop.hive.conf.HiveVariableSource): 1 usage