Usage example of org.apache.hadoop.hive.ql.parse.ParseException in the Apache Hive project:
the method GenericUDTFGetSQLSchema.process.
@Override
public void process(Object[] arguments) throws HiveException {
  // The single UDTF argument is the SQL text whose result schema we report.
  final String sql = stringOI.getPrimitiveJavaObject(arguments[0]);
  LOG.debug("Getting schema for Query: {}", sql);

  // Parse against a copy of the session configuration so the compilation
  // cannot mutate the caller's HiveConf.
  final HiveConf sessionConf = new HiveConf(SessionState.get().getConf());

  final List<FieldSchema> schema;
  try {
    schema = ParseUtils.parseQueryAndGetSchema(sessionConf, sql);
  } catch (ParseException e) {
    // Surface parse failures to the caller as a HiveException, keeping the cause.
    throw new HiveException(e);
  }

  if (schema == null) {
    return;
  }
  // Emit one (name, type) row per result column, both encoded as UTF-8 bytes.
  for (FieldSchema column : schema) {
    nameTypePair[0] = column.getName().getBytes(StandardCharsets.UTF_8);
    nameTypePair[1] = column.getType().getBytes(StandardCharsets.UTF_8);
    forward(nameTypePair);
  }
}
Usage example of org.apache.hadoop.hive.ql.parse.ParseException in the Apache Hive project:
the method ExpressionFactory.fromString.
/**
 * Parses a trigger expression string (e.g. {@code "BYTES_READ > 1gb"}) into an
 * {@link Expression}. The expression must be a single {@code counter > value}
 * condition; the counter name is matched, in order, against file-system counters,
 * time-based counters, and vertex counters, falling back to a custom counter.
 *
 * @param expression the trigger expression text; may be {@code null} or empty
 * @return the parsed {@code Expression}, or {@code null} for a null/empty input
 * @throws IllegalArgumentException if the expression cannot be parsed, uses a
 *         predicate other than {@code >}, has an empty counter name, or has an
 *         invalid/negative counter value
 */
public static Expression fromString(final String expression) {
  if (expression == null || expression.isEmpty()) {
    return null;
  }
  ParseDriver driver = new ParseDriver();
  ASTNode node = null;
  try {
    node = driver.parseTriggerExpression(expression);
  } catch (ParseException e) {
    throw new IllegalArgumentException("Invalid expression: " + expression, e);
  }
  // The parser may wrap the expression with an EOF token; unwrap it.
  if (node.getChildCount() == 2 && node.getChild(1).getType() == HiveParser.EOF) {
    node = (ASTNode) node.getChild(0);
  }
  if (node.getType() != HiveParser.TOK_TRIGGER_EXPRESSION) {
    throw new IllegalArgumentException("Expected trigger expression, got: " + node.toStringTree());
  }
  if (node.getChildCount() != 3) {
    throw new IllegalArgumentException("Only single > condition supported: " + expression);
  }
  // expression tree when multiple conditions are required. HIVE-17622
  if (node.getChild(1).getType() != HiveParser.GREATERTHAN) {
    throw new IllegalArgumentException("Invalid predicate in expression");
  }
  final String counterName = node.getChild(0).getText();
  final String counterValueStr = PlanUtils.stripQuotes(node.getChild(2).getText().toLowerCase());
  if (counterName.isEmpty()) {
    throw new IllegalArgumentException("Counter name cannot be empty!");
  }
  // look for matches in file system counters
  for (FileSystemCounterLimit.FSCounter fsCounter : FileSystemCounterLimit.FSCounter.values()) {
    if (counterName.toUpperCase().endsWith(fsCounter.name())) {
      // this is file system counter, valid and create counter
      long counterValue = parseCounterLimit(counterValueStr, new Validator.SizeValidator());
      FileSystemCounterLimit fsCounterLimit = FileSystemCounterLimit.fromName(counterName, counterValue);
      return createExpression(fsCounterLimit);
    }
  }
  // look for matches in time based counters
  for (TimeCounterLimit.TimeCounter timeCounter : TimeCounterLimit.TimeCounter.values()) {
    if (counterName.equalsIgnoreCase(timeCounter.name())) {
      long counterValue = parseCounterLimit(counterValueStr, new Validator.TimeValidator(TimeUnit.MILLISECONDS));
      TimeCounterLimit timeCounterLimit = new TimeCounterLimit(
          TimeCounterLimit.TimeCounter.valueOf(counterName.toUpperCase()), counterValue);
      return createExpression(timeCounterLimit);
    }
  }
  // look for matches in vertex specific counters
  for (VertexCounterLimit.VertexCounter vertexCounter : VertexCounterLimit.VertexCounter.values()) {
    if (counterName.equalsIgnoreCase(vertexCounter.name())) {
      long counterValue = parseCounterLimit(counterValueStr, null);
      VertexCounterLimit vertexCounterLimit = new VertexCounterLimit(
          VertexCounterLimit.VertexCounter.valueOf(counterName.toUpperCase()), counterValue);
      return createExpression(vertexCounterLimit);
    }
  }
  // if nothing matches, try creating a custom counter
  long counterValue = parseCounterLimit(counterValueStr, null);
  CustomCounterLimit customCounterLimit = new CustomCounterLimit(counterName, counterValue);
  return createExpression(customCounterLimit);
}

/**
 * Parses and validates a counter limit value, centralizing the logic that was
 * previously duplicated for every counter category.
 *
 * @param counterValueStr the raw (lower-cased, unquoted) value text
 * @param validator optional validator used to interpret units (size/time); may be null
 * @return the non-negative parsed value
 * @throws IllegalArgumentException if the value cannot be parsed or is negative
 */
private static long parseCounterLimit(final String counterValueStr, final Validator validator) {
  final long counterValue;
  try {
    counterValue = getCounterValue(counterValueStr, validator);
  } catch (NumberFormatException e) {
    // Preserve the cause instead of dropping it (original code lost the stack trace).
    throw new IllegalArgumentException("Invalid counter value: " + counterValueStr, e);
  }
  // The negative-value check was inside the try in the original, but an
  // IllegalArgumentException is never caught by catch(NumberFormatException),
  // so hoisting it out is behavior-preserving.
  if (counterValue < 0) {
    throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
  }
  return counterValue;
}
Usage example of org.apache.hadoop.hive.ql.parse.ParseException in the Apache incubator-atlas project:
the method HiveASTRewriter.rewrite.
/**
 * Rewrites the given HiveQL query text and returns the transformed SQL.
 *
 * @param sourceQry the original query text
 * @return the rewritten query
 * @throws RewriteException if the source query cannot be parsed
 */
public String rewrite(String sourceQry) throws RewriteException {
  try {
    ParseDriver parseDriver = new ParseDriver();
    ASTNode ast = parseDriver.parse(sourceQry, queryContext, true);
    // Skip over any synthetic wrapper nodes to reach the real root token.
    ast = ParseUtils.findRootNonNullToken(ast);
    this.rwCtx = new RewriteContext(sourceQry, ast, queryContext.getTokenRewriteStream());
    rewrite(ast);
    return toSQL();
  } catch (ParseException e) {
    LOG.error("Could not parse the query {} ", sourceQry, e);
    throw new RewriteException("Could not parse query : ", e);
  }
}
Aggregations