Use of org.apache.hadoop.hive.metastore.parser.ExpressionTree in project hive by apache.
The class HBaseStore, method getNumPartitionsByFilter.
@Override
public int getNumPartitionsByFilter(String dbName, String tblName, String filter)
    throws MetaException, NoSuchObjectException {
  // Parse the filter string into an ExpressionTree (EMPTY_TREE for a blank filter).
  // Note that neither the parsed tree nor the result list is used further here:
  // the method simply delegates to getPartitionsByFilter and counts the partitions.
  final ExpressionTree exprTree = (filter != null && !filter.isEmpty())
      ? PartFilterExprUtil.getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE;
  List<Partition> result = new ArrayList<Partition>();
  boolean commit = false;
  openTransaction();
  try {
    return getPartitionsByFilter(dbName, tblName, filter, Short.MAX_VALUE).size();
  } finally {
    // commit is never set to true, so the read-only transaction is rolled back.
    commitOrRoleBack(commit);
  }
}
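For context, callers normally reach this method through the RawStore interface, passing a partition filter string in the metastore filter syntax. A minimal, hypothetical sketch of such a caller follows; the class, method, and partition-column names are illustrative and not from the Hive source.

import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;

// Hypothetical caller; "store" can be any RawStore implementation, e.g. HBaseStore or ObjectStore.
public class PartitionCountExample {
  static int countPartitionsForDate(RawStore store, String db, String table, String ds)
      throws MetaException, NoSuchObjectException {
    // Filter strings use the metastore partition-filter syntax (Filter.g),
    // here an equality predicate on a string partition column.
    String filter = "ds = \"" + ds + "\"";
    return store.getNumPartitionsByFilter(db, table, filter);
  }
}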
Use of org.apache.hadoop.hive.metastore.parser.ExpressionTree in project hive by apache.
The class PartFilterExprUtil, method makeExpressionTree.
/**
* Makes expression tree out of expr.
* @param filter Filter.
* @return Expression tree. Null if there was an error.
*/
private static ExpressionTree makeExpressionTree(String filter) throws MetaException {
  // TODO: ExprNodeDesc is an expression tree, we could just use that and be rid of Filter.g.
  if (filter == null || filter.isEmpty()) {
    return ExpressionTree.EMPTY_TREE;
  }
  LOG.debug("Filter specified is " + filter);
  ExpressionTree tree = null;
  try {
    tree = getFilterParser(filter).tree;
  } catch (MetaException ex) {
    // Don't log the stack, this is normal.
    LOG.info("Unable to make the expression tree from expression string ["
        + filter + "]" + ex.getMessage());
  }
  if (tree == null) {
    return null;
  }
  // We suspect that LIKE pushdown into JDO is invalid; see HIVE-5134. Check for like here.
  LikeChecker lc = new LikeChecker();
  tree.accept(lc);
  return lc.hasLike() ? null : tree;
}
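The same parse-or-return-null contract can be exercised directly through the public getFilterParser entry point used above. A minimal sketch, assuming only what the snippets themselves show (getFilterParser and its tree field); note that it omits the LIKE check that makeExpressionTree additionally performs.

import org.apache.hadoop.hive.metastore.PartFilterExprUtil;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree;

// Illustrative helper mirroring the null-on-parse-failure contract shown above.
public class FilterParseExample {
  static ExpressionTree parseOrNull(String filter) {
    if (filter == null || filter.isEmpty()) {
      return ExpressionTree.EMPTY_TREE;
    }
    try {
      // getFilterParser() runs the parser generated from Filter.g; the resulting
      // ExpressionTree is exposed through the returned parser's "tree" field.
      return PartFilterExprUtil.getFilterParser(filter).tree;
    } catch (MetaException e) {
      // A parse failure is normal for filters the grammar cannot express.
      return null;
    }
  }
}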
Use of org.apache.hadoop.hive.metastore.parser.ExpressionTree in project hive by apache.
The class ObjectStore, method getNumPartitionsByExpr.
@Override
public int getNumPartitionsByExpr(String dbName, String tblName, byte[] expr)
    throws MetaException, NoSuchObjectException {
  final ExpressionTree exprTree = PartFilterExprUtil.makeExpressionTree(expressionProxy, expr);
  // Needs to be final so it can be captured by the anonymous inner class below.
  final byte[] tempExpr = expr;
  return new GetHelper<Integer>(dbName, tblName, true, true) {
    private SqlFilterForPushdown filter = new SqlFilterForPushdown();

    @Override
    protected String describeResult() {
      return "Partition count";
    }

    protected boolean canUseDirectSql(GetHelper<Integer> ctx) throws MetaException {
      return directSql.generateSqlFilterForPushdown(ctx.getTable(), exprTree, filter);
    }

    @Override
    protected Integer getSqlResult(GetHelper<Integer> ctx) throws MetaException {
      return directSql.getNumPartitionsViaSqlFilter(filter);
    }

    @Override
    protected Integer getJdoResult(GetHelper<Integer> ctx) throws MetaException, NoSuchObjectException {
      Integer numPartitions = null;
      if (exprTree != null) {
        try {
          numPartitions = getNumPartitionsViaOrmFilter(ctx.getTable(), exprTree, true);
        } catch (MetaException e) {
          numPartitions = null;
        }
      }
      // If the count could not be obtained via the ORM filter, prune the partition
      // names with the serialized expression and count those instead.
      if (numPartitions == null) {
        List<String> filteredPartNames = new ArrayList<String>();
        getPartitionNamesPrunedByExprNoTxn(ctx.getTable(), tempExpr, "", (short) -1, filteredPartNames);
        numPartitions = filteredPartNames.size();
      }
      return numPartitions;
    }
  }.run(true);
}
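The anonymous GetHelper subclass above chooses between a direct-SQL count and a JDO count. The following self-contained sketch only mirrors that control flow; none of these types exist in Hive, and the real GetHelper.run() additionally manages transactions and can fall back from the SQL path to the JDO path on failure.

// Hypothetical, simplified stand-in for the GetHelper pattern used above.
abstract class CountHelper {
  // Decide whether the expression can be pushed down as a direct SQL filter.
  abstract boolean canUseDirectSql() throws Exception;
  // Count partitions via the direct SQL path.
  abstract int getSqlResult() throws Exception;
  // Count partitions via the JDO/ORM path (which has its own name-pruning fallback).
  abstract int getJdoResult() throws Exception;

  // Prefer direct SQL when the filter is pushable, otherwise use the JDO path.
  int run() throws Exception {
    return canUseDirectSql() ? getSqlResult() : getJdoResult();
  }
}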
Use of org.apache.hadoop.hive.metastore.parser.ExpressionTree in project hive by apache.
The class TestHBaseFilterPlanUtil, method verifyPlan.
private void verifyPlan(TreeNode l, List<FieldSchema> parts, String keyName, ScanMarker startMarker,
    ScanMarker endMarker, boolean hasUnsupportedCondition) throws MetaException {
  // Wrap the condition (if any) in an ExpressionTree and ask HBaseFilterPlanUtil for a scan plan.
  ExpressionTree e = null;
  if (l != null) {
    e = new ExpressionTree();
    e.setRootForTest(l);
  }
  PlanResult planRes = HBaseFilterPlanUtil.getFilterPlan(e, parts);
  FilterPlan plan = planRes.plan;
  Assert.assertEquals("Has unsupported condition", hasUnsupportedCondition,
      planRes.hasUnsupportedCondition);
  Assert.assertEquals(1, plan.getPlans().size());
  ScanPlan splan = plan.getPlans().get(0);
  // The expected markers must match the plan's markers for the given key;
  // a null expectation means the plan should not constrain that end of the scan.
  if (startMarker != null) {
    Assert.assertEquals(startMarker, splan.markers.get(keyName).startMarker);
  } else {
    Assert.assertTrue(splan.markers.get(keyName) == null || splan.markers.get(keyName).startMarker == null);
  }
  if (endMarker != null) {
    Assert.assertEquals(endMarker, splan.markers.get(keyName).endMarker);
  } else {
    Assert.assertTrue(splan.markers.get(keyName) == null || splan.markers.get(keyName).endMarker == null);
  }
}
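Tests build the TreeNode argument from ExpressionTree leaf and tree nodes. A hypothetical construction is sketched below, assuming LeafNode's public keyName/operator/value fields, the Operator and LogicalOperator enums, and a TreeNode(lhs, op, rhs) constructor; the column name and values are made up for illustration.

import org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree.LogicalOperator;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree.TreeNode;

// Illustrative construction of a (dt >= "2021-01" AND dt < "2021-02") condition
// that could be passed as the TreeNode argument of verifyPlan above.
public class FilterPlanConditionExample {
  static TreeNode buildDateRangeCondition() {
    LeafNode lower = new LeafNode();
    lower.keyName = "dt";
    lower.operator = Operator.GREATERTHANOREQUALTO;
    lower.value = "2021-01";

    LeafNode upper = new LeafNode();
    upper.keyName = "dt";
    upper.operator = Operator.LESSTHAN;
    upper.value = "2021-02";

    // LeafNode extends TreeNode, so the two leaves can be combined with AND.
    return new TreeNode(lower, LogicalOperator.AND, upper);
  }
}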
Use of org.apache.hadoop.hive.metastore.parser.ExpressionTree in project hive by apache.
The class HBaseStore, method getPartitionsByExpr.
@Override
public boolean getPartitionsByExpr(String dbName, String tblName, byte[] expr, String defaultPartitionName,
    short maxParts, List<Partition> result) throws TException {
  // Turn the serialized expression into an ExpressionTree; null means it could not be parsed.
  final ExpressionTree exprTree = PartFilterExprUtil.makeExpressionTree(expressionProxy, expr);
  dbName = HiveStringUtils.normalizeIdentifier(dbName);
  tblName = HiveStringUtils.normalizeIdentifier(tblName);
  Table table = getTable(dbName, tblName);
  boolean commit = false;
  openTransaction();
  try {
    if (exprTree == null) {
      // Fall back to pruning partition names with the raw expression and fetching
      // those partitions; the flag reports whether unknown partitions may be included.
      List<String> partNames = new LinkedList<String>();
      boolean hasUnknownPartitions = getPartitionNamesPrunedByExprNoTxn(table, expr,
          defaultPartitionName, maxParts, partNames);
      result.addAll(getPartitionsByNames(dbName, tblName, partNames));
      return hasUnknownPartitions;
    } else {
      return getPartitionsByExprInternal(dbName, tblName, exprTree, maxParts, result);
    }
  } finally {
    commitOrRoleBack(commit);
  }
}
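The boolean returned by getPartitionsByExpr tells the caller whether the result may include partitions that could not be evaluated against the expression (for instance when the name-pruning fallback above is taken). A hedged sketch of a caller preserving that contract; the helper name is illustrative, and producing the serialized expression bytes is out of scope here.

import java.util.List;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.thrift.TException;

// Hypothetical caller; "store" can be any RawStore implementation such as HBaseStore.
public class PartitionsByExprExample {
  // Returns true if the result may contain partitions the store could not
  // evaluate against the expression, mirroring getPartitionsByExpr's contract.
  static boolean fetchPartitions(RawStore store, String db, String table,
      byte[] serializedExpr, String defaultPartitionName, List<Partition> result)
      throws TException {
    // maxParts = -1 asks for all matching partitions; matches are appended to result.
    return store.getPartitionsByExpr(db, table, serializedExpr,
        defaultPartitionName, (short) -1, result);
  }
}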