
Example 1 with ScanMarker

Use of org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker in project hive by apache.

From the class TestHBaseFilterPlanUtil, method testgetComparedMarker:

/**
   * Test function that finds greater/lesser marker
   */
@Test
public void testgetComparedMarker() {
    ScanMarker l;
    ScanMarker r;
    // equal markers
    l = new ScanMarker("1", INCLUSIVE, "int");
    r = new ScanMarker("1", INCLUSIVE, "int");
    assertFirstGreater(l, r);
    l = new ScanMarker("1", !INCLUSIVE, "int");
    r = new ScanMarker("1", !INCLUSIVE, "int");
    assertFirstGreater(l, r);
    assertFirstGreater(null, null);
    // create an l that is greater because of the inclusive flag
    l = new ScanMarker("1", !INCLUSIVE, "int");
    // the rule for null vs non-null is different
    // non-null is both smaller and greater than null
    Assert.assertEquals(l, ScanPlan.getComparedMarker(l, null, true));
    Assert.assertEquals(l, ScanPlan.getComparedMarker(null, l, true));
    Assert.assertEquals(l, ScanPlan.getComparedMarker(l, null, false));
    Assert.assertEquals(l, ScanPlan.getComparedMarker(null, l, false));
    // create l that is greater because of the bytes
    l = new ScanMarker("2", INCLUSIVE, "int");
    r = new ScanMarker("1", INCLUSIVE, "int");
    assertFirstGreater(l, r);
}
Also used: ScanMarker (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker), Test (org.junit.Test)
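
The test relies on a private assertFirstGreater helper that is not shown on this page. A minimal sketch of what it presumably asserts, inferred from the calls and comments above (the name is taken from the test, but the body below is an assumption, not code copied from the Hive repository):

// Hypothetical reconstruction of the assertFirstGreater helper; it would live inside
// TestHBaseFilterPlanUtil next to the test methods and reuse the imports listed above.
private void assertFirstGreater(ScanMarker big, ScanMarker small) {
    // getComparedMarker(a, b, true) is expected to pick the greater of the two markers,
    // and getComparedMarker(a, b, false) the smaller one, in either argument order.
    Assert.assertEquals(big, ScanPlan.getComparedMarker(big, small, true));
    Assert.assertEquals(big, ScanPlan.getComparedMarker(small, big, true));
    Assert.assertEquals(small, ScanPlan.getComparedMarker(big, small, false));
    Assert.assertEquals(small, ScanPlan.getComparedMarker(small, big, false));
}

When both markers are equal, as in the first calls of the test, every branch trivially holds, so the helper doubles as an equality check.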

Example 2 with ScanMarker

Use of org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker in project hive by apache.

From the class TestHBaseFilterPlanUtil, method testScanPlanAnd:

/**
   * Test ScanPlan AND operation
   */
@Test
public void testScanPlanAnd() {
    ScanPlan l = new ScanPlan();
    ScanPlan r = new ScanPlan();
    l.setStartMarker("a", "int", "10", INCLUSIVE);
    r.setStartMarker("a", "int", "10", INCLUSIVE);
    ScanPlan res;
    // both equal
    res = l.and(r).getPlans().get(0);
    Assert.assertEquals(new ScanMarker("10", INCLUSIVE, "int"), res.markers.get("a").startMarker);
    // add equal end markers as well, and test AND again
    l.setEndMarker("a", "int", "20", INCLUSIVE);
    r.setEndMarker("a", "int", "20", INCLUSIVE);
    res = l.and(r).getPlans().get(0);
    Assert.assertEquals(new ScanMarker("10", INCLUSIVE, "int"), res.markers.get("a").startMarker);
    Assert.assertEquals(new ScanMarker("20", INCLUSIVE, "int"), res.markers.get("a").endMarker);
    l.setStartMarker("a", "int", "10", !INCLUSIVE);
    l.setEndMarker("a", "int", "20", INCLUSIVE);
    r.setStartMarker("a", "int", "10", INCLUSIVE);
    r.setEndMarker("a", "int", "15", INCLUSIVE);
    res = l.and(r).getPlans().get(0);
    // start of l is greater, end of r is smaller
    Assert.assertEquals(l.markers.get("a").startMarker, res.markers.get("a").startMarker);
    Assert.assertEquals(r.markers.get("a").endMarker, res.markers.get("a").endMarker);
}
Also used: ScanMarker (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker), MultiScanPlan (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.MultiScanPlan), ScanPlan (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan), Test (org.junit.Test)
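
The last AND above intersects the half-open range (10, 20] from l with [10, 15] from r, keeping the greater start marker and the smaller end marker, i.e. (10, 15]. A few extra lines that could hypothetically be appended to the same test body spell this out with fresh plans; the concrete values are illustrative assumptions, not part of the Hive test:

    // Hypothetical extra check: intersecting [10, 20] with [12, 15] should keep the
    // greater start (12) and the smaller end (15), i.e. the range [12, 15].
    ScanPlan p1 = new ScanPlan();
    ScanPlan p2 = new ScanPlan();
    p1.setStartMarker("a", "int", "10", INCLUSIVE);
    p1.setEndMarker("a", "int", "20", INCLUSIVE);
    p2.setStartMarker("a", "int", "12", INCLUSIVE);
    p2.setEndMarker("a", "int", "15", INCLUSIVE);
    ScanPlan merged = p1.and(p2).getPlans().get(0);
    Assert.assertEquals(new ScanMarker("12", INCLUSIVE, "int"), merged.markers.get("a").startMarker);
    Assert.assertEquals(new ScanMarker("15", INCLUSIVE, "int"), merged.markers.get("a").endMarker);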

Example 3 with ScanMarker

Use of org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker in project hive by apache.

From the class TestHBaseFilterPlanUtil, method testTreeNodePlan:

/**
   * Test plan generation from TreeNode
   *
   * @throws MetaException
   */
@Test
public void testTreeNodePlan() throws MetaException {
    final String KEY = "k1";
    final String VAL1 = "10";
    final String VAL2 = "11";
    LeafNode l = new LeafNode();
    l.keyName = KEY;
    l.value = VAL1;
    final ScanMarker DEFAULT_SCANMARKER = null;
    List<FieldSchema> parts = new ArrayList<FieldSchema>();
    parts.add(new FieldSchema("k1", "int", null));
    LeafNode r = new LeafNode();
    r.keyName = KEY;
    r.value = VAL2;
    TreeNode tn = new TreeNode(l, LogicalOperator.AND, r);
    // verify plan for - k1 >= '10' and k1 < '11'
    l.operator = Operator.GREATERTHANOREQUALTO;
    r.operator = Operator.LESSTHAN;
    verifyPlan(tn, parts, KEY, new ScanMarker(VAL1, INCLUSIVE, "int"), new ScanMarker(VAL2, !INCLUSIVE, "int"));
    // verify plan for - k1 >= '10' and k1 > '11'
    l.operator = Operator.GREATERTHANOREQUALTO;
    r.operator = Operator.GREATERTHAN;
    verifyPlan(tn, parts, KEY, new ScanMarker(VAL2, !INCLUSIVE, "int"), DEFAULT_SCANMARKER);
    // verify plan for - k1 >= '10' or k1 > '11'
    tn = new TreeNode(l, LogicalOperator.OR, r);
    ExpressionTree e = new ExpressionTree();
    e.setRootForTest(tn);
    PlanResult planRes = HBaseFilterPlanUtil.getFilterPlan(e, parts);
    Assert.assertEquals(2, planRes.plan.getPlans().size());
    Assert.assertEquals(false, planRes.hasUnsupportedCondition);
    // verify plan for - k1 >= '10' and (k1 >= '10' or k1 > '11')
    TreeNode tn2 = new TreeNode(l, LogicalOperator.AND, tn);
    e = new ExpressionTree();
    e.setRootForTest(tn2);
    planRes = HBaseFilterPlanUtil.getFilterPlan(e, parts);
    Assert.assertEquals(2, planRes.plan.getPlans().size());
    Assert.assertEquals(false, planRes.hasUnsupportedCondition);
    // verify plan for (k1 >= '10' and (k1 >= '10' or k1 > '11')) or k1 LIKE '10'
    // the LIKE leaf adds a third plan; the assertions below expect three plans and hasUnsupportedCondition to be false
    LeafNode klike = new LeafNode();
    klike.keyName = KEY;
    klike.value = VAL1;
    klike.operator = Operator.LIKE;
    TreeNode tn3 = new TreeNode(tn2, LogicalOperator.OR, klike);
    e = new ExpressionTree();
    e.setRootForTest(tn3);
    planRes = HBaseFilterPlanUtil.getFilterPlan(e, parts);
    Assert.assertEquals(3, planRes.plan.getPlans().size());
    Assert.assertEquals(false, planRes.hasUnsupportedCondition);
}
Also used: ScanMarker (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker), PlanResult (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.PlanResult), TreeNode (org.apache.hadoop.hive.metastore.parser.ExpressionTree.TreeNode), LeafNode (org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList (java.util.ArrayList), ExpressionTree (org.apache.hadoop.hive.metastore.parser.ExpressionTree), Test (org.junit.Test)
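
Both this test and the next one call a private verifyPlan helper that the page does not show. A plausible minimal reconstruction, inferred only from how the helper is called above (the overloads and assertion details are assumptions rather than the actual Hive source):

// Sketch of verifyPlan: build an ExpressionTree from the node, ask HBaseFilterPlanUtil
// for a filter plan over the given partition schema, then check the start/end markers
// recorded for one key and the hasUnsupportedCondition flag.
private void verifyPlan(TreeNode node, List<FieldSchema> parts, String keyName,
        ScanMarker expectedStart, ScanMarker expectedEnd) throws MetaException {
    // the 5-argument form is assumed to expect a fully supported condition
    verifyPlan(node, parts, keyName, expectedStart, expectedEnd, false);
}

private void verifyPlan(TreeNode node, List<FieldSchema> parts, String keyName,
        ScanMarker expectedStart, ScanMarker expectedEnd,
        boolean hasUnsupportedCondition) throws MetaException {
    ExpressionTree e = null;
    if (node != null) {
        e = new ExpressionTree();
        e.setRootForTest(node);
    }
    PlanResult planRes = HBaseFilterPlanUtil.getFilterPlan(e, parts);
    Assert.assertEquals(hasUnsupportedCondition, planRes.hasUnsupportedCondition);
    Assert.assertEquals(1, planRes.plan.getPlans().size());
    ScanPlan sp = planRes.plan.getPlans().get(0);
    if (expectedStart == null) {
        Assert.assertTrue(sp.markers.get(keyName) == null || sp.markers.get(keyName).startMarker == null);
    } else {
        Assert.assertEquals(expectedStart, sp.markers.get(keyName).startMarker);
    }
    if (expectedEnd == null) {
        Assert.assertTrue(sp.markers.get(keyName) == null || sp.markers.get(keyName).endMarker == null);
    } else {
        Assert.assertEquals(expectedEnd, sp.markers.get(keyName).endMarker);
    }
}

A null expected marker is read as "no bound on that side", which matches how DEFAULT_SCANMARKER is used in the tests.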

Example 4 with ScanMarker

Use of org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker in project hive by apache.

From the class TestHBaseFilterPlanUtil, method testLeafNodePlan:

/**
   * Test plan generation from LeafNode
   *
   * @throws MetaException
   */
@Test
public void testLeafNodePlan() throws MetaException {
    final String KEY = "k1";
    final String VAL = "v1";
    final String OTHERKEY = "k2";
    LeafNode l = new LeafNode();
    l.keyName = KEY;
    l.value = VAL;
    final ScanMarker DEFAULT_SCANMARKER = null;
    List<FieldSchema> parts = new ArrayList<FieldSchema>();
    parts.add(new FieldSchema(KEY, "int", null));
    parts.add(new FieldSchema(OTHERKEY, "int", null));
    l.operator = Operator.EQUALS;
    verifyPlan(l, parts, KEY, new ScanMarker(VAL, INCLUSIVE, "int"), new ScanMarker(VAL, INCLUSIVE, "int"));
    l.operator = Operator.GREATERTHAN;
    verifyPlan(l, parts, KEY, new ScanMarker(VAL, !INCLUSIVE, "int"), DEFAULT_SCANMARKER);
    l.operator = Operator.GREATERTHANOREQUALTO;
    verifyPlan(l, parts, KEY, new ScanMarker(VAL, INCLUSIVE, "int"), DEFAULT_SCANMARKER);
    l.operator = Operator.LESSTHAN;
    verifyPlan(l, parts, KEY, DEFAULT_SCANMARKER, new ScanMarker(VAL, !INCLUSIVE, "int"));
    l.operator = Operator.LESSTHANOREQUALTO;
    verifyPlan(l, parts, KEY, DEFAULT_SCANMARKER, new ScanMarker(VAL, INCLUSIVE, "int"));
    // verify against the other key (OTHERKEY): the condition on the first key yields no
    // start or end markers for k2, and hasUnsupportedCondition is expected to be false
    l.operator = Operator.EQUALS;
    verifyPlan(l, parts, OTHERKEY, DEFAULT_SCANMARKER, DEFAULT_SCANMARKER, false);
    // if the tree is null, it should return the equivalent of a full scan, and true
    // for 'has unsupported condition'
    verifyPlan(null, parts, KEY, DEFAULT_SCANMARKER, DEFAULT_SCANMARKER, true);
}
Also used: ScanMarker (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker), LeafNode (org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList (java.util.ArrayList), Test (org.junit.Test)
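
For reference, the first EQUALS check above can be written directly against the public getFilterPlan API instead of going through verifyPlan, using only calls that already appear in the testTreeNodePlan example (a hedged sketch; it assumes the context of the test body right after l.operator is set to EQUALS):

    // Hypothetical inline version of the k1 = 'v1' check: an EQUALS condition is expected
    // to yield the same inclusive marker as both start and end of the scan range, and
    // (assuming the short verifyPlan overload expects no unsupported condition) a false flag.
    ExpressionTree e = new ExpressionTree();
    e.setRootForTest(l);
    PlanResult planRes = HBaseFilterPlanUtil.getFilterPlan(e, parts);
    Assert.assertEquals(false, planRes.hasUnsupportedCondition);
    ScanPlan sp = planRes.plan.getPlans().get(0);
    Assert.assertEquals(new ScanMarker(VAL, INCLUSIVE, "int"), sp.markers.get(KEY).startMarker);
    Assert.assertEquals(new ScanMarker(VAL, INCLUSIVE, "int"), sp.markers.get(KEY).endMarker);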

Aggregations

ScanMarker (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan.ScanMarker): 4 uses
Test (org.junit.Test): 4 uses
ArrayList (java.util.ArrayList): 2 uses
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2 uses
LeafNode (org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode): 2 uses
MultiScanPlan (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.MultiScanPlan): 1 use
PlanResult (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.PlanResult): 1 use
ScanPlan (org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan): 1 use
ExpressionTree (org.apache.hadoop.hive.metastore.parser.ExpressionTree): 1 use
TreeNode (org.apache.hadoop.hive.metastore.parser.ExpressionTree.TreeNode): 1 use