Example 1 with GenericUDFOPEqualOrLessThan

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan in project hive by apache, from class TestAccumuloPredicateHandler, method rangeLessThanOrEqual:

@Test
public void rangeLessThanOrEqual() throws SerDeException {
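    // rid <= 'aaa'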
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
    assertNotNull(node);
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    Collection<Range> ranges = handler.getRanges(conf, columnMapper);
    assertEquals(1, ranges.size());
    Range range = ranges.iterator().next();
    assertTrue(range.isStartKeyInclusive());
    assertFalse(range.isEndKeyInclusive());
    assertTrue(range.contains(new Key(new Text("aaa"))));
    assertTrue(range.afterEndKey(new Key(new Text("ccccc"))));
    assertTrue(range.contains(new Key(new Text("aa"))));
    assertTrue(range.afterEndKey(new Key(new Text("aab"))));
    assertFalse(range.afterEndKey(new Key(new Text("aaa"))));
}
Also used: org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan, org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc, org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc, org.apache.hadoop.io.Text, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.accumulo.core.data.Range, org.apache.accumulo.core.data.Key, org.junit.Test
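For reference, the range these assertions describe can be built directly against Accumulo's API. A minimal sketch, assuming the handler encodes rid <= 'aaa' as an unbounded start with an exclusive end key of "aaa\0" (the same convention Example 3's comment spells out for its upper bound); this is not the handler's actual construction code:

// Equivalent Range under the assumed convention: (-inf, "aaa\0"), end key exclusive.
Range expected = new Range(null, true, new Key(new Text("aaa\0")), false);
assertTrue(expected.contains(new Key(new Text("aaa"))));    // the boundary row still matches
assertTrue(expected.afterEndKey(new Key(new Text("aab")))); // the next row is past the end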

Example 2 with GenericUDFOPEqualOrLessThan

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan in project hive by apache, from class TestAccumuloPredicateHandler, method testIteratorIgnoreRowIDFields:

@Test
public void testIteratorIgnoreRowIDFields() {
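    // rid <= 'aaa' (the rowID column; such predicates become ranges, not iterators)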
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
    assertNotNull(node);
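    // rid > 'bbb' (also on the rowID column)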
    ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb");
    List<ExprNodeDesc> children2 = Lists.newArrayList();
    children2.add(column2);
    children2.add(constant2);
    ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2);
    assertNotNull(node2);
    List<ExprNodeDesc> bothFilters = Lists.newArrayList();
    bothFilters.add(node);
    bothFilters.add(node2);
    ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
    String filterExpr = SerializationUtilities.serializeExpression(both);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    try {
        List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper);
        assertEquals(0, iterators.size());
    } catch (SerDeException e) {
        fail(StringUtils.stringifyException(e));
    }
}
Also used: org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan, org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan, org.apache.accumulo.core.client.IteratorSetting, org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc, org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.hadoop.hive.serde2.SerDeException, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd, org.junit.Test
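The column/constant/children boilerplate above recurs in every example on this page. A hypothetical helper (not part of the Hive test class) could collapse each comparison to one line; it mirrors the tests' habit of passing the column's TypeInfo as the function's return type, although a comparison strictly yields a boolean:

// Illustrative helper, not Hive API: build "col <op> value" as an expression tree.
private static ExprNodeGenericFuncDesc compare(TypeInfo type, String col, Object value, GenericUDF op) {
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(new ExprNodeColumnDesc(type, col, null, false));
    children.add(new ExprNodeConstantDesc(type, value));
    // TypeInfoFactory.booleanTypeInfo would be the stricter return type here.
    return new ExprNodeGenericFuncDesc(type, op, children);
}

With it, this example's two predicates reduce to:

ExprNodeDesc node = compare(TypeInfoFactory.stringTypeInfo, "rid", "aaa", new GenericUDFOPEqualOrLessThan());
ExprNodeDesc node2 = compare(TypeInfoFactory.stringTypeInfo, "rid", "bbb", new GenericUDFOPGreaterThan());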

Example 3 with GenericUDFOPEqualOrLessThan

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan in project hive by apache, from class TestAccumuloPredicateHandler, method testRowRangeIntersection:

@Test
public void testRowRangeIntersection() throws SerDeException {
    // rowId >= 'f'
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrGreaterThan(), children);
    assertNotNull(node);
    // rowId <= 'm'
    ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "m");
    List<ExprNodeDesc> children2 = Lists.newArrayList();
    children2.add(column2);
    children2.add(constant2);
    ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children2);
    assertNotNull(node2);
    List<ExprNodeDesc> bothFilters = Lists.newArrayList();
    bothFilters.add(node);
    bothFilters.add(node2);
    ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
    String filterExpr = SerializationUtilities.serializeExpression(both);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    // Should make ['f', 'm\0')
    List<Range> ranges = handler.getRanges(conf, columnMapper);
    assertEquals(1, ranges.size());
    assertEquals(new Range(new Key("f"), true, new Key("m\0"), false), ranges.get(0));
}
Also used: org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan, org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc, org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.accumulo.core.data.Range, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan, org.apache.accumulo.core.data.Key, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd, org.junit.Test
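The same intersection can be reproduced with Accumulo's Range API alone. A minimal sketch, assuming Range.clip (which returns the overlap of two ranges) and the "append \0, exclusive end" convention noted above; the handler's real merging logic may differ:

// rid >= 'f' as an Accumulo range: ["f", +inf)
Range geF = new Range(new Key("f"), true, null, false);
// rid <= 'm' under the assumed convention: (-inf, "m\0")
Range leM = new Range(null, true, new Key("m\0"), false);
// Their overlap is exactly the range the test expects.
assertEquals(new Range(new Key("f"), true, new Key("m\0"), false), geF.clip(leM));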

Example 4 with GenericUDFOPEqualOrLessThan

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan in project hive by apache, from class TestAccumuloPredicateHandler, method testIgnoreIteratorPushdown:

@Test
public void testIgnoreIteratorPushdown() throws TooManyAccumuloColumnsException {
    // Override what's placed in the Configuration by setup()
    conf = new JobConf();
    List<String> columnNames = Arrays.asList("field1", "field2", "rid");
    List<TypeInfo> columnTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
    conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,string");
    String columnMappingStr = "cf:f1,cf:f2,:rowID";
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
    columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(), columnNames, columnTypes);
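    // field1 <= 'aaa'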
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
    assertNotNull(node);
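    // field2 > 5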
    ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field2", null, false);
    ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
    List<ExprNodeDesc> children2 = Lists.newArrayList();
    children2.add(column2);
    children2.add(constant2);
    ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2);
    assertNotNull(node2);
    List<ExprNodeDesc> bothFilters = Lists.newArrayList();
    bothFilters.add(node);
    bothFilters.add(node2);
    ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
    String filterExpr = SerializationUtilities.serializeExpression(both);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    conf.setBoolean(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY, false);
    try {
        List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper);
        assertEquals(0, iterators.size());
    } catch (Exception e) {
        fail(StringUtils.stringifyException(e));
    }
}
Also used: org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan, org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan, org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc, org.apache.hadoop.hive.serde2.typeinfo.TypeInfo, org.apache.hadoop.hive.accumulo.serde.TooManyAccumuloColumnsException, org.apache.hadoop.hive.serde2.SerDeException, org.apache.accumulo.core.client.IteratorSetting, org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.hive.accumulo.columns.ColumnMapper, org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd, org.junit.Test
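What this test exercises is the configuration switch itself: with AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY set to false, no iterators may be produced even for non-rowID predicates. A hedged sketch of such a guard, illustrative rather than the actual AccumuloPredicateHandler source (the true-by-default fallback is an assumption):

// Illustrative guard, not Hive's implementation: drop all server-side iterators
// when pushdown has been switched off in the job configuration.
static List<IteratorSetting> iteratorsIfEnabled(JobConf conf, List<IteratorSetting> built) {
    if (!conf.getBoolean(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY, true)) {
        return Collections.emptyList(); // pushdown disabled: Hive filters rows client-side
    }
    return built;
}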

Example 5 with GenericUDFOPEqualOrLessThan

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan in project hive by apache, from class TestOrcSplitElimination, method testFooterExternalCacheImpl:

private void testFooterExternalCacheImpl(boolean isPpd) throws IOException {
    ObjectInspector inspector = createIO();
    writeFile(inspector, testFilePath);
    writeFile(inspector, testFilePath2);
    GenericUDF udf = new GenericUDFOPEqualOrLessThan();
    List<ExprNodeDesc> childExpr = Lists.newArrayList();
    createTestSarg(inspector, udf, childExpr);
    setupExternalCacheConfig(isPpd, testFilePath + "," + testFilePath2);
    // Get the base values w/o cache.
    conf.setBoolean(ConfVars.HIVE_ORC_MS_FOOTER_CACHE_ENABLED.varname, false);
    OrcInputFormatForTest.clearLocalCache();
    OrcInputFormat in0 = new OrcInputFormat();
    InputSplit[] originals = in0.getSplits(conf, -1);
    assertEquals(10, originals.length);
    HashSet<FsWithHash> originalHs = new HashSet<>();
    for (InputSplit original : originals) {
        originalHs.add(new FsWithHash((FileSplit) original));
    }
    // Populate the cache.
    conf.setBoolean(ConfVars.HIVE_ORC_MS_FOOTER_CACHE_ENABLED.varname, true);
    OrcInputFormatForTest in = new OrcInputFormatForTest();
    OrcInputFormatForTest.clearLocalCache();
    OrcInputFormatForTest.caches.resetCounts();
    OrcInputFormatForTest.caches.cache.clear();
    InputSplit[] splits = in.getSplits(conf, -1);
    // Puts, gets, hits, unused, unused.
    @SuppressWarnings("static-access")
    AtomicInteger[] counts = {
        in.caches.putCount,
        isPpd ? in.caches.getByExprCount : in.caches.getCount,
        isPpd ? in.caches.getHitByExprCount : in.caches.getHitCount,
        isPpd ? in.caches.getCount : in.caches.getByExprCount,
        isPpd ? in.caches.getHitCount : in.caches.getHitByExprCount };
    verifySplits(originalHs, splits);
    verifyCallCounts(counts, 2, 2, 0);
    assertEquals(2, OrcInputFormatForTest.caches.cache.size());
    // Verify we can get from cache.
    OrcInputFormatForTest.clearLocalCache();
    OrcInputFormatForTest.caches.resetCounts();
    splits = in.getSplits(conf, -1);
    verifySplits(originalHs, splits);
    verifyCallCounts(counts, 0, 2, 2);
    // Verify ORC SARG still works.
    OrcInputFormatForTest.clearLocalCache();
    OrcInputFormatForTest.caches.resetCounts();
    childExpr.set(1, new ExprNodeConstantDesc(5));
    conf.set("hive.io.filter.expr.serialized", SerializationUtilities.serializeExpression(new ExprNodeGenericFuncDesc(inspector, udf, childExpr)));
    splits = in.getSplits(conf, -1);
    InputSplit[] filtered = { originals[0], originals[4], originals[5], originals[9] };
    originalHs = new HashSet<>();
    for (InputSplit original : filtered) {
        originalHs.add(new FsWithHash((FileSplit) original));
    }
    verifySplits(originalHs, splits);
    verifyCallCounts(counts, 0, 2, 2);
    // Verify corrupted cache value gets replaced.
    OrcInputFormatForTest.clearLocalCache();
    OrcInputFormatForTest.caches.resetCounts();
    Map.Entry<Long, MockExternalCaches.MockItem> e = OrcInputFormatForTest.caches.cache.entrySet().iterator().next();
    Long key = e.getKey();
    byte[] someData = new byte[8];
    ByteBuffer toCorrupt = e.getValue().data;
    System.arraycopy(toCorrupt.array(), toCorrupt.arrayOffset(), someData, 0, someData.length);
    toCorrupt.putLong(0, 0L);
    splits = in.getSplits(conf, -1);
    verifySplits(originalHs, splits);
    if (!isPpd) {
        // Recovery is not implemented yet for PPD path.
        ByteBuffer restored = OrcInputFormatForTest.caches.cache.get(key).data;
        byte[] newData = new byte[someData.length];
        System.arraycopy(restored.array(), restored.arrayOffset(), newData, 0, newData.length);
        assertArrayEquals(someData, newData);
    }
}
Also used: org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan, org.apache.hadoop.mapred.FileSplit, org.apache.hadoop.hive.ql.plan.ExprNodeDesc, org.apache.hadoop.mapred.InputSplit, java.util.HashSet, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector, org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc, org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc, java.nio.ByteBuffer, org.apache.hadoop.hive.ql.udf.generic.GenericUDF, java.util.concurrent.atomic.AtomicInteger, java.util.Map, java.util.concurrent.ConcurrentHashMap, java.util.HashMap
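The serialized filter expression set on "hive.io.filter.expr.serialized" is what OrcInputFormat turns into a SearchArgument for split elimination. For comparison, a hedged sketch of a "column <= 5" predicate built directly with the SearchArgument builder from Hive's storage-api; the column name "userid" is a placeholder (createTestSarg's actual column is not shown in this listing), and builder signatures vary across Hive versions:

// Directly building a "<= 5" SARG; SearchArgumentFactory and PredicateLeaf live in
// org.apache.hadoop.hive.ql.io.sarg. The column name is illustrative.
SearchArgument sarg = SearchArgumentFactory.newBuilder()
    .startAnd()
    .lessThanEquals("userid", PredicateLeaf.Type.LONG, 5L)
    .end()
    .build();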

Aggregations

org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc: 17 usages
org.apache.hadoop.hive.ql.plan.ExprNodeDesc: 17 usages
org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc: 17 usages
org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan: 17 usages
org.junit.Test: 16 usages
org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc: 14 usages
org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd: 12 usages
org.apache.accumulo.core.data.Range: 9 usages
org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan: 9 usages
java.util.HashMap: 8 usages
java.util.ArrayList: 7 usages
org.apache.hadoop.hive.ql.lib.DefaultGraphWalker: 7 usages
org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher: 7 usages
org.apache.hadoop.hive.ql.lib.Dispatcher: 7 usages
org.apache.hadoop.hive.ql.lib.GraphWalker: 7 usages
org.apache.hadoop.hive.ql.lib.Node: 7 usages
org.apache.hadoop.hive.ql.parse.SemanticException: 7 usages
java.util.List: 6 usages
org.apache.accumulo.core.data.Key: 5 usages
org.apache.hadoop.hive.ql.udf.generic.GenericUDF: 4 usages