
Example 46 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.

From the class TestGet, method testDynamicFilter.

@Test
public void testDynamicFilter() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    String localPath = conf.get("hbase.local.dir") + File.separator + "jars" + File.separator;
    File jarFile = new File(localPath, "MockFilter.jar");
    jarFile.delete();
    assertFalse("Should be deleted: " + jarFile.getPath(), jarFile.exists());
    ClientProtos.Get getProto1 = ClientProtos.Get.parseFrom(Base64.decode(PB_GET));
    ClientProtos.Get getProto2 = ClientProtos.Get.parseFrom(Base64.decode(PB_GET_WITH_FILTER_LIST));
    try {
        ProtobufUtil.toGet(getProto1);
        fail("Should not be able to load the filter class");
    } catch (IOException ioe) {
        assertTrue(ioe.getCause() instanceof ClassNotFoundException);
    }
    try {
        ProtobufUtil.toGet(getProto2);
        fail("Should not be able to load the filter class");
    } catch (IOException ioe) {
        assertTrue(ioe.getCause() instanceof InvocationTargetException);
        InvocationTargetException ite = (InvocationTargetException) ioe.getCause();
        assertTrue(ite.getTargetException() instanceof DeserializationException);
    }
    FileOutputStream fos = new FileOutputStream(jarFile);
    fos.write(Base64.decode(MOCK_FILTER_JAR));
    fos.close();
    Get get1 = ProtobufUtil.toGet(getProto1);
    assertEquals("test.MockFilter", get1.getFilter().getClass().getName());
    Get get2 = ProtobufUtil.toGet(getProto2);
    assertTrue(get2.getFilter() instanceof FilterList);
    List<Filter> filters = ((FilterList) get2.getFilter()).getFilters();
    assertEquals(3, filters.size());
    assertEquals("test.MockFilter", filters.get(0).getClass().getName());
    assertEquals("my.MockFilter", filters.get(1).getClass().getName());
    assertTrue(filters.get(2) instanceof KeyOnlyFilter);
}
Also used : KeyOnlyFilter(org.apache.hadoop.hbase.filter.KeyOnlyFilter) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) FilterList(org.apache.hadoop.hbase.filter.FilterList) IOException(java.io.IOException) InvocationTargetException(java.lang.reflect.InvocationTargetException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) Filter(org.apache.hadoop.hbase.filter.Filter) FileOutputStream(java.io.FileOutputStream) File(java.io.File) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) Test(org.junit.Test)
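
For context, the PB_GET and PB_GET_WITH_FILTER_LIST constants hold Base64-encoded protobuf Get messages prepared ahead of time. An equivalent message can be produced by round-tripping a client-side Get; a minimal sketch, assuming the shaded ProtobufUtil and filter classes that are already on the classpath (the row key and filter choice are illustrative):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;

static Get roundTripGetWithFilterList() throws IOException {
    // Build a Get whose filter is a FilterList, serialize it to protobuf,
    // then deserialize it again. Deserialization instantiates the filter
    // classes reflectively, which is why the test above must stage
    // MockFilter.jar before ProtobufUtil.toGet can succeed.
    Get get = new Get(Bytes.toBytes("row1"));
    get.setFilter(new FilterList(new KeyOnlyFilter()));
    ClientProtos.Get proto = ProtobufUtil.toGet(get);
    return ProtobufUtil.toGet(proto);
}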

Example 47 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.

From the class TableResource, method getScanResource.

@Path("{scanspec: .*[*]$}")
public TableScanResource getScanResource(@Context final UriInfo uriInfo,
        @PathParam("scanspec") final String scanSpec,
        @HeaderParam("Accept") final String contentType,
        @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit,
        @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow,
        @DefaultValue("") @QueryParam(Constants.SCAN_END_ROW) String endRow,
        @DefaultValue("") @QueryParam(Constants.SCAN_COLUMN) List<String> column,
        @DefaultValue("1") @QueryParam(Constants.SCAN_MAX_VERSIONS) int maxVersions,
        @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
        @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
        @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
        // cacheBlocks needs its own query-parameter key; binding it to
        // SCAN_BATCH_SIZE would clash with the batchSize parameter above
        @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks,
        @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed,
        @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
    try {
        Filter filter = null;
        Scan tableScan = new Scan();
        if (scanSpec.indexOf('*') > 0) {
            String prefix = scanSpec.substring(0, scanSpec.indexOf('*'));
            byte[] prefixBytes = Bytes.toBytes(prefix);
            filter = new PrefixFilter(prefixBytes);
            if (startRow.isEmpty()) {
                tableScan.setStartRow(prefixBytes);
            }
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Query parameters  : Table Name = > " + this.table + " Start Row => " + startRow + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => " + maxVersions + " Batch Size => " + batchSize);
        }
        Table hTable = RESTServlet.getInstance().getTable(this.table);
        tableScan.setBatch(batchSize);
        tableScan.setMaxVersions(maxVersions);
        tableScan.setTimeRange(startTime, endTime);
        if (!startRow.isEmpty()) {
            tableScan.setStartRow(Bytes.toBytes(startRow));
        }
        tableScan.setStopRow(Bytes.toBytes(endRow));
        for (String csplit : column) {
            String[] familysplit = csplit.trim().split(":");
            if (familysplit.length == 2) {
                if (familysplit[1].length() > 0) {
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Scan family and column : " + familysplit[0] + "  " + familysplit[1]);
                    }
                    tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
                } else {
                    tableScan.addFamily(Bytes.toBytes(familysplit[0]));
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
                    }
                    tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
                }
            } else if (StringUtils.isNotEmpty(familysplit[0])) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Scan family : " + familysplit[0]);
                }
                tableScan.addFamily(Bytes.toBytes(familysplit[0]));
            }
        }
        FilterList filterList = null;
        if (StringUtils.isNotEmpty(filters)) {
            ParseFilter pf = new ParseFilter();
            Filter filterParam = pf.parseFilterString(filters);
            if (filter != null) {
                filterList = new FilterList(filter, filterParam);
            } else {
                filter = filterParam;
            }
        }
        if (filterList != null) {
            tableScan.setFilter(filterList);
        } else if (filter != null) {
            tableScan.setFilter(filter);
        }
        int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10);
        tableScan.setCaching(fetchSize);
        tableScan.setReversed(reversed);
        return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit);
    } catch (IOException exp) {
        servlet.getMetrics().incrementFailedScanRequests(1);
        processException(exp);
        LOG.warn(exp);
        return null;
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Scan(org.apache.hadoop.hbase.client.Scan) FilterList(org.apache.hadoop.hbase.filter.FilterList) IOException(java.io.IOException) Path(javax.ws.rs.Path)
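
The filters query parameter is handed to ParseFilter, which compiles a filter-language expression into a Filter tree; an AND of two operands comes back as a FilterList with MUST_PASS_ALL. A minimal sketch of that parsing step in isolation (the expression string is only an example):

import java.nio.charset.CharacterCodingException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.ParseFilter;

static Filter parseScanFilter() throws CharacterCodingException {
    // Equivalent to what getScanResource does with the SCAN_FILTER parameter.
    ParseFilter pf = new ParseFilter();
    Filter parsed = pf.parseFilterString("PrefixFilter('row-') AND KeyOnlyFilter()");
    // parsed is a FilterList(MUST_PASS_ALL) wrapping the two operand filters.
    return parsed;
}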

Example 48 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.

From the class VerifyReplication, method setRowPrefixFilter.

private static void setRowPrefixFilter(Scan scan, String rowPrefixes) {
    if (rowPrefixes != null && !rowPrefixes.isEmpty()) {
        String[] rowPrefixArray = rowPrefixes.split(",");
        Arrays.sort(rowPrefixArray);
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        for (String prefix : rowPrefixArray) {
            Filter filter = new PrefixFilter(Bytes.toBytes(prefix));
            filterList.addFilter(filter);
        }
        scan.setFilter(filterList);
        byte[] startPrefixRow = Bytes.toBytes(rowPrefixArray[0]);
        byte[] lastPrefixRow = Bytes.toBytes(rowPrefixArray[rowPrefixArray.length - 1]);
        setStartAndStopRows(scan, startPrefixRow, lastPrefixRow);
    }
}
Also used : PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Filter(org.apache.hadoop.hbase.filter.Filter) FilterList(org.apache.hadoop.hbase.filter.FilterList)
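
The setStartAndStopRows helper is not shown in this excerpt. Because the prefixes are sorted, the scan can be bounded below by the first prefix and above by a key just past the last one; a minimal sketch of that idea, assuming a simple last-byte increment (a reconstruction, not the project's actual helper):

import java.util.Arrays;
import org.apache.hadoop.hbase.client.Scan;

static void setStartAndStopRows(Scan scan, byte[] startPrefixRow, byte[] lastPrefixRow) {
    // Lower bound: the smallest prefix, inclusive.
    scan.setStartRow(startPrefixRow);
    // Upper bound: increment the final byte of the largest prefix so every
    // row sharing that prefix still sorts below the (exclusive) stop row.
    // Overflow of a 0xFF final byte is not handled in this sketch.
    byte[] stopRow = Arrays.copyOf(lastPrefixRow, lastPrefixRow.length);
    stopRow[stopRow.length - 1]++;
    scan.setStopRow(stopRow);
}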

Example 49 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.

From the class VisibilityController, method preScannerOpen.

@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan, RegionScanner s) throws IOException {
    if (!initialized) {
        throw new VisibilityControllerNotReadyException("VisibilityController not yet initialized!");
    }
    // Nothing to do if authorization is not enabled
    if (!authorizationEnabled) {
        return s;
    }
    Region region = e.getEnvironment().getRegion();
    Authorizations authorizations = null;
    try {
        authorizations = scan.getAuthorizations();
    } catch (DeserializationException de) {
        throw new IOException(de);
    }
    if (authorizations == null) {
        // No Authorizations present for this scan/Get!
        // For system tables other than "labels", just scan without visibility checks and
        // filtering. Checking visibility labels for the META and NAMESPACE tables is not needed.
        TableName table = region.getRegionInfo().getTable();
        if (table.isSystemTable() && !table.equals(LABELS_TABLE_NAME)) {
            return s;
        }
    }
    Filter visibilityLabelFilter = VisibilityUtils.createVisibilityLabelFilter(region, authorizations);
    if (visibilityLabelFilter != null) {
        Filter filter = scan.getFilter();
        if (filter != null) {
            scan.setFilter(new FilterList(filter, visibilityLabelFilter));
        } else {
            scan.setFilter(visibilityLabelFilter);
        }
    }
    return s;
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Filter(org.apache.hadoop.hbase.filter.Filter) Region(org.apache.hadoop.hbase.regionserver.Region) FilterList(org.apache.hadoop.hbase.filter.FilterList) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) IOException(java.io.IOException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)
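
Note that the two-argument FilterList constructor used here defaults to Operator.MUST_PASS_ALL, so a cell must satisfy both the caller's original filter and the visibility label filter. A standalone sketch of the same wrapping pattern (the concrete filters are placeholders for illustration):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

static void wrapScanFilter(Scan scan) {
    Filter extra = new KeyOnlyFilter(); // stands in for the visibility filter
    Filter existing = scan.getFilter(); // may be null if the caller set none
    // FilterList(f1, f2) uses MUST_PASS_ALL, so the caller's filter is
    // preserved and the new constraint is applied on top of it.
    scan.setFilter(existing != null ? new FilterList(existing, extra) : extra);
}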

Example 50 with FilterList

Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.

From the class AccessController, method internalPreRead.

private void internalPreRead(final ObserverContext<RegionCoprocessorEnvironment> c, final Query query, OpType opType) throws IOException {
    Filter filter = query.getFilter();
    // Don't wrap an AccessControlFilter
    if (filter instanceof AccessControlFilter) {
        return;
    }
    User user = getActiveUser(c);
    RegionCoprocessorEnvironment env = c.getEnvironment();
    Map<byte[], ? extends Collection<byte[]>> families = null;
    switch(opType) {
        case GET:
        case EXISTS:
            families = ((Get) query).getFamilyMap();
            break;
        case SCAN:
            families = ((Scan) query).getFamilyMap();
            break;
        default:
            throw new RuntimeException("Unhandled operation " + opType);
    }
    AuthResult authResult = permissionGranted(opType, user, env, families, Action.READ);
    Region region = getRegion(env);
    TableName table = getTableName(region);
    Map<ByteRange, Integer> cfVsMaxVersions = Maps.newHashMap();
    for (HColumnDescriptor hcd : region.getTableDesc().getFamilies()) {
        cfVsMaxVersions.put(new SimpleMutableByteRange(hcd.getName()), hcd.getMaxVersions());
    }
    if (!authResult.isAllowed()) {
        if (!cellFeaturesEnabled || compatibleEarlyTermination) {
            // Old behavior: grant the request when the user holds READ at the
            // family/qualifier level and rely on a wrapped filter to screen out
            // anything else. This costs extra work at read time (e.g. the
            // filter) but that's the price of backwards compatibility.
            if (hasFamilyQualifierPermission(user, Action.READ, env, families)) {
                authResult.setAllowed(true);
                authResult.setReason("Access allowed with filter");
                // Only wrap the filter if we are enforcing authorizations
                if (authorizationEnabled) {
                    Filter ourFilter = new AccessControlFilter(authManager, user, table, AccessControlFilter.Strategy.CHECK_TABLE_AND_CF_ONLY, cfVsMaxVersions);
                    // wrap any existing filter
                    if (filter != null) {
                        ourFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL, Lists.newArrayList(ourFilter, filter));
                    }
                    switch(opType) {
                        case GET:
                        case EXISTS:
                            ((Get) query).setFilter(ourFilter);
                            break;
                        case SCAN:
                            ((Scan) query).setFilter(ourFilter);
                            break;
                        default:
                            throw new RuntimeException("Unhandled operation " + opType);
                    }
                }
            }
        } else {
            // New behavior: Any access we might be granted is more fine-grained
            // than whole table or CF. Simply inject a filter and return what is
            // allowed. We will not throw an AccessDeniedException. This is a
            // behavioral change since 0.96.
            authResult.setAllowed(true);
            authResult.setReason("Access allowed with filter");
            // Only wrap the filter if we are enforcing authorizations
            if (authorizationEnabled) {
                Filter ourFilter = new AccessControlFilter(authManager, user, table, AccessControlFilter.Strategy.CHECK_CELL_DEFAULT, cfVsMaxVersions);
                // wrap any existing filter
                if (filter != null) {
                    ourFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL, Lists.newArrayList(ourFilter, filter));
                }
                switch(opType) {
                    case GET:
                    case EXISTS:
                        ((Get) query).setFilter(ourFilter);
                        break;
                    case SCAN:
                        ((Scan) query).setFilter(ourFilter);
                        break;
                    default:
                        throw new RuntimeException("Unhandled operation " + opType);
                }
            }
        }
    }
    logResult(authResult);
    if (authorizationEnabled && !authResult.isAllowed()) {
        throw new AccessDeniedException("Insufficient permissions for user '" + (user != null ? user.getShortName() : "null") + "' (table=" + table + ", action=READ)");
    }
}
Also used : AccessDeniedException(org.apache.hadoop.hbase.security.AccessDeniedException) User(org.apache.hadoop.hbase.security.User) ByteRange(org.apache.hadoop.hbase.util.ByteRange) SimpleMutableByteRange(org.apache.hadoop.hbase.util.SimpleMutableByteRange) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) FilterList(org.apache.hadoop.hbase.filter.FilterList) TableName(org.apache.hadoop.hbase.TableName) RegionCoprocessorEnvironment(org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Get(org.apache.hadoop.hbase.client.Get) Region(org.apache.hadoop.hbase.regionserver.Region) Scan(org.apache.hadoop.hbase.client.Scan)
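
Examples 48 and 50 exercise both FilterList operators: MUST_PASS_ONE behaves like a logical OR (a row matching any prefix survives), while MUST_PASS_ALL behaves like a logical AND (the access filter and the user's filter must both accept a cell). A short illustrative comparison (the prefixes are made up):

import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

// OR semantics: rows starting with either "a" or "b" pass.
FilterList anyPrefix = new FilterList(FilterList.Operator.MUST_PASS_ONE,
    new PrefixFilter(Bytes.toBytes("a")),
    new PrefixFilter(Bytes.toBytes("b")));

// AND semantics: every filter in the list must accept; only rows starting
// with "ab" satisfy both prefixes below.
FilterList allOf = new FilterList(FilterList.Operator.MUST_PASS_ALL,
    new PrefixFilter(Bytes.toBytes("a")),
    new PrefixFilter(Bytes.toBytes("ab")));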

Aggregations

FilterList (org.apache.hadoop.hbase.filter.FilterList) 68
Filter (org.apache.hadoop.hbase.filter.Filter) 36
Scan (org.apache.hadoop.hbase.client.Scan) 16
QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter) 10
SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter) 10
TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) 10
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator) 9
Test (org.junit.Test) 8
ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig) 7
ArrayList (java.util.ArrayList) 7
FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter) 7
Transaction (org.apache.tephra.Transaction) 7
IOException (java.io.IOException) 6
PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter) 6
Result (org.apache.hadoop.hbase.client.Result) 5
PageFilter (org.apache.hadoop.hbase.filter.PageFilter) 5
Cell (org.apache.hadoop.hbase.Cell) 4
TableName (org.apache.hadoop.hbase.TableName) 4
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner) 4
FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) 4