Example 21 with Scan

Use of org.apache.hadoop.hbase.client.Scan in the Apache HBase project.

From class AccessControlLists, method loadAll().

/**
   * Load all permissions from the region server holding {@code _acl_},
   * primarily intended for testing purposes.
   */
static Map<byte[], ListMultimap<String, TablePermission>> loadAll(Configuration conf) throws IOException {
    Map<byte[], ListMultimap<String, TablePermission>> allPerms = new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);
    // do a full scan of _acl_, filtering on only first table region rows
    Scan scan = new Scan();
    scan.addFamily(ACL_LIST_FAMILY);
    // TODO: Pass in a Connection rather than create one each time.
    try (Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(ACL_TABLE_NAME);
        ResultScanner scanner = table.getScanner(scan)) {
        for (Result row : scanner) {
            ListMultimap<String, TablePermission> resultPerms = parsePermissions(row.getRow(), row);
            allPerms.put(row.getRow(), resultPerms);
        }
    }
    return allPerms;
}
Also used: ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), Table(org.apache.hadoop.hbase.client.Table), Connection(org.apache.hadoop.hbase.client.Connection), Scan(org.apache.hadoop.hbase.client.Scan), TreeMap(java.util.TreeMap), ArrayListMultimap(com.google.common.collect.ArrayListMultimap), ListMultimap(com.google.common.collect.ListMultimap), Result(org.apache.hadoop.hbase.client.Result)
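
The same family-restricted full scan can be reproduced outside of AccessControlLists. Below is a minimal, self-contained sketch using only the standard HBase client calls shown above; the table name "acl" and family "l" are placeholders for illustration, not the real _acl_ schema.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyScanExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // Placeholder table and family names; substitute your own schema.
        TableName tableName = TableName.valueOf("acl");
        byte[] family = Bytes.toBytes("l");
        Scan scan = new Scan();
        // Restrict the full-table scan to a single column family, as loadAll() does.
        scan.addFamily(family);
        try (Connection connection = ConnectionFactory.createConnection(conf);
                Table table = connection.getTable(tableName);
                ResultScanner scanner = table.getScanner(scan)) {
            for (Result row : scanner) {
                System.out.println("row: " + Bytes.toStringBinary(row.getRow())
                        + ", cells: " + row.size());
            }
        }
    }
}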

Example 22 with Scan

Use of org.apache.hadoop.hbase.client.Scan in the Apache HBase project.

From class HBaseTestingUtility, method startMiniHBaseCluster().

/**
   * Starts up a mini HBase cluster. Usually used after a call to
   * {@link #startMiniCluster(int, int)} when doing a stepped startup of clusters.
   * Usually you won't want this; you'll usually want {@link #startMiniCluster()}.
   * @param numMasters number of masters to start
   * @param numSlaves number of region servers to start
   * @param masterClass master implementation to use
   * @param regionserverClass region server implementation to use
   * @param create whether to create a new root or data directory path;
   * overwrites it if it already exists
   * @param withWALDir whether to create a separate WAL root directory
   * @return Reference to the mini HBase cluster.
   * @throws IOException
   * @throws InterruptedException
   * @see #startMiniCluster()
   */
public MiniHBaseCluster startMiniHBaseCluster(final int numMasters, final int numSlaves, Class<? extends HMaster> masterClass, Class<? extends MiniHBaseCluster.MiniHBaseClusterRegionServer> regionserverClass, boolean create, boolean withWALDir) throws IOException, InterruptedException {
    // Now do the mini hbase cluster.  Set the hbase.rootdir in config.
    createRootDir(create);
    if (withWALDir) {
        createWALRootDir();
    }
    // Set the hbase.fs.tmp.dir config to make sure that we have some default value. This is
    // for tests that do not read hbase-defaults.xml
    setHBaseFsTmpDir();
    // Make the master wait until exactly this number of region servers have connected.
    if (conf.getInt(ServerManager.WAIT_ON_REGIONSERVERS_MINTOSTART, -1) == -1) {
        conf.setInt(ServerManager.WAIT_ON_REGIONSERVERS_MINTOSTART, numSlaves);
    }
    if (conf.getInt(ServerManager.WAIT_ON_REGIONSERVERS_MAXTOSTART, -1) == -1) {
        conf.setInt(ServerManager.WAIT_ON_REGIONSERVERS_MAXTOSTART, numSlaves);
    }
    Configuration c = new Configuration(this.conf);
    this.hbaseCluster = new MiniHBaseCluster(c, numMasters, numSlaves, masterClass, regionserverClass);
    // Don't leave here till we've done a successful scan of the hbase:meta
    Table t = getConnection().getTable(TableName.META_TABLE_NAME);
    ResultScanner s = t.getScanner(new Scan());
    while (s.next() != null) {
        continue;
    }
    s.close();
    t.close();
    // create immediately the hbaseAdmin
    getAdmin();
    LOG.info("Minicluster is up");
    return (MiniHBaseCluster) this.hbaseCluster;
}
Also used: HTable(org.apache.hadoop.hbase.client.HTable), Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), Configuration(org.apache.hadoop.conf.Configuration), Scan(org.apache.hadoop.hbase.client.Scan)
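
The noteworthy detail of this startup path is the blocking full scan of hbase:meta, used as a readiness check before the method returns. A minimal sketch of that check in isolation is shown below, assuming the HBase test artifacts are on the classpath; the class name MiniClusterMetaScanSketch is made up for illustration.

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public class MiniClusterMetaScanSketch {
    public static void main(String[] args) throws Exception {
        HBaseTestingUtility util = new HBaseTestingUtility();
        // Starts masters, region servers, and the backing mini DFS/ZooKeeper.
        util.startMiniCluster();
        try (Table meta = util.getConnection().getTable(TableName.META_TABLE_NAME);
                ResultScanner scanner = meta.getScanner(new Scan())) {
            // Draining a full scan of hbase:meta is a cheap way to confirm the
            // cluster can serve reads before the test proper begins.
            int rows = 0;
            while (scanner.next() != null) {
                rows++;
            }
            System.out.println("hbase:meta rows: " + rows);
        } finally {
            util.shutdownMiniCluster();
        }
    }
}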

Example 23 with Scan

Use of org.apache.hadoop.hbase.client.Scan in the Apache HBase project.

From class HBaseTestingUtility, method deleteTableData().

/**
   * Provide an existing table name to truncate.
   * Scans the table and issues a delete for each row read.
   * @param tableName existing table
   * @return a Table handle to the now-empty table
   * @throws IOException
   */
public Table deleteTableData(TableName tableName) throws IOException {
    Table table = getConnection().getTable(tableName);
    Scan scan = new Scan();
    // Delete every row the scan returns, then close the scanner.
    try (ResultScanner resScan = table.getScanner(scan)) {
        for (Result res : resScan) {
            Delete del = new Delete(res.getRow());
            table.delete(del);
        }
    }
    return table;
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete), HTable(org.apache.hadoop.hbase.client.HTable), Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), Scan(org.apache.hadoop.hbase.client.Scan), Result(org.apache.hadoop.hbase.client.Result)
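
deleteTableData issues one delete RPC per row, which is adequate for small test tables. A hedged variant is sketched below; the helper name deleteAllRows and the batchSize parameter are illustrative, not part of HBaseTestingUtility. It batches the deletes through Table.delete(List) and adds a FirstKeyOnlyFilter so the scan only transfers one cell per row.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;

public final class ScanAndDelete {
    private ScanAndDelete() {}

    /** Deletes every row of {@code table}, batching deletes instead of issuing one RPC per row. */
    public static void deleteAllRows(Table table, int batchSize) throws IOException {
        Scan scan = new Scan();
        // Only the first cell of each row is needed to learn the row key.
        scan.setFilter(new FirstKeyOnlyFilter());
        List<Delete> batch = new ArrayList<>(batchSize);
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result res : scanner) {
                batch.add(new Delete(res.getRow()));
                if (batch.size() >= batchSize) {
                    table.delete(batch);
                    batch.clear();
                }
            }
        }
        if (!batch.isEmpty()) {
            table.delete(batch);
        }
    }
}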

Example 24 with Scan

Use of org.apache.hadoop.hbase.client.Scan in the Apache HBase project.

From class HBaseTestingUtility, method getClosestRowBefore().

public Result getClosestRowBefore(Region r, byte[] row, byte[] family) throws IOException {
    // A small, reversed, single-row scan starting at 'row' yields the closest row at or before it.
    Scan scan = new Scan(row);
    scan.setSmall(true);
    scan.setCaching(1);
    scan.setReversed(true);
    scan.addFamily(family);
    try (RegionScanner scanner = r.getScanner(scan)) {
        List<Cell> cells = new ArrayList<>(1);
        scanner.next(cells);
        if (r.getRegionInfo().isMetaRegion() && !isTargetTable(row, cells.get(0))) {
            return null;
        }
        return Result.create(cells);
    }
}
Also used: RegionScanner(org.apache.hadoop.hbase.regionserver.RegionScanner), ArrayList(java.util.ArrayList), Scan(org.apache.hadoop.hbase.client.Scan)
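
The same lookup can also be done from the client side against a Table rather than a server-side Region. A minimal sketch follows, assuming the helper name closestRowBefore (not an HBase API method); the reversed small scan starting at the given row returns the closest row at or before it.

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public final class ClosestRowBefore {
    private ClosestRowBefore() {}

    /** Returns the closest row at or before {@code row} in the given family, or null if none. */
    public static Result closestRowBefore(Table table, byte[] row, byte[] family) throws IOException {
        // Starting a reversed scan at 'row' walks backwards through the row space.
        Scan scan = new Scan(row);
        scan.setReversed(true);
        scan.setSmall(true);
        scan.setCaching(1);
        scan.addFamily(family);
        try (ResultScanner scanner = table.getScanner(scan)) {
            return scanner.next();
        }
    }
}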

Example 25 with Scan

Use of org.apache.hadoop.hbase.client.Scan in the Apache HBase project.

From class HBaseTestingUtility, method getMetaTableRows().

/**
   * Returns all rows from the hbase:meta table for a given user table.
   *
   * @param tableName user table whose meta rows should be returned
   * @return row keys of the matching hbase:meta rows
   * @throws IOException When reading the rows fails.
   */
public List<byte[]> getMetaTableRows(TableName tableName) throws IOException {
    // TODO: Redo using MetaTableAccessor.
    Table t = getConnection().getTable(TableName.META_TABLE_NAME);
    List<byte[]> rows = new ArrayList<>();
    ResultScanner s = t.getScanner(new Scan());
    for (Result result : s) {
        HRegionInfo info = MetaTableAccessor.getHRegionInfo(result);
        if (info == null) {
            LOG.error("No region info for row " + Bytes.toString(result.getRow()));
            // TODO figure out what to do for this new hosed case.
            continue;
        }
        if (info.getTable().equals(tableName)) {
            LOG.info("getMetaTableRows: row -> " + Bytes.toStringBinary(result.getRow()) + info);
            rows.add(result.getRow());
        }
    }
    s.close();
    t.close();
    return rows;
}
Also used: HTable(org.apache.hadoop.hbase.client.HTable), Table(org.apache.hadoop.hbase.client.Table), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), ArrayList(java.util.ArrayList), Scan(org.apache.hadoop.hbase.client.Scan), Result(org.apache.hadoop.hbase.client.Result)
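
Scanning all of hbase:meta and filtering client-side reads every region row in the cluster. A narrower sketch is shown below, assuming the hbase:meta row-key layout of <table name>,<start key>,<region id> and illustrative class and method names; because a plain prefix can also match tables whose names share that prefix, callers may still want to verify the parsed region info.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public final class MetaRowsForTable {
    private MetaRowsForTable() {}

    /** Collects hbase:meta row keys whose row key starts with the given table name. */
    public static List<byte[]> metaRows(Connection connection, TableName tableName) throws IOException {
        List<byte[]> rows = new ArrayList<>();
        Scan scan = new Scan();
        // Meta region rows are keyed as "<table>,<start key>,<region id>...", so a prefix
        // scan on the table name narrows the scan instead of reading all of hbase:meta.
        scan.setRowPrefixFilter(tableName.getName());
        try (Table meta = connection.getTable(TableName.META_TABLE_NAME);
                ResultScanner scanner = meta.getScanner(scan)) {
            for (Result result : scanner) {
                rows.add(result.getRow());
            }
        }
        return rows;
    }
}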

Aggregations

Scan (org.apache.hadoop.hbase.client.Scan): 950
Test (org.junit.Test): 495
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 302
Result (org.apache.hadoop.hbase.client.Result): 286
Cell (org.apache.hadoop.hbase.Cell): 258
ArrayList (java.util.ArrayList): 238
Table (org.apache.hadoop.hbase.client.Table): 178
Put (org.apache.hadoop.hbase.client.Put): 161
BaseConnectionlessQueryTest (org.apache.phoenix.query.BaseConnectionlessQueryTest): 153
IOException (java.io.IOException): 135
TableName (org.apache.hadoop.hbase.TableName): 98
Delete (org.apache.hadoop.hbase.client.Delete): 95
Filter (org.apache.hadoop.hbase.filter.Filter): 95
KeyValue (org.apache.hadoop.hbase.KeyValue): 84
Connection (org.apache.hadoop.hbase.client.Connection): 81
SkipScanFilter (org.apache.phoenix.filter.SkipScanFilter): 78
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 78
RowKeyComparisonFilter (org.apache.phoenix.filter.RowKeyComparisonFilter): 72
Configuration (org.apache.hadoop.conf.Configuration): 51
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 51