Example 1 with HConnection

use of org.apache.hadoop.hbase.client.HConnection in project phoenix by apache.

the class PhoenixInputFormat, method generateSplits.

private List<InputSplit> generateSplits(final JobConf jobConf, final QueryPlan qplan, final List<KeyRange> splits, String query) throws IOException {
    Preconditions.checkNotNull(qplan);
    Preconditions.checkNotNull(splits);
    final List<InputSplit> psplits = Lists.newArrayListWithExpectedSize(splits.size());
    Path[] tablePaths = FileInputFormat.getInputPaths(ShimLoader.getHadoopShims().newJobContext(new Job(jobConf)));
    boolean splitByStats = jobConf.getBoolean(PhoenixStorageHandlerConstants.SPLIT_BY_STATS, false);
    setScanCacheSize(jobConf);
    // Adding Localization
    HConnection connection = HConnectionManager.createConnection(PhoenixConnectionUtil.getConfiguration(jobConf));
    RegionLocator regionLocator = connection.getRegionLocator(TableName.valueOf(qplan.getTableRef().getTable().getPhysicalName().toString()));
    RegionSizeCalculator sizeCalculator = new RegionSizeCalculator(regionLocator, connection.getAdmin());
    for (List<Scan> scans : qplan.getScans()) {
        PhoenixInputSplit inputSplit;
        HRegionLocation location = regionLocator.getRegionLocation(scans.get(0).getStartRow(), false);
        long regionSize = sizeCalculator.getRegionSize(location.getRegionInfo().getRegionName());
        String regionLocation = PhoenixStorageHandlerUtil.getRegionLocation(location, LOG);
        if (splitByStats) {
            for (Scan aScan : scans) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Split for  scan : " + aScan + "with scanAttribute : " + aScan.getAttributesMap() + " [scanCache, cacheBlock, scanBatch] : [" + aScan.getCaching() + ", " + aScan.getCacheBlocks() + ", " + aScan.getBatch() + "] and  regionLocation : " + regionLocation);
                }
                inputSplit = new PhoenixInputSplit(Lists.newArrayList(aScan), tablePaths[0], regionLocation, regionSize);
                inputSplit.setQuery(query);
                psplits.add(inputSplit);
            }
        } else {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Scan count[" + scans.size() + "] : " + Bytes.toStringBinary(scans.get(0).getStartRow()) + " ~ " + Bytes.toStringBinary(scans.get(scans.size() - 1).getStopRow()));
                LOG.debug("First scan : " + scans.get(0) + "with scanAttribute : " + scans.get(0).getAttributesMap() + " [scanCache, cacheBlock, scanBatch] : " + "[" + scans.get(0).getCaching() + ", " + scans.get(0).getCacheBlocks() + ", " + scans.get(0).getBatch() + "] and  regionLocation : " + regionLocation);
                for (int i = 0, limit = scans.size(); i < limit; i++) {
                    LOG.debug("EXPECTED_UPPER_REGION_KEY[" + i + "] : " + Bytes.toStringBinary(scans.get(i).getAttribute(BaseScannerRegionObserver.EXPECTED_UPPER_REGION_KEY)));
                }
            }
            inputSplit = new PhoenixInputSplit(scans, tablePaths[0], regionLocation, regionSize);
            inputSplit.setQuery(query);
            psplits.add(inputSplit);
        }
    }
    return psplits;
}
Also used : Path(org.apache.hadoop.fs.Path) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) RegionSizeCalculator(org.apache.hadoop.hbase.util.RegionSizeCalculator) HConnection(org.apache.hadoop.hbase.client.HConnection) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) Scan(org.apache.hadoop.hbase.client.Scan) Job(org.apache.hadoop.mapreduce.Job) InputSplit(org.apache.hadoop.mapred.InputSplit)
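
HConnection and HConnectionManager were deprecated in HBase 1.0 and removed in HBase 2.0; note also that the snippet above never closes its connection. Below is a minimal sketch of the same connection and region-lookup setup against the replacement Connection API. The table name and row key are hypothetical placeholders, and try-with-resources handles the cleanup.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionLookupSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // ConnectionFactory replaces HConnectionManager; Connection replaces HConnection.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             // An Admin handle, as RegionSizeCalculator requires alongside the locator.
             Admin admin = connection.getAdmin();
             RegionLocator locator = connection.getRegionLocator(TableName.valueOf("MY_TABLE"))) {
            // The same per-scan lookup the Phoenix code performs: resolve the region
            // hosting a given start row without forcing a cache reload.
            HRegionLocation location = locator.getRegionLocation(Bytes.toBytes("row-0"), false);
            System.out.println("Region " + location.getRegionInfo().getRegionNameAsString()
                    + " is on " + location.getHostnamePort());
        }
    }
}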

Example 2 with HConnection

use of org.apache.hadoop.hbase.client.HConnection in project cdap by caskdata.

the class HBaseCheck, method run.

@Override
public void run() {
    LOG.info("Checking HBase version.");
    HBaseTableUtil hBaseTableUtil;
    try {
        hBaseTableUtil = new HBaseTableUtilFactory(cConf).get();
    } catch (ProvisionException e) {
        throw new RuntimeException("Unsupported Hbase version " + HBaseVersion.getVersionString());
    }
    LOG.info("  HBase version successfully verified.");
    LOG.info("Checking HBase availability.");
    try (HConnection hbaseConnection = HConnectionManager.createConnection(hConf)) {
        hbaseConnection.listTables();
        LOG.info("  HBase availability successfully verified.");
    } catch (IOException e) {
        throw new RuntimeException("Unable to connect to HBase. " + "Please check that HBase is running and that the correct HBase configuration (hbase-site.xml) " + "and libraries are included in the CDAP master classpath.", e);
    }
    if (hConf.getBoolean("hbase.security.authorization", false)) {
        if (cConf.getBoolean(TxConstants.TransactionPruning.PRUNE_ENABLE)) {
            LOG.info("HBase authorization and transaction pruning are enabled. Checking global admin privileges for cdap.");
            try {
                boolean isGlobalAdmin = hBaseTableUtil.isGlobalAdmin(hConf);
                LOG.info("Global admin privileges check status: {}", isGlobalAdmin);
                if (isGlobalAdmin) {
                    return;
                }
                // if global admin was false then depend on the TX_PRUNE_ACL_CHECK value
                if (cConf.getBoolean(Constants.Startup.TX_PRUNE_ACL_CHECK, false)) {
                    LOG.info("Found {} to be set to true. Continuing with cdap master startup even though global admin check " + "returned false", Constants.Startup.TX_PRUNE_ACL_CHECK);
                    return;
                }
                StringBuilder builder = new StringBuilder("Transaction pruning is enabled and cdap does not have global " + "admin privileges in HBase. Global admin privileges for cdap " + "are required for transaction pruning. " + "Either disable transaction pruning, grant global admin " + "privilege to cdap in HBase, or override this " + "check by setting ");
                builder.append(Constants.Startup.TX_PRUNE_ACL_CHECK);
                builder.append(" in cdap-site.xml.");
                if (HBaseVersion.get().equals(HBaseVersion.Version.HBASE_96) || HBaseVersion.get().equals(HBaseVersion.Version.HBASE_98)) {
                    builder.append(" Detected HBase version ");
                    builder.append(HBaseVersion.get());
                    builder.append(" CDAP will not be able determine if it has global admin privilege in HBase.");
                    builder.append(" After granting global admin privilege please set ");
                    builder.append(Constants.Startup.TX_PRUNE_ACL_CHECK);
                }
                throw new RuntimeException(builder.toString());
            } catch (IOException e) {
                throw new RuntimeException("Unable to determines cdap privileges as global admin in HBase.");
            }
        }
    }
    LOG.info("Hbase authorization is disabled. Skipping global admin check for transaction pruning.");
}
Also used : ProvisionException(com.google.inject.ProvisionException) HBaseTableUtilFactory(co.cask.cdap.data2.util.hbase.HBaseTableUtilFactory) IOException(java.io.IOException) HBaseTableUtil(co.cask.cdap.data2.util.hbase.HBaseTableUtil) HConnection(org.apache.hadoop.hbase.client.HConnection)
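
The availability probe above ports directly to the non-deprecated API; here is a minimal sketch assuming the same hConf and LOG. Since creating a Connection can be lazy, the sketch calls Admin.listTableNames() (the replacement for HConnection.listTables()) to force a real round trip to the cluster.

try (Connection hbaseConnection = ConnectionFactory.createConnection(hConf);
     Admin admin = hbaseConnection.getAdmin()) {
    // A successful RPC, not just client-object creation, proves the cluster is reachable.
    admin.listTableNames();
    LOG.info("  HBase availability successfully verified.");
} catch (IOException e) {
    throw new RuntimeException("Unable to connect to HBase.", e);
}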

Example 3 with HConnection

use of org.apache.hadoop.hbase.client.HConnection in project hive by apache.

the class HBaseStorageHandler, method addHBaseDelegationToken.

private void addHBaseDelegationToken(Configuration conf) throws IOException {
    if (User.isHBaseSecurityEnabled(conf)) {
        HConnection conn = HConnectionManager.createConnection(conf);
        try {
            User curUser = User.getCurrent();
            Job job = new Job(conf);
            TokenUtil.addTokenForJob(conn, curUser, job);
        } catch (InterruptedException e) {
            throw new IOException("Error while obtaining hbase delegation token", e);
        } finally {
            conn.close();
        }
    }
}
Also used : User(org.apache.hadoop.hbase.security.User) IOException(java.io.IOException) Job(org.apache.hadoop.mapreduce.Job) HConnection(org.apache.hadoop.hbase.client.HConnection)
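
In HBase 1.x, HConnection extends the Connection interface and TokenUtil (org.apache.hadoop.hbase.security.token.TokenUtil) offers a Connection-based addTokenForJob overload, so the same token acquisition can be written without the deprecated types. A minimal sketch, with try-with-resources replacing the explicit finally block and the interrupt flag restored before rethrowing:

private void addHBaseDelegationToken(Configuration conf) throws IOException {
    if (User.isHBaseSecurityEnabled(conf)) {
        // The connection is closed automatically, even if token acquisition fails.
        try (Connection conn = ConnectionFactory.createConnection(conf)) {
            TokenUtil.addTokenForJob(conn, User.getCurrent(), Job.getInstance(conf));
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IOException("Error while obtaining hbase delegation token", e);
        }
    }
}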

Example 4 with HConnection

use of org.apache.hadoop.hbase.client.HConnection in project Cloud9 by lintool.

the class HBaseWordCountFetch, method run.

/**
   * Runs this tool.
   */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("table").hasArg().withDescription("HBase table name").create(TABLE));
    options.addOption(OptionBuilder.withArgName("word").hasArg().withDescription("word to look up").create(WORD));
    CommandLine cmdline;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        return -1;
    }
    if (!cmdline.hasOption(TABLE) || !cmdline.hasOption(WORD)) {
        System.out.println("args: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp(this.getClass().getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }
    String tableName = cmdline.getOptionValue(TABLE);
    String word = cmdline.getOptionValue(WORD);
    Configuration conf = getConf();
    conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
    Configuration hbaseConfig = HBaseConfiguration.create(conf);
    HConnection hbaseConnection = HConnectionManager.createConnection(hbaseConfig);
    HTableInterface table = hbaseConnection.getTable(tableName);
    Get get = new Get(Bytes.toBytes(word));
    Result result = table.get(get);
    byte[] value = result.getValue(HBaseWordCount.CF, HBaseWordCount.COUNT);
    // Guard against a missing row or column: getValue() returns null and
    // Bytes.toInt(null) would throw a NullPointerException.
    if (value == null) {
        LOG.info("word: " + word + " not found in table " + tableName);
    } else {
        LOG.info("word: " + word + ", count: " + Bytes.toInt(value));
    }
    // Release the table and connection before exiting.
    table.close();
    hbaseConnection.close();
    return 0;
}
Also used : Path(org.apache.hadoop.fs.Path) Options(org.apache.commons.cli.Options) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) Configuration(org.apache.hadoop.conf.Configuration) GnuParser(org.apache.commons.cli.GnuParser) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) HConnection(org.apache.hadoop.hbase.client.HConnection) Result(org.apache.hadoop.hbase.client.Result) HelpFormatter(org.apache.commons.cli.HelpFormatter) CommandLine(org.apache.commons.cli.CommandLine) Get(org.apache.hadoop.hbase.client.Get) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException)
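
With the HBase 1.0+ API, HTableInterface gives way to Table and HConnectionManager to ConnectionFactory. A minimal sketch of the same lookup, assuming the hbaseConfig, tableName, and word variables from the method above:

try (Connection conn = ConnectionFactory.createConnection(hbaseConfig);
     Table table = conn.getTable(TableName.valueOf(tableName))) {
    Result result = table.get(new Get(Bytes.toBytes(word)));
    byte[] value = result.getValue(HBaseWordCount.CF, HBaseWordCount.COUNT);
    if (value == null) {
        LOG.info("word: " + word + " not found");
    } else {
        LOG.info("word: " + word + ", count: " + Bytes.toInt(value));
    }
}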

Example 5 with HConnection

use of org.apache.hadoop.hbase.client.HConnection in project titan by thinkaurelius.

the class HBaseStorageSetup, method waitForConnection.

public static synchronized void waitForConnection(long timeout, TimeUnit timeoutUnit) {
    long before = System.currentTimeMillis();
    long after;
    long timeoutMS = TimeUnit.MILLISECONDS.convert(timeout, timeoutUnit);
    do {
        try {
            HConnection hc = HConnectionManager.createConnection(HBaseConfiguration.create());
            hc.close();
            after = System.currentTimeMillis();
            log.info("HBase server to started after about {} ms", after - before);
            return;
        } catch (IOException e) {
            log.info("Exception caught while waiting for the HBase server to start", e);
        }
        after = System.currentTimeMillis();
    } while (timeoutMS > after - before);
    after = System.currentTimeMillis();
    log.warn("HBase server did not start in {} ms", after - before);
}
Also used : IOException(java.io.IOException) HConnection(org.apache.hadoop.hbase.client.HConnection)
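
The same readiness probe can be sketched against the replacement API. Because creating a Connection may not contact the cluster by itself, this version (a sketch, not Titan's code) issues a cheap Admin call before declaring the server up, and reports the outcome to the caller:

public static synchronized boolean waitForConnection(long timeout, TimeUnit timeoutUnit) {
    long deadline = System.currentTimeMillis() + timeoutUnit.toMillis(timeout);
    do {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            // A successful RPC confirms the cluster answers, not merely that a client object exists.
            admin.listTableNames();
            return true;
        } catch (IOException e) {
            log.info("Exception caught while waiting for the HBase server to start", e);
        }
    } while (System.currentTimeMillis() < deadline);
    log.warn("HBase server did not start within {} ms", timeoutUnit.toMillis(timeout));
    return false;
}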

Aggregations

HConnection (org.apache.hadoop.hbase.client.HConnection): 6
IOException (java.io.IOException): 4
Path (org.apache.hadoop.fs.Path): 2
HRegionLocation (org.apache.hadoop.hbase.HRegionLocation): 2
Job (org.apache.hadoop.mapreduce.Job): 2
HBaseTableUtil (co.cask.cdap.data2.util.hbase.HBaseTableUtil): 1
HBaseTableUtilFactory (co.cask.cdap.data2.util.hbase.HBaseTableUtilFactory): 1
ProvisionException (com.google.inject.ProvisionException): 1
ServiceException (com.google.protobuf.ServiceException): 1
CommandLine (org.apache.commons.cli.CommandLine): 1
CommandLineParser (org.apache.commons.cli.CommandLineParser): 1
GnuParser (org.apache.commons.cli.GnuParser): 1
HelpFormatter (org.apache.commons.cli.HelpFormatter): 1
Options (org.apache.commons.cli.Options): 1
ParseException (org.apache.commons.cli.ParseException): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 1
ServerName (org.apache.hadoop.hbase.ServerName): 1
Get (org.apache.hadoop.hbase.client.Get): 1
HTableInterface (org.apache.hadoop.hbase.client.HTableInterface): 1