Example 26 with TableNotFoundException

Use of org.apache.hadoop.hbase.TableNotFoundException in project cdap by caskdata.

From the class HBaseTableFactory, method enableTable.

private void enableTable(HBaseDDLExecutor ddlExecutor, TableId tableId) throws IOException {
    try {
        TableName tableName = HTableNameConverter.toTableName(cConf.get(Constants.Dataset.TABLE_PREFIX), tableId);
        ddlExecutor.enableTableIfDisabled(tableName.getNamespaceAsString(), tableName.getQualifierAsString());
        LOG.debug("TMS Table {} has been enabled.", tableName);
    } catch (TableNotFoundException ex) {
        LOG.debug("TMS Table {} was not found. Skipping enable.", tableId, ex);
    } catch (TableNotDisabledException ex) {
        LOG.debug("TMS Table {} was already in enabled state.", tableId, ex);
    }
}
Also used: TableNotDisabledException (org.apache.hadoop.hbase.TableNotDisabledException), TableName (org.apache.hadoop.hbase.TableName), TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException)
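
The example treats both "table missing" and "table already enabled" as benign outcomes, logging and moving on. A minimal sketch of the same idempotent pattern against the plain HBase Admin API (the class and helper name are hypothetical; Admin.enableTable signals the two conditions with the same two exceptions):

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin;

public final class EnableIfDisabled {
    // Hypothetical helper: enable a table, treating "missing" and
    // "already enabled" as successful no-ops, like enableTable above.
    public static void enableQuietly(Admin admin, TableName table) throws IOException {
        try {
            admin.enableTable(table);
        } catch (TableNotFoundException e) {
            // Table does not exist; nothing to enable.
        } catch (TableNotDisabledException e) {
            // Table is already enabled; treat as success.
        }
    }
}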

Example 27 with TableNotFoundException

Use of org.apache.hadoop.hbase.TableNotFoundException in project hbase by apache.

From the class AsyncNonMetaRegionLocator, method onScanComplete.

private void onScanComplete(TableName tableName, LocateRequest req, List<Result> results, Throwable error) {
    if (error != null) {
        complete(tableName, req, null, error);
        return;
    }
    if (results.isEmpty()) {
        complete(tableName, req, null, new TableNotFoundException(tableName));
        return;
    }
    RegionLocations locs = MetaTableAccessor.getRegionLocations(results.get(0));
    if (LOG.isDebugEnabled()) {
        LOG.debug("The fetched location of '" + tableName + "', row='" + Bytes.toStringBinary(req.row) + "', locateType=" + req.locateType + " is " + locs);
    }
    if (locs == null || locs.getDefaultRegionLocation() == null) {
        complete(tableName, req, null, new IOException(String.format("No location found for '%s', row='%s', locateType=%s", tableName, Bytes.toStringBinary(req.row), req.locateType)));
        return;
    }
    HRegionLocation loc = locs.getDefaultRegionLocation();
    HRegionInfo info = loc.getRegionInfo();
    if (info == null) {
        complete(tableName, req, null, new IOException(String.format("HRegionInfo is null for '%s', row='%s', locateType=%s", tableName, Bytes.toStringBinary(req.row), req.locateType)));
        return;
    }
    if (!info.getTable().equals(tableName)) {
        complete(tableName, req, null, new TableNotFoundException("Table '" + tableName + "' was not found, got: '" + info.getTable() + "'"));
        return;
    }
    if (info.isSplit()) {
        complete(tableName, req, null, new RegionOfflineException("the only available region for the required row is a split parent," + " the daughters should be online soon: '" + info.getRegionNameAsString() + "'"));
        return;
    }
    if (info.isOffline()) {
        complete(tableName, req, null, new RegionOfflineException("the region is offline, could" + " be caused by a disable table call: '" + info.getRegionNameAsString() + "'"));
        return;
    }
    if (loc.getServerName() == null) {
        complete(tableName, req, null, new NoServerForRegionException(String.format("No server address listed for region '%s', row='%s', locateType=%s", info.getRegionNameAsString(), Bytes.toStringBinary(req.row), req.locateType)));
        return;
    }
    complete(tableName, req, loc, null);
}
Also used: HRegionInfo (org.apache.hadoop.hbase.HRegionInfo), TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException), RegionLocations (org.apache.hadoop.hbase.RegionLocations), HRegionLocation (org.apache.hadoop.hbase.HRegionLocation), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), IOException (java.io.IOException)
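
In this locator, an empty meta scan result becomes a TableNotFoundException handed to complete(...), which typically fails the caller's pending CompletableFuture. A minimal sketch of how a caller might unwrap that failure (the helper and the IllegalStateException rethrow are illustrative, not part of the HBase API):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import org.apache.hadoop.hbase.TableNotFoundException;

public final class AsyncLookupErrors {
    // join() wraps the failure passed to complete(...) in a CompletionException;
    // inspect the cause to distinguish "table missing" from other errors.
    static <T> T joinOrReport(CompletableFuture<T> future) {
        try {
            return future.join();
        } catch (CompletionException e) {
            if (e.getCause() instanceof TableNotFoundException) {
                throw new IllegalStateException("table does not exist", e.getCause());
            }
            throw e;
        }
    }
}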

Example 28 with TableNotFoundException

Use of org.apache.hadoop.hbase.TableNotFoundException in project hbase by apache.

From the class ImportTsv, method createSubmittableJob.

/**
   * Sets up the actual job.
   *
   * @param conf  The current configuration.
   * @param args  The command line parameters.
   * @return The newly created job.
   * @throws IOException When setting up the job fails.
   */
protected static Job createSubmittableJob(Configuration conf, String[] args) throws IOException, ClassNotFoundException {
    Job job = null;
    boolean isDryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false);
    try (Connection connection = ConnectionFactory.createConnection(conf)) {
        try (Admin admin = connection.getAdmin()) {
            // Support non-XML supported characters
            // by re-encoding the passed separator as a Base64 string.
            String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
            if (actualSeparator != null) {
                conf.set(SEPARATOR_CONF_KEY, Base64.encodeBytes(actualSeparator.getBytes()));
            }
            // See if a non-default Mapper was set
            String mapperClassName = conf.get(MAPPER_CONF_KEY);
            Class mapperClass = mapperClassName != null ? Class.forName(mapperClassName) : DEFAULT_MAPPER;
            TableName tableName = TableName.valueOf(args[0]);
            Path inputDir = new Path(args[1]);
            String jobName = conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName.getNameAsString());
            job = Job.getInstance(conf, jobName);
            job.setJarByClass(mapperClass);
            FileInputFormat.setInputPaths(job, inputDir);
            job.setInputFormatClass(TextInputFormat.class);
            job.setMapperClass(mapperClass);
            job.setMapOutputKeyClass(ImmutableBytesWritable.class);
            String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
            String[] columns = conf.getStrings(COLUMNS_CONF_KEY);
            if (StringUtils.isNotEmpty(conf.get(CREDENTIALS_LOCATION))) {
                String fileLoc = conf.get(CREDENTIALS_LOCATION);
                Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
                job.getCredentials().addAll(cred);
            }
            if (hfileOutPath != null) {
                if (!admin.tableExists(tableName)) {
                    LOG.warn(format("Table '%s' does not exist.", tableName));
                    if ("yes".equalsIgnoreCase(conf.get(CREATE_TABLE_CONF_KEY, "yes"))) {
                        // TODO: this is backwards. Instead of depending on the existence of a table,
                        // create a sane splits file for HFileOutputFormat based on data sampling.
                        createTable(admin, tableName, columns);
                        if (isDryRun) {
                            LOG.warn("Dry run: Table will be deleted at end of dry run.");
                            synchronized (ImportTsv.class) {
                                DRY_RUN_TABLE_CREATED = true;
                            }
                        }
                    } else {
                        String errorMsg = format("Table '%s' does not exist and '%s' is set to no.", tableName, CREATE_TABLE_CONF_KEY);
                        LOG.error(errorMsg);
                        throw new TableNotFoundException(errorMsg);
                    }
                }
                try (Table table = connection.getTable(tableName);
                    RegionLocator regionLocator = connection.getRegionLocator(tableName)) {
                    boolean noStrict = conf.getBoolean(NO_STRICT_COL_FAMILY, false);
                    // if no.strict is false then check column family
                    if (!noStrict) {
                        ArrayList<String> unmatchedFamilies = new ArrayList<>();
                        Set<String> cfSet = getColumnFamilies(columns);
                        HTableDescriptor tDesc = table.getTableDescriptor();
                        for (String cf : cfSet) {
                            if (tDesc.getFamily(Bytes.toBytes(cf)) == null) {
                                unmatchedFamilies.add(cf);
                            }
                        }
                        if (unmatchedFamilies.size() > 0) {
                            ArrayList<String> familyNames = new ArrayList<>();
                            for (HColumnDescriptor family : table.getTableDescriptor().getFamilies()) {
                                familyNames.add(family.getNameAsString());
                            }
                            String msg = "Column Families " + unmatchedFamilies + " specified in " + COLUMNS_CONF_KEY + " does not match with any of the table " + tableName + " column families " + familyNames + ".\n" + "To disable column family check, use -D" + NO_STRICT_COL_FAMILY + "=true.\n";
                            usage(msg);
                            System.exit(-1);
                        }
                    }
                    if (mapperClass.equals(TsvImporterTextMapper.class)) {
                        job.setMapOutputValueClass(Text.class);
                        job.setReducerClass(TextSortReducer.class);
                    } else {
                        job.setMapOutputValueClass(Put.class);
                        job.setCombinerClass(PutCombiner.class);
                        job.setReducerClass(PutSortReducer.class);
                    }
                    if (!isDryRun) {
                        Path outputDir = new Path(hfileOutPath);
                        FileOutputFormat.setOutputPath(job, outputDir);
                        HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
                    }
                }
            } else {
                if (!admin.tableExists(tableName)) {
                    String errorMsg = format("Table '%s' does not exist.", tableName);
                    LOG.error(errorMsg);
                    throw new TableNotFoundException(errorMsg);
                }
                if (mapperClass.equals(TsvImporterTextMapper.class)) {
                    usage(TsvImporterTextMapper.class.toString() + " should not be used for non bulkloading case. use " + TsvImporterMapper.class.toString() + " or custom mapper whose value type is Put.");
                    System.exit(-1);
                }
                if (!isDryRun) {
                    // No reducers. Just write straight to table. Call initTableReducerJob
                    // to set up the TableOutputFormat.
                    TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job);
                }
                job.setNumReduceTasks(0);
            }
            if (isDryRun) {
                job.setOutputFormatClass(NullOutputFormat.class);
                job.getConfiguration().setStrings("io.serializations", job.getConfiguration().get("io.serializations"), MutationSerialization.class.getName(), ResultSerialization.class.getName(), KeyValueSerialization.class.getName());
            }
            TableMapReduceUtil.addDependencyJars(job);
            TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), com.google.common.base.Function.class);
        }
    }
    return job;
}
Also used: ArrayList (java.util.ArrayList), TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException), Job (org.apache.hadoop.mapreduce.Job), Path (org.apache.hadoop.fs.Path), RegionLocator (org.apache.hadoop.hbase.client.RegionLocator), Table (org.apache.hadoop.hbase.client.Table), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), Connection (org.apache.hadoop.hbase.client.Connection), Admin (org.apache.hadoop.hbase.client.Admin), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), TableName (org.apache.hadoop.hbase.TableName), File (java.io.File), Credentials (org.apache.hadoop.security.Credentials)
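
createSubmittableJob throws TableNotFoundException when the target table is missing and table creation is disabled. A minimal sketch of exercising it through the ImportTsv tool entry point; the literal conf keys ("importtsv.columns", "importtsv.bulk.output") and the example table/paths are assumptions here, so prefer the ImportTsv constants (COLUMNS_CONF_KEY, BULK_OUTPUT_CONF_KEY) in real code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.ImportTsv;
import org.apache.hadoop.util.ToolRunner;

public final class ImportTsvDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Column spec is required; the bulk output path selects the HFile branch.
        conf.set("importtsv.columns", "HBASE_ROW_KEY,f:col1");
        conf.set("importtsv.bulk.output", "/tmp/hfiles");
        // args: <table> <input dir>. A missing table combined with create.table=no
        // is what triggers the TableNotFoundException thrown in the example above.
        int rc = ToolRunner.run(conf, new ImportTsv(), new String[] {"myTable", "/tmp/input"});
        System.exit(rc);
    }
}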

Example 29 with TableNotFoundException

Use of org.apache.hadoop.hbase.TableNotFoundException in project hbase by apache.

From the class DefaultVisibilityExpressionResolver, method init.

@Override
public void init() {
    // Read all the labels and their ordinals.
    // This scan should be done by a user with global_admin privileges; ensure that it works.
    Table labelsTable = null;
    Connection connection = null;
    try {
        connection = ConnectionFactory.createConnection(conf);
        try {
            labelsTable = connection.getTable(LABELS_TABLE_NAME);
        } catch (IOException e) {
            LOG.error("Error opening 'labels' table", e);
            return;
        }
        Scan scan = new Scan();
        scan.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL));
        scan.addColumn(LABELS_TABLE_FAMILY, LABEL_QUALIFIER);
        ResultScanner scanner = null;
        try {
            scanner = labelsTable.getScanner(scan);
            Result next = null;
            while ((next = scanner.next()) != null) {
                byte[] row = next.getRow();
                byte[] value = next.getValue(LABELS_TABLE_FAMILY, LABEL_QUALIFIER);
                labels.put(Bytes.toString(value), Bytes.toInt(row));
            }
        } catch (TableNotFoundException e) {
            // Table not found. So just return
            return;
        } catch (IOException e) {
            LOG.error("Error scanning 'labels' table", e);
        } finally {
            if (scanner != null)
                scanner.close();
        }
    } catch (IOException ioe) {
        LOG.error("Failed reading 'labels' tags", ioe);
        return;
    } finally {
        if (labelsTable != null) {
            try {
                labelsTable.close();
            } catch (IOException ioe) {
                LOG.warn("Error closing 'labels' table", ioe);
            }
        }
        if (connection != null)
            try {
                connection.close();
            } catch (IOException ioe) {
                LOG.warn("Failed close of temporary connection", ioe);
            }
    }
}
Also used: TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException), Authorizations (org.apache.hadoop.hbase.security.visibility.Authorizations), Table (org.apache.hadoop.hbase.client.Table), ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), Connection (org.apache.hadoop.hbase.client.Connection), Scan (org.apache.hadoop.hbase.client.Scan), IOException (java.io.IOException), Result (org.apache.hadoop.hbase.client.Result)
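
Most of init() is manual cleanup. A minimal sketch of the same labels scan rewritten with try-with-resources, which collapses the nested close() calls; it assumes the same statically imported constants as the example (LABELS_TABLE_NAME, LABELS_TABLE_FAMILY, LABEL_QUALIFIER, VisibilityUtils.SYSTEM_LABEL) and takes the labels map as a parameter:

import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.util.Bytes;

static void loadLabels(Configuration conf, Map<String, Integer> labels) throws IOException {
    Scan scan = new Scan();
    scan.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL));
    scan.addColumn(LABELS_TABLE_FAMILY, LABEL_QUALIFIER);
    // Connection, Table, and ResultScanner are all closed automatically,
    // replacing the nested finally blocks in the example above.
    try (Connection connection = ConnectionFactory.createConnection(conf);
        Table labelsTable = connection.getTable(LABELS_TABLE_NAME);
        ResultScanner scanner = labelsTable.getScanner(scan)) {
        for (Result next : scanner) {
            labels.put(Bytes.toString(next.getValue(LABELS_TABLE_FAMILY, LABEL_QUALIFIER)),
                Bytes.toInt(next.getRow()));
        }
    } catch (TableNotFoundException e) {
        // Labels table not created yet; leave the map empty, as the example does.
    }
}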

Example 30 with TableNotFoundException

Use of org.apache.hadoop.hbase.TableNotFoundException in project flink by apache.

From the class HBaseRowInputFormat, method connectToTable.

private void connectToTable() {
    if (this.conf == null) {
        this.conf = HBaseConfiguration.create();
    }
    try {
        Connection conn = ConnectionFactory.createConnection(conf);
        super.table = (HTable) conn.getTable(TableName.valueOf(tableName));
    } catch (TableNotFoundException tnfe) {
        LOG.error("The table " + tableName + " not found ", tnfe);
        throw new RuntimeException("HBase table '" + tableName + "' not found.", tnfe);
    } catch (IOException ioe) {
        LOG.error("Exception while creating connection to HBase.", ioe);
        throw new RuntimeException("Cannot create connection to HBase.", ioe);
    }
}
Also used: TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException), Connection (org.apache.hadoop.hbase.client.Connection), IOException (java.io.IOException)
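
connectToTable converts both checked failures into unchecked RuntimeExceptions so the input format fails fast during setup. A minimal sketch of the same catch-and-wrap pattern in isolation (the TableOpener class is hypothetical; like the example, it deliberately leaves the Connection open so it outlives the returned Table):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

public final class TableOpener {
    // Fail fast with an unchecked exception, preserving the original cause.
    // The Connection is intentionally not closed here: it must outlive the
    // returned Table, mirroring connectToTable above.
    public static Table openOrFail(Configuration conf, String tableName) {
        try {
            Connection conn = ConnectionFactory.createConnection(conf);
            return conn.getTable(TableName.valueOf(tableName));
        } catch (TableNotFoundException tnfe) {
            throw new RuntimeException("HBase table '" + tableName + "' not found.", tnfe);
        } catch (IOException ioe) {
            throw new RuntimeException("Cannot create connection to HBase.", ioe);
        }
    }
}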

Aggregations

TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException): 42 usages
IOException (java.io.IOException): 20 usages
TableName (org.apache.hadoop.hbase.TableName): 14 usages
TableNotEnabledException (org.apache.hadoop.hbase.TableNotEnabledException): 9 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 8 usages
Test (org.junit.Test): 8 usages
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 7 usages
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 6 usages
ServerName (org.apache.hadoop.hbase.ServerName): 6 usages
ArrayList (java.util.ArrayList): 5 usages
Connection (org.apache.hadoop.hbase.client.Connection): 5 usages
Table (org.apache.hadoop.hbase.client.Table): 5 usages
Path (org.apache.hadoop.fs.Path): 4 usages
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 4 usages
TableNotDisabledException (org.apache.hadoop.hbase.TableNotDisabledException): 4 usages
RegionLocator (org.apache.hadoop.hbase.client.RegionLocator): 4 usages
InterruptedIOException (java.io.InterruptedIOException): 3 usages
LinkedList (java.util.LinkedList): 3 usages
List (java.util.List): 3 usages
Map (java.util.Map): 3 usages