Search in sources :

Example 16 with TableNotFoundException

use of org.apache.hadoop.hbase.TableNotFoundException in project hbase by apache.

In class LoadIncrementalHFiles, the method doBulkLoad.

/**
   * Perform a bulk load of the given directory into the given
   * pre-existing table.  This method is not threadsafe.
   *
   * @param hfofDir the directory that was provided as the output path
   *   of a job using HFileOutputFormat
   * @param admin the Admin
   * @param table the table to load into
   * @param regionLocator region locator
   * @param silence true to ignore unmatched column families
   * @param copyFile always copy hfiles if true
   * @throws TableNotFoundException if table does not yet exist
   */
public void doBulkLoad(Path hfofDir, final Admin admin, Table table, RegionLocator regionLocator, boolean silence, boolean copyFile) throws TableNotFoundException, IOException {
    if (!admin.isTableAvailable(regionLocator.getName())) {
        throw new TableNotFoundException("Table " + table.getName() + " is not currently available.");
    }
    /*
     * Checking hfile format is a time-consuming operation, we should have an option to skip
     * this step when bulkloading millions of HFiles. See HBASE-13985.
     */
    boolean validateHFile = getConf().getBoolean("hbase.loadincremental.validate.hfile", true);
    if (!validateHFile) {
        LOG.warn("You are skipping HFiles validation, it might cause some data loss if files " + "are not correct. If you fail to read data from your table after using this " + "option, consider removing the files and bulkload again without this option. " + "See HBASE-13985");
    }
    // LQI queue does not need to be threadsafe -- all operations on this queue
    // happen in this thread
    Deque<LoadQueueItem> queue = new LinkedList<>();
    ExecutorService pool = null;
    SecureBulkLoadClient secureClient = null;
    try {
        prepareHFileQueue(hfofDir, table, queue, validateHFile, silence);
        if (queue.isEmpty()) {
            // BUGFIX: the original expression was
            //   "... directory " + hfofDir != null ? hfofDir.toUri() : "" + "..."
            // String concatenation (+) binds tighter than != and ?:, so the condition
            // evaluated as ("..." + hfofDir) != null — always true — and the trailing
            // text was swallowed by the else branch. Parenthesize the ternary so the
            // full message is logged and a null hfofDir is handled as intended.
            LOG.warn("Bulk load operation did not find any files to load in directory "
                + (hfofDir != null ? hfofDir.toUri().toString() : "")
                + ".  Does it contain files in subdirectories that correspond to column family names?");
            return;
        }
        pool = createExecutorService();
        secureClient = new SecureBulkLoadClient(table.getConfiguration(), table);
        // NOTE(review): retValue appears to be an instance field captured for run() — confirm.
        retValue = performBulkLoad(admin, table, regionLocator, queue, pool, secureClient, copyFile);
    } finally {
        // Always release the pool and any open queue items, even on failure.
        cleanup(admin, queue, pool, secureClient);
    }
}
Also used : TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) SecureBulkLoadClient(org.apache.hadoop.hbase.client.SecureBulkLoadClient) ExecutorService(java.util.concurrent.ExecutorService) LinkedList(java.util.LinkedList)

Example 17 with TableNotFoundException

use of org.apache.hadoop.hbase.TableNotFoundException in project hbase by apache.

In class LoadIncrementalHFiles, the method run.

/**
 * Drives a bulk load against {@code tableName}, loading either from the directory
 * {@code dirPath} (when non-null) or from the family-to-HFiles {@code map}.
 * Creates the table first when it is missing and auto-creation is enabled via
 * {@code CREATE_TABLE_CONF_KEY}; otherwise throws {@link TableNotFoundException}.
 *
 * @return the per-item results produced by the underlying bulk load
 */
public Map<LoadQueueItem, ByteBuffer> run(String dirPath, Map<byte[], List<Path>> map, TableName tableName) throws Exception {
    initialize();
    try (Connection connection = ConnectionFactory.createConnection(getConf());
        Admin admin = connection.getAdmin()) {
        if (!admin.tableExists(tableName)) {
            // Auto-create is only possible when loading from a directory.
            boolean autoCreate = dirPath != null
                && "yes".equalsIgnoreCase(getConf().get(CREATE_TABLE_CONF_KEY, "yes"));
            if (autoCreate) {
                this.createTable(tableName, dirPath, admin);
            } else {
                String msg = format("Table '%s' does not exist.", tableName);
                LOG.error(msg);
                throw new TableNotFoundException(msg);
            }
        }
        Path hfofDir = (dirPath == null) ? null : new Path(dirPath);
        try (Table table = connection.getTable(tableName);
            RegionLocator locator = connection.getRegionLocator(tableName)) {
            boolean silence = "yes".equalsIgnoreCase(getConf().get(IGNORE_UNMATCHED_CF_CONF_KEY, ""));
            boolean copyFiles = "yes".equalsIgnoreCase(getConf().get(ALWAYS_COPY_FILES, ""));
            // hfofDir is non-null exactly when dirPath is, so this branch matches the original.
            if (hfofDir != null) {
                doBulkLoad(hfofDir, admin, table, locator, silence, copyFiles);
            } else {
                doBulkLoad(map, admin, table, locator, silence, copyFiles);
            }
            return retValue;
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) Table(org.apache.hadoop.hbase.client.Table) Connection(org.apache.hadoop.hbase.client.Connection) Admin(org.apache.hadoop.hbase.client.Admin)

Example 18 with TableNotFoundException

use of org.apache.hadoop.hbase.TableNotFoundException in project hbase by apache.

In class LoadIncrementalHFiles, the method doBulkLoad (map overload).

/**
   * Perform a bulk load of the given directory into the given
   * pre-existing table.  This method is not threadsafe.
   *
   * @param map map of family to List of hfiles
   * @param admin the Admin
   * @param table the table to load into
   * @param regionLocator region locator
   * @param silence true to ignore unmatched column families
   * @param copyFile always copy hfiles if true
   * @throws TableNotFoundException if table does not yet exist
   */
public void doBulkLoad(Map<byte[], List<Path>> map, final Admin admin, Table table, RegionLocator regionLocator, boolean silence, boolean copyFile) throws TableNotFoundException, IOException {
    // Fail fast if the target table is not (yet) available.
    if (!admin.isTableAvailable(regionLocator.getName())) {
        throw new TableNotFoundException("Table " + table.getName() + " is not currently available.");
    }
    // LQI queue does not need to be threadsafe -- all operations on this queue
    // happen in this thread
    Deque<LoadQueueItem> queue = new LinkedList<>();
    ExecutorService pool = null;
    SecureBulkLoadClient secureClient = null;
    try {
        // Populate the queue from the caller-supplied family -> hfiles map.
        prepareHFileQueue(map, table, queue, silence);
        if (queue.isEmpty()) {
            // Nothing to do; not an error.
            LOG.warn("Bulk load operation did not get any files to load");
            return;
        }
        pool = createExecutorService();
        secureClient = new SecureBulkLoadClient(table.getConfiguration(), table);
        // NOTE(review): the break only exits the INNER loop, so `fs` (apparently an
        // instance field) ends up as the FileSystem of the first path of the LAST
        // map entry iterated. Presumably all paths share one FileSystem and any
        // path would do — confirm; otherwise this silently picks an arbitrary one.
        for (Map.Entry<byte[], List<Path>> entry : map.entrySet()) {
            for (Path p : entry.getValue()) {
                fs = p.getFileSystem(table.getConfiguration());
                break;
            }
        }
        retValue = performBulkLoad(admin, table, regionLocator, queue, pool, secureClient, copyFile);
    } finally {
        // Always release the pool and any outstanding queue items.
        cleanup(admin, queue, pool, secureClient);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) SecureBulkLoadClient(org.apache.hadoop.hbase.client.SecureBulkLoadClient) ExecutorService(java.util.concurrent.ExecutorService) List(java.util.List) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) Map(java.util.Map) HashMap(java.util.HashMap) TreeMap(java.util.TreeMap) LinkedList(java.util.LinkedList)

Example 19 with TableNotFoundException

use of org.apache.hadoop.hbase.TableNotFoundException in project hbase by apache.

In class TestMetaWithReplicas, the method testAccessingUnknownTables.

@Test
public void testAccessingUnknownTables() throws Exception {
    // NOTE(review): the original built a local Configuration copy and set
    // HConstants.USE_META_REPLICAS=true on it, but never used that copy —
    // the connection below comes from TEST_UTIL's own configuration, so the
    // two lines were dead code and have been removed. If the intent was to
    // test with meta replicas enabled, the conf must be wired into a
    // connection instead — confirm against the original test's purpose.
    // A Get against a table that does not exist must surface TableNotFoundException.
    Table table = TEST_UTIL.getConnection().getTable(TableName.valueOf(name.getMethodName()));
    Get get = new Get(Bytes.toBytes("foo"));
    try {
        table.get(get);
    } catch (TableNotFoundException t) {
        // Expected path: the table was never created.
        return;
    }
    fail("Expected TableNotFoundException");
}
Also used : TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) Configuration(org.apache.hadoop.conf.Configuration) Test(org.junit.Test)

Example 20 with TableNotFoundException

use of org.apache.hadoop.hbase.TableNotFoundException in project hbase by apache.

In class TestFlushSnapshotFromClient, the method testSnapshotFailsOnNonExistantTable.

@Test
public void testSnapshotFailsOnNonExistantTable() throws Exception {
    // make sure we don't fail on listing snapshots
    SnapshotTestingUtils.assertNoSnapshots(admin);
    // Probe for a table name that is guaranteed not to exist, appending '!'
    // until the descriptor lookup throws TableNotFoundException.
    TableName tableName = TableName.valueOf("_not_a_table");
    while (true) {
        try {
            admin.getTableDescriptor(tableName);
        } catch (TableNotFoundException e) {
            // Name is free — stop probing.
            break;
        }
        LOG.error("Table:" + tableName + " already exists, checking a new name");
        tableName = TableName.valueOf(tableName + "!");
    }
    // snapshot the non-existant table
    try {
        admin.snapshot("fail", tableName, SnapshotType.FLUSH);
        fail("Snapshot succeeded even though there is not table.");
    } catch (SnapshotCreationException e) {
        LOG.info("Correctly failed to snapshot a non-existant table:" + e.getMessage());
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) Test(org.junit.Test)

Aggregations

TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException)41 IOException (java.io.IOException)19 TableName (org.apache.hadoop.hbase.TableName)14 TableNotEnabledException (org.apache.hadoop.hbase.TableNotEnabledException)8 Test (org.junit.Test)8 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)7 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)7 ServerName (org.apache.hadoop.hbase.ServerName)6 ArrayList (java.util.ArrayList)5 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)5 Connection (org.apache.hadoop.hbase.client.Connection)5 Table (org.apache.hadoop.hbase.client.Table)5 Path (org.apache.hadoop.fs.Path)4 DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException)4 TableNotDisabledException (org.apache.hadoop.hbase.TableNotDisabledException)4 RegionLocator (org.apache.hadoop.hbase.client.RegionLocator)4 InterruptedIOException (java.io.InterruptedIOException)3 LinkedList (java.util.LinkedList)3 List (java.util.List)3 Map (java.util.Map)3