Search in sources:

Example 6 with TableNotEnabledException

Use of org.apache.hadoop.hbase.TableNotEnabledException in project incubator-atlas by apache.

Class HBaseStoreManager, method ensureColumnFamilyExists.

private void ensureColumnFamilyExists(String tableName, String columnFamily, int ttlInSeconds) throws BackendException {
    AdminMask adm = null;
    try {
        adm = getAdminInterface();
        HTableDescriptor desc = ensureTableExists(tableName, columnFamily, ttlInSeconds);
        Preconditions.checkNotNull(desc);
        HColumnDescriptor cf = desc.getFamily(columnFamily.getBytes());
        // Create our column family, if necessary
        if (cf == null) {
            try {
                if (!adm.isTableDisabled(tableName)) {
                    adm.disableTable(tableName);
                }
            } catch (TableNotEnabledException e) {
                logger.debug("Table {} already disabled", tableName);
            } catch (IOException e) {
                throw new TemporaryBackendException(e);
            }
            try {
                HColumnDescriptor cdesc = new HColumnDescriptor(columnFamily);
                setCFOptions(cdesc, ttlInSeconds);
                adm.addColumn(tableName, cdesc);
                logger.debug("Added HBase ColumnFamily {}, waiting for 1 sec. to propogate.", columnFamily);
                adm.enableTable(tableName);
            } catch (TableNotFoundException ee) {
                logger.error("TableNotFoundException", ee);
                throw new PermanentBackendException(ee);
            } catch (org.apache.hadoop.hbase.TableExistsException ee) {
                logger.debug("Swallowing exception {}", ee);
            } catch (IOException ee) {
                throw new TemporaryBackendException(ee);
            }
        }
    } finally {
        IOUtils.closeQuietly(adm);
    }
}
Also used : TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) TemporaryBackendException(com.thinkaurelius.titan.diskstorage.TemporaryBackendException) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) PermanentBackendException(com.thinkaurelius.titan.diskstorage.PermanentBackendException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException)
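
The Atlas snippet above is written against the HBase 1.x admin API (HTableDescriptor, HColumnDescriptor, String table names). For orientation only, here is a minimal sketch of the same idempotent disable / add-column-family / enable sequence against the HBase 2.x client API; it is not taken from the Atlas code, and the helper name, connection handling, and placeholder values are assumptions.

// Illustrative sketch, not from incubator-atlas: the same pattern with the HBase 2.x Admin API.
// Assumed imports: TableName, TableNotEnabledException, Admin, Connection, ColumnFamilyDescriptor,
// ColumnFamilyDescriptorBuilder and Bytes from the HBase client, plus java.io.IOException.
private static void addColumnFamilyIfMissing(Connection connection, TableName table,
                                             String family, int ttlInSeconds) throws IOException {
    try (Admin admin = connection.getAdmin()) {
        if (admin.getDescriptor(table).hasColumnFamily(Bytes.toBytes(family))) {
            // Nothing to do: the column family is already present.
            return;
        }
        try {
            // Older clusters require the table to be disabled before altering its families.
            admin.disableTable(table);
        } catch (TableNotEnabledException e) {
            // Already disabled, possibly by a concurrent client; safe to continue.
        }
        ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family))
                .setTimeToLive(ttlInSeconds)
                .build();
        admin.addColumnFamily(table, cf);
        admin.enableTable(table);
    }
}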

Example 7 with TableNotEnabledException

Use of org.apache.hadoop.hbase.TableNotEnabledException in project cdap by caskdata.

Class HBaseTableFactory, method disableTable.

private void disableTable(HBaseDDLExecutor ddlExecutor, TableId tableId) throws IOException {
    try {
        TableName tableName = HTableNameConverter.toTableName(cConf.get(Constants.Dataset.TABLE_PREFIX), tableId);
        ddlExecutor.disableTableIfEnabled(tableName.getNamespaceAsString(), tableName.getQualifierAsString());
        LOG.debug("TMS Table {} has been disabled.", tableId);
    } catch (TableNotFoundException ex) {
        LOG.debug("TMS Table {} was not found. Skipping disable.", tableId, ex);
    } catch (TableNotEnabledException ex) {
        LOG.debug("TMS Table {} was already in disabled state.", tableId, ex);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException)
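
CDAP's HBaseDDLExecutor.disableTableIfEnabled already hides the check-then-disable race shown above. A minimal sketch of equivalent logic against the plain HBase Admin API, assuming nothing beyond the standard client (the helper name and signature are illustrative, not CDAP's):

// Illustrative helper, not CDAP code: disable a table only if it is currently enabled,
// treating a concurrent disable (TableNotEnabledException) as success rather than an error.
private static void disableTableIfEnabled(Admin admin, TableName table) throws IOException {
    try {
        if (admin.isTableEnabled(table)) {
            admin.disableTable(table);
        }
    } catch (TableNotEnabledException e) {
        // The table was disabled between the check and the call; nothing left to do.
    }
}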

Example 8 with TableNotEnabledException

Use of org.apache.hadoop.hbase.TableNotEnabledException in project cdap by caskdata.

Class AbstractHBaseDataSetAdmin, method updateTable.

/**
   * Performs an update on a given HBase table. It will be updated if either its spec has
   * changed since the HBase table was created or updated, or if the CDAP version recorded
   * in the HTable descriptor is less than the current CDAP version.
   *
   * @param force forces update regardless of whether the table needs it.
   * @throws IOException If update failed.
   */
public void updateTable(boolean force) throws IOException {
    try (HBaseDDLExecutor ddlExecutor = ddlExecutorFactory.get()) {
        HTableDescriptor tableDescriptor;
        try (HBaseAdmin admin = new HBaseAdmin(hConf)) {
            tableDescriptor = tableUtil.getHTableDescriptor(admin, tableId);
        }
        // update any table properties if necessary
        boolean needUpdate = needsUpdate(tableDescriptor) || force;
        // Get the cdap version from the table
        ProjectInfo.Version version = HBaseTableUtil.getVersion(tableDescriptor);
        if (!needUpdate && version.compareTo(ProjectInfo.getVersion()) >= 0) {
            // If neither the table spec nor the cdap version have changed, no need to update
            LOG.info("Table '{}' has not changed and its version '{}' is same or greater " + "than current CDAP version '{}'", tableId, version, ProjectInfo.getVersion());
            return;
        }
        // create a new descriptor for the table update
        HTableDescriptorBuilder newDescriptor = tableUtil.buildHTableDescriptor(tableDescriptor);
        // Generate the coprocessor jar
        CoprocessorJar coprocessorJar = createCoprocessorJar();
        Location jarLocation = coprocessorJar.getJarLocation();
        // Check if coprocessor upgrade is needed
        Map<String, HBaseTableUtil.CoprocessorInfo> coprocessorInfo = HBaseTableUtil.getCoprocessorInfo(tableDescriptor);
        // For all required coprocessors, check if they need to be upgraded.
        for (Class<? extends Coprocessor> coprocessor : coprocessorJar.getCoprocessors()) {
            HBaseTableUtil.CoprocessorInfo info = coprocessorInfo.get(coprocessor.getName());
            if (info != null) {
                // The same coprocessor has been configured, check by the file name to see if they are the same.
                if (!jarLocation.getName().equals(info.getPath().getName())) {
                    // Remove old one and add the new one.
                    newDescriptor.removeCoprocessor(info.getClassName());
                    addCoprocessor(newDescriptor, coprocessor, coprocessorJar.getPriority(coprocessor));
                }
            } else {
                // The coprocessor is missing from the table, add it.
                addCoprocessor(newDescriptor, coprocessor, coprocessorJar.getPriority(coprocessor));
            }
        }
        // Removes all old coprocessors
        Set<String> coprocessorNames = ImmutableSet.copyOf(Iterables.transform(coprocessorJar.coprocessors, CLASS_TO_NAME));
        for (String remove : Sets.difference(coprocessorInfo.keySet(), coprocessorNames)) {
            newDescriptor.removeCoprocessor(remove);
        }
        HBaseTableUtil.setVersion(newDescriptor);
        HBaseTableUtil.setTablePrefix(newDescriptor, cConf);
        LOG.info("Updating table '{}'...", tableId);
        TableName tableName = HTableNameConverter.toTableName(cConf.get(Constants.Dataset.TABLE_PREFIX), tableId);
        boolean enableTable = false;
        try {
            ddlExecutor.disableTableIfEnabled(tableName.getNamespaceAsString(), tableName.getQualifierAsString());
            enableTable = true;
        } catch (TableNotEnabledException e) {
            // If the table is in the cdap_system namespace, enable it regardless so that it can be used later. See CDAP-7324
            if (isSystemTable()) {
                enableTable = true;
            } else {
                LOG.debug("Table '{}' was not enabled before update and will not be enabled after update.", tableId);
            }
        }
        tableUtil.modifyTable(ddlExecutor, newDescriptor.build());
        if (enableTable) {
            LOG.debug("Enabling table '{}'...", tableId);
            ddlExecutor.enableTableIfDisabled(tableName.getNamespaceAsString(), tableName.getQualifierAsString());
        }
    }
    LOG.info("Table '{}' update completed.", tableId);
}
Also used : HBaseDDLExecutor(co.cask.cdap.spi.hbase.HBaseDDLExecutor) HTableDescriptorBuilder(co.cask.cdap.data2.util.hbase.HTableDescriptorBuilder) HBaseTableUtil(co.cask.cdap.data2.util.hbase.HBaseTableUtil) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) TableName(org.apache.hadoop.hbase.TableName) ProjectInfo(co.cask.cdap.common.utils.ProjectInfo) Location(org.apache.twill.filesystem.Location) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException)
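
The update path above goes through CDAP's HBaseDDLExecutor and HTableDescriptorBuilder. Stripped down to the HBase 2.x Admin API, the disable / modify / conditional re-enable sequence it performs could be sketched roughly as follows (illustrative only; the helper name and the assumption that the new TableDescriptor is already built are not from CDAP):

// Illustrative sketch, not CDAP code: apply a new table descriptor while preserving the table's
// previous enabled/disabled state, mirroring the enableTable flag in updateTable above.
private static void modifyTablePreservingState(Admin admin, TableDescriptor newDescriptor) throws IOException {
    TableName table = newDescriptor.getTableName();
    boolean reEnable = false;
    try {
        admin.disableTable(table);
        reEnable = true; // We disabled it here, so restore the enabled state afterwards.
    } catch (TableNotEnabledException e) {
        // Already disabled before the update; leave it disabled afterwards as well.
    }
    admin.modifyTable(newDescriptor);
    if (reEnable) {
        admin.enableTable(table);
    }
}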

Example 9 with TableNotEnabledException

Use of org.apache.hadoop.hbase.TableNotEnabledException in project janusgraph by JanusGraph.

Class HBaseStoreManager, method ensureColumnFamilyExists.

private void ensureColumnFamilyExists(String tableName, String columnFamily, int ttlInSeconds) throws BackendException {
    AdminMask adm = null;
    try {
        adm = getAdminInterface();
        HTableDescriptor desc = ensureTableExists(tableName, columnFamily, ttlInSeconds);
        Preconditions.checkNotNull(desc);
        HColumnDescriptor cf = desc.getFamily(Bytes.toBytes(columnFamily));
        // Create our column family, if necessary
        if (cf == null) {
            try {
                if (!adm.isTableDisabled(tableName)) {
                    adm.disableTable(tableName);
                }
            } catch (TableNotEnabledException e) {
                logger.debug("Table {} already disabled", tableName);
            } catch (IOException e) {
                throw new TemporaryBackendException(e);
            }
            try {
                HColumnDescriptor columnDescriptor = new HColumnDescriptor(columnFamily);
                setCFOptions(columnDescriptor, ttlInSeconds);
                adm.addColumn(tableName, columnDescriptor);
                try {
                    logger.debug("Added HBase ColumnFamily {}, waiting for 1 sec. to propogate.", columnFamily);
                    Thread.sleep(1000L);
                } catch (InterruptedException ie) {
                    throw new TemporaryBackendException(ie);
                }
                adm.enableTable(tableName);
            } catch (TableNotFoundException ee) {
                logger.error("TableNotFoundException", ee);
                throw new PermanentBackendException(ee);
            } catch (org.apache.hadoop.hbase.TableExistsException ee) {
                logger.debug("Swallowing exception {}", ee);
            } catch (IOException ee) {
                throw new TemporaryBackendException(ee);
            }
        }
    } finally {
        IOUtils.closeQuietly(adm);
    }
}
Also used : TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) TemporaryBackendException(org.janusgraph.diskstorage.TemporaryBackendException) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) PermanentBackendException(org.janusgraph.diskstorage.PermanentBackendException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException)

Example 10 with TableNotEnabledException

Use of org.apache.hadoop.hbase.TableNotEnabledException in project hbase by apache.

Class ImportTsv, method deleteTable.

private static void deleteTable(Configuration conf, String[] args) {
    TableName tableName = TableName.valueOf(args[0]);
    try (Connection connection = ConnectionFactory.createConnection(conf);
        Admin admin = connection.getAdmin()) {
        try {
            admin.disableTable(tableName);
        } catch (TableNotEnabledException e) {
            LOG.debug("Dry mode: Table: " + tableName + " already disabled, so just deleting it.");
        }
        admin.deleteTable(tableName);
    } catch (IOException e) {
        LOG.error(format("***Dry run: Failed to delete table '%s'.***%n%s", tableName, e.toString()));
        return;
    }
    LOG.info(format("Dry run: Deleted table '%s'.", tableName));
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException) Admin(org.apache.hadoop.hbase.client.Admin) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException)
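
The aggregation below also counts three @Test usages. The condition that raises the exception is easy to reproduce: calling disableTable on a table that is not in the enabled state throws TableNotEnabledException, which is why all of the examples above either pre-check the state or swallow the exception. A minimal sketch, assuming an open Admin and a placeholder table name:

// Illustrative snippet: the second disable fails because the table is no longer enabled.
TableName table = TableName.valueOf("example_table"); // placeholder name
admin.disableTable(table);
try {
    admin.disableTable(table);
} catch (TableNotEnabledException expected) {
    // Thrown by the second call; idempotent callers treat this as "already done".
}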

Aggregations

TableNotEnabledException (org.apache.hadoop.hbase.TableNotEnabledException): 14
TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException): 8
IOException (java.io.IOException): 6
TableName (org.apache.hadoop.hbase.TableName): 6
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 4
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 3
Admin (org.apache.hadoop.hbase.client.Admin): 3
Test (org.junit.Test): 3
PermanentBackendException (com.thinkaurelius.titan.diskstorage.PermanentBackendException): 2
TemporaryBackendException (com.thinkaurelius.titan.diskstorage.TemporaryBackendException): 2
ProjectInfo (co.cask.cdap.common.utils.ProjectInfo): 1
HBaseTableUtil (co.cask.cdap.data2.util.hbase.HBaseTableUtil): 1
HTableDescriptorBuilder (co.cask.cdap.data2.util.hbase.HTableDescriptorBuilder): 1
HBaseDDLExecutor (co.cask.cdap.spi.hbase.HBaseDDLExecutor): 1
CountDownLatch (java.util.concurrent.CountDownLatch): 1
DELETE (javax.ws.rs.DELETE): 1
Cell (org.apache.hadoop.hbase.Cell): 1
ProcedureInfo (org.apache.hadoop.hbase.ProcedureInfo): 1
TableExistsException (org.apache.hadoop.hbase.TableExistsException): 1
UnknownScannerException (org.apache.hadoop.hbase.UnknownScannerException): 1