Search in sources :

Example 21 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From the class ChangeCompressionAction, the method perform:

@Override
public void perform() throws Exception {
    // Fetch the table's current schema; a table with no column families
    // gives us nothing to change.
    HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
    HColumnDescriptor[] families = tableDescriptor.getColumnFamilies();
    if (families == null || families.length == 0) {
        return;
    }
    // Possible compression algorithms. If an algorithm is not supported,
    // modifyTable will fail, so there is no harm.
    Algorithm[] candidates = Algorithm.values();
    // Since not every compression algorithm is supported,
    // let's use the same algorithm for all column families.
    // If an unsupported compression algorithm is chosen, pick a different one.
    // This is to work around the issue that modifyTable() does not throw remote
    // exception.
    Algorithm algo = null;
    while (algo == null) {
        Algorithm candidate = candidates[random.nextInt(candidates.length)];
        try {
            // Probe local support by borrowing a Compressor, then
            // call returnCompressor() to release the Compressor.
            Compressor compressor = candidate.getCompressor();
            candidate.returnCompressor(compressor);
            algo = candidate;
        } catch (Throwable t) {
            LOG.info("Performing action: Changing compression algorithms to " + candidate + " is not supported, pick another one");
        }
    }
    LOG.debug("Performing action: Changing compression algorithms on " + tableName.getNameAsString() + " to " + algo);
    for (HColumnDescriptor family : families) {
        // Coin flip: apply the algorithm either to compaction output or
        // to the family's general compression setting.
        if (random.nextBoolean()) {
            family.setCompactionCompressionType(algo);
        } else {
            family.setCompressionType(algo);
        }
    }
    // Don't try the modify if we're stopping
    if (context.isStopping()) {
        return;
    }
    admin.modifyTable(tableName, tableDescriptor);
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Compressor(org.apache.hadoop.io.compress.Compressor) Algorithm(org.apache.hadoop.hbase.io.compress.Compression.Algorithm) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Example 22 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From the class SchemaResource, the method update:

/**
 * Incrementally updates the schema of the table {@code name}: each column
 * family in {@code model} is modified if it already exists on the table,
 * or added otherwise. The table is disabled for the duration of the change
 * and re-enabled afterwards.
 *
 * @param name    the table whose schema is being updated
 * @param model   the schema model carrying the families and their attributes
 * @param uriInfo request URI context (unused here, kept for signature parity)
 * @param admin   the HBase admin used to apply the changes
 * @return 403 when the gateway is read-only, 503 when a family change fails,
 *         200 on success, or the mapped error response for other exceptions
 */
private Response update(final TableName name, final TableSchemaModel model, final UriInfo uriInfo, final Admin admin) {
    // Schema mutations are rejected when the REST gateway runs read-only.
    if (servlet.isReadOnly()) {
        return Response.status(Response.Status.FORBIDDEN).type(MIMETYPE_TEXT).entity("Forbidden" + CRLF).build();
    }
    try {
        HTableDescriptor htd = admin.getTableDescriptor(name);
        // The table must be disabled before its families can be altered.
        admin.disableTable(name);
        try {
            for (ColumnSchemaModel family : model.getColumns()) {
                HColumnDescriptor hcd = new HColumnDescriptor(family.getName());
                // Copy every attribute from the model onto the descriptor.
                for (Map.Entry<QName, Object> e : family.getAny().entrySet()) {
                    hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
                }
                if (htd.hasFamily(hcd.getName())) {
                    admin.modifyColumnFamily(name, hcd);
                } else {
                    admin.addColumnFamily(name, hcd);
                }
            }
        } catch (IOException e) {
            return Response.status(Response.Status.SERVICE_UNAVAILABLE).type(MIMETYPE_TEXT).entity("Unavailable" + CRLF).build();
        } finally {
            // Re-enable exactly the table we disabled above. Using the
            // method's own TableName avoids the redundant — and potentially
            // inconsistent — TableName.valueOf(tableResource.getName()) lookup.
            admin.enableTable(name);
        }
        servlet.getMetrics().incrementSucessfulPutRequests(1);
        return Response.ok().build();
    } catch (Exception e) {
        servlet.getMetrics().incrementFailedPutRequests(1);
        return processException(e);
    }
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) QName(javax.xml.namespace.QName) IOException(java.io.IOException) ColumnSchemaModel(org.apache.hadoop.hbase.rest.model.ColumnSchemaModel) Map(java.util.Map) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException) TableExistsException(org.apache.hadoop.hbase.TableExistsException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Example 23 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From the class SchemaResource, the method replace:

/**
 * Replaces the full schema of table {@code name} with the one described by
 * {@code model}, creating the table when it does not exist yet.
 *
 * @param name    the target table
 * @param model   the complete schema to install
 * @param uriInfo request URI context, used to build the Location header
 * @param admin   the HBase admin used to apply the changes
 * @return 403 when read-only, 304 when a concurrent create wins the race,
 *         201 on success, or the mapped error response for other exceptions
 */
private Response replace(final TableName name, final TableSchemaModel model, final UriInfo uriInfo, final Admin admin) {
    // Schema replacement is disallowed on a read-only gateway.
    if (servlet.isReadOnly()) {
        return Response.status(Response.Status.FORBIDDEN).type(MIMETYPE_TEXT).entity("Forbidden" + CRLF).build();
    }
    try {
        // Build a complete descriptor from the model: table-level attributes
        // first, then one column descriptor per declared family.
        HTableDescriptor htd = new HTableDescriptor(name);
        for (Map.Entry<QName, Object> attribute : model.getAny().entrySet()) {
            String key = attribute.getKey().getLocalPart();
            htd.setValue(key, attribute.getValue().toString());
        }
        for (ColumnSchemaModel column : model.getColumns()) {
            HColumnDescriptor hcd = new HColumnDescriptor(column.getName());
            for (Map.Entry<QName, Object> attribute : column.getAny().entrySet()) {
                String key = attribute.getKey().getLocalPart();
                hcd.setValue(key, attribute.getValue().toString());
            }
            htd.addFamily(hcd);
        }
        if (admin.tableExists(name)) {
            // Swapping the schema of an existing table requires a
            // disable/modify/enable cycle.
            admin.disableTable(name);
            admin.modifyTable(name, htd);
            admin.enableTable(name);
            servlet.getMetrics().incrementSucessfulPutRequests(1);
        } else {
            try {
                admin.createTable(htd);
                servlet.getMetrics().incrementSucessfulPutRequests(1);
            } catch (TableExistsException e) {
                // race, someone else created a table with the same name
                return Response.status(Response.Status.NOT_MODIFIED).type(MIMETYPE_TEXT).entity("Not modified" + CRLF).build();
            }
        }
        return Response.created(uriInfo.getAbsolutePath()).build();
    } catch (Exception e) {
        servlet.getMetrics().incrementFailedPutRequests(1);
        return processException(e);
    }
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) QName(javax.xml.namespace.QName) TableExistsException(org.apache.hadoop.hbase.TableExistsException) Map(java.util.Map) ColumnSchemaModel(org.apache.hadoop.hbase.rest.model.ColumnSchemaModel) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException) TableExistsException(org.apache.hadoop.hbase.TableExistsException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Example 24 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From the class TableSchemaModel, the method getTableDescriptor:

/**
   * Translates this schema model into an HTableDescriptor: table-level
   * attributes are copied first, then a column descriptor is built and
   * attached for each declared family.
   *
   * @return a table descriptor
   */
@JsonIgnore
public HTableDescriptor getTableDescriptor() {
    TableName tableName = TableName.valueOf(getName());
    HTableDescriptor descriptor = new HTableDescriptor(tableName);
    // Copy every table-level attribute from the model.
    for (Map.Entry<QName, Object> attribute : getAny().entrySet()) {
        String key = attribute.getKey().getLocalPart();
        descriptor.setValue(key, attribute.getValue().toString());
    }
    // Build and attach one column descriptor per declared family.
    for (ColumnSchemaModel column : getColumns()) {
        HColumnDescriptor family = new HColumnDescriptor(column.getName());
        for (Map.Entry<QName, Object> attribute : column.getAny().entrySet()) {
            String key = attribute.getKey().getLocalPart();
            family.setValue(key, attribute.getValue().toString());
        }
        descriptor.addFamily(family);
    }
    return descriptor;
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) QName(javax.xml.namespace.QName) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) JsonIgnore(org.codehaus.jackson.annotate.JsonIgnore)

Example 25 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From the class ExpiredMobFileCleaner, the method run:

@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "REC_CATCH_EXCEPTION", justification = "Intentional")
public int run(String[] args) throws Exception {
    // CLI entry point: expects exactly two arguments,
    // args[0] = table name, args[1] = MOB column family name.
    if (args.length != 2) {
        printUsage();
        return 1;
    }
    String tableName = args[0];
    String familyName = args[1];
    TableName tn = TableName.valueOf(tableName);
    // Fail fast if the cluster is not reachable before opening a connection.
    HBaseAdmin.available(getConf());
    Connection connection = ConnectionFactory.createConnection(getConf());
    Admin admin = connection.getAdmin();
    try {
        // Validate the target: the family must exist, be MOB-enabled, and
        // keep no minimum versions (this cleaner removes expired cells outright).
        HTableDescriptor htd = admin.getTableDescriptor(tn);
        HColumnDescriptor family = htd.getFamily(Bytes.toBytes(familyName));
        if (family == null || !family.isMobEnabled()) {
            throw new IOException("Column family " + familyName + " is not a MOB column family");
        }
        if (family.getMinVersions() > 0) {
            throw new IOException("The minVersions of the column family is not 0, could not be handled by this cleaner");
        }
        cleanExpiredMobFiles(tableName, family);
        return 0;
    } finally {
        // Close admin before its connection; close failures are deliberately
        // logged rather than rethrown so they cannot mask the result above.
        try {
            admin.close();
        } catch (IOException e) {
            LOG.error("Failed to close the HBaseAdmin.", e);
        }
        try {
            connection.close();
        } catch (IOException e) {
            LOG.error("Failed to close the connection.", e);
        }
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) Admin(org.apache.hadoop.hbase.client.Admin) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Aggregations

HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)867 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)555 Test (org.junit.Test)425 TableName (org.apache.hadoop.hbase.TableName)258 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)171 IOException (java.io.IOException)167 Put (org.apache.hadoop.hbase.client.Put)149 Table (org.apache.hadoop.hbase.client.Table)134 Path (org.apache.hadoop.fs.Path)127 Admin (org.apache.hadoop.hbase.client.Admin)121 Configuration (org.apache.hadoop.conf.Configuration)87 HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin)77 ArrayList (java.util.ArrayList)75 FileSystem (org.apache.hadoop.fs.FileSystem)66 Result (org.apache.hadoop.hbase.client.Result)62 Connection (org.apache.hadoop.hbase.client.Connection)57 Scan (org.apache.hadoop.hbase.client.Scan)51 Cell (org.apache.hadoop.hbase.Cell)44 Delete (org.apache.hadoop.hbase.client.Delete)44 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)43