
Example 16 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class TableSchemaModel method getTableDescriptor.

/**
   * @return a table descriptor
   */
@JsonIgnore
public HTableDescriptor getTableDescriptor() {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(getName()));
    for (Map.Entry<QName, Object> e : getAny().entrySet()) {
        htd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
    }
    for (ColumnSchemaModel column : getColumns()) {
        HColumnDescriptor hcd = new HColumnDescriptor(column.getName());
        for (Map.Entry<QName, Object> e : column.getAny().entrySet()) {
            hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
        }
        htd.addFamily(hcd);
    }
    return htd;
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) QName(javax.xml.namespace.QName) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) JsonIgnore(org.codehaus.jackson.annotate.JsonIgnore)
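For reference, a minimal standalone sketch of the same descriptor-building pattern; the table name "example_table", the family "cf", and the metadata keys below are placeholders, not values taken from the example above:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;

public class TableDescriptorSketch {
    public static HTableDescriptor build() {
        // mirror getTableDescriptor(): attach string metadata to both descriptors,
        // then add the column family to the table descriptor
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("example_table"));
        HColumnDescriptor hcd = new HColumnDescriptor("cf");
        hcd.setValue("BLOOMFILTER", "ROW");
        htd.setValue("OWNER", "demo");
        htd.addFamily(hcd);
        return htd;
    }
}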

Example 17 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class MobUtils method doMobCompaction.

/**
   * Performs the mob compaction.
   * @param conf the Configuration
   * @param fs the file system
   * @param tableName the table to compact
   * @param hcd the column descriptor
   * @param pool the thread pool
   * @param allFiles whether to add all mob files into the compaction
   * @param lock the master lock held while the compaction runs
   */
public static void doMobCompaction(Configuration conf, FileSystem fs, TableName tableName, HColumnDescriptor hcd, ExecutorService pool, boolean allFiles, LockManager.MasterLock lock) throws IOException {
    String className = conf.get(MobConstants.MOB_COMPACTOR_CLASS_KEY, PartitionedMobCompactor.class.getName());
    // instantiate the mob compactor.
    MobCompactor compactor = null;
    try {
        compactor = ReflectionUtils.instantiateWithCustomCtor(className, new Class[] { Configuration.class, FileSystem.class, TableName.class, HColumnDescriptor.class, ExecutorService.class }, new Object[] { conf, fs, tableName, hcd, pool });
    } catch (Exception e) {
        throw new IOException("Unable to load configured mob file compactor '" + className + "'", e);
    }
    // hold the lock while compacting to avoid a race with major compaction on the mob-enabled column.
    try {
        lock.acquire();
        compactor.compact(allFiles);
    } catch (Exception e) {
        LOG.error("Failed to compact the mob files for the column " + hcd.getNameAsString() + " in the table " + tableName.getNameAsString(), e);
    } finally {
        lock.release();
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Configuration(org.apache.hadoop.conf.Configuration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) PartitionedMobCompactor(org.apache.hadoop.hbase.mob.compactions.PartitionedMobCompactor) FileSystem(org.apache.hadoop.fs.FileSystem) ExecutorService(java.util.concurrent.ExecutorService) MobCompactor(org.apache.hadoop.hbase.mob.compactions.MobCompactor) IOException(java.io.IOException) ParseException(java.text.ParseException) FileNotFoundException(java.io.FileNotFoundException) RejectedExecutionException(java.util.concurrent.RejectedExecutionException)
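A hedged sketch of the configuration lookup this method performs, assuming a default HBaseConfiguration; it only shows how the compactor implementation is selected, not a full compaction run (which also needs a FileSystem, a thread pool, and a master lock):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.compactions.PartitionedMobCompactor;

public class MobCompactorConfigSketch {
    public static String resolveCompactorClass() {
        Configuration conf = HBaseConfiguration.create();
        // doMobCompaction falls back to PartitionedMobCompactor when the key is unset
        return conf.get(MobConstants.MOB_COMPACTOR_CLASS_KEY,
            PartitionedMobCompactor.class.getName());
    }
}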

Example 18 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class ExpiredMobFileCleaner method run.

@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "REC_CATCH_EXCEPTION", justification = "Intentional")
public int run(String[] args) throws Exception {
    if (args.length != 2) {
        printUsage();
        return 1;
    }
    String tableName = args[0];
    String familyName = args[1];
    TableName tn = TableName.valueOf(tableName);
    HBaseAdmin.available(getConf());
    Connection connection = ConnectionFactory.createConnection(getConf());
    Admin admin = connection.getAdmin();
    try {
        HTableDescriptor htd = admin.getTableDescriptor(tn);
        HColumnDescriptor family = htd.getFamily(Bytes.toBytes(familyName));
        if (family == null || !family.isMobEnabled()) {
            throw new IOException("Column family " + familyName + " is not a MOB column family");
        }
        if (family.getMinVersions() > 0) {
            throw new IOException("The minVersions of the column family is not 0, could not be handled by this cleaner");
        }
        cleanExpiredMobFiles(tableName, family);
        return 0;
    } finally {
        try {
            admin.close();
        } catch (IOException e) {
            LOG.error("Failed to close the HBaseAdmin.", e);
        }
        try {
            connection.close();
        } catch (IOException e) {
            LOG.error("Failed to close the connection.", e);
        }
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) Admin(org.apache.hadoop.hbase.client.Admin) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
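As a usage sketch, the cleaner is a Hadoop Tool, so it can be driven through ToolRunner; the table name "t1" and family "f1" are placeholders and must name an existing table with a MOB-enabled column family:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mob.ExpiredMobFileCleaner;
import org.apache.hadoop.util.ToolRunner;

public class ExpiredMobFileCleanerDemo {
    public static void main(String[] args) throws Exception {
        // run(args) above expects exactly two arguments: the table name and the mob family name
        int exit = ToolRunner.run(HBaseConfiguration.create(),
            new ExpiredMobFileCleaner(), new String[] { "t1", "f1" });
        System.exit(exit);
    }
}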

Example 19 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class VisibilityUtils method createVisibilityLabelFilter.

public static Filter createVisibilityLabelFilter(Region region, Authorizations authorizations) throws IOException {
    Map<ByteRange, Integer> cfVsMaxVersions = new HashMap<>();
    for (HColumnDescriptor hcd : region.getTableDesc().getFamilies()) {
        cfVsMaxVersions.put(new SimpleMutableByteRange(hcd.getName()), hcd.getMaxVersions());
    }
    VisibilityLabelService vls = VisibilityLabelServiceManager.getInstance().getVisibilityLabelService();
    Filter visibilityLabelFilter = new VisibilityLabelFilter(vls.getVisibilityExpEvaluator(authorizations), cfVsMaxVersions);
    return visibilityLabelFilter;
}
Also used : HashMap(java.util.HashMap) ByteRange(org.apache.hadoop.hbase.util.ByteRange) SimpleMutableByteRange(org.apache.hadoop.hbase.util.SimpleMutableByteRange) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Filter(org.apache.hadoop.hbase.filter.Filter)
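For the client side of the same feature, a minimal sketch of passing Authorizations on a Scan (the label "secret" is a placeholder); the server evaluates cell visibility expressions against these authorizations using a filter like the one built above:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.security.visibility.Authorizations;

public class VisibilityScanSketch {
    public static Scan scanWithAuthorizations() {
        Scan scan = new Scan();
        // only cells whose visibility expression is satisfied by these labels are returned
        scan.setAuthorizations(new Authorizations("secret"));
        return scan;
    }
}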

Example 20 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class SnapshotManifest method addMobRegion.

public void addMobRegion(HRegionInfo regionInfo) throws IOException {
    // 0. Get the ManifestBuilder/RegionVisitor
    RegionVisitor visitor = createRegionVisitor(desc);
    // 1. dump region meta info into the snapshot directory
    LOG.debug("Storing mob region '" + regionInfo + "' region-info for snapshot.");
    Object regionData = visitor.regionOpen(regionInfo);
    monitor.rethrowException();
    // 2. iterate through all the stores in the region
    LOG.debug("Creating references for mob files");
    Path mobRegionPath = MobUtils.getMobRegionPath(conf, regionInfo.getTable());
    for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
        // 2.1. build the snapshot reference for the store if it's a mob store
        if (!hcd.isMobEnabled()) {
            continue;
        }
        Object familyData = visitor.familyOpen(regionData, hcd.getName());
        monitor.rethrowException();
        Path storePath = MobUtils.getMobFamilyPath(mobRegionPath, hcd.getNameAsString());
        List<StoreFileInfo> storeFiles = getStoreFiles(storePath);
        if (storeFiles == null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("No mob files under family: " + hcd.getNameAsString());
            }
            continue;
        }
        addReferenceFiles(visitor, regionData, familyData, storeFiles, true);
        visitor.familyClose(regionData, familyData);
    }
    visitor.regionClose(regionData);
}
Also used : Path(org.apache.hadoop.fs.Path) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) StoreFileInfo(org.apache.hadoop.hbase.regionserver.StoreFileInfo)
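A minimal sketch of the column family configuration this method depends on; the family name "mobcf" and the 100 KB threshold are placeholders. addMobRegion only creates references for families where isMobEnabled() returns true:

import org.apache.hadoop.hbase.HColumnDescriptor;

public class MobFamilySketch {
    public static HColumnDescriptor mobFamily() {
        HColumnDescriptor hcd = new HColumnDescriptor("mobcf");
        // cells larger than the threshold are written to MOB files, which the snapshot references
        hcd.setMobEnabled(true);
        hcd.setMobThreshold(100 * 1024L);
        return hcd;
    }
}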

Aggregations

HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 671
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 554
Test (org.junit.Test): 358
TableName (org.apache.hadoop.hbase.TableName): 200
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 137
Put (org.apache.hadoop.hbase.client.Put): 132
Table (org.apache.hadoop.hbase.client.Table): 117
Admin (org.apache.hadoop.hbase.client.Admin): 110
IOException (java.io.IOException): 109
Path (org.apache.hadoop.fs.Path): 81
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 71
ArrayList (java.util.ArrayList): 66
Configuration (org.apache.hadoop.conf.Configuration): 65
Connection (org.apache.hadoop.hbase.client.Connection): 51
Scan (org.apache.hadoop.hbase.client.Scan): 50
Result (org.apache.hadoop.hbase.client.Result): 45
FileSystem (org.apache.hadoop.fs.FileSystem): 44
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 42
Connection (java.sql.Connection): 41
Properties (java.util.Properties): 38