
Example 6 with TableDescriptorBuilder

Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TestMultiRowResource, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    conf.setBoolean(RESTServer.REST_CSRF_ENABLED_KEY, csrfEnabled);
    if (csrfEnabled) {
        conf.set(RESTServer.REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*");
    }
    extraHdr = new BasicHeader(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    context = JAXBContext.newInstance(CellModel.class, CellSetModel.class, RowModel.class);
    marshaller = context.createMarshaller();
    unmarshaller = context.createUnmarshaller();
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    Admin admin = TEST_UTIL.getAdmin();
    if (admin.tableExists(TABLE)) {
        return;
    }
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(TABLE);
    ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFA)).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFB)).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    admin.createTable(tableDescriptorBuilder.build());
}
Also used: CellSetModel(org.apache.hadoop.hbase.rest.model.CellSetModel), Cluster(org.apache.hadoop.hbase.rest.client.Cluster), CellModel(org.apache.hadoop.hbase.rest.model.CellModel), RowModel(org.apache.hadoop.hbase.rest.model.RowModel), TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder), Client(org.apache.hadoop.hbase.rest.client.Client), Admin(org.apache.hadoop.hbase.client.Admin), ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), BasicHeader(org.apache.http.message.BasicHeader), BeforeClass(org.junit.BeforeClass)
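
Since every setter on TableDescriptorBuilder returns the builder itself, the two setColumnFamily calls above can also be chained. A minimal sketch of the same two-family table, using placeholder names in place of the test's TABLE, CFA, and CFB constants:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class ChainedBuilderSketch {
    public static void main(String[] args) {
        // "TestMultiRow", "a", and "b" are placeholders for the test's constants
        TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("TestMultiRow"))
            // ColumnFamilyDescriptorBuilder.of(name) is shorthand for newBuilder(...).build()
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("a"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("b"))
            .build();
        System.out.println(td.getColumnFamilyCount()); // 2
    }
}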

Example 7 with TableDescriptorBuilder

Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TableSchemaModel, method getTableDescriptor:

/**
 * @return a table descriptor built from this model's name, attributes, and column schemas
 */
@JsonIgnore
public TableDescriptor getTableDescriptor() {
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(TableName.valueOf(getName()));
    for (Map.Entry<QName, Object> e : getAny().entrySet()) {
        tableDescriptorBuilder.setValue(e.getKey().getLocalPart(), e.getValue().toString());
    }
    for (ColumnSchemaModel column : getColumns()) {
        ColumnFamilyDescriptorBuilder cfdb = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column.getName()));
        for (Map.Entry<QName, Object> e : column.getAny().entrySet()) {
            cfdb.setValue(e.getKey().getLocalPart(), e.getValue().toString());
        }
        tableDescriptorBuilder.setColumnFamily(cfdb.build());
    }
    return tableDescriptorBuilder.build();
}
Also used: QName(javax.xml.namespace.QName), ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder), TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder), LinkedHashMap(java.util.LinkedHashMap), Map(java.util.Map), JsonIgnore(com.fasterxml.jackson.annotation.JsonIgnore)
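
Going the other direction, the model is normally populated from a REST payload before getTableDescriptor() is called. A rough usage sketch, assuming the addAttribute and addColumnFamily mutators on the rest.model classes behave as their names suggest (check the API of your HBase version):

import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;
import org.apache.hadoop.hbase.rest.model.TableSchemaModel;

public class TableSchemaModelSketch {
    public static void main(String[] args) {
        TableSchemaModel model = new TableSchemaModel();
        model.setName("example_table");
        // table-level attribute; surfaces through getAny() and ends up in setValue(...)
        model.addAttribute("READONLY", "false");

        ColumnSchemaModel family = new ColumnSchemaModel();
        family.setName("cf");
        // family-level attribute; copied onto the ColumnFamilyDescriptor
        family.addAttribute("VERSIONS", "3");
        model.addColumnFamily(family);

        TableDescriptor td = model.getTableDescriptor();
        System.out.println(td.getColumnFamilyCount()); // 1
    }
}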

Example 8 with TableDescriptorBuilder

Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class CloneSnapshotProcedure, method updateTableDescriptorWithSFT:

/**
 * If a StoreFileTracker is specified, strip any previous SFT configuration from the
 * column families and set the specified SFT at the table level.
 */
private void updateTableDescriptorWithSFT() {
    if (StringUtils.isEmpty(customSFT)) {
        return;
    }
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
    builder.setValue(StoreFileTrackerFactory.TRACKER_IMPL, customSFT);
    for (ColumnFamilyDescriptor family : tableDescriptor.getColumnFamilies()) {
        ColumnFamilyDescriptorBuilder cfBuilder = ColumnFamilyDescriptorBuilder.newBuilder(family);
        cfBuilder.setConfiguration(StoreFileTrackerFactory.TRACKER_IMPL, null);
        cfBuilder.setValue(StoreFileTrackerFactory.TRACKER_IMPL, null);
        builder.modifyColumnFamily(cfBuilder.build());
    }
    tableDescriptor = builder.build();
}
Also used: ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder), TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder), ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)
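
The net effect on the rebuilt descriptor is easy to verify: the tracker implementation is readable at the table level and absent from every family. A small hedged sketch (the key string mirrors StoreFileTrackerFactory.TRACKER_IMPL, and FILE is one of the built-in tracker names):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class SftCheckSketch {
    // same key as StoreFileTrackerFactory.TRACKER_IMPL
    private static final String TRACKER_IMPL = "hbase.store.file-tracker.impl";

    public static void main(String[] args) {
        TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("cloned"))
            .setValue(TRACKER_IMPL, "FILE")
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
            .build();
        System.out.println(td.getValue(TRACKER_IMPL)); // FILE at the table level
        for (ColumnFamilyDescriptor cf : td.getColumnFamilies()) {
            System.out.println(cf.getValue(TRACKER_IMPL)); // null: no family-level override
        }
    }
}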

Example 9 with TableDescriptorBuilder

Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class ModifyTableStoreFileTrackerProcedure, method createFinishTableDescriptor:

@Override
protected TableDescriptor createFinishTableDescriptor(TableDescriptor current) {
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(current);
    finish(builder::setValue, builder::removeValue);
    return builder.build();
}
Also used: TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder)
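
The finish(builder::setValue, builder::removeValue) call hands the builder's mutators to a hook as method references, so the hook can decide which keys to set or remove without touching the builder directly. A generic sketch of the same callback idiom, with a hypothetical finish implementation standing in for the procedure's abstract hook:

import java.util.function.BiConsumer;
import java.util.function.Consumer;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class FinishCallbackSketch {
    // hypothetical stand-in for the procedure's finish(...) hook
    static void finish(BiConsumer<String, String> setValue, Consumer<String> removeValue) {
        setValue.accept("hbase.store.file-tracker.impl", "FILE");
        removeValue.accept("some.obsolete.key"); // illustrative key only
    }

    public static void main(String[] args) {
        TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf("t"));
        // pass the builder's mutators as method references, mirroring the procedure
        finish(builder::setValue, builder::removeValue);
        TableDescriptor td = builder.build();
        System.out.println(td.getValue("hbase.store.file-tracker.impl")); // FILE
    }
}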

Example 10 with TableDescriptorBuilder

Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TestHFileOutputFormat2, method testColumnFamilySettings:

/**
 * Test that {@link HFileOutputFormat2} RecordWriter uses compression and
 * bloom filter settings from the column family descriptor
 */
@Ignore("Goes zombie too frequently; needs work. See HBASE-14563")
@Test
public void testColumnFamilySettings() throws Exception {
    Configuration conf = new Configuration(this.util.getConfiguration());
    RecordWriter<ImmutableBytesWritable, Cell> writer = null;
    TaskAttemptContext context = null;
    Path dir = util.getDataTestDir("testColumnFamilySettings");
    // Setup table descriptor
    Table table = Mockito.mock(Table.class);
    RegionLocator regionLocator = Mockito.mock(RegionLocator.class);
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(TABLE_NAMES[0]);
    for (ColumnFamilyDescriptor hcd : HBaseTestingUtil.generateColumnDescriptors()) {
        tableDescriptorBuilder.setColumnFamily(hcd);
    }
    // stub getDescriptor() only after the families are added, so the mock returns a descriptor that includes them
    Mockito.doReturn(tableDescriptorBuilder.build()).when(table).getDescriptor();
    // set up the table to return some mock keys
    setupMockStartKeys(regionLocator);
    try {
        // partial map red setup to get an operational writer for testing
        // We turn off the sequence file compression, because DefaultCodec
        // pollutes the GZip codec pool with an incompatible compressor.
        conf.set("io.seqfile.compression.type", "NONE");
        conf.set("hbase.fs.tmp.dir", dir.toString());
        // turn locality off to eliminate getRegionLocation fail-and-retry time when writing kvs
        conf.setBoolean(HFileOutputFormat2.LOCALITY_SENSITIVE_CONF_KEY, false);
        Job job = new Job(conf, "testLocalMRIncrementalLoad");
        job.setWorkingDirectory(util.getDataTestDirOnTestFS("testColumnFamilySettings"));
        setupRandomGeneratorMapper(job, false);
        HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), regionLocator);
        FileOutputFormat.setOutputPath(job, dir);
        context = createTestTaskAttemptContext(job);
        HFileOutputFormat2 hof = new HFileOutputFormat2();
        writer = hof.getRecordWriter(context);
        // write out random rows
        writeRandomKeyValues(writer, context, tableDescriptorBuilder.build().getColumnFamilyNames(), ROWSPERSPLIT);
        writer.close(context);
        // Make sure that a directory was created for every CF
        FileSystem fs = dir.getFileSystem(conf);
        // commit so that the filesystem has one directory per column family
        hof.getOutputCommitter(context).commitTask(context);
        hof.getOutputCommitter(context).commitJob(context);
        FileStatus[] families = CommonFSUtils.listStatus(fs, dir, new FSUtils.FamilyDirFilter(fs));
        assertEquals(tableDescriptorBuilder.build().getColumnFamilies().length, families.length);
        for (FileStatus f : families) {
            String familyStr = f.getPath().getName();
            ColumnFamilyDescriptor hcd = tableDescriptorBuilder.build().getColumnFamily(Bytes.toBytes(familyStr));
            // verify that the compression on this file matches the configured
            // compression
            Path dataFilePath = fs.listStatus(f.getPath())[0].getPath();
            Reader reader = HFile.createReader(fs, dataFilePath, new CacheConfig(conf), true, conf);
            Map<byte[], byte[]> fileInfo = reader.getHFileInfo();
            byte[] bloomFilter = fileInfo.get(BLOOM_FILTER_TYPE_KEY);
            if (bloomFilter == null) {
                bloomFilter = Bytes.toBytes("NONE");
            }
            assertEquals("Incorrect bloom filter used for column family " + familyStr + " (reader: " + reader + ")", hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
            assertEquals("Incorrect compression used for column family " + familyStr + " (reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
        }
    } finally {
        dir.getFileSystem(conf).delete(dir, true);
    }
}
Also used: Path(org.apache.hadoop.fs.Path), RegionLocator(org.apache.hadoop.hbase.client.RegionLocator), ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable), Table(org.apache.hadoop.hbase.client.Table), FileStatus(org.apache.hadoop.fs.FileStatus), LocatedFileStatus(org.apache.hadoop.fs.LocatedFileStatus), HdfsFileStatus(org.apache.hadoop.hdfs.protocol.HdfsFileStatus), Configuration(org.apache.hadoop.conf.Configuration), HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration), Reader(org.apache.hadoop.hbase.io.hfile.HFile.Reader), TaskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext), TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder), ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), FileSystem(org.apache.hadoop.fs.FileSystem), TestHRegionFileSystem(org.apache.hadoop.hbase.regionserver.TestHRegionFileSystem), DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem), Job(org.apache.hadoop.mapreduce.Job), Cell(org.apache.hadoop.hbase.Cell), CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig), CommonFSUtils(org.apache.hadoop.hbase.util.CommonFSUtils), FSUtils(org.apache.hadoop.hbase.util.FSUtils), Ignore(org.junit.Ignore), Test(org.junit.Test)

Aggregations

TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 190 uses
ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 76 uses
Test (org.junit.Test): 68 uses
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 61 uses
ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder): 47 uses
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 39 uses
TableName (org.apache.hadoop.hbase.TableName): 34 uses
Path (org.apache.hadoop.fs.Path): 31 uses
Admin (org.apache.hadoop.hbase.client.Admin): 29 uses
Put (org.apache.hadoop.hbase.client.Put): 25 uses
IOException (java.io.IOException): 24 uses
Configuration (org.apache.hadoop.conf.Configuration): 20 uses
Table (org.apache.hadoop.hbase.client.Table): 18 uses
ArrayList (java.util.ArrayList): 17 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 15 uses
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 15 uses
Before (org.junit.Before): 12 uses
Cell (org.apache.hadoop.hbase.Cell): 11 uses
NamespaceDescriptor (org.apache.hadoop.hbase.NamespaceDescriptor): 10 uses
HashMap (java.util.HashMap): 9 uses