
Example 76 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class TestHStoreFile, method testEmptyStoreFileRestrictKeyRanges:

@Test
public void testEmptyStoreFileRestrictKeyRanges() throws Exception {
    StoreFileReader reader = mock(StoreFileReader.class);
    HStore store = mock(HStore.class);
    byte[] cf = Bytes.toBytes("ty");
    ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(cf);
    when(store.getColumnFamilyDescriptor()).thenReturn(cfd);
    try (StoreFileScanner scanner = new StoreFileScanner(reader, mock(HFileScanner.class), false, false, 0, 0, true)) {
        Scan scan = new Scan();
        scan.setColumnFamilyTimeRange(cf, 0, 1);
        // The mocked (empty) store file cannot hold cells in the requested time range, so the scanner is skippable.
        assertFalse(scanner.shouldUseScanner(scan, store, 0));
    }
}
Also used: HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner), Scan (org.apache.hadoop.hbase.client.Scan), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), Test (org.junit.Test)
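
As a quick companion to this test: ColumnFamilyDescriptorBuilder.of is the shortest way to obtain a descriptor with all-default attributes. The standalone sketch below is illustrative only; the family name "cf" and the printed getters are not part of the test above.

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class DefaultDescriptorSketch {
    public static void main(String[] args) {
        // Build a descriptor with all-default attributes for a family named "cf".
        ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("cf"));
        System.out.println(cfd.getNameAsString()); // "cf"
        System.out.println(cfd.getMaxVersions());  // default version count
        System.out.println(cfd.getTimeToLive());   // default TTL (effectively forever)
    }
}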

Example 77 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class TestMiniClusterLoadSequential, method prepareForLoadTest:

protected void prepareForLoadTest() throws IOException {
    LOG.info("Starting load test: dataBlockEncoding=" + dataBlockEncoding + ", isMultiPut=" + isMultiPut);
    numKeys = numKeys();
    Admin admin = TEST_UTIL.getAdmin();
    while (admin.getClusterMetrics(EnumSet.of(Option.LIVE_SERVERS)).getLiveServerMetrics().size() < NUM_RS) {
        LOG.info("Sleeping until " + NUM_RS + " RSs are online");
        Threads.sleepWithoutInterrupt(1000);
    }
    admin.close();
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TABLE).build();
    ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(CF).setCompressionType(compression).setDataBlockEncoding(dataBlockEncoding).build();
    createPreSplitLoadTestTable(tableDescriptor, familyDescriptor);
    LoadTestDataGenerator dataGen = new MultiThreadedAction.DefaultDataGenerator(CF);
    writerThreads = prepareWriterThreads(dataGen, conf, TABLE);
    readerThreads = prepareReaderThreads(dataGen, conf, TABLE, 100);
}
Also used: LoadTestDataGenerator (org.apache.hadoop.hbase.util.test.LoadTestDataGenerator), Admin (org.apache.hadoop.hbase.client.Admin), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)
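
The builder chain in prepareForLoadTest is the standard way to attach storage-level settings to a family. Here is a minimal sketch of the same pattern outside the load-test harness, assuming concrete GZ compression and FAST_DIFF encoding in place of the test's parameterized values:

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;

public class EncodedFamilySketch {
    public static void main(String[] args) {
        // Compression and data block encoding are per-family storage settings.
        ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder
            .newBuilder(Bytes.toBytes("cf"))
            .setCompressionType(Compression.Algorithm.GZ)
            .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
            .build();
        System.out.println(cfd);
    }
}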

Example 78 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class TestMajorCompactorTTL, method modifyTTL:

protected void modifyTTL(TableName tableName) throws IOException, InterruptedException {
    // Set the TTL to 5 secs, so all the files just written above will get cleaned up on compact.
    admin.disableTable(tableName);
    utility.waitTableDisabled(tableName.getName());
    TableDescriptor descriptor = admin.getDescriptor(tableName);
    ColumnFamilyDescriptor colDesc = descriptor.getColumnFamily(FAMILY);
    ColumnFamilyDescriptorBuilder cFDB = ColumnFamilyDescriptorBuilder.newBuilder(colDesc);
    cFDB.setTimeToLive(5);
    admin.modifyColumnFamily(tableName, cFDB.build());
    admin.enableTable(tableName);
    utility.waitTableEnabled(tableName);
}
Also used: ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)
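
The key move in modifyTTL is ColumnFamilyDescriptorBuilder.newBuilder(existing), which copies every attribute of the live family so that only the TTL changes. A sketch of that copy-and-modify pattern as a reusable helper follows; the method name and parameters are hypothetical:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;

public class TtlUpdateSketch {
    static void setTtl(Admin admin, TableName table, byte[] family, int ttlSeconds) throws IOException {
        // Copy the current descriptor so every attribute except TTL is preserved.
        ColumnFamilyDescriptor current = admin.getDescriptor(table).getColumnFamily(family);
        ColumnFamilyDescriptor updated = ColumnFamilyDescriptorBuilder.newBuilder(current)
            .setTimeToLive(ttlSeconds)
            .build();
        admin.modifyColumnFamily(table, updated);
    }
}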

Example 79 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class TestHBaseFsckEncryption, method setUp:

@Before
public void setUp() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    conf.setInt("hfile.format.version", 3);
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
    // Create the test encryption key
    SecureRandom rng = new SecureRandom();
    byte[] keyBytes = new byte[AES.KEY_LENGTH];
    rng.nextBytes(keyBytes);
    String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    cfKey = new SecretKeySpec(keyBytes, algorithm);
    // Start the minicluster
    TEST_UTIL.startMiniCluster(3);
    // Create the table
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(TableName.valueOf("default", "TestHBaseFsckEncryption"));
    ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).setEncryptionType(algorithm).setEncryptionKey(EncryptionUtil.wrapKey(conf, conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()), cfKey)).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    tableDescriptor = tableDescriptorBuilder.build();
    TEST_UTIL.getAdmin().createTable(tableDescriptor);
    TEST_UTIL.waitTableAvailable(tableDescriptor.getTableName(), 5000);
}
Also used: SecretKeySpec (javax.crypto.spec.SecretKeySpec), SecureRandom (java.security.SecureRandom), TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), KeyProviderForTesting (org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting), Before (org.junit.Before)
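
The encryption-specific part of this setup is the setEncryptionType/setEncryptionKey pair, with the per-family data key wrapped by the cluster master key via EncryptionUtil.wrapKey before it goes into the schema. Pulled out of the test harness, and assuming the same "hbase" master key alias the test configures, the pattern looks roughly like this:

import java.security.Key;
import java.security.SecureRandom;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.security.EncryptionUtil;

public class EncryptedFamilySketch {
    static ColumnFamilyDescriptor encryptedFamily(Configuration conf, byte[] family) throws Exception {
        // Generate a random per-family data key (16 bytes = AES-128).
        byte[] keyBytes = new byte[16];
        new SecureRandom().nextBytes(keyBytes);
        String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
        Key dataKey = new SecretKeySpec(keyBytes, algorithm);
        // Wrap the data key with the master key before it is stored in the table schema.
        String masterKeyAlias = conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
        return ColumnFamilyDescriptorBuilder.newBuilder(family)
            .setEncryptionType(algorithm)
            .setEncryptionKey(EncryptionUtil.wrapKey(conf, masterKeyAlias, dataKey))
            .build();
    }
}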

Example 80 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class TestZooKeeperTableArchiveClient, method testArchivingOnSingleTable:

@Test
public void testArchivingOnSingleTable() throws Exception {
    createArchiveDirectory();
    FileSystem fs = UTIL.getTestFileSystem();
    Path archiveDir = getArchiveDir();
    Path tableDir = getTableDir(STRING_TABLE_NAME);
    toCleanup.add(archiveDir);
    toCleanup.add(tableDir);
    Configuration conf = UTIL.getConfiguration();
    // setup the delegate
    Stoppable stop = new StoppableImplementation();
    HFileCleaner cleaner = setupAndCreateCleaner(conf, fs, archiveDir, stop);
    List<BaseHFileCleanerDelegate> cleaners = turnOnArchiving(STRING_TABLE_NAME, cleaner);
    final LongTermArchivingHFileCleaner delegate = (LongTermArchivingHFileCleaner) cleaners.get(0);
    // create the region
    ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.of(TEST_FAM);
    HRegion region = UTIL.createTestRegion(STRING_TABLE_NAME, hcd);
    List<HRegion> regions = new ArrayList<>();
    regions.add(region);
    Mockito.doReturn(regions).when(rss).getRegions();
    final CompactedHFilesDischarger compactionCleaner = new CompactedHFilesDischarger(100, stop, rss, false);
    loadFlushAndCompact(region, TEST_FAM);
    compactionCleaner.chore();
    // get the current hfiles in the archive directory
    List<Path> files = getAllFiles(fs, archiveDir);
    if (files == null) {
        CommonFSUtils.logFileSystemState(fs, UTIL.getDataTestDir(), LOG);
        throw new RuntimeException("Didn't archive any files!");
    }
    CountDownLatch finished = setupCleanerWatching(delegate, cleaners, files.size());
    runCleaner(cleaner, finished, stop);
    // know the cleaner ran, so now check all the files again to make sure they are still there
    List<Path> archivedFiles = getAllFiles(fs, archiveDir);
    assertEquals("Archived files changed after running archive cleaner.", files, archivedFiles);
    // but we still have the archive directory
    assertTrue(fs.exists(HFileArchiveUtil.getArchivePath(UTIL.getConfiguration())));
}
Also used: Path (org.apache.hadoop.fs.Path), BaseHFileCleanerDelegate (org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate), Configuration (org.apache.hadoop.conf.Configuration), StoppableImplementation (org.apache.hadoop.hbase.util.StoppableImplementation), ArrayList (java.util.ArrayList), Stoppable (org.apache.hadoop.hbase.Stoppable), HFileCleaner (org.apache.hadoop.hbase.master.cleaner.HFileCleaner), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), CountDownLatch (java.util.concurrent.CountDownLatch), HRegion (org.apache.hadoop.hbase.regionserver.HRegion), CompactedHFilesDischarger (org.apache.hadoop.hbase.regionserver.CompactedHFilesDischarger), FileSystem (org.apache.hadoop.fs.FileSystem), Test (org.junit.Test)
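
A pattern that recurs across these examples (and in the aggregation counts below) is pairing a ColumnFamilyDescriptor with a TableDescriptorBuilder before handing the result to Admin. A minimal sketch of that combined idiom, with hypothetical table and family names:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTableSketch {
    static void createSingleFamilyTable(Admin admin, String table, String family) throws IOException {
        // One default-configured family attached to a new table descriptor.
        TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf(table))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes(family)))
            .build();
        admin.createTable(td);
    }
}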

Aggregations

ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 199 uses
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 95 uses
Test (org.junit.Test): 92 uses
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 78 uses
IOException (java.io.IOException): 44 uses
TableName (org.apache.hadoop.hbase.TableName): 44 uses
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 42 uses
Path (org.apache.hadoop.fs.Path): 41 uses
Admin (org.apache.hadoop.hbase.client.Admin): 36 uses
Configuration (org.apache.hadoop.conf.Configuration): 34 uses
ArrayList (java.util.ArrayList): 32 uses
Put (org.apache.hadoop.hbase.client.Put): 32 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 28 uses
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 24 uses
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 22 uses
Get (org.apache.hadoop.hbase.client.Get): 20 uses
Result (org.apache.hadoop.hbase.client.Result): 19 uses
ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder): 17 uses
Scan (org.apache.hadoop.hbase.client.Scan): 17 uses
Table (org.apache.hadoop.hbase.client.Table): 17 uses