Search in sources :

Example 36 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From class TestNamespaceAuditor, method testRestoreSnapshot:

/**
 * Verifies that the namespace auditor's region count tracks a split and is
 * rolled back by a snapshot restore: 4 pre-split regions -> 5 after split ->
 * 4 again after restoring the pre-split snapshot.
 */
@Test
public void testRestoreSnapshot() throws Exception {
    String nsp = prefix + "_testRestoreSnapshot";
    // Cap the namespace at 10 regions so the auditor actively tracks counts.
    NamespaceDescriptor nspDesc = NamespaceDescriptor.create(nsp)
        .addConfiguration(TableNamespaceManager.KEY_MAX_REGIONS, "10").build();
    ADMIN.createNamespace(nspDesc);
    assertNotNull("Namespace descriptor found null.", ADMIN.getNamespaceDescriptor(nsp));
    TableName tableName1 = TableName.valueOf(nsp + TableName.NAMESPACE_DELIM + "table1");
    ColumnFamilyDescriptor columnFamilyDescriptor =
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("fam1")).build();
    TableDescriptorBuilder tableDescOne = TableDescriptorBuilder.newBuilder(tableName1);
    tableDescOne.setColumnFamily(columnFamilyDescriptor);
    // Pre-split the table into 4 regions.
    ADMIN.createTable(tableDescOne.build(), Bytes.toBytes("AAA"), Bytes.toBytes("ZZZ"), 4);
    NamespaceTableAndRegionInfo nstate = getNamespaceState(nsp);
    assertEquals("Initial region count should be 4.", 4, nstate.getRegionCount());
    String snapshot = "snapshot_testRestoreSnapshot";
    ADMIN.snapshot(snapshot, tableName1);
    ADMIN.split(tableName1, Bytes.toBytes("JJJ"));
    // Poll for the split to be reflected in the auditor state instead of a
    // fixed 2s sleep: less flaky on slow machines, usually faster on fast
    // ones. The assertEquals below reports the failure if we time out.
    long deadline = System.currentTimeMillis() + 30000;
    while (nstate.getRegionCount() != 5 && System.currentTimeMillis() < deadline) {
        Thread.sleep(100);
    }
    assertEquals("Total regions count should be 5.", 5, nstate.getRegionCount());
    ADMIN.disableTable(tableName1);
    ADMIN.restoreSnapshot(snapshot);
    assertEquals("Total regions count should be 4 after restore.", 4, nstate.getRegionCount());
    ADMIN.enableTable(tableName1);
    ADMIN.deleteSnapshot(snapshot);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) NamespaceDescriptor(org.apache.hadoop.hbase.NamespaceDescriptor) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) Test(org.junit.Test)

Example 37 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From class TestNamespaceAuditor, method testTableOperations:

/**
 * Verifies that namespace quotas (max 5 regions, max 2 tables) are enforced
 * on table creation: an over-quota region count is rejected, an in-quota one
 * succeeds, and a third table is rejected by the table quota.
 */
@Test
public void testTableOperations() throws Exception {
    String nsp = prefix + "_np2";
    NamespaceDescriptor nspDesc = NamespaceDescriptor.create(nsp)
        .addConfiguration(TableNamespaceManager.KEY_MAX_REGIONS, "5")
        .addConfiguration(TableNamespaceManager.KEY_MAX_TABLES, "2").build();
    ADMIN.createNamespace(nspDesc);
    assertNotNull("Namespace descriptor found null.", ADMIN.getNamespaceDescriptor(nsp));
    assertEquals(3, ADMIN.listNamespaceDescriptors().length);
    ColumnFamilyDescriptor columnFamilyDescriptor =
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("fam1")).build();
    TableDescriptorBuilder tableDescOne =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(nsp + TableName.NAMESPACE_DELIM + "table1"));
    tableDescOne.setColumnFamily(columnFamilyDescriptor);
    TableDescriptorBuilder tableDescTwo =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(nsp + TableName.NAMESPACE_DELIM + "table2"));
    tableDescTwo.setColumnFamily(columnFamilyDescriptor);
    TableDescriptorBuilder tableDescThree =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(nsp + TableName.NAMESPACE_DELIM + "table3"));
    tableDescThree.setColumnFamily(columnFamilyDescriptor);
    ADMIN.createTable(tableDescOne.build());
    // table1 holds 1 region, so creating table2 with 5 regions would exceed
    // the 5-region quota and must be rejected with an IOException.
    // Catch IOException directly (instead of catch(Exception) + instanceof
    // assert) and assert AFTER the try: the old assertTrue inside finally
    // would replace any unexpected exception with an AssertionError, hiding
    // the real failure cause.
    boolean constraintViolated = false;
    try {
        ADMIN.createTable(tableDescTwo.build(), Bytes.toBytes("AAA"), Bytes.toBytes("ZZZ"), 5);
    } catch (IOException exp) {
        constraintViolated = true;
    }
    assertTrue("Constraint not violated for table " + tableDescTwo.build().getTableName(),
        constraintViolated);
    // 1 (table1) + 4 = 5 regions fits the quota exactly.
    ADMIN.createTable(tableDescTwo.build(), Bytes.toBytes("AAA"), Bytes.toBytes("ZZZ"), 4);
    NamespaceTableAndRegionInfo nspState = getQuotaManager().getState(nsp);
    assertNotNull(nspState);
    assertEquals(2, nspState.getTables().size());
    assertEquals(5, nspState.getRegionCount());
    // A third table exceeds the 2-table quota and must also be rejected.
    constraintViolated = false;
    try {
        ADMIN.createTable(tableDescThree.build());
    } catch (IOException exp) {
        constraintViolated = true;
    }
    assertTrue("Constraint not violated for table " + tableDescThree.build().getTableName(),
        constraintViolated);
}
Also used : NamespaceDescriptor(org.apache.hadoop.hbase.NamespaceDescriptor) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) IOException(java.io.IOException) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) RestoreSnapshotException(org.apache.hadoop.hbase.snapshot.RestoreSnapshotException) KeeperException(org.apache.zookeeper.KeeperException) IOException(java.io.IOException) DoNotRetryRegionException(org.apache.hadoop.hbase.client.DoNotRetryRegionException) QuotaExceededException(org.apache.hadoop.hbase.quotas.QuotaExceededException) ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)

Example 38 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From class TestNamespaceAuditor, method testExceedTableQuotaInNamespace:

/**
 * Verifies that creating a second table in a namespace whose table quota is 1
 * fails with a QuotaExceededException (expected by the annotation).
 */
@Test(expected = QuotaExceededException.class)
public void testExceedTableQuotaInNamespace() throws Exception {
    String nsp = prefix + "_testExceedTableQuotaInNamespace";
    // Quota: a single table allowed in this namespace.
    NamespaceDescriptor nspDesc = NamespaceDescriptor.create(nsp)
        .addConfiguration(TableNamespaceManager.KEY_MAX_TABLES, "1")
        .build();
    ADMIN.createNamespace(nspDesc);
    assertNotNull("Namespace descriptor found null.", ADMIN.getNamespaceDescriptor(nsp));
    assertEquals(3, ADMIN.listNamespaceDescriptors().length);
    ColumnFamilyDescriptor family =
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("fam1")).build();
    TableDescriptorBuilder firstTable = TableDescriptorBuilder
        .newBuilder(TableName.valueOf(nsp + TableName.NAMESPACE_DELIM + "table1"));
    firstTable.setColumnFamily(family);
    TableDescriptorBuilder secondTable = TableDescriptorBuilder
        .newBuilder(TableName.valueOf(nsp + TableName.NAMESPACE_DELIM + "table2"));
    secondTable.setColumnFamily(family);
    // First table is within quota; the second creation must throw.
    ADMIN.createTable(firstTable.build());
    ADMIN.createTable(secondTable.build(), Bytes.toBytes("AAA"), Bytes.toBytes("ZZZ"), 4);
}
Also used : NamespaceDescriptor(org.apache.hadoop.hbase.NamespaceDescriptor) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) Test(org.junit.Test)

Example 39 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From class TestStripeCompactor, method createCompactor:

/**
 * Builds a StripeCompactor over a mocked HStore whose scanner creation is
 * stubbed to replay the given KeyValues, and whose store engine hands writers
 * to the supplied capture so the test can inspect compaction output.
 *
 * @param writers capture that records every store-file writer the compactor opens
 * @param input   cells the stubbed scanner will emit
 */
private StripeCompactor createCompactor(StoreFileWritersCapture writers, KeyValue[] input) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    final Scanner stubScanner = new Scanner(input);
    // Minimal store mock: just enough state for the compactor to run.
    ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.of(NAME_OF_THINGS);
    ScanInfo scanInfo = new ScanInfo(conf, family, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR);
    // Store engine routes writer creation into the capture.
    StoreEngine engine = mock(StoreEngine.class);
    when(engine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
    HStore mockStore = mock(HStore.class);
    when(mockStore.getColumnFamilyDescriptor()).thenReturn(family);
    when(mockStore.getScanInfo()).thenReturn(scanInfo);
    when(mockStore.areWritesEnabled()).thenReturn(true);
    when(mockStore.getFileSystem()).thenReturn(mock(FileSystem.class));
    when(mockStore.getRegionInfo()).thenReturn(RegionInfoBuilder.newBuilder(TABLE_NAME).build());
    when(mockStore.getStoreEngine()).thenReturn(engine);
    when(mockStore.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR);
    // Override both scanner factories so the compactor reads our fixed input
    // instead of real store files.
    return new StripeCompactor(conf, mockStore) {

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
            return stubScanner;
        }

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
            return stubScanner;
        }
    };
}
Also used : Scanner(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.Scanner) StoreFileScanner(org.apache.hadoop.hbase.regionserver.StoreFileScanner) InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) ScanType(org.apache.hadoop.hbase.regionserver.ScanType) StoreEngine(org.apache.hadoop.hbase.regionserver.StoreEngine) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) ScanInfo(org.apache.hadoop.hbase.regionserver.ScanInfo) ArrayList(java.util.ArrayList) List(java.util.List) CreateStoreFileWriterParams(org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) HStore(org.apache.hadoop.hbase.regionserver.HStore)

Example 40 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From class TestBulkLoadHFiles, method testSplitStoreFileWithDifferentEncoding:

/**
 * Writes a 1000-row HFile with one data-block encoding, splits it at row
 * "ggg" against a column family configured with a (possibly different)
 * encoding, and verifies that no rows are lost across the two halves.
 *
 * @param bulkloadEncoding encoding used when writing the source HFile
 * @param cfEncoding       encoding configured on the target column family
 */
private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding, DataBlockEncoding cfEncoding) throws IOException {
    Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding");
    FileSystem fs = util.getTestFileSystem();
    Path sourceFile = new Path(dir, "testhfile");
    ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder
        .newBuilder(FAMILY)
        .setDataBlockEncoding(cfEncoding)
        .build();
    HFileTestUtil.createHFileWithDataBlockEncoding(util.getConfiguration(), fs, sourceFile,
        bulkloadEncoding, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
    Path bottomHalf = new Path(dir, "bottom.out");
    Path topHalf = new Path(dir, "top.out");
    BulkLoadHFilesTool.splitStoreFile(util.getConfiguration(), sourceFile, familyDesc,
        Bytes.toBytes("ggg"), bottomHalf, topHalf);
    // Both halves together must still contain every original row.
    int totalRows = verifyHFile(bottomHalf) + verifyHFile(topHalf);
    assertEquals(1000, totalRows);
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)

Aggregations

ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)199 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)95 Test (org.junit.Test)92 TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder)78 IOException (java.io.IOException)44 TableName (org.apache.hadoop.hbase.TableName)44 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)42 Path (org.apache.hadoop.fs.Path)41 Admin (org.apache.hadoop.hbase.client.Admin)36 Configuration (org.apache.hadoop.conf.Configuration)34 ArrayList (java.util.ArrayList)32 Put (org.apache.hadoop.hbase.client.Put)32 FileSystem (org.apache.hadoop.fs.FileSystem)28 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)24 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)22 Get (org.apache.hadoop.hbase.client.Get)20 Result (org.apache.hadoop.hbase.client.Result)19 ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder)17 Scan (org.apache.hadoop.hbase.client.Scan)17 Table (org.apache.hadoop.hbase.client.Table)17