Search in sources:

Example 91 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

From the class TestDeleteMobTable, method testDeleteNonMobTable.

@Test
public void testDeleteNonMobTable() throws Exception {
    // Build a plain (non-MOB) table and verify that no MOB artifacts ever
    // appear for it — neither while it is live nor after it is deleted.
    final TableName tableName = TableName.valueOf(name.getMethodName());
    final TableDescriptor tableDescriptor = createTableDescriptor(tableName, false);
    final ColumnFamilyDescriptor familyDescriptor = tableDescriptor.getColumnFamily(FAMILY);
    final String family = familyDescriptor.getNameAsString();
    Table table = createTableWithOneFile(tableDescriptor);
    try {
        // A non-MOB table must not produce any mob files or a mob directory.
        Assert.assertEquals(0, countMobFiles(tableName, family));
        Assert.assertEquals(0, countArchiveMobFiles(tableName, family));
        Assert.assertFalse(mobTableDirExist(tableName));
    } finally {
        table.close();
        TEST_UTIL.deleteTable(tableName);
    }
    // After deletion the table is gone and there is still no mob state anywhere.
    Assert.assertFalse(TEST_UTIL.getAdmin().tableExists(tableName));
    Assert.assertEquals(0, countMobFiles(tableName, family));
    Assert.assertEquals(0, countArchiveMobFiles(tableName, family));
    Assert.assertFalse(mobTableDirExist(tableName));
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)

Example 92 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

From the class TestDeleteMobTable, method testMobFamilyDelete.

@Test
public void testMobFamilyDelete() throws Exception {
    // Create a table with a MOB-enabled family plus a second ordinary family,
    // then drop only the MOB family and check its file moves to the archive.
    final TableName tableName = TableName.valueOf(name.getMethodName());
    TableDescriptor tableDescriptor = createTableDescriptor(tableName, true);
    final ColumnFamilyDescriptor familyDescriptor = tableDescriptor.getColumnFamily(FAMILY);
    final String family = familyDescriptor.getNameAsString();
    // Add a second, non-MOB family so the table survives deleting FAMILY.
    tableDescriptor = TableDescriptorBuilder.newBuilder(tableDescriptor)
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("family2")))
        .build();
    Table table = createTableWithOneFile(tableDescriptor);
    try {
        // Before the drop: exactly one live mob file, nothing archived yet.
        Assert.assertEquals(1, countMobFiles(tableName, family));
        Assert.assertEquals(0, countArchiveMobFiles(tableName, family));
        String fileName = assertHasOneMobRow(table, tableName, family);
        Assert.assertFalse(mobArchiveExist(tableName, family, fileName));
        Assert.assertTrue(mobTableDirExist(tableName));
        // Drop only the MOB family; the table itself stays online.
        TEST_UTIL.getAdmin().deleteColumnFamily(tableName, FAMILY);
        // The mob file must now be archived and the family directory gone.
        Assert.assertEquals(0, countMobFiles(tableName, family));
        Assert.assertEquals(1, countArchiveMobFiles(tableName, family));
        Assert.assertTrue(mobArchiveExist(tableName, family, fileName));
        Assert.assertFalse(mobColumnFamilyDirExist(tableName, family));
    } finally {
        table.close();
        TEST_UTIL.deleteTable(tableName);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)

Example 93 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

From the class TestHRegion, method testGetWithFilter.

@Test
public void testGetWithFilter() throws IOException, InterruptedException {
    byte[] row1 = Bytes.toBytes("row1");
    byte[] fam1 = Bytes.toBytes("fam1");
    byte[] col1 = Bytes.toBytes("col1");
    byte[] value1 = Bytes.toBytes("value1");
    byte[] value2 = Bytes.toBytes("value2");
    final int maxVersions = 3;
    // One family capped at three versions; four are written so the oldest ages out.
    TableDescriptor tableDescriptor = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("testFilterAndColumnTracker"))
        .setColumnFamily(
            ColumnFamilyDescriptorBuilder.newBuilder(fam1).setMaxVersions(maxVersions).build())
        .build();
    ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null,
        MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
    RegionInfo info = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
    Path logDir = TEST_UTIL.getDataTestDirOnTestFS(method + ".log");
    final WAL wal = HBaseTestingUtil.createWal(TEST_UTIL.getConfiguration(), logDir, info);
    this.region = TEST_UTIL.createLocalHRegion(info, CONF, tableDescriptor, wal);
    // Put 4 versions into the memstore at timestamps ts .. ts+3.
    long ts = 0;
    byte[][] cellValues = { value1, Bytes.toBytes("filter1"), Bytes.toBytes("filter2"), value2 };
    for (int i = 0; i < cellValues.length; i++) {
        Put put = new Put(row1, ts + i);
        put.addColumn(fam1, col1, cellValues[i]);
        region.put(put);
    }
    Get get = new Get(row1);
    get.readAllVersions();
    Result res = region.get(get);
    // Only maxVersions versions survive; the oldest has gone from the user view.
    assertEquals(maxVersions, res.size());
    get.setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value")));
    res = region.get(get);
    // With the value filter the aged-out version must still be invisible, so
    // only one key value ("value2") can match.
    assertEquals(1, res.size());
    assertTrue(CellUtil.matchingValue(new KeyValue(row1, fam1, col1, value2), res.rawCells()[0]));
    assertEquals(ts + 3, res.rawCells()[0].getTimestamp());
    region.flush(true);
    region.compact(true);
    Thread.sleep(1000);
    res = region.get(get);
    // After flush and compaction the result must match the in-memory result.
    assertEquals(1, res.size());
    assertTrue(CellUtil.matchingValue(new KeyValue(row1, fam1, col1, value2), res.rawCells()[0]));
}
Also used : Path(org.apache.hadoop.fs.Path) WAL(org.apache.hadoop.hbase.wal.WAL) KeyValue(org.apache.hadoop.hbase.KeyValue) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Put(org.apache.hadoop.hbase.client.Put) CheckAndMutateResult(org.apache.hadoop.hbase.client.CheckAndMutateResult) Result(org.apache.hadoop.hbase.client.Result) SubstringComparator(org.apache.hadoop.hbase.filter.SubstringComparator) Get(org.apache.hadoop.hbase.client.Get) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) ValueFilter(org.apache.hadoop.hbase.filter.ValueFilter) Test(org.junit.Test)

Example 94 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

From the class TestRegionReplicaReplication, method testRegionReplicaWithoutMemstoreReplication.

@Test
public void testRegionReplicaWithoutMemstoreReplication() throws Exception {
    int regionReplication = 3;
    // Region replicas are enabled but memstore replication is switched off, so
    // replicas may only see data after it has been flushed to store files.
    TableDescriptor htd = HTU.createModifyableTableDescriptor(name.getMethodName())
        .setRegionReplication(regionReplication)
        .setRegionMemStoreReplication(false)
        .build();
    createOrEnableTableWithRetries(htd, true);
    final TableName tableName = htd.getTableName();
    // try-with-resources closes table then connection (same order as the old
    // finally block) and, unlike the old code, does not leak the connection
    // when connection.getTable(tableName) throws.
    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
        Table table = connection.getTable(tableName)) {
        // write data to the primary. The replicas should not receive the data
        final int STEP = 100;
        for (int i = 0; i < 3; ++i) {
            final int startRow = i * STEP;
            final int endRow = (i + 1) * STEP;
            LOG.info("Writing data from " + startRow + " to " + endRow);
            HTU.loadNumericRows(table, HBaseTestingUtil.fam1, startRow, endRow);
            verifyReplication(tableName, regionReplication, startRow, endRow, false);
            // Flush the table, now the data should show up in the replicas
            LOG.info("flushing table");
            HTU.flush(tableName);
            verifyReplication(tableName, regionReplication, 0, endRow, true);
        }
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) Connection(org.apache.hadoop.hbase.client.Connection) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)

Example 95 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

From the class TestRegionReplicaReplication, method testRegionReplicaReplication.

private void testRegionReplicaReplication(int regionReplication) throws Exception {
    // test region replica replication. Create a table with single region, write some data
    // ensure that data is replicated to the secondary region
    TableName tableName =
        TableName.valueOf("testRegionReplicaReplicationWithReplicas_" + regionReplication);
    // Pass tableName directly; the old TableName.valueOf(tableName.toString())
    // round-trip re-parsed an already-valid TableName for no benefit.
    TableDescriptor htd = HTU.createModifyableTableDescriptor(tableName,
        ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
        ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED)
        .setRegionReplication(regionReplication)
        .build();
    createOrEnableTableWithRetries(htd, true);
    // A second table without replicas, used as a control for verification.
    TableName tableNameNoReplicas =
        TableName.valueOf("testRegionReplicaReplicationWithReplicas_NO_REPLICAS");
    HTU.deleteTableIfAny(tableNameNoReplicas);
    HTU.createTable(tableNameNoReplicas, HBaseTestingUtil.fam1);
    try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
        Table table = connection.getTable(tableName);
        Table tableNoReplicas = connection.getTable(tableNameNoReplicas)) {
        // load some data to the non-replicated table
        HTU.loadNumericRows(tableNoReplicas, HBaseTestingUtil.fam1, 6000, 7000);
        // load the data to the table
        HTU.loadNumericRows(table, HBaseTestingUtil.fam1, 0, 1000);
        verifyReplication(tableName, regionReplication, 0, 1000);
    } finally {
        HTU.deleteTableIfAny(tableNameNoReplicas);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) Connection(org.apache.hadoop.hbase.client.Connection) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Aggregations

TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)639 Test (org.junit.Test)356 TableName (org.apache.hadoop.hbase.TableName)237 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)180 IOException (java.io.IOException)151 Put (org.apache.hadoop.hbase.client.Put)142 Admin (org.apache.hadoop.hbase.client.Admin)136 Path (org.apache.hadoop.fs.Path)124 Table (org.apache.hadoop.hbase.client.Table)121 ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)96 Configuration (org.apache.hadoop.conf.Configuration)91 TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder)77 ArrayList (java.util.ArrayList)75 FileSystem (org.apache.hadoop.fs.FileSystem)66 Result (org.apache.hadoop.hbase.client.Result)66 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)64 Connection (org.apache.hadoop.hbase.client.Connection)59 Scan (org.apache.hadoop.hbase.client.Scan)50 Get (org.apache.hadoop.hbase.client.Get)49 List (java.util.List)39