Example 76 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class TestVisibilityLabelsWithDeletes method doPuts.

private Table doPuts(TableName tableName) throws IOException, InterruptedIOException, RetriesExhaustedWithDetailsException, InterruptedException {
    Admin hBaseAdmin = TEST_UTIL.getAdmin();
    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
    colDesc.setMaxVersions(5);
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(colDesc);
    hBaseAdmin.createTable(desc);
    List<Put> puts = new ArrayList<>(5);
    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(fam, qual, 123L, value);
    put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
    puts.add(put);
    put = new Put(Bytes.toBytes("row1"));
    put.addColumn(fam, qual, 124L, value);
    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET + ")"));
    puts.add(put);
    put = new Put(Bytes.toBytes("row1"));
    put.addColumn(fam, qual, 125L, value);
    put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
    puts.add(put);
    put = new Put(Bytes.toBytes("row1"));
    put.addColumn(fam, qual, 126L, value);
    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET + ")"));
    puts.add(put);
    put = new Put(Bytes.toBytes("row1"));
    put.addColumn(fam, qual, 127L, value);
    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET + ")"));
    puts.add(put);
    TEST_UTIL.getAdmin().flush(tableName);
    put = new Put(Bytes.toBytes("row2"));
    put.addColumn(fam, qual, 127L, value);
    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET + "&" + SECRET + ")"));
    puts.add(put);
    Table table = TEST_UTIL.getConnection().getTable(tableName);
    table.put(puts);
    return table;
}
Also used : Table(org.apache.hadoop.hbase.client.Table) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) ArrayList(java.util.ArrayList) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
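
The puts stack five timestamped versions on row1 (plus one cell on row2), each guarded by its own visibility expression, and a scan only returns the cells whose expressions its authorizations satisfy. A minimal sketch of reading them back: scanWithAuths is an illustrative helper, not part of the test, and it assumes the same TEST_UTIL, fam, qual, SECRET and TOPSECRET members as the class above, plus org.apache.hadoop.hbase.security.visibility.Authorizations.

private void scanWithAuths(TableName tableName) throws IOException {
    try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
        Scan s = new Scan();
        // surface every stored version, not just the newest
        s.setMaxVersions(5);
        // only cells whose visibility expression these labels satisfy come back
        s.setAuthorizations(new Authorizations(SECRET, TOPSECRET));
        try (ResultScanner scanner = table.getScanner(s)) {
            for (Result r : scanner) {
                System.out.println(r);
            }
        }
    }
}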

Example 77 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class TestVisibilityLabelsWithDeletes method testDeletesWithoutAndWithVisibilityLabels.

@Test
public void testDeletesWithoutAndWithVisibilityLabels() throws Exception {
    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
    Admin hBaseAdmin = TEST_UTIL.getAdmin();
    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(colDesc);
    hBaseAdmin.createTable(desc);
    try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
        Put put = new Put(row1);
        put.addColumn(fam, qual, value);
        put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
        table.put(put);
        Delete d = new Delete(row1);
        // without visibility
        d.addColumn(fam, qual);
        table.delete(d);
        PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    Scan s = new Scan();
                    ResultScanner scanner = table.getScanner(s);
                    // The delete carried no visibility expression, so it does not match
                    // the CONFIDENTIAL-labelled cell and the put remains visible.
                    Result[] next = scanner.next(3);
                    assertEquals(1, next.length);
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(scanAction);
        d = new Delete(row1);
        // with visibility
        d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
        d.addColumn(fam, qual);
        table.delete(d);
        scanAction = new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    Scan s = new Scan();
                    ResultScanner scanner = table.getScanner(s);
                    Result[] next = scanner.next(3);
                    // The delete carried the matching visibility expression, so the cell is gone.
                    assertEquals(0, next.length);
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(scanAction);
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) RetriesExhaustedWithDetailsException(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Result(org.apache.hadoop.hbase.client.Result) TableName(org.apache.hadoop.hbase.TableName) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)
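
Both variants presuppose that the labels used above (CONFIDENTIAL, PRIVATE, SECRET, TOPSECRET) were already declared and granted to the scanning user. Below is a hedged sketch of that bootstrap with the standard org.apache.hadoop.hbase.security.visibility.VisibilityClient API; the real test class performs an equivalent step in its setup, and granting to the current user here is purely illustrative.

private static void setupLabels() throws Throwable {
    // declare the labels with the visibility coprocessor
    VisibilityClient.addLabels(TEST_UTIL.getConnection(),
        new String[] { CONFIDENTIAL, PRIVATE, SECRET, TOPSECRET });
    // grant them so the user's scans may carry matching authorizations
    VisibilityClient.setAuths(TEST_UTIL.getConnection(),
        new String[] { CONFIDENTIAL, PRIVATE, SECRET, TOPSECRET },
        User.getCurrent().getShortName());
}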

Example 78 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class TestVisibilityLabelsWithDeletes method testVisibilityLabelsWithDeleteColumnWithSpecificVersionWithPutsReAppearing.

@Test
public void testVisibilityLabelsWithDeleteColumnWithSpecificVersionWithPutsReAppearing() throws Exception {
    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
    Admin hBaseAdmin = TEST_UTIL.getAdmin();
    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
    colDesc.setMaxVersions(5);
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(colDesc);
    hBaseAdmin.createTable(desc);
    try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
        Put put1 = new Put(Bytes.toBytes("row1"));
        put1.addColumn(fam, qual, 123L, value);
        put1.setCellVisibility(new CellVisibility(CONFIDENTIAL));
        Put put2 = new Put(Bytes.toBytes("row1"));
        put2.addColumn(fam, qual, 123L, value1);
        put2.setCellVisibility(new CellVisibility(SECRET));
        table.put(createList(put1, put2));
        Scan s = new Scan();
        s.setMaxVersions(5);
        s.setAuthorizations(new Authorizations(CONFIDENTIAL, SECRET));
        ResultScanner scanner = table.getScanner(s);
        assertEquals(1, scanner.next(3).length);
        scanner.close();
        PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    Delete d = new Delete(row1);
                    d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
                    d.addColumn(fam, qual, 123L);
                    table.delete(d);
                }
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    Delete d = new Delete(row1);
                    d.setCellVisibility(new CellVisibility(SECRET));
                    d.addColumn(fam, qual, 123L);
                    table.delete(d);
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(actiona);
        s = new Scan();
        s.setMaxVersions(5);
        s.setAuthorizations(new Authorizations(CONFIDENTIAL));
        scanner = table.getScanner(s);
        assertEquals(0, scanner.next(3).length);
        scanner.close();
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) TableName(org.apache.hadoop.hbase.TableName) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)
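
Each delete above only covers the cell whose visibility expression matches its own, which is why the test issues one delete per label to clear both cells at timestamp 123. As a hedged extension (same table, constants, and reassignment style as the test above), the SECRET-labelled cell can be confirmed gone the same way:

        s = new Scan();
        s.setMaxVersions(5);
        s.setAuthorizations(new Authorizations(SECRET));
        scanner = table.getScanner(s);
        // the SECRET-labelled cell at ts=123 was covered by the second delete
        assertEquals(0, scanner.next(3).length);
        scanner.close();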

Example 79 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class TestReplicaWithCluster method testChangeTable.

@Test(timeout = 120000)
public void testChangeTable() throws Exception {
    HTableDescriptor hdt = HTU.createTableDescriptor("testChangeTable");
    hdt.setRegionReplication(NB_SERVERS);
    hdt.addCoprocessor(SlowMeCopro.class.getName());
    Table table = HTU.createTable(hdt, new byte[][] { f }, null);
    // basic test: it should work.
    Put p = new Put(row);
    p.addColumn(f, row, row);
    table.put(p);
    Get g = new Get(row);
    Result r = table.get(g);
    Assert.assertFalse(r.isStale());
    // Add a CF, it should work.
    HTableDescriptor bHdt = HTU.getAdmin().getTableDescriptor(hdt.getTableName());
    HColumnDescriptor hcd = new HColumnDescriptor(row);
    hdt.addFamily(hcd);
    HTU.getAdmin().disableTable(hdt.getTableName());
    HTU.getAdmin().modifyTable(hdt.getTableName(), hdt);
    HTU.getAdmin().enableTable(hdt.getTableName());
    HTableDescriptor nHdt = HTU.getAdmin().getTableDescriptor(hdt.getTableName());
    Assert.assertEquals("fams=" + Arrays.toString(nHdt.getColumnFamilies()), bHdt.getColumnFamilyCount() + 1, nHdt.getColumnFamilyCount());
    p = new Put(row);
    p.addColumn(row, row, row);
    table.put(p);
    g = new Get(row);
    r = table.get(g);
    Assert.assertFalse(r.isStale());
    try {
        SlowMeCopro.cdl.set(new CountDownLatch(1));
        g = new Get(row);
        g.setConsistency(Consistency.TIMELINE);
        r = table.get(g);
        Assert.assertTrue(r.isStale());
    } finally {
        SlowMeCopro.cdl.get().countDown();
        SlowMeCopro.sleepTime.set(0);
    }
    Admin admin = HTU.getAdmin();
    nHdt = admin.getTableDescriptor(hdt.getTableName());
    Assert.assertEquals("fams=" + Arrays.toString(nHdt.getColumnFamilies()), bHdt.getColumnFamilyCount() + 1, nHdt.getColumnFamilyCount());
    admin.disableTable(hdt.getTableName());
    admin.deleteTable(hdt.getTableName());
    admin.close();
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) CountDownLatch(java.util.concurrent.CountDownLatch) ReplicationAdmin(org.apache.hadoop.hbase.client.replication.ReplicationAdmin) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
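
HColumnDescriptor and HTableDescriptor are deprecated as of HBase 2.0 in favour of the builder API, which also makes adding a family an online operation. A hedged sketch of the equivalent step follows (HBase 2.x signatures via ColumnFamilyDescriptorBuilder; the family name "cf2" is illustrative, not from the test):

private static void addFamily(Admin admin, TableName tableName) throws IOException {
    // builder-based replacement for new HColumnDescriptor(...) plus modifyTable(...)
    ColumnFamilyDescriptor cfd =
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf2")).build();
    // online schema change; no disable/enable cycle required
    admin.addColumnFamily(tableName, cfd);
}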

Example 80 with HColumnDescriptor

use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

the class TestReplicaWithCluster method testBulkLoad.

@Test(timeout = 30000)
public void testBulkLoad() throws IOException {
    // Create table then get the single region for our new table.
    LOG.debug("Creating test table");
    HTableDescriptor hdt = HTU.createTableDescriptor("testBulkLoad");
    hdt.setRegionReplication(NB_SERVERS);
    hdt.addCoprocessor(SlowMeCopro.class.getName());
    Table table = HTU.createTable(hdt, new byte[][] { f }, null);
    // create hfiles to load.
    LOG.debug("Creating test data");
    Path dir = HTU.getDataTestDirOnTestFS("testBulkLoad");
    final int numRows = 10;
    final byte[] qual = Bytes.toBytes("qual");
    final byte[] val = Bytes.toBytes("val");
    final List<Pair<byte[], String>> famPaths = new ArrayList<>();
    for (HColumnDescriptor col : hdt.getColumnFamilies()) {
        Path hfile = new Path(dir, col.getNameAsString());
        TestHRegionServerBulkLoad.createHFile(HTU.getTestFileSystem(), hfile, col.getName(), qual, val, numRows);
        famPaths.add(new Pair<>(col.getName(), hfile.toString()));
    }
    // bulk load HFiles
    LOG.debug("Loading test data");
    final ClusterConnection conn = (ClusterConnection) HTU.getAdmin().getConnection();
    table = conn.getTable(hdt.getTableName());
    final String bulkToken = new SecureBulkLoadClient(HTU.getConfiguration(), table).prepareBulkLoad(conn);
    ClientServiceCallable<Void> callable = new ClientServiceCallable<Void>(conn,
        hdt.getTableName(), TestHRegionServerBulkLoad.rowkey(0),
        new RpcControllerFactory(HTU.getConfiguration()).newController()) {

        @Override
        protected Void rpcCall() throws Exception {
            LOG.debug("Going to connect to server " + getLocation() + " for row " + Bytes.toStringBinary(getRow()));
            SecureBulkLoadClient secureClient = null;
            byte[] regionName = getLocation().getRegionInfo().getRegionName();
            try (Table table = conn.getTable(getTableName())) {
                secureClient = new SecureBulkLoadClient(HTU.getConfiguration(), table);
                secureClient.secureBulkLoadHFiles(getStub(), famPaths, regionName, true, null, bulkToken);
            }
            return null;
        }
    };
    RpcRetryingCallerFactory factory = new RpcRetryingCallerFactory(HTU.getConfiguration());
    RpcRetryingCaller<Void> caller = factory.newCaller();
    caller.callWithRetries(callable, 10000);
    // verify we can read them from the primary
    LOG.debug("Verifying data load");
    for (int i = 0; i < numRows; i++) {
        byte[] row = TestHRegionServerBulkLoad.rowkey(i);
        Get g = new Get(row);
        Result r = table.get(g);
        Assert.assertFalse(r.isStale());
    }
    // verify we can read them from the replica
    LOG.debug("Verifying replica queries");
    try {
        SlowMeCopro.cdl.set(new CountDownLatch(1));
        for (int i = 0; i < numRows; i++) {
            byte[] row = TestHRegionServerBulkLoad.rowkey(i);
            Get g = new Get(row);
            g.setConsistency(Consistency.TIMELINE);
            Result r = table.get(g);
            Assert.assertTrue(r.isStale());
        }
        SlowMeCopro.cdl.get().countDown();
    } finally {
        SlowMeCopro.cdl.get().countDown();
        SlowMeCopro.sleepTime.set(0);
    }
    HTU.getAdmin().disableTable(hdt.getTableName());
    HTU.deleteTable(hdt.getTableName());
}
Also used : Path(org.apache.hadoop.fs.Path) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) ArrayList(java.util.ArrayList) CountDownLatch(java.util.concurrent.CountDownLatch) RpcControllerFactory(org.apache.hadoop.hbase.ipc.RpcControllerFactory) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Pair(org.apache.hadoop.hbase.util.Pair) Test(org.junit.Test)
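
The ClientServiceCallable/SecureBulkLoadClient plumbing above drives the bulk-load RPC by hand; outside of tests the same load usually goes through LoadIncrementalHFiles, which wraps that client. A hedged sketch, assuming dir holds one subdirectory of HFiles per column family as created above:

private static void bulkLoad(Connection conn, TableName tableName, Path dir) throws Exception {
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conn.getConfiguration());
    try (Table table = conn.getTable(tableName);
         RegionLocator locator = conn.getRegionLocator(tableName);
         Admin admin = conn.getAdmin()) {
        // finds the HFiles under dir and hands each to its owning region
        loader.doBulkLoad(dir, admin, table, locator);
    }
}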

Aggregations

HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 679 uses
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 561 uses
Test (org.junit.Test): 358 uses
TableName (org.apache.hadoop.hbase.TableName): 200 uses
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 137 uses
Put (org.apache.hadoop.hbase.client.Put): 132 uses
Table (org.apache.hadoop.hbase.client.Table): 118 uses
IOException (java.io.IOException): 112 uses
Admin (org.apache.hadoop.hbase.client.Admin): 112 uses
Path (org.apache.hadoop.fs.Path): 81 uses
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 74 uses
ArrayList (java.util.ArrayList): 66 uses
Configuration (org.apache.hadoop.conf.Configuration): 65 uses
Connection (org.apache.hadoop.hbase.client.Connection): 52 uses
Scan (org.apache.hadoop.hbase.client.Scan): 50 uses
Result (org.apache.hadoop.hbase.client.Result): 45 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 44 uses
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 42 uses
Connection (java.sql.Connection): 41 uses
Properties (java.util.Properties): 38 uses