Search in sources:

Example 1 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache hbase project.

From the class TestClassLoading, method testClassLoadingFromLocalFS.

/**
 * HBASE-3516: verifies that a coprocessor class can be loaded from a jar on
 * the local file system, referenced via the table's COPROCESSOR$1 value.
 */
@Test
public // HBASE-3516: Test CP Class loading from local file system
void testClassLoadingFromLocalFS() throws Exception {
    File jarFile = buildCoprocessorJar(cpName3);
    // create a table that references the jar
    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(TableName.valueOf(cpName3));
    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("test")).build());
    tdb.setValue("COPROCESSOR$1", getLocalPath(jarFile) + "|" + cpName3 + "|" + Coprocessor.PRIORITY_USER);
    TableDescriptor tableDescriptor = tdb.build();
    Admin admin = TEST_UTIL.getAdmin();
    admin.createTable(tableDescriptor);
    waitForTable(tableDescriptor.getTableName());
    // verify that the coprocessor was loaded on EVERY matching region.
    // Previously a single flag was re-assigned per region, so a region missing
    // the coprocessor was masked whenever a later region had it loaded.
    boolean matchedRegion = false;
    boolean loadedEverywhere = true;
    SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
    for (HRegion region : hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
        if (region.getRegionInfo().getRegionNameAsString().startsWith(cpName3)) {
            matchedRegion = true;
            loadedEverywhere &= (region.getCoprocessorHost().findCoprocessor(cpName3) != null);
        }
    }
    // fail both when no region matched and when any matching region lacked the cp
    assertTrue("Class " + cpName3 + " was missing on a region", matchedRegion && loadedEverywhere);
}
Also used : SingleProcessHBaseCluster(org.apache.hadoop.hbase.SingleProcessHBaseCluster) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) Admin(org.apache.hadoop.hbase.client.Admin) File(java.io.File) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)

Example 2 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache hbase project.

From the class TestClassLoading, method loadingClassFromLibDirInJar.

/**
 * Verifies that coprocessor classes packaged in jars nested under the given
 * lib prefix of an outer jar on HDFS are loaded onto the table's regions, and
 * that the configuration values of the second coprocessor (k1..k3) are
 * propagated into its region environment.
 */
void loadingClassFromLibDirInJar(String libPrefix) throws Exception {
    FileSystem fs = cluster.getFileSystem();
    File innerJarA = buildCoprocessorJar(cpName1);
    File innerJarB = buildCoprocessorJar(cpName2);
    File outerJar = new File(TEST_UTIL.getDataTestDir().toString(), "outer.jar");
    ClassLoaderTestHelper.addJarFilesToJar(outerJar, libPrefix, innerJarA, innerJarB);
    // Ship the outer jar to the DFS root so region servers can fetch it by URI.
    fs.copyFromLocalFile(new Path(outerJar.getPath()), new Path(fs.getUri().toString() + Path.SEPARATOR));
    String jarFileOnHDFS = fs.getUri().toString() + Path.SEPARATOR + outerJar.getName();
    assertTrue("Copy jar file to HDFS failed.", fs.exists(new Path(jarFileOnHDFS)));
    LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS);
    // Table referencing both coprocessors: the first without configuration
    // values, the second with k1..k3.
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("test")).build());
    builder.setValue("COPROCESSOR$1", jarFileOnHDFS + "|" + cpName1 + "|" + Coprocessor.PRIORITY_USER);
    builder.setValue("COPROCESSOR$2", jarFileOnHDFS + "|" + cpName2 + "|" + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2,k3=v3");
    Admin admin = TEST_UTIL.getAdmin();
    // Drop any leftover table from a previous run before re-creating it.
    if (admin.tableExists(tableName)) {
        if (admin.isTableEnabled(tableName)) {
            admin.disableTable(tableName);
        }
        admin.deleteTable(tableName);
    }
    TableDescriptor tableDescriptor = builder.build();
    admin.createTable(tableDescriptor);
    waitForTable(tableDescriptor.getTableName());
    // Confirm both coprocessors (and cpName2's configuration keys) reached the regions.
    boolean sawCp1 = false;
    boolean sawCp2 = false;
    boolean sawK1 = false;
    boolean sawK2 = false;
    boolean sawK3 = false;
    SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
    for (HRegion region : hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
        if (!region.getRegionInfo().getRegionNameAsString().startsWith(tableName.getNameAsString())) {
            continue;
        }
        CoprocessorEnvironment env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName1);
        if (env != null) {
            sawCp1 = true;
        }
        env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName2);
        if (env != null) {
            sawCp2 = true;
            Configuration conf = env.getConfiguration();
            sawK1 = conf.get("k1") != null;
            sawK2 = conf.get("k2") != null;
            sawK3 = conf.get("k3") != null;
        }
    }
    assertTrue("Class " + cpName1 + " was missing on a region", sawCp1);
    assertTrue("Class " + cpName2 + " was missing on a region", sawCp2);
    assertTrue("Configuration key 'k1' was missing on a region", sawK1);
    assertTrue("Configuration key 'k2' was missing on a region", sawK2);
    assertTrue("Configuration key 'k3' was missing on a region", sawK3);
}
Also used : Path(org.apache.hadoop.fs.Path) SingleProcessHBaseCluster(org.apache.hadoop.hbase.SingleProcessHBaseCluster) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Configuration(org.apache.hadoop.conf.Configuration) FileSystem(org.apache.hadoop.fs.FileSystem) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) Admin(org.apache.hadoop.hbase.client.Admin) File(java.io.File) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Example 3 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache hbase project.

From the class TestCoprocessorEndpoint, method setupBeforeClass.

/**
 * Starts a two-server mini cluster with the endpoint coprocessors configured,
 * creates the pre-split test table, and seeds it with ROWSIZE rows.
 */
@BeforeClass
public static void setupBeforeClass() throws Exception {
    // set configure to indicate which cp should be loaded
    Configuration conf = util.getConfiguration();
    conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 5000);
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName(), ProtobufCoprocessorService.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, ProtobufCoprocessorService.class.getName());
    util.startMiniCluster(2);
    Admin admin = util.getAdmin();
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TEST_TABLE).setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)).build();
    admin.createTable(tableDescriptor, new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] });
    util.waitUntilAllRegionsAssigned(TEST_TABLE);
    // try-with-resources: previously the table was closed manually, so a
    // failing put would leak the Table instance
    try (Table table = util.getConnection().getTable(TEST_TABLE)) {
        for (int i = 0; i < ROWSIZE; i++) {
            Put put = new Put(ROWS[i]);
            put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
            table.put(put);
        }
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Put(org.apache.hadoop.hbase.client.Put) BeforeClass(org.junit.BeforeClass)

Example 4 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache hbase project.

From the class TestSecureExport, method testVisibilityLabels.

/**
 * Writes rows guarded by different visibility expressions, exports cells
 * matching various label sets, re-imports them into a second table (renaming
 * FAMILYA to FAMILYB), and verifies the row counts visible to scans carrying
 * the same Authorizations. Currently ignored pending HBASE-23990.
 */
@Test
// See HBASE-23990
@org.junit.Ignore
public void testVisibilityLabels() throws IOException, Throwable {
    // Source table for the export and destination table for the re-import.
    final String exportTable = name.getMethodName() + "_export";
    final String importTable = name.getMethodName() + "_import";
    final TableDescriptor exportHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(exportTable)).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)).build();
    User owner = User.createUserForTesting(UTIL.getConfiguration(), USER_OWNER, new String[0]);
    SecureTestUtil.createTable(UTIL, owner, exportHtd, new byte[][] { Bytes.toBytes("s") });
    // Write three rows, each protected by a different visibility expression.
    AccessTestAction putAction = () -> {
        Put p1 = new Put(ROW1);
        p1.addColumn(FAMILYA, QUAL, NOW, QUAL);
        p1.setCellVisibility(new CellVisibility(SECRET));
        Put p2 = new Put(ROW2);
        p2.addColumn(FAMILYA, QUAL, NOW, QUAL);
        p2.setCellVisibility(new CellVisibility(PRIVATE + " & " + CONFIDENTIAL));
        Put p3 = new Put(ROW3);
        p3.addColumn(FAMILYA, QUAL, NOW, QUAL);
        p3.setCellVisibility(new CellVisibility("!" + CONFIDENTIAL + " & " + TOPSECRET));
        try (Connection conn = ConnectionFactory.createConnection(UTIL.getConfiguration());
            Table t = conn.getTable(TableName.valueOf(exportTable))) {
            t.put(p1);
            t.put(p2);
            t.put(p3);
        }
        return null;
    };
    SecureTestUtil.verifyAllowed(putAction, getUserByLogin(USER_OWNER));
    // Each pair: the labels to export/scan with, and the row count expected
    // to be visible under those labels (given the three expressions above).
    List<Pair<List<String>, Integer>> labelsAndRowCounts = new LinkedList<>();
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(SECRET), 1));
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(PRIVATE, CONFIDENTIAL), 1));
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET), 1));
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET, CONFIDENTIAL), 0));
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET, CONFIDENTIAL, PRIVATE, SECRET), 2));
    for (final Pair<List<String>, Integer> labelsAndRowCount : labelsAndRowCounts) {
        final List<String> labels = labelsAndRowCount.getFirst();
        final int rowCount = labelsAndRowCount.getSecond();
        // create an open-permission directory (world read/write/execute) so
        // the export output is reachable regardless of the acting user.
        final Path openDir = new Path("testAccessCase");
        final FileSystem fs = openDir.getFileSystem(UTIL.getConfiguration());
        fs.mkdirs(openDir);
        fs.setPermission(openDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
        final Path output = fs.makeQualified(new Path(openDir, "output"));
        // Export only the cells matching the comma-joined label list.
        AccessTestAction exportAction = () -> {
            StringBuilder buf = new StringBuilder();
            labels.forEach(v -> buf.append(v).append(","));
            // drop the trailing comma left by the join above
            buf.deleteCharAt(buf.length() - 1);
            try {
                String[] args = new String[] { "-D " + ExportUtils.EXPORT_VISIBILITY_LABELS + "=" + buf.toString(), exportTable, output.toString() };
                Export.run(new Configuration(UTIL.getConfiguration()), args);
                return null;
            } catch (ServiceException | IOException ex) {
                throw ex;
            } catch (Throwable ex) {
                // AccessTestAction only declares Exception; wrap anything else
                throw new Exception(ex);
            }
        };
        SecureTestUtil.verifyAllowed(exportAction, getUserByLogin(USER_OWNER));
        // Fresh import table per iteration; deleted again at the loop's end.
        final TableDescriptor importHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(importTable)).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYB)).build();
        SecureTestUtil.createTable(UTIL, owner, importHtd, new byte[][] { Bytes.toBytes("s") });
        // Re-import the exported data, renaming family A to family B.
        AccessTestAction importAction = () -> {
            String[] args = new String[] { "-D" + Import.CF_RENAME_PROP + "=" + FAMILYA_STRING + ":" + FAMILYB_STRING, importTable, output.toString() };
            assertEquals(0, ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args));
            return null;
        };
        SecureTestUtil.verifyAllowed(importAction, getUserByLogin(USER_OWNER));
        // Scan the import table with the same labels and count visible rows.
        AccessTestAction scanAction = () -> {
            Scan scan = new Scan();
            scan.setAuthorizations(new Authorizations(labels));
            try (Connection conn = ConnectionFactory.createConnection(UTIL.getConfiguration());
                Table table = conn.getTable(importHtd.getTableName());
                ResultScanner scanner = table.getScanner(scan)) {
                int count = 0;
                for (Result r : scanner) {
                    ++count;
                }
                assertEquals(rowCount, count);
            }
            return null;
        };
        SecureTestUtil.verifyAllowed(scanAction, getUserByLogin(USER_OWNER));
        // Drop the import table so the next label combination starts clean.
        AccessTestAction deleteAction = () -> {
            UTIL.deleteTable(importHtd.getTableName());
            return null;
        };
        SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
        clearOutput(output);
    }
    // Finally drop the export table as well.
    AccessTestAction deleteAction = () -> {
        UTIL.deleteTable(exportHtd.getTableName());
        return null;
    };
    SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
}
Also used : Arrays(java.util.Arrays) UserProvider(org.apache.hadoop.hbase.security.UserProvider) VisibilityConstants(org.apache.hadoop.hbase.security.visibility.VisibilityConstants) Result(org.apache.hadoop.hbase.client.Result) FileSystem(org.apache.hadoop.fs.FileSystem) LoggerFactory(org.slf4j.LoggerFactory) PermissionStorage(org.apache.hadoop.hbase.security.access.PermissionStorage) FileStatus(org.apache.hadoop.fs.FileStatus) FsPermission(org.apache.hadoop.fs.permission.FsPermission) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) AccessControlConstants(org.apache.hadoop.hbase.security.access.AccessControlConstants) VisibilityLabelsProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.VisibilityLabelsProtos) Map(java.util.Map) Configuration(org.apache.hadoop.conf.Configuration) After(org.junit.After) Path(org.apache.hadoop.fs.Path) HadoopSecurityEnabledUserProviderForTesting(org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting) ClassRule(org.junit.ClassRule) Pair(org.apache.hadoop.hbase.util.Pair) AfterClass(org.junit.AfterClass) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) HBaseClassTestRule(org.apache.hadoop.hbase.HBaseClassTestRule) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) HBaseKerberosUtils(org.apache.hadoop.hbase.security.HBaseKerberosUtils) Category(org.junit.experimental.categories.Category) List(java.util.List) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) VisibilityClient(org.apache.hadoop.hbase.security.visibility.VisibilityClient) EnvironmentEdgeManager(org.apache.hadoop.hbase.util.EnvironmentEdgeManager) ExportUtils(org.apache.hadoop.hbase.mapreduce.ExportUtils) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Permission(org.apache.hadoop.hbase.security.access.Permission) AccessTestAction(org.apache.hadoop.hbase.security.access.SecureTestUtil.AccessTestAction) BeforeClass(org.junit.BeforeClass) 
FsAction(org.apache.hadoop.fs.permission.FsAction) User(org.apache.hadoop.hbase.security.User) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) TestName(org.junit.rules.TestName) LinkedList(java.util.LinkedList) Bytes(org.apache.hadoop.hbase.util.Bytes) Before(org.junit.Before) TableName(org.apache.hadoop.hbase.TableName) Logger(org.slf4j.Logger) MediumTests(org.apache.hadoop.hbase.testclassification.MediumTests) Put(org.apache.hadoop.hbase.client.Put) Import(org.apache.hadoop.hbase.mapreduce.Import) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) ToolRunner(org.apache.hadoop.util.ToolRunner) IOException(java.io.IOException) Test(org.junit.Test) File(java.io.File) ConnectionFactory(org.apache.hadoop.hbase.client.ConnectionFactory) Scan(org.apache.hadoop.hbase.client.Scan) Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) Rule(org.junit.Rule) SecureTestUtil(org.apache.hadoop.hbase.security.access.SecureTestUtil) VisibilityTestUtil(org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil) CellVisibility(org.apache.hadoop.hbase.security.visibility.CellVisibility) Connection(org.apache.hadoop.hbase.client.Connection) Table(org.apache.hadoop.hbase.client.Table) Assert.assertEquals(org.junit.Assert.assertEquals) User(org.apache.hadoop.hbase.security.User) Import(org.apache.hadoop.hbase.mapreduce.Import) Configuration(org.apache.hadoop.conf.Configuration) AccessTestAction(org.apache.hadoop.hbase.security.access.SecureTestUtil.AccessTestAction) CellVisibility(org.apache.hadoop.hbase.security.visibility.CellVisibility) Result(org.apache.hadoop.hbase.client.Result) FileSystem(org.apache.hadoop.fs.FileSystem) List(java.util.List) LinkedList(java.util.LinkedList) FsPermission(org.apache.hadoop.fs.permission.FsPermission) Pair(org.apache.hadoop.hbase.util.Pair) 
Path(org.apache.hadoop.fs.Path) Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Connection(org.apache.hadoop.hbase.client.Connection) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Put(org.apache.hadoop.hbase.client.Put) LinkedList(java.util.LinkedList) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) IOException(java.io.IOException) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)

Example 5 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache hbase project.

From the class ExportEndpointExample, method main.

/**
 * End-to-end demo of the Export coprocessor endpoint: creates a table with
 * the endpoint attached, loads 100 rows, runs the export to a local path,
 * and prints the aggregated row/cell counts reported by each region.
 */
public static void main(String[] args) throws Throwable {
    final int rowCount = 100;
    final byte[] family = Bytes.toBytes("family");
    Configuration conf = HBaseConfiguration.create();
    TableName tableName = TableName.valueOf("ExportEndpointExample");
    try (Connection con = ConnectionFactory.createConnection(conf);
        Admin admin = con.getAdmin()) {
        // Attach the Export endpoint to the demo table at creation time.
        TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName)
            .setCoprocessor(Export.class.getName())
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family))
            .build();
        admin.createTable(desc);
        // One Put per row; row key, qualifier and value all encode the index.
        List<Put> puts = new ArrayList<>(rowCount);
        for (int row = 0; row < rowCount; row++) {
            byte[] rowKey = Bytes.toBytes(row);
            puts.add(new Put(rowKey).addColumn(family, rowKey, rowKey));
        }
        try (Table table = con.getTable(tableName)) {
            table.put(puts);
        }
        Path output = new Path("/tmp/ExportEndpointExample_output");
        Scan scan = new Scan();
        // One Response per region; sum the per-region counters.
        Map<byte[], Export.Response> result = Export.run(conf, tableName, scan, output);
        final long totalOutputRows = result.values().stream().mapToLong(Export.Response::getRowCount).sum();
        final long totalOutputCells = result.values().stream().mapToLong(Export.Response::getCellCount).sum();
        System.out.println("table:" + tableName);
        System.out.println("output:" + output);
        System.out.println("total rows:" + totalOutputRows);
        System.out.println("total cells:" + totalOutputCells);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) TableName(org.apache.hadoop.hbase.TableName) Put(org.apache.hadoop.hbase.client.Put) ConnectionFactory(org.apache.hadoop.hbase.client.ConnectionFactory) Scan(org.apache.hadoop.hbase.client.Scan) ArrayList(java.util.ArrayList) List(java.util.List) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) Export(org.apache.hadoop.hbase.coprocessor.Export) Map(java.util.Map) Configuration(org.apache.hadoop.conf.Configuration) Connection(org.apache.hadoop.hbase.client.Connection) Path(org.apache.hadoop.fs.Path) Table(org.apache.hadoop.hbase.client.Table) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Bytes(org.apache.hadoop.hbase.util.Bytes) Table(org.apache.hadoop.hbase.client.Table) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) Configuration(org.apache.hadoop.conf.Configuration) Connection(org.apache.hadoop.hbase.client.Connection) ArrayList(java.util.ArrayList) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Put(org.apache.hadoop.hbase.client.Put) TableName(org.apache.hadoop.hbase.TableName) Export(org.apache.hadoop.hbase.coprocessor.Export) Scan(org.apache.hadoop.hbase.client.Scan)

Aggregations

TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)639 Test (org.junit.Test)356 TableName (org.apache.hadoop.hbase.TableName)237 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)180 IOException (java.io.IOException)151 Put (org.apache.hadoop.hbase.client.Put)142 Admin (org.apache.hadoop.hbase.client.Admin)136 Path (org.apache.hadoop.fs.Path)124 Table (org.apache.hadoop.hbase.client.Table)121 ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)96 Configuration (org.apache.hadoop.conf.Configuration)91 TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder)77 ArrayList (java.util.ArrayList)75 FileSystem (org.apache.hadoop.fs.FileSystem)66 Result (org.apache.hadoop.hbase.client.Result)66 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)64 Connection (org.apache.hadoop.hbase.client.Connection)59 Scan (org.apache.hadoop.hbase.client.Scan)50 Get (org.apache.hadoop.hbase.client.Get)49 List (java.util.List)39