
Example 1 with CoprocessorEnvironment

Use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

From class TestHTableWrapper, method testHTableInterfaceMethods.

@Test
public void testHTableInterfaceMethods() throws Exception {
    Configuration conf = util.getConfiguration();
    MasterCoprocessorHost cpHost = util.getMiniHBaseCluster().getMaster().getMasterCoprocessorHost();
    Class<?> implClazz = DummyRegionObserver.class;
    cpHost.load(implClazz, Coprocessor.PRIORITY_HIGHEST, conf);
    CoprocessorEnvironment env = cpHost.findCoprocessorEnvironment(implClazz.getName());
    assertEquals(Coprocessor.VERSION, env.getVersion());
    assertEquals(VersionInfo.getVersion(), env.getHBaseVersion());
    hTableInterface = env.getTable(TEST_TABLE);
    checkHTableInterfaceMethods();
    cpHost.shutdown(env);
}
Also used : MasterCoprocessorHost(org.apache.hadoop.hbase.master.MasterCoprocessorHost) Configuration(org.apache.hadoop.conf.Configuration) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) Test(org.junit.Test)
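
The test above loads DummyRegionObserver (defined elsewhere in TestHTableWrapper) purely so the host has something to hand an environment back for. For orientation, a minimal no-op coprocessor that could be loaded the same way might look like the following sketch; the class name is hypothetical and only the start/stop contract of org.apache.hadoop.hbase.Coprocessor is assumed.

import java.io.IOException;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;

// Hypothetical stand-in for a no-op coprocessor: nothing to set up or tear down,
// so the host can load it and expose its CoprocessorEnvironment for inspection.
public class NoOpCoprocessor implements Coprocessor {

    @Override
    public void start(CoprocessorEnvironment env) throws IOException {
        // no state to initialize
    }

    @Override
    public void stop(CoprocessorEnvironment env) throws IOException {
        // no state to release
    }
}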

Example 2 with CoprocessorEnvironment

Use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

From class TestClassLoading, method loadingClassFromLibDirInJar.

void loadingClassFromLibDirInJar(String libPrefix) throws Exception {
    FileSystem fs = cluster.getFileSystem();
    File innerJarFile1 = buildCoprocessorJar(cpName1);
    File innerJarFile2 = buildCoprocessorJar(cpName2);
    File outerJarFile = new File(TEST_UTIL.getDataTestDir().toString(), "outer.jar");
    ClassLoaderTestHelper.addJarFilesToJar(outerJarFile, libPrefix, innerJarFile1, innerJarFile2);
    // copy the outer jar onto HDFS
    fs.copyFromLocalFile(new Path(outerJarFile.getPath()), new Path(fs.getUri().toString() + Path.SEPARATOR));
    String jarFileOnHDFS = fs.getUri().toString() + Path.SEPARATOR + outerJarFile.getName();
    assertTrue("Copy jar file to HDFS failed.", fs.exists(new Path(jarFileOnHDFS)));
    LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS);
    // create a table that references the coprocessors
    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(tableName);
    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("test")).build());
    // without configuration values
    tdb.setValue("COPROCESSOR$1", jarFileOnHDFS + "|" + cpName1 + "|" + Coprocessor.PRIORITY_USER);
    // with configuration values
    tdb.setValue("COPROCESSOR$2", jarFileOnHDFS + "|" + cpName2 + "|" + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2,k3=v3");
    Admin admin = TEST_UTIL.getAdmin();
    if (admin.tableExists(tableName)) {
        if (admin.isTableEnabled(tableName)) {
            admin.disableTable(tableName);
        }
        admin.deleteTable(tableName);
    }
    TableDescriptor tableDescriptor = tdb.build();
    admin.createTable(tableDescriptor);
    waitForTable(tableDescriptor.getTableName());
    // verify that the coprocessors were loaded
    boolean found1 = false, found2 = false, found2_k1 = false, found2_k2 = false, found2_k3 = false;
    SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
    for (HRegion region : hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
        if (region.getRegionInfo().getRegionNameAsString().startsWith(tableName.getNameAsString())) {
            CoprocessorEnvironment env;
            env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName1);
            if (env != null) {
                found1 = true;
            }
            env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName2);
            if (env != null) {
                found2 = true;
                Configuration conf = env.getConfiguration();
                found2_k1 = conf.get("k1") != null;
                found2_k2 = conf.get("k2") != null;
                found2_k3 = conf.get("k3") != null;
            }
        }
    }
    assertTrue("Class " + cpName1 + " was missing on a region", found1);
    assertTrue("Class " + cpName2 + " was missing on a region", found2);
    assertTrue("Configuration key 'k1' was missing on a region", found2_k1);
    assertTrue("Configuration key 'k2' was missing on a region", found2_k2);
    assertTrue("Configuration key 'k3' was missing on a region", found2_k3);
}
Also used : Path(org.apache.hadoop.fs.Path) SingleProcessHBaseCluster(org.apache.hadoop.hbase.SingleProcessHBaseCluster) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Configuration(org.apache.hadoop.conf.Configuration) FileSystem(org.apache.hadoop.fs.FileSystem) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) Admin(org.apache.hadoop.hbase.client.Admin) File(java.io.File) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
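
The raw "COPROCESSOR$n" attributes above pack everything into a single "path|class|priority|k1=v1,..." string. If the newer client builder API is available, the same declaration can be made explicitly; the following is a sketch under that assumption (CoprocessorDescriptorBuilder and TableDescriptorBuilder.setCoprocessor from the HBase client), reusing the jar path and class name variables from the test.

import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;

// Equivalent of the hand-built "COPROCESSOR$2" attribute: class to load, jar to
// load it from, priority, and per-coprocessor configuration properties.
CoprocessorDescriptor cp2 = CoprocessorDescriptorBuilder.newBuilder(cpName2)
    .setJarPath(jarFileOnHDFS)
    .setPriority(Coprocessor.PRIORITY_USER)
    .setProperty("k1", "v1")
    .setProperty("k2", "v2")
    .setProperty("k3", "v3")
    .build();
tdb.setCoprocessor(cp2);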

Example 3 with CoprocessorEnvironment

Use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

From class MetaTableMetrics, method start.

@Override
public void start(CoprocessorEnvironment env) throws IOException {
    observer = new ExampleRegionObserverMeta();
    if (env instanceof RegionCoprocessorEnvironment
        && ((RegionCoprocessorEnvironment) env).getRegionInfo().getTable() != null
        && ((RegionCoprocessorEnvironment) env).getRegionInfo().getTable().equals(TableName.META_TABLE_NAME)) {
        RegionCoprocessorEnvironment regionCoprocessorEnv = (RegionCoprocessorEnvironment) env;
        registry = regionCoprocessorEnv.getMetricRegistryForRegionServer();
        LossyCounting.LossyCountingListener<String> listener = key -> {
            registry.remove(key);
            metrics.remove(key);
        };
        final Configuration conf = regionCoprocessorEnv.getConfiguration();
        clientMetricsLossyCounting = new LossyCounting<>("clientMetaMetrics", conf, listener);
        regionMetricsLossyCounting = new LossyCounting<>("regionMetaMetrics", conf, listener);
        // Only switch to active mode when this region hosts the meta table.
        active = true;
    }
}
Also used : LossyCounting(org.apache.hadoop.hbase.util.LossyCounting) TableName(org.apache.hadoop.hbase.TableName) ImmutableMap(org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap) Put(org.apache.hadoop.hbase.client.Put) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Get(org.apache.hadoop.hbase.client.Get) Set(java.util.Set) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) IOException(java.io.IOException) Row(org.apache.hadoop.hbase.client.Row) StringUtils(org.apache.commons.lang3.StringUtils) List(java.util.List) Delete(org.apache.hadoop.hbase.client.Delete) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) Configuration(org.apache.hadoop.conf.Configuration) Optional(java.util.Optional) MetricRegistry(org.apache.hadoop.hbase.metrics.MetricRegistry) WALEdit(org.apache.hadoop.hbase.wal.WALEdit) Durability(org.apache.hadoop.hbase.client.Durability) RpcServer(org.apache.hadoop.hbase.ipc.RpcServer) Cell(org.apache.hadoop.hbase.Cell) Bytes(org.apache.hadoop.hbase.util.Bytes) LossyCounting(org.apache.hadoop.hbase.util.LossyCounting) Configuration(org.apache.hadoop.conf.Configuration)
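
start() only wires up the registry and lossy counters when the region actually hosts hbase:meta. A symmetric stop() would undo that registration; the sketch below assumes the class tracks its registered metric names in the same metrics map the listener above removes from, which is an assumption about internal bookkeeping rather than code taken from MetaTableMetrics.

@Override
public void stop(CoprocessorEnvironment env) throws IOException {
    // Mirror of the lossy-counting listener: drop every metric this coprocessor
    // registered so nothing is left behind in the region server's registry.
    if (active) {
        for (String metricName : metrics.keySet()) {
            registry.remove(metricName);
        }
        metrics.clear();
    }
}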

Example 4 with CoprocessorEnvironment

Use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

From class TestCoprocessorHost, method testDoubleLoadingAndPriorityValue.

@Test
public void testDoubleLoadingAndPriorityValue() {
    final Configuration conf = HBaseConfiguration.create();
    final String key = "KEY";
    final String coprocessor = "org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver";
    CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>> host;
    host = new CoprocessorHostForTest<>(conf);
    int overridePriority = Integer.MAX_VALUE - 1;
    final String coprocessor_v3 = SimpleRegionObserverV3.class.getName() + "|" + overridePriority;
    // Try to load the same coprocessor three times, plus SimpleRegionObserverV2 and the priority-overridden V3
    conf.setStrings(key, coprocessor, coprocessor, coprocessor, SimpleRegionObserverV2.class.getName(), coprocessor_v3);
    host.loadSystemCoprocessors(conf, key);
    // Only three coprocessors (SimpleRegionObserver, SimpleRegionObserverV2,
    // SimpleRegionObserverV3) end up loaded; the duplicate entries are collapsed
    Assert.assertEquals(3, host.coprocEnvironments.size());
    // Check the priority value
    CoprocessorEnvironment<?> simpleEnv = host.findCoprocessorEnvironment(SimpleRegionObserver.class.getName());
    CoprocessorEnvironment<?> simpleEnv_v2 = host.findCoprocessorEnvironment(SimpleRegionObserverV2.class.getName());
    CoprocessorEnvironment<?> simpleEnv_v3 = host.findCoprocessorEnvironment(SimpleRegionObserverV3.class.getName());
    assertNotNull(simpleEnv);
    assertNotNull(simpleEnv_v2);
    assertNotNull(simpleEnv_v3);
    assertEquals(Coprocessor.PRIORITY_SYSTEM, simpleEnv.getPriority());
    assertEquals(Coprocessor.PRIORITY_SYSTEM + 1, simpleEnv_v2.getPriority());
    assertEquals(overridePriority, simpleEnv_v3.getPriority());
}
Also used : HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) Configuration(org.apache.hadoop.conf.Configuration) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) Test(org.junit.Test)
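
The same "ClassName|priority" spec format used for coprocessor_v3 is what goes into the system coprocessor configuration keys outside of tests. The sketch below assumes CoprocessorHost.REGION_COPROCESSOR_CONF_KEY (hbase.coprocessor.region.classes) is the key being populated; the second observer class is hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;

Configuration conf = HBaseConfiguration.create();
// A bare class name is loaded at the default system priority; appending
// "|priority" overrides it, exactly as coprocessor_v3 does above.
conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
    "org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver",
    "com.example.MyRegionObserver|" + (Integer.MAX_VALUE - 1));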

Example 5 with CoprocessorEnvironment

Use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

From class TestCoprocessorHost, method testLoadSystemCoprocessorWithPathDoesNotExistAndPriority.

@Test(expected = AssertionError.class)
public void testLoadSystemCoprocessorWithPathDoesNotExistAndPriority() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    final String key = "KEY";
    final String testClassName = "TestSystemCoprocessor";
    CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>> host;
    host = new CoprocessorHostForTest<>(conf);
    int overridePriority = Integer.MAX_VALUE - 1;
    // Build a coprocessor spec string of the form "class|priority|jarPath"
    final String coprocessor = testClassName + "|" + overridePriority + "|" + testClassName + ".jar";
    // Try to load it as a system coprocessor
    conf.setStrings(key, coprocessor);
    // Loading a non-existent coprocessor aborts with an AssertionError
    host.loadSystemCoprocessors(conf, key);
}
Also used : HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) Configuration(org.apache.hadoop.conf.Configuration) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) Test(org.junit.Test)
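
The expected AssertionError comes from the host aborting when a configured system coprocessor cannot be loaded. Whether a failed load aborts at all is itself configurable; the sketch below assumes the CoprocessorHost.ABORT_ON_ERROR_KEY setting (hbase.coprocessor.abortonerror), which defaults to aborting.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;

Configuration conf = HBaseConfiguration.create();
// With abort-on-error disabled, a coprocessor that fails to load is skipped
// and logged instead of taking the server down with it.
conf.setBoolean(CoprocessorHost.ABORT_ON_ERROR_KEY, false);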

Aggregations

Configuration (org.apache.hadoop.conf.Configuration): 11
CoprocessorEnvironment (org.apache.hadoop.hbase.CoprocessorEnvironment): 11
Test (org.junit.Test): 8
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 5
File (java.io.File): 4
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 4
Path (org.apache.hadoop.fs.Path): 3
SingleProcessHBaseCluster (org.apache.hadoop.hbase.SingleProcessHBaseCluster): 3
Admin (org.apache.hadoop.hbase.client.Admin): 3
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 3
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 3
HashMap (java.util.HashMap): 2
Set (java.util.Set): 2
FileSystem (org.apache.hadoop.fs.FileSystem): 2
IOException (java.io.IOException): 1
HashSet (java.util.HashSet): 1
List (java.util.List): 1
Map (java.util.Map): 1
Optional (java.util.Optional): 1
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 1