
Example 6 with CoprocessorEnvironment

use of org.apache.hadoop.hbase.CoprocessorEnvironment in project Gaffer by gchq.

the class GafferCoprocessorTest method setup.

@BeforeEach
public void setup() throws IOException {
    coprocessor = new GafferCoprocessor();
    final CoprocessorEnvironment coEnv = mock(CoprocessorEnvironment.class);
    final Configuration conf = mock(Configuration.class);
    given(coEnv.getConfiguration()).willReturn(conf);
    given(conf.get(HBaseStoreConstants.SCHEMA)).willReturn(StringUtil.escapeComma(Bytes.toString(SCHEMA.toCompactJson())));
    coprocessor.start(coEnv);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) RegionCoprocessorEnvironment(org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment) BeforeEach(org.junit.jupiter.api.BeforeEach)
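
The same pattern generalizes to unit-testing any Coprocessor lifecycle: mock the environment, stub getConfiguration(), call start() in setup, and mirror it with stop() in teardown. Below is a minimal sketch, not taken from the Gaffer sources; the class and helper method names are illustrative.

import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;

final class CoprocessorLifecycleSketch {

    static void exercise(final Coprocessor coprocessor) throws IOException {
        // Stub only what start() needs: an environment that hands back a Configuration.
        final CoprocessorEnvironment coEnv = mock(CoprocessorEnvironment.class);
        final Configuration conf = mock(Configuration.class);
        given(coEnv.getConfiguration()).willReturn(conf);
        // Drive the lifecycle; stop() mirrors start() so the test releases any resources.
        coprocessor.start(coEnv);
        coprocessor.stop(coEnv);
    }
}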

Example 7 with CoprocessorEnvironment

use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

the class TestClassLoading method testClassLoadingFromHDFS.

@Test
// HBASE-3516: Test CP Class loading from HDFS
public void testClassLoadingFromHDFS() throws Exception {
    FileSystem fs = cluster.getFileSystem();
    File jarFile1 = buildCoprocessorJar(cpName1);
    File jarFile2 = buildCoprocessorJar(cpName2);
    // copy the jars into dfs
    fs.copyFromLocalFile(new Path(jarFile1.getPath()), new Path(fs.getUri().toString() + Path.SEPARATOR));
    String jarFileOnHDFS1 = fs.getUri().toString() + Path.SEPARATOR + jarFile1.getName();
    Path pathOnHDFS1 = new Path(jarFileOnHDFS1);
    assertTrue("Copy jar file to HDFS failed.", fs.exists(pathOnHDFS1));
    LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS1);
    fs.copyFromLocalFile(new Path(jarFile2.getPath()), new Path(fs.getUri().toString() + Path.SEPARATOR));
    String jarFileOnHDFS2 = fs.getUri().toString() + Path.SEPARATOR + jarFile2.getName();
    Path pathOnHDFS2 = new Path(jarFileOnHDFS2);
    assertTrue("Copy jar file to HDFS failed.", fs.exists(pathOnHDFS2));
    LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS2);
    // create a table that references the coprocessors
    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(tableName);
    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("test")).build());
    // without configuration values
    tdb.setValue("COPROCESSOR$1", jarFileOnHDFS1 + "|" + cpName1 + "|" + Coprocessor.PRIORITY_USER);
    // with configuration values
    tdb.setValue("COPROCESSOR$2", jarFileOnHDFS2 + "|" + cpName2 + "|" + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2,k3=v3");
    Admin admin = TEST_UTIL.getAdmin();
    if (admin.tableExists(tableName)) {
        if (admin.isTableEnabled(tableName)) {
            admin.disableTable(tableName);
        }
        admin.deleteTable(tableName);
    }
    CoprocessorClassLoader.clearCache();
    byte[] startKey = { 10, 63 };
    byte[] endKey = { 12, 43 };
    TableDescriptor tableDescriptor = tdb.build();
    admin.createTable(tableDescriptor, startKey, endKey, 4);
    waitForTable(tableDescriptor.getTableName());
    // verify that the coprocessors were loaded
    boolean foundTableRegion = false;
    boolean found1 = true, found2 = true, found2_k1 = true, found2_k2 = true, found2_k3 = true;
    Map<Region, Set<ClassLoader>> regionsActiveClassLoaders = new HashMap<>();
    SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
    for (HRegion region : hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
        if (region.getRegionInfo().getRegionNameAsString().startsWith(tableName.getNameAsString())) {
            foundTableRegion = true;
            CoprocessorEnvironment env;
            env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName1);
            found1 = found1 && (env != null);
            env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName2);
            found2 = found2 && (env != null);
            if (env != null) {
                Configuration conf = env.getConfiguration();
                found2_k1 = found2_k1 && (conf.get("k1") != null);
                found2_k2 = found2_k2 && (conf.get("k2") != null);
                found2_k3 = found2_k3 && (conf.get("k3") != null);
            } else {
                found2_k1 = false;
                found2_k2 = false;
                found2_k3 = false;
            }
            regionsActiveClassLoaders.put(region, ((CoprocessorHost) region.getCoprocessorHost()).getExternalClassLoaders());
        }
    }
    assertTrue("No region was found for table " + tableName, foundTableRegion);
    assertTrue("Class " + cpName1 + " was missing on a region", found1);
    assertTrue("Class " + cpName2 + " was missing on a region", found2);
    assertTrue("Configuration key 'k1' was missing on a region", found2_k1);
    assertTrue("Configuration key 'k2' was missing on a region", found2_k2);
    assertTrue("Configuration key 'k3' was missing on a region", found2_k3);
    // check if CP classloaders are cached
    assertNotNull(jarFileOnHDFS1 + " was not cached", CoprocessorClassLoader.getIfCached(pathOnHDFS1));
    assertNotNull(jarFileOnHDFS2 + " was not cached", CoprocessorClassLoader.getIfCached(pathOnHDFS2));
    // two external jars are used, so there should be one classloader per jar
    assertEquals("The number of cached classloaders should be equal to the number" + " of external jar files", 2, CoprocessorClassLoader.getAllCached().size());
    // check if region active classloaders are shared across all RS regions
    Set<ClassLoader> externalClassLoaders = new HashSet<>(CoprocessorClassLoader.getAllCached());
    for (Map.Entry<Region, Set<ClassLoader>> regionCP : regionsActiveClassLoaders.entrySet()) {
        assertTrue("Some CP classloaders for region " + regionCP.getKey() + " are not cached." + " ClassLoader Cache:" + externalClassLoaders + " Region ClassLoaders:" + regionCP.getValue(), externalClassLoaders.containsAll(regionCP.getValue()));
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SingleProcessHBaseCluster(org.apache.hadoop.hbase.SingleProcessHBaseCluster) HashSet(java.util.HashSet) Set(java.util.Set) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) FileSystem(org.apache.hadoop.fs.FileSystem) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Region(org.apache.hadoop.hbase.regionserver.Region) CoprocessorClassLoader(org.apache.hadoop.hbase.util.CoprocessorClassLoader) File(java.io.File) HashMap(java.util.HashMap) Map(java.util.Map) HashSet(java.util.HashSet) Test(org.junit.Test)
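
For reference, the two attribute values above follow HBase's pipe-delimited coprocessor specification for table descriptors. The sketch below restates the shape in isolation; the jar path and class name are placeholders, not values from the test.

import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

final class CoprocessorSpecSketch {

    // Attribute format: [jar path] | class name | [priority] | [key1=v1,key2=v2,...]
    // An empty jar path means the class is loaded from the default classpath.
    static TableDescriptorBuilder addCoprocessorAttribute(final TableDescriptorBuilder tdb) {
        final String jarOnHdfs = "hdfs://namenode:8020/cp/my-observer.jar"; // placeholder path
        final String spec = jarOnHdfs + "|com.example.MyObserver|"          // placeholder class
            + Coprocessor.PRIORITY_USER + "|k1=v1,k2=v2";
        return tdb.setValue("COPROCESSOR$1", spec);
    }
}

Example 8 below exercises the looser variants of this format, including empty jar paths and omitted priorities.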

Example 8 with CoprocessorEnvironment

use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

the class TestClassLoading method testHBase3810.

@Test
// HBASE-3810: registering a coprocessor on the table descriptor should be less strict
public void testHBase3810() throws Exception {
    // allowed value pattern: [path] | class name | [priority] | [key values]
    File jarFile1 = buildCoprocessorJar(cpName1);
    File jarFile2 = buildCoprocessorJar(cpName2);
    File jarFile5 = buildCoprocessorJar(cpName5);
    File jarFile6 = buildCoprocessorJar(cpName6);
    String cpKey1 = "COPROCESSOR$1";
    String cpKey2 = " Coprocessor$2 ";
    String cpKey3 = " coprocessor$03 ";
    String cpValue1 = getLocalPath(jarFile1) + "|" + cpName1 + "|" + Coprocessor.PRIORITY_USER;
    String cpValue2 = getLocalPath(jarFile2) + " | " + cpName2 + " | ";
    // load from default class loader
    String cpValue3 = " | org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver | | k=v ";
    // create a table that references the jar
    TableDescriptorBuilder tdb = TableDescriptorBuilder.newBuilder(tableName);
    tdb.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("test")).build());
    // add 3 coprocessors by setting htd attributes directly.
    tdb.setValue(cpKey1, cpValue1);
    tdb.setValue(cpKey2, cpValue2);
    tdb.setValue(cpKey3, cpValue3);
    // add 2 coprocessors by using the new setCoprocessor() API
    CoprocessorDescriptor coprocessorDescriptor = CoprocessorDescriptorBuilder.newBuilder(cpName5).setJarPath(new Path(getLocalPath(jarFile5)).toString()).setPriority(Coprocessor.PRIORITY_USER).setProperties(Collections.emptyMap()).build();
    tdb.setCoprocessor(coprocessorDescriptor);
    Map<String, String> kvs = new HashMap<>();
    kvs.put("k1", "v1");
    kvs.put("k2", "v2");
    kvs.put("k3", "v3");
    coprocessorDescriptor = CoprocessorDescriptorBuilder.newBuilder(cpName6).setJarPath(new Path(getLocalPath(jarFile6)).toString()).setPriority(Coprocessor.PRIORITY_USER).setProperties(kvs).build();
    tdb.setCoprocessor(coprocessorDescriptor);
    Admin admin = TEST_UTIL.getAdmin();
    if (admin.tableExists(tableName)) {
        if (admin.isTableEnabled(tableName)) {
            admin.disableTable(tableName);
        }
        admin.deleteTable(tableName);
    }
    TableDescriptor tableDescriptor = tdb.build();
    admin.createTable(tableDescriptor);
    waitForTable(tableDescriptor.getTableName());
    // verify that the coprocessors were loaded
    boolean found_2 = false, found_1 = false, found_3 = false, found_5 = false, found_6 = false;
    boolean found6_k1 = false, found6_k2 = false, found6_k3 = false, found6_k4 = false;
    SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
    for (HRegion region : hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
        if (region.getRegionInfo().getRegionNameAsString().startsWith(tableName.getNameAsString())) {
            found_1 = found_1 || (region.getCoprocessorHost().findCoprocessor(cpName1) != null);
            found_2 = found_2 || (region.getCoprocessorHost().findCoprocessor(cpName2) != null);
            found_3 = found_3 || (region.getCoprocessorHost().findCoprocessor("SimpleRegionObserver") != null);
            found_5 = found_5 || (region.getCoprocessorHost().findCoprocessor(cpName5) != null);
            CoprocessorEnvironment env = region.getCoprocessorHost().findCoprocessorEnvironment(cpName6);
            if (env != null) {
                found_6 = true;
                Configuration conf = env.getConfiguration();
                found6_k1 = conf.get("k1") != null;
                found6_k2 = conf.get("k2") != null;
                found6_k3 = conf.get("k3") != null;
            }
        }
    }
    assertTrue("Class " + cpName1 + " was missing on a region", found_1);
    assertTrue("Class " + cpName2 + " was missing on a region", found_2);
    assertTrue("Class SimpleRegionObserver was missing on a region", found_3);
    assertTrue("Class " + cpName5 + " was missing on a region", found_5);
    assertTrue("Class " + cpName6 + " was missing on a region", found_6);
    assertTrue("Configuration key 'k1' was missing on a region", found6_k1);
    assertTrue("Configuration key 'k2' was missing on a region", found6_k2);
    assertTrue("Configuration key 'k3' was missing on a region", found6_k3);
    assertFalse("Configuration key 'k4' wasn't configured", found6_k4);
}
Also used : Path(org.apache.hadoop.fs.Path) SingleProcessHBaseCluster(org.apache.hadoop.hbase.SingleProcessHBaseCluster) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) CoprocessorDescriptor(org.apache.hadoop.hbase.client.CoprocessorDescriptor) File(java.io.File) Test(org.junit.Test)
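
A possible complement to the server-side checks above is inspecting the registrations on the built descriptor itself. A short sketch, assuming the getCoprocessorDescriptors() accessor exposed by recent TableDescriptor versions; the class name is illustrative.

import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;

final class CoprocessorInspectionSketch {

    // Prints the coprocessors registered on the descriptor; in recent HBase versions this
    // surfaces both attribute-based and setCoprocessor()-based registrations.
    static void dump(final TableDescriptor tableDescriptor) {
        for (CoprocessorDescriptor cp : tableDescriptor.getCoprocessorDescriptors()) {
            System.out.println(cp.getClassName()
                + " priority=" + cp.getPriority()
                + " jar=" + cp.getJarPath().orElse("<default classloader>")
                + " props=" + cp.getProperties());
        }
    }
}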

Example 9 with CoprocessorEnvironment

use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

the class TestCoprocessorHost method testLoadSystemCoprocessorWithPath.

@Test
public void testLoadSystemCoprocessorWithPath() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    final String key = "KEY";
    final String testClassName = "TestSystemCoprocessor";
    final String testClassNameWithPriorityAndPath = testClassName + "PriorityAndPath";
    File jarFile = buildCoprocessorJar(testClassName);
    File jarFileWithPriorityAndPath = buildCoprocessorJar(testClassNameWithPriorityAndPath);
    try {
        CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>> host;
        host = new CoprocessorHostForTest<>(conf);
        // make a coprocessor spec string with only a priority
        int overridePriority = Integer.MAX_VALUE - 1;
        final String coprocessorWithPriority = SimpleRegionObserverV3.class.getName() + "|" + overridePriority;
        // make a coprocessor spec string with a jar path but no priority
        final String coprocessorWithPath = String.format("%s|%s|%s", testClassName, "", jarFile.getAbsolutePath());
        // make a coprocessor spec string with both a priority and a jar path
        final String coprocessorWithPriorityAndPath = String.format("%s|%s|%s", testClassNameWithPriorityAndPath, (overridePriority - 1), jarFileWithPriorityAndPath.getAbsolutePath());
        // Try to load the system coprocessors
        conf.setStrings(key, SimpleRegionObserverV2.class.getName(), coprocessorWithPriority, coprocessorWithPath, coprocessorWithPriorityAndPath);
        host.loadSystemCoprocessors(conf, key);
        // first loaded system coprocessor with default priority
        CoprocessorEnvironment<?> simpleEnv = host.findCoprocessorEnvironment(SimpleRegionObserverV2.class.getName());
        assertNotNull(simpleEnv);
        assertEquals(Coprocessor.PRIORITY_SYSTEM, simpleEnv.getPriority());
        // external system coprocessor with default priority
        CoprocessorEnvironment<?> coprocessorEnvironmentWithPath = host.findCoprocessorEnvironment(testClassName);
        assertNotNull(coprocessorEnvironmentWithPath);
        assertEquals(Coprocessor.PRIORITY_SYSTEM + 1, coprocessorEnvironmentWithPath.getPriority());
        // system coprocessor with configured priority
        CoprocessorEnvironment<?> coprocessorEnvironmentWithPriority = host.findCoprocessorEnvironment(SimpleRegionObserverV3.class.getName());
        assertNotNull(coprocessorEnvironmentWithPriority);
        assertEquals(overridePriority, coprocessorEnvironmentWithPriority.getPriority());
        // external system coprocessor with override priority
        CoprocessorEnvironment<?> coprocessorEnvironmentWithPriorityAndPath = host.findCoprocessorEnvironment(testClassNameWithPriorityAndPath);
        assertNotNull(coprocessorEnvironmentWithPriorityAndPath);
        assertEquals(overridePriority - 1, coprocessorEnvironmentWithPriorityAndPath.getPriority());
    } finally {
        if (jarFile.exists()) {
            jarFile.delete();
        }
        if (jarFileWithPriorityAndPath.exists()) {
            jarFileWithPriorityAndPath.delete();
        }
    }
}
Also used : HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) Configuration(org.apache.hadoop.conf.Configuration) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) File(java.io.File) Test(org.junit.Test)
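
The spec strings assembled above encode several variants of the same format. A minimal sketch restating it with placeholder class names and jar paths, none of them from the test:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

final class SystemCoprocessorConfigSketch {

    // Per-entry value formats accepted under the configured key, as exercised above:
    //   className                            default priority, default classloader
    //   className|priority                   explicit priority
    //   className||/path/to/cp.jar           default priority, external jar
    //   className|priority|/path/to/cp.jar   explicit priority and external jar
    static Configuration build() {
        final Configuration conf = HBaseConfiguration.create();
        conf.setStrings("KEY",
            "com.example.PlainObserver",                                    // placeholder class
            "com.example.PriorityObserver|" + (Integer.MAX_VALUE - 1),
            "com.example.ExternalObserver||/tmp/external-cp.jar",           // placeholder jar
            "com.example.ExternalPriorityObserver|" + (Integer.MAX_VALUE - 2)
                + "|/tmp/external-cp.jar");
        return conf;
    }
}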

Example 10 with CoprocessorEnvironment

use of org.apache.hadoop.hbase.CoprocessorEnvironment in project hbase by apache.

the class TestCoprocessorHost method testLoadSystemCoprocessorWithPathDoesNotExist.

@Test(expected = AssertionError.class)
public void testLoadSystemCoprocessorWithPathDoesNotExist() throws Exception {
    Configuration conf = TEST_UTIL.getConfiguration();
    final String key = "KEY";
    final String testClassName = "TestSystemCoprocessor";
    CoprocessorHost<RegionCoprocessor, CoprocessorEnvironment<RegionCoprocessor>> host;
    host = new CoprocessorHostForTest<>(conf);
    // make a coprocessor spec string with a jar path but no priority
    final String coprocessorWithPath = testClassName + "||" + testClassName + ".jar";
    // Try to load the system coprocessor
    conf.setStrings(key, coprocessorWithPath);
    // when the jar path does not exist, CoprocessorHostForTest aborts with an AssertionError
    host.loadSystemCoprocessors(conf, key);
}
Also used : HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) Configuration(org.apache.hadoop.conf.Configuration) CoprocessorEnvironment(org.apache.hadoop.hbase.CoprocessorEnvironment) Test(org.junit.Test)

Aggregations

Configuration (org.apache.hadoop.conf.Configuration): 11 uses
CoprocessorEnvironment (org.apache.hadoop.hbase.CoprocessorEnvironment): 11 uses
Test (org.junit.Test): 8 uses
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 5 uses
File (java.io.File): 4 uses
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 4 uses
Path (org.apache.hadoop.fs.Path): 3 uses
SingleProcessHBaseCluster (org.apache.hadoop.hbase.SingleProcessHBaseCluster): 3 uses
Admin (org.apache.hadoop.hbase.client.Admin): 3 uses
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 3 uses
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 3 uses
HashMap (java.util.HashMap): 2 uses
Set (java.util.Set): 2 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 2 uses
IOException (java.io.IOException): 1 use
HashSet (java.util.HashSet): 1 use
List (java.util.List): 1 use
Map (java.util.Map): 1 use
Optional (java.util.Optional): 1 use
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 1 use