Search in sources:

Example 71 with BeforeClass

Use of org.junit.BeforeClass in the Apache Flink project.

From class CassandraConnectorITCase, method startFlink:

@BeforeClass
public static void startFlink() throws Exception {
    // Spin up a local mini-cluster with 4 task slots per task manager
    // before any test in this class runs.
    final Configuration clusterConfig = new Configuration();
    clusterConfig.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 4);
    flinkCluster = new LocalFlinkMiniCluster(clusterConfig);
    flinkCluster.start();
}
Also used : Configuration(org.apache.flink.configuration.Configuration) LocalFlinkMiniCluster(org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster) BeforeClass(org.junit.BeforeClass)

Example 72 with BeforeClass

Use of org.junit.BeforeClass in the spring-framework project (spring-projects).

From class RequestPartIntegrationTests, method startServer:

@BeforeClass
public static void startServer() throws Exception {
    // Boots an embedded Jetty server hosting two DispatcherServlet instances,
    // one per multipart-resolver configuration under test, then records the
    // server's base URL for client requests.
    // Let server pick its own random, available port.
    server = new Server(0);
    ServletContextHandler handler = new ServletContextHandler();
    handler.setContextPath("/");
    // First servlet: wired to CommonsMultipartResolverTestConfig via an
    // AnnotationConfigWebApplicationContext.
    Class<?> config = CommonsMultipartResolverTestConfig.class;
    ServletHolder commonsResolverServlet = new ServletHolder(DispatcherServlet.class);
    commonsResolverServlet.setInitParameter("contextConfigLocation", config.getName());
    commonsResolverServlet.setInitParameter("contextClass", AnnotationConfigWebApplicationContext.class.getName());
    handler.addServlet(commonsResolverServlet, "/commons-resolver/*");
    // Second servlet: wired to StandardMultipartResolverTestConfig; it also
    // registers a MultipartConfigElement, which the Servlet 3.0 standard
    // multipart support requires (the commons servlet above does not).
    config = StandardMultipartResolverTestConfig.class;
    ServletHolder standardResolverServlet = new ServletHolder(DispatcherServlet.class);
    standardResolverServlet.setInitParameter("contextConfigLocation", config.getName());
    standardResolverServlet.setInitParameter("contextClass", AnnotationConfigWebApplicationContext.class.getName());
    standardResolverServlet.getRegistration().setMultipartConfig(new MultipartConfigElement(""));
    handler.addServlet(standardResolverServlet, "/standard-resolver/*");
    server.setHandler(handler);
    server.start();
    // Ask Jetty which port it actually bound (we passed 0 above) and build
    // the base URL that tests will hit.
    Connector[] connectors = server.getConnectors();
    NetworkConnector connector = (NetworkConnector) connectors[0];
    baseUrl = "http://localhost:" + connector.getLocalPort();
}
Also used : NetworkConnector(org.eclipse.jetty.server.NetworkConnector) Connector(org.eclipse.jetty.server.Connector) MultipartConfigElement(javax.servlet.MultipartConfigElement) Server(org.eclipse.jetty.server.Server) ServletHolder(org.eclipse.jetty.servlet.ServletHolder) NetworkConnector(org.eclipse.jetty.server.NetworkConnector) ServletContextHandler(org.eclipse.jetty.servlet.ServletContextHandler) AnnotationConfigWebApplicationContext(org.springframework.web.context.support.AnnotationConfigWebApplicationContext) BeforeClass(org.junit.BeforeClass)

Example 73 with BeforeClass

Use of org.junit.BeforeClass in the Apache Hadoop project.

From class TestRawLocalContractUnderlyingFileBehavior, method before:

@BeforeClass
public static void before() {
    // Create the contract's test directory on the local filesystem and
    // verify it is actually a directory before tests run.
    final RawlocalFSContract localContract = new RawlocalFSContract(new Configuration());
    testDirectory = localContract.getTestDirectory();
    testDirectory.mkdirs();
    assertTrue(testDirectory.isDirectory());
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) BeforeClass(org.junit.BeforeClass)

Example 74 with BeforeClass

Use of org.junit.BeforeClass in the Apache Hadoop project.

From class TestFuseDFS, method startUp:

@BeforeClass
public static void startUp() throws IOException {
    // Mount point for the FUSE filesystem, placed under the build test dir.
    mountPoint = System.getProperty("build.test") + "/mnt";
    r = Runtime.getRuntime();
    // HDFS permission checking is disabled for this test cluster.
    final Configuration hdfsConf = new HdfsConfiguration();
    hdfsConf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, false);
    cluster = new MiniDFSCluster.Builder(hdfsConf).build();
    cluster.waitClusterUp();
    fs = cluster.getFileSystem();
    // Mount the cluster's URI via FUSE; keep the process handle for teardown.
    fuseProcess = establishMount(fs.getUri());
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) BeforeClass(org.junit.BeforeClass)

Example 75 with BeforeClass

Use of org.junit.BeforeClass in the Apache Hadoop project.

From class TestSecureEncryptionZoneWithKMS, method init:

@BeforeClass
public static void init() throws Exception {
    // End-to-end secure test setup: start a MiniKdc (Kerberos), build a
    // Kerberos-secured HDFS configuration with SSL keystores, write a
    // kms-site.xml, and start a MiniKMS backed by a file-based JCEKS keystore.
    baseDir = getTestDir();
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());
    // In-process KDC rooted at the test directory.
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();
    baseConf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, baseConf);
    UserGroupInformation.setConfiguration(baseConf);
    assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
    File keytabFile = new File(baseDir, "test.keytab");
    keytab = keytabFile.getAbsolutePath();
    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
    // One keytab holding all four service/user principals used by the tests.
    kdc.createPrincipal(keytabFile, HDFS_USER_NAME + "/" + krbInstance, SPNEGO_USER_NAME + "/" + krbInstance, OOZIE_USER_NAME + "/" + krbInstance, OOZIE_PROXIED_USER_NAME + "/" + krbInstance);
    hdfsPrincipal = HDFS_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    spnegoPrincipal = SPNEGO_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    ooziePrincipal = OOZIE_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    // Allow oozie to proxy user
    baseConf.set("hadoop.proxyuser.oozie.hosts", "*");
    baseConf.set("hadoop.proxyuser.oozie.groups", "*");
    baseConf.set("hadoop.user.group.static.mapping.overrides", OOZIE_PROXIED_USER_NAME + "=oozie");
    baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    // HTTPS-only endpoints on ephemeral ports ("localhost:0").
    baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
    // Set a small (2=4*0.5) KMSClient EDEK cache size to trigger
    // on demand refill upon the 3rd file creation
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_SIZE, "4");
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK, "0.5");
    keystoresDir = baseDir.getAbsolutePath();
    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureEncryptionZoneWithKMS.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
    baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getClientSSLConfigFileName());
    baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getServerSSLConfigFileName());
    File kmsFile = new File(baseDir, "kms-site.xml");
    if (kmsFile.exists()) {
        FileUtil.fullyDelete(kmsFile);
    }
    Configuration kmsConf = new Configuration(true);
    kmsConf.set(KMSConfiguration.KEY_PROVIDER_URI, "jceks://file@" + new Path(baseDir.toString(), "kms.keystore").toUri());
    kmsConf.set("hadoop.kms.authentication.type", "kerberos");
    kmsConf.set("hadoop.kms.authentication.kerberos.keytab", keytab);
    kmsConf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
    kmsConf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
    kmsConf.set("hadoop.kms.acl.GENERATE_EEK", "hdfs");
    // FIX: try-with-resources closes the FileWriter even if writeXml throws,
    // avoiding a file-handle leak on failure (was an unconditional close()).
    try (Writer writer = new FileWriter(kmsFile)) {
        kmsConf.writeXml(writer);
    }
    // Start MiniKMS
    MiniKMS.Builder miniKMSBuilder = new MiniKMS.Builder();
    miniKMS = miniKMSBuilder.setKmsConfDir(baseDir).build();
    miniKMS.start();
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) KMSConfiguration(org.apache.hadoop.crypto.key.kms.server.KMSConfiguration) FileWriter(java.io.FileWriter) Properties(java.util.Properties) MiniKMS(org.apache.hadoop.crypto.key.kms.server.MiniKMS) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) File(java.io.File) Writer(java.io.Writer) FileWriter(java.io.FileWriter) BeforeClass(org.junit.BeforeClass)

Aggregations

BeforeClass (org.junit.BeforeClass)2813 File (java.io.File)388 Configuration (org.apache.hadoop.conf.Configuration)287 IOException (java.io.IOException)128 Connection (java.sql.Connection)126 Properties (java.util.Properties)108 Reader (java.io.Reader)99 SqlSessionFactoryBuilder (org.apache.ibatis.session.SqlSessionFactoryBuilder)98 Provisioning (com.zimbra.cs.account.Provisioning)93 ScriptRunner (org.apache.ibatis.jdbc.ScriptRunner)91 HiveConf (org.apache.hadoop.hive.conf.HiveConf)86 MockProvisioning (com.zimbra.cs.account.MockProvisioning)77 Path (org.apache.hadoop.fs.Path)75 URI (java.net.URI)73 HashMap (java.util.HashMap)70 URL (java.net.URL)63 SqlSession (org.apache.ibatis.session.SqlSession)62 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)61 Injector (com.google.inject.Injector)57 CConfiguration (co.cask.cdap.common.conf.CConfiguration)56