Example 86 with Properties

Use of java.util.Properties in project hadoop by apache.

Class KMSAuthenticationFilter, method getConfiguration.

@Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
    Properties props = new Properties();
    Configuration conf = KMSWebApp.getConfiguration();
    for (Map.Entry<String, String> entry : conf) {
        String name = entry.getKey();
        if (name.startsWith(CONFIG_PREFIX)) {
            String value = conf.get(name);
            name = name.substring(CONFIG_PREFIX.length());
            props.setProperty(name, value);
        }
    }
    String authType = props.getProperty(AUTH_TYPE);
    if (authType.equals(PseudoAuthenticationHandler.TYPE)) {
        props.setProperty(AUTH_TYPE, PseudoDelegationTokenAuthenticationHandler.class.getName());
    } else if (authType.equals(KerberosAuthenticationHandler.TYPE)) {
        props.setProperty(AUTH_TYPE, KerberosDelegationTokenAuthenticationHandler.class.getName());
    }
    props.setProperty(DelegationTokenAuthenticationHandler.TOKEN_KIND, KMSDelegationToken.TOKEN_KIND_STR);
    return props;
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Properties(java.util.Properties) Map(java.util.Map) PseudoDelegationTokenAuthenticationHandler(org.apache.hadoop.security.token.delegation.web.PseudoDelegationTokenAuthenticationHandler)
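
The snippet copies every Configuration entry whose key starts with CONFIG_PREFIX into a Properties object, stripping the prefix, and then swaps the short auth-type names for delegation-token handler class names. Below is a minimal, self-contained sketch of the same prefix-strip pattern, using a plain Map in place of the Hadoop Configuration; the prefix value and map contents are made up for illustration.

import java.util.Map;
import java.util.Properties;

public class PrefixedProps {

    // Hypothetical prefix, standing in for KMSAuthenticationFilter's CONFIG_PREFIX.
    private static final String CONFIG_PREFIX = "hadoop.kms.authentication.";

    // Copies entries whose keys start with CONFIG_PREFIX, stripping the prefix.
    static Properties toAuthProperties(Map<String, String> conf) {
        Properties props = new Properties();
        for (Map.Entry<String, String> entry : conf.entrySet()) {
            String name = entry.getKey();
            if (name.startsWith(CONFIG_PREFIX)) {
                props.setProperty(name.substring(CONFIG_PREFIX.length()), entry.getValue());
            }
        }
        return props;
    }

    public static void main(String[] args) {
        Map<String, String> conf = Map.of(
                "hadoop.kms.authentication.type", "kerberos",
                "unrelated.key", "ignored");
        // Prints {type=kerberos}; the unrelated key is filtered out.
        System.out.println(toAuthProperties(conf));
    }
}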

Example 87 with Properties

Use of java.util.Properties in project hadoop by apache.

Class Server, method init.

/**
   * Initializes the Server.
   * <p>
   * The initialization steps are:
   * <ul>
   * <li>It verifies the service home and temp directories exist</li>
   * <li>Loads the Server <code>#SERVER#-default.xml</code>
   * configuration file from the classpath</li>
   * <li>Initializes log4j logging. If the
   * <code>#SERVER#-log4j.properties</code> file does not exist in the config
   * directory, it loads <code>default-log4j.properties</code> from the classpath
   * </li>
   * <li>Loads the <code>#SERVER#-site.xml</code> file from the server config
   * directory and merges it with the default configuration.</li>
   * <li>Loads the services</li>
   * <li>Initializes the services</li>
   * <li>Post-initializes the services</li>
   * <li>Sets the server startup status</li>
   * </ul>
   *
   * @throws ServerException thrown if the server could not be initialized.
   */
public void init() throws ServerException {
    if (status != Status.UNDEF) {
        throw new IllegalStateException("Server already initialized");
    }
    status = Status.BOOTING;
    verifyDir(homeDir);
    verifyDir(tempDir);
    Properties serverInfo = new Properties();
    try {
        InputStream is = getResource(name + ".properties");
        serverInfo.load(is);
        is.close();
    } catch (IOException ex) {
        throw new RuntimeException("Could not load server information file: " + name + ".properties");
    }
    initLog();
    log.info("++++++++++++++++++++++++++++++++++++++++++++++++++++++");
    log.info("Server [{}] starting", name);
    log.info("  Built information:");
    log.info("    Version           : {}", serverInfo.getProperty(name + ".version", "undef"));
    log.info("    Source Repository : {}", serverInfo.getProperty(name + ".source.repository", "undef"));
    log.info("    Source Revision   : {}", serverInfo.getProperty(name + ".source.revision", "undef"));
    log.info("    Built by          : {}", serverInfo.getProperty(name + ".build.username", "undef"));
    log.info("    Built timestamp   : {}", serverInfo.getProperty(name + ".build.timestamp", "undef"));
    log.info("  Runtime information:");
    log.info("    Home   dir: {}", homeDir);
    log.info("    Config dir: {}", (config == null) ? configDir : "-");
    log.info("    Log    dir: {}", logDir);
    log.info("    Temp   dir: {}", tempDir);
    initConfig();
    log.debug("Loading services");
    List<Service> list = loadServices();
    try {
        log.debug("Initializing services");
        initServices(list);
        log.info("Services initialized");
    } catch (ServerException ex) {
        log.error("Services initialization failure, destroying initialized services");
        destroyServices();
        throw ex;
    }
    Status status = Status.valueOf(getConfig().get(getPrefixedName(CONF_STARTUP_STATUS), Status.NORMAL.toString()));
    setStatus(status);
    log.info("Server [{}] started!, status [{}]", name, status);
}
Also used : FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) IOException(java.io.IOException) Properties(java.util.Properties)
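
Here Properties carries the server's build metadata, loaded from a <name>.properties resource on the classpath and read back with "undef" defaults. A minimal sketch of that load-then-read pattern using try-with-resources follows; the resource name and key are placeholders.

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class ServerInfo {

    public static void main(String[] args) throws IOException {
        Properties info = new Properties();
        // Placeholder resource name; Server loads "<name>.properties" from its classpath.
        try (InputStream is = ServerInfo.class.getResourceAsStream("/server-info.properties")) {
            if (is == null) {
                throw new IOException("Could not load server information file");
            }
            info.load(is);
        }
        // getProperty with a default mirrors the "undef" fallbacks in Server.init().
        System.out.println("Version: " + info.getProperty("server.version", "undef"));
    }
}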

Example 88 with Properties

Use of java.util.Properties in project hadoop by apache.

Class Server, method initLog.

/**
   * Initializes Log4j logging.
   *
   * @throws ServerException thrown if Log4j could not be initialized.
   */
protected void initLog() throws ServerException {
    verifyDir(logDir);
    LogManager.resetConfiguration();
    File log4jFile = new File(configDir, name + "-log4j.properties");
    if (log4jFile.exists()) {
        //every 10 secs
        PropertyConfigurator.configureAndWatch(log4jFile.toString(), 10 * 1000);
        log = LoggerFactory.getLogger(Server.class);
    } else {
        Properties props = new Properties();
        try {
            InputStream is = getResource(DEFAULT_LOG4J_PROPERTIES);
            try {
                props.load(is);
            } finally {
                is.close();
            }
        } catch (IOException ex) {
            throw new ServerException(ServerException.ERROR.S03, DEFAULT_LOG4J_PROPERTIES, ex.getMessage(), ex);
        }
        PropertyConfigurator.configure(props);
        log = LoggerFactory.getLogger(Server.class);
        log.warn("Log4j [{}] configuration file not found, using default configuration from classpath", log4jFile);
    }
}
Also used : FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) IOException(java.io.IOException) Properties(java.util.Properties) File(java.io.File)
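
The Properties object here is only the fallback path: if <name>-log4j.properties exists in the config directory it is watched and reloaded, otherwise a default properties resource is loaded from the classpath and handed to PropertyConfigurator. A condensed sketch of that fallback pattern, assuming Log4j 1.x is on the classpath; the file and resource names are placeholders.

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.log4j.PropertyConfigurator;

public class LogSetup {

    static void initLog(File configDir, String name) throws IOException {
        File log4jFile = new File(configDir, name + "-log4j.properties");
        if (log4jFile.exists()) {
            // Re-read the file every 10 seconds, as in Server.initLog().
            PropertyConfigurator.configureAndWatch(log4jFile.toString(), 10 * 1000);
        } else {
            Properties props = new Properties();
            // Placeholder name for the default configuration shipped on the classpath.
            try (InputStream is = LogSetup.class.getResourceAsStream("/default-log4j.properties")) {
                if (is == null) {
                    throw new IOException("default-log4j.properties not found on classpath");
                }
                props.load(is);
            }
            PropertyConfigurator.configure(props);
        }
    }
}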

Example 89 with Properties

Use of java.util.Properties in project hadoop by apache.

Class TestSecureNNWithQJM, method init.

@BeforeClass
public static void init() throws Exception {
    baseDir = GenericTestUtils.getTestDir(TestSecureNNWithQJM.class.getSimpleName());
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();
    baseConf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, baseConf);
    UserGroupInformation.setConfiguration(baseConf);
    assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
    String userName = UserGroupInformation.getLoginUser().getShortUserName();
    File keytabFile = new File(baseDir, userName + ".keytab");
    String keytab = keytabFile.getAbsolutePath();
    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
    kdc.createPrincipal(keytabFile, userName + "/" + krbInstance, "HTTP/" + krbInstance);
    String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
    String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
    baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
    keystoresDir = baseDir.getAbsolutePath();
    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureNNWithQJM.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
    baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getClientSSLConfigFileName());
    baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getServerSSLConfigFileName());
}
Also used : MiniKdc(org.apache.hadoop.minikdc.MiniKdc) Properties(java.util.Properties) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) File(java.io.File) BeforeClass(org.junit.BeforeClass)
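
In this test only the first few lines involve java.util.Properties: MiniKdc.createConf() returns the KDC's configuration as a Properties object, which can be adjusted before it is passed to the MiniKdc constructor. A trimmed sketch of just that portion; the working directory and principal names are placeholders.

import java.io.File;
import java.util.Properties;

import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder working directory for the KDC's files.
        File baseDir = new File("target/minikdc-test");
        baseDir.mkdirs();

        // createConf() returns the KDC settings as a java.util.Properties instance.
        Properties kdcConf = MiniKdc.createConf();
        MiniKdc kdc = new MiniKdc(kdcConf, baseDir);
        kdc.start();
        try {
            File keytab = new File(baseDir, "test.keytab");
            // Placeholder principal names.
            kdc.createPrincipal(keytab, "client/localhost", "HTTP/localhost");
            System.out.println("Realm: " + kdc.getRealm());
        } finally {
            kdc.stop();
        }
    }
}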

Example 90 with Properties

Use of java.util.Properties in project hadoop by apache.

Class Crc32PerformanceTest, method printSystemProperties.

static void printSystemProperties(PrintStream out) {
    final String[] names = { "java.version", "java.runtime.name", "java.runtime.version", "java.vm.version", "java.vm.vendor", "java.vm.name", "java.vm.specification.version", "java.specification.version", "os.arch", "os.name", "os.version" };
    int max = 0;
    for (String n : names) {
        if (n.length() > max) {
            max = n.length();
        }
    }
    final Properties p = System.getProperties();
    for (String n : names) {
        out.printf("%" + max + "s = %s\n", n, p.getProperty(n));
    }
}
Also used : Properties(java.util.Properties)
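
The only subtlety here is the format string: "%" + max + "s" right-aligns each property name to the width of the longest one so the values line up. A tiny standalone illustration of the same idea against System.getProperties(); the property names are chosen arbitrarily.

import java.util.Properties;

public class AlignedProps {

    public static void main(String[] args) {
        final String[] names = { "java.version", "os.name", "os.arch" };
        int max = 0;
        for (String n : names) {
            max = Math.max(max, n.length());
        }
        final Properties p = System.getProperties();
        for (String n : names) {
            // Builds a format such as "%12s = %s%n" when the longest name is 12 characters.
            System.out.printf("%" + max + "s = %s%n", n, p.getProperty(n));
        }
    }
}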

Aggregations

Properties (java.util.Properties): 9354
Test (org.junit.Test): 3005
IOException (java.io.IOException): 1277
Connection (java.sql.Connection): 1179
File (java.io.File): 1013
ResultSet (java.sql.ResultSet): 860
ConfigurationProperties (org.apache.geode.distributed.ConfigurationProperties): 819
PreparedStatement (java.sql.PreparedStatement): 791
InputStream (java.io.InputStream): 614
FileInputStream (java.io.FileInputStream): 598
HashMap (java.util.HashMap): 475
Map (java.util.Map): 387
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 387
ArrayList (java.util.ArrayList): 371
DistributedTest (org.apache.geode.test.junit.categories.DistributedTest): 321
SQLException (java.sql.SQLException): 308
Before (org.junit.Before): 272
AttributesFactory (org.apache.geode.cache.AttributesFactory): 245
InitialContext (javax.naming.InitialContext): 214
Configuration (org.apache.hadoop.conf.Configuration): 187