Example 81 with Properties

Use of java.util.Properties in project hadoop by Apache.

The class NNStorage, method readProperties.

void readProperties(StorageDirectory sd, StartupOption startupOption) throws IOException {
    Properties props = readPropertiesFile(sd.getVersionFile());
    if (HdfsServerConstants.RollingUpgradeStartupOption.ROLLBACK.matches(startupOption)) {
        int lv = Integer.parseInt(getProperty(props, sd, "layoutVersion"));
        if (lv > getServiceLayoutVersion()) {
            // we should not use a newer version for rollingUpgrade rollback
            throw new IncorrectVersionException(getServiceLayoutVersion(), lv, "storage directory " + sd.getRoot().getAbsolutePath());
        }
        props.setProperty("layoutVersion", Integer.toString(HdfsServerConstants.NAMENODE_LAYOUT_VERSION));
    }
    setFieldsFromProperties(props, sd);
}
Also used : IncorrectVersionException(org.apache.hadoop.hdfs.server.common.IncorrectVersionException), Properties(java.util.Properties)
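
The Hadoop helpers here (readPropertiesFile, getProperty) wrap the standard java.util.Properties load pattern: the VERSION file is an ordinary key=value properties file. A minimal, self-contained sketch of the same layoutVersion read, assuming a hypothetical file path and using only plain JDK I/O:

import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;

static int readLayoutVersion(String versionFilePath) throws IOException {
    Properties props = new Properties();
    // The VERSION file is a plain key=value properties file on disk.
    try (Reader in = Files.newBufferedReader(Paths.get(versionFilePath))) {
        props.load(in);
    }
    String lv = props.getProperty("layoutVersion");
    if (lv == null) {
        throw new IOException("layoutVersion not found in " + versionFilePath);
    }
    return Integer.parseInt(lv.trim());
}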

Example 82 with Properties

Use of java.util.Properties in project hadoop by Apache.

The class TestSecureEncryptionZoneWithKMS, method init.

@BeforeClass
public static void init() throws Exception {
    baseDir = getTestDir();
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();
    baseConf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, baseConf);
    UserGroupInformation.setConfiguration(baseConf);
    assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
    File keytabFile = new File(baseDir, "test.keytab");
    keytab = keytabFile.getAbsolutePath();
    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
    kdc.createPrincipal(keytabFile, HDFS_USER_NAME + "/" + krbInstance, SPNEGO_USER_NAME + "/" + krbInstance, OOZIE_USER_NAME + "/" + krbInstance, OOZIE_PROXIED_USER_NAME + "/" + krbInstance);
    hdfsPrincipal = HDFS_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    spnegoPrincipal = SPNEGO_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    ooziePrincipal = OOZIE_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    // Allow oozie to proxy user
    baseConf.set("hadoop.proxyuser.oozie.hosts", "*");
    baseConf.set("hadoop.proxyuser.oozie.groups", "*");
    baseConf.set("hadoop.user.group.static.mapping.overrides", OOZIE_PROXIED_USER_NAME + "=oozie");
    baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
    // Set a small KMSClient EDEK cache (size 4 with a 0.5 low watermark,
    // i.e. a refill threshold of 4 * 0.5 = 2 keys) so that the 3rd file
    // creation triggers an on-demand refill
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_SIZE, "4");
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK, "0.5");
    keystoresDir = baseDir.getAbsolutePath();
    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureEncryptionZoneWithKMS.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
    baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getClientSSLConfigFileName());
    baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getServerSSLConfigFileName());
    File kmsFile = new File(baseDir, "kms-site.xml");
    if (kmsFile.exists()) {
        FileUtil.fullyDelete(kmsFile);
    }
    Configuration kmsConf = new Configuration(true);
    kmsConf.set(KMSConfiguration.KEY_PROVIDER_URI, "jceks://file@" + new Path(baseDir.toString(), "kms.keystore").toUri());
    kmsConf.set("hadoop.kms.authentication.type", "kerberos");
    kmsConf.set("hadoop.kms.authentication.kerberos.keytab", keytab);
    kmsConf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
    kmsConf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
    kmsConf.set("hadoop.kms.acl.GENERATE_EEK", "hdfs");
    try (Writer writer = new FileWriter(kmsFile)) {
        kmsConf.writeXml(writer);
    }
    // Start MiniKMS
    MiniKMS.Builder miniKMSBuilder = new MiniKMS.Builder();
    miniKMS = miniKMSBuilder.setKmsConfDir(baseDir).build();
    miniKMS.start();
}
Also used : Path(org.apache.hadoop.fs.Path), Configuration(org.apache.hadoop.conf.Configuration), KMSConfiguration(org.apache.hadoop.crypto.key.kms.server.KMSConfiguration), FileWriter(java.io.FileWriter), Properties(java.util.Properties), MiniKMS(org.apache.hadoop.crypto.key.kms.server.MiniKMS), MiniKdc(org.apache.hadoop.minikdc.MiniKdc), File(java.io.File), Writer(java.io.Writer), BeforeClass(org.junit.BeforeClass)
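
MiniKdc.createConf() returns a mutable java.util.Properties holding the KDC defaults, so a test can further tune the KDC before starting it. A short sketch, assuming the MiniKdc property constants ORG_NAME, ORG_DOMAIN and DEBUG and a hypothetical working directory:

import java.io.File;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

// Sketch: adjust the KDC configuration before it starts.
Properties kdcConf = MiniKdc.createConf();
// The realm becomes EXAMPLE.COM (getRealm() is derived from these two values).
kdcConf.setProperty(MiniKdc.ORG_NAME, "EXAMPLE");
kdcConf.setProperty(MiniKdc.ORG_DOMAIN, "COM");
// Enable verbose Kerberos logging while debugging test failures.
kdcConf.setProperty(MiniKdc.DEBUG, "true");
MiniKdc kdc = new MiniKdc(kdcConf, new File("/tmp/kdc-workdir"));
kdc.start();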

Example 83 with Properties

Use of java.util.Properties in project hadoop by Apache.

The class UpgradeUtilities, method createDataNodeStorageDirs.

/**
   * Simulate the {@link DFSConfigKeys#DFS_DATANODE_DATA_DIR_KEY} of a
   * populated DFS filesystem.
   * For each parent directory, this method populates <code>parent/dirName</code>
   * with the contents of a datanode storage directory taken from a singleton
   * datanode master (which contains the version and block files). If the
   * destination directory does not exist, it is created; if it already
   * exists, it is deleted first.
   *
   * @param parents parent directories under which {@code dirName} is created
   * @param dirName directory under which the storage directory is created
   * @return the array of created directories
   */
public static File[] createDataNodeStorageDirs(String[] parents, String dirName) throws Exception {
    File[] retVal = new File[parents.length];
    for (int i = 0; i < parents.length; i++) {
        File newDir = new File(parents[i], dirName);
        createEmptyDirs(new String[] { newDir.toString() });
        LocalFileSystem localFS = FileSystem.getLocal(new HdfsConfiguration());
        localFS.copyToLocalFile(new Path(datanodeStorage.toString(), "current"), new Path(newDir.toString()), false);
        // Change the storage UUID to avoid conflicts when DN starts up.
        StorageDirectory sd = new StorageDirectory(new File(datanodeStorage.toString()));
        sd.setStorageUuid(DatanodeStorage.generateUuid());
        Properties properties = Storage.readPropertiesFile(sd.getVersionFile());
        properties.setProperty("storageID", sd.getStorageUuid());
        Storage.writeProperties(sd.getVersionFile(), properties);
        retVal[i] = newDir;
    }
    return retVal;
}
Also used : Path(org.apache.hadoop.fs.Path), LocalFileSystem(org.apache.hadoop.fs.LocalFileSystem), StorageDirectory(org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory), Properties(java.util.Properties), File(java.io.File)
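
Storage.readPropertiesFile and Storage.writeProperties boil down to the java.util.Properties load/store round trip. A minimal sketch of the same storageID rewrite without the Hadoop helpers, assuming a hypothetical VERSION file path (the "DS-" prefix only mimics the shape of DatanodeStorage.generateUuid()):

import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;
import java.util.UUID;

static void rewriteStorageId(Path versionFile) throws IOException {
    Properties props = new Properties();
    try (Reader in = Files.newBufferedReader(versionFile)) {
        // Read the existing key=value pairs.
        props.load(in);
    }
    props.setProperty("storageID", "DS-" + UUID.randomUUID());
    try (Writer out = Files.newBufferedWriter(versionFile)) {
        // store() truncates and rewrites the whole file.
        props.store(out, null);
    }
}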

Example 84 with Properties

Use of java.util.Properties in project hadoop by Apache.

The class TestAuthFilter, method testGetSimpleAuthDisabledConfiguration.

@Test
public void testGetSimpleAuthDisabledConfiguration() throws ServletException {
    AuthFilter filter = new AuthFilter();
    Map<String, String> m = new HashMap<String, String>();
    m.put(DFSConfigKeys.DFS_WEB_AUTHENTICATION_SIMPLE_ANONYMOUS_ALLOWED, "false");
    FilterConfig config = new DummyFilterConfig(m);
    Properties p = filter.getConfiguration("random", config);
    Assert.assertEquals("false", p.getProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED));
}
Also used : HashMap(java.util.HashMap), FilterConfig(javax.servlet.FilterConfig), Properties(java.util.Properties), Test(org.junit.Test)

Example 85 with Properties

Use of java.util.Properties in project hadoop by Apache.

The class TestAuthFilter, method testGetSimpleAuthDefaultConfiguration.

@Test
public void testGetSimpleAuthDefaultConfiguration() throws ServletException {
    AuthFilter filter = new AuthFilter();
    Map<String, String> m = new HashMap<String, String>();
    FilterConfig config = new DummyFilterConfig(m);
    Properties p = filter.getConfiguration("random", config);
    Assert.assertEquals("true", p.getProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED));
}
Also used : HashMap(java.util.HashMap), FilterConfig(javax.servlet.FilterConfig), Properties(java.util.Properties), Test(org.junit.Test)
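
The default-versus-override behavior these two tests assert is the same fallback idea that java.util.Properties supports natively through its defaults constructor. A standalone sketch for illustration only (the key string is written out literally and is not wired to AuthFilter):

import java.util.Properties;

Properties defaults = new Properties();
defaults.setProperty("simple.anonymous.allowed", "true");

// A Properties built over defaults falls through on getProperty()...
Properties p = new Properties(defaults);
System.out.println(p.getProperty("simple.anonymous.allowed")); // "true"

// ...until an explicit entry shadows the default.
p.setProperty("simple.anonymous.allowed", "false");
System.out.println(p.getProperty("simple.anonymous.allowed")); // "false"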

Aggregations

Properties (java.util.Properties): 9354
Test (org.junit.Test): 3005
IOException (java.io.IOException): 1277
Connection (java.sql.Connection): 1179
File (java.io.File): 1013
ResultSet (java.sql.ResultSet): 860
ConfigurationProperties (org.apache.geode.distributed.ConfigurationProperties): 819
PreparedStatement (java.sql.PreparedStatement): 791
InputStream (java.io.InputStream): 614
FileInputStream (java.io.FileInputStream): 598
HashMap (java.util.HashMap): 475
Map (java.util.Map): 387
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 387
ArrayList (java.util.ArrayList): 371
DistributedTest (org.apache.geode.test.junit.categories.DistributedTest): 321
SQLException (java.sql.SQLException): 308
Before (org.junit.Before): 272
AttributesFactory (org.apache.geode.cache.AttributesFactory): 245
InitialContext (javax.naming.InitialContext): 214
Configuration (org.apache.hadoop.conf.Configuration): 187