Use of java.util.Properties in project hadoop by apache.
The class NNStorage, method readProperties.
void readProperties(StorageDirectory sd, StartupOption startupOption) throws IOException {
  Properties props = readPropertiesFile(sd.getVersionFile());
  if (HdfsServerConstants.RollingUpgradeStartupOption.ROLLBACK.matches(startupOption)) {
    int lv = Integer.parseInt(getProperty(props, sd, "layoutVersion"));
    if (lv > getServiceLayoutVersion()) {
      // we should not use a newer version for rollingUpgrade rollback
      throw new IncorrectVersionException(getServiceLayoutVersion(), lv,
          "storage directory " + sd.getRoot().getAbsolutePath());
    }
    props.setProperty("layoutVersion",
        Integer.toString(HdfsServerConstants.NAMENODE_LAYOUT_VERSION));
  }
  setFieldsFromProperties(props, sd);
}
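The snippet above reads a VERSION file into a Properties object and, for a rolling-upgrade rollback, overrides the layoutVersion entry before the fields are applied. For readers unfamiliar with how such a properties-format file maps onto java.util.Properties, here is a standalone sketch that loads a file and parses its layoutVersion value. It only illustrates the java.util.Properties API; the file path and the fallback value are made up, and none of NNStorage's locking or validation is reproduced.

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class VersionFileSketch {
  public static void main(String[] args) throws IOException {
    Properties props = new Properties();
    // Hypothetical path; a real NameNode keeps VERSION under its storage directory.
    try (FileInputStream in = new FileInputStream("/tmp/name/current/VERSION")) {
      props.load(in);
    }
    // Properties values are plain strings, so numeric fields must be parsed explicitly.
    int layoutVersion = Integer.parseInt(props.getProperty("layoutVersion", "0"));
    System.out.println("layoutVersion = " + layoutVersion);
  }
}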
Use of java.util.Properties in project hadoop by apache.
The class TestSecureEncryptionZoneWithKMS, method init.
@BeforeClass
public static void init() throws Exception {
  baseDir = getTestDir();
  FileUtil.fullyDelete(baseDir);
  assertTrue(baseDir.mkdirs());
  Properties kdcConf = MiniKdc.createConf();
  kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();
  baseConf = new HdfsConfiguration();
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, baseConf);
  UserGroupInformation.setConfiguration(baseConf);
  assertTrue("Expected configuration to enable security",
      UserGroupInformation.isSecurityEnabled());
  File keytabFile = new File(baseDir, "test.keytab");
  keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  kdc.createPrincipal(keytabFile,
      HDFS_USER_NAME + "/" + krbInstance,
      SPNEGO_USER_NAME + "/" + krbInstance,
      OOZIE_USER_NAME + "/" + krbInstance,
      OOZIE_PROXIED_USER_NAME + "/" + krbInstance);
  hdfsPrincipal = HDFS_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
  spnegoPrincipal = SPNEGO_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
  ooziePrincipal = OOZIE_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
  // Allow oozie to proxy user
  baseConf.set("hadoop.proxyuser.oozie.hosts", "*");
  baseConf.set("hadoop.proxyuser.oozie.groups", "*");
  baseConf.set("hadoop.user.group.static.mapping.overrides",
      OOZIE_PROXIED_USER_NAME + "=oozie");
  baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
  baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
  baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
  // Set a small (2 = 4 * 0.5) KMSClient EDEK cache size to trigger
  // on-demand refill upon the 3rd file creation.
  baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_SIZE, "4");
  baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK, "0.5");
  keystoresDir = baseDir.getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureEncryptionZoneWithKMS.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
  baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getClientSSLConfigFileName());
  baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getServerSSLConfigFileName());
  File kmsFile = new File(baseDir, "kms-site.xml");
  if (kmsFile.exists()) {
    FileUtil.fullyDelete(kmsFile);
  }
  Configuration kmsConf = new Configuration(true);
  kmsConf.set(KMSConfiguration.KEY_PROVIDER_URI,
      "jceks://file@" + new Path(baseDir.toString(), "kms.keystore").toUri());
  kmsConf.set("hadoop.kms.authentication.type", "kerberos");
  kmsConf.set("hadoop.kms.authentication.kerberos.keytab", keytab);
  kmsConf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
  kmsConf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
  kmsConf.set("hadoop.kms.acl.GENERATE_EEK", "hdfs");
  Writer writer = new FileWriter(kmsFile);
  kmsConf.writeXml(writer);
  writer.close();
  // Start MiniKMS
  MiniKMS.Builder miniKMSBuilder = new MiniKMS.Builder();
  miniKMS = miniKMSBuilder.setKmsConfDir(baseDir).build();
  miniKMS.start();
}
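The only direct java.util.Properties usage in this setup is the MiniKdc configuration: MiniKdc.createConf() returns a Properties object whose entries can be adjusted before the KDC is started. A minimal sketch of that pattern follows; the use of the MiniKdc.DEBUG key and a throwaway temp directory are illustrative choices, not taken from the test above.

import java.io.File;
import java.nio.file.Files;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcSketch {
  public static void main(String[] args) throws Exception {
    File workDir = Files.createTempDirectory("minikdc").toFile();
    // createConf() hands back an ordinary Properties table of KDC settings.
    Properties kdcConf = MiniKdc.createConf();
    // Tweak an entry before the KDC starts (debug logging chosen here as an example).
    kdcConf.setProperty(MiniKdc.DEBUG, "true");
    MiniKdc kdc = new MiniKdc(kdcConf, workDir);
    kdc.start();
    try {
      System.out.println("KDC realm: " + kdc.getRealm());
    } finally {
      kdc.stop();
    }
  }
}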
Use of java.util.Properties in project hadoop by apache.
The class UpgradeUtilities, method createDataNodeStorageDirs.
/**
 * Simulate the {@link DFSConfigKeys#DFS_DATANODE_DATA_DIR_KEY} of a
 * populated DFS filesystem.
 * For each parent directory, this method populates <code>parent/dirName</code>
 * with the contents of a datanode storage directory taken from a singleton
 * datanode master (which contains version and block files). If the destination
 * directory does not exist, it is created; if it already exists, it is first
 * deleted.
 *
 * @param parents parent directories under which {@code dirName} is created
 * @param dirName directory under which the storage directory is created
 * @return the array of created directories
 */
public static File[] createDataNodeStorageDirs(String[] parents, String dirName)
    throws Exception {
  File[] retVal = new File[parents.length];
  for (int i = 0; i < parents.length; i++) {
    File newDir = new File(parents[i], dirName);
    createEmptyDirs(new String[] { newDir.toString() });
    LocalFileSystem localFS = FileSystem.getLocal(new HdfsConfiguration());
    localFS.copyToLocalFile(new Path(datanodeStorage.toString(), "current"),
        new Path(newDir.toString()), false);
    // Change the storage UUID to avoid conflicts when DN starts up.
    StorageDirectory sd = new StorageDirectory(new File(datanodeStorage.toString()));
    sd.setStorageUuid(DatanodeStorage.generateUuid());
    Properties properties = Storage.readPropertiesFile(sd.getVersionFile());
    properties.setProperty("storageID", sd.getStorageUuid());
    Storage.writeProperties(sd.getVersionFile(), properties);
    retVal[i] = newDir;
  }
  return retVal;
}
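The storageID rewrite above goes through Storage's helpers, which keep the VERSION file in standard Properties format. A plain java.util.Properties round trip that mimics the same read-modify-write step looks roughly like the sketch below; the file path and the DS-prefixed UUID are placeholders, and the real helpers perform extra validation that this sketch omits.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Properties;
import java.util.UUID;

public class StorageIdRewriteSketch {
  public static void main(String[] args) throws IOException {
    String versionFile = "/tmp/data/current/VERSION";  // placeholder path
    Properties props = new Properties();
    try (FileInputStream in = new FileInputStream(versionFile)) {
      props.load(in);
    }
    // Replace the storage UUID, as the test does to avoid DataNode storage-ID conflicts.
    props.setProperty("storageID", "DS-" + UUID.randomUUID());
    try (FileOutputStream out = new FileOutputStream(versionFile)) {
      props.store(out, null);
    }
  }
}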
Use of java.util.Properties in project hadoop by apache.
The class TestAuthFilter, method testGetSimpleAuthDisabledConfiguration.
@Test
public void testGetSimpleAuthDisabledConfiguration() throws ServletException {
  AuthFilter filter = new AuthFilter();
  Map<String, String> m = new HashMap<String, String>();
  m.put(DFSConfigKeys.DFS_WEB_AUTHENTICATION_SIMPLE_ANONYMOUS_ALLOWED, "false");
  FilterConfig config = new DummyFilterConfig(m);
  Properties p = filter.getConfiguration("random", config);
  Assert.assertEquals("false",
      p.getProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED));
}
Use of java.util.Properties in project hadoop by apache.
The class TestAuthFilter, method testGetSimpleAuthDefaultConfiguration.
@Test
public void testGetSimpleAuthDefaultConfiguration() throws ServletException {
  AuthFilter filter = new AuthFilter();
  Map<String, String> m = new HashMap<String, String>();
  FilterConfig config = new DummyFilterConfig(m);
  Properties p = filter.getConfiguration("random", config);
  Assert.assertEquals("true",
      p.getProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED));
}
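Both tests exercise the same pattern: AuthFilter turns its servlet filter configuration into a java.util.Properties object, and the anonymous-allowed flag either keeps its default ("true") or reflects the explicit override ("false"). The defaulting behaviour itself is plain java.util.Properties, as the sketch below shows; the key name is spelled out literally here rather than taken from PseudoAuthenticationHandler, purely for illustration.

import java.util.Properties;

public class PropertyDefaultSketch {
  public static void main(String[] args) {
    // Defaults live in a backing Properties instance...
    Properties defaults = new Properties();
    defaults.setProperty("simple.anonymous.allowed", "true");
    // ...and an overriding instance consults them when a key is absent.
    Properties effective = new Properties(defaults);
    System.out.println(effective.getProperty("simple.anonymous.allowed")); // true (default)
    effective.setProperty("simple.anonymous.allowed", "false");
    System.out.println(effective.getProperty("simple.anonymous.allowed")); // false (override)
  }
}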