Use of java.util.Properties in the Apache Hadoop project.
From the class TestMover, method initSecureConf.
private void initSecureConf(Configuration conf) throws Exception {
  String username = "mover";
  File baseDir = GenericTestUtils.getTestDir(TestMover.class.getSimpleName());
  FileUtil.fullyDelete(baseDir);
  Assert.assertTrue(baseDir.mkdirs());

  // Stand up an embedded KDC configured through java.util.Properties.
  Properties kdcConf = MiniKdc.createConf();
  MiniKdc kdc = new MiniKdc(kdcConf, baseDir);
  kdc.start();

  SecurityUtil.setAuthenticationMethod(
      UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);
  KerberosName.resetDefaultRealm();
  Assert.assertTrue("Expected configuration to enable security",
      UserGroupInformation.isSecurityEnabled());

  // keytabFile and principal are fields of the enclosing test class.
  keytabFile = new File(baseDir, username + ".keytab");
  String keytab = keytabFile.getAbsolutePath();
  // Windows will not reverse name lookup "127.0.0.1" to "localhost".
  String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
  principal = username + "/" + krbInstance + "@" + kdc.getRealm();
  String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
  kdc.createPrincipal(keytabFile, username, username + "/" + krbInstance,
      "HTTP/" + krbInstance);

  conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, principal);
  conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, principal);
  conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
  conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
  conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
  conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
  conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
  conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

  conf.setBoolean(DFS_MOVER_KEYTAB_ENABLED_KEY, true);
  conf.set(DFS_MOVER_ADDRESS_KEY, "localhost:0");
  conf.set(DFS_MOVER_KEYTAB_FILE_KEY, keytab);
  conf.set(DFS_MOVER_KERBEROS_PRINCIPAL_KEY, principal);

  // Generate SSL keystores and point the client/server configs at them.
  String keystoresDir = baseDir.getAbsolutePath();
  String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestMover.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  conf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getClientSSLConfigFileName());
  conf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getServerSSLConfigFileName());

  initConf(conf);
}
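The Properties returned by MiniKdc.createConf() can be tuned before the KDC starts. A minimal sketch follows; MiniKdc.DEBUG and MiniKdc.KDC_BIND_ADDRESS are assumed to be the constant names, so verify them against the org.apache.hadoop.minikdc.MiniKdc class you build against.

// Sketch: customize the embedded KDC through its Properties before start().
// The key constants used here are assumptions; check MiniKdc for the
// exact names it defines.
Properties kdcConf = MiniKdc.createConf();
kdcConf.setProperty(MiniKdc.DEBUG, "true");                // verbose KDC logging
kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, "localhost");
MiniKdc kdc = new MiniKdc(kdcConf, baseDir);
kdc.start();
try {
  // Run the secured test against kdc.getRealm() and kdc.getPort().
} finally {
  kdc.stop();
}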
Use of java.util.Properties in the Apache Hadoop project.
From the class FSImageTestUtil, method assertPropertiesFilesSame.
/**
* Assert that a set of properties files all contain the same data.
*
* @param propFiles the files to compare.
* @param ignoredProperties the property names to be ignored during
* comparison.
* @throws IOException if the files cannot be opened or read
* @throws AssertionError if the files differ
*/
public static void assertPropertiesFilesSame(File[] propFiles,
    Set<String> ignoredProperties) throws IOException {
  Set<Map.Entry<Object, Object>> prevProps = null;
  for (File f : propFiles) {
    Properties props;
    FileInputStream is = new FileInputStream(f);
    try {
      props = new Properties();
      props.load(is);
    } finally {
      IOUtils.closeStream(is);
    }
    if (prevProps == null) {
      prevProps = props.entrySet();
    } else {
      // Entries present in exactly one of the two files; anything left
      // after skipping the ignored keys is a real difference.
      Set<Entry<Object, Object>> diff =
          Sets.symmetricDifference(prevProps, props.entrySet());
      Iterator<Entry<Object, Object>> it = diff.iterator();
      while (it.hasNext()) {
        Entry<Object, Object> entry = it.next();
        if (ignoredProperties != null
            && ignoredProperties.contains(entry.getKey())) {
          continue;
        }
        fail("Properties file " + f + " differs from " + propFiles[0]);
      }
    }
  }
}
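A possible call site, with made-up paths and a hypothetical ignored key, just to show the shape of a call (requires java.util.Collections):

// Hypothetical usage: compare the VERSION files of two storage dirs,
// ignoring a property expected to legitimately differ between them.
// Both paths and the "storageID" key are illustrative, not from the source.
File[] versionFiles = {
  new File("/tmp/name1/current/VERSION"),
  new File("/tmp/name2/current/VERSION")
};
FSImageTestUtil.assertPropertiesFilesSame(
    versionFiles, Collections.singleton("storageID"));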
Use of java.util.Properties in the Apache Hadoop project.
From the class TestRestClientBindings, method testBindAgainstConf.
public void testBindAgainstConf() throws Exception {
  Properties props = RestClientBindings.bind(filesysURI, conf);
  assertPropertyEquals(props, SWIFT_CONTAINER_PROPERTY, CONTAINER);
  assertPropertyEquals(props, SWIFT_SERVICE_PROPERTY, SERVICE);
  assertPropertyEquals(props, SWIFT_AUTH_PROPERTY, AUTH_URL);
  assertPropertyEquals(props, SWIFT_USERNAME_PROPERTY, USER);
  assertPropertyEquals(props, SWIFT_PASSWORD_PROPERTY, PASS);
  // Optional settings are expected to be absent from the binding.
  assertPropertyEquals(props, SWIFT_TENANT_PROPERTY, null);
  assertPropertyEquals(props, SWIFT_REGION_PROPERTY, null);
  assertPropertyEquals(props, SWIFT_HTTP_PORT_PROPERTY, null);
  assertPropertyEquals(props, SWIFT_HTTPS_PORT_PROPERTY, null);
}
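The assertPropertyEquals helper is not shown in this excerpt. A plausible shape, treating the signature and behavior as assumptions rather than the real Swift test helper:

// Sketch of the assertion helper the test relies on; the actual method
// in the Swift test code may differ. A null 'expected' asserts absence.
private static void assertPropertyEquals(Properties props, String key,
    String expected) {
  String actual = props.getProperty(key);
  if (expected == null) {
    Assert.assertNull("Unexpected value for " + key + ": " + actual, actual);
  } else {
    Assert.assertEquals("Wrong value for " + key, expected, actual);
  }
}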
Use of java.util.Properties in the Apache Hadoop project.
From the class TestRestClientBindings, method expectBindingFailure.
public void expectBindingFailure(URI fsURI, Configuration config) {
  try {
    Properties binding = RestClientBindings.bind(fsURI, config);
    // If we get here, the binding didn't fail; report which properties
    // were bound (names only, not values) to aid diagnosis.
    StringBuilder details = new StringBuilder();
    for (Object key : binding.keySet()) {
      details.append(key.toString()).append(" ");
    }
    fail("Expected a failure, got the binding [ " + details + "]");
  } catch (SwiftConfigurationException expected) {
    // Expected: the URI/configuration combination is invalid.
  }
}
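A hypothetical caller, assuming that a Swift URI whose authority lacks the container.service form fails to bind with SwiftConfigurationException (the URI below is made up for illustration):

// Hypothetical usage: a URI with no container/service separator should
// fail to bind, exercising the catch branch above.
URI badURI = new URI("swift://missingcontainer/");
expectBindingFailure(badURI, conf);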
Use of java.util.Properties in the Apache Hadoop project.
From the class LoggedJob, method compareJobProperties.
private void compareJobProperties(JobProperties jprop1, JobProperties jprop2,
    TreePath loc, String eltname) throws DeepInequalityException {
  if (jprop1 == null && jprop2 == null) {
    return;
  }
  if (jprop1 == null || jprop2 == null) {
    throw new DeepInequalityException(eltname + " miscompared",
        new TreePath(loc, eltname));
  }
  Properties prop1 = jprop1.getValue();
  Properties prop2 = jprop2.getValue();
  if (prop1.size() != prop2.size()) {
    throw new DeepInequalityException(eltname + " miscompared [size]",
        new TreePath(loc, eltname));
  }
  for (Map.Entry<Object, Object> entry : prop1.entrySet()) {
    String v1 = entry.getValue().toString();
    Object value2 = prop2.get(entry.getKey());
    // Equal sizes do not guarantee equal key sets: guard against a key
    // present only in prop1 before dereferencing the lookup.
    if (value2 == null) {
      throw new DeepInequalityException(
          eltname + " miscompared [key:" + entry.getKey() + "]",
          new TreePath(loc, eltname));
    }
    compare1(v1, value2.toString(), new TreePath(loc, eltname),
        "key:" + entry.getKey());
  }
}
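Because the sizes are checked first, a key present only in prop2 would already make the sizes differ, so the one-directional loop is sufficient once null lookups are guarded. For illustration only, a self-contained equivalent of the comparison logic using plain java.util.Properties (names here are hypothetical):

// Stand-alone sketch of the same Properties comparison, throwing on the
// first mismatch instead of using DeepInequalityException/TreePath.
static void assertSameProperties(Properties a, Properties b) {
  if (a.size() != b.size()) {
    throw new AssertionError("size mismatch: " + a.size() + " vs " + b.size());
  }
  for (Map.Entry<Object, Object> e : a.entrySet()) {
    Object other = b.get(e.getKey());
    if (other == null || !other.toString().equals(e.getValue().toString())) {
      throw new AssertionError("mismatch at key " + e.getKey());
    }
  }
}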