Use of java.util.Properties in project flink by apache, in the class ParameterToolTest, method testFromMapOrProperties.
@Test
public void testFromMapOrProperties() {
    Properties props = new Properties();
    props.setProperty("input", "myInput");
    props.setProperty("expectedCount", "15");
    // Properties extends Hashtable<Object, Object>, so the raw Map cast is
    // unchecked but safe here because only String keys and values were set.
    ParameterTool parameter = ParameterTool.fromMap((Map) props);
    Assert.assertEquals(2, parameter.getNumberOfParameters());
    validate(parameter);
}
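In application code the same factory method is usually given a plain Map<String, String>, which matches its declared signature and avoids the raw cast. A minimal, self-contained sketch, assuming the classic org.apache.flink.api.java.utils.ParameterTool location; the class name and parameter values are illustrative, not taken from the test above:

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.api.java.utils.ParameterTool;

public class FromMapExample {
    public static void main(String[] args) {
        // Build the configuration as a strongly typed Map<String, String>.
        Map<String, String> config = new HashMap<>();
        config.put("input", "myInput");
        config.put("expectedCount", "15");

        ParameterTool parameter = ParameterTool.fromMap(config);
        System.out.println(parameter.get("input"));             // myInput
        System.out.println(parameter.getInt("expectedCount"));  // 15
    }
}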
Use of java.util.Properties in project flink by apache, in the class ParameterToolTest, method testFromPropertiesFile.
@Test
public void testFromPropertiesFile() throws IOException {
    // tmp is the test class's JUnit TemporaryFolder rule.
    File propertiesFile = tmp.newFile();
    Properties props = new Properties();
    props.setProperty("input", "myInput");
    props.setProperty("expectedCount", "15");
    try (final OutputStream out = new FileOutputStream(propertiesFile)) {
        props.store(out, "Test properties");
    }
    ParameterTool parameter = ParameterTool.fromPropertiesFile(propertiesFile.getAbsolutePath());
    Assert.assertEquals(2, parameter.getNumberOfParameters());
    validate(parameter);
}
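The same store/load round trip works with java.util.Properties alone. A minimal sketch, independent of Flink; the file name and keys are illustrative:

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;

public class PropertiesRoundTrip {
    public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("example", ".properties");

        // store() escapes keys and values and writes an optional comment header.
        Properties out = new Properties();
        out.setProperty("input", "myInput");
        out.setProperty("expectedCount", "15");
        try (OutputStream os = Files.newOutputStream(file)) {
            out.store(os, "Example properties");
        }

        // load() skips the comment header and restores the key/value pairs.
        Properties in = new Properties();
        try (InputStream is = Files.newInputStream(file)) {
            in.load(is);
        }
        System.out.println(in.getProperty("expectedCount")); // 15
    }
}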
Use of java.util.Properties in project hadoop by apache, in the class HttpServer2, method getFilterProperties.
private static Properties getFilterProperties(Configuration conf, String prefix) {
    Properties prop = new Properties();
    Map<String, String> filterConfig = AuthenticationFilterInitializer.getFilterConfigMap(conf, prefix);
    // putAll copies the String key/value pairs from the filter configuration map.
    prop.putAll(filterConfig);
    return prop;
}
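A small, self-contained sketch of the putAll pattern used above; the key names are illustrative. Because Properties extends Hashtable<Object, Object>, putAll accepts any Map, but getProperty only returns entries whose keys and values are Strings:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PutAllExample {
    public static void main(String[] args) {
        Map<String, String> filterConfig = new HashMap<>();
        filterConfig.put("type", "kerberos");
        filterConfig.put("token.validity", "36000");

        // Copy the map entries into a Properties object; since both keys and
        // values are Strings, getProperty can read them back directly.
        Properties prop = new Properties();
        prop.putAll(filterConfig);

        System.out.println(prop.getProperty("type"));           // kerberos
        System.out.println(prop.getProperty("token.validity")); // 36000
    }
}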
Use of java.util.Properties in project hadoop by apache, in the class HttpServer2, method constructSecretProvider.
private static SignerSecretProvider constructSecretProvider(final Builder b, ServletContext ctx) throws Exception {
    final Configuration conf = b.conf;
    Properties config = getFilterProperties(conf, b.authFilterConfigurationPrefix);
    return AuthenticationFilter.constructSecretProvider(ctx, config, b.disallowFallbackToRandomSignerSecretProvider);
}
Use of java.util.Properties in project hadoop by apache, in the class MiniKdc, method main.
public static void main(String[] args) throws Exception {
    if (args.length < 4) {
        System.out.println("Arguments: <WORKDIR> <MINIKDCPROPERTIES> " + "<KEYTABFILE> [<PRINCIPALS>]+");
        System.exit(1);
    }
    File workDir = new File(args[0]);
    if (!workDir.exists()) {
        throw new RuntimeException("Specified work directory does not exist: " + workDir.getAbsolutePath());
    }
    // Start from the default MiniKdc configuration, then overlay the user-supplied properties file.
    Properties conf = createConf();
    File file = new File(args[1]);
    if (!file.exists()) {
        throw new RuntimeException("Specified configuration does not exist: " + file.getAbsolutePath());
    }
    Properties userConf = new Properties();
    InputStreamReader r = null;
    try {
        r = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8);
        userConf.load(r);
    } finally {
        if (r != null) {
            r.close();
        }
    }
    for (Map.Entry<?, ?> entry : userConf.entrySet()) {
        conf.put(entry.getKey(), entry.getValue());
    }
    final MiniKdc miniKdc = new MiniKdc(conf, workDir);
    miniKdc.start();
    File krb5conf = new File(workDir, "krb5.conf");
    if (miniKdc.getKrb5conf().renameTo(krb5conf)) {
        File keytabFile = new File(args[2]).getAbsoluteFile();
        String[] principals = new String[args.length - 3];
        System.arraycopy(args, 3, principals, 0, args.length - 3);
        miniKdc.createPrincipal(keytabFile, principals);
        System.out.println();
        System.out.println("Standalone MiniKdc Running");
        System.out.println("---------------------------------------------------");
        System.out.println(" Realm : " + miniKdc.getRealm());
        System.out.println(" Running at : " + miniKdc.getHost() + ":" + miniKdc.getPort());
        System.out.println(" krb5conf : " + krb5conf);
        System.out.println();
        System.out.println(" created keytab : " + keytabFile);
        System.out.println(" with principals : " + Arrays.asList(principals));
        System.out.println();
        System.out.println(" Do <CTRL-C> or kill <PID> to stop it");
        System.out.println("---------------------------------------------------");
        System.out.println();
        // Stop the KDC cleanly when the standalone process is terminated.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                miniKdc.stop();
            }
        });
    } else {
        throw new RuntimeException("Cannot rename KDC's krb5conf to " + krb5conf.getAbsolutePath());
    }
}
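The load-and-overlay logic in main (read a UTF-8 properties file, then copy it over a set of defaults) can also be written with try-with-resources and the Properties(defaults) constructor. A minimal sketch, not taken from the Hadoop source; the file path and keys are illustrative:

import java.io.IOException;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;

public class LoadWithDefaults {
    public static void main(String[] args) throws IOException {
        // Built-in defaults; values loaded from the file take precedence on lookup.
        Properties defaults = new Properties();
        defaults.setProperty("example.port", "0");
        defaults.setProperty("example.bind.address", "localhost");

        // Chaining through the Properties(defaults) constructor replaces the
        // manual entrySet() copy loop: getProperty falls back to the defaults
        // for any key the file does not define.
        Properties conf = new Properties(defaults);
        try (Reader reader = Files.newBufferedReader(Paths.get("example.properties"), StandardCharsets.UTF_8)) {
            conf.load(reader);
        }

        System.out.println(conf.getProperty("example.bind.address"));
    }
}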