Example 71 with Properties

Use of java.util.Properties in project flink by apache.

From the class SecureTestEnvironment, the method prepare:

public static void prepare(TemporaryFolder tempFolder) {
    try {
        File baseDirForSecureRun = tempFolder.newFolder();
        LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);
        String hostName = "localhost";
        Properties kdcConf = MiniKdc.createConf();
        if (LOG.isDebugEnabled()) {
            kdcConf.setProperty(MiniKdc.DEBUG, "true");
        }
        kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, hostName);
        kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
        kdc.start();
        LOG.info("Started Mini KDC");
        File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
        testKeytab = keytabFile.getAbsolutePath();
        testZkServerPrincipal = "zookeeper/127.0.0.1";
        testZkClientPrincipal = "zk-client/127.0.0.1";
        testKafkaServerPrincipal = "kafka/" + hostName;
        hadoopServicePrincipal = "hadoop/" + hostName;
        testPrincipal = "client/" + hostName;
        kdc.createPrincipal(keytabFile, testPrincipal, testZkServerPrincipal, hadoopServicePrincipal, testZkClientPrincipal, testKafkaServerPrincipal);
        testPrincipal = testPrincipal + "@" + kdc.getRealm();
        testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
        testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
        testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
        hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();
        LOG.info("-------------------------------------------------------------------");
        LOG.info("Test Principal: {}", testPrincipal);
        LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
        LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
        LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
        LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
        LOG.info("Test Keytab: {}", testKeytab);
        LOG.info("-------------------------------------------------------------------");
        // A security context is established so that non-Hadoop applications that require
        // JAAS-based SASL/Kerberos authentication can work. For Hadoop-specific applications,
        // the context can be reinitialized with the Hadoop configuration by calling
        // ctx.setHadoopConfiguration() so that the UGI implementation works properly.
        // See the YARN test case module for reference.
        Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
        flinkConfig.setBoolean(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_CONTEXTS, "Client,KafkaClient");
        SecurityUtils.SecurityConfiguration ctx = new SecurityUtils.SecurityConfiguration(flinkConfig);
        TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());
        populateJavaPropertyVariables();
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred while preparing secure environment.", e);
    }
}
Also used: Configuration(org.apache.flink.configuration.Configuration) GlobalConfiguration(org.apache.flink.configuration.GlobalConfiguration) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) SecurityUtils(org.apache.flink.runtime.security.SecurityUtils) Properties(java.util.Properties) File(java.io.File)
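
The point of the Properties usage above is that MiniKdc is configured entirely through a java.util.Properties object. Below is a minimal standalone sketch of that pattern, assuming hadoop-minikdc is on the test classpath; the work directory and principal name are illustrative, not Flink's actual values.

import java.io.File;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcSketch {
    public static void main(String[] args) throws Exception {
        // createConf() returns a Properties object pre-populated with sensible defaults.
        Properties kdcConf = MiniKdc.createConf();
        kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, "localhost");

        File workDir = new File("target/minikdc"); // hypothetical scratch directory
        workDir.mkdirs();
        MiniKdc kdc = new MiniKdc(kdcConf, workDir);
        kdc.start();
        try {
            // Principals are materialized into a keytab file for later JAAS logins.
            File keytab = new File(workDir, "test.keytab");
            kdc.createPrincipal(keytab, "client/localhost");
            System.out.println("Realm: " + kdc.getRealm());
        } finally {
            kdc.stop();
        }
    }
}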

Example 72 with Properties

Use of java.util.Properties in project flink by apache.

From the class FlinkYarnSessionCli, the method run:

public int run(String[] args) {
    //
    //	Command Line Options
    //
    Options options = new Options();
    addGeneralOptions(options);
    addRunOptions(options);
    CommandLineParser parser = new PosixParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.out.println(e.getMessage());
        printUsage();
        return 1;
    }
    // Query cluster for metrics
    if (cmd.hasOption(QUERY.getOpt())) {
        AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor();
        String description;
        try {
            description = yarnDescriptor.getClusterDescription();
        } catch (Exception e) {
            System.err.println("Error while querying the YARN cluster for available resources: " + e.getMessage());
            e.printStackTrace(System.err);
            return 1;
        }
        System.out.println(description);
        return 0;
    } else if (cmd.hasOption(APPLICATION_ID.getOpt())) {
        AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor();
        // Configure the ZK namespace depending on the value passed
        String zkNamespace = cmd.hasOption(ZOOKEEPER_NAMESPACE.getOpt())
                ? cmd.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt())
                : yarnDescriptor.getFlinkConfiguration().getString(HA_ZOOKEEPER_NAMESPACE_KEY, cmd.getOptionValue(APPLICATION_ID.getOpt()));
        LOG.info("Going to use the ZK namespace: {}", zkNamespace);
        yarnDescriptor.getFlinkConfiguration().setString(HA_ZOOKEEPER_NAMESPACE_KEY, zkNamespace);
        try {
            yarnCluster = yarnDescriptor.retrieve(cmd.getOptionValue(APPLICATION_ID.getOpt()));
        } catch (Exception e) {
            throw new RuntimeException("Could not retrieve existing Yarn application", e);
        }
        if (detachedMode) {
            LOG.info("The Flink YARN client has been started in detached mode. In order to stop " + "Flink on YARN, use the following command or a YARN web interface to stop it:\n" + "yarn application -kill " + APPLICATION_ID.getOpt());
            yarnCluster.disconnect();
        } else {
            runInteractiveCli(yarnCluster, true);
        }
    } else {
        AbstractYarnClusterDescriptor yarnDescriptor;
        try {
            yarnDescriptor = createDescriptor(null, cmd);
        } catch (Exception e) {
            System.err.println("Error while starting the YARN Client: " + e.getMessage());
            e.printStackTrace(System.err);
            return 1;
        }
        try {
            yarnCluster = yarnDescriptor.deploy();
        } catch (Exception e) {
            System.err.println("Error while deploying YARN cluster: " + e.getMessage());
            e.printStackTrace(System.err);
            return 1;
        }
        //------------------ ClusterClient deployed, handle connection details
        String jobManagerAddress = yarnCluster.getJobManagerAddress().getAddress().getHostName() + ":" + yarnCluster.getJobManagerAddress().getPort();
        System.out.println("Flink JobManager is now running on " + jobManagerAddress);
        System.out.println("JobManager Web Interface: " + yarnCluster.getWebInterfaceURL());
        // file that we write into the conf/ dir containing the jobManager address and the dop.
        File yarnPropertiesFile = getYarnPropertiesLocation(yarnCluster.getFlinkConfiguration());
        Properties yarnProps = new Properties();
        yarnProps.setProperty(YARN_APPLICATION_ID_KEY, yarnCluster.getApplicationId().toString());
        if (yarnDescriptor.getTaskManagerSlots() != -1) {
            String parallelism = Integer.toString(yarnDescriptor.getTaskManagerSlots() * yarnDescriptor.getTaskManagerCount());
            yarnProps.setProperty(YARN_PROPERTIES_PARALLELISM, parallelism);
        }
        // add dynamic properties
        if (yarnDescriptor.getDynamicPropertiesEncoded() != null) {
            yarnProps.setProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING, yarnDescriptor.getDynamicPropertiesEncoded());
        }
        writeYarnProperties(yarnProps, yarnPropertiesFile);
        if (detachedMode) {
            // print info and quit:
            LOG.info("The Flink YARN client has been started in detached mode. In order to stop " + "Flink on YARN, use the following command or a YARN web interface to stop it:\n" + "yarn application -kill " + yarnCluster.getApplicationId() + System.lineSeparator() + "Please also note that the temporary files of the YARN session in {} will not be removed.", yarnDescriptor.getSessionFilesDir());
            yarnCluster.waitForClusterToBeReady();
            yarnCluster.disconnect();
        } else {
            runInteractiveCli(yarnCluster, acceptInteractiveInput);
        }
    }
    return 0;
}
Also used: Options(org.apache.commons.cli.Options) HighAvailabilityOptions(org.apache.flink.configuration.HighAvailabilityOptions) CustomCommandLine(org.apache.flink.client.cli.CustomCommandLine) CommandLine(org.apache.commons.cli.CommandLine) PosixParser(org.apache.commons.cli.PosixParser) AbstractYarnClusterDescriptor(org.apache.flink.yarn.AbstractYarnClusterDescriptor) CommandLineParser(org.apache.commons.cli.CommandLineParser) Properties(java.util.Properties) File(java.io.File) IllegalConfigurationException(org.apache.flink.configuration.IllegalConfigurationException) IOException(java.io.IOException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)
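
The .yarn-properties file written by writeYarnProperties above is a plain java.util.Properties file, so reading it back is the standard store/load round trip. A minimal sketch using only JDK classes; the key names and values below are illustrative stand-ins for the ones the CLI writes.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Properties;

public class YarnPropsRoundTrip {
    public static void main(String[] args) throws IOException {
        File file = File.createTempFile("yarn-properties-", ".tmp"); // stand-in for the conf/ location
        Properties out = new Properties();
        out.setProperty("applicationID", "application_1234_0001"); // illustrative values
        out.setProperty("parallelism", "8");
        try (OutputStream os = new FileOutputStream(file)) {
            out.store(os, "Generated YARN properties file"); // store() writes an escaped key=value format
        }
        Properties in = new Properties();
        try (InputStream is = new FileInputStream(file)) {
            in.load(is);
        }
        System.out.println(in.getProperty("applicationID")); // application_1234_0001
    }
}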

Example 73 with Properties

Use of java.util.Properties in project groovy by apache.

From the class GroovyDocToolTest, the method testCharsetFallbackToFileEncoding:

public void testCharsetFallbackToFileEncoding() throws Exception {
    String expectedCharset = "ISO-8859-1";
    Properties props = new Properties();
    props.setProperty("fileEncoding", expectedCharset);
    GroovyDocTool tool = new GroovyDocTool(new FileSystemResourceManager("src"), new String[0], new String[0], new String[0], new String[0], new ArrayList<LinkArgument>(), props);
    assertEquals("'charset' falls back to 'fileEncoding' if not provided", expectedCharset, tool.properties.getProperty("charset"));
}
Also used: Properties(java.util.Properties)

Example 74 with Properties

Use of java.util.Properties in project groovy by apache.

From the class GroovyDocToolTest, the method testFileEncodingFallbackToCharset:

public void testFileEncodingFallbackToCharset() throws Exception {
    String expectedCharset = "ISO-8859-1";
    Properties props = new Properties();
    props.setProperty("charset", expectedCharset);
    GroovyDocTool tool = new GroovyDocTool(new FileSystemResourceManager("src"), new String[0], new String[0], new String[0], new String[0], new ArrayList<LinkArgument>(), props);
    assertEquals("'fileEncoding' falls back to 'charset' if not provided", expectedCharset, tool.properties.getProperty("fileEncoding"));
}
Also used: Properties(java.util.Properties)
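
Both Groovy tests exercise the same two-way fallback: when only one of 'charset' and 'fileEncoding' is set, the other inherits its value. A plain-JDK sketch of that logic follows; the applyFallback helper is hypothetical, not GroovyDocTool's actual code.

import java.util.Properties;

public class FallbackSketch {
    // Copy the value of one key to the other when exactly one of them is present.
    static void applyFallback(Properties props, String a, String b) {
        String valueA = props.getProperty(a);
        String valueB = props.getProperty(b);
        if (valueA == null && valueB != null) {
            props.setProperty(a, valueB);
        } else if (valueB == null && valueA != null) {
            props.setProperty(b, valueA);
        }
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("fileEncoding", "ISO-8859-1");
        applyFallback(props, "charset", "fileEncoding");
        System.out.println(props.getProperty("charset")); // ISO-8859-1
    }
}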

Example 75 with Properties

Use of java.util.Properties in project flink by apache.

From the class AbstractParameterToolTest, the method validate:

protected void validate(ParameterTool parameter) {
    ClosureCleaner.ensureSerializable(parameter);
    Assert.assertEquals("myInput", parameter.getRequired("input"));
    Assert.assertEquals("myDefaultValue", parameter.get("output", "myDefaultValue"));
    Assert.assertNull(parameter.get("whatever"));
    Assert.assertEquals(15L, parameter.getLong("expectedCount", -1L));
    Assert.assertTrue(parameter.getBoolean("thisIsUseful", true));
    Assert.assertEquals(42, parameter.getByte("myDefaultByte", (byte) 42));
    Assert.assertEquals(42, parameter.getShort("myDefaultShort", (short) 42));
    Configuration config = parameter.getConfiguration();
    Assert.assertEquals(15L, config.getLong("expectedCount", -1L));
    Properties props = parameter.getProperties();
    Assert.assertEquals("myInput", props.getProperty("input"));
    props = null;
    // -------- test the default file creation ------------
    try {
        String pathToFile = tmp.newFile().getAbsolutePath();
        parameter.createPropertiesFile(pathToFile);
        Properties defaultProps = new Properties();
        try (FileInputStream fis = new FileInputStream(pathToFile)) {
            defaultProps.load(fis);
        }
        Assert.assertEquals("myDefaultValue", defaultProps.get("output"));
        Assert.assertEquals("-1", defaultProps.get("expectedCount"));
        Assert.assertTrue(defaultProps.containsKey("input"));
    } catch (IOException e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Also used: Configuration(org.apache.flink.configuration.Configuration) IOException(java.io.IOException) Properties(java.util.Properties) FileInputStream(java.io.FileInputStream)
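
The default-value behaviour the test checks can be reproduced with java.util.Properties alone: an instance constructed with a defaults table consults it in getProperty whenever a key is absent. A minimal sketch; the key names mirror the test but are otherwise illustrative.

import java.util.Properties;

public class DefaultsSketch {
    public static void main(String[] args) {
        Properties defaults = new Properties();
        defaults.setProperty("output", "myDefaultValue");
        defaults.setProperty("expectedCount", "-1");

        // The defaults table is consulted only when the key is missing here.
        Properties props = new Properties(defaults);
        props.setProperty("input", "myInput");

        System.out.println(props.getProperty("input"));    // myInput
        System.out.println(props.getProperty("output"));   // myDefaultValue (from defaults)
        System.out.println(props.getProperty("whatever")); // null
    }
}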

Aggregations

Properties (java.util.Properties): 9354
Test (org.junit.Test): 3005
IOException (java.io.IOException): 1277
Connection (java.sql.Connection): 1179
File (java.io.File): 1013
ResultSet (java.sql.ResultSet): 860
ConfigurationProperties (org.apache.geode.distributed.ConfigurationProperties): 819
PreparedStatement (java.sql.PreparedStatement): 791
InputStream (java.io.InputStream): 614
FileInputStream (java.io.FileInputStream): 598
HashMap (java.util.HashMap): 475
Map (java.util.Map): 387
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 387
ArrayList (java.util.ArrayList): 371
DistributedTest (org.apache.geode.test.junit.categories.DistributedTest): 321
SQLException (java.sql.SQLException): 308
Before (org.junit.Before): 272
AttributesFactory (org.apache.geode.cache.AttributesFactory): 245
InitialContext (javax.naming.InitialContext): 214
Configuration (org.apache.hadoop.conf.Configuration): 187