
Example 21 with KafkaServerStartable

use of kafka.server.KafkaServerStartable in project testcases by coheigea.

the class KafkaSentryAuthorizerTest method setup.

@org.junit.BeforeClass
public static void setup() throws Exception {
    zkServer = new TestingServer();
    // Get a random port
    ServerSocket serverSocket = new ServerSocket(0);
    port = serverSocket.getLocalPort();
    serverSocket.close();
    final Properties props = new Properties();
    props.put("broker.id", 1);
    props.put("host.name", "localhost");
    props.put("port", port);
    props.put("log.dir", "/tmp/kafka");
    props.put("zookeeper.connect", zkServer.getConnectString());
    props.put("replica.socket.timeout.ms", "1500");
    props.put("controlled.shutdown.enable", Boolean.TRUE.toString());
    // Enable SSL
    props.put("listeners", "SSL://localhost:" + port);
    props.put("ssl.keystore.location", KafkaAuthorizerTest.class.getResource("/servicestore.jks").getPath());
    props.put("ssl.keystore.password", "sspass");
    props.put("ssl.key.password", "skpass");
    props.put("ssl.truststore.location", KafkaAuthorizerTest.class.getResource("/truststore.jks").getPath());
    props.put("ssl.truststore.password", "security");
    props.put("security.inter.broker.protocol", "SSL");
    props.put("ssl.client.auth", "required");
    // Plug in Apache Sentry authorizer
    props.put("authorizer.class.name", "org.apache.sentry.kafka.authorizer.SentryKafkaAuthorizer");
    props.put("sentry.kafka.site.url", "file:" + KafkaAuthorizerTest.class.getResource("/sentry-site.xml").getPath());
    KafkaConfig config = new KafkaConfig(props);
    kafkaServer = new KafkaServerStartable(config);
    kafkaServer.startup();
    // Create some topics
    ZkClient zkClient = new ZkClient(zkServer.getConnectString(), 30000, 30000, ZKStringSerializer$.MODULE$);
    final ZkUtils zkUtils = new ZkUtils(zkClient, new ZkConnection(zkServer.getConnectString()), false);
    AdminUtils.createTopic(zkUtils, "test", 1, 1, new Properties(), RackAwareMode.Enforced$.MODULE$);
    AdminUtils.createTopic(zkUtils, "dev", 1, 1, new Properties(), RackAwareMode.Enforced$.MODULE$);
}
Also used : TestingServer(org.apache.curator.test.TestingServer) KafkaAuthorizerTest(org.apache.coheigea.bigdata.kafka.KafkaAuthorizerTest) ZkClient(org.I0Itec.zkclient.ZkClient) ServerSocket(java.net.ServerSocket) KafkaServerStartable(kafka.server.KafkaServerStartable) ZkUtils(kafka.utils.ZkUtils) Properties(java.util.Properties) ZkConnection(org.I0Itec.zkclient.ZkConnection) KafkaConfig(kafka.server.KafkaConfig)
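
The listing above does not include the matching teardown. A minimal sketch, assuming the kafkaServer and zkServer fields initialized in setup() above, could look like this:

@org.junit.AfterClass
public static void cleanup() throws Exception {
    // Stop the embedded broker first, then the in-process ZooKeeper test server.
    if (kafkaServer != null) {
        kafkaServer.shutdown();
        kafkaServer.awaitShutdown();
    }
    if (zkServer != null) {
        // TestingServer implements Closeable; close() stops it and cleans up its temp data.
        zkServer.close();
    }
}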

Example 22 with KafkaServerStartable

use of kafka.server.KafkaServerStartable in project apex-malhar by apache.

the class KafkaOperatorTestBase method startKafkaServer.

public void startKafkaServer(int clusterid, int brokerid, int defaultPartitions) {
    // Before starting, clean the Kafka log directory if it exists
    FileUtils.deleteQuietly(new File(baseDir, kafkaBaseDir));
    Properties props = new Properties();
    props.setProperty("broker.id", "" + brokerid);
    props.setProperty("log.dirs", new File(baseDir, kafkadir[clusterid][brokerid]).toString());
    props.setProperty("zookeeper.connect", "localhost:" + TEST_ZOOKEEPER_PORT[clusterid]);
    props.setProperty("port", "" + TEST_KAFKA_BROKER_PORT[clusterid][brokerid]);
    props.setProperty("default.replication.factor", "1");
    // Set this to 50000 to boost performance so that most test data stays in memory before being flushed to disk
    props.setProperty("log.flush.interval.messages", "50000");
    if (hasMultiPartition) {
        props.setProperty("num.partitions", "2");
    } else {
        props.setProperty("num.partitions", "1");
    }
    broker[clusterid][brokerid] = new KafkaServerStartable(new KafkaConfig(props));
    broker[clusterid][brokerid].startup();
}
Also used : KafkaServerStartable(kafka.server.KafkaServerStartable) Properties(java.util.Properties) File(java.io.File) KafkaConfig(kafka.server.KafkaConfig)
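
A corresponding shutdown helper is not shown here; a minimal sketch, assuming the broker[clusterid][brokerid] array referenced above, might be:

public void stopKafkaServers() {
    // Shut down every broker that was started; mirrors the broker[clusterid][brokerid] layout above.
    for (KafkaServerStartable[] cluster : broker) {
        for (KafkaServerStartable b : cluster) {
            if (b != null) {
                b.shutdown();
                b.awaitShutdown();
            }
        }
    }
}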

Example 23 with KafkaServerStartable

use of kafka.server.KafkaServerStartable in project ranger by apache.

the class KafkaRangerAuthorizerGSSTest method setup.

@org.junit.BeforeClass
public static void setup() throws Exception {
    String basedir = System.getProperty("basedir");
    if (basedir == null) {
        basedir = new File(".").getCanonicalPath();
    }
    configureKerby(basedir);
    // JAAS Config file - We need to point to the correct keytab files
    Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/kafka_kerberos.jaas");
    String content = new String(Files.readAllBytes(path), StandardCharsets.UTF_8);
    content = content.replaceAll("<basedir>", basedir);
    // content = content.replaceAll("zookeeper/localhost", "zookeeper/" + address);
    Path path2 = FileSystems.getDefault().getPath(basedir, "/target/test-classes/kafka_kerberos.jaas");
    Files.write(path2, content.getBytes(StandardCharsets.UTF_8));
    System.setProperty("java.security.auth.login.config", path2.toString());
    // Set up Zookeeper to require SASL
    Map<String, Object> zookeeperProperties = new HashMap<>();
    zookeeperProperties.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
    zookeeperProperties.put("requireClientAuthScheme", "sasl");
    zookeeperProperties.put("jaasLoginRenew", "3600000");
    InstanceSpec instanceSpec = new InstanceSpec(null, -1, -1, -1, true, 1, -1, -1, zookeeperProperties, "localhost");
    zkServer = new TestingServer(instanceSpec, true);
    // Get a random port
    ServerSocket serverSocket = new ServerSocket(0);
    port = serverSocket.getLocalPort();
    serverSocket.close();
    tempDir = Files.createTempDirectory("kafka");
    LOG.info("Port is {}", port);
    LOG.info("Temporary directory is at {}", tempDir);
    final Properties props = new Properties();
    props.put("broker.id", 1);
    props.put("host.name", "localhost");
    props.put("port", port);
    props.put("log.dir", tempDir.toString());
    props.put("zookeeper.connect", zkServer.getConnectString());
    props.put("replica.socket.timeout.ms", "1500");
    props.put("controlled.shutdown.enable", Boolean.TRUE.toString());
    // Enable SASL_PLAINTEXT
    props.put("listeners", "SASL_PLAINTEXT://localhost:" + port);
    props.put("security.inter.broker.protocol", "SASL_PLAINTEXT");
    props.put("sasl.enabled.mechanisms", "GSSAPI");
    props.put("sasl.mechanism.inter.broker.protocol", "GSSAPI");
    props.put("sasl.kerberos.service.name", "kafka");
    props.put("offsets.topic.replication.factor", (short) 1);
    props.put("offsets.topic.num.partitions", 1);
    // Plug in Apache Ranger authorizer
    props.put("authorizer.class.name", "org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer");
    // Create users for testing
    UserGroupInformation.createUserForTesting("client@kafka.apache.org", new String[] { "public" });
    UserGroupInformation.createUserForTesting("kafka/localhost@kafka.apache.org", new String[] { "IT" });
    KafkaConfig config = new KafkaConfig(props);
    kafkaServer = new KafkaServerStartable(config);
    kafkaServer.startup();
    // Create some topics
    final Properties adminProps = new Properties();
    adminProps.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + port);
    adminProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
    adminProps.put(SaslConfigs.SASL_MECHANISM, "GSSAPI");
    KafkaTestUtils.createSomeTopics(adminProps);
}
Also used : Path(java.nio.file.Path) TestingServer(org.apache.curator.test.TestingServer) InstanceSpec(org.apache.curator.test.InstanceSpec) HashMap(java.util.HashMap) ServerSocket(java.net.ServerSocket) KafkaServerStartable(kafka.server.KafkaServerStartable) Properties(java.util.Properties) File(java.io.File) KafkaConfig(kafka.server.KafkaConfig)
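
KafkaTestUtils.createSomeTopics() is project-specific and not reproduced here. As a hedged sketch, a client producing to this broker over SASL_PLAINTEXT could be configured roughly as follows (the topic name "test" and the helper method name are assumptions; the security settings mirror the broker properties above):

// Assumed imports: org.apache.kafka.clients.producer.KafkaProducer,
// org.apache.kafka.clients.producer.Producer, org.apache.kafka.clients.producer.ProducerRecord
private static void sendKerberosTestRecord() throws Exception {
    final Properties producerProps = new Properties();
    producerProps.put("bootstrap.servers", "localhost:" + port);
    producerProps.put("security.protocol", "SASL_PLAINTEXT");
    producerProps.put("sasl.mechanism", "GSSAPI");
    producerProps.put("sasl.kerberos.service.name", "kafka");
    producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    // The client JAAS login context comes from the java.security.auth.login.config property set in setup().
    try (Producer<String, String> producer = new KafkaProducer<>(producerProps)) {
        // Blocks until the record is acknowledged or the send fails.
        producer.send(new ProducerRecord<>("test", "somekey", "somevalue")).get();
    }
}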

Example 24 with KafkaServerStartable

use of kafka.server.KafkaServerStartable in project ranger by apache.

the class KafkaRangerAuthorizerTest method setup.

@org.junit.BeforeClass
public static void setup() throws Exception {
    // Create keys
    String serviceDN = "CN=localhost,O=Apache,L=Dublin,ST=Leinster,C=IE";
    String clientDN = "CN=localhost,O=Apache,L=Dublin,ST=Leinster,C=IE";
    // Create a truststore
    KeyStore keystore = KeyStore.getInstance(KeyStore.getDefaultType());
    keystore.load(null, "security".toCharArray());
    serviceKeystorePath = KafkaTestUtils.createAndStoreKey(serviceDN, serviceDN, BigInteger.valueOf(30), "sspass", "myservicekey", "skpass", keystore);
    clientKeystorePath = KafkaTestUtils.createAndStoreKey(clientDN, clientDN, BigInteger.valueOf(31), "cspass", "myclientkey", "ckpass", keystore);
    File truststoreFile = File.createTempFile("kafkatruststore", ".jks");
    try (OutputStream output = new FileOutputStream(truststoreFile)) {
        keystore.store(output, "security".toCharArray());
    }
    truststorePath = truststoreFile.getPath();
    zkServer = new TestingServer();
    // Get a random port
    ServerSocket serverSocket = new ServerSocket(0);
    port = serverSocket.getLocalPort();
    serverSocket.close();
    tempDir = Files.createTempDirectory("kafka");
    final Properties props = new Properties();
    props.put("broker.id", 1);
    props.put("host.name", "localhost");
    props.put("port", port);
    props.put("log.dir", tempDir.toString());
    props.put("zookeeper.connect", zkServer.getConnectString());
    props.put("replica.socket.timeout.ms", "1500");
    props.put("controlled.shutdown.enable", Boolean.TRUE.toString());
    // Enable SSL
    props.put("listeners", "SSL://localhost:" + port);
    props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, serviceKeystorePath);
    props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "sspass");
    props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "skpass");
    props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststorePath);
    props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "security");
    props.put("security.inter.broker.protocol", "SSL");
    props.put("ssl.client.auth", "required");
    props.put("offsets.topic.replication.factor", (short) 1);
    props.put("offsets.topic.num.partitions", 1);
    // Plug in Apache Ranger authorizer
    props.put("authorizer.class.name", "org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer");
    // Create users for testing
    UserGroupInformation.createUserForTesting(clientDN, new String[] { "public" });
    UserGroupInformation.createUserForTesting(serviceDN, new String[] { "IT" });
    KafkaConfig config = new KafkaConfig(props);
    kafkaServer = new KafkaServerStartable(config);
    kafkaServer.startup();
    // Create some topics
    final Properties adminProps = new Properties();
    adminProps.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "localhost:" + port);
    adminProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    // ssl
    adminProps.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, serviceKeystorePath);
    adminProps.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "sspass");
    adminProps.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "skpass");
    adminProps.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststorePath);
    adminProps.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "security");
    KafkaTestUtils.createSomeTopics(adminProps);
}
Also used : TestingServer(org.apache.curator.test.TestingServer) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) FileOutputStream(java.io.FileOutputStream) ServerSocket(java.net.ServerSocket) KafkaServerStartable(kafka.server.KafkaServerStartable) Properties(java.util.Properties) KeyStore(java.security.KeyStore) File(java.io.File) KafkaConfig(kafka.server.KafkaConfig)
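
As a hedged sketch of the client side, a producer authenticating with the client keystore created above might be set up along these lines (the topic name "test" and the helper method name are assumptions; the keystore passwords match the createAndStoreKey() calls in setup()):

// Assumed imports: org.apache.kafka.clients.producer.KafkaProducer,
// org.apache.kafka.clients.producer.Producer, org.apache.kafka.clients.producer.ProducerRecord
private static void sendSslTestRecord() throws Exception {
    final Properties producerProps = new Properties();
    producerProps.put("bootstrap.servers", "localhost:" + port);
    producerProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    producerProps.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, clientKeystorePath);
    producerProps.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "cspass");
    producerProps.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "ckpass");
    producerProps.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststorePath);
    producerProps.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "security");
    producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    try (Producer<String, String> producer = new KafkaProducer<>(producerProps)) {
        producer.send(new ProducerRecord<>("test", "somekey", "somevalue")).get();
    }
}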

Example 25 with KafkaServerStartable

use of kafka.server.KafkaServerStartable in project ranger by apache.

the class KafkaRangerTopicCreationTest method setup.

@org.junit.BeforeClass
public static void setup() throws Exception {
    String basedir = System.getProperty("basedir");
    if (basedir == null) {
        basedir = new File(".").getCanonicalPath();
    }
    System.out.println("Base Dir " + basedir);
    configureKerby(basedir);
    // JAAS Config file - We need to point to the correct keytab files
    Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/kafka_kerberos.jaas");
    String content = new String(Files.readAllBytes(path), StandardCharsets.UTF_8);
    content = content.replaceAll("<basedir>", basedir);
    // content = content.replaceAll("zookeeper/localhost", "zookeeper/" + address);
    Path path2 = FileSystems.getDefault().getPath(basedir, "/target/test-classes/kafka_kerberos.jaas");
    Files.write(path2, content.getBytes(StandardCharsets.UTF_8));
    System.setProperty("java.security.auth.login.config", path2.toString());
    // Set up Zookeeper to require SASL
    Map<String, Object> zookeeperProperties = new HashMap<>();
    zookeeperProperties.put("authProvider.1", "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
    zookeeperProperties.put("requireClientAuthScheme", "sasl");
    zookeeperProperties.put("jaasLoginRenew", "3600000");
    InstanceSpec instanceSpec = new InstanceSpec(null, -1, -1, -1, true, 1, -1, -1, zookeeperProperties, "localhost");
    zkServer = new TestingServer(instanceSpec, true);
    // Get a random port
    ServerSocket serverSocket = new ServerSocket(0);
    port = serverSocket.getLocalPort();
    serverSocket.close();
    tempDir = Files.createTempDirectory("kafka");
    LOG.info("Port is {}", port);
    LOG.info("Temporary directory is at {}", tempDir);
    final Properties props = new Properties();
    props.put("broker.id", 1);
    props.put("host.name", "localhost");
    props.put("port", port);
    props.put("log.dir", tempDir.toString());
    props.put("zookeeper.connect", zkServer.getConnectString());
    props.put("replica.socket.timeout.ms", "1500");
    props.put("controlled.shutdown.enable", Boolean.TRUE.toString());
    // Enable SASL_PLAINTEXT
    props.put("listeners", "SASL_PLAINTEXT://localhost:" + port);
    props.put("security.inter.broker.protocol", "SASL_PLAINTEXT");
    props.put("sasl.enabled.mechanisms", "GSSAPI");
    props.put("sasl.mechanism.inter.broker.protocol", "GSSAPI");
    props.put("sasl.kerberos.service.name", "kafka");
    props.put("offsets.topic.replication.factor", (short) 1);
    props.put("offsets.topic.num.partitions", 1);
    // Plug in Apache Ranger authorizer
    props.put("authorizer.class.name", "org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer");
    // Create users for testing
    UserGroupInformation.createUserForTesting("client@kafka.apache.org", new String[] { "public" });
    UserGroupInformation.createUserForTesting("kafka/localhost@kafka.apache.org", new String[] { "IT" });
    KafkaConfig config = new KafkaConfig(props);
    kafkaServer = new KafkaServerStartable(config);
    kafkaServer.startup();
}
Also used : Path(java.nio.file.Path) TestingServer(org.apache.curator.test.TestingServer) InstanceSpec(org.apache.curator.test.InstanceSpec) HashMap(java.util.HashMap) ServerSocket(java.net.ServerSocket) KafkaServerStartable(kafka.server.KafkaServerStartable) Properties(java.util.Properties) File(java.io.File) KafkaConfig(kafka.server.KafkaConfig)
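
Since this test exercises topic creation, a hedged sketch of creating a topic against the broker above via the admin client API might look as follows (the topic name, partition count, and AdminClient usage are assumptions for illustration, not the project's own test body):

// Assumed imports: java.util.Collections, org.apache.kafka.clients.admin.AdminClient,
// org.apache.kafka.clients.admin.NewTopic
private static void createTestTopic() throws Exception {
    final Properties adminProps = new Properties();
    adminProps.put("bootstrap.servers", "localhost:" + port);
    adminProps.put("security.protocol", "SASL_PLAINTEXT");
    adminProps.put("sasl.mechanism", "GSSAPI");
    adminProps.put("sasl.kerberos.service.name", "kafka");
    try (AdminClient adminClient = AdminClient.create(adminProps)) {
        // One partition, replication factor 1; get() waits for the broker to finish the creation.
        adminClient.createTopics(Collections.singletonList(new NewTopic("test", 1, (short) 1))).all().get();
    }
}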

Aggregations

KafkaServerStartable (kafka.server.KafkaServerStartable): 27
KafkaConfig (kafka.server.KafkaConfig): 21
Properties (java.util.Properties): 17
File (java.io.File): 15
TestingServer (org.apache.curator.test.TestingServer): 12
ServerSocket (java.net.ServerSocket): 11
ZkClient (org.I0Itec.zkclient.ZkClient): 8
ZkUtils (kafka.utils.ZkUtils): 7
ZkConnection (org.I0Itec.zkclient.ZkConnection): 7
Path (java.nio.file.Path): 4
HashMap (java.util.HashMap): 4
InstanceSpec (org.apache.curator.test.InstanceSpec): 4
OfflineClusterIntegrationTest (com.linkedin.pinot.integration.tests.OfflineClusterIntegrationTest): 2
FileOutputStream (java.io.FileOutputStream): 2
OutputStream (java.io.OutputStream): 2
KeyStore (java.security.KeyStore): 2
ArrayList (java.util.ArrayList): 2
JSONArray (org.json.JSONArray): 2
JSONObject (org.json.JSONObject): 2
EmbeddedZookeeper.getResourceAsProperties (com.googlecode.jmxtrans.model.output.kafka.EmbeddedZookeeper.getResourceAsProperties): 1