
Example 11 with ZkClient

use of org.I0Itec.zkclient.ZkClient in project storm by apache.

the class KafkaUnit method setUp.

public void setUp() throws IOException {
    // setup ZK
    zkServer = new EmbeddedZookeeper();
    String zkConnect = ZK_HOST + ":" + zkServer.port();
    ZkClient zkClient = new ZkClient(zkConnect, 30000, 30000, ZKStringSerializer$.MODULE$);
    zkUtils = ZkUtils.apply(zkClient, false);
    // setup Broker
    Properties brokerProps = new Properties();
    brokerProps.setProperty("zookeeper.connect", zkConnect);
    brokerProps.setProperty("broker.id", "0");
    brokerProps.setProperty("log.dirs", Files.createTempDirectory("kafka-").toAbsolutePath().toString());
    brokerProps.setProperty("listeners", String.format("PLAINTEXT://%s:%d", KAFKA_HOST, KAFKA_PORT));
    KafkaConfig config = new KafkaConfig(brokerProps);
    MockTime mock = new MockTime();
    kafkaServer = TestUtils.createServer(config, mock);
    // setup default Producer
    createProducer();
}
Also used : ZkClient(org.I0Itec.zkclient.ZkClient) EmbeddedZookeeper(kafka.zk.EmbeddedZookeeper) Properties(java.util.Properties) MockTime(kafka.utils.MockTime) KafkaConfig(kafka.server.KafkaConfig)
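
The page does not show the matching cleanup. A minimal tearDown sketch, assuming the fields assigned above (kafkaServer, zkUtils, zkServer) and the standard shutdown methods on KafkaServer, ZkUtils and kafka.zk.EmbeddedZookeeper:

public void tearDown() {
    // stop the broker first, then release the ZooKeeper client and the embedded server
    if (kafkaServer != null) {
        kafkaServer.shutdown();
        kafkaServer.awaitShutdown();
    }
    if (zkUtils != null) {
        // ZkUtils.close() also closes the ZkClient wrapped in setUp
        zkUtils.close();
    }
    if (zkServer != null) {
        zkServer.shutdown();
    }
}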

Example 12 with ZkClient

use of org.I0Itec.zkclient.ZkClient in project motan by weibocom.

the class ZookeeperRegistryFactory method createRegistry.

@Override
protected Registry createRegistry(URL registryUrl) {
    try {
        int timeout = registryUrl.getIntParameter(URLParamType.connectTimeout.getName(), URLParamType.connectTimeout.getIntValue());
        int sessionTimeout = registryUrl.getIntParameter(URLParamType.registrySessionTimeout.getName(), URLParamType.registrySessionTimeout.getIntValue());
        ZkClient zkClient = new ZkClient(registryUrl.getParameter("address"), sessionTimeout, timeout);
        return new ZookeeperRegistry(registryUrl, zkClient);
    } catch (ZkException e) {
        LoggerUtil.error("[ZookeeperRegistry] fail to connect zookeeper, cause: " + e.getMessage());
        throw e;
    }
}
Also used : ZkClient(org.I0Itec.zkclient.ZkClient) ZkException(org.I0Itec.zkclient.exception.ZkException)
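
For context, a caller-side sketch. The registry URL construction mirrors the test below; the getRegistry entry point (inherited from motan's AbstractRegistryFactory, which caches registries and delegates to createRegistry) is an assumption here, as is the concrete address value:

// hypothetical usage: the "address" parameter is the ZooKeeper connect string read by createRegistry above
URL registryUrl = new URL("zookeeper", "127.0.0.1", 2181, "com.weibo.api.motan.registry.RegistryService");
registryUrl.addParameter("address", "127.0.0.1:2181");
Registry registry = new ZookeeperRegistryFactory().getRegistry(registryUrl);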

Example 13 with ZkClient

use of org.I0Itec.zkclient.ZkClient in project motan by weibocom.

the class ZookeeperRegistryTest method setUp.

@Before
public void setUp() throws Exception {
    // read the embedded ZooKeeper client port from zoo.cfg on the test classpath
    Properties properties = new Properties();
    InputStream in = EmbeddedZookeeper.class.getResourceAsStream("/zoo.cfg");
    properties.load(in);
    int port = Integer.parseInt(properties.getProperty("clientPort"));
    in.close();
    // registry, client and service URLs used by the test, all in group "aaa"
    URL zkUrl = new URL("zookeeper", "127.0.0.1", port, "com.weibo.api.motan.registry.RegistryService");
    clientUrl = new URL(MotanConstants.PROTOCOL_MOTAN, "127.0.0.1", 0, service);
    clientUrl.addParameter("group", "aaa");
    serviceUrl = new URL(MotanConstants.PROTOCOL_MOTAN, "127.0.0.1", 8001, service);
    serviceUrl.addParameter("group", "aaa");
    // start the embedded ZooKeeper, connect a ZkClient, and build the registry on top of it
    zookeeper = new EmbeddedZookeeper();
    zookeeper.start();
    zkClient = new ZkClient("127.0.0.1:" + port, 5000);
    registry = new ZookeeperRegistry(zkUrl, zkClient);
}
Also used : ZkClient(org.I0Itec.zkclient.ZkClient) InputStream(java.io.InputStream) Properties(java.util.Properties) URL(com.weibo.api.motan.rpc.URL) Before(org.junit.Before)
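
A hedged sketch of how this fixture is typically exercised. The register, available and discover methods are assumed from motan's RegistryService and DiscoveryService interfaces; the exact contents of the discovered list depend on the registry's ZooKeeper node layout:

@Test
public void testRegisterAndDiscover() {
    // publish the service URL, mark it available, then look it up from the client side
    registry.register(serviceUrl);
    registry.available(serviceUrl);
    List<URL> discovered = registry.discover(clientUrl);
    Assert.assertFalse(discovered.isEmpty());
}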

Example 14 with ZkClient

use of org.I0Itec.zkclient.ZkClient in project phoenix by apache.

the class PhoenixConsumerIT method setUp.

@Before
public void setUp() throws IOException, SQLException {
    // setup Zookeeper
    zkServer = new EmbeddedZookeeper();
    String zkConnect = ZKHOST + ":" + zkServer.port();
    zkClient = new ZkClient(zkConnect, 30000, 30000, ZKStringSerializer$.MODULE$);
    ZkUtils zkUtils = ZkUtils.apply(zkClient, false);
    // setup Broker
    Properties brokerProps = new Properties();
    brokerProps.setProperty("zookeeper.connect", zkConnect);
    brokerProps.setProperty("broker.id", "0");
    brokerProps.setProperty("log.dirs", Files.createTempDirectory("kafka-").toAbsolutePath().toString());
    brokerProps.setProperty("listeners", "PLAINTEXT://" + BROKERHOST + ":" + BROKERPORT);
    KafkaConfig config = new KafkaConfig(brokerProps);
    Time mock = new MockTime();
    kafkaServer = TestUtils.createServer(config, mock);
    kafkaServer.startup();
    // create topic
    AdminUtils.createTopic(zkUtils, TOPIC, 1, 1, new Properties());
    pConsumer = new PhoenixConsumer();
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    conn = DriverManager.getConnection(getUrl(), props);
}
Also used : ZkClient(org.I0Itec.zkclient.ZkClient) EmbeddedZookeeper(kafka.zk.EmbeddedZookeeper) PhoenixConsumer(org.apache.phoenix.kafka.consumer.PhoenixConsumer) MockTime(kafka.utils.MockTime) Time(kafka.utils.Time) ZkUtils(kafka.utils.ZkUtils) Properties(java.util.Properties) MockTime(kafka.utils.MockTime) KafkaConfig(kafka.server.KafkaConfig) Before(org.junit.Before)
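
After this setUp, test records can be written to TOPIC with the standard Kafka producer API. A minimal sketch, assuming the BROKERHOST/BROKERPORT constants from the fixture and plain string payloads:

Properties producerProps = new Properties();
producerProps.setProperty("bootstrap.servers", BROKERHOST + ":" + BROKERPORT);
producerProps.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
producerProps.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
// try-with-resources closes the producer, flushing any queued sends
try (KafkaProducer<String, String> producer = new KafkaProducer<>(producerProps)) {
    producer.send(new ProducerRecord<>(TOPIC, "row1", "value1"));
}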

Example 15 with ZkClient

use of org.I0Itec.zkclient.ZkClient in project druid by druid-io.

the class TestKafkaExtractionCluster method setUp.

@Before
public void setUp() throws Exception {
    zkTestServer = new TestingServer(-1, temporaryFolder.newFolder(), true);
    zkTestServer.start();
    closer.register(new Closeable() {

        @Override
        public void close() throws IOException {
            zkTestServer.stop();
        }
    });
    zkClient = new ZkClient(zkTestServer.getConnectString(), 10000, 10000, ZKStringSerializer$.MODULE$);
    closer.register(new Closeable() {

        @Override
        public void close() throws IOException {
            zkClient.close();
        }
    });
    if (!zkClient.exists("/kafka")) {
        zkClient.create("/kafka", null, CreateMode.PERSISTENT);
    }
    log.info("---------------------------Started ZK---------------------------");
    final String zkKafkaPath = "/kafka";
    final Properties serverProperties = new Properties();
    serverProperties.putAll(kafkaProperties);
    serverProperties.put("broker.id", "0");
    serverProperties.put("log.dir", temporaryFolder.newFolder().getAbsolutePath());
    serverProperties.put("log.cleaner.enable", "true");
    serverProperties.put("host.name", "127.0.0.1");
    serverProperties.put("zookeeper.connect", zkTestServer.getConnectString() + zkKafkaPath);
    serverProperties.put("zookeeper.session.timeout.ms", "10000");
    serverProperties.put("zookeeper.sync.time.ms", "200");
    kafkaConfig = new KafkaConfig(serverProperties);
    final long time = DateTime.parse("2015-01-01").getMillis();
    kafkaServer = new KafkaServer(kafkaConfig, new Time() {

        @Override
        public long milliseconds() {
            return time;
        }

        @Override
        public long nanoseconds() {
            return milliseconds() * 1_000_000;
        }

        @Override
        public void sleep(long ms) {
            try {
                Thread.sleep(ms);
            } catch (InterruptedException e) {
                throw Throwables.propagate(e);
            }
        }
    });
    kafkaServer.startup();
    closer.register(new Closeable() {

        @Override
        public void close() throws IOException {
            kafkaServer.shutdown();
            kafkaServer.awaitShutdown();
        }
    });
    int sleepCount = 0;
    while (!kafkaServer.kafkaController().isActive()) {
        Thread.sleep(100);
        if (++sleepCount > 10) {
            throw new InterruptedException("Controller took too long to awaken");
        }
    }
    log.info("---------------------------Started Kafka Server---------------------------");
    final ZkClient zkClient = new ZkClient(zkTestServer.getConnectString() + zkKafkaPath, 10000, 10000, ZKStringSerializer$.MODULE$);
    try (final AutoCloseable autoCloseable = new AutoCloseable() {

        @Override
        public void close() throws Exception {
            if (zkClient.exists(zkKafkaPath)) {
                try {
                    zkClient.deleteRecursive(zkKafkaPath);
                } catch (org.I0Itec.zkclient.exception.ZkException ex) {
                    log.warn(ex, "error deleting %s zk node", zkKafkaPath);
                }
            }
            zkClient.close();
        }
    }) {
        final Properties topicProperties = new Properties();
        topicProperties.put("cleanup.policy", "compact");
        if (!AdminUtils.topicExists(zkClient, topicName)) {
            AdminUtils.createTopic(zkClient, topicName, 1, 1, topicProperties);
        }
        log.info("---------------------------Created topic---------------------------");
        Assert.assertTrue(AdminUtils.topicExists(zkClient, topicName));
    }
    final Properties kafkaProducerProperties = makeProducerProperties();
    final Producer<byte[], byte[]> producer = new Producer<>(new ProducerConfig(kafkaProducerProperties));
    try (final AutoCloseable autoCloseable = new AutoCloseable() {

        @Override
        public void close() throws Exception {
            producer.close();
        }
    }) {
        producer.send(new KeyedMessage<>(topicName, StringUtils.toUtf8("abcdefg"), StringUtils.toUtf8("abcdefg")));
    }
    System.setProperty("druid.extensions.searchCurrentClassloader", "false");
    injector = Initialization.makeInjectorWithModules(GuiceInjectors.makeStartupInjector(), ImmutableList.of(new Module() {

        @Override
        public void configure(Binder binder) {
            binder.bindConstant().annotatedWith(Names.named("serviceName")).to("test");
            binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
        }
    }, // These injections fail under IntelliJ but are required for maven
    new NamespaceExtractionModule(), new KafkaExtractionNamespaceModule()));
    mapper = injector.getInstance(ObjectMapper.class);
    log.info("--------------------------- placed default item via producer ---------------------------");
    final Map<String, String> consumerProperties = new HashMap<>(kafkaProperties);
    consumerProperties.put("zookeeper.connect", zkTestServer.getConnectString() + zkKafkaPath);
    consumerProperties.put("zookeeper.session.timeout.ms", "10000");
    consumerProperties.put("zookeeper.sync.time.ms", "200");
    final KafkaLookupExtractorFactory kafkaLookupExtractorFactory = new KafkaLookupExtractorFactory(null, topicName, consumerProperties);
    factory = (KafkaLookupExtractorFactory) mapper.readValue(mapper.writeValueAsString(kafkaLookupExtractorFactory), LookupExtractorFactory.class);
    Assert.assertEquals(kafkaLookupExtractorFactory.getKafkaTopic(), factory.getKafkaTopic());
    Assert.assertEquals(kafkaLookupExtractorFactory.getKafkaProperties(), factory.getKafkaProperties());
    factory.start();
    closer.register(new Closeable() {

        @Override
        public void close() throws IOException {
            factory.close();
        }
    });
    log.info("--------------------------- started rename manager ---------------------------");
}
Also used : NamespaceExtractionModule(io.druid.server.lookup.namespace.NamespaceExtractionModule) HashMap(java.util.HashMap) Closeable(java.io.Closeable) DateTime(org.joda.time.DateTime) Time(kafka.utils.Time) Properties(java.util.Properties) Binder(com.google.inject.Binder) ProducerConfig(kafka.producer.ProducerConfig) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) TestingServer(org.apache.curator.test.TestingServer) ZkClient(org.I0Itec.zkclient.ZkClient) IOException(java.io.IOException) KafkaServer(kafka.server.KafkaServer) Producer(kafka.javaapi.producer.Producer) Module(com.google.inject.Module) NamespaceExtractionModule(io.druid.server.lookup.namespace.NamespaceExtractionModule) KafkaConfig(kafka.server.KafkaConfig) Before(org.junit.Before)
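
A hedged sketch of how a test might exercise the factory started above. LookupExtractorFactory.get() returning a LookupExtractor with a String apply(String) method is assumed from Druid's lookup API, and the lookup only resolves once the Kafka consumer has processed the "abcdefg" record produced in setUp, so a real test would poll or wait:

// hypothetical follow-up check, after waiting for the record to be consumed
LookupExtractor extractor = factory.get();
Assert.assertEquals("abcdefg", extractor.apply("abcdefg"));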

Aggregations

ZkClient (org.I0Itec.zkclient.ZkClient): 31
Properties (java.util.Properties): 10
ZkUtils (kafka.utils.ZkUtils): 7
KafkaConfig (kafka.server.KafkaConfig): 6
ZkConnection (org.I0Itec.zkclient.ZkConnection): 6
Before (org.junit.Before): 6
IOException (java.io.IOException): 5
File (java.io.File): 4
InputStream (java.io.InputStream): 4
MockTime (kafka.utils.MockTime): 4
Time (kafka.utils.Time): 4
EmbeddedZookeeper (kafka.zk.EmbeddedZookeeper): 3
DateTime (org.joda.time.DateTime): 3
PinotHelixResourceManager (com.linkedin.pinot.controller.helix.core.PinotHelixResourceManager): 2
URL (com.weibo.api.motan.rpc.URL): 2
ISE (io.druid.java.util.common.ISE): 2
HashSet (java.util.HashSet): 2
TopicExistsException (kafka.common.TopicExistsException): 2
TestingServer (org.apache.curator.test.TestingServer): 2
KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer): 2