Example 1 with Properties

use of java.util.Properties in project buck by facebook.

the class DefaultAndroidDirectoryResolver method findNdkVersionFromDirectory.

/**
   * Returns the NDK version found in the given directory.
   * @param ndkDirectory Path to the folder that contains the NDK.
   * @return A string containing the NDK version, or absent if no version file is present.
   */
public static Optional<String> findNdkVersionFromDirectory(Path ndkDirectory) {
    Path newNdk = ndkDirectory.resolve(NDK_POST_R11_VERSION_FILENAME);
    Path oldNdk = ndkDirectory.resolve(NDK_PRE_R11_VERSION_FILENAME);
    boolean newNdkPathFound = Files.exists(newNdk);
    boolean oldNdkPathFound = Files.exists(oldNdk);
    if (newNdkPathFound && oldNdkPathFound) {
        throw new HumanReadableException("Android NDK directory " + ndkDirectory + " can not " + "contain both properties files. Remove source.properties or RELEASE.TXT.");
    } else if (newNdkPathFound) {
        Properties sourceProperties = new Properties();
        try (FileInputStream fileStream = new FileInputStream(newNdk.toFile())) {
            sourceProperties.load(fileStream);
            return Optional.ofNullable(sourceProperties.getProperty("Pkg.Revision"));
        } catch (IOException e) {
            throw new HumanReadableException("Failed to read NDK version from " + newNdk + ".");
        }
    } else if (oldNdkPathFound) {
        try (BufferedReader reader = Files.newBufferedReader(oldNdk, Charsets.UTF_8)) {
            // Android NDK r10e for Linux is mislabeled as r10e-rc4 instead of r10e. This is a
            // work-around since we should consider them equivalent.
            return Optional.ofNullable(reader.readLine().split("\\s+")[0].replace("r10e-rc4", "r10e"));
        } catch (IOException e) {
            throw new HumanReadableException("Failed to read NDK version from " + oldNdk + ".");
        }
    } else {
        throw new HumanReadableException(ndkDirectory + " does not contain a valid properties file for Android NDK.");
    }
}
Also used : Path(java.nio.file.Path) HumanReadableException(com.facebook.buck.util.HumanReadableException) BufferedReader(java.io.BufferedReader) IOException(java.io.IOException) Properties(java.util.Properties) FileInputStream(java.io.FileInputStream)
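
For context, a minimal, self-contained sketch of the post-r11 branch above: NDK r11+ ships a source.properties file whose Pkg.Revision key carries the version string. The class name, the readNdkVersion helper, and the hard-coded file name below are illustrative assumptions standing in for buck's NDK_POST_R11_VERSION_FILENAME constant, not buck API.

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.Properties;

public class NdkVersionSketch {
    // Hypothetical stand-in for buck's NDK_POST_R11_VERSION_FILENAME constant.
    private static final String SOURCE_PROPERTIES = "source.properties";

    // Reads Pkg.Revision from ndkDir/source.properties, if present.
    static Optional<String> readNdkVersion(Path ndkDir) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream(ndkDir.resolve(SOURCE_PROPERTIES).toFile())) {
            props.load(in);
        }
        return Optional.ofNullable(props.getProperty("Pkg.Revision"));
    }

    public static void main(String[] args) throws IOException {
        readNdkVersion(Paths.get(args[0])).ifPresent(System.out::println);
    }
}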

Example 2 with Properties

use of java.util.Properties in project druid by druid-io.

the class PolyBindTest method setUp.

public void setUp(Module... modules) throws Exception {
    props = new Properties();
    injector = Guice.createInjector(Iterables.concat(Arrays.asList(new Module() {

        @Override
        public void configure(Binder binder) {
            binder.bind(Properties.class).toInstance(props);
            PolyBind.createChoice(binder, "billy", Key.get(Gogo.class), Key.get(GoA.class));
            PolyBind.createChoiceWithDefault(binder, "sally", Key.get(GogoSally.class), null, "b");
        }
    }), Arrays.asList(modules)));
}
Also used : Binder(com.google.inject.Binder) MapBinder(com.google.inject.multibindings.MapBinder) Properties(java.util.Properties) Module(com.google.inject.Module)
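
A minimal sketch of the core pattern this test sets up: binding one shared java.util.Properties instance so every injection site sees the same mutable configuration. PolyBind is druid-specific and is omitted here; the Holder class is a hypothetical injection target for illustration.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import java.util.Properties;

public class SharedPropertiesSketch {
    static class Holder {
        @Inject
        Properties props;  // receives the one shared instance bound below
    }

    public static void main(String[] args) {
        final Properties props = new Properties();
        Injector injector = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                // toInstance(...) hands Guice this exact object, so later
                // mutations of props are visible at every injection site.
                bind(Properties.class).toInstance(props);
            }
        });
        props.setProperty("billy", "a");
        Holder holder = injector.getInstance(Holder.class);
        System.out.println(holder.props.getProperty("billy"));  // prints "a"
    }
}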

Example 3 with Properties

use of java.util.Properties in project druid by druid-io.

the class KafkaLookupExtractorFactory method start.

@Override
public boolean start() {
    synchronized (started) {
        if (started.get()) {
            LOG.warn("Already started, not starting again");
            return started.get();
        }
        if (executorService.isShutdown()) {
            LOG.warn("Already shut down, not starting again");
            return false;
        }
        final Properties kafkaProperties = new Properties();
        kafkaProperties.putAll(getKafkaProperties());
        if (kafkaProperties.containsKey("group.id")) {
            throw new IAE("Cannot set kafka property [group.id]. Property is randomly generated for you. Found [%s]", kafkaProperties.getProperty("group.id"));
        }
        if (kafkaProperties.containsKey("auto.offset.reset")) {
            throw new IAE("Cannot set kafka property [auto.offset.reset]. Property will be forced to [smallest]. Found [%s]", kafkaProperties.getProperty("auto.offset.reset"));
        }
        Preconditions.checkNotNull(kafkaProperties.getProperty("zookeeper.connect"), "zookeeper.connect required property");
        kafkaProperties.setProperty("group.id", factoryId);
        final String topic = getKafkaTopic();
        LOG.debug("About to listen to topic [%s] with group.id [%s]", topic, factoryId);
        cacheHandler = cacheManager.createCache();
        final Map<String, String> map = cacheHandler.getCache();
        mapRef.set(map);
        // Enable publish-subscribe
        kafkaProperties.setProperty("auto.offset.reset", "smallest");
        final CountDownLatch startingReads = new CountDownLatch(1);
        final ListenableFuture<?> future = executorService.submit(new Runnable() {

            @Override
            public void run() {
                while (!executorService.isShutdown()) {
                    consumerConnector = buildConnector(kafkaProperties);
                    try {
                        if (executorService.isShutdown()) {
                            break;
                        }
                        final List<KafkaStream<String, String>> streams = consumerConnector.createMessageStreamsByFilter(new Whitelist(Pattern.quote(topic)), 1, DEFAULT_STRING_DECODER, DEFAULT_STRING_DECODER);
                        if (streams == null || streams.isEmpty()) {
                            throw new IAE("Topic [%s] had no streams", topic);
                        }
                        if (streams.size() > 1) {
                            throw new ISE("Topic [%s] has %d streams! expected 1", topic, streams.size());
                        }
                        final KafkaStream<String, String> kafkaStream = streams.get(0);
                        startingReads.countDown();
                        for (final MessageAndMetadata<String, String> messageAndMetadata : kafkaStream) {
                            final String key = messageAndMetadata.key();
                            final String message = messageAndMetadata.message();
                            if (key == null || message == null) {
                                LOG.error("Bad key/message from topic [%s]: [%s]", topic, messageAndMetadata);
                                continue;
                            }
                            doubleEventCount.incrementAndGet();
                            map.put(key, message);
                            doubleEventCount.incrementAndGet();
                            LOG.trace("Placed key[%s] val[%s]", key, message);
                        }
                    } catch (Exception e) {
                        LOG.error(e, "Error reading stream for topic [%s]", topic);
                    } finally {
                        consumerConnector.shutdown();
                    }
                }
            }
        });
        Futures.addCallback(future, new FutureCallback<Object>() {

            @Override
            public void onSuccess(Object result) {
                LOG.debug("Success listening to [%s]", topic);
            }

            @Override
            public void onFailure(Throwable t) {
                if (t instanceof CancellationException) {
                    LOG.debug("Topic [%s] cancelled", topic);
                } else {
                    LOG.error(t, "Error in listening to [%s]", topic);
                }
            }
        }, MoreExecutors.sameThreadExecutor());
        this.future = future;
        final Stopwatch stopwatch = Stopwatch.createStarted();
        try {
            while (!startingReads.await(100, TimeUnit.MILLISECONDS) && connectTimeout > 0L) {
                // Don't return until we have actually connected
                if (future.isDone()) {
                    future.get();
                } else {
                    if (stopwatch.elapsed(TimeUnit.MILLISECONDS) > connectTimeout) {
                        throw new TimeoutException("Failed to connect to kafka in sufficient time");
                    }
                }
            }
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            executorService.shutdown();
            if (!future.isDone() && !future.cancel(false)) {
                LOG.warn("Could not cancel kafka listening thread");
            }
            LOG.error(e, "Failed to start kafka extraction factory");
            cacheHandler.close();
            return false;
        }
        started.set(true);
        return true;
    }
}
Also used : MessageAndMetadata(kafka.message.MessageAndMetadata) Stopwatch(com.google.common.base.Stopwatch) KafkaStream(kafka.consumer.KafkaStream) Properties(java.util.Properties) IAE(io.druid.java.util.common.IAE) CountDownLatch(java.util.concurrent.CountDownLatch) TimeoutException(java.util.concurrent.TimeoutException) CancellationException(java.util.concurrent.CancellationException) ExecutionException(java.util.concurrent.ExecutionException) Whitelist(kafka.consumer.Whitelist) List(java.util.List) ISE(io.druid.java.util.common.ISE)
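
Before the consumer thread is launched, start() normalizes the user-supplied properties: it rejects keys the factory must own, requires zookeeper.connect, and then forces its own values. A minimal sketch of just that normalization step, with buildConsumerProperties as a hypothetical helper name and a random UUID standing in for factoryId:

import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.UUID;

public class KafkaPropsSketch {
    static Properties buildConsumerProperties(Map<String, String> userProps) {
        Properties props = new Properties();
        props.putAll(userProps);
        // group.id and auto.offset.reset are managed by the factory, so
        // caller-supplied values are errors rather than silently overwritten.
        if (props.containsKey("group.id") || props.containsKey("auto.offset.reset")) {
            throw new IllegalArgumentException(
                "group.id and auto.offset.reset are set internally and may not be supplied");
        }
        Objects.requireNonNull(props.getProperty("zookeeper.connect"),
            "zookeeper.connect required property");
        props.setProperty("group.id", UUID.randomUUID().toString());  // unique per factory
        props.setProperty("auto.offset.reset", "smallest");  // replay topic from the start
        return props;
    }
}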

Example 4 with Properties

use of java.util.Properties in project druid by druid-io.

the class KafkaLookupExtractorFactoryTest method testStartStop.

@Test
public void testStartStop() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), EasyMock.eq(DEFAULT_STRING_DECODER), EasyMock.eq(DEFAULT_STRING_DECODER))).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall();
    final AtomicBoolean threadWasInterrupted = new AtomicBoolean(false);
    consumerConnector.shutdown();
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            threadWasInterrupted.set(Thread.currentThread().isInterrupted());
            return null;
        }
    }).times(2);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(cacheManager, TOPIC, ImmutableMap.of("zookeeper.connect", "localhost"), 10_000L, false) {

        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertTrue(factory.getFuture().isDone());
    Assert.assertFalse(threadWasInterrupted.get());
    PowerMock.verify(cacheManager, cacheHandler);
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) IAnswer(org.easymock.IAnswer) ConsumerConnector(kafka.javaapi.consumer.ConsumerConnector) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Properties(java.util.Properties) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
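
The test relies on a getBlockingAnswer() helper that is not shown in this excerpt. A plausible sketch, assuming it simply parks the mocked iterator so the reader thread stays alive until shutdown:

import java.util.concurrent.CountDownLatch;
import org.easymock.IAnswer;

public class BlockingAnswerSketch {
    // hasNext() never returns normally: the latch is never counted down, so the
    // mocked consumer blocks like a real one waiting on an empty topic.
    static IAnswer<Boolean> getBlockingAnswer() {
        return new IAnswer<Boolean>() {
            @Override
            public Boolean answer() throws Throwable {
                new CountDownLatch(1).await();
                return false;
            }
        };
    }
}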

Example 5 with Properties

use of java.util.Properties in project druid by druid-io.

the class KafkaLookupExtractorFactoryTest method testStartStopStart.

@Test
public void testStartStopStart() {
    final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
    final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
    final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
    EasyMock.expect(consumerConnector.createMessageStreamsByFilter(EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), EasyMock.eq(DEFAULT_STRING_DECODER), EasyMock.eq(DEFAULT_STRING_DECODER))).andReturn(ImmutableList.of(kafkaStream)).once();
    EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
    EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
    EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
    EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
    cacheHandler.close();
    EasyMock.expectLastCall().once();
    consumerConnector.shutdown();
    EasyMock.expectLastCall().times(2);
    PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
    final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(cacheManager, TOPIC, ImmutableMap.of("zookeeper.connect", "localhost")) {

        @Override
        ConsumerConnector buildConnector(Properties properties) {
            return consumerConnector;
        }
    };
    Assert.assertTrue(factory.start());
    Assert.assertTrue(factory.close());
    Assert.assertFalse(factory.start());
    PowerMock.verify(cacheManager, cacheHandler);
}
Also used : ConsumerConnector(kafka.javaapi.consumer.ConsumerConnector) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Properties(java.util.Properties) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
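
Both tests pin down the factory's lifecycle contract: start() is idempotent while running, close() shuts the executor down without interrupting the reader, and a closed factory refuses to start again. A rough sketch of that state machine, assuming an AtomicBoolean flag plus the executor's shutdown state are the only bookkeeping needed:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;

public class LifecycleSketch {
    private final AtomicBoolean started = new AtomicBoolean(false);
    private final ExecutorService executor = Executors.newSingleThreadExecutor();

    public synchronized boolean start() {
        if (started.get()) {
            return true;   // already running: succeed without starting twice
        }
        if (executor.isShutdown()) {
            return false;  // once closed, the factory never restarts
        }
        executor.submit(() -> { /* consume until the executor shuts down */ });
        started.set(true);
        return true;
    }

    public synchronized boolean close() {
        executor.shutdown();  // no interrupt: the worker drains and exits cleanly
        return true;
    }
}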

Aggregations

Properties (java.util.Properties): 23110
Test (org.junit.Test): 6757
IOException (java.io.IOException): 3338
File (java.io.File): 2522
Connection (java.sql.Connection): 1795
InputStream (java.io.InputStream): 1685
FileInputStream (java.io.FileInputStream): 1561
HashMap (java.util.HashMap): 1316
ResultSet (java.sql.ResultSet): 1233
PreparedStatement (java.sql.PreparedStatement): 1146
ArrayList (java.util.ArrayList): 1095
Map (java.util.Map): 1003
ConfigurationProperties (org.apache.geode.distributed.ConfigurationProperties): 819
Before (org.junit.Before): 760
SQLException (java.sql.SQLException): 525
List (java.util.List): 502
FileOutputStream (java.io.FileOutputStream): 495
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 488
Test (org.junit.jupiter.api.Test): 465
URL (java.net.URL): 428