use of java.util.Properties in project buck by facebook.
the class DefaultAndroidDirectoryResolver method findNdkVersionFromDirectory.
/**
 * Returns the NDK version found in the given directory.
 *
 * @param ndkDirectory Path to the folder that contains the NDK.
 * @return A string containing the NDK version, or absent if no version file is present.
 */
public static Optional<String> findNdkVersionFromDirectory(Path ndkDirectory) {
  Path newNdk = ndkDirectory.resolve(NDK_POST_R11_VERSION_FILENAME);
  Path oldNdk = ndkDirectory.resolve(NDK_PRE_R11_VERSION_FILENAME);
  boolean newNdkPathFound = Files.exists(newNdk);
  boolean oldNdkPathFound = Files.exists(oldNdk);
  if (newNdkPathFound && oldNdkPathFound) {
    throw new HumanReadableException(
        "Android NDK directory " + ndkDirectory + " cannot contain both properties files. "
            + "Remove source.properties or RELEASE.TXT.");
  } else if (newNdkPathFound) {
    // NDK r11 and newer record the version in source.properties under the key Pkg.Revision.
    Properties sourceProperties = new Properties();
    try (FileInputStream fileStream = new FileInputStream(newNdk.toFile())) {
      sourceProperties.load(fileStream);
      return Optional.ofNullable(sourceProperties.getProperty("Pkg.Revision"));
    } catch (IOException e) {
      throw new HumanReadableException("Failed to read NDK version from " + newNdk + ".");
    }
  } else if (oldNdkPathFound) {
    try (BufferedReader reader = Files.newBufferedReader(oldNdk, Charsets.UTF_8)) {
      // Pre-r11 NDKs store the version as the first token of RELEASE.TXT. The r10e release
      // labels itself r10e-rc4, so map that label back to r10e since we should consider
      // them equivalent.
      return Optional.ofNullable(reader.readLine().split("\\s+")[0].replace("r10e-rc4", "r10e"));
    } catch (IOException e) {
      throw new HumanReadableException("Failed to read NDK version from " + oldNdk + ".");
    }
  } else {
    throw new HumanReadableException(
        ndkDirectory + " does not contain a valid properties file for Android NDK.");
  }
}
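Below is a minimal, self-contained sketch of the same Properties-based lookup against a fabricated NDK directory. The temp directory and the revision value are hypothetical, and Buck's constants and HumanReadableException are replaced with plain JDK types; only the load/getProperty pattern is taken from the method above.
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;
import java.util.Properties;

public class NdkVersionSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical setup: a temp directory standing in for an NDK r11+ install.
    Path ndkDir = Files.createTempDirectory("fake-ndk");
    Path sourceProps = ndkDir.resolve("source.properties");
    Files.write(
        sourceProps,
        "Pkg.Desc = Android NDK\nPkg.Revision = 13.1.3345770\n".getBytes(StandardCharsets.UTF_8));

    // Same pattern as findNdkVersionFromDirectory: load the file into a
    // Properties object and read the Pkg.Revision key.
    Properties properties = new Properties();
    try (FileInputStream stream = new FileInputStream(sourceProps.toFile())) {
      properties.load(stream);
    }
    Optional<String> version = Optional.ofNullable(properties.getProperty("Pkg.Revision"));
    System.out.println(version.orElse("<absent>")); // prints 13.1.3345770
  }
}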
use of java.util.Properties in project druid by druid-io.
the class PolyBindTest method setUp.
public void setUp(Module... modules) throws Exception {
  props = new Properties();
  injector = Guice.createInjector(
      Iterables.concat(
          Arrays.asList(
              new Module() {
                @Override
                public void configure(Binder binder) {
                  // Expose the shared Properties instance so PolyBind can read
                  // runtime choices from it.
                  binder.bind(Properties.class).toInstance(props);
                  PolyBind.createChoice(binder, "billy", Key.get(Gogo.class), Key.get(GoA.class));
                  PolyBind.createChoiceWithDefault(
                      binder, "sally", Key.get(GogoSally.class), null, "b");
                }
              }),
          Arrays.asList(modules)));
}
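For context, here is a hedged sketch of how a test built on this setUp might register options and flip the "billy" choice through the shared Properties instance. It is meant to live inside PolyBindTest; GoB and the option names "a"/"b" are assumptions modeled on the rest of that test class, not shown in this excerpt.
// Register concrete options for the "billy" choice, then select one by
// mutating the same Properties instance the injector holds.
setUp(
    new Module() {
      @Override
      public void configure(Binder binder) {
        final MapBinder<String, Gogo> gogoBinder =
            PolyBind.optionBinder(binder, Key.get(Gogo.class));
        gogoBinder.addBinding("a").to(GoA.class);
        gogoBinder.addBinding("b").to(GoB.class);
      }
    });
props.setProperty("billy", "a");
injector.getInstance(Gogo.class).go(); // resolves to the GoA binding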
use of java.util.Properties in project druid by druid-io.
the class KafkaLookupExtractorFactory method start.
@Override
public boolean start() {
  synchronized (started) {
    if (started.get()) {
      LOG.warn("Already started, not starting again");
      return started.get();
    }
    if (executorService.isShutdown()) {
      LOG.warn("Already shut down, not starting again");
      return false;
    }
    final Properties kafkaProperties = new Properties();
    kafkaProperties.putAll(getKafkaProperties());
    if (kafkaProperties.containsKey("group.id")) {
      throw new IAE(
          "Cannot set kafka property [group.id]. Property is randomly generated for you. Found [%s]",
          kafkaProperties.getProperty("group.id"));
    }
    if (kafkaProperties.containsKey("auto.offset.reset")) {
      throw new IAE(
          "Cannot set kafka property [auto.offset.reset]. Property will be forced to [smallest]. Found [%s]",
          kafkaProperties.getProperty("auto.offset.reset"));
    }
    Preconditions.checkNotNull(
        kafkaProperties.getProperty("zookeeper.connect"), "zookeeper.connect required property");
    kafkaProperties.setProperty("group.id", factoryId);
    final String topic = getKafkaTopic();
    LOG.debug("About to listen to topic [%s] with group.id [%s]", topic, factoryId);
    cacheHandler = cacheManager.createCache();
    final Map<String, String> map = cacheHandler.getCache();
    mapRef.set(map);
    // Enable publish-subscribe
    kafkaProperties.setProperty("auto.offset.reset", "smallest");
    final CountDownLatch startingReads = new CountDownLatch(1);
    final ListenableFuture<?> future = executorService.submit(new Runnable() {
      @Override
      public void run() {
        while (!executorService.isShutdown()) {
          consumerConnector = buildConnector(kafkaProperties);
          try {
            if (executorService.isShutdown()) {
              break;
            }
            final List<KafkaStream<String, String>> streams =
                consumerConnector.createMessageStreamsByFilter(
                    new Whitelist(Pattern.quote(topic)),
                    1,
                    DEFAULT_STRING_DECODER,
                    DEFAULT_STRING_DECODER);
            if (streams == null || streams.isEmpty()) {
              throw new IAE("Topic [%s] had no streams", topic);
            }
            if (streams.size() > 1) {
              throw new ISE("Topic [%s] has %d streams! expected 1", topic, streams.size());
            }
            final KafkaStream<String, String> kafkaStream = streams.get(0);
            startingReads.countDown();
            for (final MessageAndMetadata<String, String> messageAndMetadata : kafkaStream) {
              final String key = messageAndMetadata.key();
              final String message = messageAndMetadata.message();
              if (key == null || message == null) {
                LOG.error("Bad key/message from topic [%s]: [%s]", topic, messageAndMetadata);
                continue;
              }
              doubleEventCount.incrementAndGet();
              map.put(key, message);
              doubleEventCount.incrementAndGet();
              LOG.trace("Placed key[%s] val[%s]", key, message);
            }
          } catch (Exception e) {
            LOG.error(e, "Error reading stream for topic [%s]", topic);
          } finally {
            consumerConnector.shutdown();
          }
        }
      }
    });
    Futures.addCallback(
        future,
        new FutureCallback<Object>() {
          @Override
          public void onSuccess(Object result) {
            LOG.debug("Success listening to [%s]", topic);
          }

          @Override
          public void onFailure(Throwable t) {
            if (t instanceof CancellationException) {
              LOG.debug("Topic [%s] cancelled", topic);
            } else {
              LOG.error(t, "Error in listening to [%s]", topic);
            }
          }
        },
        MoreExecutors.sameThreadExecutor());
    this.future = future;
    final Stopwatch stopwatch = Stopwatch.createStarted();
    try {
      // Don't return until we have actually connected
      while (!startingReads.await(100, TimeUnit.MILLISECONDS) && connectTimeout > 0L) {
        if (future.isDone()) {
          future.get();
        } else if (stopwatch.elapsed(TimeUnit.MILLISECONDS) > connectTimeout) {
          throw new TimeoutException("Failed to connect to kafka in sufficient time");
        }
      }
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
      executorService.shutdown();
      if (!future.isDone() && !future.cancel(false)) {
        LOG.warn("Could not cancel kafka listening thread");
      }
      LOG.error(e, "Failed to start kafka extraction factory");
      cacheHandler.close();
      return false;
    }
    started.set(true);
    return true;
  }
}
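The Properties handling at the top of start() is worth isolating: the factory copies the user-supplied map into a fresh Properties object, rejects keys it must control, and only then forces its own values. A minimal standalone sketch of that validate-then-override pattern follows; the class, method name, and exception messages are illustrative, not the factory's actual API.
import java.util.Map;
import java.util.Properties;
import java.util.UUID;

final class KafkaPropsSketch {
  // Illustrative helper mirroring the validate-then-override pattern above.
  static Properties buildConsumerProperties(Map<String, String> userProperties) {
    final Properties kafkaProperties = new Properties();
    kafkaProperties.putAll(userProperties);
    // Reserved keys: the factory owns these, so user-supplied values are an error.
    if (kafkaProperties.containsKey("group.id")) {
      throw new IllegalArgumentException("group.id is generated for you");
    }
    if (kafkaProperties.containsKey("auto.offset.reset")) {
      throw new IllegalArgumentException("auto.offset.reset is forced to smallest");
    }
    if (kafkaProperties.getProperty("zookeeper.connect") == null) {
      throw new NullPointerException("zookeeper.connect required property");
    }
    // Force the values the lookup needs: a unique consumer group so each
    // factory instance sees the whole topic, and reads from the beginning.
    kafkaProperties.setProperty("group.id", UUID.randomUUID().toString());
    kafkaProperties.setProperty("auto.offset.reset", "smallest");
    return kafkaProperties;
  }
}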
use of java.util.Properties in project druid by druid-io.
the class KafkaLookupExtractorFactoryTest method testStartStop.
@Test
public void testStartStop() {
  final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
  final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
  final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
  EasyMock.expect(
      consumerConnector.createMessageStreamsByFilter(
          EasyMock.anyObject(TopicFilter.class),
          EasyMock.anyInt(),
          EasyMock.eq(DEFAULT_STRING_DECODER),
          EasyMock.eq(DEFAULT_STRING_DECODER)))
      .andReturn(ImmutableList.of(kafkaStream))
      .once();
  EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
  EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
  EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
  EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
  cacheHandler.close();
  EasyMock.expectLastCall();
  final AtomicBoolean threadWasInterrupted = new AtomicBoolean(false);
  consumerConnector.shutdown();
  EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {
    @Override
    public Object answer() throws Throwable {
      threadWasInterrupted.set(Thread.currentThread().isInterrupted());
      return null;
    }
  }).times(2);
  PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
  final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(
      cacheManager,
      TOPIC,
      ImmutableMap.of("zookeeper.connect", "localhost"),
      10_000L,
      false
  ) {
    @Override
    ConsumerConnector buildConnector(Properties properties) {
      return consumerConnector;
    }
  };
  Assert.assertTrue(factory.start());
  Assert.assertTrue(factory.close());
  Assert.assertTrue(factory.getFuture().isDone());
  Assert.assertFalse(threadWasInterrupted.get());
  PowerMock.verify(cacheManager, cacheHandler);
}
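Both tests lean on a getBlockingAnswer() helper that this excerpt does not show. Judging from its use with consumerIterator.hasNext(), it parks the reader thread so no messages are ever consumed and the test exercises only the start/stop lifecycle. A plausible sketch of such a helper, written as an assumption rather than the project's actual code:
private IAnswer<Boolean> getBlockingAnswer() {
  return new IAnswer<Boolean>() {
    @Override
    public Boolean answer() throws Throwable {
      // Block the consumer thread so hasNext() never yields a message; the
      // sleep is interrupted when the factory shuts the reader down.
      Thread.sleep(60_000);
      Assert.fail("Test failed to complete within 60s");
      return false;
    }
  };
}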
use of java.util.Properties in project druid by druid-io.
the class KafkaLookupExtractorFactoryTest method testStartStopStart.
@Test
public void testStartStopStart() {
  final KafkaStream<String, String> kafkaStream = PowerMock.createStrictMock(KafkaStream.class);
  final ConsumerIterator<String, String> consumerIterator = PowerMock.createStrictMock(ConsumerIterator.class);
  final ConsumerConnector consumerConnector = PowerMock.createStrictMock(ConsumerConnector.class);
  EasyMock.expect(
      consumerConnector.createMessageStreamsByFilter(
          EasyMock.anyObject(TopicFilter.class),
          EasyMock.anyInt(),
          EasyMock.eq(DEFAULT_STRING_DECODER),
          EasyMock.eq(DEFAULT_STRING_DECODER)))
      .andReturn(ImmutableList.of(kafkaStream))
      .once();
  EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
  EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
  EasyMock.expect(cacheManager.createCache()).andReturn(cacheHandler).once();
  EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
  cacheHandler.close();
  EasyMock.expectLastCall().once();
  consumerConnector.shutdown();
  EasyMock.expectLastCall().times(2);
  PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
  final KafkaLookupExtractorFactory factory = new KafkaLookupExtractorFactory(
      cacheManager,
      TOPIC,
      ImmutableMap.of("zookeeper.connect", "localhost")
  ) {
    @Override
    ConsumerConnector buildConnector(Properties properties) {
      return consumerConnector;
    }
  };
  Assert.assertTrue(factory.start());
  Assert.assertTrue(factory.close());
  Assert.assertFalse(factory.start());
  PowerMock.verify(cacheManager, cacheHandler);
}
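The final assertion pins down the lifecycle contract of start() shown earlier: close() shuts down the executor, so the second start() call hits the executorService.isShutdown() guard and returns false. A factory instance therefore cannot be restarted once closed; callers need a new instance instead.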