Use of org.apache.druid.java.util.common.lifecycle.Lifecycle in the project druid by druid-io.
The class HttpClientInit, method createClient:
// Builds a Netty-backed HttpClient whose resources are tied to the given Lifecycle:
// the wheel timer is started/stopped with the lifecycle, and the client itself is
// registered as a lifecycle-managed instance.
public static HttpClient createClient(HttpClientConfig config, Lifecycle lifecycle) {
try {
// We need to use the full constructor in order to set a ThreadNameDeterminer. The other parameters are taken
// from the defaults in HashedWheelTimer's other constructors.
final HashedWheelTimer timer = new HashedWheelTimer(new ThreadFactoryBuilder().setDaemon(true).setNameFormat("HttpClient-Timer-%s").build(), ThreadNameDeterminer.CURRENT, 100, TimeUnit.MILLISECONDS, 512);
// Tie the timer to the lifecycle so it is started on lifecycle start and stopped on
// lifecycle stop. NOTE(review): the "MaybeStart" name suggests the handler is started
// immediately if the lifecycle is already running — confirm against Lifecycle's javadoc.
lifecycle.addMaybeStartHandler(new Lifecycle.Handler() {
@Override
public void start() {
timer.start();
}
@Override
public void stop() {
timer.stop();
}
});
// Wire up the client: a ChannelResourceFactory creates connections (bootstrap, SSL context,
// proxy config), pooled per ResourcePoolConfig. A null ssl handshake timeout is mapped to -1,
// presumably meaning "no timeout" to ChannelResourceFactory — verify against its constructor.
return lifecycle.addMaybeStartManagedInstance(new NettyHttpClient(new ResourcePool<>(new ChannelResourceFactory(createBootstrap(lifecycle, timer, config.getBossPoolSize(), config.getWorkerPoolSize()), config.getSslContext(), config.getProxyConfig(), timer, config.getSslHandshakeTimeout() == null ? -1 : config.getSslHandshakeTimeout().getMillis()), new ResourcePoolConfig(config.getNumConnections(), config.getUnusedConnectionTimeoutDuration().getMillis())), config.getReadTimeout(), config.getCompressionCodec(), timer));
} catch (Exception e) {
// Lifecycle registration can throw checked exceptions; surface them unchecked with the
// original cause preserved.
throw new RuntimeException(e);
}
}
Use of org.apache.druid.java.util.common.lifecycle.Lifecycle in the project druid by druid-io.
The class LifecycleScopeTest, method testAnnotatedAndExplicit:
/**
 * This is a test for documentation purposes. It's there to show what weird things Guice will do when
 * it sees both the annotation and an explicit binding: the lifecycle ends up starting (and stopping)
 * the singleton twice, as the assertions on {@code getStarted()} / {@code getStopped()} below show.
 *
 * @throws Exception if the lifecycle fails to start or stop
 */
@Test
public void testAnnotatedAndExplicit() throws Exception {
final Injector injector = Guice.createInjector(new DruidGuiceExtensions(), new LifecycleModule(), new Module() {
@Override
public void configure(Binder binder) {
// Explicit scope binding, on top of whatever annotation AnnotatedClass already carries.
binder.bind(TestInterface.class).to(AnnotatedClass.class).in(ManageLifecycle.class);
}
});
final Lifecycle lifecycle = injector.getInstance(Lifecycle.class);
final TestInterface instance = injector.getInstance(TestInterface.class);
// Nothing has been started or run yet.
Assert.assertEquals(0, instance.getStarted());
Assert.assertEquals(0, instance.getStopped());
Assert.assertEquals(0, instance.getRan());
instance.run();
// run() does not trigger lifecycle start/stop.
Assert.assertEquals(0, instance.getStarted());
Assert.assertEquals(0, instance.getStopped());
Assert.assertEquals(1, instance.getRan());
lifecycle.start();
// The "weird" part: started is 2, not 1 — the annotation and the explicit binding
// apparently each register the instance with the lifecycle.
Assert.assertEquals(2, instance.getStarted());
Assert.assertEquals(0, instance.getStopped());
Assert.assertEquals(1, instance.getRan());
// It's a singleton
injector.getInstance(TestInterface.class).run();
Assert.assertEquals(2, instance.getStarted());
Assert.assertEquals(0, instance.getStopped());
Assert.assertEquals(2, instance.getRan());
lifecycle.stop();
// Stop mirrors start: the double registration stops the instance twice as well.
Assert.assertEquals(2, instance.getStarted());
Assert.assertEquals(2, instance.getStopped());
Assert.assertEquals(2, instance.getRan());
}
Use of org.apache.druid.java.util.common.lifecycle.Lifecycle in the project druid by druid-io.
The class UriCacheGeneratorTest, method getParameters:
// Supplies the parameterized-test matrix: every (file suffix, output-stream factory)
// compression variant crossed with every cache-manager creator (on-heap and off-heap).
// The cross product is built eagerly with two nested loops instead of the previous
// hand-rolled lazy Iterator, which relied on mutable captured state and recursion.
@Parameterized.Parameters(name = "{0}")
public static Iterable<Object[]> getParameters() {
// Each entry: { file suffix, factory producing the OutputStream to write test data with }.
final List<Object[]> compressionParams = ImmutableList.of(new Object[] { ".dat", new Function<File, OutputStream>() {
@Nullable
@Override
public OutputStream apply(@Nullable File outFile) {
try {
// Plain, uncompressed output.
return new FileOutputStream(outFile);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
} }, new Object[] { ".gz", new Function<File, OutputStream>() {
@Nullable
@Override
public OutputStream apply(@Nullable File outFile) {
try {
final FileOutputStream fos = new FileOutputStream(outFile);
// GZIP stream whose close() also closes the underlying file stream,
// even if the GZIP trailer write fails.
return new GZIPOutputStream(fos) {
@Override
public void close() throws IOException {
try {
super.close();
} finally {
fos.close();
}
}
};
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
} });
// One creator per NamespaceExtractionCacheManager implementation under test.
final List<Function<Lifecycle, NamespaceExtractionCacheManager>> cacheManagerCreators = ImmutableList.of(new Function<Lifecycle, NamespaceExtractionCacheManager>() {
@Override
public NamespaceExtractionCacheManager apply(Lifecycle lifecycle) {
return new OnHeapNamespaceExtractionCacheManager(lifecycle, new NoopServiceEmitter(), new NamespaceExtractionConfig());
}
}, new Function<Lifecycle, NamespaceExtractionCacheManager>() {
@Override
public NamespaceExtractionCacheManager apply(Lifecycle lifecycle) {
return new OffHeapNamespaceExtractionCacheManager(lifecycle, new NoopServiceEmitter(), new NamespaceExtractionConfig());
}
});
// Cross product, compression variant outer / cache-manager creator inner — the same
// element order the previous custom iterator produced.
final ImmutableList.Builder<Object[]> params = ImmutableList.builder();
for (Object[] compression : compressionParams) {
for (Function<Lifecycle, NamespaceExtractionCacheManager> cacheManagerCreator : cacheManagerCreators) {
params.add(new Object[] { compression[0], compression[1], cacheManagerCreator });
}
}
return params.build();
}
Use of org.apache.druid.java.util.common.lifecycle.Lifecycle in the project druid by druid-io.
The class StaticMapCacheGeneratorTest, method setup:
// Per-test fixture: a freshly started Lifecycle and a CacheScheduler backed by an
// on-heap cache manager with no registered generators.
@Before
public void setup() throws Exception {
lifecycle = new Lifecycle();
lifecycle.start();
final NoopServiceEmitter emitter = new NoopServiceEmitter();
scheduler = new CacheScheduler(
    emitter,
    Collections.emptyMap(),
    new OnHeapNamespaceExtractionCacheManager(lifecycle, emitter, new NamespaceExtractionConfig())
);
}
Use of org.apache.druid.java.util.common.lifecycle.Lifecycle in the project druid by druid-io.
The class CacheSchedulerTest, method setUp:
// Per-test fixture: a started Lifecycle, the parameterized cache manager, and a
// CacheScheduler whose URI generator fabricates cache contents in memory rather
// than reading the (deliberately unparseable) temp file from disk.
@Before
public void setUp() throws Exception {
lifecycle = new Lifecycle();
lifecycle.start();
cacheManager = createCacheManager.apply(lifecycle);
final Path cacheDir = temporaryFolder.newFolder().toPath();
final CacheGenerator<UriExtractionNamespace> generator = new CacheGenerator<UriExtractionNamespace>() {
@Override
public CacheScheduler.VersionedCache generateCache(
    final UriExtractionNamespace extractionNamespace,
    final CacheScheduler.EntryImpl<UriExtractionNamespace> id,
    final String lastVersion,
    final CacheScheduler scheduler
) throws InterruptedException {
// Sleep briefly so each generated version (currentTimeMillis) is unique.
Thread.sleep(2);
final String newVersion = Long.toString(System.currentTimeMillis());
final CacheScheduler.VersionedCache generated = scheduler.createVersionedCache(id, newVersion);
// Populate directly in memory instead of reading off disk (CI disk flakiness).
generated.getCache().put(KEY, VALUE);
return generated;
}
};
scheduler = new CacheScheduler(new NoopServiceEmitter(), ImmutableMap.of(UriExtractionNamespace.class, generator), cacheManager);
tmpFile = Files.createTempFile(cacheDir, "druidTestURIExtractionNS", ".dat").toFile();
// Resources close in reverse order (writer, then stream), same as the nested form.
try (
    OutputStream fileStream = new FileOutputStream(tmpFile);
    OutputStreamWriter writer = new OutputStreamWriter(fileStream, StandardCharsets.UTF_8)
) {
// The generator above never touches this file; write junk so any accidental
// attempt to read/parse it fails loudly.
writer.write("SHOULDN'T TRY TO PARSE");
writer.flush();
}
}
Aggregations