Use of org.apache.druid.query.DruidProcessingConfig in project druid by druid-io.
The class BrokerProcessingModuleTest, method testMemoryCheckThrowsException.
@Test(expected = ProvisionException.class)
public void testMemoryCheckThrowsException()
{
  // getDirectMemorySizeBytes() throws on JVMs that cannot report the limit,
  // so this test only validates functionality for Java 8.
  try {
    JvmUtils.getRuntimeInfo().getDirectMemorySizeBytes();
  }
  catch (UnsupportedOperationException e) {
    Assume.assumeNoException(e);
  }
  BrokerProcessingModule module = new BrokerProcessingModule();
  module.getMergeBufferPool(new DruidProcessingConfig()
  {
    @Override
    public String getFormatString()
    {
      return "test";
    }

    @Override
    public int intermediateComputeSizeBytes()
    {
      return Integer.MAX_VALUE;
    }
  });
}
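Both this test and the next expect a ProvisionException because the processing modules validate the configured buffers against the JVM's direct memory limit before building a pool, and Integer.MAX_VALUE bytes per buffer cannot fit. A minimal sketch of that kind of check, assuming Druid's documented sizing rule that sizeBytes * (numMergeBuffers + numThreads + 1) must not exceed -XX:MaxDirectMemorySize; the helper name checkDirectMemory is illustrative, not Druid API:

import com.google.inject.ProvisionException;
import org.apache.druid.query.DruidProcessingConfig;

// Hypothetical standalone sketch of the direct-memory validation the
// processing modules are expected to perform before creating a pool.
static void checkDirectMemory(DruidProcessingConfig config, long maxDirectMemory)
{
  // Assumption: one buffer per processing thread, one per merge buffer, plus one spare.
  long memoryNeeded = (long) config.intermediateComputeSizeBytes()
      * (config.getNumMergeBuffers() + config.getNumThreads() + 1);
  if (maxDirectMemory < memoryNeeded) {
    throw new ProvisionException(
        "Not enough direct memory: needed " + memoryNeeded
        + " bytes but only " + maxDirectMemory + " are available."
    );
  }
}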
Use of org.apache.druid.query.DruidProcessingConfig in project druid by druid-io.
The class DruidProcessingModuleTest, method testMemoryCheckThrowsException.
@Test(expected = ProvisionException.class)
public void testMemoryCheckThrowsException()
{
  // getDirectMemorySizeBytes() throws on JVMs that cannot report the limit,
  // so this test only validates functionality for Java 8.
  try {
    JvmUtils.getRuntimeInfo().getDirectMemorySizeBytes();
  }
  catch (UnsupportedOperationException e) {
    Assume.assumeNoException(e);
  }
  DruidProcessingModule module = new DruidProcessingModule();
  module.getIntermediateResultsPool(new DruidProcessingConfig()
  {
    @Override
    public String getFormatString()
    {
      return "test";
    }

    @Override
    public int intermediateComputeSizeBytes()
    {
      return Integer.MAX_VALUE;
    }
  });
}
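The guard at the top of both tests exists because JvmUtils.getRuntimeInfo().getDirectMemorySizeBytes() throws UnsupportedOperationException on JVMs that cannot report the limit, and Assume.assumeNoException turns that into a skipped test rather than a failure. The same pattern can be wrapped as a reusable helper; this OptionalLong-returning variant is a sketch, not Druid API:

import java.util.OptionalLong;
import org.apache.druid.utils.JvmUtils;

// Hypothetical helper: returns the JVM's direct memory limit when the
// runtime can report it, and an empty OptionalLong otherwise.
static OptionalLong tryGetDirectMemorySizeBytes()
{
  try {
    return OptionalLong.of(JvmUtils.getRuntimeInfo().getDirectMemorySizeBytes());
  }
  catch (UnsupportedOperationException e) {
    return OptionalLong.empty();
  }
}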
Use of org.apache.druid.query.DruidProcessingConfig in project druid by druid-io.
The class DruidProcessingModuleTest, method testMemoryCheckIsChillByDefaultIfNothingSet.
@Test
public void testMemoryCheckIsChillByDefaultIfNothingSet()
{
  DruidProcessingConfig config = new DruidProcessingConfig()
  {
    @Override
    public String getFormatString()
    {
      return "processing-test-%s";
    }
  };
  DruidProcessingModule module = new DruidProcessingModule();
  config.getNumInitalBuffersForIntermediatePool();
  module.getIntermediateResultsPool(config);
}
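With nothing overridden except the format string, pool creation succeeds: when intermediateComputeSizeBytes is left at its default, the module does not insist on a known direct memory limit and falls back to automatic buffer sizing. A sketch of that style of auto-sizing, assuming the budget is divided across processing threads and merge buffers and capped at 1 GiB; the constants and helper name are assumptions, not Druid's exact defaults:

// Hypothetical "auto" buffer sizing: split the direct memory budget across
// threads and merge buffers (plus one spare), capped at 1 GiB per buffer.
static int autoBufferSizeBytes(long maxDirectMemory, int numThreads, int numMergeBuffers)
{
  long perBuffer = maxDirectMemory / (numThreads + numMergeBuffers + 1);
  return (int) Math.min(perBuffer, 1024L * 1024 * 1024);
}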
Use of org.apache.druid.query.DruidProcessingConfig in project druid by druid-io.
The class DefaultOfflineAppenderatorFactoryTest, method testBuild.
@Test
public void testBuild() throws IOException, SegmentNotWritableException
{
  Injector injector = Initialization.makeInjectorWithModules(
      GuiceInjectors.makeStartupInjector(),
      ImmutableList.<Module>of(new Module()
      {
        @Override
        public void configure(Binder binder)
        {
          binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/tool");
          binder.bindConstant().annotatedWith(Names.named("servicePort")).to(9999);
          binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);
          binder.bind(DruidProcessingConfig.class).toInstance(new DruidProcessingConfig()
          {
            @Override
            public String getFormatString()
            {
              return "processing-%s";
            }

            @Override
            public int intermediateComputeSizeBytes()
            {
              return 100 * 1024 * 1024;
            }

            @Override
            public int getNumThreads()
            {
              return 1;
            }

            @Override
            public int columnCacheSizeBytes()
            {
              return 25 * 1024 * 1024;
            }
          });
          binder.bind(ColumnConfig.class).to(DruidProcessingConfig.class);
        }
      })
  );
  ObjectMapper objectMapper = injector.getInstance(ObjectMapper.class);
  AppenderatorFactory defaultOfflineAppenderatorFactory =
      objectMapper.readerFor(AppenderatorFactory.class).readValue("{\"type\":\"offline\"}");
  final Map<String, Object> parserMap = objectMapper.convertValue(
      new MapInputRowParser(
          new JSONParseSpec(new TimestampSpec("ts", "auto", null), DimensionsSpec.EMPTY, null, null, null)
      ),
      Map.class
  );
  DataSchema schema = new DataSchema(
      "dataSourceName",
      parserMap,
      new AggregatorFactory[]{
          new CountAggregatorFactory("count"),
          new LongSumAggregatorFactory("met", "met")
      },
      new UniformGranularitySpec(Granularities.MINUTE, Granularities.NONE, null),
      null,
      objectMapper
  );
  RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
      null, 75000, null, null, null, null, temporaryFolder.newFolder(),
      null, null, null, null, null, null, 0, 0, null, null, null, null, null
  );
  Appenderator appenderator = defaultOfflineAppenderatorFactory.build(schema, tuningConfig, new FireDepartmentMetrics());
  try {
    Assert.assertEquals("dataSourceName", appenderator.getDataSource());
    Assert.assertNull(appenderator.startJob());
    SegmentIdWithShardSpec identifier =
        new SegmentIdWithShardSpec("dataSourceName", Intervals.of("2000/2001"), "A", new LinearShardSpec(0));
    Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
    appenderator.add(identifier, StreamAppenderatorTest.ir("2000", "bar", 1), null);
    Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory());
    appenderator.add(identifier, StreamAppenderatorTest.ir("2000", "baz", 1), null);
    Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory());
    // close() drops the in-memory rows; the finally block repeats it so the
    // appenderator is also closed on any assertion failure above.
    appenderator.close();
    Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
  }
  finally {
    appenderator.close();
  }
}
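The StreamAppenderatorTest.ir helper used above builds one InputRow per call from a timestamp, a dimension value, and a metric value. A plausible re-creation, assuming a MapBasedInputRow with a single string dimension "dim" and a "met" column feeding the LongSum aggregator; the real helper's signature may differ:

import java.util.Arrays;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.java.util.common.DateTimes;

// Hypothetical re-creation of the ir(...) test helper.
static InputRow ir(String ts, String dim, long met)
{
  return new MapBasedInputRow(
      DateTimes.of(ts).getMillis(),
      Arrays.asList("dim"),
      ImmutableMap.of("dim", dim, "met", met)
  );
}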