Use of com.fasterxml.jackson.dataformat.smile.SmileFactory in project druid by druid-io.
In the class GroupByMultiSegmentTest, method setupGroupByFactory:
private void setupGroupByFactory() {
  executorService = Execs.multiThreaded(2, "GroupByThreadPool[%d]");
  final CloseableStupidPool<ByteBuffer> bufferPool = new CloseableStupidPool<>(
      "GroupByBenchmark-computeBufferPool", new OffheapBufferGenerator("compute", 10_000_000), 0, Integer.MAX_VALUE);
  // limit of 2 is required since we simulate both historical merge and broker merge in the same process
  final CloseableDefaultBlockingPool<ByteBuffer> mergePool =
      new CloseableDefaultBlockingPool<>(new OffheapBufferGenerator("merge", 10_000_000), 2);
  resourceCloser.register(bufferPool);
  resourceCloser.register(mergePool);
  final GroupByQueryConfig config = new GroupByQueryConfig() {
    @Override
    public String getDefaultStrategy() {
      return "v2";
    }
    @Override
    public int getBufferGrouperInitialBuckets() {
      return -1;
    }
    @Override
    public long getMaxOnDiskStorage() {
      return 1_000_000_000L;
    }
  };
  config.setSingleThreaded(false);
  config.setMaxIntermediateRows(Integer.MAX_VALUE);
  config.setMaxResults(Integer.MAX_VALUE);
  DruidProcessingConfig druidProcessingConfig = new DruidProcessingConfig() {
    @Override
    public int getNumThreads() {
      // Used by "v2" strategy for concurrencyHint
      return 2;
    }
    @Override
    public String getFormatString() {
      return null;
    }
  };
  final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
  final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
      configSupplier,
      new GroupByStrategyV1(configSupplier, new GroupByQueryEngine(configSupplier, bufferPool), NOOP_QUERYWATCHER),
      new GroupByStrategyV2(druidProcessingConfig, configSupplier, bufferPool, mergePool,
          new ObjectMapper(new SmileFactory()), NOOP_QUERYWATCHER));
  groupByFactory = new GroupByQueryRunnerFactory(strategySelector, new GroupByQueryQueryToolChest(strategySelector));
}
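The Smile-backed ObjectMapper handed to GroupByStrategyV2 above is what the v2 strategy uses to serialize intermediate result rows, which pairs with getMaxOnDiskStorage() returning 1 GB so rows can be spilled to disk in a compact binary form. A minimal standalone sketch of that round trip, not taken from the test (the method name and row contents are made up; it assumes jackson-databind, jackson-dataformat-smile, and Guava on the classpath):

static Map<String, Object> smileRoundTrip() throws IOException {
  // Same construction as the mapper passed to GroupByStrategyV2 above.
  ObjectMapper spillMapper = new ObjectMapper(new SmileFactory());
  Map<String, Object> row = ImmutableMap.of("dim", "a", "cnt", 1L);
  // Smile is a binary JSON encoding, so the bytes are compact but not human-readable.
  byte[] smileBytes = spillMapper.writeValueAsBytes(row);
  return spillMapper.readValue(smileBytes, new TypeReference<Map<String, Object>>() {});
}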
Use of com.fasterxml.jackson.dataformat.smile.SmileFactory in project druid by druid-io.
In the class NestedQueryPushDownTest, method setupGroupByFactory:
private void setupGroupByFactory() {
  executorService = Execs.multiThreaded(3, "GroupByThreadPool[%d]");
  NonBlockingPool<ByteBuffer> bufferPool = new StupidPool<>(
      "GroupByBenchmark-computeBufferPool", new OffheapBufferGenerator("compute", 10_000_000), 0, Integer.MAX_VALUE);
  // limit of 3 is required since we simulate the historical running the nested query and the broker doing the final merge
  BlockingPool<ByteBuffer> mergePool = new DefaultBlockingPool<>(new OffheapBufferGenerator("merge", 10_000_000), 10);
  // limit of 3 is required since we simulate the historical running the nested query and the broker doing the final merge
  BlockingPool<ByteBuffer> mergePool2 = new DefaultBlockingPool<>(new OffheapBufferGenerator("merge", 10_000_000), 10);
  final GroupByQueryConfig config = new GroupByQueryConfig() {
    @Override
    public String getDefaultStrategy() {
      return "v2";
    }
    @Override
    public int getBufferGrouperInitialBuckets() {
      return -1;
    }
    @Override
    public long getMaxOnDiskStorage() {
      return 1_000_000_000L;
    }
  };
  config.setSingleThreaded(false);
  config.setMaxIntermediateRows(Integer.MAX_VALUE);
  config.setMaxResults(Integer.MAX_VALUE);
  DruidProcessingConfig druidProcessingConfig = new DruidProcessingConfig() {
    @Override
    public int getNumThreads() {
      // Used by "v2" strategy for concurrencyHint
      return 2;
    }
    @Override
    public String getFormatString() {
      return null;
    }
  };
  final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
  final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
      configSupplier,
      new GroupByStrategyV1(configSupplier, new GroupByQueryEngine(configSupplier, bufferPool), NOOP_QUERYWATCHER),
      new GroupByStrategyV2(druidProcessingConfig, configSupplier, bufferPool, mergePool,
          new ObjectMapper(new SmileFactory()), NOOP_QUERYWATCHER));
  final GroupByStrategySelector strategySelector2 = new GroupByStrategySelector(
      configSupplier,
      new GroupByStrategyV1(configSupplier, new GroupByQueryEngine(configSupplier, bufferPool), NOOP_QUERYWATCHER),
      new GroupByStrategyV2(druidProcessingConfig, configSupplier, bufferPool, mergePool2,
          new ObjectMapper(new SmileFactory()), NOOP_QUERYWATCHER));
  groupByFactory = new GroupByQueryRunnerFactory(strategySelector, new GroupByQueryQueryToolChest(strategySelector));
  groupByFactory2 = new GroupByQueryRunnerFactory(strategySelector2, new GroupByQueryQueryToolChest(strategySelector2));
}
Use of com.fasterxml.jackson.dataformat.smile.SmileFactory in project druid by druid-io.
In the class JacksonModule, method smileMapper:
@Provides
@LazySingleton
@Smile
public ObjectMapper smileMapper() {
  final SmileFactory smileFactory = new SmileFactory();
  smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false);
  smileFactory.delegateToTextual(true);
  final ObjectMapper retVal = new DefaultObjectMapper(smileFactory);
  retVal.getFactory().setCodec(retVal);
  return retVal;
}
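A mapper built this way round-trips values through Smile's binary encoding just as a plain ObjectMapper does with JSON text. A minimal usage sketch (the payload is made up and not part of the module):

ObjectMapper smileMapper = smileMapper();
byte[] bytes = smileMapper.writeValueAsBytes(ImmutableMap.of("queryType", "timeseries"));
Map<String, Object> decoded = smileMapper.readValue(bytes, new TypeReference<Map<String, Object>>() {});
// These bytes are Smile-encoded, so a plain JSON-backed ObjectMapper would generally fail to parse them.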
Use of com.fasterxml.jackson.dataformat.smile.SmileFactory in project immutables by immutables.
In the class GsonJacksonBridgeSerializationTest, method smileFactoryTest:
@Test
public void smileFactoryTest() throws IOException {
  TestObject value = createTestObject();
  SmileFactory factory = new SmileFactory();
  Class<TestObject> clazz = TestObject.class;
  ByteArrayOutputStream outputStream = testWriting(value, factory, clazz);
  TestObject value2 = testReading(factory, clazz, outputStream);
  Assert.assertEquals(value2.toString(), value.toString());
}
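The testWriting and testReading helpers are not shown in this excerpt. Purely as an illustration of round-tripping an object through Smile with Jackson databind, a hypothetical pair of helpers might look like the following (these are assumptions, not the test's actual code, which, as the class name suggests, likely bridges Gson over the Jackson streaming layer):

static <T> ByteArrayOutputStream writeWithSmile(T value, SmileFactory factory) throws IOException {
  // Hypothetical helper: serialize the value to Smile bytes in memory.
  ObjectMapper mapper = new ObjectMapper(factory);
  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  mapper.writeValue(outputStream, value);
  return outputStream;
}

static <T> T readWithSmile(SmileFactory factory, Class<T> clazz, ByteArrayOutputStream outputStream) throws IOException {
  // Hypothetical helper: deserialize the value back from the captured bytes.
  ObjectMapper mapper = new ObjectMapper(factory);
  return mapper.readValue(outputStream.toByteArray(), clazz);
}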
Use of com.fasterxml.jackson.dataformat.smile.SmileFactory in project druid by druid-io.
In the class JsonIteratorTest, method testSerde:
@Test
public void testSerde() throws IOException {
  final ObjectMapper mapper = new ObjectMapper(new SmileFactory());
  List<Map<String, Object>> expectedList = ImmutableList.of(ImmutableMap.of("key1", "value1", "key2", 2));
  File testFile = File.createTempFile("testfile", "");
  TypeReference<Map<String, Object>> type = new TypeReference<Map<String, Object>>() {
  };
  try (FileOutputStream fos = new FileOutputStream(testFile)) {
    final JsonGenerator jg = mapper.getFactory().createGenerator(fos);
    jg.writeStartArray();
    for (Map<String, Object> mapFromList : expectedList) {
      jg.writeObject(mapFromList);
    }
    jg.writeEndArray();
    jg.close();
  }
  JsonIterator<Map<String, Object>> testJsonIterator =
      new JsonIterator<>(type, new FileInputStream(testFile), () -> {}, mapper);
  List<Map<String, Object>> actualList = new ArrayList<>();
  while (testJsonIterator.hasNext()) {
    actualList.add(testJsonIterator.next());
  }
  testJsonIterator.close();
  Assert.assertEquals(expectedList, actualList);
}
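For comparison, the same Smile-encoded file could be consumed with Jackson's own MappingIterator rather than Druid's JsonIterator. A hedged alternative sketch reusing mapper, testFile, and expectedList from the test above (not part of the test itself):

try (MappingIterator<Map<String, Object>> it =
    mapper.readerFor(new TypeReference<Map<String, Object>>() {}).readValues(new FileInputStream(testFile))) {
  // When MappingIterator owns the parser it should unwrap the top-level array and yield its elements.
  List<Map<String, Object>> viaMappingIterator = new ArrayList<>();
  while (it.hasNext()) {
    viaMappingIterator.add(it.next());
  }
  Assert.assertEquals(expectedList, viaMappingIterator);
}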