Use of org.apache.druid.jackson.SegmentizerModule in project druid by druid-io.
Class BroadcastSegmentIndexedTableTest, method setup.
@Before
public void setup() throws IOException, SegmentLoadingException {
  final ObjectMapper mapper = new DefaultObjectMapper();
  mapper.registerModule(new SegmentizerModule());
  final IndexIO indexIO = new IndexIO(mapper, () -> 0);
  // IndexIO is @JacksonInject-ed into the SegmentizerFactory read back from factory.json
  mapper.setInjectableValues(
      new InjectableValues.Std()
          .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
          .addValue(ObjectMapper.class.getName(), mapper)
          .addValue(IndexIO.class, indexIO)
          .addValue(DataSegment.PruneSpecsHolder.class, DataSegment.PruneSpecsHolder.DEFAULT)
  );
  final IndexMerger indexMerger = new IndexMergerV9(mapper, indexIO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
  Interval testInterval = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");
  IncrementalIndex data = TestIndex.makeRealtimeIndex("druid.sample.numeric.tsv");
  File segment = new File(temporaryFolder.newFolder(), "segment");
  // persist() writes factory.json alongside the segment files
  File persisted = indexMerger.persist(data, testInterval, segment, new IndexSpec(), null);
  File factoryJson = new File(persisted, "factory.json");
  Assert.assertTrue(factoryJson.exists());
  SegmentizerFactory factory = mapper.readValue(factoryJson, SegmentizerFactory.class);
  Assert.assertTrue(factory instanceof MMappedQueryableSegmentizerFactory);
  DataSegment dataSegment = new DataSegment(
      DATASOURCE, testInterval, DateTimes.nowUtc().toString(), ImmutableMap.of(),
      columnNames, ImmutableList.of(), null, null, segment.getTotalSpace()
  );
  backingSegment = (QueryableIndexSegment) factory.factorize(dataSegment, segment, false, SegmentLazyLoadFailCallback.NOOP);
  columnNames = ImmutableList.<String>builder()
      .add(ColumnHolder.TIME_COLUMN_NAME)
      .addAll(backingSegment.asQueryableIndex().getColumnNames())
      .build();
  broadcastTable = new BroadcastSegmentIndexedTable(backingSegment, keyColumns, dataSegment.getVersion());
}
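The setups in this listing share one pattern: register SegmentizerModule so the SegmentizerFactory subtypes are known to Jackson, make IndexIO injectable, then deserialize the factory.json that the merger wrote next to the segment files and let the resulting factory open the segment. Below is a minimal sketch of that pattern in isolation; the class and method names are placeholders invented for the sketch, not part of the test.

import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.jackson.SegmentizerModule;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.SegmentLazyLoadFailCallback;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.druid.segment.loading.SegmentizerFactory;
import org.apache.druid.timeline.DataSegment;

import java.io.File;
import java.io.IOException;

public class FactoryJsonSketch {
  // Hypothetical helper: read factory.json from a persisted segment directory and let
  // whatever SegmentizerFactory it describes open the segment.
  static Segment loadPersistedSegment(File segmentDir, DataSegment dataSegment)
      throws IOException, SegmentLoadingException {
    final ObjectMapper mapper = new DefaultObjectMapper();
    mapper.registerModule(new SegmentizerModule());
    // columnCacheSizeBytes = 0, as in the tests above
    final IndexIO indexIO = new IndexIO(mapper, () -> 0);
    // the deserialized factory receives IndexIO via @JacksonInject
    mapper.setInjectableValues(new InjectableValues.Std().addValue(IndexIO.class, indexIO));
    SegmentizerFactory factory =
        mapper.readValue(new File(segmentDir, "factory.json"), SegmentizerFactory.class);
    return factory.factorize(dataSegment, segmentDir, false, SegmentLazyLoadFailCallback.NOOP);
  }
}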
Use of org.apache.druid.jackson.SegmentizerModule in project druid by druid-io.
Class BroadcastJoinableMMappedQueryableSegmentizerFactoryTest, method testSegmentizer.
@Test
public void testSegmentizer() throws IOException, SegmentLoadingException {
  final ObjectMapper mapper = new DefaultObjectMapper();
  mapper.registerModule(new SegmentizerModule());
  final IndexIO indexIO = new IndexIO(mapper, () -> 0);
  mapper.setInjectableValues(
      new InjectableValues.Std()
          .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
          .addValue(ObjectMapper.class.getName(), mapper)
          .addValue(IndexIO.class, indexIO)
          .addValue(DataSegment.PruneSpecsHolder.class, DataSegment.PruneSpecsHolder.DEFAULT)
  );
  IndexMerger indexMerger = new IndexMergerV9(mapper, indexIO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
  SegmentizerFactory expectedFactory = new BroadcastJoinableMMappedQueryableSegmentizerFactory(indexIO, KEY_COLUMNS);
  Interval testInterval = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");
  IncrementalIndex data = TestIndex.makeRealtimeIndex("druid.sample.numeric.tsv");
  List<String> columnNames = data.getColumnNames();
  File segment = new File(temporaryFolder.newFolder(), "segment");
  // passing the broadcast-joinable factory through the IndexSpec records it in factory.json
  File persistedSegmentRoot = indexMerger.persist(data, testInterval, segment, new IndexSpec(null, null, null, null, expectedFactory), null);
  File factoryJson = new File(persistedSegmentRoot, "factory.json");
  Assert.assertTrue(factoryJson.exists());
  SegmentizerFactory factory = mapper.readValue(factoryJson, SegmentizerFactory.class);
  Assert.assertTrue(factory instanceof BroadcastJoinableMMappedQueryableSegmentizerFactory);
  Assert.assertEquals(expectedFactory, factory);
  // load a segment
  final DataSegment dataSegment = new DataSegment(
      TABLE_NAME, testInterval, DateTimes.nowUtc().toString(), ImmutableMap.of(),
      columnNames, ImmutableList.of(), null, null, persistedSegmentRoot.getTotalSpace()
  );
  final Segment loaded = factory.factorize(dataSegment, persistedSegmentRoot, false, SegmentLazyLoadFailCallback.NOOP);
  // the broadcast-joinable segment exposes an IndexedTable view
  final BroadcastSegmentIndexedTable table = (BroadcastSegmentIndexedTable) loaded.as(IndexedTable.class);
  Assert.assertNotNull(table);
}
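Compared with the default IndexSpec in the first example, which left an MMappedQueryableSegmentizerFactory in factory.json, here the fifth IndexSpec argument carries the factory to record. A small sketch of just that constructor call; the meaning of the four nulls (default bitmap serde, dimension and metric compression, and long encoding) is an assumption about this IndexSpec constructor rather than something the test states, and the helper name is invented.

import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.loading.SegmentizerFactory;

public class IndexSpecSketch {
  // Hypothetical helper: build an IndexSpec that records the given factory in factory.json,
  // keeping (assumed) default bitmap, compression, and long-encoding settings.
  static IndexSpec withSegmentLoader(SegmentizerFactory segmentLoader) {
    return new IndexSpec(null, null, null, null, segmentLoader);
  }
}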
Use of org.apache.druid.jackson.SegmentizerModule in project druid by druid-io.
Class SegmentManagerBroadcastJoinIndexedTableTest, method setup.
@Before
public void setup() throws IOException {
  segmentPuller = new LocalDataSegmentPuller();
  // register the "local" load spec type so segment loadSpecs can point at local deep storage
  objectMapper = new DefaultObjectMapper()
      .registerModule(new SegmentizerModule())
      .registerModule(new SimpleModule().registerSubtypes(new NamedType(LocalLoadSpec.class, "local")));
  indexIO = new IndexIO(objectMapper, () -> 0);
  objectMapper.setInjectableValues(
      new InjectableValues.Std()
          .addValue(LocalDataSegmentPuller.class, segmentPuller)
          .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
          .addValue(ObjectMapper.class.getName(), objectMapper)
          .addValue(IndexIO.class, indexIO)
  );
  segmentCacheDir = temporaryFolder.newFolder();
  segmentDeepStorageDir = temporaryFolder.newFolder();
  segmentCacheManager = new SegmentLocalCacheManager(
      new SegmentLoaderConfig() {
        @Override
        public List<StorageLocationConfig> getLocations() {
          return Collections.singletonList(new StorageLocationConfig(segmentCacheDir, null, null));
        }
      },
      objectMapper
  );
  segmentManager = new SegmentManager(new SegmentLocalCacheLoader(segmentCacheManager, indexIO, objectMapper));
  joinableFactory = new BroadcastTableJoinableFactory(segmentManager);
  EmittingLogger.registerEmitter(new NoopServiceEmitter());
}
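The "local" NamedType and the injected LocalDataSegmentPuller matter because the loadSpec of any segment handed to this SegmentManager is deserialized into a LocalLoadSpec before the cache manager pulls the files from deep storage. Below is a rough illustration of the kind of DataSegment this setup is wired to load; the class name, datasource name, columns, and especially the loadSpec keys ("type"/"path") are assumptions about the local load spec format, not taken from the test.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.timeline.DataSegment;

import java.io.File;

public class LocalBroadcastSegmentSketch {
  // Illustration only: a segment whose loadSpec points at a zip in local deep storage.
  // "type" = "local" resolves through the NamedType registered above, and the
  // LocalDataSegmentPuller is @JacksonInject-ed when the loadSpec is deserialized.
  static DataSegment localSegment(File segmentZipInDeepStorage) {
    return new DataSegment(
        "broadcast_datasource",
        Intervals.of("2011-01-12/2011-05-01"),
        "v1",
        ImmutableMap.of("type", "local", "path", segmentZipInDeepStorage.getAbsolutePath()),
        ImmutableList.of("dim1"),
        ImmutableList.of("cnt"),
        null,   // shardSpec, left null as in the other examples
        null,   // binaryVersion
        segmentZipInDeepStorage.length()
    );
  }
}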
Use of org.apache.druid.jackson.SegmentizerModule in project druid by druid-io.
Class SegmentizerFactoryTest, method testFactory.
@Test
public void testFactory() throws IOException {
  File factoryFile = Files.createTempFile("", "factory.json").toFile();
  FileOutputStream fos = new FileOutputStream(factoryFile);
  ObjectMapper mapper = new DefaultObjectMapper();
  mapper.registerModule(new SegmentizerModule());
  IndexIO indexIO = new IndexIO(mapper, new ColumnConfig() {
    @Override
    public int columnCacheSizeBytes() {
      return 777;
    }
  });
  mapper.setInjectableValues(new InjectableValues.Std().addValue(IndexIO.class, indexIO));
  mapper.writeValue(fos, new MMappedQueryableSegmentizerFactory(indexIO));
  fos.close();
  SegmentizerFactory factory = mapper.readValue(factoryFile, SegmentizerFactory.class);
  Assert.assertTrue(factory instanceof MMappedQueryableSegmentizerFactory);
}
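A small variation on the round trip above, serializing to a String instead of a temporary file; it assumes a mapper and IndexIO configured exactly as in testFactory() and is only meant to show that the file itself is not essential to the round trip. The class and method names are invented for this sketch.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.loading.MMappedQueryableSegmentizerFactory;
import org.apache.druid.segment.loading.SegmentizerFactory;
import org.junit.Assert;

import java.io.IOException;

public class StringRoundTripSketch {
  // Hypothetical variation: write the factory to a JSON String and read it back.
  static void roundTripThroughString(ObjectMapper mapper, IndexIO indexIO) throws IOException {
    String json = mapper.writeValueAsString(new MMappedQueryableSegmentizerFactory(indexIO));
    SegmentizerFactory factory = mapper.readValue(json, SegmentizerFactory.class);
    Assert.assertTrue(factory instanceof MMappedQueryableSegmentizerFactory);
  }
}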
Use of org.apache.druid.jackson.SegmentizerModule in project druid by druid-io.
Class CustomSegmentizerFactoryTest, method setup.
@BeforeClass
public static void setup() {
  final ObjectMapper mapper = new DefaultObjectMapper();
  mapper.registerModule(new SegmentizerModule());
  mapper.registerSubtypes(new NamedType(CustomSegmentizerFactory.class, "customSegmentFactory"));
  final IndexIO indexIO = new IndexIO(mapper, () -> 0);
  mapper.setInjectableValues(
      new InjectableValues.Std()
          .addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
          .addValue(ObjectMapper.class.getName(), mapper)
          .addValue(IndexIO.class, indexIO)
          .addValue(DataSegment.PruneSpecsHolder.class, DataSegment.PruneSpecsHolder.DEFAULT)
  );
  JSON_MAPPER = mapper;
  INDEX_IO = indexIO;
  INDEX_MERGER = new IndexMergerV9(mapper, indexIO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
}
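For context, here is a hypothetical shape of a custom SegmentizerFactory like the one registered under "customSegmentFactory" above. The real CustomSegmentizerFactory in the test may look different; the class name is invented, and the factorize() signature and exception handling are assumptions based on how the factories are used in the other examples.

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.SegmentLazyLoadFailCallback;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.druid.segment.loading.SegmentizerFactory;
import org.apache.druid.timeline.DataSegment;

import java.io.File;
import java.io.IOException;

public class ExampleCustomSegmentizerFactory implements SegmentizerFactory {
  private final IndexIO indexIO;

  // IndexIO arrives through the InjectableValues configured above, which is why the
  // setup adds it to the mapper before any factory.json is deserialized.
  @JsonCreator
  public ExampleCustomSegmentizerFactory(@JacksonInject IndexIO indexIO) {
    this.indexIO = indexIO;
  }

  @Override
  public Segment factorize(
      DataSegment dataSegment,
      File parentDir,
      boolean lazy,
      SegmentLazyLoadFailCallback loadFailed
  ) throws SegmentLoadingException {
    try {
      // memory-map the persisted segment files; this sketch ignores the lazy flag
      return new QueryableIndexSegment(indexIO.loadIndex(parentDir), dataSegment.getId());
    } catch (IOException e) {
      throw new SegmentLoadingException(e, "failed to load segment %s", dataSegment.getId());
    }
  }
}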