Use of io.druid.timeline.partition.SingleDimensionShardSpec in project druid by druid-io.
In the class CachingClusteredClientTest, the method makeMockSingleDimensionSelector:
private ServerSelector makeMockSingleDimensionSelector(
    DruidServer server, String dimension, String start, String end, int partitionNum)
{
  DataSegment segment = EasyMock.createNiceMock(DataSegment.class);
  EasyMock.expect(segment.getIdentifier()).andReturn(DATA_SOURCE).anyTimes();
  EasyMock.expect(segment.getShardSpec())
          .andReturn(new SingleDimensionShardSpec(dimension, start, end, partitionNum))
          .anyTimes();
  EasyMock.replay(segment);
  ServerSelector selector = new ServerSelector(
      segment,
      new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
  );
  selector.addServerAndUpdateSegment(new QueryableDruidServer(server, null), segment);
  return selector;
}
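As a reference for what the mock above returns, here is a minimal standalone sketch, not taken from the Druid test suite (the class name ShardSpecSketch is hypothetical), of constructing a SingleDimensionShardSpec directly. The constructor takes the partitioned dimension, an inclusive start value, an exclusive end value (null on either side meaning unbounded), and the partition number; the sketch assumes the getDimension accessor alongside the getStart/getEnd/getPartitionNum accessors used elsewhere on this page.

import io.druid.timeline.partition.SingleDimensionShardSpec;

public class ShardSpecSketch
{
  public static void main(String[] args)
  {
    // Partition 0 on dimension "billy", covering values in ["a", "m").
    SingleDimensionShardSpec spec = new SingleDimensionShardSpec("billy", "a", "m", 0);
    System.out.println(spec.getDimension());    // billy
    System.out.println(spec.getStart());        // a
    System.out.println(spec.getEnd());          // m
    System.out.println(spec.getPartitionNum()); // 0
  }
}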
Use of io.druid.timeline.partition.SingleDimensionShardSpec in project druid by druid-io.
In the class CachingClusteredClientTest, the method populateTimeline:
private List<Map<DruidServer, ServerExpectations>> populateTimeline(
    List<Interval> queryIntervals,
    List<List<Iterable<Result<Object>>>> expectedResults,
    int numQueryIntervals,
    List<Object> mocks
)
{
  timeline = new VersionedIntervalTimeline<>(Ordering.natural());
  final List<Map<DruidServer, ServerExpectations>> serverExpectationList = Lists.newArrayList();
  for (int k = 0; k < numQueryIntervals + 1; ++k) {
    final int numChunks = expectedResults.get(k).size();
    final TreeMap<DruidServer, ServerExpectations> serverExpectations = Maps.newTreeMap();
    serverExpectationList.add(serverExpectations);
    for (int j = 0; j < numChunks; ++j) {
      DruidServer lastServer = servers[random.nextInt(servers.length)];
      if (!serverExpectations.containsKey(lastServer)) {
        serverExpectations.put(lastServer, new ServerExpectations(lastServer, makeMock(mocks, QueryRunner.class)));
      }
      DataSegment mockSegment = makeMock(mocks, DataSegment.class);
      ServerExpectation expectation = new ServerExpectation(
          String.format("%s_%s", k, j), // interval/chunk
          queryIntervals.get(k),
          mockSegment,
          expectedResults.get(k).get(j)
      );
      serverExpectations.get(lastServer).addExpectation(expectation);
      ServerSelector selector = new ServerSelector(
          expectation.getSegment(),
          new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
      );
      selector.addServerAndUpdateSegment(new QueryableDruidServer(lastServer, null), selector.getSegment());
      final ShardSpec shardSpec;
      if (numChunks == 1) {
        shardSpec = new SingleDimensionShardSpec("dimAll", null, null, 0);
      } else {
        String start = null;
        String end = null;
        if (j > 0) {
          start = String.valueOf(j);
        }
        if (j + 1 < numChunks) {
          end = String.valueOf(j + 1);
        }
        shardSpec = new SingleDimensionShardSpec("dim" + k, start, end, j);
      }
      EasyMock.expect(mockSegment.getShardSpec()).andReturn(shardSpec).anyTimes();
      timeline.add(queryIntervals.get(k), String.valueOf(k), shardSpec.createChunk(selector));
    }
  }
  return serverExpectationList;
}
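The start/end arithmetic above makes the chunks for one query interval contiguous and collectively exhaustive: chunk 0 is unbounded below, the last chunk is unbounded above, and adjacent chunks share a boundary value. A self-contained sketch of just that boundary logic (the class name is hypothetical):

public class ChunkBoundsSketch
{
  public static void main(String[] args)
  {
    int numChunks = 3;
    for (int j = 0; j < numChunks; j++) {
      String start = j > 0 ? String.valueOf(j) : null;                 // null = unbounded below
      String end = (j + 1 < numChunks) ? String.valueOf(j + 1) : null; // null = unbounded above
      System.out.printf("chunk %d: [%s, %s)%n", j, start, end);
    }
    // Prints: chunk 0: [null, 1), chunk 1: [1, 2), chunk 2: [2, null)
  }
}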
Use of io.druid.timeline.partition.SingleDimensionShardSpec in project druid by druid-io.
In the class DataSegmentTest, the method testIdentifierWithZeroPartition:
@Test
public void testIdentifierWithZeroPartition()
{
  final DataSegment segment = DataSegment.builder()
      .dataSource("foo")
      .interval(new Interval("2012-01-01/2012-01-02"))
      .version(new DateTime("2012-01-01T11:22:33.444Z").toString())
      .shardSpec(new SingleDimensionShardSpec("bar", null, "abc", 0))
      .build();
  Assert.assertEquals("foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z", segment.getIdentifier());
}
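For contrast, a hedged companion sketch, assuming (as the zero-partition case above implies) that a nonzero partitionNum appends a trailing _<partitionNum> to the identifier; the method name is invented for illustration:

@Test
public void testIdentifierWithNonzeroPartitionSketch()
{
  final DataSegment segment = DataSegment.builder()
      .dataSource("foo")
      .interval(new Interval("2012-01-01/2012-01-02"))
      .version(new DateTime("2012-01-01T11:22:33.444Z").toString())
      .shardSpec(new SingleDimensionShardSpec("bar", "abc", "def", 1))
      .build();
  // Assumed: partition 1 appends "_1" to the zero-partition identifier above.
  Assert.assertEquals("foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z_1", segment.getIdentifier());
}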
Use of io.druid.timeline.partition.SingleDimensionShardSpec in project druid by druid-io.
In the class SingleDimensionShardSpecTest, the method testIsInChunk:
@Test
public void testIsInChunk() throws Exception
{
  Map<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> tests =
      ImmutableMap.<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>>builder()
          .put(makeSpec(null, null), makeList(true, null, true, "a", true, "h", true, "p", true, "y"))
          .put(makeSpec(null, "m"), makeList(true, null, true, "a", true, "h", false, "p", false, "y"))
          .put(makeSpec("a", "h"), makeList(false, null, true, "a", false, "h", false, "p", false, "y"))
          .put(makeSpec("d", "u"), makeList(false, null, false, "a", true, "h", true, "p", false, "y"))
          .put(makeSpec("h", null), makeList(false, null, false, "a", true, "h", true, "p", true, "y"))
          .build();
  for (Map.Entry<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> entry : tests.entrySet()) {
    SingleDimensionShardSpec spec = entry.getKey();
    for (Pair<Boolean, Map<String, String>> pair : entry.getValue()) {
      final InputRow inputRow = new MapBasedInputRow(
          0,
          ImmutableList.of("billy"),
          Maps.transformValues(pair.rhs, new Function<String, Object>()
          {
            @Override
            public Object apply(String input)
            {
              return input;
            }
          })
      );
      Assert.assertEquals(
          String.format("spec[%s], row[%s]", spec, inputRow),
          pair.lhs,
          spec.isInChunk(inputRow.getTimestampFromEpoch(), inputRow)
      );
    }
  }
}
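The table-driven cases above encode half-open containment: the start bound is inclusive, the end bound is exclusive, and a null bound is unbounded. A minimal direct sketch of the same check for the ["a", "h") spec, without the table scaffolding (the class and method names are hypothetical):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.timeline.partition.SingleDimensionShardSpec;
import org.junit.Assert;
import org.junit.Test;

public class IsInChunkSketch
{
  @Test
  public void testBoundsDirectly()
  {
    SingleDimensionShardSpec spec = new SingleDimensionShardSpec("billy", "a", "h", 0);
    // "a" sits on the inclusive start bound, so it is in the chunk...
    InputRow atStart = new MapBasedInputRow(0, ImmutableList.of("billy"), ImmutableMap.<String, Object>of("billy", "a"));
    Assert.assertTrue(spec.isInChunk(atStart.getTimestampFromEpoch(), atStart));
    // ...while "h" sits on the exclusive end bound and is not.
    InputRow atEnd = new MapBasedInputRow(0, ImmutableList.of("billy"), ImmutableMap.<String, Object>of("billy", "h"));
    Assert.assertFalse(spec.isInChunk(atEnd.getTimestampFromEpoch(), atEnd));
  }
}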
Use of io.druid.timeline.partition.SingleDimensionShardSpec in project druid by druid-io.
In the class DeterminePartitionsJobTest, the method testPartitionJob:
@Test
public void testPartitionJob()
{
  DeterminePartitionsJob job = new DeterminePartitionsJob(config);
  job.run();
  int shardNum = 0;
  int segmentNum = 0;
  Assert.assertEquals(expectedNumOfSegments, config.getSchema().getTuningConfig().getShardSpecs().size());
  for (Map.Entry<Long, List<HadoopyShardSpec>> entry : config.getSchema().getTuningConfig().getShardSpecs().entrySet()) {
    int partitionNum = 0;
    List<HadoopyShardSpec> specs = entry.getValue();
    Assert.assertEquals(expectedNumOfShardsForEachSegment[segmentNum], specs.size());
    for (HadoopyShardSpec spec : specs) {
      SingleDimensionShardSpec actualSpec = (SingleDimensionShardSpec) spec.getActualSpec();
      Assert.assertEquals(shardNum, spec.getShardNum());
      Assert.assertEquals(expectedStartEndForEachShard[segmentNum][partitionNum][0], actualSpec.getStart());
      Assert.assertEquals(expectedStartEndForEachShard[segmentNum][partitionNum][1], actualSpec.getEnd());
      Assert.assertEquals(partitionNum, actualSpec.getPartitionNum());
      shardNum++;
      partitionNum++;
    }
    segmentNum++;
  }
}
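The nested loops above also suggest a contiguity invariant: within one segment, each shard's end matches the next shard's start, with unbounded (null) edges only at the extremes. A hypothetical helper distilling that check (not part of DeterminePartitionsJobTest, and assuming the job always emits tiling shard specs):

// Hypothetical helper: asserts that shards within one segment tile the
// dimension's value space, with null (unbounded) edges only at the extremes.
static void assertContiguous(List<SingleDimensionShardSpec> specs)
{
  Assert.assertNull(specs.get(0).getStart());
  Assert.assertNull(specs.get(specs.size() - 1).getEnd());
  for (int i = 0; i + 1 < specs.size(); i++) {
    Assert.assertEquals(specs.get(i).getEnd(), specs.get(i + 1).getStart());
  }
}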