Use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableMap in project druid by druid-io.
From the class RealtimeManagerTest, method setUp:
@Before
public void setUp() throws Exception {
  final List<TestInputRowHolder> rows = Arrays.asList(
      makeRow(new DateTime("9000-01-01").getMillis()),
      makeRow(new ParseException("parse error")),
      null,
      makeRow(new DateTime().getMillis())
  );
  ObjectMapper jsonMapper = new DefaultObjectMapper();
  schema = new DataSchema(
      "test", null, new AggregatorFactory[] { new CountAggregatorFactory("rows") },
      new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper
  );
  schema2 = new DataSchema(
      "testV2", null, new AggregatorFactory[] { new CountAggregatorFactory("rows") },
      new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper
  );
  RealtimeIOConfig ioConfig = new RealtimeIOConfig(
      new FirehoseFactory() {
        @Override
        public Firehose connect(InputRowParser parser) throws IOException {
          return new TestFirehose(rows.iterator());
        }
      },
      new PlumberSchool() {
        @Override
        public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
          return plumber;
        }
      },
      null
  );
  RealtimeIOConfig ioConfig2 = new RealtimeIOConfig(
      null,
      new PlumberSchool() {
        @Override
        public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
          return plumber2;
        }
      },
      new FirehoseFactoryV2() {
        @Override
        public FirehoseV2 connect(InputRowParser parser, Object arg1) throws IOException, ParseException {
          return new TestFirehoseV2(rows.iterator());
        }
      }
  );
  RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
      1, new Period("P1Y"), null, null, null, null, null, null, null, null, 0, 0, null, null
  );
  plumber = new TestPlumber(new Sink(
      new Interval("0/P5000Y"), schema, tuningConfig.getShardSpec(), new DateTime().toString(),
      tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions()
  ));
  realtimeManager = new RealtimeManager(
      Arrays.<FireDepartment>asList(new FireDepartment(schema, ioConfig, tuningConfig)), null
  );
  plumber2 = new TestPlumber(new Sink(
      new Interval("0/P5000Y"), schema2, tuningConfig.getShardSpec(), new DateTime().toString(),
      tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions()
  ));
  realtimeManager2 = new RealtimeManager(
      Arrays.<FireDepartment>asList(new FireDepartment(schema2, ioConfig2, tuningConfig)), null
  );
  tuningConfig_0 = new RealtimeTuningConfig(
      1, new Period("P1Y"), null, null, null, null, null, new LinearShardSpec(0), null, null, 0, 0, null, null
  );
  tuningConfig_1 = new RealtimeTuningConfig(
      1, new Period("P1Y"), null, null, null, null, null, new LinearShardSpec(1), null, null, 0, 0, null, null
  );
  schema3 = new DataSchema(
      "testing", null, new AggregatorFactory[] { new CountAggregatorFactory("ignore") },
      new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper
  );
  FireDepartment department_0 = new FireDepartment(schema3, ioConfig, tuningConfig_0);
  FireDepartment department_1 = new FireDepartment(schema3, ioConfig2, tuningConfig_1);
  QueryRunnerFactoryConglomerate conglomerate = new QueryRunnerFactoryConglomerate() {
    @Override
    public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
      return factory;
    }
  };
  chiefStartedLatch = new CountDownLatch(2);
  RealtimeManager.FireChief fireChief_0 = new RealtimeManager.FireChief(department_0, conglomerate) {
    @Override
    public void run() {
      super.initPlumber();
      chiefStartedLatch.countDown();
    }
  };
  RealtimeManager.FireChief fireChief_1 = new RealtimeManager.FireChief(department_1, conglomerate) {
    @Override
    public void run() {
      super.initPlumber();
      chiefStartedLatch.countDown();
    }
  };
  realtimeManager3 = new RealtimeManager(
      Arrays.asList(department_0, department_1),
      conglomerate,
      ImmutableMap.<String, Map<Integer, RealtimeManager.FireChief>>of(
          "testing", ImmutableMap.of(0, fireChief_0, 1, fireChief_1)
      )
  );
  startFireChiefWithPartitionNum(fireChief_0, 0);
  startFireChiefWithPartitionNum(fireChief_1, 1);
}
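A note on the ImmutableMap call that wires up realtimeManager3: when the map's value type is itself generic (Map<Integer, RealtimeManager.FireChief>), the explicit type witness on of(...) pins the inferred types down. A minimal, self-contained sketch of the same idiom, assuming plain Guava's com.google.common.collect.ImmutableMap rather than the Beam-vendored copy and toy String values in place of FireChief:

import com.google.common.collect.ImmutableMap;

import java.util.Map;

public class TypeWitnessExample {
  public static void main(String[] args) {
    // Explicit type witness so the outer value type is the Map interface,
    // not the concrete ImmutableMap returned by the inner of(...) call.
    Map<String, Map<Integer, String>> chiefsByDataSource =
        ImmutableMap.<String, Map<Integer, String>>of(
            "testing", ImmutableMap.of(0, "fireChief_0", 1, "fireChief_1")
        );
    System.out.println(chiefsByDataSource.get("testing").get(1)); // prints fireChief_1
  }
}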
Use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableMap in project druid by druid-io.
From the class LookupCoordinatorResourceTest, method testMissingGetTier:
@Test
public void testMissingGetTier() {
  final String tier = "some tier";
  final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock(LookupCoordinatorManager.class);
  final Map<String, Map<String, Map<String, Object>>> retVal = ImmutableMap.<String, Map<String, Map<String, Object>>>of();
  EasyMock.expect(lookupCoordinatorManager.getKnownLookups()).andReturn(retVal).once();
  EasyMock.replay(lookupCoordinatorManager);
  final LookupCoordinatorResource lookupCoordinatorResource =
      new LookupCoordinatorResource(lookupCoordinatorManager, mapper, mapper);
  final Response response = lookupCoordinatorResource.getSpecificTier(tier);
  Assert.assertEquals(404, response.getStatus());
  EasyMock.verify(lookupCoordinatorManager);
}
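The stubbed getKnownLookups() returns an empty ImmutableMap, so the requested tier is missing and the resource is expected to answer 404. A rough sketch of that miss-then-404 pattern, with a hypothetical statusForTier helper and plain Guava rather than the vendored copy:

import com.google.common.collect.ImmutableMap;

import java.util.Map;

public class MissingTierExample {
  // Hypothetical stand-in for the resource's 404-on-missing-tier behavior.
  static int statusForTier(Map<String, Map<String, Object>> knownLookups, String tier) {
    return knownLookups.containsKey(tier) ? 200 : 404;
  }

  public static void main(String[] args) {
    Map<String, Map<String, Object>> empty = ImmutableMap.of();
    System.out.println(statusForTier(empty, "some tier")); // prints 404
  }
}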
Use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableMap in project druid by druid-io.
From the class SingleDimensionShardSpecTest, method testIsInChunk:
@Test
public void testIsInChunk() throws Exception {
  Map<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> tests =
      ImmutableMap.<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>>builder()
          .put(makeSpec(null, null), makeList(true, null, true, "a", true, "h", true, "p", true, "y"))
          .put(makeSpec(null, "m"), makeList(true, null, true, "a", true, "h", false, "p", false, "y"))
          .put(makeSpec("a", "h"), makeList(false, null, true, "a", false, "h", false, "p", false, "y"))
          .put(makeSpec("d", "u"), makeList(false, null, false, "a", true, "h", true, "p", false, "y"))
          .put(makeSpec("h", null), makeList(false, null, false, "a", true, "h", true, "p", true, "y"))
          .build();
  for (Map.Entry<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> entry : tests.entrySet()) {
    SingleDimensionShardSpec spec = entry.getKey();
    for (Pair<Boolean, Map<String, String>> pair : entry.getValue()) {
      final InputRow inputRow = new MapBasedInputRow(
          0,
          ImmutableList.of("billy"),
          Maps.transformValues(pair.rhs, new Function<String, Object>() {
            @Override
            public Object apply(String input) {
              return input;
            }
          })
      );
      Assert.assertEquals(
          String.format("spec[%s], row[%s]", spec, inputRow),
          pair.lhs,
          spec.isInChunk(inputRow.getTimestampFromEpoch(), inputRow)
      );
    }
  }
}
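The builder form above is useful when there are more entries than the fixed-arity of(...) overloads handle comfortably, and ImmutableMap iterates in insertion order, so the table-driven cases run in the order they are declared. A small sketch with toy key/value types, assuming plain Guava:

import com.google.common.collect.ImmutableMap;

import java.util.Map;

public class BuilderOrderExample {
  public static void main(String[] args) {
    // Entries iterate in the order they were put into the builder.
    Map<String, Boolean> cases = ImmutableMap.<String, Boolean>builder()
        .put("a", true)
        .put("h", true)
        .put("p", false)
        .put("y", false)
        .build();
    for (Map.Entry<String, Boolean> entry : cases.entrySet()) {
      System.out.println(entry.getKey() + " -> " + entry.getValue());
    }
  }
}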
Use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableMap in project druid by druid-io.
From the class LookupCoordinatorManagerTest, method testDeleteLookupIgnoresMissing:
@Test
public void testDeleteLookupIgnoresMissing() throws Exception {
  final Map<String, Object> ignore = ImmutableMap.<String, Object>of("lookup", "ignore");
  final LookupCoordinatorManager manager =
      new LookupCoordinatorManager(client, discoverer, mapper, configManager, lookupCoordinatorManagerConfig) {
        @Override
        public Map<String, Map<String, Map<String, Object>>> getKnownLookups() {
          return ImmutableMap.<String, Map<String, Map<String, Object>>>of(LOOKUP_TIER, ImmutableMap.of("ignore", ignore));
        }
      };
  final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost");
  Assert.assertFalse(manager.deleteLookup(LOOKUP_TIER, "foo", auditInfo));
}
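The known-lookup state here is a nested ImmutableMap keyed tier -> lookup name -> spec, and deleting a name that is not present ("foo") is expected to be a no-op returning false. A minimal sketch of the nested structure and the miss check, with hypothetical tier and lookup names and plain Guava:

import com.google.common.collect.ImmutableMap;

import java.util.Map;

public class DeleteMissingExample {
  public static void main(String[] args) {
    // tier -> lookup name -> lookup spec
    Map<String, Map<String, Map<String, Object>>> known =
        ImmutableMap.<String, Map<String, Map<String, Object>>>of(
            "lookupTier",
            ImmutableMap.<String, Map<String, Object>>of("ignore", ImmutableMap.<String, Object>of("lookup", "ignore"))
        );
    Map<String, Map<String, Object>> tier = known.get("lookupTier");
    // "foo" is not present in the tier, so a delete would have nothing to remove.
    System.out.println(tier.containsKey("foo")); // prints false
  }
}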
Use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableMap in project druid by druid-io.
From the class LookupCoordinatorManagerTest, method testUpdateLookupsAddsNewTier:
@Test
public void testUpdateLookupsAddsNewTier() throws Exception {
  final Map<String, Object> ignore = ImmutableMap.<String, Object>of("prop", "old");
  final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost");
  final LookupCoordinatorManager manager =
      new LookupCoordinatorManager(client, discoverer, mapper, configManager, lookupCoordinatorManagerConfig) {
        @Override
        public Map<String, Map<String, Map<String, Object>>> getKnownLookups() {
          return ImmutableMap.<String, Map<String, Map<String, Object>>>of(LOOKUP_TIER + "2", ImmutableMap.of("ignore", ignore));
        }
      };
  final Map<String, Object> newSpec = ImmutableMap.<String, Object>of("prop", "new");
  EasyMock.reset(configManager);
  EasyMock.expect(configManager.set(
      EasyMock.eq(LookupCoordinatorManager.LOOKUP_CONFIG_KEY),
      EasyMock.eq(ImmutableMap.<String, Map<String, Map<String, Object>>>of(
          LOOKUP_TIER + "1", ImmutableMap.of("foo", newSpec),
          LOOKUP_TIER + "2", ImmutableMap.of("ignore", ignore)
      )),
      EasyMock.eq(auditInfo)
  )).andReturn(true).once();
  EasyMock.replay(configManager);
  Assert.assertTrue(manager.updateLookups(
      ImmutableMap.<String, Map<String, Map<String, Object>>>of(
          LOOKUP_TIER + "1", ImmutableMap.<String, Map<String, Object>>of("foo", newSpec)
      ),
      auditInfo
  ));
  EasyMock.verify(configManager);
}
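The EasyMock expectation above encodes the interesting behavior: the update containing only LOOKUP_TIER + "1" is persisted together with the already-known LOOKUP_TIER + "2". A rough sketch of combining two disjoint tier maps with ImmutableMap.Builder, using hypothetical tier names, a flattened two-level map, and plain Guava:

import com.google.common.collect.ImmutableMap;

import java.util.Map;

public class MergeTiersExample {
  public static void main(String[] args) {
    Map<String, Map<String, Object>> known =
        ImmutableMap.<String, Map<String, Object>>of("lookupTier2", ImmutableMap.<String, Object>of("prop", "old"));
    Map<String, Map<String, Object>> update =
        ImmutableMap.<String, Map<String, Object>>of("lookupTier1", ImmutableMap.<String, Object>of("prop", "new"));

    // Combine the existing tiers with the incoming update. Duplicate keys would make
    // build() throw, so this sketch only covers the disjoint-tier case.
    Map<String, Map<String, Object>> merged = ImmutableMap.<String, Map<String, Object>>builder()
        .putAll(known)
        .putAll(update)
        .build();
    System.out.println(merged.keySet()); // prints [lookupTier2, lookupTier1]
  }
}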