
Example 96 with ImmutableMap

use of com.google.common.collect.ImmutableMap in project druid by druid-io.

the class RealtimeManagerTest method setUp.

@Before
public void setUp() throws Exception {
    final List<TestInputRowHolder> rows = Arrays.asList(makeRow(new DateTime("9000-01-01").getMillis()), makeRow(new ParseException("parse error")), null, makeRow(new DateTime().getMillis()));
    ObjectMapper jsonMapper = new DefaultObjectMapper();
    schema = new DataSchema("test", null, new AggregatorFactory[] { new CountAggregatorFactory("rows") }, new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper);
    schema2 = new DataSchema("testV2", null, new AggregatorFactory[] { new CountAggregatorFactory("rows") }, new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper);
    RealtimeIOConfig ioConfig = new RealtimeIOConfig(new FirehoseFactory() {

        @Override
        public Firehose connect(InputRowParser parser) throws IOException {
            return new TestFirehose(rows.iterator());
        }
    }, new PlumberSchool() {

        @Override
        public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
            return plumber;
        }
    }, null);
    RealtimeIOConfig ioConfig2 = new RealtimeIOConfig(null, new PlumberSchool() {

        @Override
        public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
            return plumber2;
        }
    }, new FirehoseFactoryV2() {

        @Override
        public FirehoseV2 connect(InputRowParser parser, Object arg1) throws IOException, ParseException {
            return new TestFirehoseV2(rows.iterator());
        }
    });
    RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(1, new Period("P1Y"), null, null, null, null, null, null, null, null, 0, 0, null, null);
    plumber = new TestPlumber(new Sink(new Interval("0/P5000Y"), schema, tuningConfig.getShardSpec(), new DateTime().toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions()));
    realtimeManager = new RealtimeManager(Arrays.<FireDepartment>asList(new FireDepartment(schema, ioConfig, tuningConfig)), null);
    plumber2 = new TestPlumber(new Sink(new Interval("0/P5000Y"), schema2, tuningConfig.getShardSpec(), new DateTime().toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions()));
    realtimeManager2 = new RealtimeManager(Arrays.<FireDepartment>asList(new FireDepartment(schema2, ioConfig2, tuningConfig)), null);
    tuningConfig_0 = new RealtimeTuningConfig(1, new Period("P1Y"), null, null, null, null, null, new LinearShardSpec(0), null, null, 0, 0, null, null);
    tuningConfig_1 = new RealtimeTuningConfig(1, new Period("P1Y"), null, null, null, null, null, new LinearShardSpec(1), null, null, 0, 0, null, null);
    schema3 = new DataSchema("testing", null, new AggregatorFactory[] { new CountAggregatorFactory("ignore") }, new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper);
    FireDepartment department_0 = new FireDepartment(schema3, ioConfig, tuningConfig_0);
    FireDepartment department_1 = new FireDepartment(schema3, ioConfig2, tuningConfig_1);
    QueryRunnerFactoryConglomerate conglomerate = new QueryRunnerFactoryConglomerate() {

        @Override
        public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
            return factory;
        }
    };
    chiefStartedLatch = new CountDownLatch(2);
    RealtimeManager.FireChief fireChief_0 = new RealtimeManager.FireChief(department_0, conglomerate) {

        @Override
        public void run() {
            super.initPlumber();
            chiefStartedLatch.countDown();
        }
    };
    RealtimeManager.FireChief fireChief_1 = new RealtimeManager.FireChief(department_1, conglomerate) {

        @Override
        public void run() {
            super.initPlumber();
            chiefStartedLatch.countDown();
        }
    };
    realtimeManager3 = new RealtimeManager(
            Arrays.asList(department_0, department_1),
            conglomerate,
            ImmutableMap.<String, Map<Integer, RealtimeManager.FireChief>>of(
                    "testing",
                    ImmutableMap.of(0, fireChief_0, 1, fireChief_1)));
    startFireChiefWithPartitionNum(fireChief_0, 0);
    startFireChiefWithPartitionNum(fireChief_1, 1);
}
Also used : FirehoseV2(io.druid.data.input.FirehoseV2) RealtimeIOConfig(io.druid.segment.indexing.RealtimeIOConfig) BaseQuery(io.druid.query.BaseQuery) Query(io.druid.query.Query) GroupByQuery(io.druid.query.groupby.GroupByQuery) FirehoseFactory(io.druid.data.input.FirehoseFactory) LinearShardSpec(io.druid.timeline.partition.LinearShardSpec) DateTime(org.joda.time.DateTime) UniformGranularitySpec(io.druid.segment.indexing.granularity.UniformGranularitySpec) QueryRunnerFactoryConglomerate(io.druid.query.QueryRunnerFactoryConglomerate) Sink(io.druid.segment.realtime.plumber.Sink) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Firehose(io.druid.data.input.Firehose) Period(org.joda.time.Period) IOException(java.io.IOException) PlumberSchool(io.druid.segment.realtime.plumber.PlumberSchool) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) RealtimeTuningConfig(io.druid.segment.indexing.RealtimeTuningConfig) CountDownLatch(java.util.concurrent.CountDownLatch) DataSchema(io.druid.segment.indexing.DataSchema) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) Plumber(io.druid.segment.realtime.plumber.Plumber) ParseException(io.druid.java.util.common.parsers.ParseException) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) InputRowParser(io.druid.data.input.impl.InputRowParser) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) FirehoseFactoryV2(io.druid.data.input.FirehoseFactoryV2) Interval(org.joda.time.Interval) Before(org.junit.Before)
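The ImmutableMap usage in this setup is the last constructor argument to RealtimeManager: a nested map from data source name to partition number to FireChief, built with an explicit type witness on of(). A minimal, self-contained sketch of that construction pattern, with placeholder names (NestedImmutableMapSketch, "chief-0", etc. are illustrative, not Druid code):

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class NestedImmutableMapSketch {

    public static void main(String[] args) {
        // Inner map: partition number -> worker label (stands in for a FireChief).
        ImmutableMap<Integer, String> partitions = ImmutableMap.of(0, "chief-0", 1, "chief-1");
        // Outer map: data source -> partition map. The explicit type witness keeps the
        // value type as the Map interface, so the result matches a parameter declared as
        // Map<String, Map<Integer, ...>> rather than ImmutableMap.
        Map<String, Map<Integer, String>> byDataSource =
                ImmutableMap.<String, Map<Integer, String>>of("testing", partitions);
        // Prints {testing={0=chief-0, 1=chief-1}}; ImmutableMap preserves insertion order.
        System.out.println(byDataSource);
    }
}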

Example 97 with ImmutableMap

use of com.google.common.collect.ImmutableMap in project druid by druid-io.

the class LookupCoordinatorResourceTest method testMissingGetTier.

@Test
public void testMissingGetTier() {
    final String tier = "some tier";
    final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock(LookupCoordinatorManager.class);
    final Map<String, Map<String, Map<String, Object>>> retVal = ImmutableMap.<String, Map<String, Map<String, Object>>>of();
    EasyMock.expect(lookupCoordinatorManager.getKnownLookups()).andReturn(retVal).once();
    EasyMock.replay(lookupCoordinatorManager);
    final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(lookupCoordinatorManager, mapper, mapper);
    final Response response = lookupCoordinatorResource.getSpecificTier(tier);
    Assert.assertEquals(404, response.getStatus());
    EasyMock.verify(lookupCoordinatorManager);
}
Also used : Response(javax.ws.rs.core.Response) LookupCoordinatorManager(io.druid.server.lookup.cache.LookupCoordinatorManager) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) Map(java.util.Map) Test(org.junit.Test)
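The stubbed getKnownLookups() returns ImmutableMap.<String, Map<String, Map<String, Object>>>of(), an empty immutable map whose type witness lets it satisfy the nested return type, so getSpecificTier() finds no tier and answers 404. A hedged, standalone sketch of the empty-map idiom (EmptyImmutableMapSketch and knownLookups are illustrative names, not the Druid API):

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class EmptyImmutableMapSketch {

    // The explicit witness pins the otherwise-unconstrained type parameters of of()
    // to the nested map type the caller expects; the no-arg of() returns a shared
    // empty instance, so nothing is allocated per call.
    static Map<String, Map<String, Object>> knownLookups() {
        return ImmutableMap.<String, Map<String, Object>>of();
    }

    public static void main(String[] args) {
        // Prints true: no tiers are known, mirroring the 404 expectation above.
        System.out.println(knownLookups().isEmpty());
    }
}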

Example 98 with ImmutableMap

use of com.google.common.collect.ImmutableMap in project druid by druid-io.

the class SingleDimensionShardSpecTest method testIsInChunk.

@Test
public void testIsInChunk() throws Exception {
    Map<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> tests =
            ImmutableMap.<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>>builder()
                    .put(makeSpec(null, null), makeList(true, null, true, "a", true, "h", true, "p", true, "y"))
                    .put(makeSpec(null, "m"), makeList(true, null, true, "a", true, "h", false, "p", false, "y"))
                    .put(makeSpec("a", "h"), makeList(false, null, true, "a", false, "h", false, "p", false, "y"))
                    .put(makeSpec("d", "u"), makeList(false, null, false, "a", true, "h", true, "p", false, "y"))
                    .put(makeSpec("h", null), makeList(false, null, false, "a", true, "h", true, "p", true, "y"))
                    .build();
    for (Map.Entry<SingleDimensionShardSpec, List<Pair<Boolean, Map<String, String>>>> entry : tests.entrySet()) {
        SingleDimensionShardSpec spec = entry.getKey();
        for (Pair<Boolean, Map<String, String>> pair : entry.getValue()) {
            final InputRow inputRow = new MapBasedInputRow(0, ImmutableList.of("billy"), Maps.transformValues(pair.rhs, new Function<String, Object>() {

                @Override
                public Object apply(String input) {
                    return input;
                }
            }));
            Assert.assertEquals(String.format("spec[%s], row[%s]", spec, inputRow), pair.lhs, spec.isInChunk(inputRow.getTimestampFromEpoch(), inputRow));
        }
    }
}
Also used : Function(com.google.common.base.Function) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) InputRow(io.druid.data.input.InputRow) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) SingleDimensionShardSpec(io.druid.timeline.partition.SingleDimensionShardSpec) ImmutableMap(com.google.common.collect.ImmutableMap) Map(java.util.Map) Test(org.junit.Test)
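The ImmutableMap usage worth noting here is builder(): each put() pairs a shard spec with its expected outcomes, and build() yields an immutable test table that the loop walks via entrySet(). A simplified sketch of the builder-as-test-table pattern, with made-up case names and expectations (BuilderTestTableSketch is not part of Druid):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.Map;

public class BuilderTestTableSketch {

    public static void main(String[] args) {
        // Key: a label describing the case; value: expected results for a few probes.
        // A duplicate key would make build() throw IllegalArgumentException.
        Map<String, List<Boolean>> cases = ImmutableMap.<String, List<Boolean>>builder()
                .put("open range", ImmutableList.of(true, true, true))
                .put("upper-bounded", ImmutableList.of(true, true, false))
                .put("lower-bounded", ImmutableList.of(false, true, true))
                .build();
        // Iteration order matches insertion order, so failures report cases predictably.
        for (Map.Entry<String, List<Boolean>> entry : cases.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}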

Example 99 with ImmutableMap

use of com.google.common.collect.ImmutableMap in project druid by druid-io.

the class LookupCoordinatorManagerTest method testDeleteLookupIgnoresMissing.

@Test
public void testDeleteLookupIgnoresMissing() throws Exception {
    final Map<String, Object> ignore = ImmutableMap.<String, Object>of("lookup", "ignore");
    final LookupCoordinatorManager manager = new LookupCoordinatorManager(client, discoverer, mapper, configManager, lookupCoordinatorManagerConfig) {

        @Override
        public Map<String, Map<String, Map<String, Object>>> getKnownLookups() {
            return ImmutableMap.<String, Map<String, Map<String, Object>>>of(LOOKUP_TIER, ImmutableMap.of("ignore", ignore));
        }
    };
    final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost");
    Assert.assertFalse(manager.deleteLookup(LOOKUP_TIER, "foo", auditInfo));
}
Also used : AuditInfo(io.druid.audit.AuditInfo) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) Test(org.junit.Test)
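getKnownLookups() is overridden to return a two-level ImmutableMap, tier -> lookup name -> spec, so deleting the unknown name "foo" is a no-op. A minimal sketch of probing such a nested structure for a missing key ("someTier" and NestedLookupSketch are placeholder names, not Druid identifiers):

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class NestedLookupSketch {

    public static void main(String[] args) {
        Map<String, Object> ignoreSpec = ImmutableMap.<String, Object>of("lookup", "ignore");
        // tier -> (lookup name -> spec); only "ignore" is registered under "someTier".
        Map<String, Map<String, Map<String, Object>>> knownLookups =
                ImmutableMap.<String, Map<String, Map<String, Object>>>of(
                        "someTier", ImmutableMap.of("ignore", ignoreSpec));
        // A delete of "foo" should find nothing, mirroring the Assert.assertFalse above.
        Map<String, Map<String, Object>> tier = knownLookups.get("someTier");
        boolean present = tier != null && tier.containsKey("foo");
        System.out.println(present); // false
    }
}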

Example 100 with ImmutableMap

use of com.google.common.collect.ImmutableMap in project druid by druid-io.

the class LookupCoordinatorManagerTest method testUpdateLookupsAddsNewTier.

@Test
public void testUpdateLookupsAddsNewTier() throws Exception {
    final Map<String, Object> ignore = ImmutableMap.<String, Object>of("prop", "old");
    final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost");
    final LookupCoordinatorManager manager = new LookupCoordinatorManager(client, discoverer, mapper, configManager, lookupCoordinatorManagerConfig) {

        @Override
        public Map<String, Map<String, Map<String, Object>>> getKnownLookups() {
            return ImmutableMap.<String, Map<String, Map<String, Object>>>of(LOOKUP_TIER + "2", ImmutableMap.of("ignore", ignore));
        }
    };
    final Map<String, Object> newSpec = ImmutableMap.<String, Object>of("prop", "new");
    EasyMock.reset(configManager);
    EasyMock.expect(configManager.set(
            EasyMock.eq(LookupCoordinatorManager.LOOKUP_CONFIG_KEY),
            EasyMock.eq(ImmutableMap.<String, Map<String, Map<String, Object>>>of(
                    LOOKUP_TIER + "1", ImmutableMap.of("foo", newSpec),
                    LOOKUP_TIER + "2", ImmutableMap.of("ignore", ignore))),
            EasyMock.eq(auditInfo))).andReturn(true).once();
    EasyMock.replay(configManager);
    Assert.assertTrue(manager.updateLookups(
            ImmutableMap.<String, Map<String, Map<String, Object>>>of(
                    LOOKUP_TIER + "1", ImmutableMap.<String, Map<String, Object>>of("foo", newSpec)),
            auditInfo));
    EasyMock.verify(configManager);
}
Also used : AuditInfo(io.druid.audit.AuditInfo) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) Test(org.junit.Test)
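The expectation passed to configManager.set() is the existing tier plus the newly added one, spelled out as a single nested ImmutableMap literal. When the combined config is not written out by hand, the same shape can be produced by merging with Builder.putAll(); a hedged sketch under assumed tier names ("tier1"/"tier2" and MergeTiersSketch are illustrative only):

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class MergeTiersSketch {

    public static void main(String[] args) {
        // Existing config: one tier holding the old spec.
        Map<String, Map<String, Map<String, Object>>> existing =
                ImmutableMap.<String, Map<String, Map<String, Object>>>of("tier2",
                        ImmutableMap.<String, Map<String, Object>>of("ignore",
                                ImmutableMap.<String, Object>of("prop", "old")));
        // Update: a brand-new tier with the new spec.
        Map<String, Map<String, Map<String, Object>>> update =
                ImmutableMap.<String, Map<String, Map<String, Object>>>of("tier1",
                        ImmutableMap.<String, Map<String, Object>>of("foo",
                                ImmutableMap.<String, Object>of("prop", "new")));
        // putAll merges the two configs; a duplicate tier key would make build()
        // throw IllegalArgumentException instead of silently overwriting.
        Map<String, Map<String, Map<String, Object>>> merged =
                ImmutableMap.<String, Map<String, Map<String, Object>>>builder()
                        .putAll(existing)
                        .putAll(update)
                        .build();
        System.out.println(merged.keySet()); // [tier2, tier1]
    }
}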

Aggregations

ImmutableMap (com.google.common.collect.ImmutableMap)1261 Map (java.util.Map)662 Test (org.junit.Test)309 ImmutableList (com.google.common.collect.ImmutableList)300 List (java.util.List)288 HashMap (java.util.HashMap)269 ImmutableSet (com.google.common.collect.ImmutableSet)211 IOException (java.io.IOException)202 Optional (java.util.Optional)190 Set (java.util.Set)168 ArrayList (java.util.ArrayList)158 Path (java.nio.file.Path)151 Collectors (java.util.stream.Collectors)133 File (java.io.File)117 ImmutableMap.toImmutableMap (com.google.common.collect.ImmutableMap.toImmutableMap)102 Collection (java.util.Collection)93 ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList)85 Test (org.testng.annotations.Test)85 HashSet (java.util.HashSet)83 Collections (java.util.Collections)78