Example 11 with MapMaker

Use of com.google.common.collect.MapMaker in project druid by druid-io.

From class RetryQueryRunnerTest, method testNoDuplicateRetry:

@Test
public void testNoDuplicateRetry() throws Exception {
    Map<String, Object> context = new MapMaker().makeMap();
    context.put("count", 0);
    context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
    RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
            if ((int) context.get("count") == 0) {
                // assume 2 missing segments at first run
                ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
                ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 2));
                context.put("count", 1);
                return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
            } else if ((int) context.get("count") == 1) {
                // this is first retry
                Assert.assertTrue("Should retry with 2 missing segments", ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 2);
                // assume only left 1 missing at first retry
                ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 2));
                context.put("count", 2);
                return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
            } else {
                // this is second retry
                Assert.assertTrue("Should retry with 1 missing segments", ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 1);
                // assume no more missing at second retry
                context.put("count", 3);
                return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
            }
        }
    }, (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()), new RetryQueryRunnerConfig() {

        private int numTries = 2;

        private boolean returnPartialResults = false;

        public int getNumTries() {
            return numTries;
        }

        public boolean returnPartialResults() {
            return returnPartialResults;
        }
    }, jsonMapper);
    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
    Assert.assertTrue("Should return a list with 3 elements", ((List) actualResults).size() == 3);
    Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
Also used: TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue), MapMaker (com.google.common.collect.MapMaker), Sequence (io.druid.java.util.common.guava.Sequence), TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest), DateTime (org.joda.time.DateTime), List (java.util.List), Interval (org.joda.time.Interval), Test (org.junit.Test)
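
In these Druid tests, MapMaker with no options configured is simply a convenient way to obtain a mutable ConcurrentMap for the per-query response context, which several runners may read and write. Below is a minimal sketch of the idiom; the key names are illustrative, and on a current Guava/JDK a plain ConcurrentHashMap would serve equally well:

import com.google.common.collect.MapMaker;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class ContextMapSketch {
    public static void main(String[] args) {
        // MapMaker with no options yields a plain ConcurrentMap.
        ConcurrentMap<String, Object> context = new MapMaker().makeMap();
        context.put("count", 0);
        // Key name is illustrative; the tests above use Result.MISSING_SEGMENTS_KEY.
        context.put("missingSegments", new ArrayList<>());

        // Effectively equivalent on modern Guava and JDK versions:
        ConcurrentMap<String, Object> alternative = new ConcurrentHashMap<>();
        alternative.put("count", 0);
        System.out.println(context + " / " + alternative);
    }
}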

Example 12 with MapMaker

Use of com.google.common.collect.MapMaker in project druid by druid-io.

From class RetryQueryRunnerTest, method testRetryMultiple:

@Test
public void testRetryMultiple() throws Exception {
    Map<String, Object> context = new MapMaker().makeMap();
    context.put("count", 0);
    context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
    RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
            if ((int) context.get("count") < 3) {
                ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
                context.put("count", (int) context.get("count") + 1);
                return Sequences.empty();
            } else {
                return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
            }
        }
    }, (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()), new RetryQueryRunnerConfig() {

        private int numTries = 4;

        private boolean returnPartialResults = true;

        public int getNumTries() {
            return numTries;
        }

        public boolean returnPartialResults() {
            return returnPartialResults;
        }
    }, jsonMapper);
    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
    Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1);
    Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
Also used: TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue), MapMaker (com.google.common.collect.MapMaker), Sequence (io.druid.java.util.common.guava.Sequence), TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest), DateTime (org.joda.time.DateTime), List (java.util.List), Interval (org.joda.time.Interval), Test (org.junit.Test)
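
Both retry tests exercise the same contract: after each run, RetryQueryRunner inspects the MISSING_SEGMENTS_KEY list in the context; if segments are missing, it clears the list and re-runs the query up to numTries times, and if segments are still missing afterwards it either returns the partial results or fails, depending on returnPartialResults. Here is a simplified, hypothetical sketch of that loop, not Druid's actual implementation (it omits the MultipleSpecificSegmentSpec query rewrite that the first test asserts on):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

final class RetryLoopSketch {
    // Assumed key name; the tests reference it via Result.MISSING_SEGMENTS_KEY.
    static final String MISSING_SEGMENTS_KEY = "missingSegments";

    static <T> List<T> runWithRetries(Function<Map<String, Object>, List<T>> baseRunner,
                                      Map<String, Object> context,
                                      int numTries,
                                      boolean returnPartialResults) {
        // Results accumulate across attempts, matching the "3 elements" assertion above.
        List<T> results = new ArrayList<>(baseRunner.apply(context));
        for (int retry = 0; retry < numTries; retry++) {
            List<?> missing = (List<?>) context.get(MISSING_SEGMENTS_KEY);
            if (missing == null || missing.isEmpty()) {
                break; // nothing left to retry
            }
            // Each attempt reports its own missing segments, so reset first.
            missing.clear();
            // The real runner also narrows the query to only the previously
            // missing segments before re-running it; that step is omitted here.
            results.addAll(baseRunner.apply(context));
        }
        List<?> missing = (List<?>) context.get(MISSING_SEGMENTS_KEY);
        if (missing != null && !missing.isEmpty() && !returnPartialResults) {
            throw new IllegalStateException("Segments still missing after " + numTries + " retries");
        }
        return results;
    }
}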

Example 13 with MapMaker

Use of com.google.common.collect.MapMaker in project druid by druid-io.

From class DataSourceMetadataQueryTest, method testMaxIngestedEventTime:

@Test
public void testMaxIngestedEventTime() throws Exception {
    final IncrementalIndex rtIndex = new OnheapIncrementalIndex(0L, Granularities.NONE, new AggregatorFactory[] { new CountAggregatorFactory("count") }, 1000);
    final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner((QueryRunnerFactory) new DataSourceMetadataQueryRunnerFactory(QueryRunnerTestHelper.NOOP_QUERYWATCHER), new IncrementalIndexSegment(rtIndex, "test"), null);
    DateTime timestamp = new DateTime(System.currentTimeMillis());
    rtIndex.add(new MapBasedInputRow(timestamp.getMillis(), ImmutableList.of("dim1"), ImmutableMap.<String, Object>of("dim1", "x")));
    DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder().dataSource("testing").build();
    Map<String, Object> context = new MapMaker().makeMap();
    context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
    Iterable<Result<DataSourceMetadataResultValue>> results = Sequences.toList(runner.run(dataSourceMetadataQuery, context), Lists.<Result<DataSourceMetadataResultValue>>newArrayList());
    DataSourceMetadataResultValue val = results.iterator().next().getValue();
    DateTime maxIngestedEventTime = val.getMaxIngestedEventTime();
    Assert.assertEquals(timestamp, maxIngestedEventTime);
}
Also used: IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), IncrementalIndexSegment (io.druid.segment.IncrementalIndexSegment), MapMaker (com.google.common.collect.MapMaker), QueryRunner (io.druid.query.QueryRunner), DateTime (org.joda.time.DateTime), Result (io.druid.query.Result), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), Test (org.junit.Test)

Example 14 with MapMaker

Use of com.google.common.collect.MapMaker in project druid by druid-io.

From class TimeBoundaryQueryRunnerTest, method testTimeBoundaryMin:

@Test
@SuppressWarnings("unchecked")
public void testTimeBoundaryMin() {
    TimeBoundaryQuery timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder().dataSource("testing").bound(TimeBoundaryQuery.MIN_TIME).build();
    Map<String, Object> context = new MapMaker().makeMap();
    context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
    Iterable<Result<TimeBoundaryResultValue>> results = Sequences.toList(runner.run(timeBoundaryQuery, context), Lists.<Result<TimeBoundaryResultValue>>newArrayList());
    TimeBoundaryResultValue val = results.iterator().next().getValue();
    DateTime minTime = val.getMinTime();
    DateTime maxTime = val.getMaxTime();
    Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), minTime);
    Assert.assertNull(maxTime);
}
Also used: MapMaker (com.google.common.collect.MapMaker), DateTime (org.joda.time.DateTime), Result (io.druid.query.Result), Test (org.junit.Test)

Example 15 with MapMaker

Use of com.google.common.collect.MapMaker in project hbase by apache.

From class AccessController, method start:

/* ---- MasterObserver implementation ---- */
@Override
public void start(CoprocessorEnvironment env) throws IOException {
    CompoundConfiguration conf = new CompoundConfiguration();
    conf.add(env.getConfiguration());
    authorizationEnabled = isAuthorizationSupported(conf);
    if (!authorizationEnabled) {
        LOG.warn("The AccessController has been loaded with authorization checks disabled.");
    }
    shouldCheckExecPermission = conf.getBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, AccessControlConstants.DEFAULT_EXEC_PERMISSION_CHECKS);
    cellFeaturesEnabled = (HFile.getFormatVersion(conf) >= HFile.MIN_FORMAT_VERSION_WITH_TAGS);
    if (!cellFeaturesEnabled) {
        LOG.info("A minimum HFile version of " + HFile.MIN_FORMAT_VERSION_WITH_TAGS + " is required to persist cell ACLs. Consider setting " + HFile.FORMAT_VERSION_KEY + " accordingly.");
    }
    ZooKeeperWatcher zk = null;
    if (env instanceof MasterCoprocessorEnvironment) {
        // if running on HMaster
        MasterCoprocessorEnvironment mEnv = (MasterCoprocessorEnvironment) env;
        zk = mEnv.getMasterServices().getZooKeeper();
    } else if (env instanceof RegionServerCoprocessorEnvironment) {
        RegionServerCoprocessorEnvironment rsEnv = (RegionServerCoprocessorEnvironment) env;
        zk = rsEnv.getRegionServerServices().getZooKeeper();
    } else if (env instanceof RegionCoprocessorEnvironment) {
        // if running at region
        regionEnv = (RegionCoprocessorEnvironment) env;
        conf.addStringMap(regionEnv.getRegion().getTableDesc().getConfiguration());
        zk = regionEnv.getRegionServerServices().getZooKeeper();
        compatibleEarlyTermination = conf.getBoolean(AccessControlConstants.CF_ATTRIBUTE_EARLY_OUT, AccessControlConstants.DEFAULT_ATTRIBUTE_EARLY_OUT);
    }
    // set the user-provider.
    this.userProvider = UserProvider.instantiate(env.getConfiguration());
    // If zk is null, or the TableAuthManager cannot be obtained, throw a
    // RuntimeException so that the coprocessor is unloaded.
    if (zk != null) {
        try {
            this.authManager = TableAuthManager.getOrCreate(zk, env.getConfiguration());
        } catch (IOException ioe) {
            throw new RuntimeException("Error obtaining TableAuthManager", ioe);
        }
    } else {
        throw new RuntimeException("Error obtaining TableAuthManager, zk found null.");
    }
    tableAcls = new MapMaker().weakValues().makeMap();
}
Also used: RegionServerCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment), RegionCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment), ZooKeeperWatcher (org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher), MapMaker (com.google.common.collect.MapMaker), CompoundConfiguration (org.apache.hadoop.hbase.CompoundConfiguration), MasterCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment), IOException (java.io.IOException), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException)
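
The HBase example configures MapMaker differently: weakValues() produces a ConcurrentMap whose entries become eligible for removal once their values are no longer strongly referenced anywhere else, so the tableAcls cache cannot by itself keep per-table ACL state alive. A minimal standalone sketch of that behavior (the key and value here are illustrative):

import com.google.common.collect.MapMaker;
import java.util.concurrent.ConcurrentMap;

public class WeakValuesSketch {
    public static void main(String[] args) throws InterruptedException {
        // Entries are dropped once their values become weakly reachable.
        ConcurrentMap<String, Object> cache = new MapMaker().weakValues().makeMap();
        Object acls = new Object();
        cache.put("someTable", acls);
        System.out.println(cache.get("someTable") != null); // true: value still strongly held

        acls = null;  // drop the only strong reference
        System.gc();  // a hint only; collection timing is not guaranteed
        Thread.sleep(100);
        // The entry may now be gone; removal is eventual, not immediate.
        System.out.println(cache.get("someTable"));
    }
}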

Aggregations

Types most often used together with MapMaker across the indexed examples, with occurrence counts:

MapMaker (com.google.common.collect.MapMaker): 32
EventData (com.alibaba.otter.shared.etl.model.EventData): 9
Test (org.junit.Test): 9
BaseDbTest (com.alibaba.otter.node.etl.BaseDbTest): 8
RowKey (com.alibaba.otter.node.etl.load.loader.db.DbLoadMerger.RowKey): 8
EventColumn (com.alibaba.otter.shared.etl.model.EventColumn): 8
Test (org.testng.annotations.Test): 8
List (java.util.List): 7
DateTime (org.joda.time.DateTime): 7
Sequence (io.druid.java.util.common.guava.Sequence): 6
TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest): 5
TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue): 5
Interval (org.joda.time.Interval): 5
Function (com.google.common.base.Function): 4
Result (io.druid.query.Result): 3
ByteBuffer (java.nio.ByteBuffer): 3
ConfigException (com.alibaba.otter.shared.common.model.config.ConfigException): 2
Channel (com.alibaba.otter.shared.common.model.config.channel.Channel): 2
Node (com.alibaba.otter.shared.common.model.config.node.Node): 2
ComputeFunction (com.alibaba.otter.shared.common.utils.cache.RefreshMemoryMirror.ComputeFunction): 2