Search in sources :

Example 46 with DataSegment

use of io.druid.timeline.DataSegment in project druid by druid-io.

From the class CoordinatorBasedSegmentHandoffNotifierTest, the method testHandoffCallbackNotCalled:

@Test
public void testHandoffCallbackNotCalled() throws IOException, InterruptedException {
    // Partition 2 of "test_ds"/v1 is still served only by a realtime node, so the
    // notifier must keep the callback registered and must not invoke it.
    final Interval interval = new Interval("2011-04-01/2011-04-02");
    final SegmentDescriptor descriptor = new SegmentDescriptor(interval, "v1", 2);
    final DataSegment segment = new DataSegment("test_ds", interval, "v1", null, null, null, new NumberedShardSpec(2, 3), 0, 0);
    final CoordinatorClient client = EasyMock.createMock(CoordinatorClient.class);
    // Coordinator reports the segment as loaded on a realtime server only.
    EasyMock.expect(client.fetchServerView("test_ds", interval, true))
            .andReturn(
                Lists.newArrayList(
                    new ImmutableSegmentLoadInfo(segment, Sets.newHashSet(createRealtimeServerMetadata("a1")))
                )
            )
            .anyTimes();
    EasyMock.replay(client);
    final CoordinatorBasedSegmentHandoffNotifier notifier = new CoordinatorBasedSegmentHandoffNotifier("test_ds", client, notifierConfig);
    final AtomicBoolean handoffSignalled = new AtomicBoolean(false);
    notifier.registerSegmentHandoffCallback(descriptor, MoreExecutors.sameThreadExecutor(), new Runnable() {

        @Override
        public void run() {
            handoffSignalled.set(true);
        }
    });
    notifier.checkForSegmentHandoffs();
    // The callback should still be registered and should not have been invoked.
    Assert.assertEquals(1, notifier.getHandOffCallbacks().size());
    Assert.assertTrue(notifier.getHandOffCallbacks().containsKey(descriptor));
    Assert.assertFalse(handoffSignalled.get());
    EasyMock.verify(client);
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) SegmentDescriptor(io.druid.query.SegmentDescriptor) ImmutableSegmentLoadInfo(io.druid.client.ImmutableSegmentLoadInfo) CoordinatorClient(io.druid.client.coordinator.CoordinatorClient) DataSegment(io.druid.timeline.DataSegment) NumberedShardSpec(io.druid.timeline.partition.NumberedShardSpec) Interval(org.joda.time.Interval) Test(org.junit.Test)

Example 47 with DataSegment

use of io.druid.timeline.DataSegment in project druid by druid-io.

From the class ClientInfoResourceTest, the method addSegment:

/**
 * Builds a segment for {@code dataSource} over the given interval, registers it on
 * {@code server}, and inserts a single-element chunk for it into {@code timeline}.
 */
private void addSegment(VersionedIntervalTimeline<String, ServerSelector> timeline, DruidServer server, String interval, List<String> dims, List<String> metrics, String version) {
    final Interval segmentInterval = new Interval(interval);
    final DataSegment segment = DataSegment.builder()
            .dataSource(dataSource)
            .interval(segmentInterval)
            .version(version)
            .dimensions(dims)
            .metrics(metrics)
            .size(1)
            .build();
    server.addDataSegment(segment.getIdentifier(), segment);
    final ServerSelector selector = new ServerSelector(segment, null);
    timeline.add(segmentInterval, version, new SingleElementPartitionChunk<ServerSelector>(selector));
}
Also used : ServerSelector(io.druid.client.selector.ServerSelector) DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval)

Example 48 with DataSegment

use of io.druid.timeline.DataSegment in project druid by druid-io.

From the class ServerManagerTest, the method setUp:

/**
 * Builds a ServerManager wired to stub collaborators and preloads it with a fixed
 * set of queryable segments for the "test" and "test2" datasources.
 *
 * <p>The stub SegmentLoader never reports a segment as loaded, fabricates in-memory
 * test segments from the segment's load spec, and refuses to serve segment files;
 * the stub conglomerate routes every query to {@code factory}.
 */
@Before
public void setUp() throws IOException {
    EmittingLogger.registerEmitter(new NoopServiceEmitter());
    // Latches shared with MyQueryRunnerFactory to coordinate query start/yield/notify
    // steps from the test methods.
    queryWaitLatch = new CountDownLatch(1);
    queryWaitYieldLatch = new CountDownLatch(1);
    queryNotifyLatch = new CountDownLatch(1);
    factory = new MyQueryRunnerFactory(queryWaitLatch, queryWaitYieldLatch, queryNotifyLatch);
    serverManagerExec = Executors.newFixedThreadPool(2);
    serverManager = new ServerManager(new SegmentLoader() {

        @Override
        public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException {
            // Always report "not loaded" so every getSegment() call goes through this stub.
            return false;
        }

        @Override
        public Segment getSegment(final DataSegment segment) {
            // Fabricate an in-memory segment from the "version"/"interval" entries of the
            // segment's load spec rather than reading anything from disk.
            return new SegmentForTesting(MapUtils.getString(segment.getLoadSpec(), "version"), (Interval) segment.getLoadSpec().get("interval"));
        }

        @Override
        public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException {
            // Tests never touch segment files.
            throw new UnsupportedOperationException();
        }

        @Override
        public void cleanup(DataSegment segment) throws SegmentLoadingException {
            // Nothing to clean up for in-memory test segments.
        }
    }, new QueryRunnerFactoryConglomerate() {

        @Override
        public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
            // Every query type is handled by the single test factory.
            return (QueryRunnerFactory) factory;
        }
    }, new NoopServiceEmitter(), serverManagerExec, MoreExecutors.sameThreadExecutor(), new DefaultObjectMapper(), new LocalCacheProvider().get(), new CacheConfig());
    // Preload daily segments for "test" (v1 with a v2 override on 04-02) ...
    loadQueryable("test", "1", new Interval("P1d/2011-04-01"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-02"));
    loadQueryable("test", "2", new Interval("P1d/2011-04-02"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-03"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-04"));
    loadQueryable("test", "1", new Interval("P1d/2011-04-05"));
    // ... plus hourly v2 segments over 2011-04-04 (note: hour T04 intentionally absent) ...
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T01"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T02"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T03"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T05"));
    loadQueryable("test", "2", new Interval("PT1h/2011-04-04T06"));
    // ... and two daily segments for a second datasource.
    loadQueryable("test2", "1", new Interval("P1d/2011-04-01"));
    loadQueryable("test2", "1", new Interval("P1d/2011-04-02"));
}
Also used : Query(io.druid.query.Query) SearchQuery(io.druid.query.search.search.SearchQuery) NoopServiceEmitter(io.druid.server.metrics.NoopServiceEmitter) CountDownLatch(java.util.concurrent.CountDownLatch) DataSegment(io.druid.timeline.DataSegment) SegmentLoader(io.druid.segment.loading.SegmentLoader) QueryRunnerFactoryConglomerate(io.druid.query.QueryRunnerFactoryConglomerate) QueryRunnerFactory(io.druid.query.QueryRunnerFactory) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) LocalCacheProvider(io.druid.client.cache.LocalCacheProvider) CacheConfig(io.druid.client.cache.CacheConfig) Interval(org.joda.time.Interval) Before(org.junit.Before)

Example 49 with DataSegment

use of io.druid.timeline.DataSegment in project druid by druid-io.

From the class ZkCoordinatorTest, the method testSegmentLoading1:

/**
   * Steps:
   * 1. removeSegment() schedules a delete runnable that deletes segment files,
   * 2. addSegment() successfully loads the segment and announces it
   * 3. scheduled delete task executes and realizes it should not delete the segment files.
   */
@Test
public void testSegmentLoading1() throws Exception {
    zkCoordinator.start();
    final DataSegment segment = makeSegment("test", "1", new Interval("P1d/2011-04-01"));
    // Schedule removal first; the actual file deletion is deferred to a runnable
    // captured in scheduledRunnable, not executed here.
    zkCoordinator.removeSegment(segment, new DataSegmentChangeCallback() {

        @Override
        public void execute() {
        // do nothing
        }
    });
    // Removal must not have announced anything.
    Assert.assertFalse(segmentsAnnouncedByMe.contains(segment));
    // Re-adding the segment before the delete runnable fires should cancel the pending deletion.
    zkCoordinator.addSegment(segment, new DataSegmentChangeCallback() {

        @Override
        public void execute() {
        // do nothing
        }
    });
    /*
       make sure the scheduled runnable that "deletes" segment files has been executed.
       Because another addSegment() call is executed, which removes the segment from segmentsToDelete field in
       ZkCoordinator, the scheduled runnable will not actually delete segment files.
     */
    for (Runnable runnable : scheduledRunnable) {
        runnable.run();
    }
    // The segment stays announced and its files survive the (now no-op) delete task.
    Assert.assertTrue(segmentsAnnouncedByMe.contains(segment));
    Assert.assertFalse("segment files shouldn't be deleted", segmentLoader.getSegmentsInTrash().contains(segment));
    zkCoordinator.stop();
}
Also used : DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval) Test(org.junit.Test)

Example 50 with DataSegment

use of io.druid.timeline.DataSegment in project druid by druid-io.

From the class ZkCoordinatorTest, the method testLoadCache:

@Test
public void testLoadCache() throws Exception {
    // Build the full set of segments to write into the local info cache:
    // per iteration, 11 segments for "test<i>" and 2 for "test_two<i>".
    final List<DataSegment> cachedSegments = Lists.newLinkedList();
    for (int i = 0; i < COUNT; ++i) {
        final String datasource = "test" + i;
        final String otherDatasource = "test_two" + i;
        cachedSegments.add(makeSegment(datasource, "1", new Interval("P1d/2011-04-01")));
        cachedSegments.add(makeSegment(datasource, "1", new Interval("P1d/2011-04-02")));
        cachedSegments.add(makeSegment(datasource, "2", new Interval("P1d/2011-04-02")));
        cachedSegments.add(makeSegment(datasource, "1", new Interval("P1d/2011-04-03")));
        cachedSegments.add(makeSegment(datasource, "1", new Interval("P1d/2011-04-04")));
        cachedSegments.add(makeSegment(datasource, "1", new Interval("P1d/2011-04-05")));
        cachedSegments.add(makeSegment(datasource, "2", new Interval("PT1h/2011-04-04T01")));
        cachedSegments.add(makeSegment(datasource, "2", new Interval("PT1h/2011-04-04T02")));
        cachedSegments.add(makeSegment(datasource, "2", new Interval("PT1h/2011-04-04T03")));
        cachedSegments.add(makeSegment(datasource, "2", new Interval("PT1h/2011-04-04T05")));
        cachedSegments.add(makeSegment(datasource, "2", new Interval("PT1h/2011-04-04T06")));
        cachedSegments.add(makeSegment(otherDatasource, "1", new Interval("P1d/2011-04-01")));
        cachedSegments.add(makeSegment(otherDatasource, "1", new Interval("P1d/2011-04-02")));
    }
    Collections.sort(cachedSegments);
    for (DataSegment segment : cachedSegments) {
        writeSegmentToCache(segment);
    }
    checkCache(cachedSegments);
    // Nothing is served before the coordinator starts.
    Assert.assertTrue(serverManager.getDataSourceCounts().isEmpty());
    zkCoordinator.start();
    // start() must load every cached segment and announce all of them.
    Assert.assertFalse(serverManager.getDataSourceCounts().isEmpty());
    for (int i = 0; i < COUNT; ++i) {
        Assert.assertEquals(11L, serverManager.getDataSourceCounts().get("test" + i).longValue());
        Assert.assertEquals(2L, serverManager.getDataSourceCounts().get("test_two" + i).longValue());
    }
    Assert.assertEquals(13 * COUNT, announceCount.get());
    zkCoordinator.stop();
    for (DataSegment segment : cachedSegments) {
        deleteSegmentFromCache(segment);
    }
    // The info directory must be empty (and removable) after cleanup.
    Assert.assertEquals(0, infoDir.listFiles().length);
    Assert.assertTrue(infoDir.delete());
}
Also used : DataSegment(io.druid.timeline.DataSegment) Interval(org.joda.time.Interval) Test(org.junit.Test)

Aggregations

DataSegment (io.druid.timeline.DataSegment)293 Test (org.junit.Test)151 Interval (org.joda.time.Interval)136 File (java.io.File)56 DateTime (org.joda.time.DateTime)52 IOException (java.io.IOException)37 DruidServer (io.druid.client.DruidServer)36 Map (java.util.Map)35 DruidDataSource (io.druid.client.DruidDataSource)19 ListeningExecutorService (com.google.common.util.concurrent.ListeningExecutorService)18 List (java.util.List)17 DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper)16 Rule (io.druid.server.coordinator.rules.Rule)16 ImmutableMap (com.google.common.collect.ImmutableMap)15 ForeverLoadRule (io.druid.server.coordinator.rules.ForeverLoadRule)14 IntervalDropRule (io.druid.server.coordinator.rules.IntervalDropRule)13 IntervalLoadRule (io.druid.server.coordinator.rules.IntervalLoadRule)13 CountDownLatch (java.util.concurrent.CountDownLatch)13 GET (javax.ws.rs.GET)13 Produces (javax.ws.rs.Produces)13