Example 36 with DruidNode

Use of org.apache.druid.server.DruidNode in project druid by druid-io.

From class ForkingTaskRunnerTest, method testTaskStatusWhenTaskProcessFails:

@Test
public void testTaskStatusWhenTaskProcessFails() throws ExecutionException, InterruptedException {
    ForkingTaskRunner forkingTaskRunner = new ForkingTaskRunner(
            new ForkingTaskRunnerConfig(),
            new TaskConfig(null, null, null, null, ImmutableList.of(), false, new Period("PT0S"), new Period("PT10S"), ImmutableList.of(), false, false, TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name()),
            new WorkerConfig(),
            new Properties(),
            new NoopTaskLogs(),
            new DefaultObjectMapper(),
            new DruidNode("middleManager", "host", false, 8091, null, true, false),
            new StartupLoggingConfig()) {

        @Override
        ProcessHolder runTaskProcess(List<String> command, File logFile, TaskLocation taskLocation) {
            ProcessHolder processHolder = Mockito.mock(ProcessHolder.class);
            Mockito.doNothing().when(processHolder).registerWithCloser(ArgumentMatchers.any());
            Mockito.doNothing().when(processHolder).shutdown();
            return processHolder;
        }

        @Override
        int waitForTaskProcessToComplete(Task task, ProcessHolder processHolder, File logFile, File reportsFile) {
            // Emulate task process failure
            return 1;
        }
    };
    final TaskStatus status = forkingTaskRunner.run(NoopTask.create()).get();
    Assert.assertEquals(TaskState.FAILED, status.getStatusCode());
    Assert.assertEquals("Task execution process exited unsuccessfully with code[1]. See middleManager logs for more details.", status.getErrorMsg());
}
Also used : NoopTaskLogs(org.apache.druid.tasklogs.NoopTaskLogs) Task(org.apache.druid.indexing.common.task.Task) NoopTask(org.apache.druid.indexing.common.task.NoopTask) Period(org.joda.time.Period) TaskConfig(org.apache.druid.indexing.common.config.TaskConfig) Properties(java.util.Properties) TaskStatus(org.apache.druid.indexer.TaskStatus) TaskLocation(org.apache.druid.indexer.TaskLocation) StartupLoggingConfig(org.apache.druid.server.log.StartupLoggingConfig) ForkingTaskRunnerConfig(org.apache.druid.indexing.overlord.config.ForkingTaskRunnerConfig) WorkerConfig(org.apache.druid.indexing.worker.config.WorkerConfig) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) DruidNode(org.apache.druid.server.DruidNode) File(java.io.File) Test(org.junit.Test)
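
The seven-argument DruidNode constructor used above is positional and easy to misread. Here is a minimal sketch of what each argument appears to stand for, inferred from the calls in these examples; the parameter names are assumptions, not the authoritative signature:

import org.apache.druid.server.DruidNode;

// Assumed parameter order: (serviceName, host, bindOnHost, plaintextPort, tlsPort, enablePlaintextPort, enableTlsPort)
DruidNode middleManagerNode = new DruidNode(
        "middleManager",   // serviceName: the task failure message above refers to it
        "host",            // host name to announce
        false,             // bindOnHost
        8091,              // plaintextPort
        null,              // tlsPort: none, since TLS is disabled below
        true,              // enablePlaintextPort
        false              // enableTlsPort
);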

Example 37 with DruidNode

Use of org.apache.druid.server.DruidNode in project druid by druid-io.

From class EmitterModule, method getServiceEmitter:

@Provides
@ManageLifecycle
public ServiceEmitter getServiceEmitter(@Self Supplier<DruidNode> configSupplier, Emitter emitter, @ExtraServiceDimensions Map<String, String> extraServiceDimensions) {
    final DruidNode config = configSupplier.get();
    log.info("Using emitter [%s] for metrics and alerts, with dimensions [%s].", emitter, extraServiceDimensions);
    final ServiceEmitter retVal = new ServiceEmitter(config.getServiceName(), config.getHostAndPortToUse(), emitter, ImmutableMap.copyOf(extraServiceDimensions));
    EmittingLogger.registerEmitter(retVal);
    return retVal;
}
Also used : ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) DruidNode(org.apache.druid.server.DruidNode) ManageLifecycle(org.apache.druid.guice.ManageLifecycle) Provides(com.google.inject.Provides)
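
As a rough illustration of how the @Self DruidNode feeds the emitter's identity, a small sketch follows. The exact behavior of getHostAndPortToUse() is an assumption here: it is expected to prefer the TLS address when TLS is enabled and fall back to the plaintext one otherwise.

import org.apache.druid.server.DruidNode;

DruidNode plaintextNode = new DruidNode("druid/broker", "broker-host", false, 8082, null, true, false);
// Assumed result: "broker-host:8082", since only the plaintext port is enabled.
String plaintextHostAndPort = plaintextNode.getHostAndPortToUse();

DruidNode tlsNode = new DruidNode("druid/broker", "broker-host", false, 8082, 8282, true, true);
// Assumed result: "broker-host:8282", preferring the TLS port when it is enabled.
String tlsHostAndPort = tlsNode.getHostAndPortToUse();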

Example 38 with DruidNode

Use of org.apache.druid.server.DruidNode in project druid by druid-io.

From class HttpServerInventoryViewTest, method testSimple:

@Test(timeout = 60_000L)
public void testSimple() throws Exception {
    ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
    TestDruidNodeDiscovery druidNodeDiscovery = new TestDruidNodeDiscovery();
    DruidNodeDiscoveryProvider druidNodeDiscoveryProvider = EasyMock.createMock(DruidNodeDiscoveryProvider.class);
    EasyMock.expect(druidNodeDiscoveryProvider.getForService(DataNodeService.DISCOVERY_SERVICE_KEY)).andReturn(druidNodeDiscovery);
    EasyMock.replay(druidNodeDiscoveryProvider);
    final DataSegment segment1 = new DataSegment("test1", Intervals.of("2014/2015"), "v1", null, null, null, null, 0, 0);
    final DataSegment segment2 = new DataSegment("test2", Intervals.of("2014/2015"), "v1", null, null, null, null, 0, 0);
    final DataSegment segment3 = new DataSegment("test3", Intervals.of("2014/2015"), "v1", null, null, null, null, 0, 0);
    final DataSegment segment4 = new DataSegment("test4", Intervals.of("2014/2015"), "v1", null, null, null, null, 0, 0);
    final DataSegment segment5 = new DataSegment("non-loading-datasource", Intervals.of("2014/2015"), "v1", null, null, null, null, 0, 0);
    // Each queued response simulates one segment-list poll from the mocked historical:
    // 1) initial snapshot loading segment1,
    // 2) drop segment1, load segment2 and segment3,
    // 3) a "force reset counter" snapshot that resets the change-request counter,
    // 4) a full snapshot loading segment3, segment4 and segment5 (segment5 is filtered out by the predicate below).
    TestHttpClient httpClient = new TestHttpClient(ImmutableList.of(
            Futures.immediateFuture(new ByteArrayInputStream(jsonMapper.writerWithType(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF).writeValueAsBytes(
                    new ChangeRequestsSnapshot(false, null, ChangeRequestHistory.Counter.ZERO, ImmutableList.of(new SegmentChangeRequestLoad(segment1)))))),
            Futures.immediateFuture(new ByteArrayInputStream(jsonMapper.writerWithType(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF).writeValueAsBytes(
                    new ChangeRequestsSnapshot(false, null, ChangeRequestHistory.Counter.ZERO, ImmutableList.of(new SegmentChangeRequestDrop(segment1), new SegmentChangeRequestLoad(segment2), new SegmentChangeRequestLoad(segment3)))))),
            Futures.immediateFuture(new ByteArrayInputStream(jsonMapper.writerWithType(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF).writeValueAsBytes(
                    new ChangeRequestsSnapshot(true, "force reset counter", ChangeRequestHistory.Counter.ZERO, ImmutableList.of())))),
            Futures.immediateFuture(new ByteArrayInputStream(jsonMapper.writerWithType(HttpServerInventoryView.SEGMENT_LIST_RESP_TYPE_REF).writeValueAsBytes(
                    new ChangeRequestsSnapshot(false, null, ChangeRequestHistory.Counter.ZERO, ImmutableList.of(new SegmentChangeRequestLoad(segment3), new SegmentChangeRequestLoad(segment4), new SegmentChangeRequestLoad(segment5))))))));
    DiscoveryDruidNode druidNode = new DiscoveryDruidNode(new DruidNode("service", "host", false, 8080, null, true, false), NodeRole.HISTORICAL, ImmutableMap.of(DataNodeService.DISCOVERY_SERVICE_KEY, new DataNodeService("tier", 1000, ServerType.HISTORICAL, 0)));
    HttpServerInventoryView httpServerInventoryView = new HttpServerInventoryView(jsonMapper, httpClient, druidNodeDiscoveryProvider, (pair) -> !pair.rhs.getDataSource().equals("non-loading-datasource"), new HttpServerInventoryViewConfig(null, null, null), "test");
    CountDownLatch initializeCallback1 = new CountDownLatch(1);
    Map<SegmentId, CountDownLatch> segmentAddLathces = ImmutableMap.of(segment1.getId(), new CountDownLatch(1), segment2.getId(), new CountDownLatch(1), segment3.getId(), new CountDownLatch(1), segment4.getId(), new CountDownLatch(1));
    Map<SegmentId, CountDownLatch> segmentDropLatches = ImmutableMap.of(segment1.getId(), new CountDownLatch(1), segment2.getId(), new CountDownLatch(1));
    httpServerInventoryView.registerSegmentCallback(Execs.directExecutor(), new ServerView.SegmentCallback() {

        @Override
        public ServerView.CallbackAction segmentAdded(DruidServerMetadata server, DataSegment segment) {
            segmentAddLathces.get(segment.getId()).countDown();
            return ServerView.CallbackAction.CONTINUE;
        }

        @Override
        public ServerView.CallbackAction segmentRemoved(DruidServerMetadata server, DataSegment segment) {
            segmentDropLatches.get(segment.getId()).countDown();
            return ServerView.CallbackAction.CONTINUE;
        }

        @Override
        public ServerView.CallbackAction segmentViewInitialized() {
            initializeCallback1.countDown();
            return ServerView.CallbackAction.CONTINUE;
        }
    });
    final CountDownLatch serverRemovedCalled = new CountDownLatch(1);
    httpServerInventoryView.registerServerRemovedCallback(Execs.directExecutor(), new ServerView.ServerRemovedCallback() {

        @Override
        public ServerView.CallbackAction serverRemoved(DruidServer server) {
            if (server.getName().equals("host:8080")) {
                serverRemovedCalled.countDown();
                return ServerView.CallbackAction.CONTINUE;
            } else {
                throw new RE("Unknown server [%s]", server.getName());
            }
        }
    });
    httpServerInventoryView.start();
    druidNodeDiscovery.listener.nodesAdded(ImmutableList.of(druidNode));
    initializeCallback1.await();
    segmentAddLathces.get(segment1.getId()).await();
    segmentDropLatches.get(segment1.getId()).await();
    segmentAddLathces.get(segment2.getId()).await();
    segmentAddLathces.get(segment3.getId()).await();
    segmentAddLathces.get(segment4.getId()).await();
    segmentDropLatches.get(segment2.getId()).await();
    DruidServer druidServer = httpServerInventoryView.getInventoryValue("host:8080");
    Assert.assertEquals(ImmutableMap.of(segment3.getId(), segment3, segment4.getId(), segment4), Maps.uniqueIndex(druidServer.iterateAllSegments(), DataSegment::getId));
    druidNodeDiscovery.listener.nodesRemoved(ImmutableList.of(druidNode));
    serverRemovedCalled.await();
    Assert.assertNull(httpServerInventoryView.getInventoryValue("host:8080"));
    EasyMock.verify(druidNodeDiscoveryProvider);
    httpServerInventoryView.stop();
}
Also used : DataSegment(org.apache.druid.timeline.DataSegment) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) SegmentChangeRequestLoad(org.apache.druid.server.coordination.SegmentChangeRequestLoad) SegmentId(org.apache.druid.timeline.SegmentId) DruidServerMetadata(org.apache.druid.server.coordination.DruidServerMetadata) CountDownLatch(java.util.concurrent.CountDownLatch) RE(org.apache.druid.java.util.common.RE) SegmentChangeRequestDrop(org.apache.druid.server.coordination.SegmentChangeRequestDrop) DiscoveryDruidNode(org.apache.druid.discovery.DiscoveryDruidNode) ByteArrayInputStream(java.io.ByteArrayInputStream) DruidNodeDiscoveryProvider(org.apache.druid.discovery.DruidNodeDiscoveryProvider) ChangeRequestsSnapshot(org.apache.druid.server.coordination.ChangeRequestsSnapshot) DiscoveryDruidNode(org.apache.druid.discovery.DiscoveryDruidNode) DruidNode(org.apache.druid.server.DruidNode) DataNodeService(org.apache.druid.discovery.DataNodeService) Test(org.junit.Test)
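
The TestDruidNodeDiscovery helper referenced above is not shown in this snippet. A minimal stand-in might look like the following sketch, assuming the DruidNodeDiscovery contract of getAllNodes() plus registerListener(), with the test then driving listener.nodesAdded()/nodesRemoved() directly; the nodeViewInitialized() call is also an assumption about that contract.

import java.util.Collection;
import java.util.Collections;
import org.apache.druid.discovery.DiscoveryDruidNode;
import org.apache.druid.discovery.DruidNodeDiscovery;

// Hypothetical stand-in for the TestDruidNodeDiscovery helper used in the test above.
class CapturingNodeDiscovery implements DruidNodeDiscovery {
    Listener listener;

    @Override
    public Collection<DiscoveryDruidNode> getAllNodes() {
        return Collections.emptyList();  // the test feeds nodes through the listener instead
    }

    @Override
    public void registerListener(Listener listener) {
        this.listener = listener;        // captured so the test can call nodesAdded()/nodesRemoved()
        listener.nodeViewInitialized();  // assumed part of the Listener contract: signals the initial view
    }
}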

Example 39 with DruidNode

Use of org.apache.druid.server.DruidNode in project druid by druid-io.

From class DiscoveryDruidNodeTest, method testSerdeWithDataNodeAndLookupNodeServices:

@Test
public void testSerdeWithDataNodeAndLookupNodeServices() throws JsonProcessingException {
    final ObjectMapper mapper = createObjectMapper(ImmutableList.of());
    final DiscoveryDruidNode node = new DiscoveryDruidNode(
            new DruidNode("druid/broker", "druid-broker", false, 8082, -1, 8282, true, true),
            NodeRole.BROKER,
            ImmutableMap.of(
                    DataNodeService.DISCOVERY_SERVICE_KEY,
                    new DataNodeService("_default_tier", 1000000000, ServerType.BROKER, 0),
                    LookupNodeService.DISCOVERY_SERVICE_KEY,
                    new LookupNodeService("lookup_tier")));
    final String json = mapper.writeValueAsString(node);
    Assert.assertEquals(node, mapper.readValue(json, DiscoveryDruidNode.class));
}
Also used : DruidNode(org.apache.druid.server.DruidNode) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Test(org.junit.Test)
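
The createObjectMapper(...) helper is not shown here. For the round trip above to work, the mapper must be able to resolve the polymorphic "services" payloads back to their concrete types; the sketch below shows the general idea, with the explicit registerSubtypes call being an assumption about how such a helper might be wired rather than the actual implementation:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.discovery.DataNodeService;
import org.apache.druid.discovery.LookupNodeService;
import org.apache.druid.jackson.DefaultObjectMapper;

// Sketch: make the "services" map deserializable by registering the concrete
// DruidService subtypes this test relies on (assumed wiring, not the real helper).
ObjectMapper mapper = new DefaultObjectMapper();
mapper.registerSubtypes(DataNodeService.class, LookupNodeService.class);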

Example 40 with DruidNode

Use of org.apache.druid.server.DruidNode in project druid by druid-io.

From class DiscoveryDruidNodeTest, method testDeserialize_duplicateKeysWithDifferentValus_shouldIgnoreDataNodeService:

@Test
public void testDeserialize_duplicateKeysWithDifferentValus_shouldIgnoreDataNodeService() throws JsonProcessingException {
    final ObjectMapper mapper = createObjectMapper(ImmutableList.of());
    // Note the duplicate "maxSize" key inside dataNodeService below; deserialization is expected
    // to drop that service entry rather than fail the whole DiscoveryDruidNode.
    final String json = "{\n"
            + "  \"druidNode\" : {\n"
            + "    \"service\" : \"druid/broker\",\n"
            + "    \"host\" : \"druid-broker\",\n"
            + "    \"bindOnHost\" : false,\n"
            + "    \"plaintextPort\" : 8082,\n"
            + "    \"port\" : -1,\n"
            + "    \"tlsPort\" : 8282,\n"
            + "    \"enablePlaintextPort\" : true,\n"
            + "    \"enableTlsPort\" : true\n"
            + "  },\n"
            + "  \"nodeType\" : \"broker\",\n"
            + "  \"services\" : {\n"
            + "    \"dataNodeService\" : {\n"
            + "      \"type\" : \"dataNodeService\",\n"
            + "      \"tier\" : \"_default_tier\",\n"
            + "      \"maxSize\" : 1000000000,\n"
            + "      \"maxSize\" : 10,\n"
            + "      \"serverType\" : \"broker\",\n"
            + "      \"priority\" : 0\n"
            + "    }\n"
            + "  }\n"
            + "}";
    Assert.assertEquals(
            new DiscoveryDruidNode(new DruidNode("druid/broker", "druid-broker", false, 8082, -1, 8282, true, true), NodeRole.BROKER, ImmutableMap.of()),
            mapper.readValue(json, DiscoveryDruidNode.class));
}
Also used : DruidNode(org.apache.druid.server.DruidNode) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Test(org.junit.Test)
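
One way to read this test: a plain Jackson parse accepts duplicate keys silently (the last value wins) and only rejects them when duplicate detection is switched on, and the discovery deserializer evidently drops the unparsable dataNodeService entry instead of failing the whole node. The sketch below shows the relevant Jackson switch; its connection to Druid's internal handling is an assumption.

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;

// Jackson only treats a duplicate field such as the repeated "maxSize" above as an error
// when strict duplicate detection is enabled; by default the later value silently wins.
ObjectMapper strictMapper = new DefaultObjectMapper();
strictMapper.enable(JsonParser.Feature.STRICT_DUPLICATE_DETECTION);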

Aggregations

Classes that co-occur with DruidNode across the collected examples, with their occurrence counts:

DruidNode (org.apache.druid.server.DruidNode): 61
Test (org.junit.Test): 41
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 25
Self (org.apache.druid.guice.annotations.Self): 19
ImmutableList (com.google.common.collect.ImmutableList): 18
List (java.util.List): 18
DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper): 16
ZkPathsConfig (org.apache.druid.server.initialization.ZkPathsConfig): 16
Binder (com.google.inject.Binder): 15
Injector (com.google.inject.Injector): 15
AtomicReference (java.util.concurrent.atomic.AtomicReference): 15
DiscoveryDruidNode (org.apache.druid.discovery.DiscoveryDruidNode): 14
HashSet (java.util.HashSet): 12
ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService): 12
DruidNodeDiscoveryProvider (org.apache.druid.discovery.DruidNodeDiscoveryProvider): 12
TaskStatus (org.apache.druid.indexer.TaskStatus): 12
Module (com.google.inject.Module): 11
CuratorFramework (org.apache.curator.framework.CuratorFramework): 11
NoopTask (org.apache.druid.indexing.common.task.NoopTask): 11
Task (org.apache.druid.indexing.common.task.Task): 11