
Example 6 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From class InventoryViewUtils, method getSecuredDataSources:

public static Set<DruidDataSource> getSecuredDataSources(InventoryView inventoryView, final AuthorizationInfo authorizationInfo) {
    if (authorizationInfo == null) {
        throw new ISE("Invalid to call a secured method with null AuthorizationInfo!!");
    } else {
        // Cache per-(resource, action) access decisions so each pair is authorized at most once.
        final Map<Pair<Resource, Action>, Access> resourceAccessMap = new HashMap<>();
        return ImmutableSet.copyOf(Iterables.filter(getDataSources(inventoryView), new Predicate<DruidDataSource>() {

            @Override
            public boolean apply(DruidDataSource input) {
                Resource resource = new Resource(input.getName(), ResourceType.DATASOURCE);
                Action action = Action.READ;
                Pair<Resource, Action> key = new Pair<>(resource, action);
                if (resourceAccessMap.containsKey(key)) {
                    return resourceAccessMap.get(key).isAllowed();
                } else {
                    Access access = authorizationInfo.isAuthorized(key.lhs, key.rhs);
                    resourceAccessMap.put(key, access);
                    return access.isAllowed();
                }
            }
        }));
    }
}
Also used: Action (io.druid.server.security.Action), HashMap (java.util.HashMap), Access (io.druid.server.security.Access), Resource (io.druid.server.security.Resource), ISE (io.druid.java.util.common.ISE), DruidDataSource (io.druid.client.DruidDataSource), Pair (io.druid.java.util.common.Pair), Predicate (com.google.common.base.Predicate)
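
For orientation, here is a minimal caller sketch (not part of the Druid sources). It assumes AuthorizationInfo is exactly the single-method interface exercised above and that Access exposes a boolean constructor, so treat it as illustrative only:

public static Set<DruidDataSource> allVisibleDataSources(InventoryView inventoryView) {
    // Hypothetical helper, for illustration: a permissive authorizer that grants
    // every (resource, action) pair. A real implementation would consult an ACL.
    AuthorizationInfo allowAll = new AuthorizationInfo() {
        @Override
        public Access isAuthorized(Resource resource, Action action) {
            // Assumes Access has an Access(boolean allowed) constructor.
            return new Access(true);
        }
    };
    return InventoryViewUtils.getSecuredDataSources(inventoryView, allowAll);
}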

Example 7 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From class MetadataResource, method getDatabaseSegmentDataSourceSegments:

@GET
@Path("/datasources/{dataSourceName}/segments")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getDatabaseSegmentDataSourceSegments(@PathParam("dataSourceName") String dataSourceName, @QueryParam("full") String full) {
    DruidDataSource dataSource = metadataSegmentManager.getInventoryValue(dataSourceName);
    if (dataSource == null) {
        return Response.status(Response.Status.NOT_FOUND).build();
    }
    Response.ResponseBuilder builder = Response.status(Response.Status.OK);
    // ?full returns the complete segment metadata; otherwise only identifiers are listed.
    if (full != null) {
        return builder.entity(dataSource.getSegments()).build();
    }
    return builder.entity(Iterables.transform(dataSource.getSegments(), new Function<DataSegment, String>() {

        @Override
        public String apply(DataSegment segment) {
            return segment.getIdentifier();
        }
    })).build();
}
Also used: Response (javax.ws.rs.core.Response), DruidDataSource (io.druid.client.DruidDataSource), DataSegment (io.druid.timeline.DataSegment), Path (javax.ws.rs.Path), ResourceFilters (com.sun.jersey.spi.container.ResourceFilters), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
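
A hedged client-side sketch for exercising this endpoint with the Jersey 1.x client (the server side above already depends on com.sun.jersey). The host, port, datasource name, and the /druid/coordinator/v1/metadata prefix are assumptions; the prefix comes from the resource's class-level @Path, so verify it against your version:

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;
import javax.ws.rs.core.MediaType;

Client client = Client.create();
// "localhost:8081", "wikipedia", and the route prefix are illustrative placeholders.
WebResource resource = client.resource(
    "http://localhost:8081/druid/coordinator/v1/metadata/datasources/wikipedia/segments");
// Omitting ?full asks for segment identifiers only, matching the code above.
String segmentIds = resource.accept(MediaType.APPLICATION_JSON).get(String.class);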

Example 8 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From class TiersResource, method getTierDatasources:

@GET
@Path("/{tierName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getTierDatasources(@PathParam("tierName") String tierName, @QueryParam("simple") String simple) {
    // ?simple rolls segments up into per-(dataSource, interval) size and count entries.
    if (simple != null) {
        Table<String, Interval, Map<String, Object>> retVal = HashBasedTable.create();
        for (DruidServer druidServer : serverInventoryView.getInventory()) {
            if (druidServer.getTier().equalsIgnoreCase(tierName)) {
                for (DataSegment dataSegment : druidServer.getSegments().values()) {
                    Map<String, Object> properties = retVal.get(dataSegment.getDataSource(), dataSegment.getInterval());
                    if (properties == null) {
                        properties = Maps.newHashMap();
                        retVal.put(dataSegment.getDataSource(), dataSegment.getInterval(), properties);
                    }
                    properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                    properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
                }
            }
        }
        return Response.ok(retVal.rowMap()).build();
    }
    Set<String> retVal = Sets.newHashSet();
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
        if (druidServer.getTier().equalsIgnoreCase(tierName)) {
            retVal.addAll(Lists.newArrayList(Iterables.transform(druidServer.getDataSources(), new Function<DruidDataSource, String>() {

                @Override
                public String apply(DruidDataSource input) {
                    return input.getName();
                }
            })));
        }
    }
    return Response.ok(retVal).build();
}
Also used: DruidServer (io.druid.client.DruidServer), Map (java.util.Map), DataSegment (io.druid.timeline.DataSegment), DruidDataSource (io.druid.client.DruidDataSource), Interval (org.joda.time.Interval), Path (javax.ws.rs.Path), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
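
The ?simple branch leans on Guava's HashBasedTable. This small standalone sketch (plain interval strings standing in for the Joda Interval keys used above) shows the nested-map shape that rowMap() produces, which is what the OK response serializes:

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import java.util.Map;

Table<String, String, Long> sizes = HashBasedTable.create();
sizes.put("wikipedia", "2010-01-01/P1D", 512L);
sizes.put("wikipedia", "2010-01-02/P1D", 1024L);
// rowMap() nests row keys over column keys:
// {wikipedia={2010-01-01/P1D=512, 2010-01-02/P1D=1024}}
Map<String, Map<String, Long>> nested = sizes.rowMap();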

Example 9 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From class DruidCoordinatorTest, method testCoordinatorRun:

@Test(timeout = 60_000L)
public void testCoordinatorRun() throws Exception {
    String dataSource = "dataSource1";
    String tier = "hot";
    // Setup MetadataRuleManager
    Rule foreverLoadRule = new ForeverLoadRule(ImmutableMap.of(tier, 2));
    EasyMock.expect(metadataRuleManager.getRulesWithDefault(EasyMock.anyString())).andReturn(ImmutableList.of(foreverLoadRule)).atLeastOnce();
    metadataRuleManager.stop();
    EasyMock.expectLastCall().once();
    EasyMock.replay(metadataRuleManager);
    // Setup MetadataSegmentManager
    DruidDataSource[] druidDataSources = { new DruidDataSource(dataSource, Collections.<String, String>emptyMap()) };
    final DataSegment dataSegment = new DataSegment(dataSource, new Interval("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 0);
    druidDataSources[0].addSegment("0", dataSegment);
    EasyMock.expect(databaseSegmentManager.isStarted()).andReturn(true).anyTimes();
    EasyMock.expect(databaseSegmentManager.getInventory()).andReturn(ImmutableList.of(druidDataSources[0])).atLeastOnce();
    EasyMock.replay(databaseSegmentManager);
    ImmutableDruidDataSource immutableDruidDataSource = EasyMock.createNiceMock(ImmutableDruidDataSource.class);
    EasyMock.expect(immutableDruidDataSource.getSegments()).andReturn(ImmutableSet.of(dataSegment)).atLeastOnce();
    EasyMock.replay(immutableDruidDataSource);
    // Setup ServerInventoryView
    druidServer = new DruidServer("server1", "localhost", 5L, "historical", tier, 0);
    loadManagementPeons.put("server1", loadQueuePeon);
    EasyMock.expect(serverInventoryView.getInventory()).andReturn(ImmutableList.of(druidServer)).atLeastOnce();
    EasyMock.expect(serverInventoryView.isStarted()).andReturn(true).anyTimes();
    EasyMock.replay(serverInventoryView);
    coordinator.start();
    // Wait for this coordinator to become leader
    leaderAnnouncerLatch.await();
    // This coordinator should be leader by now
    Assert.assertTrue(coordinator.isLeader());
    Assert.assertEquals(druidNode.getHostAndPort(), coordinator.getCurrentLeader());
    final CountDownLatch assignSegmentLatch = new CountDownLatch(1);
    pathChildrenCache.getListenable().addListener(new PathChildrenCacheListener() {

        @Override
        public void childEvent(CuratorFramework curatorFramework, PathChildrenCacheEvent pathChildrenCacheEvent) throws Exception {
            if (pathChildrenCacheEvent.getType().equals(PathChildrenCacheEvent.Type.CHILD_ADDED)) {
                // The coordinator should try to assign the segment to the historical server,
                // so simulate the historical loading it
                druidServer.addDataSegment(dataSegment.getIdentifier(), dataSegment);
                assignSegmentLatch.countDown();
            }
        }
    });
    pathChildrenCache.start();
    assignSegmentLatch.await();
    Assert.assertEquals(ImmutableMap.of(dataSource, 100.0), coordinator.getLoadStatus());
    curator.delete().guaranteed().forPath(ZKPaths.makePath(LOADPATH, dataSegment.getIdentifier()));
    // Wait for coordinator thread to run so that replication status is updated
    while (coordinator.getSegmentAvailability().snapshot().get(dataSource) != 0) {
        Thread.sleep(50);
    }
    Map segmentAvailability = coordinator.getSegmentAvailability().snapshot();
    Assert.assertEquals(1, segmentAvailability.size());
    Assert.assertEquals(0L, segmentAvailability.get(dataSource));
    while (coordinator.getLoadPendingDatasources().get(dataSource).get() > 0) {
        Thread.sleep(50);
    }
    // Wait for the historical data to be updated
    long startMillis = System.currentTimeMillis();
    long coordinatorRunPeriodMillis = druidCoordinatorConfig.getCoordinatorPeriod().getMillis();
    while (System.currentTimeMillis() - startMillis < coordinatorRunPeriodMillis) {
        Thread.sleep(100);
    }
    Map<String, CountingMap<String>> replicationStatus = coordinator.getReplicationStatus();
    Assert.assertNotNull(replicationStatus);
    Assert.assertEquals(1, replicationStatus.entrySet().size());
    CountingMap<String> dataSourceMap = replicationStatus.get(tier);
    Assert.assertNotNull(dataSourceMap);
    Assert.assertEquals(1, dataSourceMap.size());
    Assert.assertNotNull(dataSourceMap.get(dataSource));
    // The load rule asks for 2 replicas, so 1 replica should still be pending
    while (dataSourceMap.get(dataSource).get() != 1L) {
        Thread.sleep(50);
    }
    coordinator.stop();
    leaderUnannouncerLatch.await();
    Assert.assertFalse(coordinator.isLeader());
    Assert.assertNull(coordinator.getCurrentLeader());
    EasyMock.verify(serverInventoryView);
    EasyMock.verify(metadataRuleManager);
}
Also used: ImmutableDruidDataSource (io.druid.client.ImmutableDruidDataSource), PathChildrenCacheListener (org.apache.curator.framework.recipes.cache.PathChildrenCacheListener), PathChildrenCacheEvent (org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent), ImmutableDruidServer (io.druid.client.ImmutableDruidServer), DruidServer (io.druid.client.DruidServer), CountDownLatch (java.util.concurrent.CountDownLatch), DruidDataSource (io.druid.client.DruidDataSource), DataSegment (io.druid.timeline.DataSegment), CountingMap (io.druid.collections.CountingMap), CuratorFramework (org.apache.curator.framework.CuratorFramework), ForeverLoadRule (io.druid.server.coordinator.rules.ForeverLoadRule), Rule (io.druid.server.coordinator.rules.Rule), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), HashMap (java.util.HashMap), ConcurrentMap (java.util.concurrent.ConcurrentMap), Interval (org.joda.time.Interval), Test (org.junit.Test)
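
The test follows EasyMock's record/replay/verify lifecycle throughout. Isolating the MetadataRuleManager interaction makes the pattern easier to see; this is a condensed sketch reusing names from the test above, not additional test code:

// Record phase: declare the calls the code under test is expected to make.
MetadataRuleManager ruleManager = EasyMock.createMock(MetadataRuleManager.class);
EasyMock.expect(ruleManager.getRulesWithDefault(EasyMock.anyString()))
        .andReturn(ImmutableList.<Rule>of(foreverLoadRule))
        .atLeastOnce();
// Replay phase: the mock now answers calls according to the recording.
EasyMock.replay(ruleManager);
// ... run the coordinator, which invokes getRulesWithDefault(...) ...
// Verify phase: fail the test if any recorded expectation went unmet.
EasyMock.verify(ruleManager);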

Example 10 with DruidDataSource

Use of io.druid.client.DruidDataSource in project druid by druid-io.

From class DatasourcesResourceTest, method setUp:

@Before
public void setUp() {
    request = EasyMock.createStrictMock(HttpServletRequest.class);
    inventoryView = EasyMock.createStrictMock(CoordinatorServerView.class);
    server = EasyMock.createStrictMock(DruidServer.class);
    dataSegmentList = new ArrayList<>();
    dataSegmentList.add(new DataSegment("datasource1", new Interval("2010-01-01/P1D"), null, null, null, null, null, 0x9, 10));
    dataSegmentList.add(new DataSegment("datasource1", new Interval("2010-01-22/P1D"), null, null, null, null, null, 0x9, 20));
    dataSegmentList.add(new DataSegment("datasource2", new Interval("2010-01-01/P1D"), null, null, null, null, null, 0x9, 30));
    listDataSources = new ArrayList<>();
    listDataSources.add(new DruidDataSource("datasource1", new HashMap()).addSegment("part1", dataSegmentList.get(0)));
    listDataSources.add(new DruidDataSource("datasource2", new HashMap()).addSegment("part1", dataSegmentList.get(1)));
}
Also used: HttpServletRequest (javax.servlet.http.HttpServletRequest), HashMap (java.util.HashMap), DruidServer (io.druid.client.DruidServer), CoordinatorServerView (io.druid.client.CoordinatorServerView), DataSegment (io.druid.timeline.DataSegment), DruidDataSource (io.druid.client.DruidDataSource), Interval (org.joda.time.Interval), Before (org.junit.Before)
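
One detail worth noting: the chained construction in setUp works because DruidDataSource.addSegment returns the data source itself, so several segments can be attached inline, as in this short sketch:

// addSegment(...) returns this DruidDataSource, enabling fluent chaining.
DruidDataSource ds = new DruidDataSource("datasource1", new HashMap())
    .addSegment("part1", dataSegmentList.get(0))
    .addSegment("part2", dataSegmentList.get(1));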

Aggregations

DruidDataSource (io.druid.client.DruidDataSource): 26 usages
DataSegment (io.druid.timeline.DataSegment): 19 usages
Map (java.util.Map): 9 usages
GET (javax.ws.rs.GET): 9 usages
Produces (javax.ws.rs.Produces): 9 usages
Interval (org.joda.time.Interval): 9 usages
DruidServer (io.druid.client.DruidServer): 8 usages
Path (javax.ws.rs.Path): 8 usages
HashMap (java.util.HashMap): 7 usages
Set (java.util.Set): 6 usages
Response (javax.ws.rs.core.Response): 6 usages
Test (org.junit.Test): 6 usages
ResourceFilters (com.sun.jersey.spi.container.ResourceFilters): 5 usages
List (java.util.List): 5 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 4 usages
AuthConfig (io.druid.server.security.AuthConfig): 4 usages
AuthorizationInfo (io.druid.server.security.AuthorizationInfo): 4 usages
ImmutableDruidDataSource (io.druid.client.ImmutableDruidDataSource): 3 usages
ImmutableDruidServer (io.druid.client.ImmutableDruidServer): 3 usages
Access (io.druid.server.security.Access): 3 usages