Example 1 with Object2LongOpenHashMap

Use of it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap in project druid by druid-io, from the class DataSourcesResourceTest, method testGetDatasourceLoadstatusFullAndComputeUsingClusterView.

@Test
public void testGetDatasourceLoadstatusFullAndComputeUsingClusterView() {
    DataSegment datasource1Segment1 = new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
    DataSegment datasource1Segment2 = new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
    List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
    final Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier = new HashMap<>();
    Object2LongMap<String> tier1 = new Object2LongOpenHashMap<>();
    tier1.put("datasource1", 0L);
    Object2LongMap<String> tier2 = new Object2LongOpenHashMap<>();
    tier2.put("datasource1", 3L);
    underReplicationCountsPerDataSourcePerTier.put("tier1", tier1);
    underReplicationCountsPerDataSourcePerTier.put("tier2", tier2);
    // Test when datasource fully loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    DruidCoordinator druidCoordinator = EasyMock.createMock(DruidCoordinator.class);
    EasyMock.expect(druidCoordinator.computeUnderReplicationCountsPerDataSourcePerTierForSegmentsUsingClusterView(segments)).andReturn(underReplicationCountsPerDataSourcePerTier).once();
    EasyMock.replay(segmentsMetadataManager, druidCoordinator);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, druidCoordinator);
    Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, "full", "computeUsingClusterView");
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(2, ((Map) response.getEntity()).size());
    Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier1")).size());
    Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier2")).size());
    Assert.assertEquals(0L, ((Map) ((Map) response.getEntity()).get("tier1")).get("datasource1"));
    Assert.assertEquals(3L, ((Map) ((Map) response.getEntity()).get("tier2")).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager);
}
Also used: Response (javax.ws.rs.core.Response), Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap), Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap), HashMap (java.util.HashMap), DataSegment (org.apache.druid.timeline.DataSegment), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), TreeMap (java.util.TreeMap), DruidCoordinator (org.apache.druid.server.coordinator.DruidCoordinator), Test (org.junit.Test)

Example 2 with Object2LongOpenHashMap

Use of it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap in project druid by druid-io, from the class DataSourcesResourceTest, method testGetDatasourceLoadstatusFull.

@Test
public void testGetDatasourceLoadstatusFull() {
    DataSegment datasource1Segment1 = new DataSegment("datasource1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10);
    DataSegment datasource1Segment2 = new DataSegment("datasource1", Intervals.of("2010-01-22/P1D"), "", null, null, null, null, 0x9, 20);
    List<DataSegment> segments = ImmutableList.of(datasource1Segment1, datasource1Segment2);
    final Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier = new HashMap<>();
    Object2LongMap<String> tier1 = new Object2LongOpenHashMap<>();
    tier1.put("datasource1", 0L);
    Object2LongMap<String> tier2 = new Object2LongOpenHashMap<>();
    tier2.put("datasource1", 3L);
    underReplicationCountsPerDataSourcePerTier.put("tier1", tier1);
    underReplicationCountsPerDataSourcePerTier.put("tier2", tier2);
    // Test when datasource fully loaded
    EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())).andReturn(Optional.of(segments)).once();
    DruidCoordinator druidCoordinator = EasyMock.createMock(DruidCoordinator.class);
    EasyMock.expect(druidCoordinator.computeUnderReplicationCountsPerDataSourcePerTierForSegments(segments)).andReturn(underReplicationCountsPerDataSourcePerTier).once();
    EasyMock.replay(segmentsMetadataManager, druidCoordinator);
    DataSourcesResource dataSourcesResource = new DataSourcesResource(inventoryView, segmentsMetadataManager, null, null, null, druidCoordinator);
    Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, "full", null);
    Assert.assertEquals(200, response.getStatus());
    Assert.assertNotNull(response.getEntity());
    Assert.assertEquals(2, ((Map) response.getEntity()).size());
    Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier1")).size());
    Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier2")).size());
    Assert.assertEquals(0L, ((Map) ((Map) response.getEntity()).get("tier1")).get("datasource1"));
    Assert.assertEquals(3L, ((Map) ((Map) response.getEntity()).get("tier2")).get("datasource1"));
    EasyMock.verify(segmentsMetadataManager);
}
Also used: Response (javax.ws.rs.core.Response), Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap), Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap), HashMap (java.util.HashMap), DataSegment (org.apache.druid.timeline.DataSegment), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), TreeMap (java.util.TreeMap), DruidCoordinator (org.apache.druid.server.coordinator.DruidCoordinator), Test (org.junit.Test)
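
Both tests above build the same nested structure: a java.util.HashMap keyed by tier name whose values are Object2LongMap instances keyed by datasource, holding the number of under-replicated segments. Below is a minimal, self-contained sketch of that pattern outside the test harness (the class name and printed output are illustrative, not part of Druid); it also shows getLong, which reads a count back as a primitive without boxing.

import it.unimi.dsi.fastutil.objects.Object2LongMap;
import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap;
import java.util.HashMap;
import java.util.Map;

public class UnderReplicationMapSketch {
    public static void main(String[] args) {
        // Outer map: tier name -> (datasource name -> under-replicated segment count).
        Map<String, Object2LongMap<String>> underReplicationPerTier = new HashMap<>();

        Object2LongMap<String> tier1 = new Object2LongOpenHashMap<>();
        // Fully loaded on tier1.
        tier1.put("datasource1", 0L);
        Object2LongMap<String> tier2 = new Object2LongOpenHashMap<>();
        // Three segments still missing on tier2.
        tier2.put("datasource1", 3L);

        underReplicationPerTier.put("tier1", tier1);
        underReplicationPerTier.put("tier2", tier2);

        // getLong avoids boxing; an absent key returns the map's default value (0 unless changed).
        long missingOnTier2 = underReplicationPerTier.get("tier2").getLong("datasource1");
        System.out.println("tier2/datasource1 is under-replicated by " + missingOnTier2 + " segments");
    }
}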

Example 3 with Object2LongOpenHashMap

Use of it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap in project geode by apache, from the class HeapEvictor, method getAllSortedRegionList.

private List<LocalRegion> getAllSortedRegionList() {
    List<LocalRegion> allRegionList = getAllRegionList();
    // Capture the sizes so that they do not change while sorting
    final Object2LongOpenHashMap sizes = new Object2LongOpenHashMap(allRegionList.size());
    for (LocalRegion r : allRegionList) {
        long size = r instanceof BucketRegion ? ((BucketRegion) r).getSizeForEviction() : r.size();
        sizes.put(r, size);
    }
    // Sort with respect to other PR buckets also in case of multiple PRs
    Collections.sort(allRegionList, new Comparator<LocalRegion>() {

        public int compare(LocalRegion r1, LocalRegion r2) {
            long numEntries1 = sizes.get(r1);
            long numEntries2 = sizes.get(r2);
            if (numEntries1 > numEntries2) {
                return -1;
            } else if (numEntries1 < numEntries2) {
                return 1;
            }
            return 0;
        }
    });
    return allRegionList;
}
Also used: Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap)
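
The method above uses a raw Object2LongOpenHashMap and an anonymous Comparator. Below is a minimal sketch of the same capture-then-sort idea with a parameterized map, the primitive-returning getLong, and Comparator.comparingLong; the Region interface is a stand-in for Geode's LocalRegion and is purely illustrative, not Geode API.

import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class SortBySnapshotSizeSketch {
    // Illustrative stand-in for Geode's LocalRegion.
    interface Region {
        long size();
    }

    static List<Region> sortLargestFirst(List<Region> regions) {
        // Capture each region's size once so concurrent changes cannot reorder the sort.
        Object2LongOpenHashMap<Region> sizes = new Object2LongOpenHashMap<>(regions.size());
        for (Region r : regions) {
            sizes.put(r, r.size());
        }
        List<Region> sorted = new ArrayList<>(regions);
        // getLong returns a primitive long, so the comparator never boxes.
        sorted.sort(Comparator.comparingLong(sizes::getLong).reversed());
        return sorted;
    }
}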

Example 4 with Object2LongOpenHashMap

Use of it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap in project druid by druid-io, from the class HistoricalMetricsMonitor, method doMonitor.

@Override
public boolean doMonitor(ServiceEmitter emitter) {
    emitter.emit(new ServiceMetricEvent.Builder().build("segment/max", serverConfig.getMaxSize()));
    final Object2LongOpenHashMap<String> pendingDeleteSizes = new Object2LongOpenHashMap<>();
    for (DataSegment segment : segmentLoadDropMgr.getPendingDeleteSnapshot()) {
        pendingDeleteSizes.addTo(segment.getDataSource(), segment.getSize());
    }
    for (final Object2LongMap.Entry<String> entry : pendingDeleteSizes.object2LongEntrySet()) {
        final String dataSource = entry.getKey();
        final long pendingDeleteSize = entry.getLongValue();
        emitter.emit(new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, dataSource).setDimension("tier", serverConfig.getTier()).setDimension("priority", String.valueOf(serverConfig.getPriority())).build("segment/pendingDelete", pendingDeleteSize));
    }
    for (Map.Entry<String, Long> entry : segmentManager.getDataSourceSizes().entrySet()) {
        String dataSource = entry.getKey();
        long used = entry.getValue();
        final ServiceMetricEvent.Builder builder = new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, dataSource).setDimension("tier", serverConfig.getTier()).setDimension("priority", String.valueOf(serverConfig.getPriority()));
        emitter.emit(builder.build("segment/used", used));
        final double usedPercent = serverConfig.getMaxSize() == 0 ? 0 : used / (double) serverConfig.getMaxSize();
        emitter.emit(builder.build("segment/usedPercent", usedPercent));
    }
    for (Map.Entry<String, Long> entry : segmentManager.getDataSourceCounts().entrySet()) {
        String dataSource = entry.getKey();
        long count = entry.getValue();
        final ServiceMetricEvent.Builder builder = new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, dataSource).setDimension("tier", serverConfig.getTier()).setDimension("priority", String.valueOf(serverConfig.getPriority()));
        emitter.emit(builder.build("segment/count", count));
    }
    return true;
}
Also used: Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap), DataSegment (org.apache.druid.timeline.DataSegment), Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap), ServiceMetricEvent (org.apache.druid.java.util.emitter.service.ServiceMetricEvent), Map (java.util.Map)
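
The fastutil-specific idiom here is addTo, which accumulates primitive longs per datasource without boxing, followed by iteration over object2LongEntrySet with getLongValue. A standalone sketch of just that idiom (the Segment record and the sample values are illustrative, not Druid classes or real data) could look like this:

import it.unimi.dsi.fastutil.objects.Object2LongMap;
import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap;
import java.util.List;

public class PendingDeleteSizesSketch {
    // Illustrative stand-in for a segment with a datasource and a size in bytes (Java 16+ record).
    record Segment(String dataSource, long size) {}

    public static void main(String[] args) {
        List<Segment> pendingDelete = List.of(
                new Segment("wikipedia", 100L),
                new Segment("wikipedia", 250L),
                new Segment("metrics", 40L));

        Object2LongOpenHashMap<String> pendingDeleteSizes = new Object2LongOpenHashMap<>();
        for (Segment segment : pendingDelete) {
            // addTo adds to the existing primitive value (starting from 0 for a new key),
            // avoiding the get-then-put dance of a plain HashMap<String, Long>.
            pendingDeleteSizes.addTo(segment.dataSource(), segment.size());
        }

        for (Object2LongMap.Entry<String> entry : pendingDeleteSizes.object2LongEntrySet()) {
            // getLongValue reads the primitive value directly, without boxing.
            System.out.println(entry.getKey() + " -> " + entry.getLongValue());
        }
    }
}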

Aggregations

Object2LongOpenHashMap (it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap): 4
Object2LongMap (it.unimi.dsi.fastutil.objects.Object2LongMap): 3
Map (java.util.Map): 3
DataSegment (org.apache.druid.timeline.DataSegment): 3
ImmutableMap (com.google.common.collect.ImmutableMap): 2
HashMap (java.util.HashMap): 2
TreeMap (java.util.TreeMap): 2
Response (javax.ws.rs.core.Response): 2
DruidCoordinator (org.apache.druid.server.coordinator.DruidCoordinator): 2
Test (org.junit.Test): 2
ServiceMetricEvent (org.apache.druid.java.util.emitter.service.ServiceMetricEvent): 1