Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class BatchDataSegmentAnnouncer, method announceSegments:
@Override
public void announceSegments(Iterable<DataSegment> segments) throws IOException
{
  Iterable<DataSegment> toAnnounce = Iterables.transform(segments, segmentTransformer);
  SegmentZNode segmentZNode = new SegmentZNode(makeServedSegmentPath());
  Set<DataSegment> batch = Sets.newHashSet();
  int byteSize = 0;
  int count = 0;

  for (DataSegment segment : toAnnounce) {
    int newBytesLen = jsonMapper.writeValueAsBytes(segment).length;

    // A single segment larger than the znode byte limit can never be announced.
    if (newBytesLen > config.getMaxBytesPerNode()) {
      throw new ISE("byte size %,d exceeds %,d", newBytesLen, config.getMaxBytesPerNode());
    }

    // Flush the current batch to its own znode when adding this segment would
    // exceed either the per-node segment count or the per-node byte budget.
    if (count >= config.getSegmentsPerNode() || byteSize + newBytesLen > config.getMaxBytesPerNode()) {
      segmentZNode.addSegments(batch);
      announcer.announce(segmentZNode.getPath(), segmentZNode.getBytes());

      segmentZNode = new SegmentZNode(makeServedSegmentPath());
      batch = Sets.newHashSet();
      count = 0;
      byteSize = 0;
    }

    log.info("Announcing segment[%s] at path[%s]", segment.getIdentifier(), segmentZNode.getPath());
    segmentLookup.put(segment, segmentZNode);
    batch.add(segment);
    count++;
    byteSize += newBytesLen;
  }

  // Announce whatever remains in the final batch.
  segmentZNode.addSegments(batch);
  announcer.announce(segmentZNode.getPath(), segmentZNode.getBytes());
}
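The heart of announceSegments is the batching rule: segments are packed into one znode until either the per-node segment count or the per-node byte budget would be exceeded, at which point the batch is flushed and a fresh znode is started. Below is a minimal, runnable sketch of that rule in isolation; the limits and string payloads are made up, standing in for config.getSegmentsPerNode(), config.getMaxBytesPerNode(), and serialized segments.

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class BatchingSketch
{
  // Hypothetical stand-ins for config.getSegmentsPerNode() / getMaxBytesPerNode().
  private static final int MAX_ITEMS_PER_NODE = 2;
  private static final int MAX_BYTES_PER_NODE = 16;

  public static List<List<String>> batch(Iterable<String> payloads)
  {
    List<List<String>> batches = new ArrayList<>();
    List<String> current = new ArrayList<>();
    int byteSize = 0;

    for (String payload : payloads) {
      int len = payload.getBytes(StandardCharsets.UTF_8).length;
      if (len > MAX_BYTES_PER_NODE) {
        throw new IllegalStateException(String.format("byte size %,d exceeds %,d", len, MAX_BYTES_PER_NODE));
      }
      // Same rollover condition as announceSegments: flush when either limit would trip.
      if (current.size() >= MAX_ITEMS_PER_NODE || byteSize + len > MAX_BYTES_PER_NODE) {
        batches.add(current);
        current = new ArrayList<>();
        byteSize = 0;
      }
      current.add(payload);
      byteSize += len;
    }
    batches.add(current); // the final, partially filled batch
    return batches;
  }

  public static void main(String[] args)
  {
    // Three 5-byte payloads under a 2-item / 16-byte budget yield two batches.
    System.out.println(batch(Arrays.asList("seg-a", "seg-b", "seg-c")));
  }
}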
Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class ServerManager, method loadSegment:
/**
* Load a single segment.
*
* @param segment segment to load
*
* @return true if the segment was newly loaded, false if it was already loaded
*
* @throws SegmentLoadingException if the segment cannot be loaded
*/
public boolean loadSegment(final DataSegment segment) throws SegmentLoadingException
{
  final Segment adapter;
  try {
    adapter = segmentLoader.getSegment(segment);
  }
  catch (SegmentLoadingException e) {
    // Loading failed part-way through; clean up any partially downloaded
    // files before propagating the original exception.
    try {
      segmentLoader.cleanup(segment);
    }
    catch (SegmentLoadingException e1) {
      // ignore
    }
    throw e;
  }

  if (adapter == null) {
    throw new SegmentLoadingException("Null adapter from loadSpec[%s]", segment.getLoadSpec());
  }

  synchronized (lock) {
    String dataSource = segment.getDataSource();
    VersionedIntervalTimeline<String, ReferenceCountingSegment> loadedIntervals = dataSources.get(dataSource);
    if (loadedIntervals == null) {
      loadedIntervals = new VersionedIntervalTimeline<>(Ordering.natural());
      dataSources.put(dataSource, loadedIntervals);
    }

    // If this exact (interval, version, partition) chunk is already in the
    // timeline, the segment was loaded previously; report a no-op.
    PartitionHolder<ReferenceCountingSegment> entry = loadedIntervals.findEntry(segment.getInterval(), segment.getVersion());
    if ((entry != null) && (entry.getChunk(segment.getShardSpec().getPartitionNum()) != null)) {
      log.warn("Told to load an adapter for segment[%s] that already exists", segment.getIdentifier());
      return false;
    }

    loadedIntervals.add(
        segment.getInterval(),
        segment.getVersion(),
        segment.getShardSpec().createChunk(new ReferenceCountingSegment(adapter))
    );
    synchronized (dataSourceSizes) {
      dataSourceSizes.add(dataSource, segment.getSize());
    }
    synchronized (dataSourceCounts) {
      dataSourceCounts.add(dataSource, 1L);
    }
    return true;
  }
}
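A detail worth noting is the error path: if segmentLoader.getSegment fails part-way, the method attempts cleanup before rethrowing, deliberately swallowing any secondary cleanup failure so the original exception is the one callers see. Here is a self-contained sketch of that pattern, with hypothetical fetch/cleanup hooks standing in for the segment loader:

public class CleanupOnFailureSketch
{
  static class LoadingException extends Exception
  {
    LoadingException(String msg) { super(msg); }
  }

  // Hypothetical hooks standing in for segmentLoader.getSegment / cleanup.
  static Object fetch(String id) throws LoadingException
  {
    throw new LoadingException("download failed for " + id);
  }

  static void cleanup(String id) throws LoadingException
  {
    // delete partial files; may itself fail
  }

  static Object load(String id) throws LoadingException
  {
    try {
      return fetch(id);
    }
    catch (LoadingException e) {
      try {
        cleanup(id); // best-effort: remove partially downloaded data
      }
      catch (LoadingException suppressed) {
        // ignore -- the original failure is the one callers need to see
      }
      throw e;
    }
  }

  public static void main(String[] args)
  {
    try {
      load("ds_2017-01-01");
    }
    catch (LoadingException e) {
      System.out.println("load failed as expected: " + e.getMessage());
    }
  }
}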
Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class ZkCoordinator, method addSegments:
private void addSegments(Collection<DataSegment> segments, final DataSegmentChangeCallback callback)
{
  ExecutorService loadingExecutor = null;
  try (final BackgroundSegmentAnnouncer backgroundSegmentAnnouncer =
           new BackgroundSegmentAnnouncer(announcer, exec, config.getAnnounceIntervalMillis())) {
    backgroundSegmentAnnouncer.startAnnouncing();

    loadingExecutor = Execs.multiThreaded(config.getNumBootstrapThreads(), "ZkCoordinator-loading-%s");

    final int numSegments = segments.size();
    final CountDownLatch latch = new CountDownLatch(numSegments);
    final AtomicInteger counter = new AtomicInteger(0);
    final CopyOnWriteArrayList<DataSegment> failedSegments = new CopyOnWriteArrayList<>();
    for (final DataSegment segment : segments) {
      loadingExecutor.submit(
          new Runnable()
          {
            @Override
            public void run()
            {
              try {
                log.info("Loading segment[%d/%d][%s]", counter.getAndIncrement(), numSegments, segment.getIdentifier());
                loadSegment(segment, callback);
                // Hand the segment to the background announcer unless it has
                // already been announced.
                if (!announcer.isAnnounced(segment)) {
                  try {
                    backgroundSegmentAnnouncer.announceSegment(segment);
                  }
                  catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw new SegmentLoadingException(e, "Loading Interrupted");
                  }
                }
              }
              catch (SegmentLoadingException e) {
                log.error(e, "[%s] failed to load", segment.getIdentifier());
                failedSegments.add(segment);
              }
              finally {
                // Always count down, even on failure, so the latch releases.
                latch.countDown();
              }
            }
          }
      );
    }

    try {
      latch.await();

      if (failedSegments.size() > 0) {
        log.makeAlert("%,d errors seen while loading segments", failedSegments.size())
           .addData("failedSegments", failedSegments)
           .emit();
      }
    }
    catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      log.makeAlert(e, "LoadingInterrupted").emit();
    }

    backgroundSegmentAnnouncer.finishAnnouncing();
  }
  catch (SegmentLoadingException e) {
    log.makeAlert(e, "Failed to load segments -- likely problem with announcing.")
       .addData("numSegments", segments.size())
       .emit();
  }
  finally {
    callback.execute();
    if (loadingExecutor != null) {
      loadingExecutor.shutdownNow();
    }
  }
}
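The concurrency skeleton here is reusable on its own: submit one task per segment to a bounded pool, record failures in a CopyOnWriteArrayList, and count the latch down in a finally block so await() always returns even when loads fail. A stripped-down, runnable sketch (the pool size and simulated failure are illustrative, not from the original):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ParallelLoadSketch
{
  public static void main(String[] args) throws InterruptedException
  {
    List<String> segments = Arrays.asList("seg-1", "seg-2", "seg-3", "seg-4");
    ExecutorService pool = Executors.newFixedThreadPool(2); // stands in for Execs.multiThreaded
    CountDownLatch latch = new CountDownLatch(segments.size());
    CopyOnWriteArrayList<String> failed = new CopyOnWriteArrayList<>();

    for (String segment : segments) {
      pool.submit(() -> {
        try {
          if (segment.endsWith("3")) {
            throw new RuntimeException("simulated load failure");
          }
          System.out.println("loaded " + segment);
        }
        catch (RuntimeException e) {
          failed.add(segment); // record and keep going; other segments still load
        }
        finally {
          latch.countDown(); // without this, a failure would hang await() forever
        }
      });
    }

    latch.await(); // block until every task, successful or not, has finished
    System.out.println("failed segments: " + failed);
    pool.shutdownNow();
  }
}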
Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class DatasourcesResource, method getSegment:
private Pair<DataSegment, Set<String>> getSegment(String segmentId)
{
  DataSegment theSegment = null;
  Set<String> servers = Sets.newHashSet();
  // Scan every server in the inventory, collecting all hosts serving the
  // segment; the segment metadata itself is identical on each server.
  for (DruidServer druidServer : serverInventoryView.getInventory()) {
    DataSegment currSegment = druidServer.getSegments().get(segmentId);
    if (currSegment != null) {
      theSegment = currSegment;
      servers.add(druidServer.getHost());
    }
  }
  if (theSegment == null) {
    return null;
  }
  return new Pair<>(theSegment, servers);
}
Use of io.druid.timeline.DataSegment in project druid by druid-io.
From the class DatasourcesResource, method getSegmentDataSourceIntervals:
@GET
@Path("/{dataSourceName}/intervals")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getSegmentDataSourceIntervals(
    @PathParam("dataSourceName") String dataSourceName,
    @QueryParam("simple") String simple,
    @QueryParam("full") String full
)
{
  final DruidDataSource dataSource = getDataSource(dataSourceName);

  if (dataSource == null) {
    return Response.noContent().build();
  }

  // Sort intervals most recent first.
  final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());

  if (full != null) {
    // ?full: per interval, map each segment id to its metadata and the servers holding it.
    final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
    for (DataSegment dataSegment : dataSource.getSegments()) {
      Map<String, Object> segments = retVal.get(dataSegment.getInterval());
      if (segments == null) {
        segments = Maps.newHashMap();
        retVal.put(dataSegment.getInterval(), segments);
      }

      // Assumes every segment of the datasource is currently served somewhere;
      // getSegment returns null otherwise.
      Pair<DataSegment, Set<String>> val = getSegment(dataSegment.getIdentifier());
      segments.put(dataSegment.getIdentifier(), ImmutableMap.of("metadata", val.lhs, "servers", val.rhs));
    }

    return Response.ok(retVal).build();
  }

  if (simple != null) {
    // ?simple: per interval, report only the total size and segment count.
    final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
    for (DataSegment dataSegment : dataSource.getSegments()) {
      Map<String, Object> properties = retVal.get(dataSegment.getInterval());
      if (properties == null) {
        properties = Maps.newHashMap();
        properties.put("size", dataSegment.getSize());
        properties.put("count", 1);
        retVal.put(dataSegment.getInterval(), properties);
      } else {
        properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
        properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
      }
    }

    return Response.ok(retVal).build();
  }

  // Default: just the sorted set of distinct intervals.
  final Set<Interval> intervals = Sets.newTreeSet(comparator);
  for (DataSegment dataSegment : dataSource.getSegments()) {
    intervals.add(dataSegment.getInterval());
  }

  return Response.ok(intervals).build();
}
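Of the three branches, the ?simple one carries the only real merge logic: folding every segment into a per-interval {size, count} summary. Here is that step in isolation, with plain strings standing in for Interval keys and a reversed TreeMap mimicking Comparators.inverse(Comparators.intervalsByStartThenEnd()); the (interval, size) pairs are made-up data:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class IntervalSummarySketch
{
  public static void main(String[] args)
  {
    // (interval, size) pairs standing in for DataSegment.getInterval()/getSize().
    List<String[]> segments = Arrays.asList(
        new String[]{"2017-01-01/2017-01-02", "100"},
        new String[]{"2017-01-01/2017-01-02", "250"},
        new String[]{"2017-01-02/2017-01-03", "400"}
    );

    // Reverse string order approximates the endpoint's newest-interval-first sort.
    Map<String, Map<String, Object>> retVal = new TreeMap<>(Collections.reverseOrder());

    for (String[] segment : segments) {
      String interval = segment[0];
      long size = Long.parseLong(segment[1]);

      Map<String, Object> properties = retVal.get(interval);
      if (properties == null) {
        // First segment in this interval seeds the summary.
        properties = new HashMap<>();
        properties.put("size", size);
        properties.put("count", 1);
        retVal.put(interval, properties);
      } else {
        // Later segments accumulate into the existing summary.
        properties.put("size", (Long) properties.get("size") + size);
        properties.put("count", (Integer) properties.get("count") + 1);
      }
    }

    // Prints each interval's {size, count} summary, newest interval first.
    System.out.println(retVal);
  }
}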