Use of com.google.common.util.concurrent.AsyncFunction in project thingsboard by thingsboard.
From the class BaseAssetService, method findAssetsByQuery:
@Override
public ListenableFuture<List<Asset>> findAssetsByQuery(AssetSearchQuery query) {
    ListenableFuture<List<EntityRelation>> relations = relationService.findByQuery(query.toEntitySearchQuery());
    // Async step: resolve each related entity that is an asset via a per-id future lookup.
    ListenableFuture<List<Asset>> assets = Futures.transform(relations,
            (AsyncFunction<List<EntityRelation>, List<Asset>>) relations1 -> {
                EntitySearchDirection direction = query.toEntitySearchQuery().getParameters().getDirection();
                List<ListenableFuture<Asset>> futures = new ArrayList<>();
                for (EntityRelation relation : relations1) {
                    EntityId entityId = direction == EntitySearchDirection.FROM ? relation.getTo() : relation.getFrom();
                    if (entityId.getEntityType() == EntityType.ASSET) {
                        futures.add(findAssetByIdAsync(new AssetId(entityId.getId())));
                    }
                }
                // successfulAsList collapses the per-asset futures into a single future list.
                return Futures.successfulAsList(futures);
            });
    // Sync step: keep only the assets whose type was requested in the query.
    assets = Futures.transform(assets, (Function<List<Asset>, List<Asset>>) assetList ->
            assetList == null
                    ? Collections.emptyList()
                    : assetList.stream()
                            .filter(asset -> query.getAssetTypes().contains(asset.getType()))
                            .collect(Collectors.toList()));
    return assets;
}
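The first transform here applies an AsyncFunction: rather than returning a value, it returns a new ListenableFuture that fans out one findAssetByIdAsync call per matching relation and collapses them with Futures.successfulAsList; the second transform is a plain Function that filters the resolved assets synchronously. Below is a minimal, self-contained sketch of the same fan-out pattern. It is written against newer Guava, where the async overload of transform() is named transformAsync() and requires an explicit executor; AsyncFanOutSketch, lookupAsync(), and the thread pool are hypothetical stand-ins, not ThingsBoard code.

import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executors;

public class AsyncFanOutSketch {
    private static final ListeningExecutorService POOL =
            MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4));

    // Hypothetical per-id lookup standing in for findAssetByIdAsync().
    static ListenableFuture<String> lookupAsync(int id) {
        return POOL.submit(() -> "asset-" + id);
    }

    public static void main(String[] args) throws Exception {
        ListenableFuture<List<Integer>> ids = POOL.submit(() -> List.of(1, 2, 3));

        // The AsyncFunction fans each id out to its own async lookup and
        // collapses them with successfulAsList, which collects whichever
        // lookups succeed (failed ones become null entries).
        AsyncFunction<List<Integer>, List<String>> fanOut = idList -> {
            List<ListenableFuture<String>> futures = new ArrayList<>();
            for (int id : idList) {
                futures.add(lookupAsync(id));
            }
            return Futures.successfulAsList(futures);
        };

        // On Guava 20+ the async overload of transform() is transformAsync().
        ListenableFuture<List<String>> assets = Futures.transformAsync(ids, fanOut, POOL);
        System.out.println(assets.get()); // [asset-1, asset-2, asset-3]
        POOL.shutdown();
    }
}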
Use of com.google.common.util.concurrent.AsyncFunction in project druid by druid-io.
From the class BatchAppenderatorDriver, method pushAndClear:
private SegmentsAndCommitMetadata pushAndClear(
        Collection<String> sequenceNames,
        long pushAndClearTimeoutMs
) throws InterruptedException, ExecutionException, TimeoutException {
    final Set<SegmentIdWithShardSpec> requestedSegmentIdsForSequences = getAppendingSegments(sequenceNames);

    // Chain the asynchronous drop onto the asynchronous push via an AsyncFunction method reference.
    final ListenableFuture<SegmentsAndCommitMetadata> future = Futures.transform(
            pushInBackground(null, requestedSegmentIdsForSequences, false),
            (AsyncFunction<SegmentsAndCommitMetadata, SegmentsAndCommitMetadata>) this::dropInBackground
    );

    // A timeout of 0 means wait indefinitely.
    final SegmentsAndCommitMetadata segmentsAndCommitMetadata = pushAndClearTimeoutMs == 0L
            ? future.get()
            : future.get(pushAndClearTimeoutMs, TimeUnit.MILLISECONDS);

    // Sanity check
    final Map<SegmentIdWithShardSpec, DataSegment> pushedSegmentIdToSegmentMap = segmentsAndCommitMetadata
            .getSegments()
            .stream()
            .collect(Collectors.toMap(SegmentIdWithShardSpec::fromDataSegment, Function.identity()));
    if (!pushedSegmentIdToSegmentMap.keySet().equals(requestedSegmentIdsForSequences)) {
        throw new ISE(
                "Pushed segments[%s] are different from the requested ones[%s]",
                pushedSegmentIdToSegmentMap.keySet(),
                requestedSegmentIdsForSequences
        );
    }

    synchronized (segments) {
        for (String sequenceName : sequenceNames) {
            final SegmentsForSequence segmentsForSequence = segments.get(sequenceName);
            if (segmentsForSequence == null) {
                throw new ISE("Can't find segmentsForSequence for sequence[%s]", sequenceName);
            }
            segmentsForSequence.getAllSegmentsOfInterval().forEach(segmentsOfInterval -> {
                final SegmentWithState appendingSegment = segmentsOfInterval.getAppendingSegment();
                if (appendingSegment != null) {
                    final DataSegment pushedSegment = pushedSegmentIdToSegmentMap.get(appendingSegment.getSegmentIdentifier());
                    if (pushedSegment == null) {
                        throw new ISE("Can't find pushedSegments for segment[%s]", appendingSegment.getSegmentIdentifier());
                    }
                    // Transition each appending segment to its pushed-and-dropped state.
                    segmentsOfInterval.finishAppendingToCurrentActiveSegment(
                            segmentWithState -> segmentWithState.pushAndDrop(pushedSegment)
                    );
                }
            });
        }
    }
    return segmentsAndCommitMetadata;
}
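In this snippet the AsyncFunction is just a method reference: this::dropInBackground chains the asynchronous drop onto the asynchronous push, and the combined future is awaited with a bounded get(), where a timeout of 0 means wait indefinitely. The following sketch isolates that chain-then-await shape; PushThenDropSketch and its pushInBackground()/dropInBackground() methods are hypothetical stand-ins for the driver's internals, written against the newer transformAsync() API rather than Druid's actual code.

import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class PushThenDropSketch {
    private final ListeningExecutorService pool =
            MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(2));

    // Hypothetical stand-in for the driver's pushInBackground().
    ListenableFuture<String> pushInBackground() {
        return pool.submit(() -> "pushed");
    }

    // Hypothetical stand-in for the driver's dropInBackground().
    ListenableFuture<String> dropInBackground(String pushed) {
        return pool.submit(() -> pushed + "+dropped");
    }

    String pushAndClear(long timeoutMs) throws Exception {
        // Method reference used as an AsyncFunction, chaining the second
        // async stage onto the first, as the driver does with this::dropInBackground.
        ListenableFuture<String> future = Futures.transformAsync(
                pushInBackground(),
                (AsyncFunction<String, String>) this::dropInBackground,
                pool
        );
        // Mirror the driver's contract: 0 waits indefinitely, otherwise bound the wait.
        return timeoutMs == 0L ? future.get() : future.get(timeoutMs, TimeUnit.MILLISECONDS);
    }

    public static void main(String[] args) throws Exception {
        PushThenDropSketch sketch = new PushThenDropSketch();
        System.out.println(sketch.pushAndClear(1000)); // pushed+dropped
        sketch.pool.shutdown();
    }
}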