Use of com.helospark.tactview.core.timeline.framemerge.RenderFrameData in project tactview by helospark.
The getFrame method of the class TimelineManagerRenderService, which renders the video frame and audio of the timeline at the requested position:
public TimelineRenderResult getFrame(TimelineManagerFramesRequest request) {
    List<TimelineClip> allClips = timelineManager.channels.stream()
            .map(channel -> channel.getDataAt(request.getPosition()))
            .flatMap(Optional::stream)
            .collect(Collectors.toList());
    Map<String, TimelineClip> clipsToRender = allClips.stream()
            .collect(Collectors.toMap(a -> a.getId(), a -> a));
    List<String> renderOrder = allClips.stream()
            .filter(a -> a.isEnabled(request.getPosition()))
            .map(a -> a.getId())
            .collect(Collectors.toList());
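    // Build the render tree from the clip dependencies and flatten it into layers; the loop below renders
    // one layer at a time, so each clip can rely on the results of the layers already rendered.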
    List<TreeNode> tree = buildRenderTree(clipsToRender, request.getPosition());
    List<List<TimelineClip>> layers = new ArrayList<>();
    recursiveLayering(tree, 0, layers);

    Map<String, RenderFrameData> clipsToFrames = new ConcurrentHashMap<>();
    Map<String, AudioFrameResult> audioToFrames = new ConcurrentHashMap<>();
    Map<String, RegularRectangle> clipToExpandedPosition = new ConcurrentHashMap<>();
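    // Render layer by layer: every clip in the current layer is rendered asynchronously on executorService,
    // and its result is stored in the concurrent maps above before the next layer starts.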
    for (int i = 0; i < layers.size(); ++i) {
        List<CompletableFuture<Void>> futures = new ArrayList<>();
        for (var clip : layers.get(i)) {
            if (clip instanceof VisualTimelineClip && request.isNeedVideo()) {
                // TODO: rest later
                VisualTimelineClip visualClip = (VisualTimelineClip) clip;
                futures.add(CompletableFuture.supplyAsync(() -> {
                    Map<String, ReadOnlyClipImage> requiredClips = visualClip.getClipDependency(request.getPosition()).stream()
                            .filter(a -> clipsToFrames.containsKey(a))
                            .map(a -> clipsToFrames.get(a))
                            .collect(Collectors.toMap(a -> a.id, a -> a.clipFrameResult));
                    Map<String, ReadOnlyClipImage> channelCopiedClips = visualClip.getChannelDependency(request.getPosition()).stream()
                            .flatMap(channelId -> timelineManagerAccessor.findChannelWithId(channelId).stream())
                            .flatMap(channel -> channel.getDataAt(request.getPosition()).stream())
                            .filter(a -> clipsToFrames.containsKey(a.getId()))
                            .map(a -> clipsToFrames.get(a.getId()))
                            .collect(Collectors.toMap(a -> a.channelId, a -> a.clipFrameResult, (a, b) -> a, HashMap::new));
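                    // Adjustment layers operate on everything below them: collect the already rendered frames
                    // of the channels below this clip and merge them into a single image used as extra input.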
                    ReadOnlyClipImage adjustmentImage = null;
                    if (clip instanceof AdjustmentLayerProceduralClip) {
                        Map<String, RenderFrameData> framesBelow = new TreeMap<>();
                        int startChannel = timelineManagerAccessor.findChannelIndexForClipId(visualClip.getId()).get() + 1;
                        for (int k = startChannel; k < timelineManager.channels.size(); ++k) {
                            Optional<TimelineClip> clipAtChannel = timelineManager.channels.get(k).getDataAt(request.getPosition());
                            if (clipAtChannel.isPresent()) {
                                String clipId = clipAtChannel.get().getId();
                                framesBelow.put(clipId, clipsToFrames.get(clipId));
                            }
                        }
                        adjustmentImage = renderBelowLayers(request, renderOrder, framesBelow);
                        channelCopiedClips.put(AdjustmentLayerProceduralClip.LAYER_ID, adjustmentImage);
                    }
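                    // Render the clip at the requested position and scale, then expand the result onto the
                    // preview-sized canvas; intermediate buffers are returned to the memory manager right away.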
                    GetFrameRequest frameRequest = GetFrameRequest.builder()
                            .withScale(request.getScale())
                            .withPosition(request.getPosition())
                            .withExpectedWidth(request.getPreviewWidth())
                            .withExpectedHeight(request.getPreviewHeight())
                            .withApplyEffects(request.isEffectsEnabled())
                            .withRequestedClips(requiredClips)
                            .withRequestedChannelClips(channelCopiedClips)
                            .withLowResolutionPreview(request.isLowResolutionPreview())
                            .build();
                    ReadOnlyClipImage frameResult = visualClip.getFrame(frameRequest);
                    ReadOnlyClipImage expandedFrame = expandFrame(request, visualClip, frameResult, clipToExpandedPosition);
                    BlendModeStrategy blendMode = visualClip.getBlendModeAt(request.getPosition());
                    double alpha = visualClip.getAlpha(request.getPosition());
                    GlobalMemoryManagerAccessor.memoryManager.returnBuffer(frameResult.getBuffer());
                    if (adjustmentImage != null) {
                        GlobalMemoryManagerAccessor.memoryManager.returnBuffer(adjustmentImage.getBuffer());
                    }
                    String channelId = timelineManagerAccessor.findChannelForClipId(visualClip.getId()).get().getId();
                    return new RenderFrameData(visualClip.getId(), alpha, blendMode, expandedFrame,
                            clip.getEffectsAtGlobalPosition(request.getPosition(), AbstractVideoTransitionEffect.class), channelId);
                }, executorService).thenAccept(a -> {
                    clipsToFrames.put(visualClip.getId(), a);
                }).exceptionally(e -> {
                    logger.error("Unable to render", e);
                    return null;
                }));
            } else if (clip instanceof AudibleTimelineClip && request.isNeedSound()) {
                AudibleTimelineClip audibleClip = (AudibleTimelineClip) clip;
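                // Audio is rendered the same way; sample rate, bytes per sample, channel count and length
                // fall back to the project defaults when the request does not specify them.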
                futures.add(CompletableFuture.supplyAsync(() -> {
                    int sampleRateToUse = request.getAudioSampleRate().orElse(projectRepository.getSampleRate());
                    int bytesPerSampleToUse = request.getAudioBytesPerSample().orElse(projectRepository.getBytesPerSample());
                    int numberOfChannels = request.getNumberOfChannels().orElse(projectRepository.getNumberOfChannels());
                    TimelineLength defaultLength = new TimelineLength(projectRepository.getFrameTime());
                    TimelineLength length = request.getAudioLength().orElse(defaultLength);
                    AudioRequest audioRequest = AudioRequest.builder()
                            .withApplyEffects(request.isEffectsEnabled())
                            .withPosition(request.getPosition())
                            .withLength(length)
                            .withSampleRate(sampleRateToUse)
                            .withBytesPerSample(bytesPerSampleToUse)
                            .withNumberOfChannels(numberOfChannels)
                            .build();
                    return audibleClip.requestAudioFrame(audioRequest);
                }, executorService).exceptionally(e -> {
                    logger.error("Unable to get audio", e);
                    return null;
                }).thenAccept(a -> {
                    if (a == null) {
                        logger.error("Unable to get audio");
                    } else {
                        audioToFrames.put(audibleClip.getId(), a);
                    }
                }));
            }
        }
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[futures.size()])).join();
    }
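    // Merge the per-clip results in render order into the final video frame and audio buffer,
    // then return every intermediate buffer to the memory manager.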
    ReadOnlyClipImage finalImage = request.isNeedVideo() ? renderVideo(request, renderOrder, clipsToFrames) : null;
    AudioFrameResult audioBuffer = renderAudio(renderOrder, audioToFrames, request);
    clipsToFrames.values().stream()
            .forEach(a -> GlobalMemoryManagerAccessor.memoryManager.returnBuffer(a.clipFrameResult.getBuffer()));
    audioToFrames.values().stream()
            .flatMap(a -> a.getChannels().stream())
            .forEach(a -> GlobalMemoryManagerAccessor.memoryManager.returnBuffer(a));
    ReadOnlyClipImage finalResult = executeGlobalEffectsOn(finalImage);
    return new TimelineRenderResult(new AudioVideoFragment(finalResult, audioBuffer), new HashMap<>(clipToExpandedPosition));
}