Use of com.helospark.tactview.core.timeline.TimelineRenderResult.RegularRectangle in the project tactview by helospark.
In the class FrameExtender, method expandFrame:
/**
 * Expands the clip's rendered frame onto the full preview canvas.
 * Computes the clip's on-canvas position from its alignment anchors plus its
 * translate position, records that rectangle in the request's out-parameter
 * (used by the UI for hit testing and selection drawing), and returns the
 * frame padded/translated to preview size.
 */
public ClipImage expandFrame(FrameExtendRequest request) {
    ReadOnlyClipImage frame = request.getFrameResult();
    VisualTimelineClip clip = request.getClip();
    TimelinePosition position = request.getTimelinePosition();
    int canvasWidth = request.getPreviewWidth();
    int canvasHeight = request.getPreviewHeight();

    // Alignment anchors offset the frame relative to the preview canvas
    // before the clip's own x/y position is added on top.
    int alignmentOffsetX = clip.getHorizontalAlignment(position).apply(frame.getWidth(), canvasWidth);
    int alignmentOffsetY = clip.getVerticalAlignment(position).apply(frame.getHeight(), canvasHeight);

    GetPositionParameters positionParameters = new GetPositionParameters(position, request.getScale(), canvasWidth, canvasHeight);
    int finalXPosition = alignmentOffsetX + clip.getXPosition(positionParameters);
    int finalYPosition = alignmentOffsetY + clip.getYPosition(positionParameters);

    // Publish the final on-canvas bounds of this clip for consumers of the render result.
    request.outBoundPositions.put(clip.getId(), new RegularRectangle(finalXPosition, finalYPosition, frame.getWidth(), frame.getHeight()));

    return expandAndTranslate(frame, canvasWidth, canvasHeight, finalXPosition, finalYPosition);
}
Use of com.helospark.tactview.core.timeline.TimelineRenderResult.RegularRectangle in the project tactview by helospark.
In the class InputModeRepository, method createMouseHandler:
/**
 * Builds a mouse event handler for the preview canvas.
 * While an input mode is active, events are scaled with the mode's size
 * function and dispatched to {@code function}; otherwise, when a cached frame
 * exists, the event falls back to {@code fallbackHandler} together with the
 * on-canvas rectangles of the currently selected clips.
 */
private EventHandler<? super MouseEvent> createMouseHandler(Consumer<StrategyMouseInput> function, Consumer<GeneralCanvasOperationsMouseRequest> fallbackHandler) {
    return e -> {
        JavaDisplayableAudioVideoFragment currentCachedImage = displayUpdaterService.getCacheCurrentImage();
        // Event coordinates corrected for the current pan (translate) of the canvas.
        double canvasX = e.getX() - canvasStateHolder.getTranslateX();
        double canvasY = e.getY() - canvasStateHolder.getTranslateY();

        if (inputModeInput != null) {
            // Active input mode: scale coordinates via the mode's size function.
            double scaledX = sizeFunctionImplementation.scalePreviewDataUsingSizeFunction(canvasX, inputModeInput.sizeFunction, projectRepository.getPreviewWidth());
            double scaledY = sizeFunctionImplementation.scalePreviewDataUsingSizeFunction(canvasY, inputModeInput.sizeFunction, projectRepository.getPreviewHeight());

            StrategyMouseInput strategyInput = StrategyMouseInput.builder()
                    .withx(scaledX)
                    .withy(scaledY)
                    .withMouseEvent(e)
                    .withUnscaledX(canvasX)
                    .withUnscaledY(canvasY)
                    .withCanvasImage(() -> playbackController.getVideoFrameAt(timelineManager.getCurrentPosition(), Optional.empty()).getImage())
                    .withCurrentlyPressedKeyRepository(currentlyPressedKeyRepository)
                    .build();

            function.accept(strategyInput);

            // Re-query the result type after each handle call: the strategy's state may change.
            if (inputModeInput.currentStrategy.getResultType() == ResultType.PARTIAL) {
                handleStrategyHasResult();
            }
            if (inputModeInput.currentStrategy.getResultType() == ResultType.DONE) {
                handleStrategyHasResult();
                reset();
            }
            updateCanvas(e);
        } else if (currentCachedImage != null) {
            // No input mode active: only the rectangles of selected clips take part in generic canvas operations.
            Map<String, RegularRectangle> selectedRectangles = currentCachedImage.getClipRectangle().entrySet().stream()
                    .filter(entry -> selectedNodeRepository.getSelectedClipIds().contains(entry.getKey()))
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
            GeneralCanvasOperationsMouseRequest fallbackRequest = GeneralCanvasOperationsMouseRequest.builder()
                    .withCanvas(canvas)
                    .withUnscaledX(canvasX)
                    .withUnscaledY(canvasY)
                    .withCanvasRelativeX(e.getX())
                    .withCanvasRelativeY(e.getY())
                    .withx(canvasX / projectRepository.getPreviewWidth())
                    .withy(canvasY / projectRepository.getPreviewHeight())
                    .withMouseEvent(e)
                    .withRectangles(selectedRectangles)
                    .build();
            fallbackHandler.accept(fallbackRequest);
        }
    };
}
Use of com.helospark.tactview.core.timeline.TimelineRenderResult.RegularRectangle in the project tactview by helospark.
In the class GeneralCanvasOperationStrategy, method findDragData:
/**
 * Determines which clip rectangle (if any) the mouse grabbed and where.
 * Iterates the clip rectangles in map order and classifies the grab point;
 * for the first hit, loads the clip's current translate position and scale
 * so the drag can be applied relative to them.
 *
 * @return the drag data for the hit clip, or {@code null} when the point
 *         touches no rectangle
 */
private DragData findDragData(GeneralCanvasOperationsMouseRequest input) {
    String draggedClip = null;
    DragPointType dragPointType = null;
    for (var entry : input.rectangles.entrySet()) {
        DragPointType foundPointType = findDragPointType(input.unscaledX, input.unscaledY, entry.getValue());
        if (foundPointType != null) {
            draggedClip = entry.getKey();
            dragPointType = foundPointType;
            break;
        }
    }
    if (draggedClip == null || dragPointType == null) {
        return null;
    }
    // NOTE(review): unchecked Optional.get() — throws if the clip has no "translate" descriptor;
    // kept as-is to preserve the existing error path.
    ValueProviderDescriptor translateElement = effectParametersRepository.findDescriptorForLabelAndClipId(draggedClip, "translate").get();
    Point originalPosition = ((PointProvider) translateElement.getKeyframeableEffect()).getValueAt(uiTimelineManager.getCurrentPosition());
    Point lastScale = findCurrentScale(draggedClip);
    return new DragData(draggedClip, dragPointType, originalPosition, lastScale, input.rectangles.get(draggedClip));
}

/**
 * Classifies where the given point grabs the rectangle.
 * Precedence mirrors the original inline checks: corners first, then edges
 * (left, right, bottom, top), then the interior.
 *
 * @return the drag point type, or {@code null} when the point does not touch the rectangle
 */
private DragPointType findDragPointType(double x, double y, RegularRectangle rectangle) {
    double left = rectangle.getX();
    double top = rectangle.getY();
    double right = rectangle.getX() + rectangle.getWidth();
    double bottom = rectangle.getY() + rectangle.getHeight();
    if (isPointClose(x, y, left, top)) {
        return DragPointType.TOP_LEFT;
    }
    if (isPointClose(x, y, right, top)) {
        return DragPointType.TOP_RIGHT;
    }
    if (isPointClose(x, y, left, bottom)) {
        return DragPointType.BOTTOM_LEFT;
    }
    if (isPointClose(x, y, right, bottom)) {
        return DragPointType.BOTTOM_RIGHT;
    }
    if (Math.abs(x - left) < CLOSE_THRESHOLD && isYWithinRectangleRange(y, rectangle)) {
        return DragPointType.LEFT;
    }
    if (Math.abs(x - right) < CLOSE_THRESHOLD && isYWithinRectangleRange(y, rectangle)) {
        return DragPointType.RIGHT;
    }
    if (Math.abs(y - bottom) < CLOSE_THRESHOLD && isXWithinRectangleRange(x, rectangle)) {
        return DragPointType.BOTTOM;
    }
    if (Math.abs(y - top) < CLOSE_THRESHOLD && isXWithinRectangleRange(x, rectangle)) {
        return DragPointType.TOP;
    }
    if (isPointInRectangle(x, y, rectangle)) {
        return DragPointType.CENTER;
    }
    return null;
}

/**
 * Reads the clip's current scale from its optional scale effect.
 *
 * @return the (x, y) scale at the current timeline position, or (1.0, 1.0)
 *         when the clip has no scale effect
 */
private Point findCurrentScale(String clipId) {
    ScaleEffect scaleEffect = findOptionalScale(clipId);
    if (scaleEffect == null) {
        return new Point(1.0, 1.0);
    }
    // NOTE(review): unchecked Optional.get() on the scale descriptors — assumes a ScaleEffect
    // always exposes "width scale"/"height scale"; kept to preserve existing behavior.
    double xScale = (double) effectParametersRepository.findDescriptorForLabelAndClipId(scaleEffect.getId(), "width scale").get().getKeyframeableEffect().getValueAt(uiTimelineManager.getCurrentPosition());
    double yScale = (double) effectParametersRepository.findDescriptorForLabelAndClipId(scaleEffect.getId(), "height scale").get().getKeyframeableEffect().getValueAt(uiTimelineManager.getCurrentPosition());
    return new Point(xScale, yScale);
}
Use of com.helospark.tactview.core.timeline.TimelineRenderResult.RegularRectangle in the project tactview by helospark.
In the class TimelineManagerRenderService, method getFrame:
/**
 * Renders a single frame (video and/or audio) of the timeline at the requested position.
 *
 * Clips are arranged into dependency layers (a clip that depends on another clip's output
 * is placed in a later layer), each layer is rendered concurrently on the executor, and
 * the per-clip results are composited into the final image/audio in render order.
 * Returns the final audio/video fragment together with each clip's expanded on-canvas rectangle.
 */
public TimelineRenderResult getFrame(TimelineManagerFramesRequest request) {
// Collect the clip present on each channel at this position (channels without a clip are skipped).
List<TimelineClip> allClips = timelineManager.channels.stream().map(channel -> channel.getDataAt(request.getPosition())).flatMap(Optional::stream).collect(Collectors.toList());
Map<String, TimelineClip> clipsToRender = allClips.stream().collect(Collectors.toMap(a -> a.getId(), a -> a));
// Compositing order: only enabled clips take part in the final merge.
List<String> renderOrder = allClips.stream().filter(a -> a.isEnabled(request.getPosition())).map(a -> a.getId()).collect(Collectors.toList());
// Build the dependency tree and flatten it into layers; clips within one layer are independent
// of each other and can therefore be rendered in parallel.
List<TreeNode> tree = buildRenderTree(clipsToRender, request.getPosition());
List<List<TimelineClip>> layers = new ArrayList<>();
recursiveLayering(tree, 0, layers);
// Concurrent maps: filled from the executor's worker threads while layers render.
Map<String, RenderFrameData> clipsToFrames = new ConcurrentHashMap<>();
Map<String, AudioFrameResult> audioToFrames = new ConcurrentHashMap<>();
Map<String, RegularRectangle> clipToExpandedPosition = new ConcurrentHashMap<>();
for (int i = 0; i < layers.size(); ++i) {
List<CompletableFuture<Void>> futures = new ArrayList<>();
for (var clip : layers.get(i)) {
if (clip instanceof VisualTimelineClip && request.isNeedVideo()) {
// TODO: rest later
VisualTimelineClip visualClip = (VisualTimelineClip) clip;
futures.add(CompletableFuture.supplyAsync(() -> {
// Frames of clips this clip depends on; safe to read because dependencies were rendered in earlier layers.
Map<String, ReadOnlyClipImage> requiredClips = visualClip.getClipDependency(request.getPosition()).stream().filter(a -> clipsToFrames.containsKey(a)).map(a -> clipsToFrames.get(a)).collect(Collectors.toMap(a -> a.id, a -> a.clipFrameResult));
// Frames keyed by channel id for clips that declare channel-level dependencies; merge function keeps the first on collision.
Map<String, ReadOnlyClipImage> channelCopiedClips = visualClip.getChannelDependency(request.getPosition()).stream().flatMap(channelId -> timelineManagerAccessor.findChannelWithId(channelId).stream()).flatMap(channel -> channel.getDataAt(request.getPosition()).stream()).filter(a -> clipsToFrames.containsKey(a.getId())).map(a -> clipsToFrames.get(a.getId())).collect(Collectors.toMap(a -> a.channelId, a -> a.clipFrameResult, (a, b) -> a, HashMap::new));
ReadOnlyClipImage adjustmentImage = null;
if (clip instanceof AdjustmentLayerProceduralClip) {
// An adjustment layer needs the composited image of every channel below it as its input.
Map<String, RenderFrameData> framesBelow = new TreeMap<>();
int startChannel = timelineManagerAccessor.findChannelIndexForClipId(visualClip.getId()).get() + 1;
for (int k = startChannel; k < timelineManager.channels.size(); ++k) {
Optional<TimelineClip> clipAtChannel = timelineManager.channels.get(k).getDataAt(request.getPosition());
if (clipAtChannel.isPresent()) {
String clipId = clipAtChannel.get().getId();
framesBelow.put(clipId, clipsToFrames.get(clipId));
}
}
adjustmentImage = renderBelowLayers(request, renderOrder, framesBelow);
channelCopiedClips.put(AdjustmentLayerProceduralClip.LAYER_ID, adjustmentImage);
}
GetFrameRequest frameRequest = GetFrameRequest.builder().withScale(request.getScale()).withPosition(request.getPosition()).withExpectedWidth(request.getPreviewWidth()).withExpectedHeight(request.getPreviewHeight()).withApplyEffects(request.isEffectsEnabled()).withRequestedClips(requiredClips).withRequestedChannelClips(channelCopiedClips).withLowResolutionPreview(request.isLowResolutionPreview()).build();
ReadOnlyClipImage frameResult = visualClip.getFrame(frameRequest);
// Expand the clip's frame onto the preview canvas; records its rectangle into clipToExpandedPosition.
ReadOnlyClipImage expandedFrame = expandFrame(request, visualClip, frameResult, clipToExpandedPosition);
BlendModeStrategy blendMode = visualClip.getBlendModeAt(request.getPosition());
double alpha = visualClip.getAlpha(request.getPosition());
// The unexpanded frame (and any adjustment input) is no longer needed; return buffers to the pool promptly.
GlobalMemoryManagerAccessor.memoryManager.returnBuffer(frameResult.getBuffer());
if (adjustmentImage != null) {
GlobalMemoryManagerAccessor.memoryManager.returnBuffer(adjustmentImage.getBuffer());
}
String channelId = timelineManagerAccessor.findChannelForClipId(visualClip.getId()).get().getId();
return new RenderFrameData(visualClip.getId(), alpha, blendMode, expandedFrame, clip.getEffectsAtGlobalPosition(request.getPosition(), AbstractVideoTransitionEffect.class), channelId);
}, executorService).thenAccept(a -> {
clipsToFrames.put(visualClip.getId(), a);
}).exceptionally(e -> {
// A failed clip render is logged and skipped; the rest of the frame still composites.
logger.error("Unable to render", e);
return null;
}));
} else if (clip instanceof AudibleTimelineClip && request.isNeedSound()) {
AudibleTimelineClip audibleClip = (AudibleTimelineClip) clip;
futures.add(CompletableFuture.supplyAsync(() -> {
// Audio format parameters fall back to the project defaults when the request does not override them.
int sampleRateToUse = request.getAudioSampleRate().orElse(projectRepository.getSampleRate());
int bytesPerSampleToUse = request.getAudioBytesPerSample().orElse(projectRepository.getBytesPerSample());
int numberOfChannels = request.getNumberOfChannels().orElse(projectRepository.getNumberOfChannels());
TimelineLength defaultLength = new TimelineLength(projectRepository.getFrameTime());
TimelineLength length = request.getAudioLength().orElse(defaultLength);
AudioRequest audioRequest = AudioRequest.builder().withApplyEffects(request.isEffectsEnabled()).withPosition(request.getPosition()).withLength(length).withSampleRate(sampleRateToUse).withBytesPerSample(bytesPerSampleToUse).withNumberOfChannels(numberOfChannels).build();
return audibleClip.requestAudioFrame(audioRequest);
}, executorService).exceptionally(e -> {
logger.error("Unable to get audio", e);
return null;
}).thenAccept(a -> {
if (a == null) {
logger.error("Unable to get audio");
} else {
audioToFrames.put(audibleClip.getId(), a);
}
}));
}
}
// Wait for the whole layer before starting the next one, since later layers read these results.
CompletableFuture.allOf(futures.toArray(new CompletableFuture[futures.size()])).join();
}
ReadOnlyClipImage finalImage = request.isNeedVideo() ? renderVideo(request, renderOrder, clipsToFrames) : null;
AudioFrameResult audioBuffer = renderAudio(renderOrder, audioToFrames, request);
// All per-clip buffers have been composited into the final image/audio; release them.
clipsToFrames.values().stream().forEach(a -> GlobalMemoryManagerAccessor.memoryManager.returnBuffer(a.clipFrameResult.getBuffer()));
audioToFrames.values().stream().flatMap(a -> a.getChannels().stream()).forEach(a -> GlobalMemoryManagerAccessor.memoryManager.returnBuffer(a));
ReadOnlyClipImage finalResult = executeGlobalEffectsOn(finalImage);
// Copy the concurrent position map into a plain HashMap for the immutable result.
return new TimelineRenderResult(new AudioVideoFragment(finalResult, audioBuffer), new HashMap<>(clipToExpandedPosition));
}
Use of com.helospark.tactview.core.timeline.TimelineRenderResult.RegularRectangle in the project tactview by helospark.
In the class DisplayUpdaterService, method drawSelectionRectangle:
/**
 * Draws a dashed selection rectangle around every currently selected clip
 * in the given fragment, and updates {@code hasSelectedElement} to reflect
 * whether any selected clip was found. Does nothing when the fragment is null.
 */
private void drawSelectionRectangle(JavaDisplayableAudioVideoFragment actualAudioVideoFragment, GraphicsContext gc) {
    if (actualAudioVideoFragment == null) {
        return;
    }
    boolean anySelectedClipDrawn = false;
    for (var clipRectangleEntry : actualAudioVideoFragment.getClipRectangle().entrySet()) {
        boolean isSelected = selectedNodeRepository.getSelectedClipIds().contains(clipRectangleEntry.getKey());
        if (isSelected) {
            drawRectangleWithDashedLine(gc, clipRectangleEntry.getValue());
            anySelectedClipDrawn = true;
        }
    }
    hasSelectedElement = anySelectedClipDrawn;
}
Aggregations