Use of com.helospark.tactview.core.timeline.GetFrameRequest in project tactview by helospark.
The class GhostingEffect, method createFrame.
@Override
public ReadOnlyClipImage createFrame(StatelessEffectRequest request) {
    VisualTimelineClip clip = request.getCurrentTimelineClip();
    ReadOnlyClipImage currentFrame = request.getCurrentFrame();
    ClipImage result = ClipImage.sameSizeAs(currentFrame);

    double endAlpha = alphaProvider.getValueAt(request.getEffectPosition());
    int numberOfGhosts = numberOfGhostProvider.getValueAt(request.getEffectPosition());
    double timeStep = ghostTimeProvider.getValueAt(request.getEffectPosition());
    BigDecimal timeBetweenGhosts = BigDecimal.valueOf(timeStep);

    double alpha = endAlpha;
    int startIndex = numberOfGhosts - 2;
    // Walk backwards in time and blend earlier frames of the same clip onto the result
    for (int i = startIndex; i >= 0; --i) {
        BigDecimal absoluteEffectPosition = request.getClipPosition().getSeconds().subtract(timeBetweenGhosts.multiply(BigDecimal.valueOf(i + 1)));
        if (absoluteEffectPosition.compareTo(BigDecimal.ZERO) < 0) {
            // TODO: has to be part of clip
            break;
        }
        GetFrameRequest frameRequest = GetFrameRequest.builder()
                .withApplyEffects(true)
                .withApplyEffectsLessThanEffectChannel(Optional.of(request.getEffectChannel()))
                .withExpectedWidth(currentFrame.getWidth())
                .withExpectedHeight(currentFrame.getHeight())
                .withPosition(new TimelinePosition(absoluteEffectPosition))
                .withScale(request.getScale())
                .build();
        ReadOnlyClipImage frame = clip.getFrame(frameRequest);
        if (i == startIndex) {
            result.copyFrom(frame);
        } else {
            mergeWithAlpha(result, frame, alpha);
        }
        GlobalMemoryManagerAccessor.memoryManager.returnBuffer(frame.getBuffer());
    }
    // Finally blend the current frame on top of the accumulated ghost frames
    mergeWithAlpha(result, currentFrame, endAlpha);
    return result;
}
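The mergeWithAlpha helper used above is not part of the snippet. A minimal sketch of such a per-pixel blend, assuming getBuffer() exposes an interleaved 8-bit RGBA java.nio.ByteBuffer; this is an illustration, not the project's actual helper:

    // Hypothetical sketch: blends 'source' onto 'target' in place with the given opacity.
    // Assumes both images have the same size and 4 bytes (RGBA) per pixel.
    private void mergeWithAlpha(ClipImage target, ReadOnlyClipImage source, double alpha) {
        java.nio.ByteBuffer targetBuffer = target.getBuffer();
        java.nio.ByteBuffer sourceBuffer = source.getBuffer();
        int byteCount = target.getWidth() * target.getHeight() * 4;
        for (int i = 0; i < byteCount; ++i) {
            int targetValue = targetBuffer.get(i) & 0xFF;
            int sourceValue = sourceBuffer.get(i) & 0xFF;
            targetBuffer.put(i, (byte) (sourceValue * alpha + targetValue * (1.0 - alpha)));
        }
    }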
Use of com.helospark.tactview.core.timeline.GetFrameRequest in project tactview by helospark.
The class TimelineImagePatternService, method createTimelinePattern.
public Image createTimelinePattern(VisualTimelineClip videoClip, int expectedWidth, double visibleStartPosition, double visibleEndPosition) {
    VisualMediaMetadata metadata = videoClip.getMediaMetadata();
    // Preserve the clip's aspect ratio at the fixed preview height
    int scaledFrameWidth = (int) ((double) metadata.getWidth() / metadata.getHeight() * PREVIEW_HEIGHT);
    int scaledFrameHeight = PREVIEW_HEIGHT;
    int numberOfFrames = (int) Math.ceil((double) (expectedWidth + BLACK_FILM_TAPE_LINE_WIDTH) / scaledFrameWidth);
    double timejump = (visibleEndPosition - visibleStartPosition) / numberOfFrames;

    BufferedImage result = new BufferedImage(expectedWidth, RECTANGLE_HEIGHT, TYPE_INT_RGB);
    Graphics graphics = result.getGraphics();
    graphics.setColor(Color.BLACK);
    graphics.fillRect(0, 0, expectedWidth, FILM_TAPE_SIZE);

    boolean isDynamicallyGenerated = videoClip instanceof ProceduralVisualClip;
    int index = 0;
    // Sample one frame per preview slot across the visible time range
    for (double seconds = visibleStartPosition; seconds < visibleEndPosition; seconds += timejump, ++index) {
        TimelinePosition position = TimelinePosition.ofSeconds(seconds);
        int width = isDynamicallyGenerated ? uiProjectRepository.getPreviewWidth() : videoClip.getMediaMetadata().getWidth();
        int height = isDynamicallyGenerated ? uiProjectRepository.getPreviewHeight() : videoClip.getMediaMetadata().getHeight();
        GetFrameRequest frameRequest = GetFrameRequest.builder()
                .withApplyEffects(false)
                .withUseApproximatePosition(true)
                .withExpectedWidth(width)
                .withExpectedHeight(height)
                .withRelativePosition(position)
                .withScale(uiProjectRepository.getScaleFactor())
                .build();
        ReadOnlyClipImage frame = videoClip.getFrame(frameRequest);
        if (isDynamicallyGenerated) {
            // Procedural clips are rendered at preview size and extended to the full preview frame before scaling down
            FrameExtendRequest extendFrameRequest = FrameExtendRequest.builder()
                    .withClip(videoClip)
                    .withFrameResult(frame)
                    .withPreviewWidth(uiProjectRepository.getPreviewWidth())
                    .withPreviewHeight(uiProjectRepository.getPreviewHeight())
                    .withScale(uiProjectRepository.getScaleFactor())
                    .withTimelinePosition(position.add(videoClip.getInterval().getStartPosition()))
                    .build();
            ClipImage expandedFrame = frameExtender.expandFrame(extendFrameRequest);
            BufferedImage bf = byteBufferToImageConverter.byteBufferToBufferedImage(expandedFrame.getBuffer(), expandedFrame.getWidth(), expandedFrame.getHeight());
            java.awt.Image img = bf.getScaledInstance(scaledFrameWidth, scaledFrameHeight, BufferedImage.SCALE_SMOOTH);
            GlobalMemoryManagerAccessor.memoryManager.returnBuffer(frame.getBuffer());
            GlobalMemoryManagerAccessor.memoryManager.returnBuffer(expandedFrame.getBuffer());
            graphics.drawImage(img, index * (scaledFrameWidth + BLACK_FILM_TAPE_LINE_WIDTH) + BLACK_FILM_TAPE_LINE_WIDTH, FILM_TAPE_SIZE, null);
        } else {
            BufferedImage bf = byteBufferToImageConverter.byteBufferToBufferedImage(frame.getBuffer(), frame.getWidth(), frame.getHeight());
            java.awt.Image img = bf.getScaledInstance(scaledFrameWidth, scaledFrameHeight, BufferedImage.SCALE_SMOOTH);
            graphics.drawImage(img, index * (scaledFrameWidth + BLACK_FILM_TAPE_LINE_WIDTH) + BLACK_FILM_TAPE_LINE_WIDTH, FILM_TAPE_SIZE, null);
            GlobalMemoryManagerAccessor.memoryManager.returnBuffer(frame.getBuffer());
        }
    }
    dragFilmEffect(expectedWidth, graphics);
    return byteBufferToJavaFxImageConverter.convertToJavafxImage(result);
}
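A hypothetical call site, purely to illustrate the parameters; the service and clip references and the numbers below are made up, and the returned Image is presumably a JavaFX image given the converter used above:

    // Hypothetical usage: render a 600 px wide film-strip preview covering seconds 0..12.5 of the clip
    Image pattern = timelineImagePatternService.createTimelinePattern(videoClip, 600, 0.0, 12.5);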
Use of com.helospark.tactview.core.timeline.GetFrameRequest in project tactview by helospark.
The class DependentClipProviderChainItem, method drawImage.
private Image drawImage(TimelineClip clip, TimelinePosition position) {
    if (clip instanceof VisualTimelineClip) {
        GetFrameRequest frameRequest = GetFrameRequest.builder()
                .withApplyEffects(true)
                .withUseApproximatePosition(true)
                .withExpectedWidth(IMAGE_PREVIEW_SIZE) // TODO: aspect ratio
                .withExpectedHeight(27)
                .withPosition(position)
                .withScale(uiProjectRepository.getScaleFactor() / ((double) uiProjectRepository.getPreviewWidth() / IMAGE_PREVIEW_SIZE))
                .build();
        ReadOnlyClipImage result = ((VisualTimelineClip) clip).getFrame(frameRequest);
        return imageConverter.convertToJavafxImage(result.getBuffer(), result.getWidth(), result.getHeight());
    } else {
        throw new IllegalStateException("Other formats not supported");
    }
}
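For illustration with made-up numbers: if the preview width were 960 px, IMAGE_PREVIEW_SIZE were 80 and the current scale factor were 0.5, the request scale would be 0.5 / (960 / 80) ≈ 0.042, so the clip renders a thumbnail roughly IMAGE_PREVIEW_SIZE pixels wide instead of a full preview-sized frame.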
Use of com.helospark.tactview.core.timeline.GetFrameRequest in project tactview by helospark.
The class VisualTimelineClipElement, method render.
@Override
public Map<ConnectionIndex, ReadOnlyClipImage> render(Map<ConnectionIndex, ReadOnlyClipImage> images, EffectGraphInputRequest request) {
    // Collect the images rendered by connected upstream graph nodes, keyed by clip id
    Map<String, ReadOnlyClipImage> additionalClips = new LinkedHashMap<>();
    for (var additionalClipIndex : additionalClipInideces) {
        ReadOnlyClipImage providedImage = images.get(additionalClipIndex);
        if (providedImage != null) {
            additionalClips.put(additionalClipIndex.getId(), providedImage);
        }
    }
    GetFrameRequest getFrameRequest = GetFrameRequest.builder()
            .withApplyEffects(request.applyEffects)
            .withLowResolutionPreview(request.lowResolutionPreview)
            .withExpectedWidth(request.expectedWidth)
            .withExpectedHeight(request.expectedHeight)
            .withPosition(request.position)
            .withRelativePosition(request.relativePosition)
            .withScale(request.scale)
            .withUseApproximatePosition(false)
            .withRequestedClips(additionalClips)
            .build();
    ReadOnlyClipImage output = clip.getFrame(getFrameRequest);
    return Map.of(outputIndex, output);
}
Use of com.helospark.tactview.core.timeline.GetFrameRequest in project tactview by helospark.
The class FrameHoldEffect, method createFrame.
@Override
public ReadOnlyClipImage createFrame(StatelessEffectRequest request) {
    VisualTimelineClip clip = request.getCurrentTimelineClip();
    ReadOnlyClipImage currentFrame = request.getCurrentFrame();
    // Always request the frame at the effect's start position, freezing the clip for the duration of the effect
    GetFrameRequest frameRequest = GetFrameRequest.builder()
            .withApplyEffects(true)
            .withApplyEffectsLessThanEffectChannel(Optional.of(request.getEffectChannel()))
            .withExpectedWidth(currentFrame.getWidth())
            .withExpectedHeight(currentFrame.getHeight())
            .withPosition(getInterval().getStartPosition())
            .withScale(request.getScale())
            .build();
    ReadOnlyClipImage frame = clip.getFrame(frameRequest);
    return frame;
}