Usage of com.helospark.tactview.core.decoder.VisualMediaMetadata in the tactview project by helospark.
Method requestFrame of class VideoClip.
@Override
public ReadOnlyClipImage requestFrame(RequestFrameParameter frameRequest) {
    // Prefer the low-resolution proxy when the request allows it and one has been generated.
    boolean useProxy = frameRequest.isLowResolutionPreview() && lowResolutionProxySource.isPresent();
    VisualMediaMetadata metadata = useProxy ? lowResolutionProxySource.get().mediaMetadata : mediaMetadata;
    VisualMediaSource source = useProxy ? lowResolutionProxySource.get().source : backingSource;

    VideoMediaDataRequest request = VideoMediaDataRequest.builder()
            .withFilePath(source.backingFile)
            .withHeight(frameRequest.getHeight())
            .withWidth(frameRequest.getWidth())
            .withMetadata(metadata)
            .withStart(frameRequest.getPosition())
            .withUseApproximatePosition(frameRequest.useApproximatePosition())
            .build();
    ByteBuffer decodedFrame = source.decoder.readFrames(request).getFrame();
    ClipImage image = new ClipImage(decodedFrame, frameRequest.getWidth(), frameRequest.getHeight());

    // Apply the rotation declared in the media metadata, unless rotation is disabled at this
    // position or the angle is effectively zero.
    if (isRotationEnabledAt(frameRequest.getPosition()) && !MathUtil.fuzzyEquals(getRotationFromMetadata(), 0.0)) {
        RotateServiceRequest rotateRequest = RotateServiceRequest.builder()
                .withAngle(getRotationFromMetadata())
                .withImage(image)
                .withCenterX(0.5)
                .withCenterY(0.5)
                .build();
        ClipImage rotated = rotateService.rotateExactSize(rotateRequest);
        // The unrotated buffer is no longer needed; return it to the memory pool.
        GlobalMemoryManagerAccessor.memoryManager.returnBuffer(image.getBuffer());
        image = rotated;
    }
    return image;
}
Usage of com.helospark.tactview.core.decoder.VisualMediaMetadata in the tactview project by helospark.
Method createTimelinePattern of class TimelineImagePatternService.
// Renders a horizontal film-strip preview of the clip for the timeline UI: a row of
// evenly time-spaced thumbnails scaled to PREVIEW_HEIGHT, drawn over a black "tape" band.
// visibleStartPosition/visibleEndPosition are the clip-relative bounds (in seconds) to sample.
public Image createTimelinePattern(VisualTimelineClip videoClip, int expectedWidth, double visibleStartPosition, double visibleEndPosition) {
VisualMediaMetadata metadata = videoClip.getMediaMetadata();
// Thumbnail width keeps the source aspect ratio at the fixed preview height.
// NOTE(review): an extreme portrait aspect could make this 0 and divide-by-zero below — assumes landscape-ish media; confirm.
int scaledFrameWidth = (int) ((double) metadata.getWidth() / metadata.getHeight() * PREVIEW_HEIGHT);
int scaledFrameHeight = PREVIEW_HEIGHT;
// Enough thumbnails (plus separator width) to cover the requested pixel width.
int numberOfFrames = (int) Math.ceil((double) (expectedWidth + BLACK_FILM_TAPE_LINE_WIDTH) / scaledFrameWidth);
// Seconds between sampled frames so the thumbnails span the visible interval evenly.
double timejump = (visibleEndPosition - visibleStartPosition) / numberOfFrames;
BufferedImage result = new BufferedImage(expectedWidth, RECTANGLE_HEIGHT, TYPE_INT_RGB);
Graphics graphics = result.getGraphics();
// Black background acts as the film-tape band and the separators between thumbnails.
graphics.setColor(Color.BLACK);
graphics.fillRect(0, 0, expectedWidth, FILM_TAPE_SIZE);
// Procedural clips have no native resolution; they render at the preview size instead.
boolean isDynamicallyGenerated = videoClip instanceof ProceduralVisualClip;
int index = 0;
for (double seconds = visibleStartPosition; seconds < visibleEndPosition; seconds += timejump, ++index) {
TimelinePosition position = TimelinePosition.ofSeconds(seconds);
int width = isDynamicallyGenerated ? uiProjectRepository.getPreviewWidth() : videoClip.getMediaMetadata().getWidth();
int height = isDynamicallyGenerated ? uiProjectRepository.getPreviewHeight() : videoClip.getMediaMetadata().getHeight();
// Effects are skipped and approximate seeking is allowed: thumbnails favor speed over fidelity.
GetFrameRequest frameRequest = GetFrameRequest.builder().withApplyEffects(false).withUseApproximatePosition(true).withExpectedWidth(width).withExpectedHeight(height).withRelativePosition(position).withScale(uiProjectRepository.getScaleFactor()).build();
ReadOnlyClipImage frame = videoClip.getFrame(frameRequest);
if (isDynamicallyGenerated) {
// Procedural frames may be smaller than the preview canvas; pad/position them to full preview size first.
FrameExtendRequest extendFrameRequest = FrameExtendRequest.builder().withClip(videoClip).withFrameResult(frame).withPreviewWidth(uiProjectRepository.getPreviewWidth()).withPreviewHeight(uiProjectRepository.getPreviewHeight()).withScale(uiProjectRepository.getScaleFactor()).withTimelinePosition(position.add(videoClip.getInterval().getStartPosition())).build();
ClipImage expandedFrame = frameExtender.expandFrame(extendFrameRequest);
BufferedImage bf = byteBufferToImageConverter.byteBufferToBufferedImage(expandedFrame.getBuffer(), expandedFrame.getWidth(), expandedFrame.getHeight());
java.awt.Image img = bf.getScaledInstance(scaledFrameWidth, scaledFrameHeight, BufferedImage.SCALE_SMOOTH);
// Both native buffers go back to the pool once converted to an AWT image.
// NOTE(review): assumes expandFrame always allocates a new buffer distinct from frame's — confirm, else this double-returns.
GlobalMemoryManagerAccessor.memoryManager.returnBuffer(frame.getBuffer());
GlobalMemoryManagerAccessor.memoryManager.returnBuffer(expandedFrame.getBuffer());
graphics.drawImage(img, index * (scaledFrameWidth + BLACK_FILM_TAPE_LINE_WIDTH) + BLACK_FILM_TAPE_LINE_WIDTH, FILM_TAPE_SIZE, null);
} else {
BufferedImage bf = byteBufferToImageConverter.byteBufferToBufferedImage(frame.getBuffer(), frame.getWidth(), frame.getHeight());
java.awt.Image img = bf.getScaledInstance(scaledFrameWidth, scaledFrameHeight, BufferedImage.SCALE_SMOOTH);
graphics.drawImage(img, index * (scaledFrameWidth + BLACK_FILM_TAPE_LINE_WIDTH) + BLACK_FILM_TAPE_LINE_WIDTH, FILM_TAPE_SIZE, null);
GlobalMemoryManagerAccessor.memoryManager.returnBuffer(frame.getBuffer());
}
}
// Draw the sprocket-hole decoration over the assembled strip.
dragFilmEffect(expectedWidth, graphics);
return byteBufferToJavaFxImageConverter.convertToJavafxImage(result);
}
Usage of com.helospark.tactview.core.decoder.VisualMediaMetadata in the tactview project by helospark.
Method readFrames of class GifMediaDecoder.
@Override
public MediaDataResponse readFrames(VideoMediaDataRequest request) {
    GifDecoder gifDecoder = gifFileReader.readFile(request.getFile().getAbsolutePath());
    VisualMediaMetadata metadata = request.getMetadata();
    int loopCount = ((GifVideoMetadata) metadata).getLoopCount();
    BigDecimal startSeconds = request.getStart().getSeconds();
    BigDecimal lengthSeconds = metadata.getLength().getSeconds();

    // While the GIF is still looping (infinite, or fewer completed iterations than its
    // declared loop count) the position wraps around the animation length; once looping is
    // exhausted the absolute position is used, which resolves to the last frame below.
    boolean stillLooping = loopCount == INFINITE_LOOP
            || startSeconds.divide(lengthSeconds, 1, RoundingMode.FLOOR).intValue() < loopCount;
    int positionInMilliseconds = stillLooping
            ? getWrappedMilliseconds(startSeconds, lengthSeconds)
            : startSeconds.multiply(BigDecimal.valueOf(1000)).intValue();

    // Walk the per-frame delays to find the frame whose time span covers the position.
    int frameStartMs = 0;
    for (int frameIndex = 0; frameIndex < gifDecoder.getFrameCount(); ++frameIndex) {
        int frameEndMs = frameStartMs + gifDecoder.getDelay(frameIndex);
        if (positionInMilliseconds >= frameStartMs && positionInMilliseconds < frameEndMs) {
            return getImageAt(gifDecoder, frameIndex, request);
        }
        frameStartMs = frameEndMs;
    }
    // Position is past the end of the animation: hold the final frame.
    return getImageAt(gifDecoder, gifDecoder.getFrameCount() - 1, request);
}
Usage of com.helospark.tactview.core.decoder.VisualMediaMetadata in the tactview project by helospark.
Method initializeProjectOnFirstVideoClipAdded of class ClipAddedListener.
// The first visual clip added decides the project's video size and fps; the first audible
// clip decides the audio format. Later clips do not change these settings.
private void initializeProjectOnFirstVideoClipAdded(TimelineClip clip) {
    if (!projectRepository.isVideoInitialized() && clip instanceof VisualTimelineClip) {
        VisualTimelineClip visualClip = (VisualTimelineClip) clip;
        VisualMediaMetadata metadata = visualClip.getMediaMetadata();
        int projectWidth = metadata.getWidth();
        int projectHeight = metadata.getHeight();
        if (metadata instanceof VideoMetadata && visualClip instanceof VideoClip) {
            double rotation = ((VideoMetadata) metadata).getRotation();
            boolean quarterTurn = MathUtil.fuzzyEquals(Math.abs(rotation), 90.0);
            if (quarterTurn && ((VideoClip) visualClip).isRotationEnabledAt(TimelinePosition.ofZero())) {
                // A +/-90 degree rotation swaps the effective display dimensions.
                int originalWidth = projectWidth;
                projectWidth = projectHeight;
                projectHeight = originalWidth;
            }
        }
        BigDecimal fps;
        if (metadata instanceof VideoMetadata) {
            fps = new BigDecimal(((VideoMetadata) metadata).getFps());
        } else {
            fps = new BigDecimal("30"); // non-video media has no native fps; default to 30
        }
        projectSizeInitializer.initializeProjectSize(projectWidth, projectHeight, fps);
    }
    if (!projectRepository.isAudioInitialized() && clip instanceof AudibleTimelineClip) {
        AudibleTimelineClip audioClip = (AudibleTimelineClip) clip;
        projectRepository.initializeAudio(
                audioClip.getMediaMetadata().getSampleRate(),
                audioClip.getMediaMetadata().getBytesPerSample(),
                audioClip.getMediaMetadata().getChannels());
    }
}
Usage of com.helospark.tactview.core.decoder.VisualMediaMetadata in the tactview project by helospark.
Method execute of class AddScaleCommand.
@Override
public void execute() {
    VisualTimelineClip clip = (VisualTimelineClip) timelineManager.findClipById(clipId).orElseThrow();
    VisualMediaMetadata metadata = clip.getMediaMetadata();

    // Scale factors that stretch the clip's native size to fill the project canvas.
    double horizontalScale = (double) projectRepository.getWidth() / metadata.getWidth();
    double verticalScale = (double) projectRepository.getHeight() / metadata.getHeight();

    CreateEffectRequest createEffectRequest = new CreateEffectRequest(
            TimelinePosition.ofZero(),
            scaleEffectFactory.getEffectId(),
            TimelineClipType.VIDEO,
            clip.getInterval());
    addedEffect = (ScaleEffect) effectFactoryChain.createEffect(createEffectRequest);
    addedEffect.setScale(horizontalScale, verticalScale);
    // Effect intervals are relative to their clip, so anchor the effect at position zero.
    addedEffect.setInterval(clip.getInterval().butMoveStartPostionTo(TimelinePosition.ofZero()));
    timelineManager.addEffectForClip(clip, addedEffect);
}
Aggregations