Use of org.fagu.fmv.ffmpeg.metadatas.VideoStream in project fmv by f-agu.
In class Bootstrap, method isUpperThan720p:
/**
 * Tells whether the first video stream of the given media is strictly taller than 720p.
 *
 * @param metadatas probed media metadata to inspect
 * @param logger sink for the human-readable verdict
 * @return {@code true} only if the media contains a video stream whose height exceeds 720p
 */
private boolean isUpperThan720p(MovieMetadatas metadatas, Logger logger) {
	if( ! metadatas.contains(Type.VIDEO)) {
		logger.log("It's not a video");
		return false;
	}
	Size size = metadatas.getVideoStream().size();
	boolean upper = size.getHeight() > Size.HD720.getHeight();
	// same message prefixes as before; only the verdict wording differs
	logger.log((upper ? "It's upper than 720p: " : "It's not upper than 720p: ") + size);
	return upper;
}
Use of org.fagu.fmv.ffmpeg.metadatas.VideoStream in project fmv by f-agu.
In class Ripper, method encode:
/**
 * Builds an ffmpeg command encoding a ripped VOB file to MP4 (H.264, quality 21) and
 * submits it to {@code ffmpegService} for asynchronous execution. All video streams are
 * mapped; audio and subtitle streams are filtered against the MPlayer dump before mapping.
 *
 * @param vobFile source VOB file; deleted once encoding terminates (success or failure)
 * @param mp4File target MP4 file, overwritten if it already exists
 * @param mPlayerDump stream layout reported by MPlayer, used to select audio/subtitle streams
 * @param progressEncode shared counter fed with encoding progress (frames)
 * @param currentEncoding shared counter incremented when this encoding actually starts
 * @param encodingLatch latch counted down when this encoding terminates
 * @throws IOException if probing the input or preparing the command fails
 */
private void encode(File vobFile, File mp4File, MPlayerDump mPlayerDump, AtomicInteger progressEncode, AtomicInteger currentEncoding, CountDownLatch encodingLatch) throws IOException {
	FFMPEGExecutorBuilder builder = ffMPEGExecutorBuilderSupplier.get();
	builder.hideBanner();
	InputProcessor inputProcessor = builder.addMediaInputFile(vobFile);
	MovieMetadatas movieMetadatas = inputProcessor.getMovieMetadatas();
	OutputProcessor outputProcessor = builder.addMediaOutputFile(mp4File);
	// video: map every video stream unchanged
	for(VideoStream stream : movieMetadatas.getVideoStreams()) {
		outputProcessor.map().streams(stream).input(inputProcessor);
	}
	// audio
	filterAndMap(inputProcessor, outputProcessor, movieMetadatas.getAudioStreams().iterator(), mPlayerDump.getAudioStreams());
	// subtitle
	filterAndMap(inputProcessor, outputProcessor, movieMetadatas.getSubtitleStreams().iterator(), mPlayerDump.getSubtitles());
	outputProcessor.codec(H264.findRecommanded().strict(Strict.EXPERIMENTAL).quality(21)).overwrite();
	// 0 means "unknown" to FFMpegProgress when the frame count cannot be estimated
	int nbFrames = movieMetadatas.getVideoStream().countEstimateFrames().orElse(0);
	builder.progressReadLine(new FFMpegProgress(progressEncode, nbFrames));
	FFExecutor<Object> executor = builder.build();
	logger.log(executor.getCommandLine());
	ffmpegService.submit(() -> {
		try {
			currentEncoding.incrementAndGet();
			executor.execute();
		} catch(Exception e) {
			logger.log(e);
		} finally {
			encodingLatch.countDown();
			// report a failed delete instead of silently ignoring File.delete()'s result
			if( ! vobFile.delete()) {
				logger.log("Unable to delete: " + vobFile.getPath());
			}
		}
	});
}
Use of org.fagu.fmv.ffmpeg.metadatas.VideoStream in project fmv by f-agu.
In class Test, method concatFade1:
/**
 * Builds an ffmpeg filter graph that overlaps two videos with a cross-fade of
 * {@code fadeDuration}: each input is padded with a null-source/silence segment so both
 * timelines have equal length, the padded videos are blended (ADDITION, fade expression),
 * and the faded audio tracks are mixed. The resulting command line is printed and the
 * filter graph is shown in a UI.
 * NOTE(review): executor.execute() is commented out below, so outFile is never written.
 *
 * @param in1VideoFile first input video (plays first, fades out)
 * @param in2VideoFile second input video (fades in, plays last)
 * @param fadeDuration length of the cross-fade
 * @param outFile target output file (currently unused — see note above)
 * @throws IOException if probing an input fails
 */
public static void concatFade1(File in1VideoFile, File in2VideoFile, Duration fadeDuration, File outFile) throws IOException {
FFMPEGExecutorBuilder builder = FFMPEGExecutorBuilder.create();
InputProcessor video1InputProcessor = builder.addMediaInputFile(in1VideoFile);
InputProcessor video2InputProcessor = builder.addMediaInputFile(in2VideoFile);
VideoStream videoStream1 = video1InputProcessor.getMovieMetadatas().getVideoStream();
VideoStream videoStream2 = video2InputProcessor.getMovieMetadatas().getVideoStream();
// T1 = duration(video1) - fade; the fade runs over [T1, end of video1]
// assumes both inputs report a duration — duration().get() throws if absent; TODO confirm
Time startTime_T1 = Time.valueOf(videoStream1.duration().get().toSeconds() - fadeDuration.toSeconds());
Duration duration_0_T1 = Duration.valueOf(startTime_T1.toSeconds());
Time startTime_T2 = Time.valueOf(videoStream2.duration().get().toSeconds() - fadeDuration.toSeconds());
Duration duration_T2_END = Duration.valueOf(startTime_T2.toSeconds());
// source 1: video1 followed by null video + silence padding of length T2_END
NullSourceVideo nullSourceVideo1 = NullSourceVideo.build().size(videoStream1.size()).duration(duration_T2_END);
AudioGenerator audioGenerator1 = AudioGenerator.build().silence().duration(duration_T2_END);
Concat concat1 = Concat.create(builder, video1InputProcessor, FilterComplex.create(nullSourceVideo1), FilterComplex.create(audioGenerator1));
FilterComplex fadeAudio1 = FilterComplex.create(FadeAudio.out().startTime(startTime_T1).duration(fadeDuration)).addInput(concat1);
// source 2: null video + silence padding of length 0..T1 followed by video2
NullSourceVideo nullSourceVideo2 = NullSourceVideo.build().size(videoStream2.size()).duration(duration_0_T1);
AudioGenerator audioGenerator2 = AudioGenerator.build().silence().duration(duration_0_T1);
Concat concat2 = Concat.create(builder, FilterComplex.create(nullSourceVideo2), FilterComplex.create(audioGenerator2), video2InputProcessor);
FilterComplex fadeAudio2 = FilterComplex.create(FadeAudio.in().startTime(startTime_T1).duration(fadeDuration)).addInput(concat2);
// blend for fade / merge
// video: normalize SAR and pixel format on both branches, then blend with a fade expression
SetSAR setSAR = SetSAR.toRatio("1");
Format formatRGBA = Format.with(PixelFormat.RGBA);
FilterComplex vfc1 = FilterComplex.create(setSAR, formatRGBA).addInput(concat1);
FilterComplex vfc2 = FilterComplex.create(setSAR, formatRGBA).addInput(concat2);
Blend blend = Blend.build().mode(Mode.ADDITION).repeatLast(true).opacity(1).exprFade(startTime_T1, fadeDuration);
Format formatYUV = Format.with(PixelFormat.YUVA422P10LE);
FilterComplex vfcBlend = FilterComplex.create(blend, formatYUV).addInput(vfc1).addInput(vfc2);
builder.filter(vfcBlend);
// audio: mix the two faded tracks, stopping at the shorter one
FilterComplex audioMix = AudioMix.build().duration(MixAudioDuration.SHORTEST).addInput(fadeAudio1).addInput(fadeAudio2);
builder.filter(audioMix);
// out
OutputProcessor outputProcessor = builder.addMediaOutputFile(outFile);
outputProcessor.overwrite();
FFExecutor<Object> executor = builder.build();
System.out.println(executor.getCommandLine());
FilterGraphUI.show(builder.getFFMPEGOperation());
// executor.execute(); // intentionally disabled: dry-run showing command line + graph only
}
Use of org.fagu.fmv.ffmpeg.metadatas.VideoStream in project fmv by f-agu.
In class Test, method concatFade2:
/**
 * Variant of concatFade1 that concatenates video and audio through SEPARATE Concat
 * filters (explicit countVideo/countAudio/countInputs per branch) instead of a single
 * combined Concat per source. Builds the same cross-fade graph: padded inputs, ADDITION
 * blend with a fade expression for video, mixed faded tracks for audio; then prints the
 * command line and shows the filter graph UI.
 * NOTE(review): executor.execute() is commented out below, so outFile is never written.
 *
 * @param in1VideoFile first input video (plays first, fades out)
 * @param in2VideoFile second input video (fades in, plays last)
 * @param fadeDuration length of the cross-fade
 * @param outFile target output file (currently unused — see note above)
 * @throws IOException if probing an input fails
 */
public static void concatFade2(File in1VideoFile, File in2VideoFile, Duration fadeDuration, File outFile) throws IOException {
FFMPEGExecutorBuilder builder = FFMPEGExecutorBuilder.create();
InputProcessor video1InputProcessor = builder.addMediaInputFile(in1VideoFile);
InputProcessor video2InputProcessor = builder.addMediaInputFile(in2VideoFile);
VideoStream videoStream1 = video1InputProcessor.getMovieMetadatas().getVideoStream();
VideoStream videoStream2 = video2InputProcessor.getMovieMetadatas().getVideoStream();
// T1 = duration(video1) - fade; the fade runs over [T1, end of video1]
// assumes both inputs report a duration — duration().get() throws if absent; TODO confirm
Time startTime_T1 = Time.valueOf(videoStream1.duration().get().toSeconds() - fadeDuration.toSeconds());
Duration duration_0_T1 = Duration.valueOf(startTime_T1.toSeconds());
Time startTime_T2 = Time.valueOf(videoStream2.duration().get().toSeconds() - fadeDuration.toSeconds());
Duration duration_T2_END = Duration.valueOf(startTime_T2.toSeconds());
// source 1: video
NullSourceVideo nullSourceVideo1 = NullSourceVideo.build().size(videoStream1.size()).duration(duration_T2_END);
Concat concat1V = Concat.create(builder, video1InputProcessor, FilterComplex.create(nullSourceVideo1)).countVideo(1).countAudio(0).countInputs(2);
// source 1: audio (faded out from T1)
AudioGenerator audioGenerator1 = AudioGenerator.build().silence().duration(duration_T2_END);
Concat concat1A = Concat.create(builder, video1InputProcessor, FilterComplex.create(audioGenerator1)).countVideo(0).countAudio(1).countInputs(2);
FilterComplex fadeAudio1 = FilterComplex.create(FadeAudio.out().startTime(startTime_T1).duration(fadeDuration)).addInput(concat1A);
// source 2: video
NullSourceVideo nullSourceVideo2 = NullSourceVideo.build().size(videoStream2.size()).duration(duration_0_T1);
Concat concat2V = Concat.create(builder, FilterComplex.create(nullSourceVideo2), video2InputProcessor).countVideo(1).countAudio(0).countInputs(2);
// source 2: audio (faded in at T1)
AudioGenerator audioGenerator2 = AudioGenerator.build().silence().duration(duration_0_T1);
Concat concat2A = Concat.create(builder, FilterComplex.create(audioGenerator2), video2InputProcessor).countVideo(0).countAudio(1).countInputs(2);
FilterComplex fadeAudio2 = FilterComplex.create(FadeAudio.in().startTime(startTime_T1).duration(fadeDuration)).addInput(concat2A);
// blend / merge video: normalize SAR and pixel format on both branches, then blend
SetSAR setSAR = SetSAR.toRatio("1");
Format formatRGBA = Format.with(PixelFormat.RGBA);
FilterComplex vfc1 = FilterComplex.create(setSAR, formatRGBA).addInput(concat1V);
FilterComplex vfc2 = FilterComplex.create(setSAR, formatRGBA).addInput(concat2V);
Blend blend = Blend.build().mode(Mode.ADDITION).repeatLast(true).opacity(1).exprFade(startTime_T1, fadeDuration);
Format formatYUV = Format.with(PixelFormat.YUVA422P10LE);
FilterComplex vfcBlend = FilterComplex.create(blend, formatYUV).addInput(vfc1).addInput(vfc2);
builder.filter(vfcBlend);
// merge audio: mix the two faded tracks, stopping at the shorter one
FilterComplex audioMix = AudioMix.build().duration(MixAudioDuration.SHORTEST).addInput(fadeAudio1).addInput(fadeAudio2);
builder.filter(audioMix);
// out
OutputProcessor outputProcessor = builder.addMediaOutputFile(outFile);
outputProcessor.overwrite();
FFExecutor<Object> executor = builder.build();
System.out.println(executor.getCommandLine());
FilterGraphUI.show(builder.getFFMPEGOperation());
// executor.execute(); // intentionally disabled: dry-run showing command line + graph only
}
Use of org.fagu.fmv.ffmpeg.metadatas.VideoStream in project fmv by f-agu.
In class FFHelper, method extractThumbnails2GIF:
/**
 * Extracts {@code countFrame} evenly-selected frames from a video into an animated GIF.
 * Similar to extractThumbnails2JPEGS(..) but without 'format("image2")'.
 *
 * @param inFile source video file
 * @param outFile destination file; must carry a .gif extension
 * @param countFrame number of frames to select from the video stream
 * @throws IOException if probing or encoding fails
 * @throws IllegalArgumentException if {@code outFile} is not a .gif file
 */
public static void extractThumbnails2GIF(File inFile, File outFile, int countFrame) throws IOException {
	String extension = FilenameUtils.getExtension(outFile.getName());
	if( ! "gif".equalsIgnoreCase(extension)) {
		throw new IllegalArgumentException("Not a gif: " + outFile.getPath());
	}
	// extract images
	FFMPEGExecutorBuilder builder = FFMPEGExecutorBuilder.create();
	// global
	builder.hideBanner();
	builder.logLevel(LogLevel.INFO);
	// input & infos
	InputProcessor inputProcessor = builder.addMediaInputFile(inFile);
	VideoStream videoStream = inputProcessor.getMovieMetadatas().getVideoStream();
	// filter: keep only countFrame frames of the video stream
	builder.filter(SelectVideo.build().countFrame(videoStream, countFrame));
	// necessary ? (kept from the original — unverified whether both format filters are required)
	builder.filter(Format.with(PixelFormat.RGB8));
	builder.filter(Format.with(PixelFormat.RGB24));
	// output
	builder.addMediaOutputFile(outFile).videoSync(VSync.PASSTHROUGH).overwrite();
	builder.build().execute();
}
Aggregations