use of org.fagu.fmv.ffmpeg.executor.FFMPEGExecutorBuilder in project fmv by f-agu.
the class Ripper method encode.
/**
 * @param vobFile
 * @param mp4File
 * @param mPlayerDump
 * @param progressEncode
 * @param currentEncoding
 * @param encodingLatch
 * @throws IOException
 */
private void encode(File vobFile, File mp4File, MPlayerDump mPlayerDump, AtomicInteger progressEncode, AtomicInteger currentEncoding, CountDownLatch encodingLatch) throws IOException {
    FFMPEGExecutorBuilder builder = ffMPEGExecutorBuilderSupplier.get();
    builder.hideBanner();
    InputProcessor inputProcessor = builder.addMediaInputFile(vobFile);
    MovieMetadatas movieMetadatas = inputProcessor.getMovieMetadatas();
    OutputProcessor outputProcessor = builder.addMediaOutputFile(mp4File);
    // video
    for (VideoStream stream : movieMetadatas.getVideoStreams()) {
        outputProcessor.map().streams(stream).input(inputProcessor);
    }
    // audio
    filterAndMap(inputProcessor, outputProcessor, movieMetadatas.getAudioStreams().iterator(), mPlayerDump.getAudioStreams());
    // subtitle
    filterAndMap(inputProcessor, outputProcessor, movieMetadatas.getSubtitleStreams().iterator(), mPlayerDump.getSubtitles());
    outputProcessor.codec(H264.findRecommanded().strict(Strict.EXPERIMENTAL).quality(21)).overwrite();
    int nbFrames = 0;
    OptionalInt countEstimateFrames = movieMetadatas.getVideoStream().countEstimateFrames();
    if (countEstimateFrames.isPresent()) {
        nbFrames = countEstimateFrames.getAsInt();
    } else {
        // TODO
    }
    builder.progressReadLine(new FFMpegProgress(progressEncode, nbFrames));
    FFExecutor<Object> executor = builder.build();
    logger.log(executor.getCommandLine());
    ffmpegService.submit(() -> {
        try {
            currentEncoding.incrementAndGet();
            executor.execute();
        } catch (Exception e) {
            logger.log(e);
        } finally {
            encodingLatch.countDown();
            vobFile.delete();
        }
    });
}
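The same builder flow can also be sketched in a self-contained form outside the Ripper class. This is a minimal, hypothetical sketch: the file paths are placeholders, execution is synchronous instead of going through ffmpegService, and the Ripper's audio/subtitle filtering and progress reporting are omitted.

// Minimal sketch (hypothetical paths): transcode a VOB to MP4 with the recommended H.264 codec.
File vobFile = new File("title.vob");
File mp4File = new File("title.mp4");
FFMPEGExecutorBuilder builder = FFMPEGExecutorBuilder.create();
builder.hideBanner();
InputProcessor inputProcessor = builder.addMediaInputFile(vobFile);
OutputProcessor outputProcessor = builder.addMediaOutputFile(mp4File);
// map every video stream of the input to the output
for (VideoStream stream : inputProcessor.getMovieMetadatas().getVideoStreams()) {
    outputProcessor.map().streams(stream).input(inputProcessor);
}
outputProcessor.codec(H264.findRecommanded().strict(Strict.EXPERIMENTAL).quality(21)).overwrite();
FFExecutor<Object> executor = builder.build();
executor.execute(); // runs ffmpeg synchronously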
use of org.fagu.fmv.ffmpeg.executor.FFMPEGExecutorBuilder in project fmv by f-agu.
the class Test method concatFade1.
public static void concatFade1(File in1VideoFile, File in2VideoFile, Duration fadeDuration, File outFile) throws IOException {
    FFMPEGExecutorBuilder builder = FFMPEGExecutorBuilder.create();
    InputProcessor video1InputProcessor = builder.addMediaInputFile(in1VideoFile);
    InputProcessor video2InputProcessor = builder.addMediaInputFile(in2VideoFile);
    VideoStream videoStream1 = video1InputProcessor.getMovieMetadatas().getVideoStream();
    VideoStream videoStream2 = video2InputProcessor.getMovieMetadatas().getVideoStream();
    Time startTime_T1 = Time.valueOf(videoStream1.duration().get().toSeconds() - fadeDuration.toSeconds());
    Duration duration_0_T1 = Duration.valueOf(startTime_T1.toSeconds());
    Time startTime_T2 = Time.valueOf(videoStream2.duration().get().toSeconds() - fadeDuration.toSeconds());
    Duration duration_T2_END = Duration.valueOf(startTime_T2.toSeconds());
    // source 1
    NullSourceVideo nullSourceVideo1 = NullSourceVideo.build().size(videoStream1.size()).duration(duration_T2_END);
    AudioGenerator audioGenerator1 = AudioGenerator.build().silence().duration(duration_T2_END);
    Concat concat1 = Concat.create(builder, video1InputProcessor, FilterComplex.create(nullSourceVideo1), FilterComplex.create(audioGenerator1));
    FilterComplex fadeAudio1 = FilterComplex.create(FadeAudio.out().startTime(startTime_T1).duration(fadeDuration)).addInput(concat1);
    // source 2
    NullSourceVideo nullSourceVideo2 = NullSourceVideo.build().size(videoStream2.size()).duration(duration_0_T1);
    AudioGenerator audioGenerator2 = AudioGenerator.build().silence().duration(duration_0_T1);
    Concat concat2 = Concat.create(builder, FilterComplex.create(nullSourceVideo2), FilterComplex.create(audioGenerator2), video2InputProcessor);
    FilterComplex fadeAudio2 = FilterComplex.create(FadeAudio.in().startTime(startTime_T1).duration(fadeDuration)).addInput(concat2);
    // blend for fade / merge
    // video
    SetSAR setSAR = SetSAR.toRatio("1");
    Format formatRGBA = Format.with(PixelFormat.RGBA);
    FilterComplex vfc1 = FilterComplex.create(setSAR, formatRGBA).addInput(concat1);
    FilterComplex vfc2 = FilterComplex.create(setSAR, formatRGBA).addInput(concat2);
    Blend blend = Blend.build().mode(Mode.ADDITION).repeatLast(true).opacity(1).exprFade(startTime_T1, fadeDuration);
    Format formatYUV = Format.with(PixelFormat.YUVA422P10LE);
    FilterComplex vfcBlend = FilterComplex.create(blend, formatYUV).addInput(vfc1).addInput(vfc2);
    builder.filter(vfcBlend);
    // audio
    FilterComplex audioMix = AudioMix.build().duration(MixAudioDuration.SHORTEST).addInput(fadeAudio1).addInput(fadeAudio2);
    builder.filter(audioMix);
    // out
    OutputProcessor outputProcessor = builder.addMediaOutputFile(outFile);
    outputProcessor.overwrite();
    FFExecutor<Object> executor = builder.build();
    System.out.println(executor.getCommandLine());
    FilterGraphUI.show(builder.getFFMPEGOperation());
    // executor.execute();
}
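A hypothetical invocation of concatFade1 (file names and the two-second fade are placeholders; Duration.valueOf is assumed to take a value in seconds, as in the snippets below):

// Cross-fade the end of clip1 into the start of clip2, writing the blended result to out.mp4.
concatFade1(new File("clip1.mp4"), new File("clip2.mp4"), Duration.valueOf(2), new File("out.mp4"));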
use of org.fagu.fmv.ffmpeg.executor.FFMPEGExecutorBuilder in project fmv by f-agu.
the class Test method concatFade2.
public static void concatFade2(File in1VideoFile, File in2VideoFile, Duration fadeDuration, File outFile) throws IOException {
    FFMPEGExecutorBuilder builder = FFMPEGExecutorBuilder.create();
    InputProcessor video1InputProcessor = builder.addMediaInputFile(in1VideoFile);
    InputProcessor video2InputProcessor = builder.addMediaInputFile(in2VideoFile);
    VideoStream videoStream1 = video1InputProcessor.getMovieMetadatas().getVideoStream();
    VideoStream videoStream2 = video2InputProcessor.getMovieMetadatas().getVideoStream();
    Time startTime_T1 = Time.valueOf(videoStream1.duration().get().toSeconds() - fadeDuration.toSeconds());
    Duration duration_0_T1 = Duration.valueOf(startTime_T1.toSeconds());
    Time startTime_T2 = Time.valueOf(videoStream2.duration().get().toSeconds() - fadeDuration.toSeconds());
    Duration duration_T2_END = Duration.valueOf(startTime_T2.toSeconds());
    // source 1: video
    NullSourceVideo nullSourceVideo1 = NullSourceVideo.build().size(videoStream1.size()).duration(duration_T2_END);
    Concat concat1V = Concat.create(builder, video1InputProcessor, FilterComplex.create(nullSourceVideo1)).countVideo(1).countAudio(0).countInputs(2);
    // source 1: audio
    AudioGenerator audioGenerator1 = AudioGenerator.build().silence().duration(duration_T2_END);
    Concat concat1A = Concat.create(builder, video1InputProcessor, FilterComplex.create(audioGenerator1)).countVideo(0).countAudio(1).countInputs(2);
    FilterComplex fadeAudio1 = FilterComplex.create(FadeAudio.out().startTime(startTime_T1).duration(fadeDuration)).addInput(concat1A);
    // source 2: video
    NullSourceVideo nullSourceVideo2 = NullSourceVideo.build().size(videoStream2.size()).duration(duration_0_T1);
    Concat concat2V = Concat.create(builder, FilterComplex.create(nullSourceVideo2), video2InputProcessor).countVideo(1).countAudio(0).countInputs(2);
    // source 2: audio
    AudioGenerator audioGenerator2 = AudioGenerator.build().silence().duration(duration_0_T1);
    Concat concat2A = Concat.create(builder, FilterComplex.create(audioGenerator2), video2InputProcessor).countVideo(0).countAudio(1).countInputs(2);
    FilterComplex fadeAudio2 = FilterComplex.create(FadeAudio.in().startTime(startTime_T1).duration(fadeDuration)).addInput(concat2A);
    // blend / merge video
    SetSAR setSAR = SetSAR.toRatio("1");
    Format formatRGBA = Format.with(PixelFormat.RGBA);
    FilterComplex vfc1 = FilterComplex.create(setSAR, formatRGBA).addInput(concat1V);
    FilterComplex vfc2 = FilterComplex.create(setSAR, formatRGBA).addInput(concat2V);
    Blend blend = Blend.build().mode(Mode.ADDITION).repeatLast(true).opacity(1).exprFade(startTime_T1, fadeDuration);
    Format formatYUV = Format.with(PixelFormat.YUVA422P10LE);
    FilterComplex vfcBlend = FilterComplex.create(blend, formatYUV).addInput(vfc1).addInput(vfc2);
    builder.filter(vfcBlend);
    // merge audio
    FilterComplex audioMix = AudioMix.build().duration(MixAudioDuration.SHORTEST).addInput(fadeAudio1).addInput(fadeAudio2);
    builder.filter(audioMix);
    // out
    OutputProcessor outputProcessor = builder.addMediaOutputFile(outFile);
    outputProcessor.overwrite();
    FFExecutor<Object> executor = builder.build();
    System.out.println(executor.getCommandLine());
    FilterGraphUI.show(builder.getFFMPEGOperation());
    // executor.execute();
}
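Both concatFade variants stop at printing the generated command line and opening the filter-graph viewer; the executor.execute() call is commented out. To actually render outFile, that call would be restored, for example:

System.out.println(executor.getCommandLine()); // inspect the generated ffmpeg command
executor.execute(); // run ffmpeg and write outFile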
use of org.fagu.fmv.ffmpeg.executor.FFMPEGExecutorBuilder in project fmv by f-agu.
the class FFHelper method oo.
/**
 * @param srcFile
 * @param outFile
 * @throws IOException
 */
public static void oo(File srcFile, File outFile) throws IOException {
    final int DEFAULT_AUDIO_SAMPLE_RATE = 44100;
    final int DEFAULT_AUDIO_BIT_RATE = 128000;
    final int DEFAULT_AUDIO_CHANNEL = 2;
    Size size = Size.valueOf(930, 500);
    FFMPEGExecutorBuilder builder = FFMPEGExecutorBuilder.create();
    builder.hideBanner();
    // input
    InputProcessor inputProcessor = builder.addMediaInputFile(srcFile);
    // infos
    MovieMetadatas movieMetadatas = inputProcessor.getMovieMetadatas();
    int audioFrequency = FFMpegUtils.minAudioSampleRate(movieMetadatas, DEFAULT_AUDIO_SAMPLE_RATE);
    int audioBitRate = FFMpegUtils.minAudioBitRate(movieMetadatas, DEFAULT_AUDIO_BIT_RATE);
    int audioChannel = FFMpegUtils.minAudioChannel(movieMetadatas, DEFAULT_AUDIO_CHANNEL);
    // filters (keep the filter order)
    builder.filter(AutoRotate.create(movieMetadatas));
    builder.filter(Scale.to(size, ScaleMode.fitToBoxKeepAspectRatio()));
    builder.filter(Format.with(PixelFormat.YUV420P));
    builder.filter(ResampleAudio.build().frequency(audioFrequency));
    // output
    builder.mux(MP4Muxer.to(outFile).movflags(Movflags.FASTSTART)) // also possible: Movflags.FRAG_KEYFRAME, Movflags.EMPTY_MOOV
            .codec(H264.findRecommanded().mostCompatible())
            .pixelFormat(PixelFormat.YUV420P) // for QuickTime/Safari
            .codecAutoSelectAAC()
            .audioChannel(audioChannel)
            .audioBitRate(audioBitRate)
            .overwrite();
    FFExecutor<Object> executor = builder.build();
    executor.execute();
}
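A hypothetical call (paths are placeholders); the result is an MP4 scaled to fit within 930x500, encoded with the most compatible H.264 settings, YUV420P pixel format, AAC audio and the faststart flag for progressive web playback:

oo(new File("camera-clip.mov"), new File("web-friendly.mp4"));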
use of org.fagu.fmv.ffmpeg.executor.FFMPEGExecutorBuilder in project fmv by f-agu.
the class FFHelper method splitTo3.
/**
 * @param inFile
 * @param outFile1
 * @param outFile2
 * @param outFile3
 * @throws IOException
 */
public static void splitTo3(File inFile, File outFile1, File outFile2, File outFile3) throws IOException {
    // outFile1 : no fade
    // outFile2 : fade in
    // outFile3 : fade out
    FFMPEGExecutorBuilder builder = FFMPEGExecutorBuilder.create();
    InputProcessor inputProcessor = builder.addMediaInputFile(inFile);
    SplitVideo splitVideo = SplitVideo.build();
    splitVideo.addInput(inputProcessor);
    OutputKey outv1 = splitVideo.addOutput();
    OutputKey outv2 = splitVideo.addOutput();
    OutputKey outv3 = splitVideo.addOutput();
    SplitAudio splitAudio = SplitAudio.build();
    splitAudio.addInput(inputProcessor);
    OutputKey outa1 = splitAudio.addOutput();
    OutputKey outa2 = splitAudio.addOutput();
    OutputKey outa3 = splitAudio.addOutput();
    Fade fade2 = Fade.create(FadeType.IN, Time.valueOf(0), Duration.valueOf(1));
    Fade fade3 = Fade.create(FadeType.OUT, Time.valueOf(0), Duration.valueOf(1));
    fade2.addInput(outv2).addInput(outa2);
    fade3.addInput(outv3).addInput(outa3);
    builder.filter(splitVideo);
    builder.filter(splitAudio);
    builder.filter(fade2);
    builder.filter(fade3);
    OutputProcessor outputProcessor1 = builder.addMediaOutputFile(outFile1);
    outputProcessor1.map().allStreams().label(outv1.getLabel()).label(outa1.getLabel());
    outputProcessor1.overwrite();
    OutputProcessor outputProcessor2 = builder.addMediaOutputFile(outFile2);
    outputProcessor2.map().allStreams().input(fade2);
    outputProcessor2.overwrite();
    OutputProcessor outputProcessor3 = builder.addMediaOutputFile(outFile3);
    outputProcessor3.map().allStreams().input(fade3);
    outputProcessor3.overwrite();
    FFExecutor<Object> executor = builder.build();
    executor.execute();
}
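A hypothetical call (file names are placeholders), producing in a single ffmpeg run the plain copy, the one-second fade-in variant and the one-second fade-out variant:

splitTo3(new File("in.mp4"), new File("plain.mp4"), new File("fade-in.mp4"), new File("fade-out.mp4"));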