Use of ch.randelshofer.media.mp3.MP3AudioInputStream in project processing by processing.
The class MovieMaker, method writeVideoAndAudio:
private void writeVideoAndAudio(File movieFile, File[] imgFiles, File audioFile,
                                int width, int height, double fps,
                                QuickTimeWriter.VideoFormat videoFormat, /*boolean passThrough,*/
                                String streaming) throws IOException {
  File tmpFile = streaming.equals("none") ? movieFile : new File(movieFile.getPath() + ".tmp");
  ProgressMonitor p = new ProgressMonitor(MovieMaker.this,
      Language.interpolate("movie_maker.progress.creating_file_name", movieFile.getName()),
      Language.text("movie_maker.progress.creating_output_file"), 0, imgFiles.length);
  AudioInputStream audioIn = null;
  QuickTimeWriter qtOut = null;
  BufferedImage imgBuffer = null;
  Graphics2D g = null;
  try {
    // Determine audio format
    if (audioFile.getName().toLowerCase().endsWith(".mp3")) {
      audioIn = new MP3AudioInputStream(audioFile);
    } else {
      audioIn = AudioSystem.getAudioInputStream(audioFile);
    }
    AudioFormat audioFormat = audioIn.getFormat();
    boolean isVBR = audioFormat.getProperty("vbr") != null &&
        ((Boolean) audioFormat.getProperty("vbr")).booleanValue();
    // Determine duration of a single sample
    int asDuration = (int) (audioFormat.getSampleRate() / audioFormat.getFrameRate());
    int vsDuration = 100;
    // Create writer
    qtOut = new QuickTimeWriter(videoFormat == QuickTimeWriter.VideoFormat.RAW ? movieFile : tmpFile);
    // audio in track 0
    qtOut.addAudioTrack(audioFormat);
    // video in track 1
    qtOut.addVideoTrack(videoFormat, (int) (fps * vsDuration), width, height);
    // Create audio buffer
    int asSize;
    byte[] audioBuffer;
    if (isVBR) {
      // => variable bit rate: create audio buffer for a single frame
      asSize = audioFormat.getFrameSize();
      audioBuffer = new byte[asSize];
    } else {
      // => fixed bit rate: create audio buffer for half a second
      asSize = audioFormat.getChannels() * audioFormat.getSampleSizeInBits() / 8;
      audioBuffer = new byte[(int) (qtOut.getMediaTimeScale(0) / 2 * asSize)];
    }
    //if (!passThrough) {
    if (true) {
      imgBuffer = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
      g = imgBuffer.createGraphics();
      g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
    }
    // Main loop
    int movieTime = 0;
    int imgIndex = 0;
    boolean isAudioDone = false;
    while ((imgIndex < imgFiles.length || !isAudioDone) && !p.isCanceled()) {
      // Advance movie time by half a second (we interleave twice per second)
      movieTime += qtOut.getMovieTimeScale() / 2;
      // Advance audio to movie time + 1 second (audio must be ahead of video by 1 second)
      while (!isAudioDone && qtOut.getTrackDuration(0) < movieTime + qtOut.getMovieTimeScale()) {
        int len = audioIn.read(audioBuffer);
        if (len == -1) {
          isAudioDone = true;
        } else {
          qtOut.writeSamples(0, len / asSize, audioBuffer, 0, len, asDuration);
        }
        if (isVBR) {
          // => variable bit rate: format can change at any time
          audioFormat = audioIn.getFormat();
          if (audioFormat == null) {
            break;
          }
          asSize = audioFormat.getFrameSize();
          asDuration = (int) (audioFormat.getSampleRate() / audioFormat.getFrameRate());
          if (audioBuffer.length < asSize) {
            audioBuffer = new byte[asSize];
          }
        }
      }
      // Advance video to movie time
      for (; imgIndex < imgFiles.length && qtOut.getTrackDuration(1) < movieTime; ++imgIndex) {
        // catch up with video time
        p.setProgress(imgIndex);
        File f = imgFiles[imgIndex];
        if (f == null)
          continue;
        p.setNote(Language.interpolate("movie_maker.progress.processing", f.getName()));
        //if (passThrough) {
        if (false) {
          qtOut.writeSample(1, f, vsDuration);
        } else {
          //BufferedImage fImg = ImageIO.read(imgFiles[imgIndex]);
          BufferedImage fImg = readImage(f);
          if (fImg == null)
            continue;
          g.drawImage(fImg, 0, 0, width, height, null);
          fImg.flush();
          qtOut.writeFrame(1, imgBuffer, vsDuration);
        }
      }
    }
    if (streaming.equals("fastStart")) {
      qtOut.toWebOptimizedMovie(movieFile, false);
      tmpFile.delete();
    } else if (streaming.equals("fastStartCompressed")) {
      qtOut.toWebOptimizedMovie(movieFile, true);
      tmpFile.delete();
    }
    qtOut.close();
    qtOut = null;
  } catch (UnsupportedAudioFileException e) {
    IOException ioe = new IOException(e.getMessage());
    ioe.initCause(e);
    throw ioe;
  } finally {
    p.close();
    if (qtOut != null) {
      qtOut.close();
    }
    if (audioIn != null) {
      audioIn.close();
    }
    if (g != null) {
      g.dispose();
    }
    if (imgBuffer != null) {
      imgBuffer.flush();
    }
  }
}
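The audio source above is opened differently depending on its extension: .mp3 files are wrapped in the Monte Media MP3AudioInputStream, and every other format is delegated to javax.sound.sampled.AudioSystem. Below is a minimal sketch of that selection as a standalone helper; the class name AudioOpener and the method name openAudio are hypothetical and not part of MovieMaker.

// Assumed helper (not in MovieMaker) isolating the audio-stream selection
// pattern used by writeVideoAndAudio above.
import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;
import ch.randelshofer.media.mp3.MP3AudioInputStream;

class AudioOpener {
  static AudioInputStream openAudio(File audioFile)
      throws IOException, UnsupportedAudioFileException {
    if (audioFile.getName().toLowerCase().endsWith(".mp3")) {
      // .mp3 input is read directly from the File by the Monte Media decoder.
      return new MP3AudioInputStream(audioFile);
    }
    // All other formats (WAV, AIFF, ...) go through the standard Java Sound API.
    return AudioSystem.getAudioInputStream(audioFile);
  }
}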
Use of ch.randelshofer.media.mp3.MP3AudioInputStream in project processing by processing.
The class MovieMaker, method writeAudioOnly:
private void writeAudioOnly(File movieFile, File audioFile, String streaming) throws IOException {
  File tmpFile = streaming.equals("none") ? movieFile : new File(movieFile.getPath() + ".tmp");
  // file length is used for a rough progress estimate. This will only work for uncompressed audio.
  int length = (int) Math.min(Integer.MAX_VALUE, audioFile.length());
  ProgressMonitor p = new ProgressMonitor(MovieMaker.this,
      Language.interpolate("movie_maker.progress.creating_file_name", movieFile.getName()),
      Language.text("movie_maker.progress.initializing"), 0, length);
  AudioInputStream audioIn = null;
  QuickTimeWriter qtOut = null;
  try {
    qtOut = new QuickTimeWriter(tmpFile);
    if (audioFile.getName().toLowerCase().endsWith(".mp3")) {
      audioIn = new MP3AudioInputStream(audioFile);
    } else {
      audioIn = AudioSystem.getAudioInputStream(audioFile);
    }
    AudioFormat audioFormat = audioIn.getFormat();
    //System.out.println("QuickTimeMovieMakerMain " + audioFormat);
    qtOut.addAudioTrack(audioFormat);
    boolean isVBR = audioFormat.getProperty("vbr") != null &&
        ((Boolean) audioFormat.getProperty("vbr")).booleanValue();
    int asSize = audioFormat.getFrameSize();
    int nbOfFramesInBuffer = isVBR ? 1 : Math.max(1, 1024 / asSize);
    int asDuration = (int) (audioFormat.getSampleRate() / audioFormat.getFrameRate());
    //System.out.println(" frameDuration=" + asDuration);
    long count = 0;
    byte[] audioBuffer = new byte[asSize * nbOfFramesInBuffer];
    for (int bytesRead = audioIn.read(audioBuffer); bytesRead != -1; bytesRead = audioIn.read(audioBuffer)) {
      if (bytesRead != 0) {
        int framesRead = bytesRead / asSize;
        qtOut.writeSamples(0, framesRead, audioBuffer, 0, bytesRead, asDuration);
        count += bytesRead;
        p.setProgress((int) count);
      }
      if (isVBR) {
        audioFormat = audioIn.getFormat();
        if (audioFormat == null) {
          break;
        }
        asSize = audioFormat.getFrameSize();
        asDuration = (int) (audioFormat.getSampleRate() / audioFormat.getFrameRate());
        if (audioBuffer.length < asSize) {
          audioBuffer = new byte[asSize];
        }
      }
    }
    audioIn.close();
    audioIn = null;
    if (streaming.equals("fastStart")) {
      qtOut.toWebOptimizedMovie(movieFile, false);
      tmpFile.delete();
    } else if (streaming.equals("fastStartCompressed")) {
      qtOut.toWebOptimizedMovie(movieFile, true);
      tmpFile.delete();
    }
    qtOut.close();
    qtOut = null;
  } catch (UnsupportedAudioFileException e) {
    IOException ioe = new IOException(e.getMessage());
    ioe.initCause(e);
    throw ioe;
  } finally {
    p.close();
    if (audioIn != null) {
      audioIn.close();
    }
    if (qtOut != null) {
      qtOut.close();
    }
  }
}
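The sample-duration bookkeeping in both methods rests on asDuration = sampleRate / frameRate, i.e. the number of sample-rate ticks that one audio frame covers when passed to writeSamples. A short illustration with assumed example formats follows; the values are illustrative and not taken from MovieMaker.

// Illustrative arithmetic for asDuration = sampleRate / frameRate (assumed example values).
// Uncompressed PCM: one frame per sample, so frameRate == sampleRate and each frame lasts 1 tick.
float pcmSampleRate = 44100f;
float pcmFrameRate  = 44100f;
int pcmDuration = (int) (pcmSampleRate / pcmFrameRate);   // 1

// MPEG-1 Layer III: each compressed frame carries 1152 samples,
// so frameRate = sampleRate / 1152 and each frame lasts 1152 ticks.
float mp3SampleRate = 44100f;
float mp3FrameRate  = 44100f / 1152f;
int mp3Duration = (int) (mp3SampleRate / mp3FrameRate);   // 1152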