Use of javax.sound.sampled.AudioInputStream in project screenbird (by adamhub):
class AudioRecorder, method cutAudioFile.
/**
 * Truncates the given audio file in place so that it contains only the
 * first {@code audioLength} milliseconds of audio, rewriting it as a
 * WAVE file.
 *
 * @param audioFile   the audio file to shorten; it is overwritten with the result
 * @param audioLength the desired duration in milliseconds
 * @throws UnsupportedAudioFileException if the file's format is not recognized
 * @throws IOException if the file cannot be read or rewritten
 */
public static void cutAudioFile(File audioFile, long audioLength) throws UnsupportedAudioFileException, IOException {
    ByteArrayOutputStream bufferedAudio = new ByteArrayOutputStream();
    AudioFormat format;
    long totalFrames;
    AudioInputStream soundFile = AudioSystem.getAudioInputStream(audioFile);
    try {
        format = soundFile.getFormat();
        // Number of sample frames that fit into audioLength milliseconds.
        double frameLength = audioLength / 1000.0 * format.getFrameRate();
        totalFrames = (long) (frameLength * format.getFrameSize()) / format.getFrameSize();
        // Buffer the whole file in memory. Only the bytes actually read are
        // written, so a short read cannot append stale buffer contents
        // (the original wrote the full buffer regardless of the read count).
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = soundFile.read(buffer)) != -1) {
            bufferedAudio.write(buffer, 0, bytesRead);
        }
    } finally {
        // Close the source before overwriting the same file below.
        soundFile.close();
    }
    byte[] abData = bufferedAudio.toByteArray();
    AudioInputStream truncated = new AudioInputStream(
            new ByteArrayInputStream(abData), format, totalFrames);
    try {
        // AudioSystem.write stops at the stream's declared frame length, so
        // only the first totalFrames frames end up in the rewritten file.
        // The IOException is propagated (the original silently swallowed it
        // even though this method declares "throws IOException").
        AudioSystem.write(truncated, AudioFileFormat.Type.WAVE, audioFile);
    } finally {
        truncated.close();
    }
}
Use of javax.sound.sampled.AudioInputStream in project screenbird (by adamhub):
class AudioRecorder, method compileAudio.
/**
 * Merges all recorded audio segments into a single WAVE file.
 * <p>
 * Every segment except the last is first trimmed to its intended duration
 * (end timestamp minus start timestamp). The segments are then concatenated
 * in order, inserting silent padding wherever a gap exists between the end
 * of one segment and the recorded start of the next.
 *
 * @param audioFileName path of the output WAVE file to create
 */
public void compileAudio(String audioFileName) {
if (this.getAudioFiles().size() > 0) {
try {
AudioFormat format = null;
int audioFilesSize = this.getAudioFiles().size();
// Trim every segment except the last down to its intended length so a
// segment cannot run past the start of the one that follows it.
for (int i = 0; i < audioFilesSize; i++) {
if (i < audioFilesSize - 1) {
AudioFileItem audioFileItem = this.getAudioFiles().get(i);
File audioFile = new File(audioFileItem.getName());
if (audioFile.isFile()) {
// Intended duration of this segment in milliseconds.
long audioLength = audioFileItem.getEndMS() - audioFileItem.getStartMS();
AudioRecorder.cutAudioFile(audioFile, audioLength);
}
}
}
File file = new File(audioFileName);
this.audioInputStreams = new Vector<AudioInputStream>();
long length = 0; // running total of the output length, in sample frames
long correctStartTimestamp = 0; // expected start time (ms) of the next segment
// Iterate over audio files
for (int i = 0; i < audioFilesSize; i++) {
AudioFileItem audioFileItem = this.getAudioFiles().get(i);
File audioFile = new File(audioFileItem.getName());
if (audioFile.isFile()) {
AudioInputStream audioStream = AudioSystem.getAudioInputStream(audioFile);
// Checks if we need to add empty sound before adding the sound file
if (audioFileItem.getStartMS() > correctStartTimestamp && audioFileItem.isPreviousDropped()) {
// The correct start time for the audio file should
// be 0 if it's the first file or equals the end
// time of the prev audio file
// NOTE(review): addEmptySound can return null on its internal
// IOException path, which would NPE here — confirm getSilentWav()
// always yields a usable stream.
AudioInputStream audioStreamEmpty = addEmptySound(audioFileItem.getStartMS(), correctStartTimestamp);
length += audioStreamEmpty.getFrameLength();
this.audioInputStreams.add(audioStreamEmpty);
}
// Advance the expected start time past this segment: its start plus
// its duration computed from frame count and frame rate.
correctStartTimestamp = audioFileItem.getStartMS() + (long) (1000 * audioStream.getFrameLength() / audioStream.getFormat().getFrameRate());
format = audioStream.getFormat();
length += audioStream.getFrameLength();
this.audioInputStreams.add(audioStream);
}
}
// NOTE(review): if no segment file exists on disk, format is still null
// here and the stream list is empty — verify callers guarantee at least
// one recorded file before relying on this write.
compileAudioStreams(this.audioInputStreams, file, format, length);
} catch (IOException ex) {
log(ex);
} catch (UnsupportedAudioFileException e) {
log(e);
}
}
}
Use of javax.sound.sampled.AudioInputStream in project screenbird (by adamhub):
class AudioRecorder, method compileAudioStreams.
/**
 * Concatenates the given audio streams in order and writes the result to
 * {@code file} as a single WAVE file. All input streams are closed before
 * this method returns, even if the write fails.
 *
 * @param streams the audio streams to append, in playback order
 * @param file    the destination WAVE file
 * @param format  the audio format shared by all of the streams
 * @param length  the total length of the output, in sample frames
 * @throws IOException if the output file cannot be written
 */
public static void compileAudioStreams(Vector<AudioInputStream> streams, File file, AudioFormat format, long length) throws IOException {
    AudioInputStream appendedFiles = new AudioInputStream(
            new SequenceInputStream(streams.elements()), format, length);
    try {
        AudioSystem.write(appendedFiles, AudioFileFormat.Type.WAVE, file);
    } finally {
        // Close everything even when the write throws (the original leaked
        // every stream on failure and never closed the combined stream).
        appendedFiles.close();
        for (AudioInputStream audioStream : streams) {
            try {
                audioStream.close();
            } catch (IOException ignored) {
                // Best effort: keep closing the remaining streams.
            }
        }
    }
}
Use of javax.sound.sampled.AudioInputStream in project screenbird (by adamhub):
class AudioRecorder, method addEmptySound.
/**
 * Adds a silent audio to the recorded audio as padding to fix the difference
 * between the actual and targeted start times.
 * <p>
 * One-second silent clips are concatenated until they cover the gap, then
 * the result is trimmed to the exact gap length with
 * {@link #cutAudioFile(File, long)}.
 *
 * @param startTimestamp        actual start time of the segment, in ms
 * @param correctStartTimestamp expected start time of the segment, in ms
 * @return a stream of silence covering the gap, or {@code null} if no
 *         silent padding could be produced (no silent clip available, a
 *         zero-length gap, or an I/O failure)
 * @throws UnsupportedAudioFileException if a generated clip cannot be parsed
 */
public static AudioInputStream addEmptySound(long startTimestamp, long correctStartTimestamp) throws UnsupportedAudioFileException {
    AudioInputStream emptyAudioStream = null;
    AudioFormat format = null;
    long prevLength = 0;
    // Round the gap up to whole seconds: each silent clip is appended once
    // per second, and the surplus is cut off afterwards.
    long secondsRounded = (long) Math.ceil((((double) (startTimestamp - correctStartTimestamp)) / 1000.0));
    long realMillis = startTimestamp - correctStartTimestamp;
    log("silent duration rounded: " + secondsRounded);
    log("silent duration millis: " + realMillis);
    log("current file start:" + startTimestamp);
    log("current file correct start :" + correctStartTimestamp);
    try {
        Vector<AudioInputStream> emptyStreams = new Vector<AudioInputStream>();
        for (int i = 0; i < secondsRounded; i++) {
            InputStream emptyAudioFileWav = getSilentWav();
            if (emptyAudioFileWav != null) {
                emptyAudioStream = AudioSystem.getAudioInputStream(emptyAudioFileWav);
                prevLength += emptyAudioStream.getFrameLength();
                format = emptyAudioStream.getFormat();
                emptyStreams.add(emptyAudioStream);
            }
        }
        if (emptyStreams.size() > 0) {
            // Write the concatenated silence to a scratch file, trim it to the
            // exact gap length, and reopen it as the stream we return.
            String empty = recordingDir + FileUtil.addExtension(String.valueOf(System.currentTimeMillis()), "empty");
            File file = new File(empty);
            AudioRecorder.compileAudioStreams(emptyStreams, file, format, prevLength);
            AudioRecorder.cutAudioFile(file, realMillis);
            emptyAudioStream = AudioSystem.getAudioInputStream(file);
        }
    } catch (IOException ex) {
        // Was silently swallowed; record it so missing padding is traceable.
        log(ex);
    }
    // Guard the final log: emptyAudioStream is null when no clip was produced
    // (the original dereferenced it unconditionally and could NPE here).
    if (emptyAudioStream != null) {
        log("silent length in millis after cut: " + String.valueOf(emptyAudioStream.getFrameLength() / emptyAudioStream.getFormat().getFrameRate()));
    } else {
        log("no silent padding generated");
    }
    log("===================");
    return emptyAudioStream;
}
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk (by JetBrains):
class SkipTest, method main.
/**
 * Verifies that skipping N bytes on a converted stream yields the same data
 * as reading the converted stream from the start and discarding N bytes.
 */
public static void main(String[] args) throws Exception {
    AudioFloatFormatConverter converter = new AudioFloatFormatConverter();
    byte[] source = { 10, 20, 30, 40, 30, 20, 10 };
    AudioFormat srcFormat = new AudioFormat(8000, 8, 1, true, false);
    AudioFormat dstFormat = new AudioFormat(16000, 8, 1, true, false);
    AudioInputStream original = new AudioInputStream(new ByteArrayInputStream(source), srcFormat, source.length);
    // Reference pass: convert and read everything without skipping.
    AudioInputStream convertedAll = converter.getAudioInputStream(dstFormat, original);
    byte[] expected = new byte[30];
    int expectedCount = convertedAll.read(expected, 0, expected.length);
    // Rewind the source and convert again, this time skipping 7 bytes first.
    original.reset();
    AudioInputStream convertedSkipped = converter.getAudioInputStream(dstFormat, original);
    byte[] actual = new byte[100];
    convertedSkipped.skip(7);
    int actualCount = convertedSkipped.read(actual, 7, actual.length);
    if (actualCount != expectedCount - 7) {
        throw new Exception("Skip doesn't work correctly (" + actualCount + " != " + (expectedCount - 7) + ")");
    }
    // The skipped stream must line up byte-for-byte with the reference data.
    for (int i = 7; i < actualCount + 7; i++) {
        if (actual[i] != expected[i]) {
            throw new Exception("Skip doesn't work correctly (" + actual[i] + " != " + expected[i] + ")");
        }
    }
}
Aggregations