use of javax.sound.sampled.AudioFormat in project screenbird by adamhub.
the class AudioRecorder method cutAudioFile.
/**
 * Trims the given audio file to the requested length, rewriting it in place as a WAVE file.
 *
 * @param audioFile the audio file to trim in place
 * @param audioLength the target length in milliseconds
 * @throws UnsupportedAudioFileException
 * @throws IOException
 */
public static void cutAudioFile(File audioFile, long audioLength) throws UnsupportedAudioFileException, IOException {
    AudioInputStream soundFile = AudioSystem.getAudioInputStream(audioFile);
    AudioFormat format = soundFile.getFormat();
    // Convert the target length from milliseconds to frames, then to bytes
    double frameLength = audioLength / 1000.0 * format.getFrameRate();
    long totalBytes = (long) (frameLength * format.getFrameSize());
    // Read the complete audio data into memory, one frame at a time
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    byte[] sampledData = new byte[format.getFrameSize()];
    int bytesRead;
    while ((bytesRead = soundFile.read(sampledData)) != -1) {
        byteArrayOutputStream.write(sampledData, 0, bytesRead);
    }
    byte[] abData = byteArrayOutputStream.toByteArray();
    ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(abData);
    // Declare only the requested number of frames so the rewritten file is truncated
    AudioInputStream audioInputStream = new AudioInputStream(byteArrayInputStream, format, totalBytes / format.getFrameSize());
    try {
        AudioSystem.write(audioInputStream, AudioFileFormat.Type.WAVE, audioFile);
    } catch (IOException e) {
        log("Failed to rewrite trimmed audio file: " + e.getMessage());
    }
    soundFile.close();
}
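A minimal usage sketch (the file name and duration are hypothetical), trimming a recorded WAV in place to its first five seconds:

File clip = new File("recording_0.wav");  // hypothetical segment produced by the recorder
AudioRecorder.cutAudioFile(clip, 5000L);  // keep only the first 5000 ms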
use of javax.sound.sampled.AudioFormat in project screenbird by adamhub.
the class AudioRecorder method compileAudio.
/**
 * Compiles the recorded audio segments into a single audio file.
 *
 * @param audioFileName path of the output audio file
 */
public void compileAudio(String audioFileName) {
    if (this.getAudioFiles().size() > 0) {
        try {
            AudioFormat format = null;
            int audioFilesSize = this.getAudioFiles().size();
            // Trim every segment except the last one to its intended duration
            for (int i = 0; i < audioFilesSize; i++) {
                if (i < audioFilesSize - 1) {
                    AudioFileItem audioFileItem = this.getAudioFiles().get(i);
                    File audioFile = new File(audioFileItem.getName());
                    if (audioFile.isFile()) {
                        long audioLength = audioFileItem.getEndMS() - audioFileItem.getStartMS();
                        AudioRecorder.cutAudioFile(audioFile, audioLength);
                    }
                }
            }
            File file = new File(audioFileName);
            this.audioInputStreams = new Vector<AudioInputStream>();
            long length = 0;
            long correctStartTimestamp = 0;
            // Iterate over audio files
            for (int i = 0; i < audioFilesSize; i++) {
                AudioFileItem audioFileItem = this.getAudioFiles().get(i);
                File audioFile = new File(audioFileItem.getName());
                if (audioFile.isFile()) {
                    AudioInputStream audioStream = AudioSystem.getAudioInputStream(audioFile);
                    // Check whether empty sound must be added before this sound file
                    if (audioFileItem.getStartMS() > correctStartTimestamp && audioFileItem.isPreviousDropped()) {
                        // The correct start time for the audio file should be 0 if it is
                        // the first file, or equal the end time of the previous audio file
                        AudioInputStream audioStreamEmpty = addEmptySound(audioFileItem.getStartMS(), correctStartTimestamp);
                        length += audioStreamEmpty.getFrameLength();
                        this.audioInputStreams.add(audioStreamEmpty);
                    }
                    correctStartTimestamp = audioFileItem.getStartMS() + (long) (1000 * audioStream.getFrameLength() / audioStream.getFormat().getFrameRate());
                    format = audioStream.getFormat();
                    length += audioStream.getFrameLength();
                    this.audioInputStreams.add(audioStream);
                }
            }
            // Concatenate all segments and silence padding into the target file
            compileAudioStreams(this.audioInputStreams, file, format, length);
        } catch (IOException ex) {
            log(ex);
        } catch (UnsupportedAudioFileException e) {
            log(e);
        }
    }
}
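compileAudioStreams itself is not shown on this page. The following is a minimal sketch of how such a concatenation helper could look using only standard javax.sound.sampled calls (plus java.io.SequenceInputStream and java.util.Collections), assuming every stream shares the same AudioFormat; it is an illustration, not necessarily the project's actual implementation.

public static void compileAudioStreams(Vector<AudioInputStream> streams, File target, AudioFormat format, long totalFrames) throws IOException {
    // Chain the streams back to back; the declared length is the sum of their frame counts
    AudioInputStream combined = new AudioInputStream(new SequenceInputStream(Collections.enumeration(streams)), format, totalFrames);
    AudioSystem.write(combined, AudioFileFormat.Type.WAVE, target);
}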
use of javax.sound.sampled.AudioFormat in project screenbird by adamhub.
the class AudioRecorder method addEmptySound.
/**
 * Builds a silent audio stream used as padding to cover the gap between the
 * actual and the expected start times of consecutive audio files.
 *
 * @param startTimestamp actual start time of the current audio file, in milliseconds
 * @param correctStartTimestamp expected start time (end of the previous audio file), in milliseconds
 * @return an AudioInputStream of silence spanning the gap, or null if no silence could be generated
 * @throws UnsupportedAudioFileException
 */
public static AudioInputStream addEmptySound(long startTimestamp, long correctStartTimestamp) throws UnsupportedAudioFileException {
    AudioInputStream emptyAudioStream = null;
    AudioFormat format = null;
    long prevLength = 0;
    // Gap between the actual and the expected start, rounded up to whole seconds
    long secondsRounded = (long) Math.ceil(((double) (startTimestamp - correctStartTimestamp)) / 1000.0);
    long realMillis = startTimestamp - correctStartTimestamp;
    log("silent duration rounded: " + secondsRounded);
    log("silent duration millis: " + realMillis);
    log("current file start: " + startTimestamp);
    log("current file correct start: " + correctStartTimestamp);
    try {
        // Chain one-second silent WAVs until the rounded gap is covered
        Vector<AudioInputStream> emptyStreams = new Vector<AudioInputStream>();
        for (int i = 0; i < secondsRounded; i++) {
            InputStream emptyAudioFileWav = getSilentWav();
            if (emptyAudioFileWav != null) {
                emptyAudioStream = AudioSystem.getAudioInputStream(emptyAudioFileWav);
                prevLength += emptyAudioStream.getFrameLength();
                format = emptyAudioStream.getFormat();
                emptyStreams.add(emptyAudioStream);
            }
        }
        if (emptyStreams.size() > 0) {
            // Write the silence to a temporary file and trim it to the exact gap length
            String empty = recordingDir + FileUtil.addExtension(String.valueOf(System.currentTimeMillis()), "empty");
            File file = new File(empty);
            AudioRecorder.compileAudioStreams(emptyStreams, file, format, prevLength);
            AudioRecorder.cutAudioFile(file, realMillis);
            emptyAudioStream = AudioSystem.getAudioInputStream(file);
        }
    } catch (IOException ex) {
        log("Failed to build silent padding: " + ex.getMessage());
    }
    if (emptyAudioStream != null) {
        log("silent length in millis after cut: " + String.valueOf(1000.0 * emptyAudioStream.getFrameLength() / emptyAudioStream.getFormat().getFrameRate()));
    }
    log("===================");
    return emptyAudioStream;
}
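For example, if the previous file ended at 12,000 ms and the current one starts at 13,750 ms, secondsRounded is 2: two one-second silent WAVs are concatenated into a temporary file, which cutAudioFile then trims back to the exact 1,750 ms gap.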
use of javax.sound.sampled.AudioFormat in project MinecraftForge by MinecraftForge.
the class CodecIBXM method initialize.
/**
 * Prepares an audio stream to read from. If another stream is already opened,
 * it will be closed and a new audio stream opened in its place.
 * @param url URL to an audio file to stream from.
 * @return True if the module was loaded successfully; false if an error occurred.
 */
@Override
public boolean initialize(URL url) {
    initialized(SET, false);
    cleanup();
    if (url == null) {
        errorMessage("url null in method 'initialize'");
        cleanup();
        return false;
    }
    InputStream is = null;
    try {
        is = url.openStream();
    } catch (IOException ioe) {
        errorMessage("Unable to open stream in method 'initialize'");
        printStackTrace(ioe);
        return false;
    }
    // Decoder and output format: 48 kHz, 16-bit, stereo, signed, big-endian
    if (ibxm == null)
        ibxm = new IBXM(48000);
    if (myAudioFormat == null)
        myAudioFormat = new AudioFormat(48000, 16, 2, true, true);
    try {
        setModule(loadModule(is));
    } catch (IllegalArgumentException iae) {
        errorMessage("Illegal argument in method 'initialize'");
        printStackTrace(iae);
        if (is != null) {
            try {
                is.close();
            } catch (IOException ioe) {
            }
        }
        return false;
    } catch (IOException ioe) {
        errorMessage("Error loading module in method 'initialize'");
        printStackTrace(ioe);
        if (is != null) {
            try {
                is.close();
            } catch (IOException ioe2) {
            }
        }
        return false;
    }
    if (is != null) {
        try {
            is.close();
        } catch (IOException ioe) {
        }
    }
    endOfStream(SET, false);
    initialized(SET, true);
    return true;
}
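For reference, the five-argument AudioFormat constructor used above takes (sampleRate, sampleSizeInBits, channels, signed, bigEndian). A small standalone sketch showing the frame size derived from those values:

AudioFormat fmt = new AudioFormat(48000f, 16, 2, true, true);
// 2 channels x 16 bits = 4 bytes per frame, at 48000 frames per second
System.out.println(fmt.getFrameSize() + " bytes/frame at " + fmt.getFrameRate() + " frames/s");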
use of javax.sound.sampled.AudioFormat in project ACS by ACS-Community.
the class AlarmSound method dumpAudioInformation.
/**
 * Dumps info about supported audio file types, available mixers and the
 * configured alarm sound files.
 * <P>
 * This method is useful while updating the audio files.
 */
private void dumpAudioInformation() {
    // Java supported file types
    AudioFileFormat.Type[] fileTypes = AudioSystem.getAudioFileTypes();
    if (fileTypes == null || fileTypes.length == 0) {
        System.out.println("No audio file types supported.");
    } else {
        for (AudioFileFormat.Type type : fileTypes) {
            System.out.println(type.toString() + ", extension " + type.getExtension());
        }
    }
    Mixer.Info[] mixerInfos = AudioSystem.getMixerInfo();
    System.out.println("Mixers found: " + mixerInfos.length);
    for (Mixer.Info mi : mixerInfos) {
        System.out.println("\tMixer " + mi.getName() + ": " + mi.getVendor() + ", " + mi.getDescription());
    }
    // Dump info about the alarm files
    for (URL url : soundURLs) {
        AudioFileFormat format = null;
        try {
            format = AudioSystem.getAudioFileFormat(url);
        } catch (IOException ioe) {
            System.err.println("Error " + ioe.getMessage() + " accessing URL " + url.toString());
            continue;
        } catch (UnsupportedAudioFileException ue) {
            System.err.println("Unsupported audio format for " + url + " (" + ue.getMessage() + ")");
            continue;
        }
        System.out.println("Properties of " + url);
        System.out.println("\tAudio file type " + format.getType().toString());
        System.out.println("\tIs file type supported: " + AudioSystem.isFileTypeSupported(format.getType()));
        System.out.println("\tLength in bytes " + format.getByteLength());
        Map<String, Object> props = format.properties();
        Set<String> keys = props.keySet();
        for (String str : keys) {
            System.out.println("\t[" + str + ", " + props.get(str).toString() + "]");
        }
        AudioFormat aFormat = format.getFormat();
        System.out.println("\tEncoding " + aFormat.getEncoding().toString());
        System.out.print("\tByte order ");
        if (aFormat.isBigEndian()) {
            System.out.println("big endian");
        } else {
            System.out.println("little endian");
        }
        System.out.println("\tSample rate: " + aFormat.getSampleRate());
        System.out.println("\tNum. of bits of a sample: " + aFormat.getSampleSizeInBits());
        System.out.println("\tNum. of channels: " + aFormat.getChannels());
    }
}
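A related check that often accompanies this kind of dump is whether the system can actually open a playback line for a given AudioFormat. A hedged sketch using only standard javax.sound.sampled calls (the 44.1 kHz format is just an example):

AudioFormat fmt = new AudioFormat(44100f, 16, 2, true, false);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, fmt);
System.out.println("Playback supported: " + AudioSystem.isLineSupported(info));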