Usage example of javax.sound.sampled.AudioInputStream in the project java-design-patterns (by iluwatar): class Audio, method update().
/**
 * This method uses the Update Method pattern.
 * It takes the next pending audio request from the queue and plays it.
 * Playback errors are reported to stderr but never propagate to the caller,
 * so the game loop driving this update is never interrupted by audio failures.
 */
public static void update() {
  // If there are no pending requests, do nothing.
  if (headIndex == tailIndex) {
    return;
  }
  try {
    // Consume the oldest pending request and advance the head of the queue.
    AudioInputStream audioStream = getPendingAudio()[headIndex].getStream();
    headIndex++;
    // Obtain a clip from the system mixer and start asynchronous playback.
    Clip clip = AudioSystem.getClip();
    clip.open(audioStream);
    clip.start();
  } catch (LineUnavailableException e) {
    // Fixed typo in the original message ("occoured" -> "occurred").
    System.err.println("Error occurred while loading the audio: The line is unavailable");
    e.printStackTrace();
  } catch (IOException e) {
    System.err.println("Input/Output error while loading the audio");
    e.printStackTrace();
  } catch (IllegalArgumentException e) {
    System.err.println("The system doesn't support the sound: " + e.getMessage());
  }
}
Usage example of javax.sound.sampled.AudioInputStream in the project yamcs-studio (by yamcs): class SoundSystem, method playAlarm().
/**
 * Starts looping the alarm sound until the clip is stopped elsewhere.
 * Any playback failure is logged at FINE level so a missing or busy audio
 * device does not disturb the user.
 */
private void playAlarm() {
  try {
    InputStream in = SoundSystem.class.getResourceAsStream(ALARM_SOUND);
    if (in == null) {
      // getResourceAsStream returns null when the resource is absent from
      // the bundle; bail out explicitly instead of letting a
      // NullPointerException be swallowed by the generic catch below.
      log.log(Level.FINE, "Alarm sound resource not found: " + ALARM_SOUND);
      return;
    }
    AudioInputStream audioIn = AudioSystem.getAudioInputStream(new BufferedInputStream(in));
    // Acquire the clip only after the audio stream was obtained, so
    // alarmClip is not left assigned when the sound cannot be decoded.
    alarmClip = AudioSystem.getClip();
    alarmClip.open(audioIn);
    alarmClip.loop(Clip.LOOP_CONTINUOUSLY);
  } catch (Exception e) {
    log.log(Level.FINE, "Error playing alarm sound", e);
  }
}
Usage example of javax.sound.sampled.AudioInputStream in the project jersey (by jersey): class ToneGenerator, method writeWav().
/**
 * Writes the temporary file with the generated audio.
 *
 * @param inputStream input stream with the waveform
 * @param length length of the waveform in sample frames
 * @return absolute path of the generated temporary file
 * @throws IOException if the temporary file cannot be created or written
 */
private static String writeWav(InputStream inputStream, int length) throws IOException {
  // 8-bit signed mono PCM, one byte per frame, little-endian.
  AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, SAMPLE_RATE, 8, 1, 1, SAMPLE_RATE, false);
  // Use a proper ".wav" suffix; the original suffix "." produced file names
  // like "wav123456." with a bare trailing dot instead of a WAV extension.
  File file = File.createTempFile("wav", ".wav");
  AudioSystem.write(new AudioInputStream(inputStream, format, length), AudioFileFormat.Type.WAVE, file);
  return file.getAbsolutePath();
}
Usage example of javax.sound.sampled.AudioInputStream in the project screenbird (by adamhub): class AudioCache, method playAudio().
/**
 * Spawns a background thread that plays this cache's audio file starting
 * from {@code startTimeMS}, updating {@code currTimeMS}/{@code currOffset}
 * as playback progresses and skipping any seconds listed in
 * {@code scrubIndex}. Sets {@code isPlaying} for the duration of playback.
 */
private synchronized void playAudio() {
try {
// Drop any stale reference to a previous playback thread.
if (this.playThread != null) {
this.playThread = null;
}
if (this.cacheStream != null) {
// Flush and close the write-side stream so the cache file can be read.
this.cacheStream.flush();
this.cacheStream.close();
this.cacheStream = null;
}
// Load audio cache
log(String.format("Loading audio cache %s %d", this.cacheFile.getAbsolutePath(), this.cacheFile.length()));
final FileInputStream input = new FileInputStream(this.cacheFile);
log("Loaded audio cache file with size" + input.available());
// Resolve a SourceDataLine matching the cache file's audio format.
// On failure dataLine stays unset; the NullPointerException catch in the
// finally block below is what papers over that case.
try {
AudioInputStream audioStream = AudioSystem.getAudioInputStream(this.cacheFile);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioStream.getFormat());
dataLine = (SourceDataLine) AudioSystem.getLine(info);
} catch (IOException e) {
log(e);
} catch (UnsupportedAudioFileException e) {
log(e);
}
// Set up audio playback thread.
Thread runner = new Thread("Preview Audio Thread") {
@Override
public void run() {
try {
// Marks audio in playing state
isPlaying = true;
// Prep hardware for audio playback
dataLine.open();
dataLine.start();
// Read data for playing audio
int bytesRead = 0;
int buffSize = dataLine.getBufferSize();
byte[] data = new byte[buffSize];
// Total playback time in ms = bytes / (frameSize * sampleRate).
// NOTE(review): assumes audioFormat matches the cache file's actual
// format — confirm against where audioFormat is assigned.
totalTimeMS = (long) ((input.available() * 1000) / ((double) audioFormat.getFrameSize() * audioFormat.getSampleRate()));
log("TimeMS: " + (startTimeMS));
// Convert the requested start time into a byte offset in the stream.
// NOTE(review): startTimeMS / 1000 is integer division, so the seek
// granularity is whole seconds — presumably intentional; confirm.
currOffset = (long) ((startTimeMS / 1000) * (double) audioFormat.getFrameSize() * audioFormat.getSampleRate());
currTimeMS = startTimeMS;
log(String.format("Seek to MS[%d] Bytes[%d] TotalBytes[%d]", startTimeMS, currOffset, input.available()));
// If not starting at beginning of audio file, skip ahead.
input.skip(currOffset);
// Play the entire audio
while ((bytesRead = input.read(data, 0, data.length)) != -1 && isPlaying) {
currOffset += bytesRead;
// Update current time of audio that is being played.
// NOTE(review): the -600 ms correction looks like an empirically
// tuned output-latency fudge factor — confirm before changing.
currTimeMS = (long) ((currOffset * 1000) / ((double) audioFormat.getFrameSize() * audioFormat.getSampleRate())) - 600;
// Check to see if sequence has been scrubbed
if (scrubIndex != null && !scrubIndex.isEmpty() && // Is current second in scrub index array
scrubIndex.indexOf((int) (currTimeMS / 1000)) >= 0) {
// Current second was scrubbed out: consume it without playing.
continue;
}
// Write to audio line
dataLine.write(data, 0, data.length);
}
if (isPlaying && dataLine != null) {
// Let buffered audio finish playing, then discard any remainder.
dataLine.drain();
dataLine.flush();
}
// Kills video feed
currTimeMS = totalTimeMS;
isPlaying = false;
if (dataLine != null) {
dataLine.stop();
}
log("Done with Audio");
} catch (LineUnavailableException e) {
log("No sound line available!" + e);
} catch (IOException e) {
log(e);
} finally {
// Release audio playback hardware
try {
dataLine.close();
} catch (NullPointerException e) {
// dataLine may be null if AudioSystem.getLine failed earlier.
}
try {
input.close();
} catch (IOException e) {
log(e);
} catch (NullPointerException e) {
// Do nothing
}
}
// Stop running playback thread
this.interrupt();
playThread = null;
}
};
// Start audio playback thread
playThread = null;
playThread = runner;
playThread.start();
} catch (LineUnavailableException e) {
System.err.println("Line unavailable: " + e);
System.exit(-4);
} catch (FileNotFoundException e) {
log(e);
} catch (IOException e) {
log(e);
}
}
Usage example of javax.sound.sampled.AudioInputStream in the project playn (by threerings): class BigClip, method open().
/**
 * Loads the given stream fully into memory, decoding to signed PCM if
 * necessary, and opens a source data line ready for playback. Mono input is
 * described to the mixer as a two-channel format of the same sample size.
 */
@Override
public void open(AudioInputStream stream) throws IOException, LineUnavailableException {
  format = stream.getFormat();
  // Decode to signed PCM when the incoming stream is in any other encoding.
  AudioInputStream pcmStream =
      (format.getEncoding() == AudioFormat.Encoding.PCM_SIGNED)
          ? stream
          : AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, stream);
  format = pcmStream.getFormat();
  // Buffer the entire decoded stream in memory.
  ByteArrayOutputStream buffered = new ByteArrayOutputStream();
  byte[] chunk = new byte[1 << 16];
  int count;
  while ((count = pcmStream.read(chunk, 0, chunk.length)) > -1) {
    buffered.write(chunk, 0, count);
  }
  pcmStream.close();
  audioData = buffered.toByteArray();
  // Choose the format the mixer line is opened with.
  AudioFormat lineFormat;
  if (format.getChannels() >= 2) {
    lineFormat = format;
  } else {
    // Mono source: advertise an equivalent stereo format (frame size doubles).
    int stereoFrameSize = format.getSampleSizeInBits() * 2 / 8;
    lineFormat = new AudioFormat(format.getEncoding(), format.getSampleRate(),
        format.getSampleSizeInBits(), 2, stereoFrameSize, format.getFrameRate(),
        format.isBigEndian());
  }
  setLoopPoints(0, audioData.length);
  dataLine = AudioSystem.getSourceDataLine(lineFormat);
  dataLine.open();
  inputStream = new ByteArrayInputStream(audioData);
}
Aggregations