Use of javax.sound.sampled.SourceDataLine in project Minim by ddf — class JSMinim, method getSourceDataLine:
/**
 * Obtains and opens a SourceDataLine for the given format, using the
 * configured output mixer when one is set and the system default otherwise.
 *
 * @param format     the audio format the line must support
 * @param bufferSize requested buffer size in sample frames (the byte buffer
 *                   is sized as bufferSize * frameSize * 4)
 * @return an open SourceDataLine, or null if the format is unsupported or
 *         the line could not be obtained or opened
 */
SourceDataLine getSourceDataLine(AudioFormat format, int bufferSize) {
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    if (!AudioSystem.isLineSupported(info)) {
        error("Unable to return a SourceDataLine: unsupported format - " + format.toString());
        return null;
    }
    SourceDataLine result = null;
    try {
        result = (outputMixer == null)
                ? (SourceDataLine) AudioSystem.getLine(info)
                : (SourceDataLine) outputMixer.getLine(info);
        // remember that time you spent, like, an entire afternoon fussing
        // with this buffer size to try to get the latency decent on Linux?
        // Yah, don't fuss with this anymore, ok?
        result.open(format, bufferSize * format.getFrameSize() * 4);
        if (result.isOpen()) {
            debug("SourceDataLine is " + result.getClass().toString() + "\n" + "Buffer size is " + result.getBufferSize() + " bytes.\n" + "Format is " + result.getFormat().toString() + ".");
        } else {
            result = null;
        }
    } catch (Exception e) {
        error("Couldn't open the line: " + e.getMessage());
        result = null;
    }
    return result;
}
Use of javax.sound.sampled.SourceDataLine in project screenbird by adamhub — class AudioCache, method playAudio:
/**
 * Sets up and starts a background "Preview Audio Thread" that plays back the
 * audio stored in {@code this.cacheFile}, starting from {@code startTimeMS}
 * and skipping any seconds recorded in {@code scrubIndex}.
 * Updates {@code currTimeMS}/{@code totalTimeMS} as playback progresses and
 * clears {@code isPlaying} when done.
 */
private synchronized void playAudio() {
    try {
        // Start a new thread for playing audio
        // NOTE(review): this only drops the reference; it does not stop a
        // previously started playback thread — confirm callers stop playback
        // before invoking this method again.
        if (this.playThread != null) {
            this.playThread = null;
        }
        if (this.cacheStream != null) {
            // Release for reading: flush and close the writer side so the
            // cache file can be consumed below.
            this.cacheStream.flush();
            this.cacheStream.close();
            this.cacheStream = null;
        }
        // Load audio cache
        log(String.format("Loading audio cache %s %d", this.cacheFile.getAbsolutePath(), this.cacheFile.length()));
        final FileInputStream input = new FileInputStream(this.cacheFile);
        log("Loaded audio cache file with size" + input.available());
        // Set up hardware for playback
        try {
            AudioInputStream audioStream = AudioSystem.getAudioInputStream(this.cacheFile);
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioStream.getFormat());
            dataLine = (SourceDataLine) AudioSystem.getLine(info);
        } catch (IOException e) {
            log(e);
        } catch (UnsupportedAudioFileException e) {
            // NOTE(review): if this fires, dataLine stays null and the
            // playback thread below will fail on dataLine.open() — confirm
            // whether that is acceptable here.
            log(e);
        }
        // Set up audio playback thread.
        Thread runner = new Thread("Preview Audio Thread") {

            @Override
            public void run() {
                try {
                    // Marks audio in playing state
                    isPlaying = true;
                    // Prep hardware for audio playback
                    dataLine.open();
                    dataLine.start();
                    // Read data for playing audio
                    int bytesRead = 0;
                    int buffSize = dataLine.getBufferSize();
                    byte[] data = new byte[buffSize];
                    // Compute total time of audio playback (bytes -> ms via
                    // frameSize * sampleRate).
                    // NOTE(review): input.available() * 1000 is int arithmetic
                    // and can overflow for caches over ~2 MB — confirm the
                    // expected cache sizes.
                    totalTimeMS = (long) ((input.available() * 1000) / ((double) audioFormat.getFrameSize() * audioFormat.getSampleRate()));
                    log("TimeMS: " + (startTimeMS));
                    // Prep offsets for accurate audio playback
                    // NOTE(review): startTimeMS / 1000 is integer division, so
                    // the seek position is truncated to whole seconds.
                    currOffset = (long) ((startTimeMS / 1000) * (double) audioFormat.getFrameSize() * audioFormat.getSampleRate());
                    currTimeMS = startTimeMS;
                    log(String.format("Seek to MS[%d] Bytes[%d] TotalBytes[%d]", startTimeMS, currOffset, input.available()));
                    // If not starting at begining of audio file
                    input.skip(currOffset);
                    // Play the entire audio
                    while ((bytesRead = input.read(data, 0, data.length)) != -1 && isPlaying) {
                        currOffset += bytesRead;
                        // Update current time of audio that is being played
                        // (the -600 ms presumably compensates for line
                        // buffering latency — TODO confirm)
                        currTimeMS = (long) ((currOffset * 1000) / ((double) audioFormat.getFrameSize() * audioFormat.getSampleRate())) - 600;
                        // Check to see if sequence has been scrubbed
                        if (scrubIndex != null && !scrubIndex.isEmpty() && // Is current second in scrub index array
                        scrubIndex.indexOf((int) (currTimeMS / 1000)) >= 0) {
                            // Do not write to audio line
                            continue;
                        }
                        // Write to audio line
                        // NOTE(review): writes the full buffer even when
                        // bytesRead < data.length, so the final chunk may
                        // include stale bytes from the previous read.
                        dataLine.write(data, 0, data.length);
                    }
                    if (isPlaying && dataLine != null) {
                        // Let queued audio finish, then discard any leftovers.
                        dataLine.drain();
                        dataLine.flush();
                    }
                    // Kills video feed
                    currTimeMS = totalTimeMS;
                    isPlaying = false;
                    if (dataLine != null) {
                        dataLine.stop();
                    }
                    log("Done with Audio");
                } catch (LineUnavailableException e) {
                    log("No sound line available!" + e);
                } catch (IOException e) {
                    log(e);
                } finally {
                    // Release audio playback hardware
                    try {
                        dataLine.close();
                    } catch (NullPointerException e) {
                        // This always throws an exception for some reason
                    }
                    try {
                        input.close();
                    } catch (IOException e) {
                        log(e);
                    } catch (NullPointerException e) {
                        // Do nothing
                    }
                }
                // Stop running playback thread
                // NOTE(review): this.interrupt() interrupts this thread itself
                // just as run() is about to return — likely a no-op; confirm
                // the original intent.
                this.interrupt();
                playThread = null;
            }
        };
        // Start audio playback thread
        playThread = null;
        playThread = runner;
        playThread.start();
    } catch (LineUnavailableException e) {
        System.err.println("Line unavailable: " + e);
        System.exit(-4);
    } catch (FileNotFoundException e) {
        log(e);
    } catch (IOException e) {
        log(e);
    }
}
Use of javax.sound.sampled.SourceDataLine in project ACS by ACS-Community — class AlarmSound, method play:
/**
 * Play the sound for the given priority.
 * <p>
 * Tries each installed mixer in turn until one provides a line that can be
 * opened, started and played. The line is always released before moving on
 * to the next mixer, and the audio input stream is closed when done.
 *
 * @param priority The priority of the alarm (must be in the range 0..3)
 * @throws IllegalStateException if the priority is out of range
 */
private void play(int priority) throws Exception {
    if (priority < 0 || priority > 3) {
        throw new IllegalStateException("Invalid alarm priority " + priority);
    }
    URL url = soundURLs[priority];
    AudioInputStream audioInputStream = null;
    try {
        audioInputStream = AudioSystem.getAudioInputStream(url);
    } catch (Throwable t) {
        // If there is an error then the panel does nothing
        // It might happen for example if another application
        // is locking the audio.
        System.err.println(t.getMessage());
        t.printStackTrace();
        return;
    }
    try {
        // Obtain the information about the AudioInputStream
        AudioFormat audioFormat = audioInputStream.getFormat();
        SourceDataLine line = null;
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
        // Get the list of available mixers
        Mixer.Info[] mixersInfo = AudioSystem.getMixerInfo();
        // Loop until a mixer whose line plays successfully is found
        for (int i = 0; i < mixersInfo.length && line == null; i++) {
            Mixer.Info mi = mixersInfo[i];
            try {
                Mixer mixer = AudioSystem.getMixer(mi);
                line = (SourceDataLine) mixer.getLine(info);
            } catch (LineUnavailableException lue) {
                System.err.println("Line unavailable " + lue.getMessage());
                line = null;
                continue;
            } catch (Throwable t) {
                System.err.println("Exception getting the line " + t.getMessage());
                line = null;
                continue;
            }
            try {
                line.open(audioFormat, EXTERNAL_BUFFER_SIZE);
            } catch (Throwable t) {
                System.err.println("Error opening the line: " + t.getMessage());
                line = null;
                continue;
            }
            try {
                line.start();
            } catch (Throwable t) {
                System.err.println("Error starting the line: " + t.getMessage());
                // The line was opened: release it before trying the next mixer
                line.close();
                line = null;
                continue;
            }
            try {
                playOnLine(line, audioInputStream);
            } catch (Throwable t) {
                System.err.println("Error playing: " + t.getMessage());
                // The line was opened and started: release it before retrying
                line.close();
                line = null;
                continue;
            }
            // plays what's left and and closes the audioChannel
            line.drain();
            line.close();
        }
    } finally {
        // Always release the stream, even when no mixer could play it
        audioInputStream.close();
    }
}
Use of javax.sound.sampled.SourceDataLine in project JMRI by JMRI — class SoundUtil, method playSoundBuffer:
/**
 * Play a sound from a buffer.
 * <p>
 * The audio format is derived from the WAV header inside {@code wavData}.
 * Blocks until the queued audio has finished playing, then stops and closes
 * the line so repeated calls do not exhaust the mixer's available lines.
 *
 * @param wavData raw WAV data, header included
 */
public static void playSoundBuffer(byte[] wavData) {
    // get characteristics from buffer
    jmri.jmrit.sound.WavBuffer wb = new jmri.jmrit.sound.WavBuffer(wavData);
    float sampleRate = wb.getSampleRate();
    int sampleSizeInBits = wb.getSampleSizeInBits();
    int channels = wb.getChannels();
    boolean signed = wb.getSigned();
    boolean bigEndian = wb.getBigEndian();
    AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    SourceDataLine line;
    // format is an AudioFormat object
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    if (!AudioSystem.isLineSupported(info)) {
        // Handle the error.
        log.warn("line not supported: " + info);
        return;
    }
    // Obtain and open the line.
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
    } catch (LineUnavailableException ex) {
        // Handle the error.
        log.error("error opening line: " + ex);
        return;
    }
    line.start();
    // write(byte[] b, int off, int len)
    line.write(wavData, 0, wavData.length);
    // Previously the line was left open (a native-resource leak); drain the
    // remaining audio, then stop and release the line.
    line.drain();
    line.stop();
    line.close();
}
Use of javax.sound.sampled.SourceDataLine in project jdk8u_jdk by JetBrains — class SoftMixingMixer, method open:
/**
 * Opens this software mixer and starts pushing rendered audio into the given
 * SourceDataLine. When {@code line} is null, a suitable line is searched on
 * the system default mixer (preferring stereo, 16-bit, 48 kHz PCM) and falls
 * back to {@code AudioSystem.getSourceDataLine}.
 *
 * @param line the output line to use, or null to auto-select one
 * @throws LineUnavailableException if no usable line can be opened
 */
public void open(SourceDataLine line) throws LineUnavailableException {
    if (isOpen()) {
        // Already open: an explicit open() call clears the implicit flag.
        implicitOpen = false;
        return;
    }
    synchronized (control_mutex) {
        try {
            if (line != null)
                format = line.getFormat();
            AudioInputStream ais = openStream(getFormat());
            if (line == null) {
                // Record which thread is performing the line lookup so the
                // provider can detect re-entrant requests.
                synchronized (SoftMixingMixerProvider.mutex) {
                    SoftMixingMixerProvider.lockthread = Thread.currentThread();
                }
                try {
                    Mixer defaultmixer = AudioSystem.getMixer(null);
                    if (defaultmixer != null) {
                        // Search for suitable line
                        DataLine.Info idealinfo = null;
                        AudioFormat idealformat = null;
                        Line.Info[] lineinfos = defaultmixer.getSourceLineInfo();
                        idealFound: for (int i = 0; i < lineinfos.length; i++) {
                            if (lineinfos[i].getLineClass() == SourceDataLine.class) {
                                DataLine.Info info = (DataLine.Info) lineinfos[i];
                                AudioFormat[] formats = info.getFormats();
                                for (int j = 0; j < formats.length; j++) {
                                    AudioFormat format = formats[j];
                                    // Accept stereo (or unspecified) signed/unsigned
                                    // PCM at 48 kHz (or unspecified), 16-bit
                                    // (or unspecified).
                                    if (format.getChannels() == 2 || format.getChannels() == AudioSystem.NOT_SPECIFIED)
                                        if (format.getEncoding().equals(Encoding.PCM_SIGNED) || format.getEncoding().equals(Encoding.PCM_UNSIGNED))
                                            if (format.getSampleRate() == AudioSystem.NOT_SPECIFIED || format.getSampleRate() == 48000.0)
                                                if (format.getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED || format.getSampleSizeInBits() == 16) {
                                                    idealinfo = info;
                                                    int ideal_channels = format.getChannels();
                                                    boolean ideal_signed = format.getEncoding().equals(Encoding.PCM_SIGNED);
                                                    float ideal_rate = format.getSampleRate();
                                                    boolean ideal_endian = format.isBigEndian();
                                                    int ideal_bits = format.getSampleSizeInBits();
                                                    // Substitute concrete defaults for any
                                                    // NOT_SPECIFIED fields of the match.
                                                    if (ideal_bits == AudioSystem.NOT_SPECIFIED)
                                                        ideal_bits = 16;
                                                    if (ideal_channels == AudioSystem.NOT_SPECIFIED)
                                                        ideal_channels = 2;
                                                    if (ideal_rate == AudioSystem.NOT_SPECIFIED)
                                                        ideal_rate = 48000;
                                                    idealformat = new AudioFormat(ideal_rate, ideal_bits, ideal_channels, ideal_signed, ideal_endian);
                                                    break idealFound;
                                                }
                                }
                            }
                        }
                        if (idealformat != null) {
                            format = idealformat;
                            line = (SourceDataLine) defaultmixer.getLine(idealinfo);
                        }
                    }
                    // Fall back to the system default line for the current format.
                    if (line == null)
                        line = AudioSystem.getSourceDataLine(format);
                } finally {
                    synchronized (SoftMixingMixerProvider.mutex) {
                        SoftMixingMixerProvider.lockthread = null;
                    }
                }
                if (line == null)
                    // NOTE(review): `info` is not a local in scope here —
                    // presumably a field of the enclosing class; confirm.
                    throw new IllegalArgumentException("No line matching " + info.toString() + " is supported.");
            }
            double latency = this.latency;
            if (!line.isOpen()) {
                // Buffer sized to hold `latency` microseconds of frames.
                int bufferSize = getFormat().getFrameSize() * (int) (getFormat().getFrameRate() * (latency / 1000000f));
                line.open(getFormat(), bufferSize);
                // Remember that we opened that line
                // so we can close again in SoftSynthesizer.close()
                sourceDataLine = line;
            }
            if (!line.isActive())
                line.start();
            int controlbuffersize = 512;
            try {
                controlbuffersize = ais.available();
            } catch (IOException e) {
                // Ignored: keep the 512-byte default control buffer.
            }
            // Tell mixer not fill read buffers fully.
            // This lowers latency, and tells DataPusher
            // to read in smaller amounts.
            // mainmixer.readfully = false;
            // pusher = new DataPusher(line, ais);
            // Round the line buffer down to a whole number of control
            // buffers, but keep at least three of them.
            int buffersize = line.getBufferSize();
            buffersize -= buffersize % controlbuffersize;
            if (buffersize < 3 * controlbuffersize)
                buffersize = 3 * controlbuffersize;
            if (jitter_correction) {
                ais = new SoftJitterCorrector(ais, buffersize, controlbuffersize);
            }
            // Background pusher thread renders the stream into the line.
            pusher = new SoftAudioPusher(line, ais, controlbuffersize);
            pusher_stream = ais;
            pusher.start();
        } catch (LineUnavailableException e) {
            if (isOpen())
                close();
            // NOTE(review): re-wrapping with toString() drops the original
            // cause/stack trace.
            throw new LineUnavailableException(e.toString());
        }
    }
}
Aggregations