Use of javax.sound.sampled.AudioInputStream in project intellij-community by JetBrains.
The class UIUtil, method playSoundFromStream:
public static void playSoundFromStream(final Factory<InputStream> streamProducer) {
  new Thread(new Runnable() {
    // The wrapper thread is unnecessary, unless it blocks on the
    // Clip finishing; see comments.
    @Override
    public void run() {
      try {
        Clip clip = AudioSystem.getClip();
        InputStream stream = streamProducer.create();
        if (!stream.markSupported()) {
          stream = new BufferedInputStream(stream);
        }
        AudioInputStream inputStream = AudioSystem.getAudioInputStream(stream);
        clip.open(inputStream);
        clip.start();
      }
      catch (Exception ignore) {
        LOG.info(ignore);
      }
    }
  }, "play sound").start();
}
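A minimal sketch of how this helper might be invoked; the Factory implementation, the MyComponent class, and the resource path are illustrative only and not taken from the IntelliJ sources:

UIUtil.playSoundFromStream(new Factory<InputStream>() {
  @Override
  public InputStream create() {
    // Hypothetical classpath resource; any stream over WAV/AU/AIFF data works here.
    return MyComponent.class.getResourceAsStream("/sounds/notification.wav");
  }
});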
Use of javax.sound.sampled.AudioInputStream in project ACS by ACS-Community.
The class AlarmSound, method play:
/**
* Play the sound for the given priority
*
* @param priority The priority of the alarm
*/
private void play(int priority) throws Exception {
  if (priority < 0 || priority > 3) {
    throw new IllegalStateException("Invalid alarm priority " + priority);
  }
  URL url = soundURLs[priority];
  AudioInputStream audioInputStream = null;
  try {
    audioInputStream = AudioSystem.getAudioInputStream(url);
  } catch (Throwable t) {
    // If there is an error then the panel does nothing.
    // It might happen, for example, if another application
    // is locking the audio.
    System.err.println(t.getMessage());
    t.printStackTrace();
    return;
  }
  // Obtain the information about the AudioInputStream
  AudioFormat audioFormat = audioInputStream.getFormat();
  SourceDataLine line = null;
  DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
  // Get the list of available mixers
  Mixer.Info[] mixersInfo = AudioSystem.getMixerInfo();
  // Iterate over the mixers until one with an available line is found
  for (int i = 0; i < mixersInfo.length && line == null; i++) {
    Mixer.Info mi = mixersInfo[i];
    try {
      Mixer mixer = AudioSystem.getMixer(mi);
      line = (SourceDataLine) mixer.getLine(info);
    } catch (LineUnavailableException lue) {
      System.err.println("Line unavailable " + lue.getMessage());
      line = null;
      continue;
    } catch (Throwable t) {
      System.err.println("Exception getting the line " + t.getMessage());
      line = null;
      continue;
    }
    try {
      line.open(audioFormat, EXTERNAL_BUFFER_SIZE);
    } catch (Throwable t) {
      System.err.println("Error opening the line: " + t.getMessage());
      line = null;
      continue;
    }
    try {
      line.start();
    } catch (Throwable t) {
      System.err.println("Error starting the line: " + t.getMessage());
      line = null;
      continue;
    }
    try {
      playOnLine(line, audioInputStream);
    } catch (Throwable t) {
      System.err.println("Error playing: " + t.getMessage());
      line = null;
      continue;
    }
    // Play what's left and close the audio channel
    line.drain();
    line.close();
  }
}
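The playOnLine helper called above is not part of this snippet. A minimal sketch of what such a method commonly looks like, a plain copy loop from the AudioInputStream to the SourceDataLine, is shown here; this is an assumption, not the actual ACS implementation:

private void playOnLine(SourceDataLine line, AudioInputStream audioInputStream) throws IOException {
  // EXTERNAL_BUFFER_SIZE is the same constant used when opening the line in play().
  byte[] buffer = new byte[EXTERNAL_BUFFER_SIZE];
  int read;
  // Copy decoded audio to the line until the end of the stream is reached.
  while ((read = audioInputStream.read(buffer, 0, buffer.length)) != -1) {
    if (read > 0) {
      line.write(buffer, 0, read);
    }
  }
}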
Use of javax.sound.sampled.AudioInputStream in project lwjgl by LWJGL.
The class WaveDataTest, method executeStreamCreationTest:
private void executeStreamCreationTest() {
  try {
    AudioInputStream ais = AudioSystem.getAudioInputStream(new File(filePath));
    WaveData wd = WaveData.create(ais);
    if (wd != null) {
      System.out.println("executeStreamCreationTest::success");
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
}
Use of javax.sound.sampled.AudioInputStream in project lwjgl by LWJGL.
The class WaveDataTest, method executeMidStreamCreationTest:
private void executeMidStreamCreationTest() {
  try {
    AudioInputStream ais = AudioSystem.getAudioInputStream(WaveDataTest.class.getClassLoader().getResource(filePath));
    int totalSize = ais.getFormat().getChannels() * (int) ais.getFrameLength() * ais.getFormat().getSampleSizeInBits() / 8;
    // Skip the first quarter of the stream before creating the WaveData
    int skip = totalSize / 4;
    long skipped = ais.skip(skip);
    WaveData wd = WaveData.create(ais);
    if (wd != null) {
      System.out.println("executeMidStreamCreationTest::success");
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
}
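Both tests only verify that WaveData.create succeeds. A minimal sketch of how a WaveData instance is typically handed to OpenAL afterwards, assuming the LWJGL 2 org.lwjgl.openal.AL10 bindings; the buffer and source handles below are illustrative:

int buffer = AL10.alGenBuffers();
// WaveData exposes the decoded samples, the OpenAL format constant and the sample rate.
AL10.alBufferData(buffer, wd.format, wd.data, wd.samplerate);
wd.dispose(); // the data has been copied into the OpenAL buffer
int source = AL10.alGenSources();
AL10.alSourcei(source, AL10.AL_BUFFER, buffer);
AL10.alSourcePlay(source);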
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains.
The class NoteOverFlowTest2, method main:
public static void main(String[] args) throws Exception {
  // Create an instance of the synthesizer with very low polyphony
  AudioSynthesizer synth = new SoftSynthesizer();
  AudioFormat format = new AudioFormat(44100, 16, 2, true, false);
  Map<String, Object> p = new HashMap<String, Object>();
  p.put("max polyphony", new Integer(5));
  AudioInputStream stream = synth.openStream(format, p);
  // Create an instrument with too many regions (more than max polyphony)
  SF2Soundbank sf2 = new SF2Soundbank();
  SF2Sample sample = new SF2Sample(sf2);
  sample.setName("test sample");
  sample.setData(new byte[100]);
  sample.setSampleRate(44100);
  sample.setOriginalPitch(20);
  sf2.addResource(sample);
  SF2Layer layer = new SF2Layer(sf2);
  layer.setName("test layer");
  sf2.addResource(layer);
  for (int i = 0; i < 100; i++) {
    SF2LayerRegion region = new SF2LayerRegion();
    region.setSample(sample);
    layer.getRegions().add(region);
  }
  SF2Instrument ins = new SF2Instrument(sf2);
  ins.setPatch(new Patch(0, 0));
  ins.setName("test instrument");
  sf2.addInstrument(ins);
  SF2InstrumentRegion insregion = new SF2InstrumentRegion();
  insregion.setLayer(layer);
  ins.getRegions().add(insregion);
  // Load the test soundbank into the synthesizer
  synth.unloadAllInstruments(synth.getDefaultSoundbank());
  synth.loadAllInstruments(sf2);
  // Send out one MIDI note-on message
  MidiChannel ch1 = synth.getChannels()[0];
  ch1.programChange(0);
  ch1.noteOn(64, 64);
  // Skip 2 seconds of rendered audio from the stream
  stream.skip(format.getFrameSize() * ((int) (format.getFrameRate() * 2)));
  // Close the synthesizer after use
  synth.close();
}
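The skip call above only pulls rendered audio through the stream to force the software synthesizer to process the note. A minimal sketch of reading the rendered samples into a buffer instead, for tests that want to inspect them; this is an assumption, not part of the JDK test:

// One second of audio, in bytes, for the opened format.
int bytesPerSecond = (int) format.getFrameRate() * format.getFrameSize();
byte[] rendered = new byte[bytesPerSecond];
int off = 0;
while (off < rendered.length) {
  int n = stream.read(rendered, off, rendered.length - off);
  if (n < 0) {
    break; // end of stream
  }
  off += n;
}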