Use of com.sun.media.sound.AudioSynthesizer in project Zong by Xenoage.
The class SynthManager, method findAudioSynthesizer.
private AudioSynthesizer findAudioSynthesizer() throws MidiUnavailableException {
    // first check if default synthesizer is AudioSynthesizer.
    Synthesizer synth = MidiSystem.getSynthesizer();
    if (synth instanceof AudioSynthesizer)
        return (AudioSynthesizer) synth;
    // if default synthesizer is not AudioSynthesizer, check others.
    Info[] infos = MidiSystem.getMidiDeviceInfo();
    for (val info : infos) {
        MidiDevice dev = MidiSystem.getMidiDevice(info);
        if (dev instanceof AudioSynthesizer)
            return (AudioSynthesizer) dev;
    }
    // no AudioSynthesizer was found, return null.
    return null;
}
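A minimal usage sketch, not taken from Zong: once an AudioSynthesizer has been found, its openStream method can render MIDI offline into an AudioInputStream instead of playing through an audio device. The format values and the file name "out.wav" below are assumptions for illustration only.

AudioSynthesizer synth = findAudioSynthesizer();
if (synth != null) {
    AudioFormat format = new AudioFormat(44100, 16, 2, true, false);
    // openStream opens the synthesizer and returns its rendered output as a stream
    AudioInputStream stream = synth.openStream(format, null);
    // ... send MIDI messages through synth.getReceiver() here ...
    // write about 5 seconds of rendered audio to disk ("out.wav" is a placeholder)
    long frames = (long) (format.getFrameRate() * 5);
    AudioSystem.write(new AudioInputStream(stream, format, frames),
            AudioFileFormat.Type.WAVE, new File("out.wav"));
    synth.close();
}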
Use of com.sun.media.sound.AudioSynthesizer in project jdk8u_jdk by JetBrains.
The class NoteOverFlowTest2, method main.
public static void main(String[] args) throws Exception {
    // Create instance of the synthesizer with very low polyphony
    AudioSynthesizer synth = new SoftSynthesizer();
    AudioFormat format = new AudioFormat(44100, 16, 2, true, false);
    Map<String, Object> p = new HashMap<String, Object>();
    p.put("max polyphony", new Integer(5));
    AudioInputStream stream = synth.openStream(format, p);
    // Create instrument with too many regions (more than max polyphony)
    SF2Soundbank sf2 = new SF2Soundbank();
    SF2Sample sample = new SF2Sample(sf2);
    sample.setName("test sample");
    sample.setData(new byte[100]);
    sample.setSampleRate(44100);
    sample.setOriginalPitch(20);
    sf2.addResource(sample);
    SF2Layer layer = new SF2Layer(sf2);
    layer.setName("test layer");
    sf2.addResource(layer);
    for (int i = 0; i < 100; i++) {
        SF2LayerRegion region = new SF2LayerRegion();
        region.setSample(sample);
        layer.getRegions().add(region);
    }
    SF2Instrument ins = new SF2Instrument(sf2);
    ins.setPatch(new Patch(0, 0));
    ins.setName("test instrument");
    sf2.addInstrument(ins);
    SF2InstrumentRegion insregion = new SF2InstrumentRegion();
    insregion.setLayer(layer);
    ins.getRegions().add(insregion);
    // Load the test soundbank into the synthesizer
    synth.unloadAllInstruments(synth.getDefaultSoundbank());
    synth.loadAllInstruments(sf2);
    // Send out one midi on message
    MidiChannel ch1 = synth.getChannels()[0];
    ch1.programChange(0);
    ch1.noteOn(64, 64);
    // Skip 2 seconds of rendered audio from the stream (skip counts bytes)
    stream.skip(format.getFrameSize() * ((int) (format.getFrameRate() * 2)));
    // Close the synthesizer after use
    synth.close();
}
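The "max polyphony" entry above is one of several rendering properties the synthesizer accepts. A hedged sketch of how the available properties and their default values can be listed via AudioSynthesizer.getPropertyInfo; the printed fields follow AudioSynthesizerPropertyInfo.

// Sketch: enumerate the rendering properties a SoftSynthesizer understands.
AudioSynthesizer synth = new SoftSynthesizer();
for (AudioSynthesizerPropertyInfo info : synth.getPropertyInfo(null)) {
    // name is the key for the map passed to open/openStream,
    // value is the default value, valueClass its expected type
    System.out.println(info.name + " = " + info.value
            + " (" + info.valueClass.getSimpleName() + ")");
}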
Use of com.sun.media.sound.AudioSynthesizer in project jdk8u_jdk by JetBrains.
The class TestPreciseTimestampRendering, method test.
public static void test(Soundbank soundbank) throws Exception {
    // Create instance of synthesizer using the testing soundbank above
    AudioSynthesizer synth = new SoftSynthesizer();
    AudioInputStream stream = synth.openStream(format, null);
    synth.unloadAllInstruments(synth.getDefaultSoundbank());
    synth.loadAllInstruments(soundbank);
    Receiver recv = synth.getReceiver();
    // Set volume to max and turn reverb off
    ShortMessage reverb_off = new ShortMessage();
    reverb_off.setMessage(ShortMessage.CONTROL_CHANGE, 91, 0);
    recv.send(reverb_off, -1);
    ShortMessage full_volume = new ShortMessage();
    full_volume.setMessage(ShortMessage.CONTROL_CHANGE, 7, 127);
    recv.send(full_volume, -1);
    Random random = new Random(3485934583945L);
    // Create random timestamps
    long[] test_timestamps = new long[30];
    for (int i = 1; i < test_timestamps.length; i++) {
        test_timestamps[i] = i * 44100 + (int) (random.nextDouble() * 22050.0);
    }
    // Send midi note on message to synthesizer
    for (int i = 0; i < test_timestamps.length; i++) {
        ShortMessage midi_on = new ShortMessage();
        midi_on.setMessage(ShortMessage.NOTE_ON, 69, 127);
        recv.send(midi_on, (long) ((test_timestamps[i] / 44100.0) * 1000000.0));
    }
    // Measure timing from rendered audio
    float[] fbuffer = new float[100];
    byte[] buffer = new byte[fbuffer.length * format.getFrameSize()];
    long firsts = -1;
    int counter = 0;
    long s = 0;
    long max_jitter = 0;
    outerloop: for (int k = 0; k < 10000000; k++) {
        stream.read(buffer);
        AudioFloatConverter.getConverter(format).toFloatArray(buffer, fbuffer);
        for (int i = 0; i < fbuffer.length; i++) {
            if (fbuffer[i] != 0) {
                if (firsts == -1)
                    firsts = s;
                long measure_time = (s - firsts);
                long predicted_time = test_timestamps[counter];
                long jitter = Math.abs(measure_time - predicted_time);
                if (jitter > 10)
                    max_jitter = jitter;
                counter++;
                if (counter == test_timestamps.length)
                    break outerloop;
            }
            s++;
        }
    }
    synth.close();
    if (counter == 0)
        throw new Exception("Nothing was measured!");
    if (max_jitter != 0) {
        throw new Exception("Jitter has occurred! " + "(max jitter = " + max_jitter + ")");
    }
}
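The timestamps handed to Receiver.send are microseconds, while the test generates them in sample frames at 44100 Hz, hence the (frames / 44100.0) * 1000000.0 conversion above. A small illustrative helper, not part of the test, makes that conversion explicit:

// Illustrative helper: convert a frame position at a given sample rate into the
// microsecond timestamp expected by Receiver.send.
static long framesToMicroseconds(long frames, float sampleRate) {
    // e.g. frame 44100 at 44100 Hz -> 1_000_000 microseconds (one second)
    return (long) ((frames / (double) sampleRate) * 1_000_000.0);
}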
Use of com.sun.media.sound.AudioSynthesizer in project Zong by Xenoage.
The class SynthManager, method initMidi.
/**
 * (Re)initializes the MIDI objects and (re)configures the audio settings.
 * If playback is currently running, it is stopped.
 * TIDY
 * @param sampleRate       the number of samples per second, e.g. 44100
 * @param sampleSizeInBits the number of bits in each sample, e.g. 16
 * @param channels         the number of channels (1 for mono, 2 for stereo, and so on)
 * @param latency          the latency in ms
 * @param polyphony        the maximum number of concurrent notes
 * @param deviceName       the name of the audio device, or null for the default device
 * @param interpolation    the interpolation mode: linear, cubic, sinc or point
 */
public void initMidi(float sampleRate, int sampleSizeInBits, int channels, int latency,
        int polyphony, String deviceName, String interpolation) throws MidiUnavailableException {
    Sequence sequence = null;
    if (sequencer != null) {
        sequencer.stop();
        sequence = sequencer.getSequence();
    }
    if (synthesizer != null) {
        synthesizer.close();
    }
    if (mixer != null) {
        mixer.close();
    }
    format = new AudioFormat(sampleRate, sampleSizeInBits, channels, true, false);
    if (deviceName != null) {
        Mixer.Info selinfo = null;
        for (Mixer.Info info : AudioSystem.getMixerInfo()) {
            Mixer mixer = AudioSystem.getMixer(info);
            boolean hassrcline = false;
            for (Line.Info linfo : mixer.getSourceLineInfo()) {
                if (linfo instanceof javax.sound.sampled.DataLine.Info) {
                    hassrcline = true;
                    break;
                }
            }
            if (hassrcline) {
                if (info.getName().equals(deviceName)) {
                    selinfo = info;
                    break;
                }
            }
        }
        if (selinfo != null) {
            mixer = AudioSystem.getMixer(selinfo);
            try {
                mixer.open();
                int bufferSize = (int) (format.getFrameSize() * format.getFrameRate() * latency / 1000f);
                if (bufferSize < 500)
                    bufferSize = 500;
                DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format, bufferSize);
                if (mixer.isLineSupported(dataLineInfo)) {
                    line = (SourceDataLine) mixer.getLine(dataLineInfo);
                    line.open(format, bufferSize);
                    line.start();
                }
            } catch (Throwable t) {
                mixer = null;
            }
        }
    }
    Map<String, Object> ainfo = new HashMap<>();
    ainfo.put("format", format);
    ainfo.put("max polyphony", polyphony);
    ainfo.put("latency", latency * 1000L);
    ainfo.put("interpolation", interpolation);
    ainfo.put("large mode", true);
    AudioSynthesizer synth = findAudioSynthesizer();
    if (synth == null)
        // no audio synthesizer
        return;
    synth.open(line, ainfo);
    synthesizer = synth;
    if (soundbank == null) {
        soundbank = synth.getDefaultSoundbank();
    }
    if (sequencer == null) {
        try {
            sequencer = MidiSystem.getSequencer(false);
        } catch (MidiUnavailableException ex) {
            // sequencer already open. no problem.
        }
    }
    if (sequencer.isOpen()) {
        sequencer.close();
    }
    sequencer.getTransmitter().setReceiver(synthesizer.getReceiver());
    sequencer.open();
    if (sequence != null) {
        try {
            sequencer.setSequence(sequence);
        } catch (InvalidMidiDataException ex) {
        }
    }
}
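A hedged usage sketch of initMidi with typical values; the concrete numbers and the synthManager instance name are assumptions, not taken from Zong.

// Assuming a SynthManager instance is available (obtaining it is project-specific):
// CD-quality stereo output, 100 ms latency, 64-voice polyphony,
// default audio device, linear interpolation.
synthManager.initMidi(44100f, 16, 2, 100, 64, null, "linear");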