Use of javax.sound.sampled.AudioFormat in project JMRI by JMRI.
The class SoundUtil, method playSoundBuffer.
/**
 * Play a sound from a buffer
 *
 */
public static void playSoundBuffer(byte[] wavData) {
    // get characteristics from buffer
    jmri.jmrit.sound.WavBuffer wb = new jmri.jmrit.sound.WavBuffer(wavData);
    float sampleRate = wb.getSampleRate();
    int sampleSizeInBits = wb.getSampleSizeInBits();
    int channels = wb.getChannels();
    boolean signed = wb.getSigned();
    boolean bigEndian = wb.getBigEndian();
    AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    SourceDataLine line;
    // format is an AudioFormat object
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    if (!AudioSystem.isLineSupported(info)) {
        // Handle the error.
        log.warn("line not supported: " + info);
        return;
    }
    // Obtain and open the line.
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
    } catch (LineUnavailableException ex) {
        // Handle the error.
        log.error("error opening line: " + ex);
        return;
    }
    line.start();
    // write(byte[] b, int off, int len)
    line.write(wavData, 0, wavData.length);
}
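For context, a minimal caller sketch, assuming SoundUtil sits in the jmri.jmrit.sound package alongside WavBuffer and that the WAV file is small enough to hold in memory (the file name is a placeholder):

import java.nio.file.Files;
import java.nio.file.Paths;

public class PlayBufferDemo {

    public static void main(String[] args) throws Exception {
        // Read the whole (small) WAV file into memory; "bell.wav" is a hypothetical path.
        byte[] wavData = Files.readAllBytes(Paths.get("bell.wav"));
        // The helper parses the header via WavBuffer and writes the bytes to a SourceDataLine.
        // Note that it returns as soon as the buffer has been handed to the line.
        jmri.jmrit.sound.SoundUtil.playSoundBuffer(wavData);
    }
}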
Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
The class SoftSynthesizer, method getPropertyInfo.
public AudioSynthesizerPropertyInfo[] getPropertyInfo(Map<String, Object> info) {
    List<AudioSynthesizerPropertyInfo> list = new ArrayList<AudioSynthesizerPropertyInfo>();
    AudioSynthesizerPropertyInfo item;
    // If info != null or synthesizer is closed
    // we return how the synthesizer will be set on next open
    // If info == null and synthesizer is open
    // we return current synthesizer properties.
    boolean o = info == null && open;
    item = new AudioSynthesizerPropertyInfo("interpolation", o ? resamplerType : "linear");
    item.choices = new String[] { "linear", "linear1", "linear2", "cubic", "lanczos", "sinc", "point" };
    item.description = "Interpolation method";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("control rate", o ? controlrate : 147f);
    item.description = "Control rate";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("format", o ? format : new AudioFormat(44100, 16, 2, true, false));
    item.description = "Default audio format";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("latency", o ? latency : 120000L);
    item.description = "Default latency";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("device id", o ? deviceid : 0);
    item.description = "Device ID for SysEx Messages";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("max polyphony", o ? maxpoly : 64);
    item.description = "Maximum polyphony";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("reverb", o ? reverb_on : true);
    item.description = "Turn reverb effect on or off";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("chorus", o ? chorus_on : true);
    item.description = "Turn chorus effect on or off";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("auto gain control", o ? agc_on : true);
    item.description = "Turn auto gain control on or off";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("large mode", o ? largemode : false);
    item.description = "Turn large mode on or off.";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("midi channels", o ? channels.length : 16);
    item.description = "Number of midi channels.";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("jitter correction", o ? jitter_correction : true);
    item.description = "Turn jitter correction on or off.";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("light reverb", o ? reverb_light : true);
    item.description = "Turn light reverb mode on or off";
    list.add(item);
    item = new AudioSynthesizerPropertyInfo("load default soundbank", o ? load_default_soundbank : true);
    item.description = "Enabled/disable loading default soundbank";
    list.add(item);
    AudioSynthesizerPropertyInfo[] items;
    items = list.toArray(new AudioSynthesizerPropertyInfo[list.size()]);
    Properties storedProperties = getStoredProperties();
    for (AudioSynthesizerPropertyInfo item2 : items) {
        Object v = (info == null) ? null : info.get(item2.name);
        v = (v != null) ? v : storedProperties.getProperty(item2.name);
        if (v != null) {
            Class c = (item2.valueClass);
            if (c.isInstance(v))
                item2.value = v;
            else if (v instanceof String) {
                String s = (String) v;
                if (c == Boolean.class) {
                    if (s.equalsIgnoreCase("true"))
                        item2.value = Boolean.TRUE;
                    if (s.equalsIgnoreCase("false"))
                        item2.value = Boolean.FALSE;
                } else if (c == AudioFormat.class) {
                    int channels = 2;
                    boolean signed = true;
                    boolean bigendian = false;
                    int bits = 16;
                    float sampleRate = 44100f;
                    try {
                        StringTokenizer st = new StringTokenizer(s, ", ");
                        String prevToken = "";
                        while (st.hasMoreTokens()) {
                            String token = st.nextToken().toLowerCase();
                            if (token.equals("mono"))
                                channels = 1;
                            if (token.startsWith("channel"))
                                channels = Integer.parseInt(prevToken);
                            if (token.contains("unsigned"))
                                signed = false;
                            if (token.equals("big-endian"))
                                bigendian = true;
                            if (token.equals("bit"))
                                bits = Integer.parseInt(prevToken);
                            if (token.equals("hz"))
                                sampleRate = Float.parseFloat(prevToken);
                            prevToken = token;
                        }
                        item2.value = new AudioFormat(sampleRate, bits, channels, signed, bigendian);
                    } catch (NumberFormatException e) {
                    }
                } else
                    try {
                        if (c == Byte.class)
                            item2.value = Byte.valueOf(s);
                        else if (c == Short.class)
                            item2.value = Short.valueOf(s);
                        else if (c == Integer.class)
                            item2.value = Integer.valueOf(s);
                        else if (c == Long.class)
                            item2.value = Long.valueOf(s);
                        else if (c == Float.class)
                            item2.value = Float.valueOf(s);
                        else if (c == Double.class)
                            item2.value = Double.valueOf(s);
                    } catch (NumberFormatException e) {
                    }
            } else if (v instanceof Number) {
                Number n = (Number) v;
                if (c == Byte.class)
                    item2.value = Byte.valueOf(n.byteValue());
                if (c == Short.class)
                    item2.value = Short.valueOf(n.shortValue());
                if (c == Integer.class)
                    item2.value = Integer.valueOf(n.intValue());
                if (c == Long.class)
                    item2.value = Long.valueOf(n.longValue());
                if (c == Float.class)
                    item2.value = Float.valueOf(n.floatValue());
                if (c == Double.class)
                    item2.value = Double.valueOf(n.doubleValue());
            }
        }
    }
    return items;
}
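A hedged usage sketch, calling this method through the internal com.sun.media.sound.AudioSynthesizer interface (unsupported JDK-internal API, shown here only to illustrate how string-valued map entries are coerced by the loop above; the property values are arbitrary examples):

import java.util.HashMap;
import java.util.Map;

import com.sun.media.sound.AudioSynthesizer;
import com.sun.media.sound.AudioSynthesizerPropertyInfo;
import com.sun.media.sound.SoftSynthesizer;

public class PropertyInfoDemo {

    public static void main(String[] args) {
        AudioSynthesizer synth = new SoftSynthesizer();
        // With the synthesizer still closed, the returned values describe what would
        // be applied on the next open()/openStream() call, map entries overriding defaults.
        Map<String, Object> overrides = new HashMap<>();
        overrides.put("interpolation", "sinc");
        // Strings are converted according to each property's valueClass, as shown above:
        // "max polyphony" parses as an Integer, "format" goes through the tokenizer branch.
        overrides.put("max polyphony", "256");
        overrides.put("format", "48000 Hz, 24 bit, mono, big-endian");
        for (AudioSynthesizerPropertyInfo p : synth.getPropertyInfo(overrides)) {
            System.out.println(p.name + " = " + p.value + "  (" + p.description + ")");
        }
    }
}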
Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
The class Toolkit, method getPCMConvertedAudioInputStream.
public static AudioInputStream getPCMConvertedAudioInputStream(AudioInputStream ais) {
    // we can't open the device for non-PCM playback, so we have to
    // convert any other encodings to PCM here (at least we try!)
    AudioFormat af = ais.getFormat();
    if ((!af.getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) && (!af.getEncoding().equals(AudioFormat.Encoding.PCM_UNSIGNED))) {
        try {
            AudioFormat newFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, af.getSampleRate(), 16, af.getChannels(), af.getChannels() * 2, af.getSampleRate(), Platform.isBigEndian());
            ais = AudioSystem.getAudioInputStream(newFormat, ais);
        } catch (Exception e) {
            if (Printer.err)
                e.printStackTrace();
            ais = null;
        }
    }
    return ais;
}
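Application code can perform the same conversion with the public javax.sound.sampled API alone; a minimal sketch under that assumption (the file name is a placeholder):

import java.io.File;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class PcmConvertDemo {

    public static void main(String[] args) throws Exception {
        // "clip.au" stands in for any non-PCM encoded file (u-law, a-law, ...).
        AudioInputStream ais = AudioSystem.getAudioInputStream(new File("clip.au"));
        AudioFormat af = ais.getFormat();
        if (!AudioFormat.Encoding.PCM_SIGNED.equals(af.getEncoding())
                && !AudioFormat.Encoding.PCM_UNSIGNED.equals(af.getEncoding())) {
            // Same target as the helper above: 16-bit signed PCM, original sample
            // rate and channel count, frame size of channels * 2 bytes.
            AudioFormat pcm = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                    af.getSampleRate(), 16, af.getChannels(),
                    af.getChannels() * 2, af.getSampleRate(), false);
            ais = AudioSystem.getAudioInputStream(pcm, ais);
        }
        System.out.println("Playback format: " + ais.getFormat());
        ais.close();
    }
}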
Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
The class SoftMixingMixer, method open.
public void open(SourceDataLine line) throws LineUnavailableException {
    if (isOpen()) {
        implicitOpen = false;
        return;
    }
    synchronized (control_mutex) {
        try {
            if (line != null)
                format = line.getFormat();
            AudioInputStream ais = openStream(getFormat());
            if (line == null) {
                synchronized (SoftMixingMixerProvider.mutex) {
                    SoftMixingMixerProvider.lockthread = Thread.currentThread();
                }
                try {
                    Mixer defaultmixer = AudioSystem.getMixer(null);
                    if (defaultmixer != null) {
                        // Search for suitable line
                        DataLine.Info idealinfo = null;
                        AudioFormat idealformat = null;
                        Line.Info[] lineinfos = defaultmixer.getSourceLineInfo();
                        idealFound: for (int i = 0; i < lineinfos.length; i++) {
                            if (lineinfos[i].getLineClass() == SourceDataLine.class) {
                                DataLine.Info info = (DataLine.Info) lineinfos[i];
                                AudioFormat[] formats = info.getFormats();
                                for (int j = 0; j < formats.length; j++) {
                                    AudioFormat format = formats[j];
                                    if (format.getChannels() == 2 || format.getChannels() == AudioSystem.NOT_SPECIFIED)
                                        if (format.getEncoding().equals(Encoding.PCM_SIGNED) || format.getEncoding().equals(Encoding.PCM_UNSIGNED))
                                            if (format.getSampleRate() == AudioSystem.NOT_SPECIFIED || format.getSampleRate() == 48000.0)
                                                if (format.getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED || format.getSampleSizeInBits() == 16) {
                                                    idealinfo = info;
                                                    int ideal_channels = format.getChannels();
                                                    boolean ideal_signed = format.getEncoding().equals(Encoding.PCM_SIGNED);
                                                    float ideal_rate = format.getSampleRate();
                                                    boolean ideal_endian = format.isBigEndian();
                                                    int ideal_bits = format.getSampleSizeInBits();
                                                    if (ideal_bits == AudioSystem.NOT_SPECIFIED)
                                                        ideal_bits = 16;
                                                    if (ideal_channels == AudioSystem.NOT_SPECIFIED)
                                                        ideal_channels = 2;
                                                    if (ideal_rate == AudioSystem.NOT_SPECIFIED)
                                                        ideal_rate = 48000;
                                                    idealformat = new AudioFormat(ideal_rate, ideal_bits, ideal_channels, ideal_signed, ideal_endian);
                                                    break idealFound;
                                                }
                                }
                            }
                        }
                        if (idealformat != null) {
                            format = idealformat;
                            line = (SourceDataLine) defaultmixer.getLine(idealinfo);
                        }
                    }
                    if (line == null)
                        line = AudioSystem.getSourceDataLine(format);
                } finally {
                    synchronized (SoftMixingMixerProvider.mutex) {
                        SoftMixingMixerProvider.lockthread = null;
                    }
                }
                if (line == null)
                    throw new IllegalArgumentException("No line matching " + info.toString() + " is supported.");
            }
            double latency = this.latency;
            if (!line.isOpen()) {
                int bufferSize = getFormat().getFrameSize() * (int) (getFormat().getFrameRate() * (latency / 1000000f));
                line.open(getFormat(), bufferSize);
                // Remember that we opened that line
                // so we can close again in SoftSynthesizer.close()
                sourceDataLine = line;
            }
            if (!line.isActive())
                line.start();
            int controlbuffersize = 512;
            try {
                controlbuffersize = ais.available();
            } catch (IOException e) {
            }
            // Tell mixer not to fill read buffers fully.
            // This lowers latency, and tells DataPusher
            // to read in smaller amounts.
            // mainmixer.readfully = false;
            // pusher = new DataPusher(line, ais);
            int buffersize = line.getBufferSize();
            buffersize -= buffersize % controlbuffersize;
            if (buffersize < 3 * controlbuffersize)
                buffersize = 3 * controlbuffersize;
            if (jitter_correction) {
                ais = new SoftJitterCorrector(ais, buffersize, controlbuffersize);
            }
            pusher = new SoftAudioPusher(line, ais, controlbuffersize);
            pusher_stream = ais;
            pusher.start();
        } catch (LineUnavailableException e) {
            if (isOpen())
                close();
            throw new LineUnavailableException(e.toString());
        }
    }
}
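The line search in the middle of this method relies only on public API; a stripped-down sketch of the same scan, printing every format the default mixer's SourceDataLines advertise:

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.Line;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.SourceDataLine;

public class LineScanDemo {

    public static void main(String[] args) {
        Mixer defaultmixer = AudioSystem.getMixer(null);
        for (Line.Info lineinfo : defaultmixer.getSourceLineInfo()) {
            if (lineinfo.getLineClass() == SourceDataLine.class) {
                DataLine.Info info = (DataLine.Info) lineinfo;
                // AudioSystem.NOT_SPECIFIED in a field means the line accepts any value there.
                for (AudioFormat format : info.getFormats()) {
                    System.out.println(format);
                }
            }
        }
    }
}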
Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
The class UlawCodec, method getOutputFormats.
/**
 * Obtains the set of output formats supported by the codec
 * given a particular input format.
 * If no output formats are supported for this input format,
 * returns an array of length 0.
 * @return array of supported output formats.
 */
/* public AudioFormat[] getOutputFormats(AudioFormat inputFormat) { */
private AudioFormat[] getOutputFormats(AudioFormat inputFormat) {
    Vector formats = new Vector();
    AudioFormat format;
    if ((inputFormat.getSampleSizeInBits() == 16) && AudioFormat.Encoding.PCM_SIGNED.equals(inputFormat.getEncoding())) {
        format = new AudioFormat(AudioFormat.Encoding.ULAW, inputFormat.getSampleRate(), 8, inputFormat.getChannels(), inputFormat.getChannels(), inputFormat.getSampleRate(), false);
        formats.addElement(format);
    }
    if (AudioFormat.Encoding.ULAW.equals(inputFormat.getEncoding())) {
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), 16, inputFormat.getChannels(), inputFormat.getChannels() * 2, inputFormat.getSampleRate(), false);
        formats.addElement(format);
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), 16, inputFormat.getChannels(), inputFormat.getChannels() * 2, inputFormat.getSampleRate(), true);
        formats.addElement(format);
    }
    AudioFormat[] formatArray = new AudioFormat[formats.size()];
    for (int i = 0; i < formatArray.length; i++) {
        formatArray[i] = (AudioFormat) (formats.elementAt(i));
    }
    return formatArray;
}
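From application code, the equivalent query goes through AudioSystem, which delegates to the installed format conversion providers (UlawCodec among them); a brief sketch with an arbitrary example input format:

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;

public class UlawTargetsDemo {

    public static void main(String[] args) {
        // 16-bit signed PCM mono at 8 kHz, an input the codec can encode to ULAW.
        AudioFormat pcm = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                8000f, 16, 1, 2, 8000f, false);
        System.out.println("ULAW conversion supported: "
                + AudioSystem.isConversionSupported(AudioFormat.Encoding.ULAW, pcm));
        // Lists the ULAW target formats the providers can produce from this input.
        for (AudioFormat f : AudioSystem.getTargetFormats(AudioFormat.Encoding.ULAW, pcm)) {
            System.out.println(f);
        }
    }
}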