use of jmri.jmrit.audio.AudioBuffer in project JMRI by JMRI.
the class AbstractAudioManagerConfigXML method store.
/**
* Default implementation for storing the contents of an AudioManager
*
* @param o Object to store, of type AudioManager
* @return Element containing the complete info
*/
@Override
public Element store(Object o) {
Element audio = new Element("audio");
setStoreElementClass(audio);
AudioManager am = (AudioManager) o;
if (am != null) {
java.util.Iterator<String> iter = am.getSystemNameList().iterator();
// don't return an element if there are not any audios to include
if (!iter.hasNext()) {
return null;
}
// (no need to store the automatically created Listener object by itself)
if (am.getSystemNameList(Audio.SOURCE).isEmpty() && am.getSystemNameList(Audio.BUFFER).isEmpty()) {
return null;
}
// finally, don't store if the only Sources and Buffers are for the
// virtual sound decoder (VSD)
int vsdObjectCount = 0;
// count all VSD objects
for (String sname : am.getSystemNameList()) {
if (log.isDebugEnabled()) {
log.debug("Check if " + sname + " is a VSD object");
}
if (sname.length() >= 8 && sname.substring(3, 8).equalsIgnoreCase("$VSD:")) {
log.debug("...yes");
vsdObjectCount++;
}
}
if (log.isDebugEnabled()) {
log.debug("Found " + vsdObjectCount + " VSD objects of " + am.getSystemNameList(Audio.SOURCE).size() + am.getSystemNameList(Audio.BUFFER).size() + " objects");
}
// check if the total number of Sources and Buffers equals the number of VSD objects - if so, exit.
if (am.getSystemNameList(Audio.SOURCE).size() + am.getSystemNameList(Audio.BUFFER).size() == vsdObjectCount) {
log.debug("Only VSD objects - nothing to store");
return null;
}
// store global information
audio.setAttribute("distanceattenuated", am.getActiveAudioFactory().isDistanceAttenuated() ? "yes" : "no");
// store the audios
while (iter.hasNext()) {
String sname = iter.next();
if (sname == null) {
log.error("System name null during store");
continue;
}
if (log.isDebugEnabled()) {
log.debug("system name is " + sname);
}
if (sname.length() >= 8 && sname.substring(3, 8).equalsIgnoreCase("$VSD:")) {
if (log.isDebugEnabled()) {
log.debug("Skipping storage of VSD object " + sname);
}
continue;
}
Audio a = am.getBySystemName(sname);
// Transient objects for current element and any children
Element e = null;
Element ce = null;
int type = a.getSubType();
if (type == Audio.BUFFER) {
AudioBuffer ab = (AudioBuffer) a;
e = new Element("audiobuffer").setAttribute("systemName", sname);
e.addContent(new Element("systemName").addContent(sname));
// store common part
storeCommon(ab, e);
// store sub-type specific data
String url = ab.getURL();
ce = new Element("url").addContent("" + (url.isEmpty() ? "" : FileUtil.getPortableFilename(url)));
e.addContent(ce);
ce = new Element("looppoint");
ce.setAttribute("start", "" + ab.getStartLoopPoint());
ce.setAttribute("end", "" + ab.getEndLoopPoint());
e.addContent(ce);
ce = new Element("streamed");
ce.addContent("" + (ab.isStreamed() ? "yes" : "no"));
e.addContent(ce);
} else if (type == Audio.LISTENER) {
AudioListener al = (AudioListener) a;
e = new Element("audiolistener").setAttribute("systemName", sname);
e.addContent(new Element("systemName").addContent(sname));
// store common part
storeCommon(al, e);
// store sub-type specific data
ce = new Element("position");
ce.setAttribute("x", "" + al.getPosition().x);
ce.setAttribute("y", "" + al.getPosition().y);
ce.setAttribute("z", "" + al.getPosition().z);
e.addContent(ce);
ce = new Element("velocity");
ce.setAttribute("x", "" + al.getVelocity().x);
ce.setAttribute("y", "" + al.getVelocity().y);
ce.setAttribute("z", "" + al.getVelocity().z);
e.addContent(ce);
ce = new Element("orientation");
ce.setAttribute("atX", "" + al.getOrientation(Audio.AT).x);
ce.setAttribute("atY", "" + al.getOrientation(Audio.AT).y);
ce.setAttribute("atZ", "" + al.getOrientation(Audio.AT).z);
ce.setAttribute("upX", "" + al.getOrientation(Audio.UP).x);
ce.setAttribute("upY", "" + al.getOrientation(Audio.UP).y);
ce.setAttribute("upZ", "" + al.getOrientation(Audio.UP).z);
e.addContent(ce);
ce = new Element("gain");
ce.addContent("" + al.getGain());
e.addContent(ce);
ce = new Element("metersperunit");
ce.addContent("" + al.getMetersPerUnit());
e.addContent(ce);
} else if (type == Audio.SOURCE) {
AudioSource as = (AudioSource) a;
e = new Element("audiosource").setAttribute("systemName", sname);
e.addContent(new Element("systemName").addContent(sname));
// store common part
storeCommon(as, e);
// store sub-type specific data
ce = new Element("position");
ce.setAttribute("x", "" + as.getPosition().x);
ce.setAttribute("y", "" + as.getPosition().y);
ce.setAttribute("z", "" + as.getPosition().z);
e.addContent(ce);
ce = new Element("velocity");
ce.setAttribute("x", "" + as.getVelocity().x);
ce.setAttribute("y", "" + as.getVelocity().y);
ce.setAttribute("z", "" + as.getVelocity().z);
e.addContent(ce);
ce = new Element("assignedbuffer");
if (as.getAssignedBuffer() != null) {
ce.addContent("" + as.getAssignedBufferName());
}
e.addContent(ce);
ce = new Element("gain");
ce.addContent("" + as.getGain());
e.addContent(ce);
ce = new Element("pitch");
ce.addContent("" + as.getPitch());
e.addContent(ce);
ce = new Element("distances");
ce.setAttribute("ref", "" + as.getReferenceDistance());
float f = as.getMaximumDistance();
ce.setAttribute("max", "" + f);
e.addContent(ce);
ce = new Element("loops");
ce.setAttribute("min", "" + as.getMinLoops());
ce.setAttribute("max", "" + as.getMaxLoops());
// ce.setAttribute("mindelay", ""+as.getMinLoopDelay());
// ce.setAttribute("maxdelay", ""+as.getMaxLoopDelay());
e.addContent(ce);
ce = new Element("fadetimes");
ce.setAttribute("in", "" + as.getFadeIn());
ce.setAttribute("out", "" + as.getFadeOut());
e.addContent(ce);
ce = new Element("dopplerfactor");
ce.addContent("" + as.getDopplerFactor());
e.addContent(ce);
ce = new Element("positionrelative");
ce.addContent("" + (as.isPositionRelative() ? "yes" : "no"));
e.addContent(ce);
}
log.debug("store Audio " + sname);
audio.addContent(e);
}
}
return audio;
}
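A minimal sketch of using this method, assuming the concrete adapter jmri.jmrit.audio.configurexml.DefaultAudioManagerXml (the abstract class itself cannot be instantiated); it stores the current AudioManager and pretty-prints the resulting <audio> element with JDOM:

import org.jdom2.Element;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;

public class DumpStoredAudio {
    public static void main(String[] args) {
        // Assumed concrete subclass of AbstractAudioManagerConfigXML
        jmri.jmrit.audio.configurexml.DefaultAudioManagerXml adapter =
                new jmri.jmrit.audio.configurexml.DefaultAudioManagerXml();
        // store() returns null when there is nothing to persist (no Audio objects, or only VSD objects)
        Element audio = adapter.store(jmri.InstanceManager.getDefault(jmri.AudioManager.class));
        if (audio != null) {
            // Pretty-print the <audio> element and its audiobuffer/audiosource/audiolistener children
            System.out.println(new XMLOutputter(Format.getPrettyFormat()).outputString(audio));
        }
    }
}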
use of jmri.jmrit.audio.AudioBuffer in project JMRI by JMRI.
the class AbstractAudioManagerConfigXML method loadAudio.
/**
* Utility method to load the individual Audio objects. If there's no
* additional info needed for a specific Audio type, invoke this with the
* parent of the set of Audio elements.
*
* @param audio Element containing the Audio elements to load.
*/
@SuppressWarnings("unchecked")
public void loadAudio(Element audio) {
AudioManager am = InstanceManager.getDefault(jmri.AudioManager.class);
// Count number of loaded Audio objects
int loadedObjects = 0;
// Load buffers first
List<Element> audioList = audio.getChildren("audiobuffer");
if (log.isDebugEnabled()) {
log.debug("Found " + audioList.size() + " Audio Buffer objects");
}
for (int i = 0; i < audioList.size(); i++) {
Element e = audioList.get(i);
String sysName = getSystemName(e);
if (sysName == null) {
log.warn("unexpected null in systemName " + (e) + " " + (e).getAttributes());
break;
}
String userName = getUserName(e);
if (log.isDebugEnabled()) {
log.debug("create Audio: (" + sysName + ")(" + (userName == null ? "<null>" : userName) + ")");
}
try {
AudioBuffer ab = (AudioBuffer) am.newAudio(sysName, userName);
// load common parts
loadCommon(ab, e);
// load sub-type specific parts
// Transient objects for reading child elements
Element ce;
String value;
if ((ce = e.getChild("url")) != null) {
ab.setURL(ce.getValue());
}
if ((ce = e.getChild("looppoint")) != null) {
if ((value = ce.getAttributeValue("start")) != null) {
ab.setStartLoopPoint(Integer.parseInt(value));
}
if ((value = ce.getAttributeValue("end")) != null) {
ab.setEndLoopPoint(Integer.parseInt(value));
}
}
if ((ce = e.getChild("streamed")) != null) {
ab.setStreamed(ce.getValue().equals("yes"));
}
} catch (AudioException ex) {
log.error("Error loading AudioBuffer (" + sysName + "): " + ex);
}
}
loadedObjects += audioList.size();
// Now load sources
audioList = audio.getChildren("audiosource");
if (log.isDebugEnabled()) {
log.debug("Found " + audioList.size() + " Audio Source objects");
}
for (int i = 0; i < audioList.size(); i++) {
Element e = audioList.get(i);
String sysName = getSystemName(e);
if (sysName == null) {
log.warn("unexpected null in systemName " + (e) + " " + (e).getAttributes());
break;
}
String userName = getUserName(e);
if (log.isDebugEnabled()) {
log.debug("create Audio: (" + sysName + ")(" + (userName == null ? "<null>" : userName) + ")");
}
try {
AudioSource as = (AudioSource) am.newAudio(sysName, userName);
// load common parts
loadCommon(as, e);
// load sub-type specific parts
// Transient objects for reading child elements
Element ce;
String value;
if ((ce = e.getChild("position")) != null) {
as.setPosition(new Vector3f(Float.parseFloat(ce.getAttributeValue("x")), Float.parseFloat(ce.getAttributeValue("y")), Float.parseFloat(ce.getAttributeValue("z"))));
}
if ((ce = e.getChild("velocity")) != null) {
as.setVelocity(new Vector3f(Float.parseFloat(ce.getAttributeValue("x")), Float.parseFloat(ce.getAttributeValue("y")), Float.parseFloat(ce.getAttributeValue("z"))));
}
if ((ce = e.getChild("assignedbuffer")) != null) {
if (ce.getValue().length() != 0 && !ce.getValue().equals("null")) {
as.setAssignedBuffer(ce.getValue());
}
}
if ((ce = e.getChild("gain")) != null && ce.getValue().length() != 0) {
as.setGain(Float.parseFloat(ce.getValue()));
}
if ((ce = e.getChild("pitch")) != null && ce.getValue().length() != 0) {
as.setPitch(Float.parseFloat(ce.getValue()));
}
if ((ce = e.getChild("distances")) != null) {
if ((value = ce.getAttributeValue("ref")) != null) {
as.setReferenceDistance(Float.parseFloat(value));
}
if ((value = ce.getAttributeValue("max")) != null) {
as.setMaximumDistance(Float.parseFloat(value));
}
}
if ((ce = e.getChild("loops")) != null) {
if ((value = ce.getAttributeValue("min")) != null) {
as.setMinLoops(Integer.parseInt(value));
}
if ((value = ce.getAttributeValue("max")) != null) {
as.setMaxLoops(Integer.parseInt(value));
}
// if ((value = ce.getAttributeValue("mindelay"))!=null)
// as.setMinLoopDelay(Integer.parseInt(value));
// if ((value = ce.getAttributeValue("maxdelay"))!=null)
// as.setMaxLoopDelay(Integer.parseInt(value));
}
if ((ce = e.getChild("fadetimes")) != null) {
if ((value = ce.getAttributeValue("in")) != null) {
as.setFadeIn(Integer.parseInt(value));
}
if ((value = ce.getAttributeValue("out")) != null) {
as.setFadeOut(Integer.parseInt(value));
}
}
if ((ce = e.getChild("dopplerfactor")) != null && ce.getValue().length() != 0) {
as.setDopplerFactor(Float.parseFloat(ce.getValue()));
}
if ((ce = e.getChild("positionrelative")) != null) {
as.setPositionRelative(ce.getValue().equals("yes"));
}
} catch (AudioException ex) {
log.error("Error loading AudioSource (" + sysName + "): " + ex);
}
}
loadedObjects += audioList.size();
// Finally, load Listeners if needed
if (loadedObjects > 0) {
audioList = audio.getChildren("audiolistener");
if (log.isDebugEnabled()) {
log.debug("Found " + audioList.size() + " Audio Listener objects");
}
for (int i = 0; i < audioList.size(); i++) {
Element e = audioList.get(i);
String sysName = getSystemName(e);
if (sysName == null) {
log.warn("unexpected null in systemName " + (e) + " " + (e).getAttributes());
break;
}
String userName = getUserName(e);
if (log.isDebugEnabled()) {
log.debug("create Audio: (" + sysName + ")(" + (userName == null ? "<null>" : userName) + ")");
}
try {
AudioListener al = (AudioListener) am.newAudio(sysName, userName);
// load common parts
loadCommon(al, e);
// load sub-type specific parts
// Transient object for reading child elements
Element ce;
if ((ce = e.getChild("position")) != null) {
al.setPosition(new Vector3f(Float.parseFloat(ce.getAttributeValue("x")), Float.parseFloat(ce.getAttributeValue("y")), Float.parseFloat(ce.getAttributeValue("z"))));
}
if ((ce = e.getChild("velocity")) != null) {
al.setVelocity(new Vector3f(Float.parseFloat(ce.getAttributeValue("x")), Float.parseFloat(ce.getAttributeValue("y")), Float.parseFloat(ce.getAttributeValue("z"))));
}
if ((ce = e.getChild("orientation")) != null) {
al.setOrientation(new Vector3f(Float.parseFloat(ce.getAttributeValue("atX")), Float.parseFloat(ce.getAttributeValue("atY")), Float.parseFloat(ce.getAttributeValue("atZ"))), new Vector3f(Float.parseFloat(ce.getAttributeValue("upX")), Float.parseFloat(ce.getAttributeValue("upY")), Float.parseFloat(ce.getAttributeValue("upZ"))));
}
if ((ce = e.getChild("gain")) != null) {
al.setGain(Float.parseFloat(ce.getValue()));
}
if ((ce = e.getChild("metersperunit")) != null) {
al.setMetersPerUnit(Float.parseFloat(ce.getValue()));
}
} catch (AudioException ex) {
log.error("Error loading AudioListener (" + sysName + "): " + ex);
}
}
Attribute a;
if ((a = audio.getAttribute("distanceattenuated")) != null) {
am.getActiveAudioFactory().setDistanceAttenuated(a.getValue().equals("yes"));
}
}
}
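Going the other way, loadAudio expects an <audio> parent holding audiobuffer, audiosource and audiolistener children. A minimal sketch, again assuming the DefaultAudioManagerXml adapter; the system name "IAB1" and the sound file path are illustrative only, and an audio-capable factory must already be active:

import org.jdom2.Element;

public class LoadOneBuffer {
    public static void main(String[] args) {
        // Build a minimal <audio> parent containing a single buffer definition
        Element buffer = new Element("audiobuffer")
                .setAttribute("systemName", "IAB1")
                .addContent(new Element("systemName").addContent("IAB1"))
                .addContent(new Element("url").addContent("program:resources/sounds/bell.wav"))
                .addContent(new Element("streamed").addContent("no"));
        Element audio = new Element("audio").addContent(buffer);
        // Assumed concrete adapter, as in the earlier sketch
        new jmri.jmrit.audio.configurexml.DefaultAudioManagerXml().loadAudio(audio);
    }
}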
use of jmri.jmrit.audio.AudioBuffer in project JMRI by JMRI.
the class AudioBufferFrame method populateFrame.
/**
* Method to populate the Edit Buffer frame with current values
*/
@Override
public void populateFrame(Audio a) {
if (!(a instanceof AudioBuffer)) {
throw new IllegalArgumentException(a.getSystemName() + " is not an AudioBuffer object");
}
super.populateFrame(a);
AudioBuffer b = (AudioBuffer) a;
url.setText(b.getURL());
// format.setText(b.toString());
stream.setSelected(b.isStreamed());
stream.setEnabled(false); // (!b.isStreamedForced());
loopStart.setValue(b.getStartLoopPoint());
loopEnd.setValue(b.getEndLoopPoint());
loopStart.setEnabled(true);
loopStartLabel.setEnabled(true);
loopEnd.setEnabled(true);
loopEndLabel.setEnabled(true);
this.newBuffer = false;
}
use of jmri.jmrit.audio.AudioBuffer in project JMRI by JMRI.
the class AudioBufferFrame method applyPressed.
void applyPressed(ActionEvent e) {
String user = userName.getText();
if (user.equals("")) {
user = null;
}
String sName = sysName.getText().toUpperCase();
AudioBuffer b;
try {
AudioManager am = InstanceManager.getDefault(jmri.AudioManager.class);
try {
b = (AudioBuffer) am.provideAudio(sName);
} catch (IllegalArgumentException ex) {
throw new AudioException("Problem creating buffer");
}
if (newBuffer && am.getByUserName(user) != null) {
am.deregister(b);
synchronized (lock) {
counter--;
}
throw new AudioException("Duplicate user name - please modify");
}
b.setUserName(user);
b.setStreamed(stream.isSelected());
if (newBuffer || !b.getURL().equals(url.getText())) {
b.setURL(url.getText());
log.debug("After load, end loop point = " + b.getEndLoopPoint());
//b.setStartLoopPoint((Long)loopStart.getValue());
//b.setEndLoopPoint((Long)loopEnd.getValue());
} else {
if (!b.getURL().equals(url.getText())) {
log.debug("Sound changed from: " + b.getURL());
b.setURL(url.getText());
}
}
// Update streaming checkbox if necessary
stream.setSelected(b.isStreamed());
stream.setEnabled(false); // (!b.isStreamedForced());
// Notify changes
model.fireTableDataChanged();
} catch (AudioException ex) {
JOptionPane.showMessageDialog(null, ex.getMessage(), Bundle.getMessage("AudioCreateErrorTitle"), JOptionPane.ERROR_MESSAGE);
}
}
use of jmri.jmrit.audio.AudioBuffer in project JMRI by JMRI.
the class AudioUtil method getAudioBufferList.
/**
* Take a list of AudioByteBuffers and provide a 1:1 corresponding List of
* AudioBuffers
*
* @param prefix : prefix to use when generating AudioBuffer system names.
* @param blist : list of AudioByteBuffers to convert.
*
* @return List of AudioBuffers
*/
public static List<AudioBuffer> getAudioBufferList(String prefix, List<AudioByteBuffer> blist) {
// Sanity check the prefix, since if it's wrong we'll get a casting error below.
if (prefix.charAt(2) != Audio.BUFFER) {
log.warn("Not a Buffer request! " + prefix);
return null;
}
List<AudioBuffer> rlist = new ArrayList<>();
// Index used for the sub-buffer system names
int i = 0;
for (AudioByteBuffer b : blist) {
try {
AudioBuffer buf = (AudioBuffer) jmri.InstanceManager.getDefault(jmri.AudioManager.class).provideAudio(prefix + "_sbuf" + i);
i++;
if (buf == null) {
log.debug("provideAudio returned null!");
return null;
}
// Skip the load if the buffer already holds data; this check might be redundant with the try/catch.
if (buf.getLength() > 0) {
log.debug("provideAudio found already-built buffer:" + buf.getSystemName() + " ... skipping load.");
} else {
buf.loadBuffer(b.data, b.format, b.frequency);
if (log.isDebugEnabled()) {
log.debug("Loaded buffer: " + buf.getSystemName());
log.debug(" from file: " + buf.getURL());
log.debug(" format: " + b.format + ", " + b.frequency + " Hz");
log.debug(" length: " + b.data.limit());
}
}
rlist.add(buf);
} catch (AudioException | IllegalArgumentException e) {
log.warn("Error on provideAudio! " + e.toString());
if (log.isDebugEnabled()) {
jmri.InstanceManager.getDefault(jmri.AudioManager.class).getSystemNameList(Audio.BUFFER).stream().forEach((s) -> {
log.debug("\tBuffer: " + s);
});
}
return null;
}
}
return rlist;
}
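A minimal usage sketch, assuming a list of decoded AudioByteBuffer objects is already available (for example from a VSD file loader) and that AudioUtil and AudioByteBuffer are the classes shown above (their imports are omitted here); the "IAB$VSD:horn" prefix is illustrative, but its third character must be 'B' (Audio.BUFFER) or the method returns null:

import java.util.List;
import jmri.jmrit.audio.AudioBuffer;

public class BufferListExample {
    // Creates JMRI AudioBuffers named IAB$VSD:horn_sbuf0, IAB$VSD:horn_sbuf1, ...
    static List<AudioBuffer> makeHornBuffers(List<AudioByteBuffer> decoded) {
        List<AudioBuffer> buffers = AudioUtil.getAudioBufferList("IAB$VSD:horn", decoded);
        if (buffers == null) {
            // Either the prefix was wrong or one of the sub-buffers failed to load
            System.err.println("Buffer creation failed - see the debug log for details");
        }
        return buffers;
    }
}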