Example usage of com.jme3.audio.AudioStream in the jMonkeyEngine project: class ALAudioRenderer, method setSourceParams.
/**
 * Applies all OpenAL parameters (position, velocity, attenuation, filters,
 * looping, gain, pitch, time offset, and cone settings) of the given
 * {@link AudioSource} to the AL source with the given id.
 *
 * @param id           OpenAL source id to configure
 * @param src          the audio source whose parameters are applied
 * @param forceNonLoop if true, AL-level looping is disabled regardless of
 *                     {@code src.isLooping()} (used for instanced playback)
 */
private void setSourceParams(int id, AudioSource src, boolean forceNonLoop) {
    if (src.isPositional()) {
        Vector3f pos = src.getPosition();
        Vector3f vel = src.getVelocity();
        al.alSource3f(id, AL_POSITION, pos.x, pos.y, pos.z);
        al.alSource3f(id, AL_VELOCITY, vel.x, vel.y, vel.z);
        al.alSourcef(id, AL_MAX_DISTANCE, src.getMaxDistance());
        al.alSourcef(id, AL_REFERENCE_DISTANCE, src.getRefDistance());
        al.alSourcei(id, AL_SOURCE_RELATIVE, AL_FALSE);

        if (src.isReverbEnabled() && supportEfx) {
            int filter = EFX.AL_FILTER_NULL;
            if (src.getReverbFilter() != null) {
                Filter f = src.getReverbFilter();
                if (f.isUpdateNeeded()) {
                    updateFilter(f);
                }
                filter = f.getId();
            }
            al.alSource3i(id, EFX.AL_AUXILIARY_SEND_FILTER, reverbFxSlot, 0, filter);
        }
    } else {
        // Play in headspace: the source is positioned relative to the
        // listener, i.e. directly at the listener's location.
        al.alSourcei(id, AL_SOURCE_RELATIVE, AL_TRUE);
        al.alSource3f(id, AL_POSITION, 0, 0, 0);
        al.alSource3f(id, AL_VELOCITY, 0, 0, 0);
    }

    if (src.getDryFilter() != null && supportEfx) {
        Filter f = src.getDryFilter();
        if (f.isUpdateNeeded()) {
            updateFilter(f);
        }
        // BUGFIX: attach the filter unconditionally. Previously the attach
        // was inside the isUpdateNeeded() branch, so a filter that had
        // already been compiled (e.g. when shared with another source) was
        // never applied to this source. Re-attaching is also required for
        // filter changes to take effect.
        al.alSourcei(id, EFX.AL_DIRECT_FILTER, f.getId());
    }

    // Streams are refilled and looped by the decoder thread, so the AL-level
    // loop flag must stay off for streams (and for instanced playback).
    if (forceNonLoop || src.getAudioData() instanceof AudioStream) {
        al.alSourcei(id, AL_LOOPING, AL_FALSE);
    } else {
        al.alSourcei(id, AL_LOOPING, src.isLooping() ? AL_TRUE : AL_FALSE);
    }

    al.alSourcef(id, AL_GAIN, src.getVolume());
    al.alSourcef(id, AL_PITCH, src.getPitch());
    al.alSourcef(id, AL_SEC_OFFSET, src.getTimeOffset());

    if (src.isDirectional()) {
        Vector3f dir = src.getDirection();
        al.alSource3f(id, AL_DIRECTION, dir.x, dir.y, dir.z);
        al.alSourcef(id, AL_CONE_INNER_ANGLE, src.getInnerAngle());
        al.alSourcef(id, AL_CONE_OUTER_ANGLE, src.getOuterAngle());
        al.alSourcef(id, AL_CONE_OUTER_GAIN, 0);
    } else {
        // Omnidirectional source: full-sphere cone with no outer attenuation.
        al.alSourcef(id, AL_CONE_INNER_ANGLE, 360);
        al.alSourcef(id, AL_CONE_OUTER_ANGLE, 360);
        al.alSourcef(id, AL_CONE_OUTER_GAIN, 1f);
    }
}
Example usage of com.jme3.audio.AudioStream in the jMonkeyEngine project: class ALAudioRenderer, method updateInRenderThread.
/**
 * Per-frame update run on the render thread: for every channel with an
 * attached source, reconciles the OpenAL source state with the JME-side
 * {@code AudioSource} status, reclaiming channels whose playback finished.
 *
 * @param tpf time per frame in seconds (currently unused by this method)
 */
public void updateInRenderThread(float tpf) {
    if (audioDisabled) {
        return;
    }
    for (int i = 0; i < channels.length; i++) {
        AudioSource src = chanSrcs[i];
        if (src == null) {
            // No source attached to this channel; nothing to reconcile.
            continue;
        }
        int sourceId = channels[i];
        // A "bound" source owns its channel; instanced (fire-and-forget)
        // playback uses a channel without the source tracking it.
        boolean boundSource = i == src.getChannel();
        boolean reclaimChannel = false;
        // Actual playback state as reported by OpenAL.
        Status oalStatus = convertStatus(al.alGetSourcei(sourceId, AL_SOURCE_STATE));
        if (!boundSource) {
            // Instanced playback: the source object does not track this
            // channel, so the renderer manages its lifetime here.
            // Handle it here.
            if (oalStatus == Status.Stopped) {
                // Instanced audio stopped playing. Reclaim channel.
                clearChannel(i);
                freeChannel(i);
            } else if (oalStatus == Status.Paused) {
                // Instanced playback has no pause API; reaching this state
                // indicates a renderer bug.
                throw new AssertionError("Instanced audio cannot be paused");
            }
            continue;
        }
        // JME-side status last known to the application.
        Status jmeStatus = src.getStatus();
        // Check if we need to sync JME status with OAL status.
        if (oalStatus != jmeStatus) {
            if (oalStatus == Status.Stopped && jmeStatus == Status.Playing) {
                // Maybe we need to reclaim the channel.
                if (src.getAudioData() instanceof AudioStream) {
                    AudioStream stream = (AudioStream) src.getAudioData();
                    if (stream.isEOF() && !src.isLooping()) {
                        // Stream finished playing
                        reclaimChannel = true;
                    } else {
                        // Stream still has data.
                        // Buffer starvation occurred.
                        // Audio decoder thread will fill the data
                        // and start the channel again.
                    }
                } else {
                    // Buffer finished playing.
                    if (src.isLooping()) {
                        // When a device is disconnected, all sources
                        // will enter the "stopped" state.
                        logger.warning("A looping sound has stopped playing");
                    }
                    reclaimChannel = true;
                }
                if (reclaimChannel) {
                    // Sync JME status to stopped and release the channel.
                    src.setStatus(Status.Stopped);
                    src.setChannel(-1);
                    clearChannel(i);
                    freeChannel(i);
                }
            } else {
                // Any other OAL/JME status mismatch on a bound source is a
                // renderer invariant violation.
                // This is only relevant for bound sources.
                throw new AssertionError("Unexpected sound status. " + "OAL: " + oalStatus + ", JME: " + jmeStatus);
            }
        } else {
            // Statuses agree; a bound channel must never linger in the
            // stopped state — it should have been reclaimed above.
            // Stopped channel was not cleared correctly.
            if (oalStatus == Status.Stopped) {
                throw new AssertionError("Channel " + i + " was not reclaimed");
            }
        }
    }
}
Example usage of com.jme3.audio.AudioStream in the jMonkeyEngine project: class WAVLoader, method load.
/**
 * Parses a RIFF/WAVE stream and returns its audio data, either as a
 * streaming {@link AudioStream} or a fully-loaded {@link AudioBuffer}.
 *
 * @param info        asset descriptor, used to reopen the stream if needed
 * @param inputStream the raw WAVE input
 * @param stream      if true, produce an AudioStream; otherwise an AudioBuffer
 * @return the parsed audio data, or null if a chunk could not be skipped
 * @throws IOException if the input is not a valid WAVE file
 */
private AudioData load(AssetInfo info, InputStream inputStream, boolean stream) throws IOException {
    this.in = new ResettableInputStream(info, inputStream);
    inOffset = 0;

    // RIFF header: "RIFF" <size> "WAVE"
    int sig = in.readInt();
    if (sig != i_RIFF)
        throw new IOException("File is not a WAVE file");

    // skip size
    in.readInt();
    if (in.readInt() != i_WAVE)
        throw new IOException("WAVE File does not contain audio");

    inOffset += 4 * 3;

    readStream = stream;
    if (readStream) {
        audioStream = new AudioStream();
        audioData = audioStream;
    } else {
        audioBuffer = new AudioBuffer();
        audioData = audioBuffer;
    }

    // Iterate over chunks until the data chunk is found.
    while (true) {
        int type = in.readInt();
        int len = in.readInt();
        inOffset += 4 * 2;
        switch(type) {
            case i_fmt:
                readFormatChunk(len);
                inOffset += len;
                break;
            case i_data:
                // Compute duration based on data chunk size.
                // BUGFIX: divide in floating point. The old expression
                // (float) (len / bytesPerSec) performed integer division
                // first, truncating the duration to whole seconds.
                duration = (float) ((double) len / bytesPerSec);
                if (readStream) {
                    readDataChunkForStream(inOffset, len);
                } else {
                    readDataChunkForBuffer(len);
                }
                return audioData;
            default:
                // Unknown chunk: skip its payload.
                int skipped = in.skipBytes(len);
                if (skipped <= 0) {
                    // Could not advance past the chunk (truncated file);
                    // give up. NOTE(review): returning null preserves the
                    // original behavior — callers must handle it.
                    return null;
                }
                inOffset += skipped;
                break;
        }
    }
}
Aggregations