Search in sources:

Example 1 with AudioStream

use of com.jme3.audio.AudioStream in project jmonkeyengine by jMonkeyEngine.

From the class WAVLoader, method load():

/**
 * Loads WAV audio data from the given asset.
 *
 * <p>The opened stream is closed on exit unless the loaded data is an
 * {@link AudioStream}, in which case the stream's ownership transfers to
 * the returned object and it must stay open.
 *
 * @param info the asset to load from
 * @return the loaded {@link AudioData} (buffer or stream)
 * @throws IOException if reading the asset fails
 */
public Object load(AssetInfo info) throws IOException {
    InputStream in = null;
    try {
        in = info.openStream();
        boolean stream = ((AudioKey) info.getKey()).isStream();
        AudioData data = load(info, in, stream);
        if (data instanceof AudioStream) {
            // The AudioStream now owns the InputStream; clear the local
            // reference so the finally block does not close it.
            in = null;
        }
        return data;
    } finally {
        if (in != null) {
            in.close();
        }
    }
}
Also used : AudioStream(com.jme3.audio.AudioStream) AudioData(com.jme3.audio.AudioData) BufferedInputStream(java.io.BufferedInputStream) InputStream(java.io.InputStream) AudioKey(com.jme3.audio.AudioKey)

Example 2 with AudioStream

use of com.jme3.audio.AudioStream in project jmonkeyengine by jMonkeyEngine.

From the class NativeVorbisLoader, method loadStream():

/**
 * Opens an Ogg Vorbis asset as a streaming {@link AudioStream} using the
 * native decoder.
 *
 * <p>On success, the returned stream takes ownership of both the file
 * descriptor and the native decoder (via {@code VorbisInputStream}). On
 * failure, both are released here.
 *
 * @param assetInfo the Android asset to open (must be an AndroidAssetInfo)
 * @return a configured, ready-to-play audio stream
 * @throws IOException if the asset cannot be opened or decoded
 */
private static AudioStream loadStream(AssetInfo assetInfo) throws IOException {
    AndroidAssetInfo aai = (AndroidAssetInfo) assetInfo;
    AssetFileDescriptor afd = null;
    NativeVorbisFile file = null;
    boolean success = false;
    try {
        afd = aai.openFileDescriptor();
        int fd = afd.getParcelFileDescriptor().getFd();
        file = new NativeVorbisFile(fd, afd.getStartOffset(), afd.getLength());
        AudioStream stream = new AudioStream();
        // Vorbis decodes to 16-bit PCM.
        stream.setupFormat(file.channels, 16, file.sampleRate);
        stream.updateData(new VorbisInputStream(afd, file), file.duration);
        success = true;
        return stream;
    } finally {
        if (!success) {
            // BUGFIX: previously, if file.close() threw, afd was never
            // closed, leaking the file descriptor. Nest the finally so
            // afd.close() runs even when file.close() fails.
            try {
                if (file != null) {
                    file.close();
                }
            } finally {
                if (afd != null) {
                    afd.close();
                }
            }
        }
    }
}
Also used : AudioStream(com.jme3.audio.AudioStream) AssetFileDescriptor(android.content.res.AssetFileDescriptor) AndroidAssetInfo(com.jme3.asset.plugins.AndroidLocator.AndroidAssetInfo)

Example 3 with AudioStream

use of com.jme3.audio.AudioStream in project jmonkeyengine by jMonkeyEngine.

From the class OGGLoader, method load():

/**
 * Decodes Ogg Vorbis data from the given stream into either a fully
 * decoded {@link AudioBuffer} or a lazily decoded {@link AudioStream}.
 *
 * @param in          the raw OGG input
 * @param readStream  if true, return a streaming AudioStream;
 *                    otherwise decode everything into an AudioBuffer
 * @param streamCache if true (and streaming), cache the OGG pages so the
 *                    stream is seekable
 * @return the decoded audio data
 * @throws IOException if the OGG/Vorbis data cannot be read
 */
private AudioData load(InputStream in, boolean readStream, boolean streamCache) throws IOException {
    boolean cached = readStream && streamCache;
    oggStream = cached ? new CachedOggStream(in) : new UncachedOggStream(in);
    // Assumes the first logical stream carries the Vorbis audio —
    // NOTE(review): no null/empty check here; verify upstream guarantees.
    Collection<LogicalOggStream> streams = oggStream.getLogicalStreams();
    loStream = streams.iterator().next();
    vorbisStream = new VorbisStream(loStream);
    streamHdr = vorbisStream.getIdentificationHeader();
    if (readStream) {
        AudioStream audioStream = new AudioStream();
        // Vorbis decodes to 16-bit PCM.
        audioStream.setupFormat(streamHdr.getChannels(), 16, streamHdr.getSampleRate());
        // computeStreamDuration() may return -1 if the duration is unknown.
        float streamDuration = computeStreamDuration();
        audioStream.updateData(readToStream(oggStream.isSeekable()), streamDuration);
        return audioStream;
    }
    AudioBuffer audioBuffer = new AudioBuffer();
    audioBuffer.setupFormat(streamHdr.getChannels(), 16, streamHdr.getSampleRate());
    audioBuffer.updateData(readToBuffer());
    return audioBuffer;
}
Also used : AudioStream(com.jme3.audio.AudioStream) LogicalOggStream(de.jarnbjo.ogg.LogicalOggStream) VorbisStream(de.jarnbjo.vorbis.VorbisStream) AudioBuffer(com.jme3.audio.AudioBuffer)

Example 4 with AudioStream

use of com.jme3.audio.AudioStream in project jmonkeyengine by jMonkeyEngine.

From the class ALAudioRenderer, method updateSourceParam():

/**
 * Pushes a single changed parameter of an {@link AudioSource} to its bound
 * OpenAL source. If the source is not currently bound to a channel
 * (channel &lt; 0), the update is silently skipped — the full state will be
 * applied when the source is next played.
 *
 * <p>Thread-safe: all AL access is serialized on {@code threadLock}.
 * Note the composite cases ({@code IsPositional}, {@code IsDirectional},
 * {@code ReverbEnabled}) recursively re-apply their dependent parameters.
 *
 * @param src   the audio source whose parameter changed
 * @param param which parameter to push to OpenAL
 */
public void updateSourceParam(AudioSource src, AudioParam param) {
    checkDead();
    synchronized (threadLock) {
        if (audioDisabled) {
            return;
        }
        // it should be safe to just ignore the update
        if (src.getChannel() < 0) {
            return;
        }
        assert src.getChannel() >= 0;
        // Map the logical channel to the underlying AL source id.
        int id = channels[src.getChannel()];
        switch(param) {
            case Position:
                // Positional-only parameters are no-ops for headspace sources.
                if (!src.isPositional()) {
                    return;
                }
                Vector3f pos = src.getPosition();
                al.alSource3f(id, AL_POSITION, pos.x, pos.y, pos.z);
                break;
            case Velocity:
                if (!src.isPositional()) {
                    return;
                }
                Vector3f vel = src.getVelocity();
                al.alSource3f(id, AL_VELOCITY, vel.x, vel.y, vel.z);
                break;
            case MaxDistance:
                if (!src.isPositional()) {
                    return;
                }
                al.alSourcef(id, AL_MAX_DISTANCE, src.getMaxDistance());
                break;
            case RefDistance:
                if (!src.isPositional()) {
                    return;
                }
                al.alSourcef(id, AL_REFERENCE_DISTANCE, src.getRefDistance());
                break;
            case ReverbFilter:
                // Reverb requires the EFX extension and a positional,
                // reverb-enabled source.
                if (!supportEfx || !src.isPositional() || !src.isReverbEnabled()) {
                    return;
                }
                int filter = EFX.AL_FILTER_NULL;
                if (src.getReverbFilter() != null) {
                    Filter f = src.getReverbFilter();
                    if (f.isUpdateNeeded()) {
                        updateFilter(f);
                    }
                    filter = f.getId();
                }
                // Route the source through the shared reverb effect slot,
                // optionally through the source's own filter.
                al.alSource3i(id, EFX.AL_AUXILIARY_SEND_FILTER, reverbFxSlot, 0, filter);
                break;
            case ReverbEnabled:
                if (!supportEfx || !src.isPositional()) {
                    return;
                }
                if (src.isReverbEnabled()) {
                    // Delegate to ReverbFilter to (re)attach the effect send.
                    updateSourceParam(src, AudioParam.ReverbFilter);
                } else {
                    // Detach the auxiliary send entirely.
                    al.alSource3i(id, EFX.AL_AUXILIARY_SEND_FILTER, 0, 0, EFX.AL_FILTER_NULL);
                }
                break;
            case IsPositional:
                if (!src.isPositional()) {
                    // Play in headspace
                    al.alSourcei(id, AL_SOURCE_RELATIVE, AL_TRUE);
                    al.alSource3f(id, AL_POSITION, 0, 0, 0);
                    al.alSource3f(id, AL_VELOCITY, 0, 0, 0);
                    // Disable reverb
                    al.alSource3i(id, EFX.AL_AUXILIARY_SEND_FILTER, 0, 0, EFX.AL_FILTER_NULL);
                } else {
                    // Switched to world space: re-apply every positional
                    // parameter the source carries.
                    al.alSourcei(id, AL_SOURCE_RELATIVE, AL_FALSE);
                    updateSourceParam(src, AudioParam.Position);
                    updateSourceParam(src, AudioParam.Velocity);
                    updateSourceParam(src, AudioParam.MaxDistance);
                    updateSourceParam(src, AudioParam.RefDistance);
                    updateSourceParam(src, AudioParam.ReverbEnabled);
                }
                break;
            case Direction:
                // Directional-only parameters are no-ops for omni sources.
                if (!src.isDirectional()) {
                    return;
                }
                Vector3f dir = src.getDirection();
                al.alSource3f(id, AL_DIRECTION, dir.x, dir.y, dir.z);
                break;
            case InnerAngle:
                if (!src.isDirectional()) {
                    return;
                }
                al.alSourcef(id, AL_CONE_INNER_ANGLE, src.getInnerAngle());
                break;
            case OuterAngle:
                if (!src.isDirectional()) {
                    return;
                }
                al.alSourcef(id, AL_CONE_OUTER_ANGLE, src.getOuterAngle());
                break;
            case IsDirectional:
                if (src.isDirectional()) {
                    // Apply the cone parameters and silence sound outside
                    // the outer cone (gain 0).
                    updateSourceParam(src, AudioParam.Direction);
                    updateSourceParam(src, AudioParam.InnerAngle);
                    updateSourceParam(src, AudioParam.OuterAngle);
                    al.alSourcef(id, AL_CONE_OUTER_GAIN, 0);
                } else {
                    // Reset to an omnidirectional cone (360° with full
                    // outer gain).
                    al.alSourcef(id, AL_CONE_INNER_ANGLE, 360);
                    al.alSourcef(id, AL_CONE_OUTER_ANGLE, 360);
                    al.alSourcef(id, AL_CONE_OUTER_GAIN, 1f);
                }
                break;
            case DryFilter:
                if (!supportEfx) {
                    return;
                }
                if (src.getDryFilter() != null) {
                    Filter f = src.getDryFilter();
                    if (f.isUpdateNeeded()) {
                        updateFilter(f);
                        // NOTE: must re-attach filter for changes to apply.
                        al.alSourcei(id, EFX.AL_DIRECT_FILTER, f.getId());
                    }
                } else {
                    al.alSourcei(id, EFX.AL_DIRECT_FILTER, EFX.AL_FILTER_NULL);
                }
                break;
            case Looping:
                // Streams must not use AL-level looping: looping a stream is
                // handled by the decoder thread refilling buffers instead.
                if (src.isLooping() && !(src.getAudioData() instanceof AudioStream)) {
                    al.alSourcei(id, AL_LOOPING, AL_TRUE);
                } else {
                    al.alSourcei(id, AL_LOOPING, AL_FALSE);
                }
                break;
            case Volume:
                al.alSourcef(id, AL_GAIN, src.getVolume());
                break;
            case Pitch:
                al.alSourcef(id, AL_PITCH, src.getPitch());
                break;
        }
    }
}
Also used : Vector3f(com.jme3.math.Vector3f)

Example 5 with AudioStream

use of com.jme3.audio.AudioStream in project jmonkeyengine by jMonkeyEngine.

From the class ALAudioRenderer, method updateInDecoderThread():

/**
 * Periodic update run on the decoder thread: refills the AL buffer queues
 * of all channels playing an {@link AudioStream}, restarts any source that
 * stopped from buffer starvation, and reclaims unused native objects.
 *
 * @param tpf time per frame in seconds (unused)
 */
public void updateInDecoderThread(float tpf) {
    if (audioDisabled) {
        return;
    }
    for (int i = 0; i < channels.length; i++) {
        AudioSource src = chanSrcs[i];
        // Only channels currently bound to a streaming source need work.
        if (src == null || !(src.getAudioData() instanceof AudioStream)) {
            continue;
        }
        int sourceId = channels[i];
        AudioStream stream = (AudioStream) src.getAudioData();
        Status oalStatus = convertStatus(al.alGetSourcei(sourceId, AL_SOURCE_STATE));
        Status jmeStatus = src.getStatus();
        // Keep filling data (even if we are stopped / paused)
        boolean refilled = fillStreamingSource(sourceId, stream, src.isLooping());
        if (!refilled) {
            continue;
        }
        if (oalStatus == Status.Stopped && jmeStatus == Status.Playing) {
            // The source got stopped due to buffer starvation.
            // Start it again.
            logger.log(Level.WARNING, "Buffer starvation " + "occurred while playing stream");
            al.alSourcePlay(sourceId);
        } else if (!(oalStatus == Status.Playing && jmeStatus == Status.Playing)) {
            // After a successful refill, any state combination other than
            // playing/playing is an internal inconsistency.
            throw new AssertionError();
        }
    }
    // Delete any unused objects.
    objManager.deleteUnused(this);
}
Also used : Status(com.jme3.audio.AudioSource.Status)

Aggregations

AudioStream (com.jme3.audio.AudioStream)4 AudioBuffer (com.jme3.audio.AudioBuffer)2 Status (com.jme3.audio.AudioSource.Status)2 Vector3f (com.jme3.math.Vector3f)2 AssetFileDescriptor (android.content.res.AssetFileDescriptor)1 AndroidAssetInfo (com.jme3.asset.plugins.AndroidLocator.AndroidAssetInfo)1 AudioData (com.jme3.audio.AudioData)1 AudioKey (com.jme3.audio.AudioKey)1 LogicalOggStream (de.jarnbjo.ogg.LogicalOggStream)1 VorbisStream (de.jarnbjo.vorbis.VorbisStream)1 BufferedInputStream (java.io.BufferedInputStream)1 IOException (java.io.IOException)1 InputStream (java.io.InputStream)1