
Example 6 with Mixer

use of javax.sound.sampled.Mixer in project jdk8u_jdk by JetBrains.

the class SoftMixingMixer method open.

public void open(SourceDataLine line) throws LineUnavailableException {
    if (isOpen()) {
        implicitOpen = false;
        return;
    }
    synchronized (control_mutex) {
        try {
            if (line != null)
                format = line.getFormat();
            AudioInputStream ais = openStream(getFormat());
            if (line == null) {
                synchronized (SoftMixingMixerProvider.mutex) {
                    SoftMixingMixerProvider.lockthread = Thread.currentThread();
                }
                try {
                    Mixer defaultmixer = AudioSystem.getMixer(null);
                    if (defaultmixer != null) {
                        // Search for suitable line
                        DataLine.Info idealinfo = null;
                        AudioFormat idealformat = null;
                        Line.Info[] lineinfos = defaultmixer.getSourceLineInfo();
                        idealFound: for (int i = 0; i < lineinfos.length; i++) {
                            if (lineinfos[i].getLineClass() == SourceDataLine.class) {
                                DataLine.Info info = (DataLine.Info) lineinfos[i];
                                AudioFormat[] formats = info.getFormats();
                                for (int j = 0; j < formats.length; j++) {
                                    AudioFormat format = formats[j];
                                    if (format.getChannels() == 2 || format.getChannels() == AudioSystem.NOT_SPECIFIED)
                                        if (format.getEncoding().equals(Encoding.PCM_SIGNED) || format.getEncoding().equals(Encoding.PCM_UNSIGNED))
                                            if (format.getSampleRate() == AudioSystem.NOT_SPECIFIED || format.getSampleRate() == 48000.0)
                                                if (format.getSampleSizeInBits() == AudioSystem.NOT_SPECIFIED || format.getSampleSizeInBits() == 16) {
                                                    idealinfo = info;
                                                    int ideal_channels = format.getChannels();
                                                    boolean ideal_signed = format.getEncoding().equals(Encoding.PCM_SIGNED);
                                                    float ideal_rate = format.getSampleRate();
                                                    boolean ideal_endian = format.isBigEndian();
                                                    int ideal_bits = format.getSampleSizeInBits();
                                                    if (ideal_bits == AudioSystem.NOT_SPECIFIED)
                                                        ideal_bits = 16;
                                                    if (ideal_channels == AudioSystem.NOT_SPECIFIED)
                                                        ideal_channels = 2;
                                                    if (ideal_rate == AudioSystem.NOT_SPECIFIED)
                                                        ideal_rate = 48000;
                                                    idealformat = new AudioFormat(ideal_rate, ideal_bits, ideal_channels, ideal_signed, ideal_endian);
                                                    break idealFound;
                                                }
                                }
                            }
                        }
                        if (idealformat != null) {
                            format = idealformat;
                            line = (SourceDataLine) defaultmixer.getLine(idealinfo);
                        }
                    }
                    if (line == null)
                        line = AudioSystem.getSourceDataLine(format);
                } finally {
                    synchronized (SoftMixingMixerProvider.mutex) {
                        SoftMixingMixerProvider.lockthread = null;
                    }
                }
                if (line == null)
                    throw new IllegalArgumentException("No line matching " + info.toString() + " is supported.");
            }
            double latency = this.latency;
            if (!line.isOpen()) {
                int bufferSize = getFormat().getFrameSize() * (int) (getFormat().getFrameRate() * (latency / 1000000f));
                line.open(getFormat(), bufferSize);
                // Remember that we opened that line
                // so we can close again in SoftSynthesizer.close()
                sourceDataLine = line;
            }
            if (!line.isActive())
                line.start();
            int controlbuffersize = 512;
            try {
                controlbuffersize = ais.available();
            } catch (IOException e) {
            }
            // Tell the mixer not to fill read buffers fully.
            // This lowers latency, and tells DataPusher
            // to read in smaller amounts.
            // mainmixer.readfully = false;
            // pusher = new DataPusher(line, ais);
            int buffersize = line.getBufferSize();
            buffersize -= buffersize % controlbuffersize;
            if (buffersize < 3 * controlbuffersize)
                buffersize = 3 * controlbuffersize;
            if (jitter_correction) {
                ais = new SoftJitterCorrector(ais, buffersize, controlbuffersize);
            }
            pusher = new SoftAudioPusher(line, ais, controlbuffersize);
            pusher_stream = ais;
            pusher.start();
        } catch (LineUnavailableException e) {
            if (isOpen())
                close();
            throw new LineUnavailableException(e.toString());
        }
    }
}
Also used : Mixer(javax.sound.sampled.Mixer) DataLine(javax.sound.sampled.DataLine) SourceDataLine(javax.sound.sampled.SourceDataLine) LineUnavailableException(javax.sound.sampled.LineUnavailableException) IOException(java.io.IOException) AudioInputStream(javax.sound.sampled.AudioInputStream) AudioFormat(javax.sound.sampled.AudioFormat)
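
The buffer size passed to line.open(...) above is computed from the mixer's latency value; the 1,000,000 divisor suggests that latency is held in microseconds. A standalone sketch of that computation, using an assumed 120 ms latency and an assumed 16-bit stereo 44.1 kHz format (neither value is taken from SoftMixingMixer itself):

// Sketch only: latency and format are assumed example values, not SoftMixingMixer's defaults.
static int bufferSizeForLatency() {
    javax.sound.sampled.AudioFormat format =
            new javax.sound.sampled.AudioFormat(44100f, 16, 2, true, false);
    double latencyInMicroseconds = 120_000;              // 120 ms
    int frameSize = format.getFrameSize();               // 4 bytes: 2 channels * 2 bytes each
    int frames = (int) (format.getFrameRate() * (latencyInMicroseconds / 1_000_000.0));
    return frameSize * frames;                            // 4 * 5292 = 21168 bytes
}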

Example 7 with Mixer

use of javax.sound.sampled.Mixer in project Spark by igniterealtime.

the class JavaMixer method createMixerChildren.

private void createMixerChildren(JavaMixer.MixerNode mixerNode) {
    Mixer mixer = mixerNode.getMixer();
    Line.Info[] infosToCheck = getPortInfo(mixer);
    for (Line.Info anInfosToCheck : infosToCheck) {
        if (mixer.isLineSupported(anInfosToCheck)) {
            Port port = null;
            DataLine dLine = null;
            int maxLines = mixer.getMaxLines(anInfosToCheck);
            // Workaround to prevent a JVM crash on Mac OS X (Intel) 1.5.0_07 JVM
            if (maxLines > 0) {
                try {
                    if (anInfosToCheck instanceof Port.Info) {
                        port = (Port) mixer.getLine(anInfosToCheck);
                        port.open();
                    } else if (anInfosToCheck instanceof DataLine.Info) {
                        dLine = (DataLine) mixer.getLine(anInfosToCheck);
                        if (!dLine.isOpen()) {
                            dLine.open();
                        }
                    }
                } catch (LineUnavailableException e) {
                // Do Nothing
                } catch (Exception e) {
                // Do Nothing
                }
            }
            if (port != null) {
                JavaMixer.PortNode portNode = new JavaMixer.PortNode(port);
                createPortChildren(portNode);
                mixerNode.add(portNode);
            } else if (dLine != null) {
                JavaMixer.PortNode portNode = new JavaMixer.PortNode(dLine);
                createPortChildren(portNode);
                mixerNode.add(portNode);
            }
        }
    }
}
Also used : Mixer(javax.sound.sampled.Mixer) Port(javax.sound.sampled.Port) DataLine(javax.sound.sampled.DataLine) LineUnavailableException(javax.sound.sampled.LineUnavailableException) Line(javax.sound.sampled.Line)
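
The getPortInfo(mixer) helper used above is not part of this excerpt. A plausible sketch, assuming it only gathers the mixer's source and target line infos (an illustration, not the project's actual implementation; java.util types are fully qualified to keep it self-contained):

// Hypothetical helper: collect all source and target Line.Info objects of a mixer.
private Line.Info[] getPortInfo(Mixer mixer) {
    java.util.List<Line.Info> infos = new java.util.ArrayList<>();
    java.util.Collections.addAll(infos, mixer.getSourceLineInfo());
    java.util.Collections.addAll(infos, mixer.getTargetLineInfo());
    return infos.toArray(new Line.Info[0]);
}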

Example 8 with Mixer

use of javax.sound.sampled.Mixer in project javacv by bytedeco.

the class WebcamAndMicrophoneCapture method main.

public static void main(String[] args) throws Exception, org.bytedeco.javacv.FrameGrabber.Exception {
    int captureWidth = 1280;
    int captureHeight = 720;
    // The available FrameGrabber classes include OpenCVFrameGrabber (opencv_videoio),
    // DC1394FrameGrabber, FlyCaptureFrameGrabber, OpenKinectFrameGrabber,
    // PS3EyeFrameGrabber, VideoInputFrameGrabber, and FFmpegFrameGrabber.
    OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(WEBCAM_DEVICE_INDEX);
    grabber.setImageWidth(captureWidth);
    grabber.setImageHeight(captureHeight);
    grabber.start();
    // org.bytedeco.javacv.FFmpegFrameRecorder.FFmpegFrameRecorder(String
    // filename, int imageWidth, int imageHeight, int audioChannels)
    // For each param, we're passing in...
    // filename = either a path to a local file we wish to create, or an
    // RTMP url to an FMS / Wowza server
    // imageWidth = width we specified for the grabber
    // imageHeight = height we specified for the grabber
    // audioChannels = 2, because we like stereo
    FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("rtmp://my-streaming-server/app_name_here/instance_name/stream_name", captureWidth, captureHeight, 2);
    recorder.setInterleaved(true);
    // decrease "startup" latency in FFMPEG (see:
    // https://trac.ffmpeg.org/wiki/StreamingGuide)
    recorder.setVideoOption("tune", "zerolatency");
    // tradeoff between quality and encode speed
    // possible values are ultrafast,superfast, veryfast, faster, fast,
    // medium, slow, slower, veryslow
    // ultrafast offers us the least amount of compression (lower encoder
    // CPU) at the cost of a larger stream size
    // at the other end, veryslow provides the best compression (high
    // encoder CPU) while lowering the stream size
    // (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
    recorder.setVideoOption("preset", "ultrafast");
    // Constant Rate Factor (see: https://trac.ffmpeg.org/wiki/Encode/H.264)
    recorder.setVideoOption("crf", "28");
    // 2000 kb/s, reasonable "sane" area for 720
    recorder.setVideoBitrate(2000000);
    recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
    recorder.setFormat("flv");
    // FPS (frames per second)
    recorder.setFrameRate(FRAME_RATE);
    // Key frame interval, in our case every 2 seconds -> 30 (fps) * 2 = 60
    // (gop length)
    recorder.setGopSize(GOP_LENGTH_IN_FRAMES);
    // We don't want variable bitrate audio
    recorder.setAudioOption("crf", "0");
    // Highest quality
    recorder.setAudioQuality(0);
    // 192 Kbps
    recorder.setAudioBitrate(192000);
    recorder.setSampleRate(44100);
    recorder.setAudioChannels(2);
    recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
    // Jack 'n coke... do it...
    recorder.start();
    // Thread for audio capture, this could be in a nested private class if you prefer...
    new Thread(new Runnable() {

        @Override
        public void run() {
            // Pick a format...
            // NOTE: It is better to enumerate the formats that the system supports,
            // because getLine() can error out with any particular format...
            // For us: 44.1 sample rate, 16 bits, stereo, signed, little endian
            AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
            // Get TargetDataLine with that format
            Mixer.Info[] minfoSet = AudioSystem.getMixerInfo();
            Mixer mixer = AudioSystem.getMixer(minfoSet[AUDIO_DEVICE_INDEX]);
            DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
            try {
                // Open and start capturing audio
                // It's possible to have more control over the chosen audio device with this line:
                // TargetDataLine line = (TargetDataLine)mixer.getLine(dataLineInfo);
                TargetDataLine line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
                line.open(audioFormat);
                line.start();
                int sampleRate = (int) audioFormat.getSampleRate();
                int numChannels = audioFormat.getChannels();
                // Let's initialize our audio buffer...
                int audioBufferSize = sampleRate * numChannels;
                byte[] audioBytes = new byte[audioBufferSize];
                // Using a ScheduledThreadPoolExecutor vs a while loop with
                // a Thread.sleep will allow
                // us to get around some OS specific timing issues, and keep
                // to a more precise
                // clock as the fixed rate accounts for garbage collection
                // time, etc
                // a similar approach could be used for the webcam capture
                // as well, if you wish
                ScheduledThreadPoolExecutor exec = new ScheduledThreadPoolExecutor(1);
                exec.scheduleAtFixedRate(new Runnable() {

                    @Override
                    public void run() {
                        try {
                            // Read from the line... non-blocking
                            int nBytesRead = 0;
                            while (nBytesRead == 0) {
                                nBytesRead = line.read(audioBytes, 0, line.available());
                            }
                            // Since we specified 16 bits in the AudioFormat,
                            // we need to convert our read byte[] to short[]
                            // (see source from FFmpegFrameRecorder.recordSamples for AV_SAMPLE_FMT_S16)
                            // Let's initialize our short[] array
                            int nSamplesRead = nBytesRead / 2;
                            short[] samples = new short[nSamplesRead];
                            // Let's wrap our short[] into a ShortBuffer and
                            // pass it to recordSamples
                            ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
                            ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
                            // recorder is instance of
                            // org.bytedeco.javacv.FFmpegFrameRecorder
                            recorder.recordSamples(sampleRate, numChannels, sBuff);
                        } catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
                            e.printStackTrace();
                        }
                    }
                }, 0, (long) 1000 / FRAME_RATE, TimeUnit.MILLISECONDS);
            } catch (LineUnavailableException e1) {
                e1.printStackTrace();
            }
        }
    }).start();
    // A really nice hardware accelerated component for our preview...
    CanvasFrame cFrame = new CanvasFrame("Capture Preview", CanvasFrame.getDefaultGamma() / grabber.getGamma());
    Frame capturedFrame = null;
    // While we are capturing...
    while ((capturedFrame = grabber.grab()) != null) {
        if (cFrame.isVisible()) {
            // Show our frame in the preview
            cFrame.showImage(capturedFrame);
        }
        // Initialize the start time as late as possible; the delta between assignment and the computed timestamp could otherwise be too high
        if (startTime == 0)
            startTime = System.currentTimeMillis();
        // Create timestamp for this frame
        videoTS = 1000 * (System.currentTimeMillis() - startTime);
        // Check for AV drift
        if (videoTS > recorder.getTimestamp()) {
            System.out.println("Lip-flap correction: " + videoTS + " : " + recorder.getTimestamp() + " -> " + (videoTS - recorder.getTimestamp()));
            // We tell the recorder to write this frame at this timestamp
            recorder.setTimestamp(videoTS);
        }
        // Send the frame to the org.bytedeco.javacv.FFmpegFrameRecorder
        recorder.record(capturedFrame);
    }
    cFrame.dispose();
    recorder.stop();
    grabber.stop();
}
Also used : Frame(org.bytedeco.javacv.Frame) CanvasFrame(org.bytedeco.javacv.CanvasFrame) ScheduledThreadPoolExecutor(java.util.concurrent.ScheduledThreadPoolExecutor) Mixer(javax.sound.sampled.Mixer) TargetDataLine(javax.sound.sampled.TargetDataLine) DataLine(javax.sound.sampled.DataLine) LineUnavailableException(javax.sound.sampled.LineUnavailableException) OpenCVFrameGrabber(org.bytedeco.javacv.OpenCVFrameGrabber) Exception(org.bytedeco.javacv.FrameRecorder.Exception) FFmpegFrameRecorder(org.bytedeco.javacv.FFmpegFrameRecorder) AudioFormat(javax.sound.sampled.AudioFormat) ShortBuffer(java.nio.ShortBuffer)
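
AUDIO_DEVICE_INDEX is a constant defined elsewhere in the class; choosing a value means knowing which mixers are present. A small sketch that lists the available mixers and reports which ones can supply a TargetDataLine for the capture format (the format values mirror the example above but are otherwise assumptions):

// Sketch: print the mixers so an AUDIO_DEVICE_INDEX can be chosen by hand.
AudioFormat captureFormat = new AudioFormat(44100.0F, 16, 2, true, false);
DataLine.Info targetInfo = new DataLine.Info(TargetDataLine.class, captureFormat);
Mixer.Info[] mixerInfos = AudioSystem.getMixerInfo();
for (int i = 0; i < mixerInfos.length; i++) {
    Mixer mixer = AudioSystem.getMixer(mixerInfos[i]);
    boolean canCapture = mixer.isLineSupported(targetInfo);
    System.out.println(i + ": " + mixerInfos[i].getName()
            + " (" + mixerInfos[i].getDescription() + "), capture supported: " + canCapture);
}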

Example 9 with Mixer

use of javax.sound.sampled.Mixer in project ceylon by eclipse.

the class ServicesTestCase method testAudioMixerServices.

@Test
@Ignore("See https://github.com/ceylon/ceylon/issues/4856")
public void testAudioMixerServices() throws Throwable {
    Mixer mixer = AudioSystem.getMixer(null);
    Mixer.Info[] mixers = AudioSystem.getMixerInfo();
    Type[] fileTypes = AudioSystem.getAudioFileTypes();
    boolean plainHasMixer = mixer != null;
    int plainMixerCount = mixers.length;
    int plainFileTypeCount = fileTypes.length;
    System.out.println("Number of mixers/filetypes using plain Java = " + plainMixerCount + "/" + plainFileTypeCount);
    System.setProperty("ceylon.runtime.test.services.audiotest.hasmixer", String.valueOf(plainHasMixer));
    System.setProperty("ceylon.runtime.test.services.audiotest.mixers", String.valueOf(plainMixerCount));
    System.setProperty("ceylon.runtime.test.services.audiotest.filetypes", String.valueOf(plainFileTypeCount));
    JavaArchive module = ShrinkWrap.create(JavaArchive.class, "ceylon.audiotest-1.0.0.car");
    module.addClasses(ceylon.audiotest.$module_.class, ceylon.audiotest.run_.class);
    testArchive(module);
}
Also used : Type(javax.sound.sampled.AudioFileFormat.Type) Mixer(javax.sound.sampled.Mixer) JavaArchive(org.jboss.shrinkwrap.api.spec.JavaArchive) Ignore(org.junit.Ignore) ModulesTest(org.jboss.ceylon.test.modules.ModulesTest) Test(org.junit.Test)

Example 10 with Mixer

use of javax.sound.sampled.Mixer in project smarthome by eclipse.

the class AudioPlayer method run.

/**
 * This method plays the contained AudioSource
 */
@Override
public void run() {
    SourceDataLine line;
    AudioFormat audioFormat = convertAudioFormat(this.audioStream.getFormat());
    if (audioFormat == null) {
        logger.warn("Audio format is unsupported or does not have enough details in order to be played");
        return;
    }
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(audioFormat);
    } catch (Exception e) {
        logger.warn("No line found: {}", e.getMessage());
        logger.info("Available lines are:");
        // get available mixers
        Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
        Mixer mixer = null;
        for (int cnt = 0; cnt < mixerInfo.length; cnt++) {
            mixer = AudioSystem.getMixer(mixerInfo[cnt]);
            Line.Info[] lineInfos = mixer.getSourceLineInfo();
            for (Info lineInfo : lineInfos) {
                logger.info("{}", lineInfo);
            }
        }
        return;
    }
    line.start();
    int nRead = 0;
    // needs to be a multiple of 4 and 6, to support both 16 and 24 bit stereo
    byte[] abData = new byte[65532];
    try {
        while (-1 != nRead) {
            nRead = audioStream.read(abData, 0, abData.length);
            if (nRead >= 0) {
                line.write(abData, 0, nRead);
            }
        }
    } catch (IOException e) {
        logger.error("Error while playing audio: {}", e.getMessage());
        return;
    } finally {
        line.drain();
        line.close();
        try {
            audioStream.close();
        } catch (IOException e) {
        }
    }
}
Also used : Line(javax.sound.sampled.Line) DataLine(javax.sound.sampled.DataLine) SourceDataLine(javax.sound.sampled.SourceDataLine) Mixer(javax.sound.sampled.Mixer) IOException(java.io.IOException) AudioFormat(javax.sound.sampled.AudioFormat) Info(javax.sound.sampled.Line.Info)

Aggregations

Mixer (javax.sound.sampled.Mixer) 10
LineUnavailableException (javax.sound.sampled.LineUnavailableException) 6
DataLine (javax.sound.sampled.DataLine) 5
AudioFormat (javax.sound.sampled.AudioFormat) 4
SourceDataLine (javax.sound.sampled.SourceDataLine) 3
IOException (java.io.IOException) 2
Type (javax.sound.sampled.AudioFileFormat.Type) 2
AudioInputStream (javax.sound.sampled.AudioInputStream) 2
Line (javax.sound.sampled.Line) 2
Port (javax.sound.sampled.Port) 2
URL (java.net.URL) 1
ShortBuffer (java.nio.ShortBuffer) 1
ArrayList (java.util.ArrayList) 1
ScheduledThreadPoolExecutor (java.util.concurrent.ScheduledThreadPoolExecutor) 1
FloatControl (javax.sound.sampled.FloatControl) 1
Info (javax.sound.sampled.Line.Info) 1
TargetDataLine (javax.sound.sampled.TargetDataLine) 1
CanvasFrame (org.bytedeco.javacv.CanvasFrame) 1
FFmpegFrameRecorder (org.bytedeco.javacv.FFmpegFrameRecorder) 1
Frame (org.bytedeco.javacv.Frame) 1
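
Across these examples the recurring pattern is the same: describe the desired line with a DataLine.Info, ask a Mixer (or AudioSystem) for it, open it with an AudioFormat, and handle LineUnavailableException. A minimal sketch of that pattern against the default mixer (format values assumed):

// Minimal sketch of the shared pattern; the format is an assumed example value.
AudioFormat format = new AudioFormat(44100f, 16, 2, true, false);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
Mixer mixer = AudioSystem.getMixer(null);                 // default mixer
try {
    SourceDataLine line = (SourceDataLine) mixer.getLine(info);
    line.open(format);
    line.start();
    // ... write audio with line.write(buffer, 0, length), then drain and close
    line.drain();
    line.close();
} catch (LineUnavailableException e) {
    System.err.println("No matching line available: " + e.getMessage());
}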