
Example 26 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains: the class EmergencySoundbank, method newSimpleFFTSample.

public static SF2Sample newSimpleFFTSample(SF2Soundbank sf2, String name, double[] data, double base, int fadeuptime) {
    int fftsize = data.length / 2;
    AudioFormat format = new AudioFormat(44100, 16, 1, true, false);
    double basefreq = (base / fftsize) * format.getSampleRate() * 0.5;
    randomPhase(data);
    ifft(data);
    data = realPart(data);
    normalize(data, 0.9);
    float[] fdata = toFloat(data);
    fdata = loopExtend(fdata, fdata.length + 512);
    fadeUp(fdata, fadeuptime);
    byte[] bdata = toBytes(fdata, format);
    /*
     * Create SoundFont2 sample.
     */
    SF2Sample sample = new SF2Sample(sf2);
    sample.setName(name);
    sample.setData(bdata);
    sample.setStartLoop(256);
    sample.setEndLoop(fftsize + 256);
    sample.setSampleRate((long) format.getSampleRate());
    double orgnote = (69 + 12) + (12 * Math.log(basefreq / 440.0) / Math.log(2));
    sample.setOriginalPitch((int) orgnote);
    sample.setPitchCorrection((byte) (-(orgnote - (int) orgnote) * 100.0));
    sf2.addResource(sample);
    return sample;
}
Also used : AudioFormat(javax.sound.sampled.AudioFormat)
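
The original-pitch computation above converts the sample's base frequency into a MIDI key number (69 corresponds to A4 at 440 Hz, and the extra +12 shifts it up one octave), with the remaining fraction of a semitone stored as a negative pitch correction in cents. A minimal stand-alone sketch of that mapping; the basefreq value here is just an illustrative input, not one taken from the example:

public class PitchMapping {
    public static void main(String[] args) {
        double basefreq = 523.25; // illustrative base frequency in Hz (roughly C5)
        // Same formula as in newSimpleFFTSample: 69 = A4 (440 Hz), +12 = one octave up
        double orgnote = (69 + 12) + (12 * Math.log(basefreq / 440.0) / Math.log(2));
        int originalPitch = (int) orgnote;
        // Fractional remainder of a semitone, expressed as negative cents
        byte pitchCorrection = (byte) (-(orgnote - originalPitch) * 100.0);
        System.out.println("originalPitch = " + originalPitch
                + ", pitchCorrection = " + pitchCorrection + " cents");
    }
}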

Example 27 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains: the class EmergencySoundbank, method newSimpleDrumSample.

public static SF2Sample newSimpleDrumSample(SF2Soundbank sf2, String name, double[] data) {
    int fftsize = data.length;
    AudioFormat format = new AudioFormat(44100, 16, 1, true, false);
    byte[] bdata = toBytes(toFloat(realPart(data)), format);
    /*
     * Create SoundFont2 sample.
     */
    SF2Sample sample = new SF2Sample(sf2);
    sample.setName(name);
    sample.setData(bdata);
    sample.setStartLoop(256);
    sample.setEndLoop(fftsize + 256);
    sample.setSampleRate((long) format.getSampleRate());
    sample.setOriginalPitch(60);
    sf2.addResource(sample);
    return sample;
}
Also used : AudioFormat(javax.sound.sampled.AudioFormat)
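
Both helpers hand their float data to a toBytes(float[], AudioFormat) utility before storing it on the sample. The sketch below shows what such a conversion typically looks like for the 16-bit signed little-endian mono format used in these examples; it is an illustrative reimplementation under that assumption, not the project's actual helper:

public class Pcm16Encoder {
    // Convert normalized float samples (-1.0 .. 1.0) into 16-bit signed little-endian PCM,
    // matching the AudioFormat(44100, 16, 1, true, false) used in the examples above.
    static byte[] floatsToPcm16LittleEndian(float[] samples) {
        byte[] out = new byte[samples.length * 2];
        for (int i = 0; i < samples.length; i++) {
            float s = Math.max(-1.0f, Math.min(1.0f, samples[i])); // clamp to the valid range
            int v = (int) (s * 32767.0f);                          // scale to signed 16 bits
            out[2 * i] = (byte) (v & 0xFF);                        // low byte first (little-endian)
            out[2 * i + 1] = (byte) ((v >> 8) & 0xFF);             // high byte second
        }
        return out;
    }

    public static void main(String[] args) {
        byte[] pcm = floatsToPcm16LittleEndian(new float[] { 0.0f, 0.5f, -0.5f, 1.0f });
        System.out.println(pcm.length + " bytes of PCM data");
    }
}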

Example 28 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains: the class ModelByteBufferWavetable, method openStream.

public AudioFloatInputStream openStream() {
    if (buffer == null)
        return null;
    if (format == null) {
        InputStream is = buffer.getInputStream();
        AudioInputStream ais = null;
        try {
            ais = AudioSystem.getAudioInputStream(is);
        } catch (Exception e) {
            // Could not parse the buffer as an audio stream; signal failure to the caller.
            return null;
        }
        return AudioFloatInputStream.getInputStream(ais);
    }
    if (buffer.array() == null) {
        return AudioFloatInputStream.getInputStream(new AudioInputStream(
                buffer.getInputStream(), format,
                buffer.capacity() / format.getFrameSize()));
    }
    if (buffer8 != null) {
        if (format.getEncoding().equals(Encoding.PCM_SIGNED) || format.getEncoding().equals(Encoding.PCM_UNSIGNED)) {
            // The buffer8 data adds one extra byte per sample, so the derived format is
            // 8 bits wider and one byte per channel larger per frame.
            InputStream is = new Buffer8PlusInputStream();
            AudioFormat format2 = new AudioFormat(format.getEncoding(),
                    format.getSampleRate(),
                    format.getSampleSizeInBits() + 8,
                    format.getChannels(),
                    format.getFrameSize() + format.getChannels(),
                    format.getFrameRate(),
                    format.isBigEndian());
            AudioInputStream ais = new AudioInputStream(is, format2,
                    buffer.capacity() / format.getFrameSize());
            return AudioFloatInputStream.getInputStream(ais);
        }
    }
    return AudioFloatInputStream.getInputStream(format, buffer.array(),
            (int) buffer.arrayOffset(), (int) buffer.capacity());
}
Also used : AudioInputStream(javax.sound.sampled.AudioInputStream) InputStream(java.io.InputStream) AudioFormat(javax.sound.sampled.AudioFormat) IOException(java.io.IOException)
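
The second branch wraps the raw buffer in a javax.sound.sampled.AudioInputStream whose length is given in frames rather than bytes. A small stand-alone sketch of the same idea using only the public API; the byte array and format below are illustrative, not data from the wavetable:

import java.io.ByteArrayInputStream;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;

public class BufferToStream {
    public static void main(String[] args) throws Exception {
        AudioFormat format = new AudioFormat(44100, 16, 1, true, false);
        byte[] pcm = new byte[44100 * format.getFrameSize()]; // one second of silence (illustrative)
        // The stream length is specified in frames, so divide the byte length by the frame size.
        AudioInputStream ais = new AudioInputStream(
                new ByteArrayInputStream(pcm), format, pcm.length / format.getFrameSize());
        System.out.println("frames = " + ais.getFrameLength());
        ais.close();
    }
}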

Example 29 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains: the class DLSSoundbank, method readWaveChunk.

private void readWaveChunk(RIFFReader riff) throws IOException {
    DLSSample sample = new DLSSample(this);
    while (riff.hasNextChunk()) {
        RIFFReader chunk = riff.nextChunk();
        String format = chunk.getFormat();
        if (format.equals("LIST")) {
            if (chunk.getType().equals("INFO")) {
                readWaveInfoChunk(sample, chunk);
            }
        } else {
            if (format.equals("dlid")) {
                sample.guid = new byte[16];
                chunk.readFully(sample.guid);
            }
            if (format.equals("fmt ")) {
                int sampleformat = chunk.readUnsignedShort();
                if (sampleformat != 1 && sampleformat != 3) {
                    throw new RIFFInvalidDataException("Only PCM samples are supported!");
                }
                int channels = chunk.readUnsignedShort();
                long samplerate = chunk.readUnsignedInt();
                // average bytes per second (unused)
                /* long framerate = */ chunk.readUnsignedInt();
                // block align (frame size in bytes)
                int framesize = chunk.readUnsignedShort();
                int bits = chunk.readUnsignedShort();
                AudioFormat audioformat = null;
                if (sampleformat == 1) {
                    if (bits == 8) {
                        audioformat = new AudioFormat(Encoding.PCM_UNSIGNED, samplerate, bits, channels, framesize, samplerate, false);
                    } else {
                        audioformat = new AudioFormat(Encoding.PCM_SIGNED, samplerate, bits, channels, framesize, samplerate, false);
                    }
                }
                if (sampleformat == 3) {
                    audioformat = new AudioFormat(Encoding.PCM_FLOAT, samplerate, bits, channels, framesize, samplerate, false);
                }
                sample.format = audioformat;
            }
            if (format.equals("data")) {
                if (largeFormat) {
                    sample.setData(new ModelByteBuffer(sampleFile, chunk.getFilePointer(), chunk.available()));
                } else {
                    byte[] buffer = new byte[chunk.available()];
                    // Store the array now; it is filled by the read loop below
                    // (setData keeps a reference to the same array).
                    sample.setData(buffer);
                    int read = 0;
                    int avail = chunk.available();
                    while (read != avail) {
                        if (avail - read > 65536) {
                            chunk.readFully(buffer, read, 65536);
                            read += 65536;
                        } else {
                            chunk.readFully(buffer, read, avail - read);
                            read = avail;
                        }
                    }
                }
            }
            if (format.equals("wsmp")) {
                sample.sampleoptions = new DLSSampleOptions();
                readWsmpChunk(sample.sampleoptions, chunk);
            }
        }
    }
    samples.add(sample);
}
Also used : AudioFormat(javax.sound.sampled.AudioFormat)
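
The "fmt " branch reads the classic WAVE format fields (format tag, channels, sample rate, byte rate, block align, bits per sample) and turns them into an AudioFormat. Below is a stand-alone sketch of that parse over a raw little-endian fmt payload using only the JDK; the sixteen-byte buffer is an illustrative 44.1 kHz, 16-bit, stereo PCM header, not data produced by the soundbank reader:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import javax.sound.sampled.AudioFormat;

public class FmtChunkSketch {
    public static void main(String[] args) {
        // Illustrative fmt payload: PCM (1), 2 channels, 44100 Hz, byte rate, block align 4, 16 bits
        ByteBuffer fmt = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
        fmt.putShort((short) 1).putShort((short) 2).putInt(44100)
           .putInt(44100 * 4).putShort((short) 4).putShort((short) 16);
        fmt.flip();

        int formatTag  = fmt.getShort() & 0xFFFF;
        int channels   = fmt.getShort() & 0xFFFF;
        int sampleRate = fmt.getInt();
        fmt.getInt();                               // average bytes per second (unused, as above)
        int frameSize  = fmt.getShort() & 0xFFFF;   // block align
        int bits       = fmt.getShort() & 0xFFFF;

        // Mirror the encoding choice in readWaveChunk: float for tag 3, unsigned for 8-bit PCM.
        AudioFormat.Encoding enc = (formatTag == 3) ? AudioFormat.Encoding.PCM_FLOAT
                : (bits == 8 ? AudioFormat.Encoding.PCM_UNSIGNED : AudioFormat.Encoding.PCM_SIGNED);
        AudioFormat format = new AudioFormat(enc, sampleRate, bits, channels, frameSize, sampleRate, false);
        System.out.println(format);
    }
}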

Example 30 with AudioFormat

Use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains: the class PCMtoPCMCodec, method getOutputFormats.

/**
 * Obtains the set of output formats supported by the codec
 * given a particular input format.
 * If no output formats are supported for this input format,
 * returns an array of length 0.
 * @return array of supported output formats.
 */
private AudioFormat[] getOutputFormats(AudioFormat inputFormat) {
    Vector formats = new Vector();
    AudioFormat format;
    int sampleSize = inputFormat.getSampleSizeInBits();
    boolean isBigEndian = inputFormat.isBigEndian();
    if (sampleSize == 8) {
        if (AudioFormat.Encoding.PCM_SIGNED.equals(inputFormat.getEncoding())) {
            format = new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), false);
            formats.addElement(format);
        }
        if (AudioFormat.Encoding.PCM_UNSIGNED.equals(inputFormat.getEncoding())) {
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), false);
            formats.addElement(format);
        }
    } else if (sampleSize == 16) {
        if (AudioFormat.Encoding.PCM_SIGNED.equals(inputFormat.getEncoding()) && isBigEndian) {
            format = new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), true);
            formats.addElement(format);
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), false);
            formats.addElement(format);
            format = new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), false);
            formats.addElement(format);
        }
        if (AudioFormat.Encoding.PCM_UNSIGNED.equals(inputFormat.getEncoding()) && isBigEndian) {
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), true);
            formats.addElement(format);
            format = new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), false);
            formats.addElement(format);
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), false);
            formats.addElement(format);
        }
        if (AudioFormat.Encoding.PCM_SIGNED.equals(inputFormat.getEncoding()) && !isBigEndian) {
            format = new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), false);
            formats.addElement(format);
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), true);
            formats.addElement(format);
            format = new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), true);
            formats.addElement(format);
        }
        if (AudioFormat.Encoding.PCM_UNSIGNED.equals(inputFormat.getEncoding()) && !isBigEndian) {
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), false);
            formats.addElement(format);
            format = new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), true);
            formats.addElement(format);
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, inputFormat.getSampleRate(), inputFormat.getSampleSizeInBits(), inputFormat.getChannels(), inputFormat.getFrameSize(), inputFormat.getFrameRate(), true);
            formats.addElement(format);
        }
    }
    AudioFormat[] formatArray;
    synchronized (formats) {
        formatArray = new AudioFormat[formats.size()];
        for (int i = 0; i < formatArray.length; i++) {
            formatArray[i] = (AudioFormat) (formats.elementAt(i));
        }
    }
    return formatArray;
}
Also used : AudioFormat(javax.sound.sampled.AudioFormat) Vector(java.util.Vector)
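
In practice this codec is reached through AudioSystem rather than called directly: getOutputFormats() determines which target formats AudioSystem will offer for a given source. A short sketch of requesting one of the conversions it advertises via the public API; the source format and silent buffer are illustrative:

import java.io.ByteArrayInputStream;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class SignednessConversion {
    public static void main(String[] args) throws Exception {
        AudioFormat signedLE = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                44100f, 16, 1, 2, 44100f, false);
        AudioFormat unsignedBE = new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED,
                44100f, 16, 1, 2, 44100f, true);

        byte[] silence = new byte[44100 * 2]; // one second of 16-bit mono silence (illustrative)
        AudioInputStream source = new AudioInputStream(
                new ByteArrayInputStream(silence), signedLE, silence.length / 2);

        // Whether this conversion is offered depends on the codec's getOutputFormats().
        if (AudioSystem.isConversionSupported(unsignedBE, signedLE)) {
            AudioInputStream converted = AudioSystem.getAudioInputStream(unsignedBE, source);
            System.out.println("Converted format: " + converted.getFormat());
            converted.close();
        }
    }
}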

Aggregations (classes appearing alongside AudioFormat across all examples, with occurrence counts)

AudioFormat (javax.sound.sampled.AudioFormat): 74
AudioInputStream (javax.sound.sampled.AudioInputStream): 32
IOException (java.io.IOException): 12
InputStream (java.io.InputStream): 12
UnsupportedAudioFileException (javax.sound.sampled.UnsupportedAudioFileException): 11
AudioFileFormat (javax.sound.sampled.AudioFileFormat): 10
SourceDataLine (javax.sound.sampled.SourceDataLine): 8
ByteArrayInputStream (java.io.ByteArrayInputStream): 7
MpegAudioFormat (javazoom.spi.mpeg.sampled.file.MpegAudioFormat): 7
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 6
DataInputStream (java.io.DataInputStream): 6
File (java.io.File): 6
BufferedInputStream (java.io.BufferedInputStream): 5
FileInputStream (java.io.FileInputStream): 5
Vector (java.util.Vector): 5
DataLine (javax.sound.sampled.DataLine): 5
LineUnavailableException (javax.sound.sampled.LineUnavailableException): 5
SequenceInputStream (java.io.SequenceInputStream): 4
AudioMetaData (ddf.minim.AudioMetaData): 3
DataOutputStream (java.io.DataOutputStream): 3