Use of javax.sound.sampled.AudioInputStream in project JMRI by JMRI.
The class SoundUtil, method bufferFromFile.
public static byte[] bufferFromFile(String filename,
        float sampleRate, int sampleSizeInBits, int channels,
        boolean signed, boolean bigEndian)
        throws java.io.IOException, javax.sound.sampled.UnsupportedAudioFileException {
    File sourceFile = new File(filename);
    // Get the type of the source file. We need this information
    // later to write the audio data to a file of the same type.
    AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(sourceFile);
    //AudioFileFormat.Type targetFileType = fileFormat.getType();
    AudioFormat audioFormat = fileFormat.getFormat();
    // get desired output format
    AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    // get a conversion stream
    // (Errors not checked yet)
    AudioInputStream stream = AudioSystem.getAudioInputStream(sourceFile);
    AudioInputStream inputAIS = AudioSystem.getAudioInputStream(format, stream);
    // Read the audio data into a memory buffer.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int nBufferSize = BUFFER_LENGTH * audioFormat.getFrameSize();
    byte[] abBuffer = new byte[nBufferSize];
    while (true) {
        if (log.isDebugEnabled()) {
            log.debug("trying to read (bytes): " + abBuffer.length);
        }
        int nBytesRead = inputAIS.read(abBuffer);
        if (log.isDebugEnabled()) {
            log.debug("read (bytes): " + nBytesRead);
        }
        if (nBytesRead == -1) {
            break;
        }
        baos.write(abBuffer, 0, nBytesRead);
    }
    // Create byte array
    byte[] abAudioData = baos.toByteArray();
    return abAudioData;
}
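For orientation, a minimal caller sketch (not taken from JMRI): the file path and target format values below are illustrative assumptions, and SoundUtil is assumed to be imported or on the classpath. Note that BUFFER_LENGTH is a constant of the enclosing class and is not shown in the snippet above.

// Hypothetical usage; the path and format parameters are assumptions.
public class BufferFromFileDemo {
    public static void main(String[] args) throws Exception {
        byte[] pcm = SoundUtil.bufferFromFile(
                "resources/sounds/bell.wav",  // assumed path
                11025.0f,  // sample rate
                8,         // sample size in bits
                1,         // channels (mono)
                true,      // signed
                false);    // little-endian
        System.out.println("Decoded " + pcm.length + " bytes of PCM audio");
    }
}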
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains.
The class ModelByteBufferWavetable, method openStream.
public AudioFloatInputStream openStream() {
    if (buffer == null)
        return null;
    if (format == null) {
        InputStream is = buffer.getInputStream();
        AudioInputStream ais = null;
        try {
            ais = AudioSystem.getAudioInputStream(is);
        } catch (Exception e) {
            //e.printStackTrace();
            return null;
        }
        return AudioFloatInputStream.getInputStream(ais);
    }
    if (buffer.array() == null) {
        return AudioFloatInputStream.getInputStream(new AudioInputStream(
                buffer.getInputStream(), format,
                buffer.capacity() / format.getFrameSize()));
    }
    if (buffer8 != null) {
        if (format.getEncoding().equals(Encoding.PCM_SIGNED)
                || format.getEncoding().equals(Encoding.PCM_UNSIGNED)) {
            InputStream is = new Buffer8PlusInputStream();
            AudioFormat format2 = new AudioFormat(format.getEncoding(),
                    format.getSampleRate(), format.getSampleSizeInBits() + 8,
                    format.getChannels(), format.getFrameSize() + (1 * format.getChannels()),
                    format.getFrameRate(), format.isBigEndian());
            AudioInputStream ais = new AudioInputStream(is, format2,
                    buffer.capacity() / format.getFrameSize());
            return AudioFloatInputStream.getInputStream(ais);
        }
    }
    return AudioFloatInputStream.getInputStream(format, buffer.array(),
            (int) buffer.arrayOffset(), (int) buffer.capacity());
}
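A sketch of how the returned stream might be consumed. This is an assumption-laden fragment, not JDK code: it presumes an existing ModelByteBufferWavetable, and both that class and AudioFloatInputStream live in the internal com.sun.media.sound package, so it only compiles against the JDK's internal sources (java.io.IOException is the only standard import needed).

// Hypothetical helper that drains the wavetable's float stream.
static void drain(ModelByteBufferWavetable wavetable) throws IOException {
    AudioFloatInputStream afis = wavetable.openStream();
    if (afis == null) {
        return;  // no buffer available
    }
    float[] block = new float[512];
    int read;
    // read(float[], int, int) returns the number of float samples read,
    // or -1 at end of stream
    while ((read = afis.read(block, 0, block.length)) != -1) {
        // process 'read' samples in block[0..read)
    }
    afis.close();
}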
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains.
The class WaveFileWriter, method getFileStream.
private InputStream getFileStream(WaveFileFormat waveFileFormat, InputStream audioStream) throws IOException {
    // private method ... assumes audioFileFormat is a supported file type
    // WAVE header fields
    AudioFormat audioFormat = waveFileFormat.getFormat();
    int headerLength = waveFileFormat.getHeaderSize();
    int riffMagic = WaveFileFormat.RIFF_MAGIC;
    int waveMagic = WaveFileFormat.WAVE_MAGIC;
    int fmtMagic = WaveFileFormat.FMT_MAGIC;
    int fmtLength = WaveFileFormat.getFmtChunkSize(waveFileFormat.getWaveType());
    short wav_type = (short) waveFileFormat.getWaveType();
    short channels = (short) audioFormat.getChannels();
    short sampleSizeInBits = (short) audioFormat.getSampleSizeInBits();
    int sampleRate = (int) audioFormat.getSampleRate();
    int frameSizeInBytes = (int) audioFormat.getFrameSize();
    int frameRate = (int) audioFormat.getFrameRate();
    int avgBytesPerSec = channels * sampleSizeInBits * sampleRate / 8;
    short blockAlign = (short) ((sampleSizeInBits / 8) * channels);
    int dataMagic = WaveFileFormat.DATA_MAGIC;
    int dataLength = waveFileFormat.getFrameLength() * frameSizeInBytes;
    int length = waveFileFormat.getByteLength();
    int riffLength = dataLength + headerLength - 8;
    byte[] header = null;
    ByteArrayInputStream headerStream = null;
    ByteArrayOutputStream baos = null;
    DataOutputStream dos = null;
    SequenceInputStream waveStream = null;
    AudioFormat audioStreamFormat = null;
    AudioFormat.Encoding encoding = null;
    InputStream codedAudioStream = audioStream;
    // if audioStream is an AudioInputStream and we need to convert, do it here...
    if (audioStream instanceof AudioInputStream) {
        audioStreamFormat = ((AudioInputStream) audioStream).getFormat();
        encoding = audioStreamFormat.getEncoding();
        if (AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) {
            if (sampleSizeInBits == 8) {
                wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
                // plug in the transcoder to convert from PCM_SIGNED to PCM_UNSIGNED
                codedAudioStream = AudioSystem.getAudioInputStream(
                        new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED,
                                audioStreamFormat.getSampleRate(),
                                audioStreamFormat.getSampleSizeInBits(),
                                audioStreamFormat.getChannels(),
                                audioStreamFormat.getFrameSize(),
                                audioStreamFormat.getFrameRate(), false),
                        (AudioInputStream) audioStream);
            }
        }
        if ((AudioFormat.Encoding.PCM_SIGNED.equals(encoding) && audioStreamFormat.isBigEndian())
                || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) && !audioStreamFormat.isBigEndian())
                || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) && audioStreamFormat.isBigEndian())) {
            if (sampleSizeInBits != 8) {
                wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
                // plug in the transcoder to convert to PCM_SIGNED_LITTLE_ENDIAN
                codedAudioStream = AudioSystem.getAudioInputStream(
                        new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                                audioStreamFormat.getSampleRate(),
                                audioStreamFormat.getSampleSizeInBits(),
                                audioStreamFormat.getChannels(),
                                audioStreamFormat.getFrameSize(),
                                audioStreamFormat.getFrameRate(), false),
                        (AudioInputStream) audioStream);
            }
        }
    }
    // Now push the header into a stream, concat, and return the new SequenceInputStream
    baos = new ByteArrayOutputStream();
    dos = new DataOutputStream(baos);
    // we write in littleendian...
    dos.writeInt(riffMagic);
    dos.writeInt(big2little(riffLength));
    dos.writeInt(waveMagic);
    dos.writeInt(fmtMagic);
    dos.writeInt(big2little(fmtLength));
    dos.writeShort(big2littleShort(wav_type));
    dos.writeShort(big2littleShort(channels));
    dos.writeInt(big2little(sampleRate));
    dos.writeInt(big2little(avgBytesPerSec));
    dos.writeShort(big2littleShort(blockAlign));
    dos.writeShort(big2littleShort(sampleSizeInBits));
    //$$fb 2002-04-16: Fix for 4636355: RIFF audio headers could be _more_ spec compliant
    if (wav_type != WaveFileFormat.WAVE_FORMAT_PCM) {
        // add length 0 for "codec specific data length"
        dos.writeShort(0);
    }
    dos.writeInt(dataMagic);
    dos.writeInt(big2little(dataLength));
    dos.close();
    header = baos.toByteArray();
    headerStream = new ByteArrayInputStream(header);
    waveStream = new SequenceInputStream(headerStream, new NoCloseInputStream(codedAudioStream));
    return (InputStream) waveStream;
}
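The big2little and big2littleShort helpers swap byte order so that DataOutputStream, which always writes big-endian, ends up emitting the little-endian numeric fields a RIFF/WAVE header requires (the four-character magics are written unswapped because they are already in on-disk order). A rough, illustration-only equivalent; the JDK's own helpers live elsewhere in the writer class hierarchy:

// Illustrative byte-order swaps, equivalent in effect to big2little(int)
// and big2littleShort(short) as used above.
static int swap32(int value) {
    return Integer.reverseBytes(value);
}

static short swap16(short value) {
    return Short.reverseBytes(value);
}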
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains.
The class WaveFloatFileReader, method getAudioInputStream.
public AudioInputStream getAudioInputStream(InputStream stream)
        throws UnsupportedAudioFileException, IOException {
    AudioFileFormat format = getAudioFileFormat(stream);
    RIFFReader riffiterator = new RIFFReader(stream);
    if (!riffiterator.getFormat().equals("RIFF"))
        throw new UnsupportedAudioFileException();
    if (!riffiterator.getType().equals("WAVE"))
        throw new UnsupportedAudioFileException();
    while (riffiterator.hasNextChunk()) {
        RIFFReader chunk = riffiterator.nextChunk();
        if (chunk.getFormat().equals("data")) {
            return new AudioInputStream(chunk, format.getFormat(), chunk.getSize());
        }
    }
    throw new UnsupportedAudioFileException();
}
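In normal use this reader is reached through the javax.sound.sampled service-provider mechanism rather than instantiated directly. A small sketch of the usual entry point, with an assumed file name; the stream must support mark/reset, hence the BufferedInputStream wrapper:

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class ReadFloatWavDemo {
    public static void main(String[] args) throws Exception {
        // "float32.wav" is an assumed example file name.
        try (InputStream in = new BufferedInputStream(new FileInputStream("float32.wav"))) {
            AudioInputStream ais = AudioSystem.getAudioInputStream(in);
            System.out.println("format: " + ais.getFormat()
                    + ", frames: " + ais.getFrameLength());
        }
    }
}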
Use of javax.sound.sampled.AudioInputStream in project Minim by ddf.
The class JSBufferedSampleRecorder, method save.
/**
* Saves the audio in the internal buffer to a file using the current settings for
* file type and file name.
*/
public AudioRecordingStream save() {
    if (isRecording()) {
        system.error("You must stop recording before you can write to a file.");
    } else {
        int channels = format.getChannels();
        int length = left.capacity();
        int totalSamples = (buffers.size() / channels) * length;
        FloatSampleBuffer fsb = new FloatSampleBuffer(channels, totalSamples, format.getSampleRate());
        if (channels == 1) {
            for (int i = 0; i < buffers.size(); i++) {
                int offset = i * length;
                FloatBuffer fb = (FloatBuffer) buffers.get(i);
                fb.rewind();
                // copy all the floats in fb to the first channel
                // of fsb, starting at the index offset, and copy
                // the whole FloatBuffer over.
                fb.get(fsb.getChannel(0), offset, length);
            }
        } else {
            for (int i = 0; i < buffers.size(); i += 2) {
                int offset = (i / 2) * length;
                FloatBuffer fbL = (FloatBuffer) buffers.get(i);
                FloatBuffer fbR = (FloatBuffer) buffers.get(i + 1);
                fbL.rewind();
                fbL.get(fsb.getChannel(0), offset, length);
                fbR.rewind();
                fbR.get(fsb.getChannel(1), offset, length);
            }
        }
        int sampleFrames = fsb.getByteArrayBufferSize(format) / format.getFrameSize();
        ByteArrayInputStream bais = new ByteArrayInputStream(fsb.convertToByteArray(format));
        AudioInputStream ais = new AudioInputStream(bais, format, sampleFrames);
        if (AudioSystem.isFileTypeSupported(type, ais)) {
            File out = new File(name);
            try {
                AudioSystem.write(ais, type, out);
            } catch (IOException e) {
                system.error("AudioRecorder.save: Error attempting to save buffer to "
                        + name + "\n" + e.getMessage());
            }
            if (out.length() == 0) {
                system.error("AudioRecorder.save: Error attempting to save buffer to "
                        + name + ", the output file is empty.");
            }
        } else {
            system.error("AudioRecorder.save: Can't write " + type.toString()
                    + " using format " + format.toString() + ".");
        }
    }
    String filePath = filePath();
    AudioInputStream ais = system.getAudioInputStream(filePath);
    SourceDataLine sdl = system.getSourceDataLine(ais.getFormat(), 1024);
    // this is fine because the recording will always be
    // in a raw format (WAV, AU, etc).
    long length = AudioUtils.frames2Millis(ais.getFrameLength(), format);
    BasicMetaData meta = new BasicMetaData(filePath, length, ais.getFrameLength());
    JSPCMAudioRecordingStream recording = new JSPCMAudioRecordingStream(system, meta, ais, sdl, 1024);
    return recording;
}
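For context, this save() is normally reached through Minim's public AudioRecorder wrapper rather than called directly. A sketch of that path in a Processing sketch follows; the output file name is an assumption, and depending on the Minim version createRecorder may take an extra boolean argument to request the buffered recorder that JSBufferedSampleRecorder implements.

// Hypothetical Processing sketch using Minim's public API.
import ddf.minim.*;

Minim minim;
AudioInput in;
AudioRecorder recorder;

void setup() {
  size(200, 200);
  minim = new Minim(this);
  in = minim.getLineIn();
  recorder = minim.createRecorder(in, "myrecording.wav");  // assumed file name
  recorder.beginRecord();
}

void draw() {
  background(0);
}

void keyPressed() {
  recorder.endRecord();
  // ultimately calls SampleRecorder.save(), which the buffered
  // recorder implements as shown above
  recorder.save();
}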