use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
the class AiffFileWriter method getAudioFileFormat.
// -----------------------------------------------------------------------
/**
* Returns the AudioFileFormat describing the file that will be written from this AudioInputStream.
* Throws IllegalArgumentException if the file type is not supported.
*/
private AudioFileFormat getAudioFileFormat(AudioFileFormat.Type type, AudioInputStream stream) {
AudioFormat format = null;
AiffFileFormat fileFormat = null;
AudioFormat.Encoding encoding = AudioFormat.Encoding.PCM_SIGNED;
AudioFormat streamFormat = stream.getFormat();
AudioFormat.Encoding streamEncoding = streamFormat.getEncoding();
float sampleRate;
int sampleSizeInBits;
int channels;
int frameSize;
float frameRate;
int fileSize;
boolean convert8to16 = false;
if (!types[0].equals(type)) {
throw new IllegalArgumentException("File type " + type + " not supported.");
}
if ((AudioFormat.Encoding.ALAW.equals(streamEncoding)) || (AudioFormat.Encoding.ULAW.equals(streamEncoding))) {
if (streamFormat.getSampleSizeInBits() == 8) {
encoding = AudioFormat.Encoding.PCM_SIGNED;
sampleSizeInBits = 16;
convert8to16 = true;
} else {
// can't convert non-8-bit ALAW,ULAW
throw new IllegalArgumentException("Encoding " + streamEncoding + " supported only for 8-bit data.");
}
} else if (streamFormat.getSampleSizeInBits() == 8) {
encoding = AudioFormat.Encoding.PCM_UNSIGNED;
sampleSizeInBits = 8;
} else {
encoding = AudioFormat.Encoding.PCM_SIGNED;
sampleSizeInBits = streamFormat.getSampleSizeInBits();
}
format = new AudioFormat(encoding, streamFormat.getSampleRate(), sampleSizeInBits, streamFormat.getChannels(), streamFormat.getFrameSize(), streamFormat.getFrameRate(), true); // AIFF is big endian
if (stream.getFrameLength() != AudioSystem.NOT_SPECIFIED) {
if (convert8to16) {
fileSize = (int) stream.getFrameLength() * streamFormat.getFrameSize() * 2 + AiffFileFormat.AIFF_HEADERSIZE;
} else {
fileSize = (int) stream.getFrameLength() * streamFormat.getFrameSize() + AiffFileFormat.AIFF_HEADERSIZE;
}
} else {
fileSize = AudioSystem.NOT_SPECIFIED;
}
fileFormat = new AiffFileFormat(AudioFileFormat.Type.AIFF, fileSize, format, (int) stream.getFrameLength());
return fileFormat;
}
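For context, applications reach a provider method like this one indirectly through AudioSystem. Below is a minimal usage sketch under that assumption; the file names are placeholders, and the input may be any stream the AIFF writer accepts (8-bit A-law/u-law or linear PCM).

import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class WriteAiffSketch {
    public static void main(String[] args) throws Exception {
        File in = new File("input.wav");     // placeholder source file
        File out = new File("output.aiff");  // placeholder destination
        AudioInputStream ais = AudioSystem.getAudioInputStream(in);
        try {
            // AudioSystem delegates to an installed AudioFileWriter (AiffFileWriter here),
            // which validates the stream via getAudioFileFormat(type, stream) and throws
            // IllegalArgumentException for unsupported combinations.
            if (AudioSystem.isFileTypeSupported(AudioFileFormat.Type.AIFF, ais)) {
                AudioSystem.write(ais, AudioFileFormat.Type.AIFF, out);
            }
        } finally {
            ais.close();
        }
    }
}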
use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
the class AiffFileReader method getCOMM.
//--------------------------------------------------------------------
private AudioFileFormat getCOMM(InputStream is, boolean doReset) throws UnsupportedAudioFileException, IOException {
DataInputStream dis = new DataInputStream(is);
if (doReset) {
dis.mark(MAX_READ_LENGTH);
}
// assumes a stream at the beginning of the file which has already
// passed the magic number test...
// leaves the input stream at the beginning of the audio data
int fileRead = 0;
int dataLength = 0;
AudioFormat format = null;
// Read the magic number
int magic = dis.readInt();
// $$fb: fix for 4369044: javax.sound.sampled.AudioSystem.getAudioInputStream() works wrong with Cp037
if (magic != AiffFileFormat.AIFF_MAGIC) {
// not AIFF, throw exception
if (doReset) {
dis.reset();
}
throw new UnsupportedAudioFileException("not an AIFF file");
}
int length = dis.readInt();
int iffType = dis.readInt();
fileRead += 12;
int totallength;
if (length <= 0) {
length = AudioSystem.NOT_SPECIFIED;
totallength = AudioSystem.NOT_SPECIFIED;
} else {
totallength = length + 8;
}
// Is this an AIFC or just plain AIFF file.
boolean aifc = false;
// $$fb: fix for 4369044: javax.sound.sampled.AudioSystem.getAudioInputStream() works wrong with Cp037
if (iffType == AiffFileFormat.AIFC_MAGIC) {
aifc = true;
}
// Loop through the AIFF chunks until
// we get to the SSND chunk.
boolean ssndFound = false;
while (!ssndFound) {
// Read the chunk name
int chunkName = dis.readInt();
int chunkLen = dis.readInt();
fileRead += 8;
int chunkRead = 0;
// Switch on the chunk name.
switch(chunkName) {
case AiffFileFormat.FVER_MAGIC:
// Ignore format version for now.
break;
case AiffFileFormat.COMM_MAGIC:
// $$fb: fix for 4399551: Repost of bug candidate: cannot replay aif file (Review ID: 108108)
if ((!aifc && chunkLen < 18) || (aifc && chunkLen < 22)) {
throw new UnsupportedAudioFileException("Invalid AIFF/COMM chunksize");
}
// Read header info.
int channels = dis.readUnsignedShort();
if (channels <= 0) {
throw new UnsupportedAudioFileException("Invalid number of channels");
}
// numSampleFrames
dis.readInt();
int sampleSizeInBits = dis.readUnsignedShort();
if (sampleSizeInBits < 1 || sampleSizeInBits > 32) {
throw new UnsupportedAudioFileException("Invalid AIFF/COMM sampleSize");
}
float sampleRate = (float) read_ieee_extended(dis);
chunkRead += (2 + 4 + 2 + 10);
// If this is not AIFC then we assume it's
// a linearly encoded file.
AudioFormat.Encoding encoding = AudioFormat.Encoding.PCM_SIGNED;
if (aifc) {
int enc = dis.readInt();
chunkRead += 4;
switch(enc) {
case AiffFileFormat.AIFC_PCM:
encoding = AudioFormat.Encoding.PCM_SIGNED;
break;
case AiffFileFormat.AIFC_ULAW:
encoding = AudioFormat.Encoding.ULAW;
// Java Sound convention
sampleSizeInBits = 8;
break;
default:
throw new UnsupportedAudioFileException("Invalid AIFF encoding");
}
}
int frameSize = calculatePCMFrameSize(sampleSizeInBits, channels);
//$fb what's that ??
//if (sampleSizeInBits == 8) {
// encoding = AudioFormat.Encoding.PCM_SIGNED;
//}
format = new AudioFormat(encoding, sampleRate, sampleSizeInBits, channels, frameSize, sampleRate, true);
break;
case AiffFileFormat.SSND_MAGIC:
// Data chunk.
// we are getting *weird* numbers for chunkLen sometimes;
// this really should be the size of the data chunk....
int dataOffset = dis.readInt();
int blocksize = dis.readInt();
chunkRead += 8;
if (chunkLen < length) {
dataLength = chunkLen - chunkRead;
} else {
// $$kk: 11.03.98: this seems dangerous!
dataLength = length - (fileRead + chunkRead);
}
ssndFound = true;
break;
}
// switch
fileRead += chunkRead;
// skip the remainder of this chunk
if (!ssndFound) {
int toSkip = chunkLen - chunkRead;
if (toSkip > 0) {
fileRead += dis.skipBytes(toSkip);
}
}
}
if (format == null) {
throw new UnsupportedAudioFileException("missing COMM chunk");
}
AudioFileFormat.Type type = aifc ? AudioFileFormat.Type.AIFC : AudioFileFormat.Type.AIFF;
return new AiffFileFormat(type, totallength, format, dataLength / format.getFrameSize());
}
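The COMM chunk stores the sample rate as an 80-bit IEEE 754 extended-precision value, which the helper read_ieee_extended (not shown above) decodes. The following is an illustrative sketch of that decode, not the JDK's implementation: 1 sign bit, a 15-bit exponent with bias 16383, and a 64-bit mantissa with an explicit integer bit.

import java.io.DataInputStream;
import java.io.IOException;

final class IeeeExtendedSketch {
    // Decodes an 80-bit IEEE extended value such as the COMM sample-rate field.
    static double readIeeeExtended(DataInputStream dis) throws IOException {
        int signAndExponent = dis.readUnsignedShort();                    // 2 bytes: sign + exponent
        long mantissa = ((long) dis.readInt() << 32)
                | (dis.readInt() & 0xFFFFFFFFL);                          // 8 bytes: mantissa
        int sign = (signAndExponent & 0x8000) != 0 ? -1 : 1;
        int exponent = signAndExponent & 0x7FFF;
        if (exponent == 0 && mantissa == 0L) {
            return 0.0;
        }
        // Treat the mantissa as an unsigned 64-bit integer and rescale:
        // value = mantissa * 2^(exponent - 16383 - 63).
        double m = (mantissa >= 0L)
                ? (double) mantissa
                : ((double) (mantissa >>> 1)) * 2.0 + (mantissa & 1L);
        return sign * Math.scalb(m, exponent - 16383 - 63);
    }
}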
use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
the class AudioFloatFormatConverter method getTargetFormats.
public AudioFormat[] getTargetFormats(Encoding targetEncoding, AudioFormat sourceFormat) {
if (AudioFloatConverter.getConverter(sourceFormat) == null)
return new AudioFormat[0];
int channels = sourceFormat.getChannels();
ArrayList<AudioFormat> formats = new ArrayList<AudioFormat>();
if (targetEncoding.equals(Encoding.PCM_SIGNED))
formats.add(new AudioFormat(Encoding.PCM_SIGNED, AudioSystem.NOT_SPECIFIED, 8, channels, channels, AudioSystem.NOT_SPECIFIED, false));
if (targetEncoding.equals(Encoding.PCM_UNSIGNED))
formats.add(new AudioFormat(Encoding.PCM_UNSIGNED, AudioSystem.NOT_SPECIFIED, 8, channels, channels, AudioSystem.NOT_SPECIFIED, false));
for (int bits = 16; bits < 32; bits += 8) {
if (targetEncoding.equals(Encoding.PCM_SIGNED)) {
formats.add(new AudioFormat(Encoding.PCM_SIGNED, AudioSystem.NOT_SPECIFIED, bits, channels, channels * bits / 8, AudioSystem.NOT_SPECIFIED, false));
formats.add(new AudioFormat(Encoding.PCM_SIGNED, AudioSystem.NOT_SPECIFIED, bits, channels, channels * bits / 8, AudioSystem.NOT_SPECIFIED, true));
}
if (targetEncoding.equals(Encoding.PCM_UNSIGNED)) {
formats.add(new AudioFormat(Encoding.PCM_UNSIGNED, AudioSystem.NOT_SPECIFIED, bits, channels, channels * bits / 8, AudioSystem.NOT_SPECIFIED, true));
formats.add(new AudioFormat(Encoding.PCM_UNSIGNED, AudioSystem.NOT_SPECIFIED, bits, channels, channels * bits / 8, AudioSystem.NOT_SPECIFIED, false));
}
}
if (targetEncoding.equals(Encoding.PCM_FLOAT)) {
formats.add(new AudioFormat(Encoding.PCM_FLOAT, AudioSystem.NOT_SPECIFIED, 32, channels, channels * 4, AudioSystem.NOT_SPECIFIED, false));
formats.add(new AudioFormat(Encoding.PCM_FLOAT, AudioSystem.NOT_SPECIFIED, 32, channels, channels * 4, AudioSystem.NOT_SPECIFIED, true));
formats.add(new AudioFormat(Encoding.PCM_FLOAT, AudioSystem.NOT_SPECIFIED, 64, channels, channels * 8, AudioSystem.NOT_SPECIFIED, false));
formats.add(new AudioFormat(Encoding.PCM_FLOAT, AudioSystem.NOT_SPECIFIED, 64, channels, channels * 8, AudioSystem.NOT_SPECIFIED, true));
}
return formats.toArray(new AudioFormat[formats.size()]);
}
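Applications normally query conversion targets through AudioSystem rather than calling a FormatConversionProvider directly. A small sketch of that entry point follows; the exact list returned depends on which providers are installed at runtime, and the source format chosen here (32-bit float, stereo) is only an example.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;

public class TargetFormatsSketch {
    public static void main(String[] args) {
        // 32-bit float stereo source, 8 bytes per frame.
        AudioFormat source = new AudioFormat(AudioFormat.Encoding.PCM_FLOAT,
                44100f, 32, 2, 8, 44100f, false);
        // AudioSystem aggregates the results of every installed FormatConversionProvider,
        // including the float converter's getTargetFormats shown above.
        AudioFormat[] targets =
                AudioSystem.getTargetFormats(AudioFormat.Encoding.PCM_SIGNED, source);
        for (AudioFormat f : targets) {
            System.out.println(f);
        }
    }
}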
use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
the class AuFileReader method getAudioFileFormat.
/**
* Obtains the audio file format of the URL provided. The URL must
* point to valid audio file data.
* @param url the URL from which file format information should be
* extracted
* @return an <code>AudioFileFormat</code> object describing the audio file format
* @throws UnsupportedAudioFileException if the URL does not point to valid audio
* file data recognized by the system
* @throws IOException if an I/O exception occurs
*/
public AudioFileFormat getAudioFileFormat(URL url) throws UnsupportedAudioFileException, IOException {
InputStream urlStream = null;
BufferedInputStream bis = null;
AudioFileFormat fileFormat = null;
AudioFormat format = null;
urlStream = url.openStream(); // throws IOException
try {
bis = new BufferedInputStream(urlStream, bisBufferSize);
fileFormat = getAudioFileFormat(bis); // throws UnsupportedAudioFileException
} finally {
urlStream.close();
}
return fileFormat;
}
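For comparison, here is a sketch of the same lookup through the public AudioSystem entry point, which tries each installed AudioFileReader (AuFileReader among them) until one recognizes the data; the URL is a placeholder.

import java.net.URL;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioSystem;

public class UrlFormatSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical URL; any reachable .au resource would do.
        URL url = new URL("http://example.com/sound.au");
        // Throws UnsupportedAudioFileException if no reader recognizes the stream.
        AudioFileFormat fmt = AudioSystem.getAudioFileFormat(url);
        System.out.println(fmt.getType() + ", " + fmt.getFormat());
    }
}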
use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
the class WaveFileReader method getFMT.
//--------------------------------------------------------------------
private AudioFileFormat getFMT(InputStream stream, boolean doReset) throws UnsupportedAudioFileException, IOException {
// assumes stream is rewound
int bytesRead;
int nread = 0;
int fmt;
int length = 0;
int wav_type = 0;
short channels;
long sampleRate;
long avgBytesPerSec;
short blockAlign;
int sampleSizeInBits;
AudioFormat.Encoding encoding = null;
DataInputStream dis = new DataInputStream(stream);
if (doReset) {
dis.mark(MAX_READ_LENGTH);
}
int magic = dis.readInt();
int fileLength = rllong(dis);
int waveMagic = dis.readInt();
int totallength;
if (fileLength <= 0) {
fileLength = AudioSystem.NOT_SPECIFIED;
totallength = AudioSystem.NOT_SPECIFIED;
} else {
totallength = fileLength + 8;
}
if ((magic != WaveFileFormat.RIFF_MAGIC) || (waveMagic != WaveFileFormat.WAVE_MAGIC)) {
// not WAVE, throw UnsupportedAudioFileException
if (doReset) {
dis.reset();
}
throw new UnsupportedAudioFileException("not a WAVE file");
}
// we break out of this loop either by hitting EOF or finding "fmt "
while (true) {
try {
fmt = dis.readInt();
nread += 4;
if (fmt == WaveFileFormat.FMT_MAGIC) {
// we've found the 'fmt' chunk
break;
} else {
// else not 'fmt', skip this chunk
length = rllong(dis);
nread += 4;
if (length % 2 > 0)
length++;
nread += dis.skipBytes(length);
}
} catch (EOFException eof) {
// we've reached the end of the file without finding the 'fmt' chunk
throw new UnsupportedAudioFileException("Not a valid WAV file");
}
}
// Read the format chunk size.
length = rllong(dis);
nread += 4;
// This is the nread position at the end of the format chunk
int endLength = nread + length;
// Read the wave format data out of the format chunk.
// encoding.
wav_type = rlshort(dis);
nread += 2;
if (wav_type == WaveFileFormat.WAVE_FORMAT_PCM)
// if 8-bit, we need PCM_UNSIGNED, below...
encoding = AudioFormat.Encoding.PCM_SIGNED;
else if (wav_type == WaveFileFormat.WAVE_FORMAT_ALAW)
encoding = AudioFormat.Encoding.ALAW;
else if (wav_type == WaveFileFormat.WAVE_FORMAT_MULAW)
encoding = AudioFormat.Encoding.ULAW;
else {
// we don't support any other WAVE formats....
throw new UnsupportedAudioFileException("Not a supported WAV file");
}
// channels
channels = rlshort(dis);
nread += 2;
if (channels <= 0) {
throw new UnsupportedAudioFileException("Invalid number of channels");
}
// sample rate.
sampleRate = rllong(dis);
nread += 4;
// this is the avgBytesPerSec
avgBytesPerSec = rllong(dis);
nread += 4;
// this is blockAlign value
blockAlign = rlshort(dis);
nread += 2;
// this is the PCM-specific value bitsPerSample
sampleSizeInBits = (int) rlshort(dis);
nread += 2;
if (sampleSizeInBits <= 0) {
throw new UnsupportedAudioFileException("Invalid bitsPerSample");
}
// if sampleSizeInBits==8, we need to use PCM_UNSIGNED
if ((sampleSizeInBits == 8) && encoding.equals(AudioFormat.Encoding.PCM_SIGNED))
encoding = AudioFormat.Encoding.PCM_UNSIGNED;
if (length % 2 != 0)
length += 1;
// This fixes #4257986
if (endLength > nread)
nread += dis.skipBytes(endLength - nread);
// we have a format now, so find the "data" chunk
// we break out of this loop either by hitting EOF or finding "data"
// $$kk: if "data" chunk precedes "fmt" chunk we are hosed -- can this legally happen?
nread = 0;
while (true) {
try {
int datahdr = dis.readInt();
nread += 4;
if (datahdr == WaveFileFormat.DATA_MAGIC) {
// we've found the 'data' chunk
break;
} else {
// else not 'data', skip this chunk
int thisLength = rllong(dis);
nread += 4;
if (thisLength % 2 > 0)
thisLength++;
nread += dis.skipBytes(thisLength);
}
} catch (EOFException eof) {
// we've reached the end of the file without finding the 'data' chunk
throw new UnsupportedAudioFileException("Not a valid WAV file");
}
}
// this is the length of the data chunk
int dataLength = rllong(dis);
nread += 4;
// now build the new AudioFileFormat and return
AudioFormat format = new AudioFormat(encoding, (float) sampleRate, sampleSizeInBits, channels, calculatePCMFrameSize(sampleSizeInBits, channels), (float) sampleRate, false);
return new WaveFileFormat(AudioFileFormat.Type.WAVE, totallength, format, dataLength / format.getFrameSize());
}
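To tie the header fields back to the public API, here is a small sketch that reads a WAV file's format and recomputes the PCM frame size; the file name is a placeholder, and the arithmetic shown (bytes per sample, rounded up, times the channel count) is what calculatePCMFrameSize is assumed to compute.

import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;

public class WavHeaderSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder path; any RIFF/WAVE file with a PCM, ALAW or ULAW fmt chunk.
        File wav = new File("clip.wav");
        AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(wav);
        AudioFormat format = fileFormat.getFormat();
        // Bytes per sample (rounded up) times channels, matching the frame size
        // stored in the AudioFormat built by getFMT.
        int frameSize = ((format.getSampleSizeInBits() + 7) / 8) * format.getChannels();
        System.out.println("frame size (bytes): " + frameSize
                + ", frames in file: " + fileFormat.getFrameLength());
    }
}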