Use of javazoom.spi.mpeg.sampled.file.MpegAudioFileFormat in project Minim by ddf.
The class MpegAudioFileReader, method getAudioFileFormat:
/**
* Returns AudioFileFormat from inputstream and medialength.
*/
public AudioFileFormat getAudioFileFormat(InputStream inputStream, long mediaLength) throws UnsupportedAudioFileException, IOException {
system.debug("MpegAudioFileReader.getAudioFileFormat(InputStream inputStream, long mediaLength): begin");
HashMap<String, Object> aff_properties = new HashMap<String, Object>();
HashMap<String, Object> af_properties = new HashMap<String, Object>();
int mLength = (int) mediaLength;
int size = inputStream.available();
PushbackInputStream pis = new PushbackInputStream(inputStream, MARK_LIMIT);
byte[] head = new byte[22];
pis.read(head);
system.debug("InputStream : " + inputStream + " =>" + new String(head));
// Check for WAV, AU, AIFF, APE, and FLAC signatures first, then for Shoutcast (supported) and Ogg (unsupported) streams.
if ((head[0] == 'R') && (head[1] == 'I') && (head[2] == 'F') && (head[3] == 'F') && (head[8] == 'W') && (head[9] == 'A') && (head[10] == 'V') && (head[11] == 'E')) {
system.debug("RIFF/WAV stream found");
int isPCM = ((head[21] << 8) & 0x0000FF00) | ((head[20]) & 0x00000FF);
if (weak == null) {
if (isPCM == 1)
throw new UnsupportedAudioFileException("WAV PCM stream found");
}
} else if ((head[0] == '.') && (head[1] == 's') && (head[2] == 'n') && (head[3] == 'd')) {
system.debug("AU stream found");
if (weak == null)
throw new UnsupportedAudioFileException("AU stream found");
} else if ((head[0] == 'F') && (head[1] == 'O') && (head[2] == 'R') && (head[3] == 'M') && (head[8] == 'A') && (head[9] == 'I') && (head[10] == 'F') && (head[11] == 'F')) {
system.debug("AIFF stream found");
if (weak == null)
throw new UnsupportedAudioFileException("AIFF stream found");
} else if (((head[0] == 'M') | (head[0] == 'm')) && ((head[1] == 'A') | (head[1] == 'a')) && ((head[2] == 'C') | (head[2] == 'c'))) {
system.debug("APE stream found");
if (weak == null)
throw new UnsupportedAudioFileException("APE stream found");
} else if (((head[0] == 'F') | (head[0] == 'f')) && ((head[1] == 'L') | (head[1] == 'l')) && ((head[2] == 'A') | (head[2] == 'a')) && ((head[3] == 'C') | (head[3] == 'c'))) {
system.debug("FLAC stream found");
if (weak == null)
throw new UnsupportedAudioFileException("FLAC stream found");
} else // Shoutcast stream ?
if (((head[0] == 'I') | (head[0] == 'i')) && ((head[1] == 'C') | (head[1] == 'c')) && ((head[2] == 'Y') | (head[2] == 'y'))) {
pis.unread(head);
// Load shoutcast meta data.
loadShoutcastInfo(pis, aff_properties);
} else // Ogg stream ?
if (((head[0] == 'O') | (head[0] == 'o')) && ((head[1] == 'G') | (head[1] == 'g')) && ((head[2] == 'G') | (head[2] == 'g'))) {
system.debug("Ogg stream found");
if (weak == null)
throw new UnsupportedAudioFileException("Ogg stream found");
} else // No, so pushback.
{
pis.unread(head);
}
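// MPEG frame parsing starts below; for plain MP3 data the 22 sniffed bytes were
// pushed back above, and for Shoutcast streams the ICY headers have already been consumed.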
// MPEG header info.
int nVersion = AudioSystem.NOT_SPECIFIED;
int nLayer = AudioSystem.NOT_SPECIFIED;
// int nSFIndex = AudioSystem.NOT_SPECIFIED;
int nMode = AudioSystem.NOT_SPECIFIED;
int FrameSize = AudioSystem.NOT_SPECIFIED;
// int nFrameSize = AudioSystem.NOT_SPECIFIED;
int nFrequency = AudioSystem.NOT_SPECIFIED;
int nTotalFrames = AudioSystem.NOT_SPECIFIED;
float FrameRate = AudioSystem.NOT_SPECIFIED;
int BitRate = AudioSystem.NOT_SPECIFIED;
int nChannels = AudioSystem.NOT_SPECIFIED;
int nHeader = AudioSystem.NOT_SPECIFIED;
int nTotalMS = AudioSystem.NOT_SPECIFIED;
boolean nVBR = false;
AudioFormat.Encoding encoding = null;
try {
Bitstream m_bitstream = new Bitstream(pis);
aff_properties.put("mp3.header.pos", new Integer(m_bitstream.header_pos()));
Header m_header = m_bitstream.readFrame();
if (m_header == null) {
throw new UnsupportedAudioFileException("Unable to read mp3 header");
}
// nVersion = 0 => MPEG2-LSF (Including MPEG2.5), nVersion = 1 => MPEG1
nVersion = m_header.version();
if (nVersion == 2)
aff_properties.put("mp3.version.mpeg", Float.toString(2.5f));
else
aff_properties.put("mp3.version.mpeg", Integer.toString(2 - nVersion));
// nLayer = 1,2,3
nLayer = m_header.layer();
aff_properties.put("mp3.version.layer", Integer.toString(nLayer));
// nSFIndex = m_header.sample_frequency();
nMode = m_header.mode();
aff_properties.put("mp3.mode", new Integer(nMode));
nChannels = nMode == 3 ? 1 : 2;
aff_properties.put("mp3.channels", new Integer(nChannels));
nVBR = m_header.vbr();
af_properties.put("vbr", new Boolean(nVBR));
aff_properties.put("mp3.vbr", new Boolean(nVBR));
aff_properties.put("mp3.vbr.scale", new Integer(m_header.vbr_scale()));
FrameSize = m_header.calculate_framesize();
aff_properties.put("mp3.framesize.bytes", new Integer(FrameSize));
if (FrameSize < 0) {
throw new UnsupportedAudioFileException("Invalid FrameSize : " + FrameSize);
}
nFrequency = m_header.frequency();
aff_properties.put("mp3.frequency.hz", new Integer(nFrequency));
FrameRate = (float) ((1.0 / (m_header.ms_per_frame())) * 1000.0);
aff_properties.put("mp3.framerate.fps", new Float(FrameRate));
if (FrameRate < 0) {
throw new UnsupportedAudioFileException("Invalid FrameRate : " + FrameRate);
}
if (mLength != AudioSystem.NOT_SPECIFIED) {
aff_properties.put("mp3.length.bytes", new Integer(mLength));
nTotalFrames = m_header.max_number_of_frames(mLength);
aff_properties.put("mp3.length.frames", new Integer(nTotalFrames));
}
BitRate = m_header.bitrate();
af_properties.put("bitrate", new Integer(BitRate));
aff_properties.put("mp3.bitrate.nominal.bps", new Integer(BitRate));
nHeader = m_header.getSyncHeader();
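// sm_aEncodings is a static table indexed by MPEG version and (layer - 1);
// it yields the MpegEncoding constant that matches this stream.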
encoding = sm_aEncodings[nVersion][nLayer - 1];
aff_properties.put("mp3.version.encoding", encoding.toString());
if (mLength != AudioSystem.NOT_SPECIFIED) {
nTotalMS = Math.round(m_header.total_ms(mLength));
aff_properties.put("duration", new Long((long) nTotalMS * 1000L));
}
aff_properties.put("mp3.copyright", new Boolean(m_header.copyright()));
aff_properties.put("mp3.original", new Boolean(m_header.original()));
aff_properties.put("mp3.crc", new Boolean(m_header.checksums()));
aff_properties.put("mp3.padding", new Boolean(m_header.padding()));
InputStream id3v2 = m_bitstream.getRawID3v2();
if (id3v2 != null) {
aff_properties.put("mp3.id3tag.v2", id3v2);
parseID3v2Frames(id3v2, aff_properties);
}
if (TDebug.TraceAudioFileReader)
TDebug.out(m_header.toString());
} catch (Exception e) {
system.debug("not a MPEG stream: " + e.toString());
throw new UnsupportedAudioFileException("not a MPEG stream: " + e.toString());
}
// Deeper checks ?
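// In the 32-bit frame sync header, bits 19-20 are the MPEG version ID (binary 01 is
// reserved) and bits 10-11 the sampling-rate index (binary 11 is reserved); a reserved
// value means this is not valid MPEG audio.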
int cVersion = (nHeader >> 19) & 0x3;
if (cVersion == 1) {
system.debug("not a MPEG stream: wrong version");
throw new UnsupportedAudioFileException("not a MPEG stream: wrong version");
}
int cSFIndex = (nHeader >> 10) & 0x3;
if (cSFIndex == 3) {
system.debug("not a MPEG stream: wrong sampling rate");
throw new UnsupportedAudioFileException("not a MPEG stream: wrong sampling rate");
}
// Look up for ID3v1 tag
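// When the whole file is available (available() equals the reported media length),
// skip to the last 128 bytes and check for the "TAG" marker of an ID3v1 tag.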
if ((size == mediaLength) && (mediaLength != AudioSystem.NOT_SPECIFIED)) {
// FileInputStream fis = (FileInputStream) inputStream;
byte[] id3v1 = new byte[128];
int toSkip = inputStream.available() - id3v1.length;
if (toSkip > 0) {
inputStream.skip(inputStream.available() - id3v1.length);
}
inputStream.read(id3v1, 0, id3v1.length);
if ((id3v1[0] == 'T') && (id3v1[1] == 'A') && (id3v1[2] == 'G')) {
parseID3v1Frames(id3v1, aff_properties);
}
}
AudioFormat format = new MpegAudioFormat(encoding, (float) nFrequency,
AudioSystem.NOT_SPECIFIED, // SampleSizeInBits - the size of a sample
nChannels, // Channels - the number of channels
-1, // FrameSize - the number of bytes in each frame
FrameRate, // FrameRate - the number of frames played or recorded per second
true, af_properties);
return new MpegAudioFileFormat(MpegFileFormatType.MP3, format, nTotalFrames, mLength, aff_properties);
}
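For context, a minimal usage sketch follows; it is not part of the Minim source. It assumes the MP3SPI classes shown above are on the classpath and that MpegAudioFileReader exposes a public no-argument constructor, as the stock JavaZoom MP3SPI does; the InspectMp3 class and the command-line argument handling are made up for illustration.

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import javax.sound.sampled.AudioFileFormat;
import javazoom.spi.mpeg.sampled.file.MpegAudioFileReader;

public class InspectMp3 {
    public static void main(String[] args) throws Exception {
        File file = new File(args[0]);
        MpegAudioFileReader reader = new MpegAudioFileReader();
        try (InputStream in = new FileInputStream(file)) {
            // Passing the file length as mediaLength lets the reader fill in
            // "mp3.length.frames" and "duration" in the returned properties.
            AudioFileFormat fmt = reader.getAudioFileFormat(in, file.length());
            System.out.println("encoding    : " + fmt.getFormat().getEncoding());
            System.out.println("sample rate : " + fmt.getFormat().getSampleRate());
            System.out.println("channels    : " + fmt.getFormat().getChannels());
            System.out.println("duration us : " + fmt.properties().get("duration"));
        }
    }
}

In normal use, code would go through AudioSystem.getAudioFileFormat(...) and let the service-provider mechanism dispatch to this reader; calling it directly is shown here only to make the mediaLength parameter and the returned properties visible.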