Use of javazoom.spi.mpeg.sampled.file.MpegAudioFormat in project Minim by ddf.
The class MpegAudioFileReader, method getAudioFileFormat.
/**
 * Returns AudioFileFormat from inputstream and medialength.
 * <p>
 * Sniffs the first 22 bytes of the stream to reject known non-MPEG containers
 * (RIFF/WAV, AU, AIFF, APE, FLAC, Ogg), reads Shoutcast (ICY) metadata when
 * present, then parses the first MPEG frame header and fills in the format
 * properties (version, layer, bitrate, channels, duration, ID3 tags, ...).
 *
 * @param inputStream stream positioned at the start of the audio data
 * @param mediaLength total media length in bytes, or AudioSystem.NOT_SPECIFIED
 *                    when unknown (e.g. live streams)
 * @return an MpegAudioFileFormat describing the stream
 * @throws UnsupportedAudioFileException if the stream is not a parseable MPEG stream
 * @throws IOException on read failure
 */
public AudioFileFormat getAudioFileFormat(InputStream inputStream, long mediaLength) throws UnsupportedAudioFileException, IOException {
// NOTE(review): lowercase "system.debug" will not resolve to java.lang.System;
// presumably a trace/logging shim mangled by the code dump (upstream MP3SPI
// uses TDebug.out here) - confirm against the real source.
system.debug("MpegAudioFileReader.getAudioFileFormat(InputStream inputStream, long mediaLength): begin");
// Properties for the AudioFileFormat (aff_) and for the AudioFormat (af_).
HashMap<String, Object> aff_properties = new HashMap<String, Object>();
HashMap<String, Object> af_properties = new HashMap<String, Object>();
int mLength = (int) mediaLength;
// Bytes currently buffered; compared against mediaLength later to decide
// whether the whole stream is in memory and an ID3v1 tail check is possible.
int size = inputStream.available();
// Pushback buffer large enough (MARK_LIMIT) to un-read sniffed bytes so the
// MPEG parser below starts at the true beginning of the data.
PushbackInputStream pis = new PushbackInputStream(inputStream, MARK_LIMIT);
byte[] head = new byte[22];
// NOTE(review): the return value of read() is ignored; a short read would
// leave stale zeros at the end of head[] - presumably acceptable for
// magic-number sniffing, but confirm.
pis.read(head);
system.debug("InputStream : " + inputStream + " =>" + new String(head));
// Next check for Shoutcast (supported) and OGG (unsupported) streams.
// RIFF/WAVE container?
if ((head[0] == 'R') && (head[1] == 'I') && (head[2] == 'F') && (head[3] == 'F') && (head[8] == 'W') && (head[9] == 'A') && (head[10] == 'V') && (head[11] == 'E')) {
system.debug("RIFF/WAV stream found");
// Little-endian 16-bit audio-format tag of the fmt chunk (1 == PCM),
// assuming a canonical WAV header layout - TODO confirm offsets 20..21.
int isPCM = ((head[21] << 8) & 0x0000FF00) | ((head[20]) & 0x00000FF);
// "weak" presumably is a flag (upstream MP3SPI reads the "mp3spi.weak"
// system property) that, when set, skips strict rejection of non-MPEG
// containers - TODO confirm.
if (weak == null) {
if (isPCM == 1)
throw new UnsupportedAudioFileException("WAV PCM stream found");
}
// Sun/NeXT AU container?
} else if ((head[0] == '.') && (head[1] == 's') && (head[2] == 'n') && (head[3] == 'd')) {
system.debug("AU stream found");
if (weak == null)
throw new UnsupportedAudioFileException("AU stream found");
// AIFF container (FORM....AIFF)?
} else if ((head[0] == 'F') && (head[1] == 'O') && (head[2] == 'R') && (head[3] == 'M') && (head[8] == 'A') && (head[9] == 'I') && (head[10] == 'F') && (head[11] == 'F')) {
system.debug("AIFF stream found");
if (weak == null)
throw new UnsupportedAudioFileException("AIFF stream found");
// Monkey's Audio ("MAC", case-insensitive)?
} else if (((head[0] == 'M') | (head[0] == 'm')) && ((head[1] == 'A') | (head[1] == 'a')) && ((head[2] == 'C') | (head[2] == 'c'))) {
system.debug("APE stream found");
if (weak == null)
throw new UnsupportedAudioFileException("APE stream found");
// FLAC ("fLaC", case-insensitive)?
} else if (((head[0] == 'F') | (head[0] == 'f')) && ((head[1] == 'L') | (head[1] == 'l')) && ((head[2] == 'A') | (head[2] == 'a')) && ((head[3] == 'C') | (head[3] == 'c'))) {
system.debug("FLAC stream found");
if (weak == null)
throw new UnsupportedAudioFileException("FLAC stream found");
} else // Shoutcast stream ?
if (((head[0] == 'I') | (head[0] == 'i')) && ((head[1] == 'C') | (head[1] == 'c')) && ((head[2] == 'Y') | (head[2] == 'y'))) {
// Push the sniffed bytes back so the ICY parser sees the full response.
pis.unread(head);
// Load shoutcast meta data.
loadShoutcastInfo(pis, aff_properties);
} else // Ogg stream ?
if (((head[0] == 'O') | (head[0] == 'o')) && ((head[1] == 'G') | (head[1] == 'g')) && ((head[2] == 'G') | (head[2] == 'g'))) {
system.debug("Ogg stream found");
if (weak == null)
throw new UnsupportedAudioFileException("Ogg stream found");
} else // No, so pushback.
{
pis.unread(head);
}
// MPEG header info. All fields start as NOT_SPECIFIED sentinels and are
// filled in from the first decoded frame header below.
int nVersion = AudioSystem.NOT_SPECIFIED;
int nLayer = AudioSystem.NOT_SPECIFIED;
// int nSFIndex = AudioSystem.NOT_SPECIFIED;
int nMode = AudioSystem.NOT_SPECIFIED;
int FrameSize = AudioSystem.NOT_SPECIFIED;
// int nFrameSize = AudioSystem.NOT_SPECIFIED;
int nFrequency = AudioSystem.NOT_SPECIFIED;
int nTotalFrames = AudioSystem.NOT_SPECIFIED;
float FrameRate = AudioSystem.NOT_SPECIFIED;
int BitRate = AudioSystem.NOT_SPECIFIED;
int nChannels = AudioSystem.NOT_SPECIFIED;
int nHeader = AudioSystem.NOT_SPECIFIED;
int nTotalMS = AudioSystem.NOT_SPECIFIED;
boolean nVBR = false;
AudioFormat.Encoding encoding = null;
try {
// Bitstream (JLayer) scans forward to the first frame sync.
Bitstream m_bitstream = new Bitstream(pis);
aff_properties.put("mp3.header.pos", new Integer(m_bitstream.header_pos()));
Header m_header = m_bitstream.readFrame();
if (m_header == null) {
throw new UnsupportedAudioFileException("Unable to read mp3 header");
}
// nVersion = 0 => MPEG2-LSF (Including MPEG2.5), nVersion = 1 => MPEG1
nVersion = m_header.version();
if (nVersion == 2)
aff_properties.put("mp3.version.mpeg", Float.toString(2.5f));
else
// Maps version code 0 -> "2" and 1 -> "1".
aff_properties.put("mp3.version.mpeg", Integer.toString(2 - nVersion));
// nLayer = 1,2,3
nLayer = m_header.layer();
aff_properties.put("mp3.version.layer", Integer.toString(nLayer));
// nSFIndex = m_header.sample_frequency();
nMode = m_header.mode();
aff_properties.put("mp3.mode", new Integer(nMode));
// Mode 3 is single-channel; every other mode is stereo-like (2 channels).
nChannels = nMode == 3 ? 1 : 2;
aff_properties.put("mp3.channels", new Integer(nChannels));
nVBR = m_header.vbr();
af_properties.put("vbr", new Boolean(nVBR));
aff_properties.put("mp3.vbr", new Boolean(nVBR));
aff_properties.put("mp3.vbr.scale", new Integer(m_header.vbr_scale()));
FrameSize = m_header.calculate_framesize();
aff_properties.put("mp3.framesize.bytes", new Integer(FrameSize));
if (FrameSize < 0) {
throw new UnsupportedAudioFileException("Invalid FrameSize : " + FrameSize);
}
nFrequency = m_header.frequency();
aff_properties.put("mp3.frequency.hz", new Integer(nFrequency));
// Frames per second derived from the per-frame duration.
FrameRate = (float) ((1.0 / (m_header.ms_per_frame())) * 1000.0);
aff_properties.put("mp3.framerate.fps", new Float(FrameRate));
if (FrameRate < 0) {
throw new UnsupportedAudioFileException("Invalid FrameRate : " + FrameRate);
}
// Length-derived properties are only available when the caller knows
// the total media length (not the case for live streams).
if (mLength != AudioSystem.NOT_SPECIFIED) {
aff_properties.put("mp3.length.bytes", new Integer(mLength));
nTotalFrames = m_header.max_number_of_frames(mLength);
aff_properties.put("mp3.length.frames", new Integer(nTotalFrames));
}
BitRate = m_header.bitrate();
af_properties.put("bitrate", new Integer(BitRate));
aff_properties.put("mp3.bitrate.nominal.bps", new Integer(BitRate));
// Raw 32-bit sync header; re-validated bit-by-bit after the try block.
nHeader = m_header.getSyncHeader();
// Encoding lookup table (declared elsewhere in this class), indexed by
// version code and zero-based layer.
encoding = sm_aEncodings[nVersion][nLayer - 1];
aff_properties.put("mp3.version.encoding", encoding.toString());
if (mLength != AudioSystem.NOT_SPECIFIED) {
// "duration" is stored in microseconds (ms * 1000).
nTotalMS = Math.round(m_header.total_ms(mLength));
aff_properties.put("duration", new Long((long) nTotalMS * 1000L));
}
aff_properties.put("mp3.copyright", new Boolean(m_header.copyright()));
aff_properties.put("mp3.original", new Boolean(m_header.original()));
aff_properties.put("mp3.crc", new Boolean(m_header.checksums()));
aff_properties.put("mp3.padding", new Boolean(m_header.padding()));
// Keep the raw ID3v2 block and also parse it into individual properties.
InputStream id3v2 = m_bitstream.getRawID3v2();
if (id3v2 != null) {
aff_properties.put("mp3.id3tag.v2", id3v2);
parseID3v2Frames(id3v2, aff_properties);
}
if (TDebug.TraceAudioFileReader)
TDebug.out(m_header.toString());
} catch (Exception e) {
// Any parse failure (I/O, decoder error) means this is not MPEG data.
system.debug("not a MPEG stream: " + e.toString());
throw new UnsupportedAudioFileException("not a MPEG stream: " + e.toString());
}
// Deeper checks ?
// Version bits of the sync header: the value 1 is reserved in MPEG.
int cVersion = (nHeader >> 19) & 0x3;
if (cVersion == 1) {
system.debug("not a MPEG stream: wrong version");
throw new UnsupportedAudioFileException("not a MPEG stream: wrong version");
}
// Sampling-frequency index 3 is likewise reserved.
int cSFIndex = (nHeader >> 10) & 0x3;
if (cSFIndex == 3) {
system.debug("not a MPEG stream: wrong sampling rate");
throw new UnsupportedAudioFileException("not a MPEG stream: wrong sampling rate");
}
// Look up for ID3v1 tag
// Only attempted when the full stream is buffered (size == mediaLength),
// so skipping to the last 128 bytes cannot lose data we still need.
if ((size == mediaLength) && (mediaLength != AudioSystem.NOT_SPECIFIED)) {
// FileInputStream fis = (FileInputStream) inputStream;
byte[] id3v1 = new byte[128];
int toSkip = inputStream.available() - id3v1.length;
if (toSkip > 0) {
inputStream.skip(inputStream.available() - id3v1.length);
}
inputStream.read(id3v1, 0, id3v1.length);
// An ID3v1 tag always starts with the magic bytes "TAG".
if ((id3v1[0] == 'T') && (id3v1[1] == 'A') && (id3v1[2] == 'G')) {
parseID3v1Frames(id3v1, aff_properties);
}
}
AudioFormat format = new MpegAudioFormat(encoding, (float) nFrequency, // sample rate in Hz
AudioSystem.NOT_SPECIFIED, // sample size in bits: not applicable to mp3
nChannels, // number of channels
-1, // frame size in bytes: unknown/variable
FrameRate, // frame rate in frames per second
true, af_properties);
return new MpegAudioFileFormat(MpegFileFormatType.MP3, format, nTotalFrames, mLength, aff_properties);
}
Use of javazoom.spi.mpeg.sampled.file.MpegAudioFormat in project Minim by ddf.
The class JSMinim, method getAudioSample.
/**
 * Loads an audio file fully into memory and wraps it as an AudioSample
 * attached to a freshly acquired audio output.
 * <p>
 * MP3 data is first converted to 16-bit signed PCM; because the decoded
 * stream is much larger than the encoded file, the buffer size is derived
 * from the file's duration rather than from "mp3.length.bytes".
 *
 * @param filename   the file (or URL) to load
 * @param bufferSize the output buffer size, in sample frames
 * @return a JSAudioSample, or null if the file can't be read, its duration
 *         can't be determined (mp3 only), or no output can be acquired
 */
public AudioSample getAudioSample(String filename, int bufferSize) {
    AudioInputStream ais = getAudioInputStream(filename);
    if (ais == null) {
        return null;
    }
    AudioMetaData meta = null;
    AudioFormat format = ais.getFormat();
    FloatSampleBuffer samples = null;
    if (format instanceof MpegAudioFormat) {
        AudioFormat baseFormat = format;
        // Decode to 16-bit signed PCM, keeping sample rate and channel count.
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
        // converts the stream to PCM audio from mp3 audio
        ais = getAudioInputStream(format, ais);
        // get a map of properties so we can find out how long it is
        Map<String, Object> props = getID3Tags(filename);
        // there is a property called mp3.length.bytes, but that is
        // the length in bytes of the mp3 file, which will of course
        // be much shorter than the decoded version. so we use the
        // duration of the file to figure out how many bytes the
        // decoded file will be.
        // Bug fix: guard against a missing "duration" property (previously an
        // unchecked cast caused a NullPointerException). This matches the
        // containsKey check used in getAudioRecordingStream.
        Long dur = (Long) props.get("duration");
        if (dur == null || dur.longValue() <= 0) {
            error("Couldn't determine the length of " + filename + ".");
            return null;
        }
        // "duration" is in microseconds; convert to milliseconds.
        long lengthInMillis = dur.longValue() / 1000;
        int toRead = (int) AudioUtils.millis2Bytes(lengthInMillis, format);
        samples = loadFloatAudio(ais, toRead);
        meta = new MP3MetaData(filename, lengthInMillis, props);
    } else {
        // PCM: the frame count is known, so read exactly that many bytes.
        samples = loadFloatAudio(ais, (int) ais.getFrameLength() * format.getFrameSize());
        long length = AudioUtils.frames2Millis(samples.getSampleCount(), format);
        meta = new BasicMetaData(filename, length, samples.getSampleCount());
    }
    AudioOut out = getAudioOutput(format.getChannels(), bufferSize, format.getSampleRate(), format.getSampleSizeInBits());
    if (out == null) {
        error("Couldn't acquire an output.");
        return null;
    }
    SampleSignal ssig = new SampleSignal(samples);
    out.setAudioSignal(ssig);
    return new JSAudioSample(meta, ssig, out);
}
Use of javazoom.spi.mpeg.sampled.file.MpegAudioFormat in project Minim by ddf.
The class JSMinim, method getAudioRecordingStream.
/**
 * Opens a file as a streaming AudioRecordingStream.
 * <p>
 * MP3 files are decoded to 16-bit signed PCM on the fly and wrapped in a
 * JSMPEGAudioRecordingStream; everything else streams directly as PCM via a
 * JSPCMAudioRecordingStream.
 *
 * @param filename   the file (or URL) to open
 * @param bufferSize the output buffer size, in sample frames
 * @param inMemory   if true and the stream supports mark, mark the full length
 *                   so the stream can be rewound without re-opening the file
 * @return the recording stream, or null if the file or an output line
 *         couldn't be obtained
 */
public AudioRecordingStream getAudioRecordingStream(String filename, int bufferSize, boolean inMemory) {
    // TODO: deal with the case of wanting to have the file fully in memory
    AudioInputStream sourceStream = getAudioInputStream(filename);
    if (sourceStream == null) {
        return null;
    }
    if (inMemory && sourceStream.markSupported()) {
        AudioFormat fmt = sourceStream.getFormat();
        sourceStream.mark((int) sourceStream.getFrameLength() * fmt.getFrameSize());
    }
    debug("Reading from " + sourceStream.getClass().toString());
    debug("File format is: " + sourceStream.getFormat().toString());
    AudioFormat sourceFormat = sourceStream.getFormat();
    // mp3 streams need to be converted to PCM before playback
    if (sourceFormat instanceof MpegAudioFormat) {
        // Target format: 16-bit signed PCM, same rate and channel count.
        AudioFormat decodedFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sourceFormat.getSampleRate(), 16, sourceFormat.getChannels(), sourceFormat.getChannels() * 2, sourceFormat.getSampleRate(), false);
        // converts the stream to PCM audio from mp3 audio
        AudioInputStream decodedStream = getAudioInputStream(decodedFormat, sourceStream);
        // source data line is for sending the file audio out to the speakers
        SourceDataLine line = getSourceDataLine(decodedFormat, bufferSize);
        if (decodedStream == null || line == null) {
            return null;
        }
        // Pull the length (microseconds) out of the ID3 properties, if present.
        Map<String, Object> props = getID3Tags(filename);
        long lengthInMillis = -1;
        if (props.containsKey("duration")) {
            Long dur = (Long) props.get("duration");
            if (dur.longValue() > 0) {
                lengthInMillis = dur.longValue() / 1000;
            }
        }
        MP3MetaData meta = new MP3MetaData(filename, lengthInMillis, props);
        return new JSMPEGAudioRecordingStream(this, meta, sourceStream, decodedStream, line, bufferSize);
    }
    // PCM path: stream the file directly.
    // source data line is for sending the file audio out to the speakers
    SourceDataLine line = getSourceDataLine(sourceFormat, bufferSize);
    if (line == null) {
        return null;
    }
    long lengthMillis = AudioUtils.frames2Millis(sourceStream.getFrameLength(), sourceFormat);
    BasicMetaData meta = new BasicMetaData(filename, lengthMillis, sourceStream.getFrameLength());
    return new JSPCMAudioRecordingStream(this, meta, sourceStream, line, bufferSize);
}
Use of javazoom.spi.mpeg.sampled.file.MpegAudioFormat in project Minim by ddf.
The class JSMinim, method getAudioRecording.
/**
 * Loads a file fully into memory as raw bytes and wraps it as an
 * AudioRecording backed by a SourceDataLine. MP3 data is decoded to 16-bit
 * signed PCM first; the decoded size is derived from the file's duration.
 *
 * @param filename the file (or URL) to load
 * @return a JSAudioRecording, or null if the file can't be read, its duration
 *         can't be determined (mp3 only), or no line can be acquired
 * @deprecated
 */
public AudioRecording getAudioRecording(String filename) {
    AudioMetaData meta = null;
    AudioInputStream ais = getAudioInputStream(filename);
    byte[] samples;
    if (ais == null) {
        return null;
    }
    AudioFormat format = ais.getFormat();
    if (format instanceof MpegAudioFormat) {
        AudioFormat baseFormat = format;
        // Decode to 16-bit signed PCM, keeping sample rate and channel count.
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
        // converts the stream to PCM audio from mp3 audio
        ais = getAudioInputStream(format, ais);
        // get a map of properties so we can find out how long it is
        Map<String, Object> props = getID3Tags(filename);
        // there is a property called mp3.length.bytes, but that is
        // the length in bytes of the mp3 file, which will of course
        // be much shorter than the decoded version. so we use the
        // duration of the file to figure out how many bytes the
        // decoded file will be.
        // Bug fix: guard against a missing "duration" property (previously an
        // unchecked cast caused a NullPointerException). This matches the
        // containsKey check used in getAudioRecordingStream.
        Long dur = (Long) props.get("duration");
        if (dur == null || dur.longValue() <= 0) {
            error("Couldn't determine the length of " + filename + ".");
            return null;
        }
        // "duration" is in microseconds; convert to milliseconds.
        long lengthInMillis = dur.longValue() / 1000;
        int toRead = (int) AudioUtils.millis2Bytes(lengthInMillis, format);
        samples = loadByteAudio(ais, toRead);
        meta = new MP3MetaData(filename, lengthInMillis, props);
    } else {
        // PCM: the frame count is known, so read exactly that many bytes.
        samples = loadByteAudio(ais, (int) ais.getFrameLength() * format.getFrameSize());
        long length = AudioUtils.bytes2Millis(samples.length, format);
        meta = new BasicMetaData(filename, length, samples.length);
    }
    SourceDataLine line = getSourceDataLine(format, 2048);
    if (line == null) {
        return null;
    }
    return new JSAudioRecording(this, samples, line, meta);
}
Use of javazoom.spi.mpeg.sampled.file.MpegAudioFormat in project Minim by ddf.
The class JSMinim, method getAudioRecordingClip.
/**
 * Opens a file as a Javasound Clip wrapped in an AudioRecording. MP3 data is
 * decoded to 16-bit signed PCM before the Clip is opened.
 * <p>
 * Note: when the line is supported, the metadata is always an MP3MetaData
 * built from the ID3 property map, even for non-mp3 files (preserved from the
 * original implementation).
 *
 * @param filename the file (or URL) to open
 * @return a JSAudioRecordingClip, or null if the file can't be read, the
 *         format isn't supported, or the Clip can't be obtained/opened
 * @deprecated
 */
public AudioRecording getAudioRecordingClip(String filename) {
    AudioInputStream ais = getAudioInputStream(filename);
    if (ais == null) {
        // Bug fix: previously this fell through with clip == null and threw a
        // NullPointerException on clip.getMicrosecondLength(); fail cleanly.
        return null;
    }
    AudioFormat format = ais.getFormat();
    if (format instanceof MpegAudioFormat) {
        AudioFormat baseFormat = format;
        // Decode to 16-bit signed PCM, keeping sample rate and channel count.
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
        // converts the stream to PCM audio from mp3 audio
        ais = getAudioInputStream(format, ais);
    }
    DataLine.Info info = new DataLine.Info(Clip.class, ais.getFormat());
    if (!AudioSystem.isLineSupported(info)) {
        error("File format not supported.");
        return null;
    }
    // Obtain and open the line.
    Clip clip;
    try {
        clip = (Clip) AudioSystem.getLine(info);
        clip.open(ais);
    } catch (Exception e) {
        error("Error obtaining Javasound Clip: " + e.getMessage());
        return null;
    }
    Map<String, Object> props = getID3Tags(filename);
    // "duration" is in microseconds; convert to milliseconds when present.
    long lengthInMillis = -1;
    if (props.containsKey("duration")) {
        Long dur = (Long) props.get("duration");
        lengthInMillis = dur.longValue() / 1000;
    }
    AudioMetaData meta = new MP3MetaData(filename, lengthInMillis, props);
    return new JSAudioRecordingClip(clip, meta);
}
Aggregations