Example use of javax.sound.sampled.UnsupportedAudioFileException from the jdk8u_jdk project (JetBrains): class AudioFileSoundbankReader, method getSoundbank.
/**
 * Tries to build a {@code Soundbank} from the audio resource at the given URL.
 *
 * @param url location of a (possibly) supported audio file
 * @return the soundbank, or {@code null} if the resource is not a supported
 *         audio file or cannot be read (so other soundbank readers may be tried)
 * @throws InvalidMidiDataException if the audio data cannot be converted into a soundbank
 * @throws IOException declared for interface compatibility; read failures
 *         during probing are mapped to a {@code null} return instead
 */
public Soundbank getSoundbank(URL url) throws InvalidMidiDataException, IOException {
    // try-with-resources guarantees the stream is closed even when
    // getSoundbank(ais) throws (the original code leaked it on that path).
    try (AudioInputStream ais = AudioSystem.getAudioInputStream(url)) {
        return getSoundbank(ais);
    } catch (UnsupportedAudioFileException | IOException e) {
        // Not a supported audio file, or unreadable: signal "cannot handle"
        // rather than failing, matching the SoundbankReader SPI convention.
        return null;
    }
}
Example use of javax.sound.sampled.UnsupportedAudioFileException from the jdk8u_jdk project (JetBrains): class SoftMidiAudioFileReader, method getAudioInputStream (Sequence overload).
/**
 * Renders a MIDI {@code Sequence} to audio by feeding all of its events,
 * in tick order, into a non-realtime {@code SoftSynthesizer} and returning
 * the synthesizer's PCM output stream, length-limited to the song duration
 * plus a short decay tail.
 *
 * @param seq the MIDI sequence to render
 * @return an audio stream producing the rendered PCM data
 * @throws UnsupportedAudioFileException if a tempo meta event is malformed
 * @throws IOException if the synthesizer cannot be opened
 */
public AudioInputStream getAudioInputStream(Sequence seq) throws UnsupportedAudioFileException, IOException {
    // Software synthesizer that renders into an AudioInputStream instead of
    // playing to a live audio device.
    AudioSynthesizer synth = (AudioSynthesizer) new SoftSynthesizer();
    AudioInputStream stream;
    Receiver recv;
    try {
        // 'format' is a field of the enclosing reader: the target PCM format.
        stream = synth.openStream(format, null);
        recv = synth.getReceiver();
    } catch (MidiUnavailableException e) {
        // NOTE(review): only the message survives here; the original
        // exception is not chained as the cause.
        throw new IOException(e.toString());
    }
    float divtype = seq.getDivisionType();    // Sequence.PPQ or an SMPTE frame rate
    Track[] tracks = seq.getTracks();
    int[] trackspos = new int[tracks.length]; // index of next unconsumed event per track
    int mpq = 500000;                         // microseconds per quarter note (default = 120 BPM)
    int seqres = seq.getResolution();
    long lasttick = 0;
    long curtime = 0;                         // running absolute time, in microseconds
    // Merge all tracks in tick order: on each iteration pick the pending
    // event with the smallest tick across all tracks.
    while (true) {
        MidiEvent selevent = null;
        int seltrack = -1;
        for (int i = 0; i < tracks.length; i++) {
            int trackpos = trackspos[i];
            Track track = tracks[i];
            if (trackpos < track.size()) {
                MidiEvent event = track.get(trackpos);
                if (selevent == null || event.getTick() < selevent.getTick()) {
                    selevent = event;
                    seltrack = i;
                }
            }
        }
        if (seltrack == -1)
            break; // every track exhausted
        trackspos[seltrack]++;
        long tick = selevent.getTick();
        // Convert ticks to microseconds. PPQ timing accumulates deltas scaled
        // by the current tempo (mpq); SMPTE timing is absolute per tick.
        if (divtype == Sequence.PPQ)
            curtime += ((tick - lasttick) * mpq) / seqres;
        else
            curtime = (long) ((tick * 1000000.0 * divtype) / seqres);
        lasttick = tick;
        MidiMessage msg = selevent.getMessage();
        if (msg instanceof MetaMessage) {
            // Meta events are not forwarded to the synthesizer; the only one
            // that affects rendering is the tempo change (type 0x51), and
            // tempo is only meaningful for PPQ timing.
            if (divtype == Sequence.PPQ) {
                if (((MetaMessage) msg).getType() == 0x51) {
                    byte[] data = ((MetaMessage) msg).getData();
                    if (data.length < 3) {
                        // Malformed tempo payload: reject the file.
                        throw new UnsupportedAudioFileException();
                    }
                    // Tempo payload is a 24-bit big-endian value:
                    // microseconds per quarter note.
                    mpq = ((data[0] & 0xff) << 16) | ((data[1] & 0xff) << 8) | (data[2] & 0xff);
                }
            }
        } else {
            // Schedule the event at its absolute timestamp (microseconds).
            recv.send(msg, curtime);
        }
    }
    // Cap the stream at song length plus 4 seconds for note release/reverb tail.
    long totallen = curtime / 1000000; // song length in whole seconds
    long len = (long) (stream.getFormat().getFrameRate() * (totallen + 4));
    stream = new AudioInputStream(stream, stream.getFormat(), len);
    return stream;
}
Example use of javax.sound.sampled.UnsupportedAudioFileException from the jdk8u_jdk project (JetBrains): class SoftMidiAudioFileReader, method getAudioInputStream (InputStream overload).
/**
 * Parses the input stream as a MIDI sequence and renders it to audio.
 * The stream must support mark/reset so it can be rewound for other
 * readers when it turns out not to contain MIDI data.
 *
 * @param inputstream a mark-supporting stream positioned at the start of the data
 * @return an audio stream producing the rendered PCM data
 * @throws UnsupportedAudioFileException if the stream is not a readable MIDI
 *         sequence (the underlying parse failure is attached as the cause)
 * @throws IOException if the rendering synthesizer cannot be opened
 */
public AudioInputStream getAudioInputStream(InputStream inputstream) throws UnsupportedAudioFileException, IOException {
    // 200 bytes is enough look-ahead for MidiSystem to probe the header.
    inputstream.mark(200);
    Sequence seq;
    try {
        seq = MidiSystem.getSequence(inputstream);
    } catch (InvalidMidiDataException | IOException e) {
        // Rewind so another AudioFileReader can try this stream.
        inputstream.reset();
        // Chain the real failure as the cause instead of discarding it
        // (UnsupportedAudioFileException has no cause-taking constructor).
        UnsupportedAudioFileException uafe = new UnsupportedAudioFileException();
        uafe.initCause(e);
        throw uafe;
    }
    return getAudioInputStream(seq);
}
Example use of javax.sound.sampled.UnsupportedAudioFileException from the jdk8u_jdk project (JetBrains): class WaveFileReader, method getFMT.
//--------------------------------------------------------------------
/**
 * Reads and validates the RIFF/WAVE headers from the stream, locates the
 * 'fmt ' and 'data' chunks, and builds the matching {@code AudioFileFormat}.
 * On return the stream is positioned at the first byte of sample data.
 *
 * @param stream  the WAV stream; assumed to be rewound to its first byte
 * @param doReset if true, mark the stream first and reset it when the file
 *                is rejected as non-WAVE, so other readers can probe it
 * @return a {@code WaveFileFormat} describing the file
 * @throws UnsupportedAudioFileException if the stream is not a valid,
 *         supported WAVE file (bad magic, missing chunks, unknown codec,
 *         or nonsensical channel/bit counts)
 * @throws IOException on read failure
 */
private AudioFileFormat getFMT(InputStream stream, boolean doReset) throws UnsupportedAudioFileException, IOException {
    // assumes stream is rewound
    int bytesRead;
    int nread = 0;          // bytes consumed within the current phase
    int fmt;
    int length = 0;
    int wav_type = 0;
    short channels;
    long sampleRate;
    long avgBytesPerSec;
    short blockAlign;
    int sampleSizeInBits;
    AudioFormat.Encoding encoding = null;
    // RIFF multi-byte fields are little-endian; rllong/rlshort do the
    // byte-order conversion, while readInt reads big-endian magic tags.
    DataInputStream dis = new DataInputStream(stream);
    if (doReset) {
        dis.mark(MAX_READ_LENGTH);
    }
    int magic = dis.readInt();
    int fileLength = rllong(dis);
    int waveMagic = dis.readInt();
    int totallength;
    if (fileLength <= 0) {
        fileLength = AudioSystem.NOT_SPECIFIED;
        totallength = AudioSystem.NOT_SPECIFIED;
    } else {
        // RIFF length field excludes the 8-byte RIFF header itself.
        totallength = fileLength + 8;
    }
    if ((magic != WaveFileFormat.RIFF_MAGIC) || (waveMagic != WaveFileFormat.WAVE_MAGIC)) {
        // not WAVE, throw UnsupportedAudioFileException
        if (doReset) {
            dis.reset();
        }
        throw new UnsupportedAudioFileException("not a WAVE file");
    }
    // we break out of this loop either by hitting EOF or finding "fmt "
    while (true) {
        try {
            fmt = dis.readInt();
            nread += 4;
            if (fmt == WaveFileFormat.FMT_MAGIC) {
                // we've found the 'fmt' chunk
                break;
            } else {
                // else not 'fmt', skip this chunk
                length = rllong(dis);
                nread += 4;
                // RIFF chunks are word-aligned: odd lengths carry a pad byte.
                if (length % 2 > 0)
                    length++;
                nread += dis.skipBytes(length);
            }
        } catch (EOFException eof) {
            // we've reached the end of the file without finding the 'fmt' chunk
            throw new UnsupportedAudioFileException("Not a valid WAV file");
        }
    }
    // Read the format chunk size.
    length = rllong(dis);
    nread += 4;
    // This is the nread position at the end of the format chunk
    int endLength = nread + length;
    // Read the wave format data out of the format chunk.
    // encoding.
    wav_type = rlshort(dis);
    nread += 2;
    if (wav_type == WaveFileFormat.WAVE_FORMAT_PCM)
        // if 8-bit, we need PCM_UNSIGNED, below...
        encoding = AudioFormat.Encoding.PCM_SIGNED;
    else if (wav_type == WaveFileFormat.WAVE_FORMAT_ALAW)
        encoding = AudioFormat.Encoding.ALAW;
    else if (wav_type == WaveFileFormat.WAVE_FORMAT_MULAW)
        encoding = AudioFormat.Encoding.ULAW;
    else {
        // we don't support any other WAVE formats....
        throw new UnsupportedAudioFileException("Not a supported WAV file");
    }
    // channels
    channels = rlshort(dis);
    nread += 2;
    if (channels <= 0) {
        throw new UnsupportedAudioFileException("Invalid number of channels");
    }
    // sample rate.
    sampleRate = rllong(dis);
    nread += 4;
    // this is the avgBytesPerSec
    avgBytesPerSec = rllong(dis);
    nread += 4;
    // this is blockAlign value
    blockAlign = rlshort(dis);
    nread += 2;
    // this is the PCM-specific value bitsPerSample
    sampleSizeInBits = (int) rlshort(dis);
    nread += 2;
    if (sampleSizeInBits <= 0) {
        throw new UnsupportedAudioFileException("Invalid bitsPerSample");
    }
    // if sampleSizeInBits==8, we need to use PCM_UNSIGNED
    if ((sampleSizeInBits == 8) && encoding.equals(AudioFormat.Encoding.PCM_SIGNED))
        encoding = AudioFormat.Encoding.PCM_UNSIGNED;
    if (length % 2 != 0)
        length += 1;
    // This fixes #4257986 -- skip any extra format bytes (e.g. cbSize and
    // codec-specific extension data) so we land exactly at the chunk end.
    if (endLength > nread)
        nread += dis.skipBytes(endLength - nread);
    // we have a format now, so find the "data" chunk
    // we break out of this loop either by hitting EOF or finding "data"
    // $$kk: if "data" chunk precedes "fmt" chunk we are hosed -- can this legally happen?
    nread = 0;
    while (true) {
        try {
            int datahdr = dis.readInt();
            nread += 4;
            if (datahdr == WaveFileFormat.DATA_MAGIC) {
                // we've found the 'data' chunk
                break;
            } else {
                // else not 'data', skip this chunk
                int thisLength = rllong(dis);
                nread += 4;
                // word-alignment pad byte, as above
                if (thisLength % 2 > 0)
                    thisLength++;
                nread += dis.skipBytes(thisLength);
            }
        } catch (EOFException eof) {
            // we've reached the end of the file without finding the 'data' chunk
            throw new UnsupportedAudioFileException("Not a valid WAV file");
        }
    }
    // this is the length of the data chunk
    int dataLength = rllong(dis);
    nread += 4;
    // now build the new AudioFileFormat and return
    AudioFormat format = new AudioFormat(encoding, (float) sampleRate, sampleSizeInBits, channels, calculatePCMFrameSize(sampleSizeInBits, channels), (float) sampleRate, false);
    return new WaveFileFormat(AudioFileFormat.Type.WAVE, totallength, format, dataLength / format.getFrameSize());
}
Example use of javax.sound.sampled.UnsupportedAudioFileException from the Apache Tika project: class AudioParser, method parse.
/**
 * Extracts technical audio metadata (container type, channels, sample rate,
 * bit depth, encoding) via {@code javax.sound.sampled} and emits an empty
 * XHTML body. Files the sound stack cannot parse yield no metadata but
 * still produce the XHTML skeleton.
 */
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException {
    // AudioSystem's probing needs mark/reset; wrap the stream if necessary.
    if (!stream.markSupported()) {
        stream = new BufferedInputStream(stream);
    }
    try {
        AudioFileFormat containerFormat = AudioSystem.getAudioFileFormat(stream);
        // Map the detected container onto a MIME type.
        Type containerType = containerFormat.getType();
        if (containerType == Type.AIFC || containerType == Type.AIFF) {
            metadata.set(Metadata.CONTENT_TYPE, "audio/x-aiff");
        } else if (containerType == Type.AU || containerType == Type.SND) {
            metadata.set(Metadata.CONTENT_TYPE, "audio/basic");
        } else if (containerType == Type.WAVE) {
            metadata.set(Metadata.CONTENT_TYPE, "audio/x-wav");
        }
        AudioFormat encodedFormat = containerFormat.getFormat();
        int channelCount = encodedFormat.getChannels();
        if (channelCount != AudioSystem.NOT_SPECIFIED) {
            // TODO: Use XMPDM.TRACKS? (see also frame rate in AudioFormat)
            metadata.set("channels", String.valueOf(channelCount));
        }
        float sampleRate = encodedFormat.getSampleRate();
        if (sampleRate != AudioSystem.NOT_SPECIFIED) {
            metadata.set("samplerate", String.valueOf(sampleRate));
            metadata.set(XMPDM.AUDIO_SAMPLE_RATE, Integer.toString((int) sampleRate));
        }
        int sampleBits = encodedFormat.getSampleSizeInBits();
        if (sampleBits != AudioSystem.NOT_SPECIFIED) {
            metadata.set("bits", String.valueOf(sampleBits));
            // XMPDM only defines sample types for these three depths.
            switch (sampleBits) {
                case 8:
                    metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "8Int");
                    break;
                case 16:
                    metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "16Int");
                    break;
                case 32:
                    metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "32Int");
                    break;
                default:
                    break;
            }
        }
        metadata.set("encoding", encodedFormat.getEncoding().toString());
        // The format property maps may carry extras such as "duration",
        // "author", "title", "copyright", "date", "comment" -- rarely
        // populated in practice, but copied over when present.
        addMetadata(metadata, containerFormat.properties());
        addMetadata(metadata, encodedFormat.properties());
    } catch (UnsupportedAudioFileException ignored) {
        // There is no way to tell whether the document is corrupted or the
        // format is simply unsupported, so the metadata pass is skipped.
    }
    XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
    xhtml.startDocument();
    xhtml.endDocument();
}
Aggregations