Use of javax.sound.sampled.UnsupportedAudioFileException in project JMRI by JMRI.
The class JavaSoundAudioBuffer, method loadBuffer:
@Override
protected boolean loadBuffer() {
    if (!initialised) {
        return false;
    }
    // Reinitialise
    init();
    // Retrieve filename of specified .wav file
    File file = new File(FileUtil.getExternalFilename(this.getURL()));
    // Create the input stream for the audio file
    try {
        audioInputStream = AudioSystem.getAudioInputStream(file);
    } catch (UnsupportedAudioFileException ex) {
        log.error("Unsupported audio file format when loading buffer: " + ex);
        return false;
    } catch (IOException ex) {
        log.error("Error loading buffer: " + ex);
        return false;
    }
    return (this.processBuffer());
}
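For context, the same pattern can be reproduced outside JMRI: AudioSystem.getAudioInputStream throws UnsupportedAudioFileException when the container format is not recognised and IOException on read failures. The following is a minimal stand-alone sketch (not code from either project); the file path is hypothetical.

import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

public class LoadWavSketch {
    public static void main(String[] args) {
        File file = new File("example.wav"); // hypothetical path
        try (AudioInputStream in = AudioSystem.getAudioInputStream(file)) {
            System.out.println("Format: " + in.getFormat());
        } catch (UnsupportedAudioFileException ex) {
            System.err.println("Unsupported audio file format: " + ex);
        } catch (IOException ex) {
            System.err.println("I/O error while opening audio file: " + ex);
        }
    }
}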
Use of javax.sound.sampled.UnsupportedAudioFileException in project jdk8u_jdk by JetBrains.
The class WaveExtensibleFileReader, method internal_getAudioFileFormat:
private AudioFileFormat internal_getAudioFileFormat(InputStream stream) throws UnsupportedAudioFileException, IOException {
    RIFFReader riffiterator = new RIFFReader(stream);
    if (!riffiterator.getFormat().equals("RIFF"))
        throw new UnsupportedAudioFileException();
    if (!riffiterator.getType().equals("WAVE"))
        throw new UnsupportedAudioFileException();
    boolean fmt_found = false;
    boolean data_found = false;
    int channels = 1;
    long samplerate = 1;
    // long framerate = 1;
    int framesize = 1;
    int bits = 1;
    int validBitsPerSample = 1;
    long channelMask = 0;
    GUID subFormat = null;
    while (riffiterator.hasNextChunk()) {
        RIFFReader chunk = riffiterator.nextChunk();
        if (chunk.getFormat().equals("fmt ")) {
            fmt_found = true;
            int format = chunk.readUnsignedShort();
            if (format != 0xFFFE)
                // WAVE_FORMAT_EXTENSIBLE only
                throw new UnsupportedAudioFileException();
            channels = chunk.readUnsignedShort();
            samplerate = chunk.readUnsignedInt();
            /* framerate = */ chunk.readUnsignedInt();
            framesize = chunk.readUnsignedShort();
            bits = chunk.readUnsignedShort();
            int cbSize = chunk.readUnsignedShort();
            if (cbSize != 22)
                throw new UnsupportedAudioFileException();
            validBitsPerSample = chunk.readUnsignedShort();
            if (validBitsPerSample > bits)
                throw new UnsupportedAudioFileException();
            channelMask = chunk.readUnsignedInt();
            subFormat = GUID.read(chunk);
        }
        if (chunk.getFormat().equals("data")) {
            data_found = true;
            break;
        }
    }
    if (!fmt_found)
        throw new UnsupportedAudioFileException();
    if (!data_found)
        throw new UnsupportedAudioFileException();
    Map<String, Object> p = new HashMap<String, Object>();
    String s_channelmask = decodeChannelMask(channelMask);
    if (s_channelmask != null)
        p.put("channelOrder", s_channelmask);
    if (channelMask != 0)
        p.put("channelMask", channelMask);
    // validBitsPerSample is only informational for PCM data;
    // data is still encoded according to SampleSizeInBits.
    p.put("validBitsPerSample", validBitsPerSample);
    AudioFormat audioformat = null;
    if (subFormat.equals(SUBTYPE_PCM)) {
        if (bits == 8) {
            audioformat = new AudioFormat(Encoding.PCM_UNSIGNED, samplerate, bits, channels, framesize, samplerate, false, p);
        } else {
            audioformat = new AudioFormat(Encoding.PCM_SIGNED, samplerate, bits, channels, framesize, samplerate, false, p);
        }
    } else if (subFormat.equals(SUBTYPE_IEEE_FLOAT)) {
        audioformat = new AudioFormat(Encoding.PCM_FLOAT, samplerate, bits, channels, framesize, samplerate, false, p);
    } else {
        throw new UnsupportedAudioFileException();
    }
    AudioFileFormat fileformat = new AudioFileFormat(AudioFileFormat.Type.WAVE, audioformat, AudioSystem.NOT_SPECIFIED);
    return fileformat;
}
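The properties the reader puts into the map (channelOrder, channelMask, validBitsPerSample) surface on the resulting AudioFormat. A minimal sketch of reading them back through the public API, assuming a WAVE_FORMAT_EXTENSIBLE file exists at the hypothetical path "extensible.wav":

import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;

public class ExtensibleWaveProperties {
    public static void main(String[] args) throws Exception {
        // Probe the file format; the extensible reader attaches extra properties.
        AudioFileFormat aff = AudioSystem.getAudioFileFormat(new File("extensible.wav"));
        AudioFormat fmt = aff.getFormat();
        // These keys are only present when this particular reader handled the file.
        System.out.println("channelOrder       = " + fmt.getProperty("channelOrder"));
        System.out.println("channelMask        = " + fmt.getProperty("channelMask"));
        System.out.println("validBitsPerSample = " + fmt.getProperty("validBitsPerSample"));
    }
}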
Use of javax.sound.sampled.UnsupportedAudioFileException in project jdk8u_jdk by JetBrains.
The class WaveExtensibleFileReader, method getAudioInputStream:
public AudioInputStream getAudioInputStream(InputStream stream) throws UnsupportedAudioFileException, IOException {
    AudioFileFormat format = getAudioFileFormat(stream);
    RIFFReader riffiterator = new RIFFReader(stream);
    if (!riffiterator.getFormat().equals("RIFF"))
        throw new UnsupportedAudioFileException();
    if (!riffiterator.getType().equals("WAVE"))
        throw new UnsupportedAudioFileException();
    while (riffiterator.hasNextChunk()) {
        RIFFReader chunk = riffiterator.nextChunk();
        if (chunk.getFormat().equals("data")) {
            return new AudioInputStream(chunk, format.getFormat(), chunk.getSize());
        }
    }
    throw new UnsupportedAudioFileException();
}
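Because getAudioInputStream wraps the data chunk directly, the returned stream's length is reported in sample frames. A sketch (not from the JDK sources) of how a caller might drain such a stream and compare the byte count against frameLength * frameSize; the file name "float.wav" is hypothetical:

import java.io.File;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class DrainAudioStream {
    public static void main(String[] args) throws Exception {
        try (AudioInputStream in = AudioSystem.getAudioInputStream(new File("float.wav"))) {
            // Expected byte count: frames * bytes per frame.
            long expected = in.getFrameLength() * in.getFormat().getFrameSize();
            byte[] buf = new byte[4096];
            long total = 0;
            int n;
            while ((n = in.read(buf)) != -1) {
                total += n;
            }
            System.out.println("Read " + total + " of " + expected + " expected bytes");
        }
    }
}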
Use of javax.sound.sampled.UnsupportedAudioFileException in project jdk8u_jdk by JetBrains.
The class SoftMidiAudioFileReader, method getAudioFileFormat:
public AudioFileFormat getAudioFileFormat(InputStream inputstream) throws UnsupportedAudioFileException, IOException {
    inputstream.mark(200);
    Sequence seq;
    try {
        seq = MidiSystem.getSequence(inputstream);
    } catch (InvalidMidiDataException e) {
        inputstream.reset();
        throw new UnsupportedAudioFileException();
    } catch (IOException e) {
        inputstream.reset();
        throw new UnsupportedAudioFileException();
    }
    return getAudioFileFormat(seq);
}
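Since the reader calls mark(200) and reset() while probing, the supplied InputStream must support mark/reset, or reset() will throw IOException. A common caller-side precaution, shown here as a sketch rather than JDK code, is to wrap the source in a BufferedInputStream before handing it to AudioSystem; "song.mid" is a hypothetical file, and the probe only succeeds if this reader is registered as a service provider, as it is in the stock JDK.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioSystem;

public class ProbeMidiAsAudio {
    public static void main(String[] args) throws Exception {
        try (InputStream in = new BufferedInputStream(new FileInputStream("song.mid"))) {
            // BufferedInputStream supports mark/reset, so the reader's
            // mark(200)/reset() probing works even when the underlying
            // file stream does not support mark on its own.
            AudioFileFormat aff = AudioSystem.getAudioFileFormat(in);
            System.out.println(aff.getFormat());
        }
    }
}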
Use of javax.sound.sampled.UnsupportedAudioFileException in project jdk8u_jdk by JetBrains.
The class WaveFloatFileReader, method internal_getAudioFileFormat:
private AudioFileFormat internal_getAudioFileFormat(InputStream stream) throws UnsupportedAudioFileException, IOException {
    RIFFReader riffiterator = new RIFFReader(stream);
    if (!riffiterator.getFormat().equals("RIFF"))
        throw new UnsupportedAudioFileException();
    if (!riffiterator.getType().equals("WAVE"))
        throw new UnsupportedAudioFileException();
    boolean fmt_found = false;
    boolean data_found = false;
    int channels = 1;
    long samplerate = 1;
    int framesize = 1;
    int bits = 1;
    while (riffiterator.hasNextChunk()) {
        RIFFReader chunk = riffiterator.nextChunk();
        if (chunk.getFormat().equals("fmt ")) {
            fmt_found = true;
            int format = chunk.readUnsignedShort();
            if (format != 3) // WAVE_FORMAT_IEEE_FLOAT only
                throw new UnsupportedAudioFileException();
            channels = chunk.readUnsignedShort();
            samplerate = chunk.readUnsignedInt();
            /* framerate = */ chunk.readUnsignedInt();
            framesize = chunk.readUnsignedShort();
            bits = chunk.readUnsignedShort();
        }
        if (chunk.getFormat().equals("data")) {
            data_found = true;
            break;
        }
    }
    if (!fmt_found)
        throw new UnsupportedAudioFileException();
    if (!data_found)
        throw new UnsupportedAudioFileException();
    AudioFormat audioformat = new AudioFormat(Encoding.PCM_FLOAT, samplerate, bits, channels, framesize, samplerate, false);
    AudioFileFormat fileformat = new AudioFileFormat(AudioFileFormat.Type.WAVE, audioformat, AudioSystem.NOT_SPECIFIED);
    return fileformat;
}
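The fmt chunk of a float WAV carries the same frame-size relationship as PCM: frameSize = channels * bits/8. A short sketch, independent of the reader above, that does the arithmetic and builds an equivalent Encoding.PCM_FLOAT format by hand; the 44.1 kHz stereo 32-bit values are illustrative.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioFormat.Encoding;

public class FloatWaveFormat {
    public static void main(String[] args) {
        int channels = 2;
        int bits = 32;
        float sampleRate = 44100f;
        int frameSize = channels * (bits / 8); // 2 * 32/8 = 8 bytes per sample frame
        AudioFormat fmt = new AudioFormat(Encoding.PCM_FLOAT, sampleRate, bits,
                channels, frameSize, sampleRate, false); // little-endian, as in WAV
        System.out.println(fmt);
    }
}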