Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains:
class AiffFileWriter, method getFileStream.
// Builds the complete AIFF byte stream for writing: a freshly generated
// FORM/COMM/SSND header followed by the (possibly transcoded) audio data.
private InputStream getFileStream(AiffFileFormat aiffFileFormat, InputStream audioStream) throws IOException {
// private method ... assumes aiffFileFormat is a supported file format
AudioFormat format = aiffFileFormat.getFormat();
AudioFormat streamFormat = null;
AudioFormat.Encoding encoding = null;
//$$fb a little bit nicer handling of constants
//int headerSize = 54;
int headerSize = aiffFileFormat.getHeaderSize();
// fverChunkSize is only needed for AIFC output; the FVER write below is
// commented out, so this value is currently unused.
//int fverChunkSize = 0;
int fverChunkSize = aiffFileFormat.getFverChunkSize();
//int commChunkSize = 26;
int commChunkSize = aiffFileFormat.getCommChunkSize();
// -1 signals "unknown" for both totals until the frame length is known.
int aiffLength = -1;
int ssndChunkSize = -1;
//int ssndOffset = headerSize - 16;
int ssndOffset = aiffFileFormat.getSsndChunkOffset();
short channels = (short) format.getChannels();
short sampleSize = (short) format.getSampleSizeInBits();
// Bits per sample frame (all channels); divided by 8 below to get bytes.
int ssndBlockSize = (channels * sampleSize);
int numFrames = aiffFileFormat.getFrameLength();
long dataSize = -1;
if (numFrames != AudioSystem.NOT_SPECIFIED) {
// Audio payload size in bytes; the long cast guards the multiply
// against int overflow for large frame counts.
dataSize = (long) numFrames * ssndBlockSize / 8;
// ssndChunkSize counts the whole SSND chunk including its 8-byte
// chunk header; the size field written later subtracts those 8 bytes.
ssndChunkSize = (int) dataSize + 16;
aiffLength = (int) dataSize + headerSize;
}
float sampleFramesPerSecond = format.getSampleRate();
// Compression code for AIFC; unused while the AIFC-only writes below
// remain commented out.
int compCode = AiffFileFormat.AIFC_PCM;
byte[] header = null;
ByteArrayInputStream headerStream = null;
ByteArrayOutputStream baos = null;
DataOutputStream dos = null;
SequenceInputStream aiffStream = null;
// Default: pass the audio data through untouched; replaced below if a
// transcode to signed big-endian PCM is required.
InputStream codedAudioStream = audioStream;
if (audioStream instanceof AudioInputStream) {
streamFormat = ((AudioInputStream) audioStream).getFormat();
encoding = streamFormat.getEncoding();
// $$jb: Note that AIFF samples are ALWAYS signed
if ((AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)) || ((AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) && !streamFormat.isBigEndian())) {
// plug in the transcoder to convert to PCM_SIGNED. big endian
codedAudioStream = AudioSystem.getAudioInputStream(new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, streamFormat.getSampleRate(), streamFormat.getSampleSizeInBits(), streamFormat.getChannels(), streamFormat.getFrameSize(), streamFormat.getFrameRate(), true), (AudioInputStream) audioStream);
} else if ((AudioFormat.Encoding.ULAW.equals(encoding)) || (AudioFormat.Encoding.ALAW.equals(encoding))) {
// ULAW/ALAW input must be 8-bit; anything else is rejected.
if (streamFormat.getSampleSizeInBits() != 8) {
throw new IllegalArgumentException("unsupported encoding");
}
//$$fb 2001-07-13: this is probably not what we want:
// writing PCM when ULAW/ALAW is requested. AIFC is able to write ULAW !
// plug in the transcoder to convert to PCM_SIGNED_BIG_ENDIAN
// Decoding doubles the sample size (8 -> 16 bit) and the frame size.
codedAudioStream = AudioSystem.getAudioInputStream(new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, streamFormat.getSampleRate(), streamFormat.getSampleSizeInBits() * 2, streamFormat.getChannels(), streamFormat.getFrameSize() * 2, streamFormat.getFrameRate(), true), (AudioInputStream) audioStream);
}
}
// Now create an AIFF stream header...
// DataOutputStream writes big-endian, matching the AIFF specification.
baos = new ByteArrayOutputStream();
dos = new DataOutputStream(baos);
// Write the outer FORM chunk
dos.writeInt(AiffFileFormat.AIFF_MAGIC);
// NOTE(review): when the frame length is NOT_SPECIFIED, aiffLength is
// still -1 here, so a meaningless FORM size (-9) is written -- presumably
// callers only hit this path with a known length; confirm.
dos.writeInt((aiffLength - 8));
dos.writeInt(AiffFileFormat.AIFF_MAGIC2);
// Write a FVER chunk - only for AIFC
//dos.writeInt(FVER_MAGIC);
//dos.writeInt( (fverChunkSize-8) );
//dos.writeInt(FVER_TIMESTAMP);
// Write a COMM chunk
dos.writeInt(AiffFileFormat.COMM_MAGIC);
dos.writeInt((commChunkSize - 8));
dos.writeShort(channels);
dos.writeInt(numFrames);
dos.writeShort(sampleSize);
// 10 bytes
// Sample rate is stored as an 80-bit IEEE 754 extended-precision float.
write_ieee_extended(dos, sampleFramesPerSecond);
//Only for AIFC
//dos.writeInt(compCode);
//dos.writeInt(compCode);
//dos.writeShort(0);
// Write the SSND chunk header
dos.writeInt(AiffFileFormat.SSND_MAGIC);
dos.writeInt((ssndChunkSize - 8));
// ssndOffset and ssndBlockSize set to 0 upon
// recommendation in "Sound Manager" chapter in
// "Inside Macintosh Sound", pp 2-87 (from Babu)
// ssndOffset
dos.writeInt(0);
// ssndBlockSize
dos.writeInt(0);
// Concat this with the audioStream and return it
dos.close();
header = baos.toByteArray();
headerStream = new ByteArrayInputStream(header);
// NoCloseInputStream shields the caller-supplied stream from being
// closed when the SequenceInputStream is closed.
aiffStream = new SequenceInputStream(headerStream, new NoCloseInputStream(codedAudioStream));
return aiffStream;
}
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains:
class AiffFileReader, method getAudioInputStream.
/**
 * Obtains an audio stream from the File provided. The File must
 * point to valid audio file data.
 * @param file the File for which the <code>AudioInputStream</code> should be
 * constructed
 * @return an <code>AudioInputStream</code> object based on the audio file data pointed
 * to by the File
 * @throws UnsupportedAudioFileException if the File does not point to valid audio
 * file data recognized by the system
 * @throws IOException if an I/O exception occurs
 */
public AudioInputStream getAudioInputStream(File file) throws UnsupportedAudioFileException, IOException {
// may throw IOException
FileInputStream in = new FileInputStream(file);
AudioFileFormat parsedFormat = null;
// part of fix for 4325421
try {
parsedFormat = getCOMM(in, false);
} finally {
// Close only on failure; on success the returned stream takes
// ownership of the open FileInputStream.
if (parsedFormat == null) {
in.close();
}
}
return new AudioInputStream(in, parsedFormat.getFormat(), parsedFormat.getFrameLength());
}
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains:
class AudioFileSoundbankReader, method getSoundbank(InputStream).
// Best-effort probe: try to interpret the stream as an audio file and wrap
// it as a Soundbank; on any failure, rewind the stream and report null so
// other readers can try.
public Soundbank getSoundbank(InputStream stream) throws InvalidMidiDataException, IOException {
// Remember the current position so the stream can be rewound below.
stream.mark(512);
try {
Soundbank result = getSoundbank(AudioSystem.getAudioInputStream(stream));
if (result != null) {
return result;
}
} catch (UnsupportedAudioFileException ignored) {
// not a recognized audio format -- fall through and rewind
} catch (IOException ignored) {
// probe failed -- fall through and rewind
}
stream.reset();
return null;
}
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains:
class AudioFileSoundbankReader, method getSoundbank(URL).
/**
 * Reads a Soundbank from the audio data at the given URL.
 * <p>
 * Fix: the original closed the AudioInputStream only on the success path,
 * leaking it if {@code getSoundbank(ais)} threw InvalidMidiDataException.
 * try-with-resources closes it on every path.
 *
 * @param url the URL to read audio data from
 * @return the Soundbank, or {@code null} if the URL does not point to
 *         audio data this reader supports or an I/O error occurs
 * @throws InvalidMidiDataException if the audio data cannot be converted
 *         to a soundbank
 * @throws IOException declared for interface compatibility; I/O failures
 *         during probing are swallowed and reported as {@code null}
 */
public Soundbank getSoundbank(URL url) throws InvalidMidiDataException, IOException {
    try (AudioInputStream ais = AudioSystem.getAudioInputStream(url)) {
        return getSoundbank(ais);
    } catch (UnsupportedAudioFileException e) {
        // not a recognized audio format
        return null;
    } catch (IOException e) {
        // best-effort: an unreadable URL yields no soundbank
        return null;
    }
}
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains:
class SoftMidiAudioFileReader, method getAudioInputStream.
/**
 * Renders a MIDI Sequence to an audio stream by feeding all of its events,
 * in tick order across tracks, into a software synthesizer.
 * <p>
 * Fix: the MidiUnavailableException is now chained as the cause of the
 * IOException instead of being flattened to a string, preserving the stack
 * trace for callers.
 *
 * @param seq the MIDI sequence to render
 * @return an AudioInputStream of the rendered audio, truncated to the
 *         sequence length plus four seconds of decay tail
 * @throws UnsupportedAudioFileException if a tempo meta-event is malformed
 * @throws IOException if the synthesizer cannot be opened
 */
public AudioInputStream getAudioInputStream(Sequence seq) throws UnsupportedAudioFileException, IOException {
AudioSynthesizer synth = (AudioSynthesizer) new SoftSynthesizer();
AudioInputStream stream;
Receiver recv;
try {
stream = synth.openStream(format, null);
recv = synth.getReceiver();
} catch (MidiUnavailableException e) {
// preserve the original exception as the cause
throw new IOException(e);
}
float divtype = seq.getDivisionType();
Track[] tracks = seq.getTracks();
// per-track cursor into each track's event list
int[] trackspos = new int[tracks.length];
// current tempo in microseconds per quarter note (default 120 BPM)
int mpq = 500000;
int seqres = seq.getResolution();
long lasttick = 0;
// elapsed time in microseconds
long curtime = 0;
while (true) {
// Merge step: pick the earliest pending event across all tracks.
MidiEvent selevent = null;
int seltrack = -1;
for (int i = 0; i < tracks.length; i++) {
int trackpos = trackspos[i];
Track track = tracks[i];
if (trackpos < track.size()) {
MidiEvent event = track.get(trackpos);
if (selevent == null || event.getTick() < selevent.getTick()) {
selevent = event;
seltrack = i;
}
}
}
if (seltrack == -1)
break;
trackspos[seltrack]++;
long tick = selevent.getTick();
// Convert ticks to microseconds: PPQ accumulates tempo-dependent
// deltas; SMPTE division types are absolute.
if (divtype == Sequence.PPQ)
curtime += ((tick - lasttick) * mpq) / seqres;
else
curtime = (long) ((tick * 1000000.0 * divtype) / seqres);
lasttick = tick;
MidiMessage msg = selevent.getMessage();
if (msg instanceof MetaMessage) {
if (divtype == Sequence.PPQ) {
// meta type 0x51 = set tempo (3 data bytes, big-endian)
if (((MetaMessage) msg).getType() == 0x51) {
byte[] data = ((MetaMessage) msg).getData();
if (data.length < 3) {
throw new UnsupportedAudioFileException();
}
mpq = ((data[0] & 0xff) << 16) | ((data[1] & 0xff) << 8) | (data[2] & 0xff);
}
}
} else {
// schedule the event on the synthesizer at its absolute time
recv.send(msg, curtime);
}
}
// Truncate the (otherwise endless) synthesizer stream to the sequence
// length in seconds plus 4 seconds for notes to decay.
long totallen = curtime / 1000000;
long len = (long) (stream.getFormat().getFrameRate() * (totallen + 4));
stream = new AudioInputStream(stream, stream.getFormat(), len);
return stream;
}
Aggregations