An example use of javax.sound.sampled.AudioInputStream in the project jdk8u_jdk by JetBrains: the getAudioInputStream method of the WaveExtensibleFileReader class.
/**
 * Obtains an audio input stream for the WAVE data held in the given stream.
 *
 * @param stream the input stream containing RIFF/WAVE data
 * @return an AudioInputStream reading from the file's "data" chunk
 * @throws UnsupportedAudioFileException if the stream is not a RIFF/WAVE
 *         file or contains no "data" chunk
 * @throws IOException if an I/O error occurs while reading
 */
public AudioInputStream getAudioInputStream(InputStream stream) throws UnsupportedAudioFileException, IOException {
    // Determine the audio format first; this also validates the header.
    AudioFileFormat fileFormat = getAudioFileFormat(stream);
    RIFFReader riff = new RIFFReader(stream);
    // The container must be a RIFF file of type WAVE.
    boolean isRiffWave = riff.getFormat().equals("RIFF") && riff.getType().equals("WAVE");
    if (!isRiffWave)
        throw new UnsupportedAudioFileException();
    // Scan the chunks until the "data" chunk is found and wrap it.
    while (riff.hasNextChunk()) {
        RIFFReader chunk = riff.nextChunk();
        if (chunk.getFormat().equals("data"))
            return new AudioInputStream(chunk, fileFormat.getFormat(), chunk.getSize());
    }
    // No "data" chunk means the file cannot be played.
    throw new UnsupportedAudioFileException();
}
An example use of javax.sound.sampled.AudioInputStream in the project jdk8u_jdk by JetBrains: the close method of the SoftMixingMixer class.
/**
 * Closes this mixer and releases its resources.
 * Fires a CLOSE LineEvent, stops the background audio pusher thread,
 * closes the main mixer, and drains and closes the source data line.
 */
public void close() {
// Nothing to do if the mixer was never opened or is already closed.
if (!isOpen())
return;
// Notify registered line listeners that the mixer is closing.
sendEvent(new LineEvent(this, LineEvent.Type.CLOSE, AudioSystem.NOT_SPECIFIED));
// Detach the pusher and its stream while holding the lock, but defer
// the actual stop/close until after the lock is released (see below).
SoftAudioPusher pusher_to_be_closed = null;
AudioInputStream pusher_stream_to_be_closed = null;
synchronized (control_mutex) {
if (pusher != null) {
pusher_to_be_closed = pusher;
pusher_stream_to_be_closed = pusher_stream;
pusher = null;
pusher_stream = null;
}
}
if (pusher_to_be_closed != null) {
// Pusher must not be closed synchronized against control_mutex
// this may result in synchronized conflict between pusher and
// current thread.
pusher_to_be_closed.stop();
try {
pusher_stream_to_be_closed.close();
} catch (IOException e) {
// Best-effort close; nothing to do beyond reporting the failure.
e.printStackTrace();
}
}
// The pusher is down; it is now safe to re-take the lock for teardown.
synchronized (control_mutex) {
if (mainmixer != null)
mainmixer.close();
open = false;
if (sourceDataLine != null) {
// Drain so any queued audio plays out before the line closes.
sourceDataLine.drain();
sourceDataLine.close();
sourceDataLine = null;
}
}
}
An example use of javax.sound.sampled.AudioInputStream in the project jdk8u_jdk by JetBrains: the getConvertedStream method of the PCMtoPCMCodec class.
/**
 * Opens the codec with the specified parameters.
 *
 * @param outputFormat desired data format of the stream after processing
 * @param stream stream from which data to be processed should be read
 * @return stream from which processed data may be read; the input stream
 *         itself when its format already matches {@code outputFormat}
 * @throws IllegalArgumentException if the format combination supplied is
 *         not supported.
 */
private AudioInputStream getConvertedStream(AudioFormat outputFormat, AudioInputStream stream) {
    AudioFormat inputFormat = stream.getFormat();
    if (inputFormat.matches(outputFormat)) {
        // Formats already match: no conversion needed, pass through.
        return stream;
    }
    // Allocate the working buffer used while converting between the two
    // PCM encodings, then wrap the source in a converting stream.
    tempBuffer = new byte[tempBufferSize];
    return new PCMtoPCMCodecStream(stream, outputFormat);
}
An example use of javax.sound.sampled.AudioInputStream in the project Minim by ddf: the save method of the JSStreamingSampleRecorder class.
/**
* Finishes the recording process by closing the file.
*/
/**
 * Finishes the recording process by closing the file, then reopens the
 * saved file as a playable recording stream.
 *
 * @return an AudioRecordingStream over the recorded file, or null if the
 *         saved file could not be reopened
 */
public AudioRecordingStream save() {
    try {
        aos.close();
    } catch (IOException e) {
        Minim.error("AudioRecorder.save: An error occurred when trying to save the file:\n" + e.getMessage());
    }
    String filePath = filePath();
    AudioInputStream ais = system.getAudioInputStream(filePath);
    // getAudioInputStream reports its own error and returns null on
    // failure; guard here so we do not NPE on ais.getFormat() below.
    if (ais == null) {
        Minim.error("AudioRecorder.save: Unable to reopen the saved file " + filePath);
        return null;
    }
    SourceDataLine sdl = system.getSourceDataLine(ais.getFormat(), 1024);
    // this is fine because the recording will always be
    // in a raw format (WAV, AU, etc).
    long length = AudioUtils.frames2Millis(ais.getFrameLength(), format);
    BasicMetaData meta = new BasicMetaData(filePath, length, ais.getFrameLength());
    return new JSPCMAudioRecordingStream(system, meta, ais, sdl, 1024);
}
An example use of javax.sound.sampled.AudioInputStream in the project Minim by ddf: the getAudioRecordingStream method of the JSMinim class.
/**
 * Creates a recording stream for the named audio file, decoding MP3 data
 * to PCM when necessary.
 *
 * @param filename the file to open
 * @param bufferSize buffer size, in sample frames, for reading/playback
 * @param inMemory whether the file should be buffered fully in memory
 * @return an AudioRecordingStream over the file, or null if the file
 *         could not be opened
 */
public AudioRecordingStream getAudioRecordingStream(String filename, int bufferSize, boolean inMemory) {
// TODO: deal with the case of wanting to have the file fully in memory
AudioRecordingStream mstream = null;
AudioInputStream ais = getAudioInputStream(filename);
if (ais != null) {
if (inMemory && ais.markSupported()) {
// Mark the entire stream (frames * frame size bytes) so it can be
// reset later for in-memory playback.
ais.mark((int) ais.getFrameLength() * ais.getFormat().getFrameSize());
}
debug("Reading from " + ais.getClass().toString());
debug("File format is: " + ais.getFormat().toString());
AudioFormat format = ais.getFormat();
// they need to be converted to PCM
if (format instanceof MpegAudioFormat) {
// Build a 16-bit signed PCM format matching the MP3's sample rate
// and channel count (frame size = channels * 2 bytes).
AudioFormat baseFormat = format;
format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
// converts the stream to PCM audio from mp3 audio
AudioInputStream decAis = getAudioInputStream(format, ais);
// source data line is for sending the file audio out to the
// speakers
SourceDataLine line = getSourceDataLine(format, bufferSize);
if (decAis != null && line != null) {
// Pull the track length (microseconds in the tag) from the ID3
// metadata when available; -1 signals "unknown length".
Map<String, Object> props = getID3Tags(filename);
long lengthInMillis = -1;
if (props.containsKey("duration")) {
Long dur = (Long) props.get("duration");
if (dur.longValue() > 0) {
lengthInMillis = dur.longValue() / 1000;
}
}
MP3MetaData meta = new MP3MetaData(filename, lengthInMillis, props);
mstream = new JSMPEGAudioRecordingStream(this, meta, ais, decAis, line, bufferSize);
}
} else // already PCM (not an MpegAudioFormat): no decoding needed
{
// source data line is for sending the file audio out to the
// speakers
SourceDataLine line = getSourceDataLine(format, bufferSize);
if (line != null) {
// Length in milliseconds derived directly from the frame count.
long length = AudioUtils.frames2Millis(ais.getFrameLength(), format);
BasicMetaData meta = new BasicMetaData(filename, length, ais.getFrameLength());
mstream = new JSPCMAudioRecordingStream(this, meta, ais, line, bufferSize);
}
}
// end: PCM branch
}
// end: ais != null
return mstream;
}
Aggregations