use of javax.sound.sampled.AudioInputStream in project Minim by ddf.
the class JSBufferedSampleRecorder method save.
/**
* Saves the audio in the internal buffer to a file using the current settings for
* file type and file name.
*/
public AudioRecordingStream save() {
    if (isRecording()) {
        system.error("You must stop recording before you can write to a file.");
    } else {
        int channels = format.getChannels();
        int length = left.capacity();
        int totalSamples = (buffers.size() / channels) * length;
        FloatSampleBuffer fsb = new FloatSampleBuffer(channels, totalSamples, format.getSampleRate());
        if (channels == 1) {
            for (int i = 0; i < buffers.size(); i++) {
                int offset = i * length;
                FloatBuffer fb = (FloatBuffer) buffers.get(i);
                fb.rewind();
                // copy all the floats in fb to the first channel
                // of fsb, starting at the index offset, and copy
                // the whole FloatBuffer over.
                fb.get(fsb.getChannel(0), offset, length);
            }
        } else {
            for (int i = 0; i < buffers.size(); i += 2) {
                int offset = (i / 2) * length;
                FloatBuffer fbL = (FloatBuffer) buffers.get(i);
                FloatBuffer fbR = (FloatBuffer) buffers.get(i + 1);
                fbL.rewind();
                fbL.get(fsb.getChannel(0), offset, length);
                fbR.rewind();
                fbR.get(fsb.getChannel(1), offset, length);
            }
        }
        int sampleFrames = fsb.getByteArrayBufferSize(format) / format.getFrameSize();
        ByteArrayInputStream bais = new ByteArrayInputStream(fsb.convertToByteArray(format));
        AudioInputStream ais = new AudioInputStream(bais, format, sampleFrames);
        if (AudioSystem.isFileTypeSupported(type, ais)) {
            File out = new File(name);
            try {
                AudioSystem.write(ais, type, out);
            } catch (IOException e) {
                system.error("AudioRecorder.save: Error attempting to save buffer to " + name + "\n" + e.getMessage());
            }
            if (out.length() == 0) {
                system.error("AudioRecorder.save: Error attempting to save buffer to " + name + ", the output file is empty.");
            }
        } else {
            system.error("AudioRecorder.save: Can't write " + type.toString() + " using format " + format.toString() + ".");
        }
    }
    String filePath = filePath();
    AudioInputStream ais = system.getAudioInputStream(filePath);
    SourceDataLine sdl = system.getSourceDataLine(ais.getFormat(), 1024);
    // this is fine because the recording will always be
    // in a raw format (WAV, AU, etc).
    long length = AudioUtils.frames2Millis(ais.getFrameLength(), format);
    BasicMetaData meta = new BasicMetaData(filePath, length, ais.getFrameLength());
    JSPCMAudioRecordingStream recording = new JSPCMAudioRecordingStream(system, meta, ais, sdl, 1024);
    return recording;
}
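From sketch code this save path is normally reached through Minim's public recording API rather than by calling JSBufferedSampleRecorder directly. A minimal Processing-style sketch of that flow is given below; the buffer size, file name, and key bindings are illustrative assumptions, not taken from the source above.

import ddf.minim.*;

Minim minim;
AudioInput in;
AudioRecorder recorder;

void setup() {
    size(200, 200);
    minim = new Minim(this);
    in = minim.getLineIn(Minim.STEREO, 1024);            // buffer size is an arbitrary choice
    recorder = minim.createRecorder(in, "session.wav");  // file name is a placeholder
}

void draw() {
    background(0);
}

void keyPressed() {
    if (key == 'r') {
        recorder.beginRecord();
    } else if (key == 's') {
        recorder.endRecord();  // recording must be stopped first, as the isRecording() check above enforces
        recorder.save();       // writes the buffered audio and returns a playable recording stream
    }
}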
use of javax.sound.sampled.AudioInputStream in project Minim by ddf.
the class MpegAudioFileReader method getAudioInputStream.
/**
* Returns AudioInputStream from url.
*/
public AudioInputStream getAudioInputStream(URL url) throws UnsupportedAudioFileException, IOException {
    system.debug("MpegAudioFileReader.getAudioInputStream(URL): begin");
    long lFileLengthInBytes = AudioSystem.NOT_SPECIFIED;
    URLConnection conn = url.openConnection();
    // Tell the shoutcast server (if any) that this SPI supports shoutcast streams.
    boolean isShout = false;
    int toRead = 4;
    byte[] head = new byte[toRead];
    conn.setRequestProperty("Icy-Metadata", "1");
    BufferedInputStream bInputStream = new BufferedInputStream(conn.getInputStream());
    bInputStream.mark(toRead);
    int read = bInputStream.read(head, 0, toRead);
    if ((read > 2) && (((head[0] == 'I') | (head[0] == 'i')) && ((head[1] == 'C') | (head[1] == 'c')) && ((head[2] == 'Y') | (head[2] == 'y'))))
        isShout = true;
    bInputStream.reset();
    InputStream inputStream = null;
    // Is it a shoutcast server?
    if (isShout == true) {
        // Yes
        IcyInputStream icyStream = new IcyInputStream(bInputStream);
        icyStream.addTagParseListener(IcyListener.getInstance());
        inputStream = icyStream;
    } else {
        // No, is it Icecast 2?
        String metaint = conn.getHeaderField("icy-metaint");
        if (metaint != null) {
            // Yes, it might be an icecast 2 mp3 stream.
            IcyInputStream icyStream = new IcyInputStream(bInputStream, metaint);
            icyStream.addTagParseListener(IcyListener.getInstance());
            inputStream = icyStream;
        } else {
            // No
            inputStream = bInputStream;
        }
    }
    AudioInputStream audioInputStream = null;
    try {
        audioInputStream = getAudioInputStream(inputStream, lFileLengthInBytes);
    } catch (UnsupportedAudioFileException e) {
        inputStream.close();
        throw e;
    } catch (IOException e) {
        inputStream.close();
        throw e;
    }
    system.debug("MpegAudioFileReader.getAudioInputStream(URL): end");
    return audioInputStream;
}
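The interesting part of this method is the ICY sniff: it peeks at the first bytes of the HTTP response using mark/reset so the same stream can later be handed to the decoder untouched. A standalone sketch of that idiom using only the standard library is shown below; the URL is a placeholder.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;

// Minimal sketch of the header-sniffing idiom used above, outside of any SPI.
public class IcySniff {

    static boolean looksLikeShoutcast(BufferedInputStream in) throws IOException {
        byte[] head = new byte[3];
        in.mark(head.length);
        int read = in.read(head, 0, head.length);
        in.reset(); // rewind so the caller still sees the full response body
        return read == 3
                && (head[0] == 'I' || head[0] == 'i')
                && (head[1] == 'C' || head[1] == 'c')
                && (head[2] == 'Y' || head[2] == 'y');
    }

    public static void main(String[] args) throws IOException {
        URLConnection conn = new URL("http://example.com/stream").openConnection(); // placeholder URL
        conn.setRequestProperty("Icy-Metadata", "1"); // ask the server to interleave metadata
        BufferedInputStream in = new BufferedInputStream(conn.getInputStream());
        System.out.println("shoutcast? " + looksLikeShoutcast(in));
        // ...the same 'in' would then be wrapped in an IcyInputStream or decoded directly.
        in.close();
    }
}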
use of javax.sound.sampled.AudioInputStream in project Minim by ddf.
the class JSMinim method getAudioInputStream.
/**
 * Obtains an AudioInputStream for the file or URL named by filename. Names
 * beginning with "http" are opened as URLs; anything else is loaded through
 * the sketch's createInput method and wrapped in a BufferedInputStream.
 *
 * @param filename the file name or URL to open
 * @return an AudioInputStream for the audio, or null if it could not be opened
 */
AudioInputStream getAudioInputStream(String filename) {
    AudioInputStream ais = null;
    BufferedInputStream bis = null;
    if (filename.startsWith("http")) {
        try {
            ais = getAudioInputStream(new URL(filename));
        } catch (MalformedURLException e) {
            error("Bad URL: " + e.getMessage());
        } catch (UnsupportedAudioFileException e) {
            error("URL is in an unsupported audio file format: " + e.getMessage());
        } catch (IOException e) {
            Minim.error("Error reading the URL: " + e.getMessage());
        }
    } else {
        try {
            InputStream is = (InputStream) createInput.invoke(fileLoader, filename);
            if (is != null) {
                debug("Base input stream is: " + is.toString());
                bis = new BufferedInputStream(is);
                ais = getAudioInputStream(bis);
                if (ais != null) {
                    // don't mark it like this because it means the entire
                    // file will be loaded into memory as it plays. this
                    // will cause out-of-memory problems with very large files.
                    // ais.mark((int)ais.available());
                    debug("Acquired AudioInputStream.\n" + "It is " + ais.getFrameLength() + " frames long.\n" + "Marking support: " + ais.markSupported());
                }
            } else {
                throw new FileNotFoundException(filename);
            }
        } catch (Exception e) {
            error(e.toString());
        }
    }
    return ais;
}
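The createInput.invoke(fileLoader, filename) call is a reflective hook: any object exposing a public createInput(String) method (such as a Processing PApplet) can act as the file loader without this class compiling against it. A hypothetical sketch of that wiring follows; all names in it are assumptions for illustration, not taken from the source above.

import java.io.InputStream;
import java.lang.reflect.Method;

// Hypothetical illustration of the reflective lookup behind
// createInput.invoke(fileLoader, filename).
public class ReflectiveLoader {
    private final Object fileLoader;
    private final Method createInput;

    public ReflectiveLoader(Object fileLoader) throws NoSuchMethodException {
        this.fileLoader = fileLoader;
        // look up a public createInput(String) method on whatever loader we were given
        this.createInput = fileLoader.getClass().getMethod("createInput", String.class);
    }

    public InputStream open(String filename) throws Exception {
        return (InputStream) createInput.invoke(fileLoader, filename);
    }
}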
use of javax.sound.sampled.AudioInputStream in project Minim by ddf.
the class JSMinim method getAudioSample.
public AudioSample getAudioSample(String filename, int bufferSize) {
    AudioInputStream ais = getAudioInputStream(filename);
    if (ais != null) {
        AudioMetaData meta = null;
        AudioFormat format = ais.getFormat();
        FloatSampleBuffer samples = null;
        if (format instanceof MpegAudioFormat) {
            AudioFormat baseFormat = format;
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
            // converts the stream to PCM audio from mp3 audio
            ais = getAudioInputStream(format, ais);
            // get a map of properties so we can find out how long it is
            Map<String, Object> props = getID3Tags(filename);
            // there is a property called mp3.length.bytes, but that is
            // the length in bytes of the mp3 file, which will of course
            // be much shorter than the decoded version. so we use the
            // duration of the file to figure out how many bytes the
            // decoded file will be.
            long dur = ((Long) props.get("duration")).longValue();
            int toRead = (int) AudioUtils.millis2Bytes(dur / 1000, format);
            samples = loadFloatAudio(ais, toRead);
            meta = new MP3MetaData(filename, dur / 1000, props);
        } else {
            samples = loadFloatAudio(ais, (int) ais.getFrameLength() * format.getFrameSize());
            long length = AudioUtils.frames2Millis(samples.getSampleCount(), format);
            meta = new BasicMetaData(filename, length, samples.getSampleCount());
        }
        AudioOut out = getAudioOutput(format.getChannels(), bufferSize, format.getSampleRate(), format.getSampleSizeInBits());
        if (out != null) {
            SampleSignal ssig = new SampleSignal(samples);
            out.setAudioSignal(ssig);
            return new JSAudioSample(meta, ssig, out);
        } else {
            error("Couldn't acquire an output.");
        }
    }
    return null;
}
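From a sketch, this code path is normally reached through Minim.loadSample. A minimal Processing example is sketched below; the file name and buffer size are placeholders.

import ddf.minim.*;

Minim minim;
AudioSample kick;

void setup() {
    size(200, 200);
    minim = new Minim(this);
    // 512 is an arbitrary buffer size; the file is assumed to sit in the sketch's data folder
    kick = minim.loadSample("kick.mp3", 512);
}

void draw() {
    background(0);
}

void mousePressed() {
    kick.trigger(); // plays the fully decoded, in-memory sample
}

Because getAudioSample decodes the whole file into a FloatSampleBuffer up front, triggering the sample is cheap, but memory use grows with the length of the file.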
use of javax.sound.sampled.AudioInputStream in project Minim by ddf.
the class MpegAudioFileReaderWorkaround method getAudioInputStream.
/**
 * Returns an AudioInputStream from the given url, sending the supplied
 * userAgent string as the User-Agent request header.
 */
public AudioInputStream getAudioInputStream(URL url, String userAgent) throws UnsupportedAudioFileException, IOException {
    system.debug("MpegAudioFileReaderWorkaround.getAudioInputStream(" + url.toString() + ", " + userAgent + "): begin");
    long lFileLengthInBytes = AudioSystem.NOT_SPECIFIED;
    URLConnection conn = url.openConnection();
    // Tell the shoutcast server (if any) that this SPI supports shoutcast streams.
    boolean isShout = false;
    int toRead = 4;
    byte[] head = new byte[toRead];
    if (userAgent != null)
        conn.setRequestProperty("User-Agent", userAgent);
    conn.setRequestProperty("Accept", "*/*");
    conn.setRequestProperty("Icy-Metadata", "1");
    conn.setRequestProperty("Connection", "close");
    system.debug("Base input stream is: " + conn.getInputStream().toString());
    BufferedInputStream bInputStream = new BufferedInputStream(conn.getInputStream());
    bInputStream.mark(toRead);
    int read = bInputStream.read(head, 0, toRead);
    if ((read > 2) && (((head[0] == 'I') | (head[0] == 'i')) && ((head[1] == 'C') | (head[1] == 'c')) && ((head[2] == 'Y') | (head[2] == 'y')))) {
        isShout = true;
    }
    bInputStream.reset();
    InputStream inputStream = null;
    // Is it a shoutcast server?
    if (isShout == true) {
        // Yes
        system.debug("URL is a shoutcast server.");
        IcyInputStream icyStream = new IcyInputStream(bInputStream);
        icyStream.addTagParseListener(IcyListener.getInstance());
        inputStream = icyStream;
    } else {
        // No, is it Icecast 2?
        String metaint = conn.getHeaderField("icy-metaint");
        if (metaint != null) {
            // Yes, it might be an icecast 2 mp3 stream.
            system.debug("URL is probably an icecast 2 mp3 stream");
            IcyInputStream icyStream = new IcyInputStream(bInputStream, metaint);
            icyStream.addTagParseListener(IcyListener.getInstance());
            inputStream = icyStream;
        } else {
            system.debug("URL is not shoutcast or icecast 2.");
            inputStream = bInputStream;
        }
    }
    AudioInputStream audioInputStream = null;
    try {
        system.debug("Attempting to get audioInputStream.");
        audioInputStream = getAudioInputStream(inputStream, lFileLengthInBytes);
    } catch (UnsupportedAudioFileException e) {
        inputStream.close();
        throw e;
    } catch (IOException e) {
        inputStream.close();
        throw e;
    }
    system.debug("MpegAudioFileReaderWorkaround.getAudioInputStream(URL,String): end");
    return audioInputStream;
}
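Compared with the MpegAudioFileReader method shown earlier, the substantive difference is the extra request headers, presumably to satisfy stream hosts that reject requests lacking a browser-like User-Agent. A standalone illustration of just that setup is sketched below; the URL and user-agent string are placeholders.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;

// Standalone illustration of the request setup that distinguishes the workaround reader.
public class UserAgentConnect {
    public static void main(String[] args) throws IOException {
        URLConnection conn = new URL("http://example.com/stream.mp3").openConnection(); // placeholder
        conn.setRequestProperty("User-Agent", "Mozilla/5.0"); // placeholder user agent
        conn.setRequestProperty("Accept", "*/*");
        conn.setRequestProperty("Icy-Metadata", "1");
        conn.setRequestProperty("Connection", "close");
        BufferedInputStream in = new BufferedInputStream(conn.getInputStream());
        // ...the stream would then be sniffed for ICY headers and decoded exactly as above.
        in.close();
    }
}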