use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
the class PCMtoPCMCodec method getAudioInputStream.
/**
 * Obtains an audio input stream with the specified encoding from the given
 * audio input stream.
 */
public AudioInputStream getAudioInputStream(AudioFormat.Encoding targetEncoding, AudioInputStream sourceStream) {
    if (isConversionSupported(targetEncoding, sourceStream.getFormat())) {
        AudioFormat sourceFormat = sourceStream.getFormat();
        AudioFormat targetFormat = new AudioFormat(targetEncoding,
                sourceFormat.getSampleRate(),
                sourceFormat.getSampleSizeInBits(),
                sourceFormat.getChannels(),
                sourceFormat.getFrameSize(),
                sourceFormat.getFrameRate(),
                sourceFormat.isBigEndian());
        return getAudioInputStream(targetFormat, sourceStream);
    } else {
        throw new IllegalArgumentException("Unsupported conversion: "
                + sourceStream.getFormat().toString() + " to " + targetEncoding.toString());
    }
}
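Applications normally reach this codec indirectly: AudioSystem delegates to registered FormatConversionProviders such as PCMtoPCMCodec. A minimal sketch of that path ("input.wav" is a placeholder file name, not from the source above):

import java.io.File;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class PcmConversionDemo {
    public static void main(String[] args) throws Exception {
        // any PCM WAV file will do here
        AudioInputStream source = AudioSystem.getAudioInputStream(new File("input.wav"));
        // ask the codec chain for the same stream re-encoded as signed PCM;
        // sample rate, channels, and frame layout are preserved, exactly as
        // the PCMtoPCMCodec method above constructs its target format
        AudioInputStream signed = AudioSystem.getAudioInputStream(
                AudioFormat.Encoding.PCM_SIGNED, source);
        System.out.println("converted format: " + signed.getFormat());
        signed.close();
    }
}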
use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
the class WaveFloatFileReader method internal_getAudioFileFormat.
private AudioFileFormat internal_getAudioFileFormat(InputStream stream) throws UnsupportedAudioFileException, IOException {
    RIFFReader riffiterator = new RIFFReader(stream);
    if (!riffiterator.getFormat().equals("RIFF"))
        throw new UnsupportedAudioFileException();
    if (!riffiterator.getType().equals("WAVE"))
        throw new UnsupportedAudioFileException();
    boolean fmt_found = false;
    boolean data_found = false;
    int channels = 1;
    long samplerate = 1;
    int framesize = 1;
    int bits = 1;
    while (riffiterator.hasNextChunk()) {
        RIFFReader chunk = riffiterator.nextChunk();
        if (chunk.getFormat().equals("fmt ")) {
            fmt_found = true;
            int format = chunk.readUnsignedShort();
            if (format != 3) // WAVE_FORMAT_IEEE_FLOAT only
                throw new UnsupportedAudioFileException();
            channels = chunk.readUnsignedShort();
            samplerate = chunk.readUnsignedInt();
            /* framerate = */ chunk.readUnsignedInt();
            framesize = chunk.readUnsignedShort();
            bits = chunk.readUnsignedShort();
        }
        if (chunk.getFormat().equals("data")) {
            data_found = true;
            break;
        }
    }
    if (!fmt_found)
        throw new UnsupportedAudioFileException();
    if (!data_found)
        throw new UnsupportedAudioFileException();
    AudioFormat audioformat = new AudioFormat(Encoding.PCM_FLOAT, samplerate, bits, channels, framesize, samplerate, false);
    AudioFileFormat fileformat = new AudioFileFormat(AudioFileFormat.Type.WAVE, audioformat, AudioSystem.NOT_SPECIFIED);
    return fileformat;
}
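This reader is normally invoked through the AudioSystem service registry rather than directly. A sketch, assuming a JDK that registers WaveFloatFileReader and a placeholder 32-bit float WAVE file named float.wav:

import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioSystem;

public class FloatWaveInfoDemo {
    public static void main(String[] args) throws Exception {
        AudioFileFormat aff = AudioSystem.getAudioFileFormat(new File("float.wav"));
        // for a WAVE_FORMAT_IEEE_FLOAT file this reports PCM_FLOAT encoding and
        // the channel/rate/frame values parsed from the "fmt " chunk above
        System.out.println("encoding: " + aff.getFormat().getEncoding());
        // the parser above passes AudioSystem.NOT_SPECIFIED, so this prints -1
        System.out.println("frames:   " + aff.getFrameLength());
    }
}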
use of javax.sound.sampled.AudioFormat in project jdk8u_jdk by JetBrains.
the class WaveFloatFileWriter method write.
public void write(AudioInputStream stream, RIFFWriter writer) throws IOException {
    RIFFWriter fmt_chunk = writer.writeChunk("fmt ");
    AudioFormat format = stream.getFormat();
    // WAVE_FORMAT_IEEE_FLOAT
    fmt_chunk.writeUnsignedShort(3);
    fmt_chunk.writeUnsignedShort(format.getChannels());
    fmt_chunk.writeUnsignedInt((int) format.getSampleRate());
    fmt_chunk.writeUnsignedInt(((int) format.getFrameRate()) * format.getFrameSize());
    fmt_chunk.writeUnsignedShort(format.getFrameSize());
    fmt_chunk.writeUnsignedShort(format.getSampleSizeInBits());
    fmt_chunk.close();
    RIFFWriter data_chunk = writer.writeChunk("data");
    byte[] buff = new byte[1024];
    int len;
    while ((len = stream.read(buff, 0, buff.length)) != -1)
        data_chunk.write(buff, 0, len);
    data_chunk.close();
}
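The write(AudioInputStream, RIFFWriter) method is internal; application code would normally go through AudioSystem.write, which selects a writer for the stream's encoding. A minimal sketch, assuming a JDK whose service registry includes WaveFloatFileWriter ("silence.wav" is a placeholder output name):

import java.io.ByteArrayInputStream;
import java.io.File;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class FloatWaveWriteDemo {
    public static void main(String[] args) throws Exception {
        // one second of silence: 32-bit float, mono, 44.1 kHz, little-endian
        AudioFormat fmt = new AudioFormat(AudioFormat.Encoding.PCM_FLOAT,
                44100f, 32, 1, 4, 44100f, false);
        byte[] silence = new byte[44100 * 4];
        AudioInputStream in = new AudioInputStream(
                new ByteArrayInputStream(silence), fmt, 44100);
        // for a PCM_FLOAT stream this ends up in the chunk-writing code above
        AudioSystem.write(in, AudioFileFormat.Type.WAVE, new File("silence.wav"));
    }
}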
use of javax.sound.sampled.AudioFormat in project Minim by ddf.
the class JSMinim method getAudioRecordingStream.
public AudioRecordingStream getAudioRecordingStream(String filename, int bufferSize, boolean inMemory) {
    // TODO: deal with the case of wanting to have the file fully in memory
    AudioRecordingStream mstream = null;
    AudioInputStream ais = getAudioInputStream(filename);
    if (ais != null) {
        if (inMemory && ais.markSupported()) {
            ais.mark((int) ais.getFrameLength() * ais.getFormat().getFrameSize());
        }
        debug("Reading from " + ais.getClass().toString());
        debug("File format is: " + ais.getFormat().toString());
        AudioFormat format = ais.getFormat();
        // mp3 streams arrive in a custom format; they need to be converted to PCM
        if (format instanceof MpegAudioFormat) {
            AudioFormat baseFormat = format;
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                    baseFormat.getSampleRate(), 16,
                    baseFormat.getChannels(), baseFormat.getChannels() * 2,
                    baseFormat.getSampleRate(), false);
            // converts the stream to PCM audio from mp3 audio
            AudioInputStream decAis = getAudioInputStream(format, ais);
            // source data line is for sending the file audio out to the speakers
            SourceDataLine line = getSourceDataLine(format, bufferSize);
            if (decAis != null && line != null) {
                Map<String, Object> props = getID3Tags(filename);
                long lengthInMillis = -1;
                if (props.containsKey("duration")) {
                    Long dur = (Long) props.get("duration");
                    if (dur.longValue() > 0) {
                        lengthInMillis = dur.longValue() / 1000;
                    }
                }
                MP3MetaData meta = new MP3MetaData(filename, lengthInMillis, props);
                mstream = new JSMPEGAudioRecordingStream(this, meta, ais, decAis, line, bufferSize);
            }
        } else { // format instanceof MpegAudioFormat
            // source data line is for sending the file audio out to the speakers
            SourceDataLine line = getSourceDataLine(format, bufferSize);
            if (line != null) {
                long length = AudioUtils.frames2Millis(ais.getFrameLength(), format);
                BasicMetaData meta = new BasicMetaData(filename, length, ais.getFrameLength());
                mstream = new JSPCMAudioRecordingStream(this, meta, ais, line, bufferSize);
            }
        } // else
    } // ais != null
    return mstream;
}
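In normal use this provider sits behind the Minim facade rather than being called directly. A hedged sketch of direct use, assuming the ddf.minim.javasound / ddf.minim.spi package layout of the Minim source and Minim's reflective file-handler contract (an object exposing sketchPath(String) and createInput(String)); the FileHandler class and "song.mp3" below are illustrative, not from the source:

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import ddf.minim.javasound.JSMinim;
import ddf.minim.spi.AudioRecordingStream;

public class RecordingStreamDemo {
    // assumed handler contract: resolve a name to a path, open it as a stream
    static class FileHandler {
        public String sketchPath(String fileName) {
            return new File(fileName).getAbsolutePath();
        }
        public InputStream createInput(String fileName) {
            try {
                return new FileInputStream(fileName);
            } catch (Exception e) {
                return null;
            }
        }
    }

    public static void main(String[] args) {
        JSMinim js = new JSMinim(new FileHandler());
        // an mp3 exercises the MpegAudioFormat branch above, a WAV the PCM branch
        AudioRecordingStream stream = js.getAudioRecordingStream("song.mp3", 1024, false);
        if (stream != null) {
            System.out.println("length in millis: " + stream.getMetaData().length());
            stream.close();
        }
    }
}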
use of javax.sound.sampled.AudioFormat in project Minim by ddf.
the class JSMinim method getAudioRecording.
/** @deprecated */
public AudioRecording getAudioRecording(String filename) {
    AudioMetaData meta = null;
    AudioInputStream ais = getAudioInputStream(filename);
    byte[] samples;
    if (ais != null) {
        AudioFormat format = ais.getFormat();
        if (format instanceof MpegAudioFormat) {
            AudioFormat baseFormat = format;
            format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                    baseFormat.getSampleRate(), 16,
                    baseFormat.getChannels(), baseFormat.getChannels() * 2,
                    baseFormat.getSampleRate(), false);
            // converts the stream to PCM audio from mp3 audio
            ais = getAudioInputStream(format, ais);
            // get a map of properties so we can find out how long it is
            Map<String, Object> props = getID3Tags(filename);
            // there is a property called mp3.length.bytes, but that is
            // the length in bytes of the mp3 file, which will of course
            // be much shorter than the decoded version. so we use the
            // duration of the file to figure out how many bytes the
            // decoded file will be.
            long dur = ((Long) props.get("duration")).longValue();
            int toRead = (int) AudioUtils.millis2Bytes(dur / 1000, format);
            samples = loadByteAudio(ais, toRead);
            meta = new MP3MetaData(filename, dur / 1000, props);
        } else {
            samples = loadByteAudio(ais, (int) ais.getFrameLength() * format.getFrameSize());
            long length = AudioUtils.bytes2Millis(samples.length, format);
            meta = new BasicMetaData(filename, length, samples.length);
        }
        SourceDataLine line = getSourceDataLine(format, 2048);
        if (line != null) {
            return new JSAudioRecording(this, samples, line, meta);
        }
    }
    return null;
}
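The size computation above is worth spelling out: the ID3 "duration" property is in microseconds, so dur / 1000 yields milliseconds, and the decoded byte count follows from the PCM byte rate. A hedged arithmetic sketch (the byte-rate formula is an assumption about what AudioUtils.millis2Bytes computes, not taken from the Minim source), which also shows why loading the whole decoded file is costly and the method is deprecated:

public class DecodedSizeDemo {
    public static void main(String[] args) {
        long durMicros = 180_000_000L;   // 3 minutes, as an ID3 "duration" value (microseconds)
        long millis = durMicros / 1000;  // 180000 ms, matching dur / 1000 above
        // decoded byte rate: sampleRate * frameSize = 44100 * (2 channels * 2 bytes)
        int byteRate = 44100 * 4;
        long toRead = millis * byteRate / 1000;
        // about 31.75 MB: the entire decoded file is held in memory at once
        System.out.println(toRead + " bytes");
    }
}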