use of javax.sound.sampled.AudioFileFormat in project bigbluebutton by bigbluebutton.
the class AudioSender method main.
// ******************************* MAIN *******************************
/** The main method. */
public static void main(String[] args) {
String daddr = null;
int dport = 0;
int payload_type = 0;
int tone_freq = 500;
double tone_amp = 1.0;
int sample_rate = 8000;
int sample_size = 1;
int frame_size = 500;
// frame_rate = sample_rate / (frame_size / sample_size)
int frame_rate;
// byte_rate = frame_rate * frame_size = 8000
boolean linear_signed = false;
boolean pcmu = false;
boolean big_endian = false;
String filename = null;
boolean sound = true;
boolean help = true;
for (int i = 0; i < args.length; i++) {
if (args[i].equals("-h")) {
break;
}
if (i == 0 && args.length > 1) {
daddr = args[i];
dport = Integer.parseInt(args[++i]);
help = false;
continue;
}
if (args[i].equals("-p") && args.length > (i + 1)) {
payload_type = Integer.parseInt(args[++i]);
continue;
}
if (args[i].equals("-F") && args.length > (i + 1)) {
sound = false;
filename = args[++i];
continue;
}
if (args[i].equals("-T") && args.length > (i + 1)) {
sound = false;
tone_freq = Integer.parseInt(args[++i]);
continue;
}
if (args[i].equals("-A") && args.length > (i + 1)) {
tone_amp = Double.parseDouble(args[++i]);
continue;
}
if (args[i].equals("-S") && args.length > (i + 2)) {
sample_rate = Integer.parseInt(args[++i]);
sample_size = Integer.parseInt(args[++i]);
continue;
}
if (args[i].equals("-L") && args.length > (i + 1)) {
frame_size = Integer.parseInt(args[++i]);
continue;
}
if (args[i].equals("-Z")) {
linear_signed = true;
continue;
}
if (args[i].equals("-U")) {
pcmu = true;
continue;
}
if (args[i].equals("-E")) {
big_endian = true;
continue;
}
// else, do:
System.out.println("unrecognized param '" + args[i] + "'\n");
help = true;
}
if (help) {
System.out.println("usage:\n java AudioSender <dest_addr> <dest_port> [options]");
System.out.println(" options:");
System.out.println(" -h this help");
System.out.println(" -p <type> payload type");
System.out.println(" -F <audio_file> sends an audio file");
System.out.println(" -T <frequency> sends a tone of given frequency [Hz]");
System.out.println(" -A <amplitude> sets an amplitude factor [0:1]");
System.out.println(" -S <rate> <size> sample rate [B/s], and size [B]");
System.out.println(" -L <size> frame size");
System.out.println(" -Z uses PCM linear signed format (linear unsigned is used as default)");
System.out.println(" -U uses PCMU format");
System.out.println(" -E uses big endian format");
System.exit(0);
}
frame_rate = sample_rate / (frame_size / sample_size);
AudioFormat.Encoding codec;
if (pcmu)
codec = AudioFormat.Encoding.ULAW;
else if (linear_signed)
codec = AudioFormat.Encoding.PCM_SIGNED;
else
// default
codec = AudioFormat.Encoding.PCM_UNSIGNED;
int tone_codec = ToneInputStream.PCM_LINEAR_UNSIGNED;
if (linear_signed)
tone_codec = ToneInputStream.PCM_LINEAR_SIGNED;
try {
RtpStreamSender sender;
AudioInput audio_input = null;
if (sound)
AudioInput.initAudioLine();
if (sound) {
AudioFormat format = new AudioFormat(codec, sample_rate, 8 * sample_size, 1, sample_size, sample_rate, big_endian);
System.out.println("System audio format: " + format);
audio_input = new AudioInput(format);
sender = new RtpStreamSender(audio_input.getInputStream(), false, payload_type, frame_rate, frame_size, daddr, dport);
} else if (filename != null) {
File file = new File(filename);
if (filename.indexOf(".wav") > 0) {
AudioFileFormat format = AudioSystem.getAudioFileFormat(file);
System.out.println("File audio format: " + format);
AudioInputStream audio_input_stream = AudioSystem.getAudioInputStream(file);
sender = new RtpStreamSender(audio_input_stream, true, payload_type, frame_rate, frame_size, daddr, dport);
} else {
FileInputStream input_stream = new FileInputStream(file);
sender = new RtpStreamSender(input_stream, true, payload_type, frame_rate, frame_size, daddr, dport);
}
} else {
ToneInputStream tone = new ToneInputStream(tone_freq, tone_amp, sample_rate, sample_size, tone_codec, big_endian);
sender = new RtpStreamSender(tone, true, payload_type, frame_rate, frame_size, daddr, dport);
}
if (sender != null) {
sender.start();
if (sound)
audio_input.play();
System.out.println("Press 'Return' to stop");
System.in.read();
sender.halt();
if (sound)
audio_input.stop();
if (sound)
AudioInput.closeAudioLine();
} else {
System.out.println("Error creating the rtp stream.");
}
} catch (Exception e) {
e.printStackTrace();
}
}
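For reference, a minimal invocation sketch, assuming AudioSender is visible on the classpath and an RTP receiver is listening on the destination port; the address, port, tone frequency and amplitude below are placeholders only:
// Hypothetical launcher: send a 440 Hz test tone as RTP to 127.0.0.1:4000 (payload type defaults to 0).
public class AudioSenderDemo {
    public static void main(String[] args) {
        // equivalent to: java AudioSender 127.0.0.1 4000 -T 440 -A 0.5
        AudioSender.main(new String[] { "127.0.0.1", "4000", "-T", "440", "-A", "0.5" });
    }
}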
use of javax.sound.sampled.AudioFileFormat in project ACS by ACS-Community.
the class AlarmSound method dumpAudioInformation.
/**
* Dump info about supported audio file types, mixers and so on...
* <P>
* This method is useful while updating the audio files.
*/
private void dumpAudioInformation() {
// Java supported file types
AudioFileFormat.Type[] fileTypes = AudioSystem.getAudioFileTypes();
if (fileTypes == null || fileTypes.length == 0) {
System.out.println("No audio file types supported.");
} else {
for (AudioFileFormat.Type type : fileTypes) {
System.out.println(type.toString() + ", extension " + type.getExtension());
}
}
Mixer.Info[] mixerInfos = AudioSystem.getMixerInfo();
System.out.println("Mixers found: " + mixerInfos.length);
for (Mixer.Info mi : mixerInfos) {
System.out.println("\tMixer " + mi.getName() + ": " + mi.getVendor() + ", " + mi.getDescription());
}
// Dump info about the alarm files
for (URL url : soundURLs) {
AudioFileFormat format = null;
try {
format = AudioSystem.getAudioFileFormat(url);
} catch (IOException ioe) {
System.err.println("Error " + ioe.getMessage() + " accessing URL " + url.toString());
continue;
} catch (UnsupportedAudioFileException ue) {
System.err.println("Unsupported audio format for " + url + " (" + ue.getMessage() + ")");
// skip to the next URL: format is still null here and must not be dereferenced below
continue;
}
System.out.println("Properties of " + url);
System.out.println("\tAudio file type " + format.getType().toString());
System.out.println("\tIs file type supported: " + AudioSystem.isFileTypeSupported(format.getType()));
System.out.println("\tLength in byes " + format.getByteLength());
Map<String, Object> props = format.properties();
Set<String> keys = props.keySet();
for (String str : keys) {
System.out.println("\t[" + str + ", " + props.get(str).toString() + "]");
}
AudioFormat aFormat = format.getFormat();
System.out.println("\tEncoding " + aFormat.getEncoding().toString());
System.out.print("\tByte order ");
if (aFormat.isBigEndian()) {
System.out.println("big endian");
} else {
System.out.println("little endian");
}
System.out.println("\tSample rate: " + aFormat.getSampleRate());
System.out.println("\tNum. of bits of a sample: " + aFormat.getSampleSizeInBits());
System.out.println("\tNum. of channels: " + aFormat.getChannels());
}
}
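The same per-URL query can be done in isolation; a minimal sketch, assuming a hypothetical resource path on the classpath (both checked exceptions must be handled, as above):
URL url = AlarmSound.class.getResource("/alarmsounds/alarm1.wav"); // hypothetical resource path
try {
    AudioFileFormat fmt = AudioSystem.getAudioFileFormat(url);
    System.out.println(fmt.getType() + ", " + fmt.getByteLength() + " bytes, " + fmt.getFormat());
} catch (IOException | UnsupportedAudioFileException e) {
    System.err.println("Cannot inspect " + url + ": " + e.getMessage());
}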
use of javax.sound.sampled.AudioFileFormat in project JMRI by JMRI.
the class SoundUtil method bufferFromFile.
public static byte[] bufferFromFile(String filename, float sampleRate, int sampleSizeInBits, int channels, boolean signed, boolean bigEndian) throws java.io.IOException, javax.sound.sampled.UnsupportedAudioFileException {
File sourceFile = new File(filename);
// Get the type of the source file. We need this information
// later to write the audio data to a file of the same type.
AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(sourceFile);
//AudioFileFormat.Type targetFileType = fileFormat.getType();
AudioFormat audioFormat = fileFormat.getFormat();
// get desired output format
AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
// get a conversion stream
// (Errors not checked yet)
AudioInputStream stream = AudioSystem.getAudioInputStream(sourceFile);
AudioInputStream inputAIS = AudioSystem.getAudioInputStream(format, stream);
// Read the audio data into a memory buffer.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int nBufferSize = BUFFER_LENGTH * audioFormat.getFrameSize();
byte[] abBuffer = new byte[nBufferSize];
while (true) {
if (log.isDebugEnabled()) {
log.debug("trying to read (bytes): " + abBuffer.length);
}
int nBytesRead = inputAIS.read(abBuffer);
if (log.isDebugEnabled()) {
log.debug("read (bytes): " + nBytesRead);
}
if (nBytesRead == -1) {
break;
}
baos.write(abBuffer, 0, nBytesRead);
}
// Create byte array
byte[] abAudioData = baos.toByteArray();
return abAudioData;
}
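A usage sketch of the method above; the file path and target format values are placeholders, and the caller must handle or declare IOException and UnsupportedAudioFileException:
// Decode a WAV file into 8 kHz, 8-bit, mono, signed, little-endian PCM bytes (illustrative values).
byte[] pcm = SoundUtil.bufferFromFile("resources/sounds/bell.wav", 8000.0f, 8, 1, true, false);
System.out.println("Decoded " + pcm.length + " bytes of PCM data");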
use of javax.sound.sampled.AudioFileFormat in project jdk8u_jdk by JetBrains.
the class WaveFloatFileReader method getAudioInputStream.
public AudioInputStream getAudioInputStream(InputStream stream) throws UnsupportedAudioFileException, IOException {
AudioFileFormat format = getAudioFileFormat(stream);
RIFFReader riffiterator = new RIFFReader(stream);
if (!riffiterator.getFormat().equals("RIFF"))
throw new UnsupportedAudioFileException();
if (!riffiterator.getType().equals("WAVE"))
throw new UnsupportedAudioFileException();
while (riffiterator.hasNextChunk()) {
RIFFReader chunk = riffiterator.nextChunk();
if (chunk.getFormat().equals("data")) {
return new AudioInputStream(chunk, format.getFormat(), chunk.getSize());
}
}
throw new UnsupportedAudioFileException();
}
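In practice this reader is reached through the javax.sound.sampled service-provider chain rather than instantiated directly; a minimal sketch with a placeholder file name (checked exceptions omitted for brevity):
AudioInputStream ais = AudioSystem.getAudioInputStream(new File("float32.wav")); // placeholder file
AudioFormat fmt = ais.getFormat();
System.out.println(fmt.getEncoding() + " @ " + fmt.getSampleRate() + " Hz, "
        + fmt.getSampleSizeInBits() + "-bit, " + ais.getFrameLength() + " frames");
ais.close();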
use of javax.sound.sampled.AudioFileFormat in project jdk8u_jdk by JetBrains.
the class AuFileReader method getAudioInputStream.
/**
* Obtains an audio stream from the input stream provided. The stream must
* point to valid audio file data. In general, audio file providers may
* need to read some data from the stream before determining whether they
* support it. These parsers must
* be able to mark the stream, read enough data to determine whether they
* support the stream, and, if not, reset the stream's read pointer to its original
* position. If the input stream does not support this, this method may fail
* with an IOException.
* @param stream the input stream from which the <code>AudioInputStream</code> should be
* constructed
* @return an <code>AudioInputStream</code> object based on the audio file data contained
* in the input stream.
* @throws UnsupportedAudioFileException if the stream does not point to valid audio
* file data recognized by the system
* @throws IOException if an I/O exception occurs
* @see InputStream#markSupported
* @see InputStream#mark
*/
public AudioInputStream getAudioInputStream(InputStream stream) throws UnsupportedAudioFileException, IOException {
DataInputStream dis = null;
int headerSize;
AudioFileFormat fileFormat = null;
AudioFormat format = null;
// throws UnsupportedAudioFileException, IOException
fileFormat = getAudioFileFormat(stream);
// if we passed this call, we have an AU file.
format = fileFormat.getFormat();
dis = new DataInputStream(stream);
// now seek past the header
// magic
dis.readInt();
headerSize = (format.isBigEndian() == true ? dis.readInt() : rllong(dis));
dis.skipBytes(headerSize - 8);
return new AudioInputStream(dis, format, fileFormat.getFrameLength());
}
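Because the reader may need to rewind after probing the header, a raw FileInputStream (which does not support mark/reset) is normally wrapped in a BufferedInputStream before reaching this provider; a minimal sketch with a placeholder file name:
InputStream in = new BufferedInputStream(new FileInputStream("sample.au")); // placeholder file
AudioInputStream ais = AudioSystem.getAudioInputStream(in); // may throw UnsupportedAudioFileException or IOException
System.out.println("AU format: " + ais.getFormat() + ", frames: " + ais.getFrameLength());
ais.close();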