use of javax.sound.sampled.SourceDataLine in project jdk8u_jdk by JetBrains.
the class DataLine_ArrayIndexOutOfBounds method testSDL.
static void testSDL(Mixer mixer, Scenario scenario) {
    log(" Testing SDL (scenario: " + scenario + ")...");
    Line.Info linfo = new Line.Info(SourceDataLine.class);
    SourceDataLine line = null;
    try {
        line = (SourceDataLine) mixer.getLine(linfo);
        log(" got line: " + line);
        log(" open...");
        line.open();
    } catch (IllegalArgumentException ex) {
        log(" unsupported (IllegalArgumentException)");
        return;
    } catch (LineUnavailableException ex) {
        log(" unavailable: " + ex);
        return;
    }
    total++;
    log(" start...");
    line.start();
    AsyncLineStopper lineStopper = new AsyncLineStopper(line, STOPPER_DELAY);
    int offset = scenario.getBufferOffset(line);
    int len = scenario.getBufferLength(line);
    // ensure len represents an integral number of frames
    len -= len % line.getFormat().getFrameSize();
    log(" write...");
    lineStopper.schedule();
    try {
        line.write(buffer, offset, len);
        log(" ERROR: didn't get ArrayIndexOutOfBoundsException");
        failed++;
    } catch (ArrayIndexOutOfBoundsException ex) {
        log(" OK: got ArrayIndexOutOfBoundsException: " + ex);
    }
    lineStopper.force();
}
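The test depends on an AsyncLineStopper helper that stops the line from another thread after STOPPER_DELAY, so a write() over a valid buffer region cannot block the test forever. The helper itself is not shown above; the following is a minimal sketch of one plausible implementation (the class shape, the schedule()/force() behavior, and the daemon-thread choice are assumptions inferred from how testSDL uses it, not the actual JDK test code):

import javax.sound.sampled.DataLine;

class AsyncLineStopper implements Runnable {

    private final DataLine line;
    private final long delayMs;
    private final Thread thread;

    AsyncLineStopper(DataLine line, long delayMs) {
        this.line = line;
        this.delayMs = delayMs;
        this.thread = new Thread(this, "AsyncLineStopper");
        this.thread.setDaemon(true);
    }

    // arm the stopper; the line will be stopped after delayMs
    void schedule() {
        thread.start();
    }

    // stop the line immediately (used for cleanup once write() has returned)
    void force() {
        thread.interrupt();
    }

    @Override
    public void run() {
        try {
            Thread.sleep(delayMs);
        } catch (InterruptedException ignored) {
            // force() interrupts the sleep; fall through and stop now
        }
        line.stop();  // unblocks any pending write()
        line.close();
    }
}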
use of javax.sound.sampled.SourceDataLine in project jdk8u_jdk by JetBrains.
the class SoftAudioPusher method run.
public void run() {
    byte[] buffer = SoftAudioPusher.this.buffer;
    AudioInputStream ais = SoftAudioPusher.this.ais;
    SourceDataLine sourceDataLine = SoftAudioPusher.this.sourceDataLine;
    try {
        while (active) {
            // Read from audio source
            int count = ais.read(buffer);
            if (count < 0)
                break;
            // Write byte buffer to source output
            sourceDataLine.write(buffer, 0, count);
        }
    } catch (IOException e) {
        active = false;
        //e.printStackTrace();
    }
}
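SoftAudioPusher only contains the pump loop; opening and starting the line happens elsewhere in Gervill. As a self-contained illustration of the same pattern, the sketch below wires an identical read/write loop to a line it opens itself (the PusherDemo class and its buffer sizing are assumptions for illustration, not Gervill code):

import javax.sound.sampled.*;
import java.io.IOException;

public class PusherDemo {

    public static void play(AudioInputStream ais) throws LineUnavailableException {
        AudioFormat format = ais.getFormat();
        SourceDataLine line = AudioSystem.getSourceDataLine(format);
        line.open(format);
        line.start();
        // use half the line's buffer per write, kept frame-aligned
        int bufSize = line.getBufferSize() / 2;
        bufSize -= bufSize % format.getFrameSize();
        byte[] buffer = new byte[bufSize];
        Thread pusher = new Thread(() -> {
            try {
                int count;
                while ((count = ais.read(buffer)) >= 0) {
                    line.write(buffer, 0, count); // blocks until the line has room
                }
            } catch (IOException ignored) {
            } finally {
                line.drain(); // let queued audio finish before closing
                line.close();
            }
        }, "audio-pusher");
        pusher.setDaemon(false);
        pusher.start();
    }
}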
use of javax.sound.sampled.SourceDataLine in project JMRI by JMRI.
the class Sound method playSoundBuffer.
/**
 * Play a sound from a buffer
 *
 * @param wavData data to play
 */
public static void playSoundBuffer(byte[] wavData) {
    // characteristics of the buffer (hardcoded rather than read from the data)
    float sampleRate = 11200.0f;
    int sampleSizeInBits = 8;
    int channels = 1;
    boolean signed = (sampleSizeInBits > 8);
    boolean bigEndian = true;
    AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    SourceDataLine line;
    // format is an AudioFormat object
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    if (!AudioSystem.isLineSupported(info)) {
        // Handle the error.
        log.warn("line not supported: " + info);
        return;
    }
    // Obtain and open the line.
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
    } catch (LineUnavailableException ex) {
        // Handle the error.
        log.error("error opening line: " + ex);
        return;
    }
    line.start();
    // write(byte[] b, int off, int len)
    line.write(wavData, 0, wavData.length);
}
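Note that write() returns once the last chunk has been queued on the line, so playSoundBuffer can return (and its caller can exit) before the sound finishes, and the line is never closed. A variant that blocks until playback completes and then releases the line might look like the following sketch (the class and method names are hypothetical; the format constants mirror the snippet above, and the drain()/stop()/try-with-resources handling is the addition):

import javax.sound.sampled.*;

public final class SoundDemo {

    public static void playAndRelease(byte[] wavData) {
        AudioFormat format = new AudioFormat(11200.0f, 8, 1, false, true);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        if (!AudioSystem.isLineSupported(info)) {
            return; // no mixer can supply this line
        }
        // Line extends AutoCloseable, so try-with-resources releases it
        try (SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info)) {
            line.open(format);
            line.start();
            line.write(wavData, 0, wavData.length);
            line.drain(); // block until all queued data has played
            line.stop();
        } catch (LineUnavailableException ex) {
            // line could not be obtained or opened
        }
    }
}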
use of javax.sound.sampled.SourceDataLine in project Minim by ddf.
the class JSBufferedSampleRecorder method save.
/**
 * Saves the audio in the internal buffer to a file using the current settings for
 * file type and file name.
 */
public AudioRecordingStream save() {
    if (isRecording()) {
        system.error("You must stop recording before you can write to a file.");
    } else {
        int channels = format.getChannels();
        int length = left.capacity();
        int totalSamples = (buffers.size() / channels) * length;
        FloatSampleBuffer fsb = new FloatSampleBuffer(channels, totalSamples, format.getSampleRate());
        if (channels == 1) {
            for (int i = 0; i < buffers.size(); i++) {
                int offset = i * length;
                FloatBuffer fb = (FloatBuffer) buffers.get(i);
                fb.rewind();
                // copy all the floats in fb to the first channel
                // of fsb, starting at the index offset, and copy
                // the whole FloatBuffer over.
                fb.get(fsb.getChannel(0), offset, length);
            }
        } else {
            for (int i = 0; i < buffers.size(); i += 2) {
                int offset = (i / 2) * length;
                FloatBuffer fbL = (FloatBuffer) buffers.get(i);
                FloatBuffer fbR = (FloatBuffer) buffers.get(i + 1);
                fbL.rewind();
                fbL.get(fsb.getChannel(0), offset, length);
                fbR.rewind();
                fbR.get(fsb.getChannel(1), offset, length);
            }
        }
        int sampleFrames = fsb.getByteArrayBufferSize(format) / format.getFrameSize();
        ByteArrayInputStream bais = new ByteArrayInputStream(fsb.convertToByteArray(format));
        AudioInputStream ais = new AudioInputStream(bais, format, sampleFrames);
        if (AudioSystem.isFileTypeSupported(type, ais)) {
            File out = new File(name);
            try {
                AudioSystem.write(ais, type, out);
            } catch (IOException e) {
                system.error("AudioRecorder.save: Error attempting to save buffer to " + name + "\n" + e.getMessage());
            }
            if (out.length() == 0) {
                system.error("AudioRecorder.save: Error attempting to save buffer to " + name + ", the output file is empty.");
            }
        } else {
            system.error("AudioRecorder.save: Can't write " + type.toString() + " using format " + format.toString() + ".");
        }
    }
    String filePath = filePath();
    AudioInputStream ais = system.getAudioInputStream(filePath);
    SourceDataLine sdl = system.getSourceDataLine(ais.getFormat(), 1024);
    // this is fine because the recording will always be
    // in a raw format (WAV, AU, etc).
    long length = AudioUtils.frames2Millis(ais.getFrameLength(), format);
    BasicMetaData meta = new BasicMetaData(filePath, length, ais.getFrameLength());
    JSPCMAudioRecordingStream recording = new JSPCMAudioRecordingStream(system, meta, ais, sdl, 1024);
    return recording;
}
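The heavy lifting in the middle of save() is done by Tritonus's FloatSampleBuffer, which interleaves the float channels and converts them to bytes before AudioSystem.write() serializes the stream to disk. The sketch below shows that core step in isolation, without the Tritonus dependency, by packing a mono float signal as 16-bit little-endian PCM (the WavWriteDemo class and its clipping conversion are illustrative assumptions, not Minim code):

import javax.sound.sampled.*;
import java.io.*;

public final class WavWriteDemo {

    public static void writeWav(float[] mono, float sampleRate, File out) throws IOException {
        // 16-bit, mono, signed, little-endian PCM
        AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, false);
        byte[] bytes = new byte[mono.length * 2];
        for (int i = 0; i < mono.length; i++) {
            // clip to [-1, 1] and scale to a signed 16-bit sample
            int s = (int) (Math.max(-1f, Math.min(1f, mono[i])) * 32767f);
            bytes[2 * i] = (byte) (s & 0xFF);          // low byte first (little-endian)
            bytes[2 * i + 1] = (byte) ((s >> 8) & 0xFF);
        }
        AudioInputStream ais = new AudioInputStream(
                new ByteArrayInputStream(bytes), format, mono.length); // length in frames
        AudioSystem.write(ais, AudioFileFormat.Type.WAVE, out);
    }
}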
use of javax.sound.sampled.SourceDataLine in project Minim by ddf.
the class JSMinim method getAudioOutput.
public AudioOut getAudioOutput(int type, int bufferSize, float sampleRate, int bitDepth) {
    if (bitDepth != 8 && bitDepth != 16) {
        throw new IllegalArgumentException("Unsupported bit depth, use either 8 or 16.");
    }
    // 'type' doubles as the channel count (mono == 1, stereo == 2)
    AudioFormat format = new AudioFormat(sampleRate, bitDepth, type, true, false);
    SourceDataLine sdl = getSourceDataLine(format, bufferSize);
    if (sdl != null) {
        return new JSAudioOutput(sdl, bufferSize);
    }
    return null;
}
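getSourceDataLine() is a JSMinim helper whose body is not shown here. Based on the standard javax.sound.sampled API, a helper with that contract could plausibly look like the sketch below (the LineHelper class and the 4x-frame buffer heuristic are assumptions, not Minim's actual implementation; only the null-on-failure contract is taken from the caller above):

import javax.sound.sampled.*;

final class LineHelper {

    static SourceDataLine getSourceDataLine(AudioFormat format, int bufferSize) {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        if (!AudioSystem.isLineSupported(info)) {
            return null; // mirrors getAudioOutput's null-on-failure contract
        }
        try {
            SourceDataLine sdl = (SourceDataLine) AudioSystem.getLine(info);
            // request a line buffer large enough for several software buffers
            sdl.open(format, bufferSize * format.getFrameSize() * 4);
            return sdl;
        } catch (LineUnavailableException ex) {
            return null;
        }
    }
}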