Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains.
The class WriterCloseInput, method test.
static void test(AudioFileFormat.Type fileType, int length, boolean isFile) {
    testTotal++;
    out("Testing fileType: " + fileType
            + ", frameLength: " + (length >= 0 ? length : "unspecified")
            + ", output: " + (isFile ? "File" : "OutputStream"));
    AudioInputStream inStream = new ThrowAfterCloseStream(
            new ByteArrayInputStream(dataBuffer), audioFormat, length);
    AudioSystem.isFileTypeSupported(fileType, inStream);
    try {
        if (isFile) {
            File f = File.createTempFile("WriterCloseInput" + testTotal, "tmp");
            AudioSystem.write(inStream, fileType, f);
            f.delete();
        } else {
            OutputStream outStream = new NullOutputStream();
            AudioSystem.write(inStream, fileType, outStream);
        }
    } catch (Exception ex) {
        // this is not a failure
        out("SKIPPED (AudioSystem.write exception): " + ex.getMessage());
        //out(ex);
        inStream = null;
    }
    if (inStream != null) {
        try {
            // check whether the stream was closed (available() throws if it was)
            inStream.available();
            out("PASSED");
        } catch (IOException ex) {
            testFailed++;
            out("FAILED: " + ex.getMessage());
            //out(ex);
        }
    }
    out("");
}
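The test relies on two helpers defined elsewhere in WriterCloseInput that are not shown in this excerpt: ThrowAfterCloseStream and NullOutputStream. Their exact implementations are not given here; a minimal sketch of what they could look like, assuming available() is meant to fail once close() has been called:

// Sketch (assumption): an AudioInputStream that records close() and makes
// available() fail afterwards, so the test can detect an unwanted close.
static class ThrowAfterCloseStream extends AudioInputStream {
    private boolean closed = false;

    ThrowAfterCloseStream(InputStream in, AudioFormat format, long length) {
        super(in, format, length);
    }

    @Override
    public void close() {
        closed = true;
    }

    @Override
    public int available() throws IOException {
        if (closed) {
            throw new IOException("The stream has been closed");
        }
        return 1;
    }
}

// Sketch (assumption): an OutputStream that discards everything written to it.
static class NullOutputStream extends OutputStream {
    @Override
    public void write(int b) {
        // discard
    }
}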
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains.
The class SkipTest, method main.
public static void main(String[] args) throws Exception {
    AudioFloatFormatConverter converter = new AudioFloatFormatConverter();
    byte[] data = { 10, 20, 30, 40, 30, 20, 10 };
    AudioFormat format = new AudioFormat(8000, 8, 1, true, false);
    AudioFormat format2 = new AudioFormat(16000, 8, 1, true, false);
    AudioInputStream ais = new AudioInputStream(
            new ByteArrayInputStream(data), format, data.length);
    AudioInputStream ais2 = converter.getAudioInputStream(format2, ais);
    byte[] data2 = new byte[30];
    int ret = ais2.read(data2, 0, data2.length);
    ais.reset();
    AudioInputStream ais3 = converter.getAudioInputStream(format2, ais);
    byte[] data3 = new byte[100];
    ais3.skip(7);
    int ret2 = ais3.read(data3, 7, data3.length);
    if (ret2 != ret - 7)
        throw new Exception("Skip doesn't work correctly (" + ret2 + " != " + (ret - 7) + ")");
    for (int i = 7; i < ret2 + 7; i++) {
        if (data3[i] != data2[i])
            throw new Exception("Skip doesn't work correctly (" + data3[i] + " != " + data2[i] + ")");
    }
}
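AudioFloatFormatConverter is an internal com.sun.media.sound class; the skip() contract being tested can also be exercised with the public API alone. A stand-alone sketch (the class name and printed output are illustrative only):

import java.io.ByteArrayInputStream;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;

// Hypothetical stand-alone check of the skip() contract on a plain
// AudioInputStream, without any internal converter classes.
public class SkipSketch {
    public static void main(String[] args) throws Exception {
        byte[] data = { 10, 20, 30, 40, 30, 20, 10 };
        AudioFormat format = new AudioFormat(8000, 8, 1, true, false);
        AudioInputStream ais =
                new AudioInputStream(new ByteArrayInputStream(data), format, data.length);
        long skipped = ais.skip(3);   // skip the first 3 frames (1 byte per frame here)
        int next = ais.read();        // should now return the 4th sample, i.e. 40
        System.out.println("skipped=" + skipped + ", next=" + next);
    }
}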
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains.
The class NoteOverFlowTest, method main.
public static void main(String[] args) throws Exception {
    AudioSynthesizer synth = new SoftSynthesizer();
    AudioFormat format = new AudioFormat(44100, 16, 2, true, false);
    AudioInputStream stream = synth.openStream(format, null);
    // Make all voices busy, i.e. send a note-on and note-off for every available voice
    MidiChannel ch1 = synth.getChannels()[0];
    // Use a continuous instrument such as string ensemble
    ch1.programChange(48);
    for (int i = 0; i < synth.getMaxPolyphony(); i++) {
        ch1.noteOn(64, 64);
        ch1.noteOff(64);
    }
    // Now send a single note-on and note-off message
    ch1.noteOn(64, 64);
    ch1.noteOff(64);
    // Skip 20 seconds of rendered audio; by then all voices should be inactive
    stream.skip(format.getFrameSize() * ((int) (format.getFrameRate() * 20)));
    // If no voices are active, the test passes
    VoiceStatus[] v = synth.getVoiceStatus();
    for (int i = 0; i < v.length; i++) {
        if (v[i].active) {
            throw new RuntimeException("Not all voices are inactive!");
        }
    }
    // Close the synthesizer after use
    synth.close();
}
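AudioSynthesizer and SoftSynthesizer are internal Gervill classes; a similar voice-status check can be sketched against the public javax.sound.midi API. This is an illustrative variant, not the original test: without openStream, timing comes from real playback rather than from reading rendered audio.

import javax.sound.midi.MidiChannel;
import javax.sound.midi.MidiSystem;
import javax.sound.midi.Synthesizer;
import javax.sound.midi.VoiceStatus;

// Hypothetical sketch: check voice status on the default Synthesizer
// obtained through the public MidiSystem API.
public class VoiceStatusSketch {
    public static void main(String[] args) throws Exception {
        Synthesizer synth = MidiSystem.getSynthesizer();
        synth.open();
        MidiChannel ch = synth.getChannels()[0];
        ch.programChange(48);        // string-ensemble-like program
        ch.noteOn(64, 64);
        ch.noteOff(64);
        Thread.sleep(2000);          // give the release phase time to finish
        for (VoiceStatus v : synth.getVoiceStatus()) {
            if (v.active) {
                System.out.println("voice still active on channel " + v.channel);
            }
        }
        synth.close();
    }
}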
Use of javax.sound.sampled.AudioInputStream in project JMRI by JMRI.
The class SoundUtil, method bufferFromFile.
public static byte[] bufferFromFile(String filename, float sampleRate, int sampleSizeInBits,
        int channels, boolean signed, boolean bigEndian)
        throws java.io.IOException, javax.sound.sampled.UnsupportedAudioFileException {
    File sourceFile = new File(filename);
    // Get the type of the source file. We need this information
    // later to write the audio data to a file of the same type.
    AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(sourceFile);
    //AudioFileFormat.Type targetFileType = fileFormat.getType();
    AudioFormat audioFormat = fileFormat.getFormat();
    // get desired output format
    AudioFormat format = new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    // get a conversion stream
    // (Errors not checked yet)
    AudioInputStream stream = AudioSystem.getAudioInputStream(sourceFile);
    AudioInputStream inputAIS = AudioSystem.getAudioInputStream(format, stream);
    // Read the audio data into a memory buffer.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int nBufferSize = BUFFER_LENGTH * audioFormat.getFrameSize();
    byte[] abBuffer = new byte[nBufferSize];
    while (true) {
        if (log.isDebugEnabled()) {
            log.debug("trying to read (bytes): " + abBuffer.length);
        }
        int nBytesRead = inputAIS.read(abBuffer);
        if (log.isDebugEnabled()) {
            log.debug("read (bytes): " + nBytesRead);
        }
        if (nBytesRead == -1) {
            break;
        }
        baos.write(abBuffer, 0, nBytesRead);
    }
    // Create byte array
    byte[] abAudioData = baos.toByteArray();
    return abAudioData;
}
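A hypothetical caller (the file path is made up) that uses bufferFromFile to pull a sound file into memory as 8 kHz, 8-bit, signed, little-endian mono PCM:

// Hypothetical usage of SoundUtil.bufferFromFile; the path is illustrative only.
byte[] samples = SoundUtil.bufferFromFile("resources/sounds/bell.wav",
        8000.0f, 8, 1, true, false);
System.out.println("buffered " + samples.length + " bytes");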
Use of javax.sound.sampled.AudioInputStream in project jdk8u_jdk by JetBrains.
The class WaveFileWriter, method getFileStream.
private InputStream getFileStream(WaveFileFormat waveFileFormat, InputStream audioStream) throws IOException {
    // private method ... assumes audioFileFormat is a supported file type
    // WAVE header fields
    AudioFormat audioFormat = waveFileFormat.getFormat();
    int headerLength = waveFileFormat.getHeaderSize();
    int riffMagic = WaveFileFormat.RIFF_MAGIC;
    int waveMagic = WaveFileFormat.WAVE_MAGIC;
    int fmtMagic = WaveFileFormat.FMT_MAGIC;
    int fmtLength = WaveFileFormat.getFmtChunkSize(waveFileFormat.getWaveType());
    short wav_type = (short) waveFileFormat.getWaveType();
    short channels = (short) audioFormat.getChannels();
    short sampleSizeInBits = (short) audioFormat.getSampleSizeInBits();
    int sampleRate = (int) audioFormat.getSampleRate();
    int frameSizeInBytes = (int) audioFormat.getFrameSize();
    int frameRate = (int) audioFormat.getFrameRate();
    int avgBytesPerSec = channels * sampleSizeInBits * sampleRate / 8;
    short blockAlign = (short) ((sampleSizeInBits / 8) * channels);
    int dataMagic = WaveFileFormat.DATA_MAGIC;
    int dataLength = waveFileFormat.getFrameLength() * frameSizeInBytes;
    int length = waveFileFormat.getByteLength();
    int riffLength = dataLength + headerLength - 8;
    byte[] header = null;
    ByteArrayInputStream headerStream = null;
    ByteArrayOutputStream baos = null;
    DataOutputStream dos = null;
    SequenceInputStream waveStream = null;
    AudioFormat audioStreamFormat = null;
    AudioFormat.Encoding encoding = null;
    InputStream codedAudioStream = audioStream;
    // if audioStream is an AudioInputStream and we need to convert, do it here...
    if (audioStream instanceof AudioInputStream) {
        audioStreamFormat = ((AudioInputStream) audioStream).getFormat();
        encoding = audioStreamFormat.getEncoding();
        if (AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) {
            if (sampleSizeInBits == 8) {
                wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
                // plug in the transcoder to convert from PCM_SIGNED to PCM_UNSIGNED
                codedAudioStream = AudioSystem.getAudioInputStream(
                        new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED,
                                audioStreamFormat.getSampleRate(),
                                audioStreamFormat.getSampleSizeInBits(),
                                audioStreamFormat.getChannels(),
                                audioStreamFormat.getFrameSize(),
                                audioStreamFormat.getFrameRate(),
                                false),
                        (AudioInputStream) audioStream);
            }
        }
        if ((AudioFormat.Encoding.PCM_SIGNED.equals(encoding) && audioStreamFormat.isBigEndian())
                || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) && !audioStreamFormat.isBigEndian())
                || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) && audioStreamFormat.isBigEndian())) {
            if (sampleSizeInBits != 8) {
                wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
                // plug in the transcoder to convert to PCM_SIGNED_LITTLE_ENDIAN
                codedAudioStream = AudioSystem.getAudioInputStream(
                        new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                                audioStreamFormat.getSampleRate(),
                                audioStreamFormat.getSampleSizeInBits(),
                                audioStreamFormat.getChannels(),
                                audioStreamFormat.getFrameSize(),
                                audioStreamFormat.getFrameRate(),
                                false),
                        (AudioInputStream) audioStream);
            }
        }
    }
    // Now push the header into a stream, concat, and return the new SequenceInputStream
    baos = new ByteArrayOutputStream();
    dos = new DataOutputStream(baos);
    // we write in littleendian...
    dos.writeInt(riffMagic);
    dos.writeInt(big2little(riffLength));
    dos.writeInt(waveMagic);
    dos.writeInt(fmtMagic);
    dos.writeInt(big2little(fmtLength));
    dos.writeShort(big2littleShort(wav_type));
    dos.writeShort(big2littleShort(channels));
    dos.writeInt(big2little(sampleRate));
    dos.writeInt(big2little(avgBytesPerSec));
    dos.writeShort(big2littleShort(blockAlign));
    dos.writeShort(big2littleShort(sampleSizeInBits));
    //$$fb 2002-04-16: Fix for 4636355: RIFF audio headers could be _more_ spec compliant
    if (wav_type != WaveFileFormat.WAVE_FORMAT_PCM) {
        // add length 0 for "codec specific data length"
        dos.writeShort(0);
    }
    dos.writeInt(dataMagic);
    dos.writeInt(big2little(dataLength));
    dos.close();
    header = baos.toByteArray();
    headerStream = new ByteArrayInputStream(header);
    waveStream = new SequenceInputStream(headerStream, new NoCloseInputStream(codedAudioStream));
    return (InputStream) waveStream;
}
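The big2little and big2littleShort calls above are byte-order helpers defined elsewhere in the writer class hierarchy and are not shown in this excerpt. A sketch of the swap they are assumed to perform, so the big-endian DataOutputStream ends up producing little-endian WAVE header fields:

// Sketch (assumption): reverse the byte order of a 32-bit and a 16-bit value.
private static int big2little(int i) {
    return Integer.reverseBytes(i);
}

private static short big2littleShort(short s) {
    return Short.reverseBytes(s);
}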