Use of javax.sound.sampled.AudioFormat in the jdk8u_jdk project (by JetBrains):
the EmergencySoundbank class, method newSimpleFFTSample_dist.
/**
 * Synthesizes a looping SoundFont2 sample from a spectral magnitude
 * distribution: the spectrum is given random phases, inverse-FFT'd into a
 * waveform, soft-saturated, normalized, loop-extended, faded in, and stored
 * as 16-bit mono PCM in the given soundbank.
 *
 * @param sf2    soundbank that receives the new sample resource
 * @param name   name assigned to the created sample
 * @param data   interleaved complex FFT buffer (length = 2 * fftsize);
 *               treated as the spectrum to synthesize from
 * @param base   fractional FFT bin index of the fundamental frequency
 * @param preamp pre-amplification applied before the saturation curve
 * @return the sample added to {@code sf2}
 */
public static SF2Sample newSimpleFFTSample_dist(SF2Soundbank sf2, String name, double[] data, double base, double preamp) {
// Number of FFT bins: half of the interleaved (real, imaginary) buffer.
int fftsize = data.length / 2;
// Target PCM layout: 44100 Hz, 16-bit, mono, signed, little-endian.
AudioFormat format = new AudioFormat(44100, 16, 1, true, false);
// Convert bin index to Hz: bin * (sampleRate / 2) / fftsize (Nyquist scaling).
double basefreq = (base / fftsize) * format.getSampleRate() * 0.5;
// Randomize phases, then transform the spectrum into the time domain
// (randomPhase/ifft/realPart are project helpers - see EmergencySoundbank).
randomPhase(data);
ifft(data);
data = realPart(data);
// Soft saturation: 1 - e^(-|x * preamp|) compresses peaks toward +/-1
// while Math.signum restores the original sign of each sample.
for (int i = 0; i < data.length; i++) {
data[i] = (1 - Math.exp(-Math.abs(data[i] * preamp))) * Math.signum(data[i]);
}
// Scale so the peak amplitude is 0.9, leaving headroom below full scale.
normalize(data, 0.9);
float[] fdata = toFloat(data);
// Append 512 looped samples and fade the first 80 in to avoid a click.
fdata = loopExtend(fdata, fdata.length + 512);
fadeUp(fdata, 80);
byte[] bdata = toBytes(fdata, format);
/*
 * Create SoundFont2 sample.
 */
SF2Sample sample = new SF2Sample(sf2);
sample.setName(name);
sample.setData(bdata);
// Loop points sit 256 samples in, spanning exactly one fftsize period.
sample.setStartLoop(256);
sample.setEndLoop(fftsize + 256);
sample.setSampleRate((long) format.getSampleRate());
// MIDI note from frequency: 69 = A440; the extra +12 shifts up one octave.
double orgnote = (69 + 12) + (12 * Math.log(basefreq / 440.0) / Math.log(2));
sample.setOriginalPitch((int) orgnote);
// Remaining fractional note, expressed in cents (negated per SF2 convention).
sample.setPitchCorrection((byte) (-(orgnote - (int) orgnote) * 100.0));
sf2.addResource(sample);
return sample;
}
Use of javax.sound.sampled.AudioFormat in the jdk8u_jdk project (by JetBrains):
the ModelByteBufferWavetable class, method getFormat.
/**
 * Returns the audio format of this wavetable. If no format was set
 * explicitly, it is parsed from the buffer's audio file header; returns
 * null when there is no buffer or the header cannot be read.
 *
 * Note: the parsed format is intentionally not cached in the field.
 */
public AudioFormat getFormat() {
    if (format != null) {
        return format;
    }
    if (buffer == null) {
        return null;
    }
    AudioFormat parsed = null;
    InputStream stream = buffer.getInputStream();
    try {
        parsed = AudioSystem.getAudioFileFormat(stream).getFormat();
    } catch (Exception ignored) {
        // Unreadable or unsupported header: fall through and return null.
    }
    try {
        stream.close();
    } catch (IOException ignored) {
        // Best-effort close; nothing useful to do on failure.
    }
    return parsed;
}
Use of javax.sound.sampled.AudioFormat in the jdk8u_jdk project (by JetBrains):
the WaveFileWriter class, method getFileStream.
/**
 * Builds the complete stream for a WAVE file: a little-endian RIFF/WAVE
 * header followed by the audio data, transcoding the source audio to the
 * sample layout WAVE requires (unsigned 8-bit, or signed little-endian for
 * wider samples) when necessary.
 *
 * Private method; assumes the wave file format describes a supported type.
 *
 * @param waveFileFormat target file format; header fields are derived from it
 * @param audioStream    source audio data, possibly an AudioInputStream
 * @return header concatenated with the (possibly transcoded) audio data
 * @throws IOException if the header cannot be written
 */
private InputStream getFileStream(WaveFileFormat waveFileFormat, InputStream audioStream) throws IOException {
    // WAVE header fields
    AudioFormat audioFormat = waveFileFormat.getFormat();
    int headerLength = waveFileFormat.getHeaderSize();
    int riffMagic = WaveFileFormat.RIFF_MAGIC;
    int waveMagic = WaveFileFormat.WAVE_MAGIC;
    int fmtMagic = WaveFileFormat.FMT_MAGIC;
    int fmtLength = WaveFileFormat.getFmtChunkSize(waveFileFormat.getWaveType());
    short wav_type = (short) waveFileFormat.getWaveType();
    short channels = (short) audioFormat.getChannels();
    short sampleSizeInBits = (short) audioFormat.getSampleSizeInBits();
    int sampleRate = (int) audioFormat.getSampleRate();
    int frameSizeInBytes = audioFormat.getFrameSize();
    int avgBytesPerSec = channels * sampleSizeInBits * sampleRate / 8;
    short blockAlign = (short) ((sampleSizeInBits / 8) * channels);
    int dataMagic = WaveFileFormat.DATA_MAGIC;
    int dataLength = waveFileFormat.getFrameLength() * frameSizeInBytes;
    // RIFF length counts everything after the 8-byte RIFF chunk preamble.
    int riffLength = dataLength + headerLength - 8;

    InputStream codedAudioStream = audioStream;
    // If audioStream is an AudioInputStream and we need to convert, do it here.
    if (audioStream instanceof AudioInputStream) {
        AudioFormat audioStreamFormat = ((AudioInputStream) audioStream).getFormat();
        AudioFormat.Encoding encoding = audioStreamFormat.getEncoding();
        if (AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) {
            if (sampleSizeInBits == 8) {
                wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
                // 8-bit WAVE data must be unsigned: plug in the transcoder
                // to convert from PCM_SIGNED to PCM_UNSIGNED.
                codedAudioStream = AudioSystem.getAudioInputStream(
                        new AudioFormat(AudioFormat.Encoding.PCM_UNSIGNED,
                                audioStreamFormat.getSampleRate(),
                                audioStreamFormat.getSampleSizeInBits(),
                                audioStreamFormat.getChannels(),
                                audioStreamFormat.getFrameSize(),
                                audioStreamFormat.getFrameRate(),
                                false),
                        (AudioInputStream) audioStream);
            }
        }
        if ((AudioFormat.Encoding.PCM_SIGNED.equals(encoding) && audioStreamFormat.isBigEndian())
                || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) && !audioStreamFormat.isBigEndian())
                || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) && audioStreamFormat.isBigEndian())) {
            if (sampleSizeInBits != 8) {
                wav_type = WaveFileFormat.WAVE_FORMAT_PCM;
                // Samples wider than 8 bits must be signed little-endian:
                // plug in the transcoder to convert to PCM_SIGNED little-endian.
                codedAudioStream = AudioSystem.getAudioInputStream(
                        new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                                audioStreamFormat.getSampleRate(),
                                audioStreamFormat.getSampleSizeInBits(),
                                audioStreamFormat.getChannels(),
                                audioStreamFormat.getFrameSize(),
                                audioStreamFormat.getFrameRate(),
                                false),
                        (AudioInputStream) audioStream);
            }
        }
    }

    // Now push the header into a stream, concat, and return the new
    // SequenceInputStream. All multi-byte fields are written little-endian
    // via the big2little helpers.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    dos.writeInt(riffMagic);
    dos.writeInt(big2little(riffLength));
    dos.writeInt(waveMagic);
    dos.writeInt(fmtMagic);
    dos.writeInt(big2little(fmtLength));
    dos.writeShort(big2littleShort(wav_type));
    dos.writeShort(big2littleShort(channels));
    dos.writeInt(big2little(sampleRate));
    dos.writeInt(big2little(avgBytesPerSec));
    dos.writeShort(big2littleShort(blockAlign));
    dos.writeShort(big2littleShort(sampleSizeInBits));
    //$$fb 2002-04-16: Fix for 4636355: RIFF audio headers could be _more_ spec compliant
    if (wav_type != WaveFileFormat.WAVE_FORMAT_PCM) {
        // add length 0 for "codec specific data length"
        dos.writeShort(0);
    }
    dos.writeInt(dataMagic);
    dos.writeInt(big2little(dataLength));
    dos.close();

    ByteArrayInputStream headerStream = new ByteArrayInputStream(baos.toByteArray());
    return new SequenceInputStream(headerStream, new NoCloseInputStream(codedAudioStream));
}
Use of javax.sound.sampled.AudioFormat in the jdk8u_jdk project (by JetBrains):
the WaveFloatFileWriter class, method toLittleEndian.
/**
 * Wraps the given stream in a converter that delivers the same audio with
 * little-endian byte order; every other format property is kept as-is.
 */
private AudioInputStream toLittleEndian(AudioInputStream ais) {
    AudioFormat src = ais.getFormat();
    AudioFormat littleEndian = new AudioFormat(
            src.getEncoding(),
            src.getSampleRate(),
            src.getSampleSizeInBits(),
            src.getChannels(),
            src.getFrameSize(),
            src.getFrameRate(),
            false); // false => little-endian
    return AudioSystem.getAudioInputStream(littleEndian, ais);
}
Use of javax.sound.sampled.AudioFormat in the Apache Tika project:
the AudioParser class, method parse.
/**
 * Extracts audio metadata (container type, channels, sample rate, bit
 * depth, encoding) from the stream and emits an empty XHTML document.
 * Unsupported or unrecognizable audio is silently skipped.
 */
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException {
    // AudioSystem needs mark/reset support to sniff the file header.
    InputStream in = stream.markSupported() ? stream : new BufferedInputStream(stream);
    try {
        AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(in);

        // Map the container type to a MIME type, when we recognize it.
        Type type = fileFormat.getType();
        String contentType = null;
        if (type == Type.AIFC || type == Type.AIFF) {
            contentType = "audio/x-aiff";
        } else if (type == Type.AU || type == Type.SND) {
            contentType = "audio/basic";
        } else if (type == Type.WAVE) {
            contentType = "audio/x-wav";
        }
        if (contentType != null) {
            metadata.set(Metadata.CONTENT_TYPE, contentType);
        }

        AudioFormat audioFormat = fileFormat.getFormat();

        int channels = audioFormat.getChannels();
        if (channels != AudioSystem.NOT_SPECIFIED) {
            metadata.set("channels", String.valueOf(channels));
            // TODO: Use XMPDM.TRACKS? (see also frame rate in AudioFormat)
        }

        float rate = audioFormat.getSampleRate();
        if (rate != AudioSystem.NOT_SPECIFIED) {
            metadata.set("samplerate", String.valueOf(rate));
            metadata.set(XMPDM.AUDIO_SAMPLE_RATE, Integer.toString((int) rate));
        }

        int bits = audioFormat.getSampleSizeInBits();
        if (bits != AudioSystem.NOT_SPECIFIED) {
            metadata.set("bits", String.valueOf(bits));
            switch (bits) {
                case 8:
                    metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "8Int");
                    break;
                case 16:
                    metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "16Int");
                    break;
                case 32:
                    metadata.set(XMPDM.AUDIO_SAMPLE_TYPE, "32Int");
                    break;
                default:
                    // Other bit depths have no XMPDM sample-type mapping.
                    break;
            }
        }

        metadata.set("encoding", audioFormat.getEncoding().toString());

        // Javadoc suggests that some of the following properties might
        // be available, but I had no success in finding any:
        // "duration" Long playback duration of the file in microseconds
        // "author" String name of the author of this file
        // "title" String title of this file
        // "copyright" String copyright message
        // "date" Date date of the recording or release
        // "comment" String an arbitrary text
        addMetadata(metadata, fileFormat.properties());
        addMetadata(metadata, audioFormat.properties());
    } catch (UnsupportedAudioFileException e) {
        // There is no way to know whether this exception was caused by
        // the document being corrupted or by the format just being
        // unsupported. So we do nothing.
    }

    XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
    xhtml.startDocument();
    xhtml.endDocument();
}
Aggregations