Use of javax.sound.sampled.AudioInputStream in project janrufmonitor by tbrandt77:
the run method of the TamMessagePlay class.
/**
 * Plays the FRITZ!Box answering-machine (TAM) recording attached to the
 * currently selected call. If the recording is not cached locally yet, it is
 * downloaded (base64-encoded) from the box firmware and stored as a WAV file
 * under the application's data directory; the cached file is then played
 * through a javax.sound Clip. If the selected call carries no TAM attachment,
 * a modal message box informs the user.
 */
public void run() {
    Viewer v = this.m_app.getApplication().getViewer();
    if (v == null) {
        return;
    }
    IStructuredSelection selection = (IStructuredSelection) v.getSelection();
    if (selection.isEmpty()) {
        return;
    }
    Object o = selection.getFirstElement();
    if (!(o instanceof ICall)) {
        return;
    }
    final ICall call = (ICall) o;
    if (!call.getAttributes().contains("fritzbox.tamurl")) {
        // Selected call has no recorded message: tell the user and bail out.
        new SWTExecuter() {

            protected void execute() {
                int style = SWT.APPLICATION_MODAL | SWT.OK;
                MessageBox messageBox = new MessageBox(new Shell(DisplayManager.getDefaultDisplay()), style);
                messageBox.setMessage(getI18nManager().getString(getNamespace(), "notam", "label", getLanguage()));
                messageBox.open();
            }
        }.start();
        return;
    }
    String url = call.getAttributes().get("fritzbox.tamurl").getValue();
    File tamMessageDir = new File(PathResolver.getInstance(PIMRuntime.getInstance()).getDataDirectory() + File.separator + "fritzbox-messages");
    tamMessageDir.mkdirs();
    if (!tamMessageDir.exists() || !tamMessageDir.isDirectory()) {
        // BUGFIX: the original fell through with a null message_file and
        // NPE'd in AudioSystem.getAudioInputStream(null) below.
        this.m_logger.warning("Could not create message directory: " + tamMessageDir.getAbsolutePath());
        return;
    }
    File messageFile = new File(tamMessageDir, call.getUUID() + ".wav");
    if (!messageFile.exists()) {
        FirmwareManager fwm = FirmwareManager.getInstance();
        try {
            if (!fwm.isLoggedIn())
                fwm.login();
            String data = fwm.getTamMessage(url);
            if (data == null)
                return;
            ByteArrayInputStream bin = new ByteArrayInputStream(data.getBytes());
            Base64Decoder b64 = new Base64Decoder(bin);
            // try-with-resources: the original leaked the FileOutputStream
            // when Stream.copy threw.
            try (FileOutputStream fos = new FileOutputStream(messageFile)) {
                Stream.copy(b64, fos);
                fos.flush();
            }
        } catch (IOException e) {
            this.m_logger.warning(e.toString());
            // BUGFIX: do not attempt playback of a missing/partial download.
            return;
        } catch (FritzBoxLoginException e) {
            this.m_logger.warning(e.toString());
            return;
        }
    }
    try {
        AudioInputStream stream = AudioSystem.getAudioInputStream(messageFile);
        AudioFormat format = stream.getFormat();
        DataLine.Info info = new DataLine.Info(Clip.class, format);
        Clip clip = (Clip) AudioSystem.getLine(info);
        // Clip is left open intentionally: playback is asynchronous and
        // closing it here would cut the message off.
        clip.open(stream);
        clip.start();
    } catch (IOException e) {
        this.m_logger.severe(e.getMessage());
    } catch (LineUnavailableException e) {
        this.m_logger.severe(e.getMessage());
    } catch (UnsupportedAudioFileException e) {
        this.m_logger.severe(e.getMessage());
    }
}
Use of javax.sound.sampled.AudioInputStream in project blue by kunstmusik:
the analyzeWaveform method of the AudioWaveformCacheGenerator class.
/**
 * Reads the audio file named by {@code waveData.fileName} and fills
 * {@code waveData.data} with min/max sample pairs (two doubles per pixel
 * column per channel) for waveform display. Progress is published via
 * {@code waveData.percentLoadingComplete}, and listeners are notified every
 * 100 columns on the EDT. On failure, {@code waveData.data} is set to null.
 *
 * @param waveData target container; its fileName and pixelSeconds fields
 *                 drive the analysis resolution
 */
private void analyzeWaveform(final AudioWaveformData waveData) {
    try {
        File f = new File(waveData.fileName);
        AudioFileFormat aFormat = AudioSystem.getAudioFileFormat(f);
        AudioFormat format = aFormat.getFormat();
        int numChannels = format.getChannels();
        try (AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(new BufferedInputStream(new FileInputStream(f)))) {
            int sr = (int) format.getSampleRate();
            int numBytesPerSample = audioInputStream.getFormat().getSampleSizeInBits() / 8;
            // Frames covered by one horizontal pixel column.
            int numFramesToRead = sr / waveData.pixelSeconds;
            boolean bigEndian = format.isBigEndian();
            int len = format.getFrameSize() * numFramesToRead;
            byte[] dataBuffer = new byte[len];
            int maxWidth = (aFormat.getFrameLength() / numFramesToRead) + 1;
            waveData.data = new double[numChannels][maxWidth * 2];
            for (int i = 0; i < maxWidth && running; i++) {
                int numRead = audioInputStream.read(dataBuffer, 0, len);
                if (numRead <= 0) {
                    waveData.percentLoadingComplete = 1.0;
                    break;
                } else {
                    // BUGFIX: i / maxWidth was integer division and always
                    // yielded 0, so progress never advanced until completion.
                    waveData.percentLoadingComplete = (double) i / maxWidth;
                    if (i % 100 == 0) {
                        SwingUtilities.invokeLater(() -> {
                            audioWaveformCache.fireAudioWaveformDataGenerated(waveData.fileName);
                        });
                    }
                }
                prepareSamples(waveData, dataBuffer, i, numChannels, numBytesPerSample, bigEndian);
            }
            waveData.percentLoadingComplete = 1.0;
        }
    } catch (UnsupportedAudioFileException | IOException e) {
        // Signal failure to consumers by nulling the data array.
        waveData.data = null;
        e.printStackTrace();
    }
}
Use of javax.sound.sampled.AudioInputStream in project lavaplayer by sedmelluq:
the main method of the LocalPlayerDemo class.
/**
 * Demo entry point: searches YouTube for "epic soundtracks", plays the first
 * result of the resulting playlist, and pumps the decoded PCM (stereo,
 * 44.1 kHz, signed 16-bit big-endian) to the default audio device via a
 * SourceDataLine until the track ends.
 *
 * @param args ignored
 * @throws LineUnavailableException if no matching output line can be opened
 * @throws IOException              on read failure from the player stream
 */
public static void main(String[] args) throws LineUnavailableException, IOException {
    AudioPlayerManager manager = new DefaultAudioPlayerManager();
    AudioSourceManagers.registerRemoteSources(manager);
    manager.getConfiguration().setOutputFormat(new AudioDataFormat(2, 44100, 960, AudioDataFormat.Codec.PCM_S16_BE));
    AudioPlayer player = manager.createPlayer();
    manager.loadItem("ytsearch: epic soundtracks", new FunctionalResultHandler(null, playlist -> {
        player.playTrack(playlist.getTracks().get(0));
    }, null, null));
    AudioDataFormat format = manager.getConfiguration().getOutputFormat();
    // BUGFIX: the original never closed the stream or the line and never
    // drained the line, leaking resources and risking a truncated tail.
    try (AudioInputStream stream = AudioPlayerInputStream.createStream(player, format, 10000L, false)) {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, stream.getFormat());
        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        try {
            line.open(stream.getFormat());
            line.start();
            byte[] buffer = new byte[format.bufferSize(2)];
            int chunkSize;
            while ((chunkSize = stream.read(buffer)) >= 0) {
                line.write(buffer, 0, chunkSize);
            }
            // Let buffered audio finish playing before tearing the line down.
            line.drain();
        } finally {
            line.close();
        }
    }
}
Use of javax.sound.sampled.AudioInputStream in project java-google-speech-api by goxr3plus:
the convertWaveToFlac method of the FlacEncoder class.
/**
 * Converts a wave file to a FLAC file (in order to POST the data to Google
 * and retrieve a response). The FLAC stream is configured as 8000 Hz,
 * 16 bits per sample, mono. Frames are read one at a time; multi-byte frames
 * are decoded as little-endian 16-bit samples, single-byte frames are taken
 * as-is. Errors are logged to stderr and the method returns quietly.
 *
 * @param inputFile Input wave file
 * @param outputFile Output FLAC file
 */
public void convertWaveToFlac(File inputFile, File outputFile) {
    StreamConfiguration streamConfiguration = new StreamConfiguration();
    streamConfiguration.setSampleRate(8000);
    streamConfiguration.setBitsPerSample(16);
    streamConfiguration.setChannelCount(1);
    AudioInputStream audioInputStream = null;
    FLACFileOutputStream flacOutputStream = null;
    try {
        audioInputStream = AudioSystem.getAudioInputStream(inputFile);
        AudioFormat format = audioInputStream.getFormat();
        int frameSize = format.getFrameSize();
        FLACEncoder flacEncoder = new FLACEncoder();
        flacOutputStream = new FLACFileOutputStream(outputFile);
        flacEncoder.setStreamConfiguration(streamConfiguration);
        flacEncoder.setOutputStream(flacOutputStream);
        flacEncoder.openFLACStream();
        int frameLength = (int) audioInputStream.getFrameLength();
        if (frameLength <= AudioSystem.NOT_SPECIFIED) {
            // Frame count unknown: start with an arbitrary size and grow below.
            frameLength = 16384;
        }
        int[] sampleData = new int[frameLength];
        byte[] samplesIn = new byte[frameSize];
        int i = 0;
        while (audioInputStream.read(samplesIn, 0, frameSize) != -1) {
            if (i == sampleData.length) {
                // BUGFIX: the original overflowed sampleData when the stream
                // contained more frames than the initial estimate.
                int[] grown = new int[sampleData.length * 2];
                System.arraycopy(sampleData, 0, grown, 0, sampleData.length);
                sampleData = grown;
            }
            if (frameSize != 1) {
                // Wave PCM is little-endian; decode one 16-bit sample.
                ByteBuffer bb = ByteBuffer.wrap(samplesIn);
                bb.order(ByteOrder.LITTLE_ENDIAN);
                short shortVal = bb.getShort();
                sampleData[i] = shortVal;
            } else {
                sampleData[i] = samplesIn[0];
            }
            i++;
        }
        sampleData = truncateNullData(sampleData, i);
        flacEncoder.addSamples(sampleData, i);
        flacEncoder.encodeSamples(i, false);
        flacEncoder.encodeSamples(flacEncoder.samplesAvailableToEncode(), true);
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        // BUGFIX: the original leaked both streams when an exception occurred
        // mid-encode; close best-effort here instead.
        if (audioInputStream != null) {
            try {
                audioInputStream.close();
            } catch (IOException ignored) {
                // best-effort cleanup
            }
        }
        if (flacOutputStream != null) {
            try {
                flacOutputStream.close();
            } catch (IOException ignored) {
                // best-effort cleanup
            }
        }
    }
}
Use of javax.sound.sampled.AudioInputStream in project java-google-speech-api by goxr3plus:
the openHttpsPostConnection method of the GSpeechDuplex class.
/**
 * Opens an HTTPS POST connection that streams audio captured from a
 * TargetDataLine, encoded as FLAC, to the given URL.
 *
 * @param murl
 * The URL you want to post to.
 * @param mtl
 * The TargetDataLine you want to post data from. <b>Note should be open</b>
 * @param sampleRate sample rate passed through to the connection setup
 */
private void openHttpsPostConnection(String murl, TargetDataLine mtl, int sampleRate) {
    URL url;
    try {
        url = new URL(murl);
        HttpsURLConnection httpConn = getHttpsURLConnection(sampleRate, url);
        // this opens a connection, then sends POST & headers.
        final OutputStream out = httpConn.getOutputStream();
        try {
            // Note : if the audio is more than 15 seconds
            // don't write it to UrlConnInputStream all in one block as this sample does.
            // Rather, segment the byteArray and on intermittently, sleeping thread
            // supply bytes to the urlConn Stream at a rate that approaches
            // the bitrate ( =30K per sec. in this instance ).
            System.out.println("Starting to write data to output...");
            // ais is an instance field; presumably closed elsewhere when
            // capture stops -- TODO confirm against the rest of the class.
            ais = new AudioInputStream(mtl);
            AudioSystem.write(ais, FLACFileWriter.FLAC, out);
        } finally {
            // BUGFIX: AudioSystem.write does NOT close the target stream;
            // the original leaked it and never finalized the POST body.
            out.close();
        }
        // do you need the trailer?
        // NOW you can look at the status.
        // Diagonostic Code.
        /*
        * int resCode = httpConn.getResponseCode(); if (resCode / 100 != 2) { System.out.println("ERROR"); } Scanner scanner = new
        * Scanner(httpConn.getInputStream()); while(scanner.hasNextLine()){ System.out.println("UPSTREAM READS:" + scanner.nextLine()); }
        * scanner.close();
        */
        System.out.println("Upstream Closed...");
    } catch (IOException ex) {
        ex.printStackTrace();
    }
}
Aggregations