Use of javax.sound.sampled.AudioInputStream in project Minim by ddf:
class JSMinim, method getAudioRecording.
/** @deprecated */
public AudioRecording getAudioRecording(String filename) {
AudioMetaData meta = null;
AudioInputStream ais = getAudioInputStream(filename);
byte[] samples;
if (ais != null) {
AudioFormat format = ais.getFormat();
if (format instanceof MpegAudioFormat) {
AudioFormat baseFormat = format;
format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
// converts the stream to PCM audio from mp3 audio
ais = getAudioInputStream(format, ais);
// get a map of properties so we can find out how long it is
Map<String, Object> props = getID3Tags(filename);
// there is a property called mp3.length.bytes, but that is
// the length in bytes of the mp3 file, which will of course
// be much shorter than the decoded version. so we use the
// duration of the file to figure out how many bytes the
// decoded file will be.
long dur = ((Long) props.get("duration")).longValue();
int toRead = (int) AudioUtils.millis2Bytes(dur / 1000, format);
samples = loadByteAudio(ais, toRead);
meta = new MP3MetaData(filename, dur / 1000, props);
} else {
samples = loadByteAudio(ais, (int) ais.getFrameLength() * format.getFrameSize());
long length = AudioUtils.bytes2Millis(samples.length, format);
meta = new BasicMetaData(filename, length, samples.length);
}
SourceDataLine line = getSourceDataLine(format, 2048);
if (line != null) {
return new JSAudioRecording(this, samples, line, meta);
}
}
return null;
}
Use of javax.sound.sampled.AudioInputStream in project Minim by ddf:
class JSMinim, method getAudioRecordingClip.
/**
 * Obtains a JavaSound {@link Clip}-backed {@link AudioRecording} for the
 * named file. MP3 input is first transcoded to 16-bit signed little-endian
 * PCM so a standard Clip line can play it.
 *
 * @param filename the file (or resource) to load
 * @return a JSAudioRecordingClip, or {@code null} if the stream could not
 *         be opened, the format is unsupported, or the Clip fails to open
 * @deprecated retained for backwards compatibility
 */
public AudioRecording getAudioRecordingClip(String filename) {
    AudioInputStream ais = getAudioInputStream(filename);
    if (ais == null) {
        // FIX: the original fell through with a null Clip and then called
        // clip.getMicrosecondLength() in its meta == null fallback, which
        // always threw NullPointerException. Bail out explicitly instead.
        return null;
    }
    AudioFormat format = ais.getFormat();
    if (format instanceof MpegAudioFormat) {
        AudioFormat baseFormat = format;
        // 16-bit signed PCM, little-endian; frame size = channels * 2 bytes.
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
        // converts the stream to PCM audio from mp3 audio
        ais = getAudioInputStream(format, ais);
    }
    DataLine.Info info = new DataLine.Info(Clip.class, ais.getFormat());
    if (!AudioSystem.isLineSupported(info)) {
        error("File format not supported.");
        return null;
    }
    // Obtain and open the line.
    Clip clip;
    try {
        clip = (Clip) AudioSystem.getLine(info);
        clip.open(ais);
    } catch (Exception e) {
        error("Error obtaining Javasound Clip: " + e.getMessage());
        return null;
    }
    // Duration from the ID3 tags when available; -1 means unknown length.
    Map<String, Object> props = getID3Tags(filename);
    long lengthInMillis = -1;
    if (props.containsKey("duration")) {
        lengthInMillis = ((Long) props.get("duration")).longValue() / 1000;
    }
    AudioMetaData meta = new MP3MetaData(filename, lengthInMillis, props);
    return new JSAudioRecordingClip(clip, meta);
}
Use of javax.sound.sampled.AudioInputStream in project lionengine by b3dgs:
class WavImpl, method play.
/**
 * Play sound.
 *
 * @param media The sound media.
 * @param alignment The sound alignment.
 */
private void play(Media media, Align alignment) {
    try (Playback current = createPlayback(media, alignment, volume)) {
        // Stop any playback already registered for this media before
        // replacing it in the map.
        if (opened.containsKey(media)) {
            opened.get(media).close();
        }
        opened.put(media, current);
        final AudioInputStream stream = openStream(media);
        final SourceDataLine line = current.getDataLine();
        line.start();
        readSound(stream, line);
        close(stream, line);
    } catch (final IOException exception) {
        // Log each distinct failure once; suppress repeats of the same message.
        final boolean sameAsLast = last != null && exception.getMessage().equals(last.getMessage());
        if (!sameAsLast) {
            Verbose.exception(exception, media.toString());
            last = exception;
        }
    }
}
Use of javax.sound.sampled.AudioInputStream in project lionengine by b3dgs:
class WavImpl, method createPlayback.
/**
* Play a sound.
*
* @param media The audio media.
* @param alignment The alignment type.
* @param volume The audio volume value.
* @return The created and opened playback ready to be played.
* @throws IOException If playback error.
*/
private static Playback createPlayback(Media media, Align alignment, int volume) throws IOException {
final AudioInputStream input = openStream(media);
final SourceDataLine dataLine = getDataLine(input);
dataLine.start();
updateAlignment(dataLine, alignment);
updateVolume(dataLine, volume);
return new Playback(input, dataLine);
}
Use of javax.sound.sampled.AudioInputStream in project 490 by pauleibye:
class MainFrame, method initialize.
/**
 * Initialize the contents of the frame.
 * <p>
 * Builds the Swing layout, populates the audio-file list, trains the
 * classifier (via {@code createCSVandArff} / {@code buildModel}), and wires
 * the Parse and Play Audio button handlers.
 */
private void initialize() {
    frame = new JFrame();
    frame.getContentPane().setFont(new Font("Tahoma", Font.PLAIN, 18));
    frame.setBounds(100, 100, 615, 377);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    GridBagLayout gridBagLayout = new GridBagLayout();
    gridBagLayout.columnWidths = new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
    gridBagLayout.rowHeights = new int[] { 0, 0, 0, 0, 0 };
    gridBagLayout.columnWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, Double.MIN_VALUE };
    gridBagLayout.rowWeights = new double[] { 0.0, 0.0, 1.0, 1.0, Double.MIN_VALUE };
    frame.getContentPane().setLayout(gridBagLayout);

    JButton btnParse = new JButton("Parse");
    frame.getContentPane().add(btnParse, constraints(2, 1, new Insets(0, 0, 5, 5)));

    for (int i = 0; i < numFiles; i++) {
        listModel.addElement("Audio File: " + i);
    }

    lblFeatures = new JLabel("Features");
    lblFeatures.setFont(new Font("Tahoma", Font.PLAIN, 36));
    frame.getContentPane().add(lblFeatures, constraints(4, 1, new Insets(0, 0, 5, 5)));

    btnPlayAudio = new JButton("Play Audio");
    frame.getContentPane().add(btnPlayAudio, constraints(5, 1, new Insets(0, 0, 5, 5)));

    // FIX: was declared JList<JCheckBox> over a raw JList — the model is
    // populated with Strings above, so type it accordingly.
    final JList<String> list = new JList<>(listModel);
    GridBagConstraints gbc_list = constraints(2, 2, new Insets(0, 0, 0, 5));
    gbc_list.gridheight = 2;
    gbc_list.fill = GridBagConstraints.BOTH;
    frame.getContentPane().add(list, gbc_list);

    resultLabel = new JLabel("New label");
    resultLabel.setFont(new Font("Tahoma", Font.PLAIN, 18));
    frame.getContentPane().add(resultLabel, constraints(4, 2, new Insets(0, 0, 5, 5)));

    // FIX: "Proccessed" typo, now consistent with the resultLabel text below.
    featuresDisplay = new JLabel("No Image Processed");
    featuresDisplay.setFont(new Font("Tahoma", Font.PLAIN, 18));
    frame.getContentPane().add(featuresDisplay, constraints(4, 3, new Insets(0, 0, 0, 5)));

    this.resultLabel.setText("No Image Processed");

    labelCorrect = new JLabel("");
    frame.getContentPane().add(labelCorrect, constraints(5, 3, new Insets(0, 0, 0, 5)));

    createCSVandArff(featureExtractor);
    buildModel();

    /*
     * Parse button handler
     * Takes selected file and runs it against the machine learning model
     * updates label to display if algorithm was correct or not
     */
    btnParse.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            try {
                if (list.isSelectionEmpty()) {
                    return;
                }
                selected = list.getSelectedIndex();
                // Path of the selected test file; doubles as the "actual"
                // label source (its first character encodes the class).
                String path = convertToImagePath();
                FeatureExtractor fe = new FeatureExtractor();
                Double[] features = fe.extractFeaturesForTest("resources/testing/" + path);
                Instance inst = new DenseInstance(features.length);
                inst.setDataset(trainDataSet);
                for (int i = 0; i < features.length; i++) {
                    inst.setValue(i, features[i]);
                }
                double result = nb.classifyInstance(inst);
                String predStr = trainDataSet.classAttribute().value((int) result);
                resultLabel.setText("Selected File: " + path);
                featuresDisplay.setText("Algorithm Prediction: " + predStr);
                if (predStr.substring(0, 1).toLowerCase().equals(path.substring(0, 1).toLowerCase())) {
                    labelCorrect.setText("Yes, Prediction is correct");
                } else {
                    labelCorrect.setText("No, Prediction is wrong");
                }
            } catch (Exception e1) {
                // FIX: the three previous catch blocks (IOException,
                // UnsupportedAudioFileException, Exception) were identical;
                // a single broad catch keeps the same behavior.
                e1.printStackTrace();
            }
        }
    });

    /*
     * Plays selected audio file on click, NOTE, file has to parsed first
     */
    btnPlayAudio.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            File file = new File("resources/testing/" + convertToImagePath());
            try {
                AudioInputStream audioIn = AudioSystem.getAudioInputStream(file);
                Clip clip = AudioSystem.getClip();
                clip.open(audioIn);
                clip.start();
                // NOTE(review): neither the stream nor the clip is closed;
                // repeated clicks leak lines. Left as-is to preserve behavior.
            } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e1) {
                // FIX: collapsed three identical catch blocks into a multi-catch.
                e1.printStackTrace();
            }
        }
    });
}

/** Builds a GridBagConstraints with the given cell position and insets. */
private static GridBagConstraints constraints(int gridx, int gridy, Insets insets) {
    GridBagConstraints gbc = new GridBagConstraints();
    gbc.insets = insets;
    gbc.gridx = gridx;
    gbc.gridy = gridy;
    return gbc;
}
Aggregations