Usage example of com.codename1.media.AudioBuffer from the CodenameOne project (class AudioBufferSample, method lowLevelUsageSample):
/**
 * Demonstrates low-level PCM capture: a MediaRecorder redirected into an
 * {@code AudioBuffer} so raw samples can be processed in memory instead of
 * being written to a file.
 */
private void lowLevelUsageSample() {
    // Step 1: Create your audio buffer. The size can be anything you like.
    int bufferSize = 256;
    // The "path" is a virtual identifier for the buffer — it does not
    // correspond to a real file; it is only used internally to look the
    // buffer up (e.g. by the recorder below).
    String path = "mybuffer.pcm";
    AudioBuffer audioBuffer = MediaManager.getAudioBuffer(path, true, bufferSize);
    // Scratch array used to copy data out of the audioBuffer.
    float[] myFloatBuffer = new float[bufferSize];
    // Step 2: Add a callback to the audio buffer. It fires whenever the
    // buffer contents change — this is the "net" that catches raw PCM
    // samples, each in the range [-1, 1].
    // IMPORTANT: the callback is NOT run on the EDT; it runs on an
    // internal audio capture thread.
    audioBuffer.addCallback(floatSamples -> {
        audioBuffer.copyTo(myFloatBuffer);
        // All of the new PCM data is now in myFloatBuffer — send it to a
        // server, append it to a file, etc.
    });
    // Step 3: Create a MediaRecorder that redirects into the buffer.
    MediaRecorderBuilder mrb = new MediaRecorderBuilder().path(path).redirectToAudioBuffer(true);
    try {
        Media recorder = MediaManager.createMediaRecorder(mrb);
        // This actually starts recording.
        recorder.play();
        // Record for 5 seconds, then stop via a one-shot timer.
        Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                recorder.cleanup();
                // FIX: cancel the timer so its worker thread terminates;
                // otherwise java.util.Timer keeps the thread alive until GC.
                timer.cancel();
            }
        }, 5000);
    } catch (IOException ex) {
        Log.p("Failed to create media recorder");
        Log.e(ex);
    }
}
Usage of com.codename1.media.AudioBuffer from the CodenameOne project (class AndroidImplementation, method createMediaRecorder):
// Android factory for a recording Media object. Two modes:
//  - redirectToAudioBuffer == true: streams raw PCM from an AudioRecord into the
//    shared com.codename1.media.AudioBuffer identified by `path` (no file is written).
//  - otherwise: records compressed audio (AMR or AAC/MP4) to the file at `path`
//    via android.media.MediaRecorder.
// Construction runs on the Android UI thread; the calling thread blocks on `lock`
// until the UI runnable signals completion with notify().
private Media createMediaRecorder(final String path, final String mimeType, final int sampleRate, final int bitRate, final int audioChannels, final int maxDuration, final boolean redirectToAudioBuffer) throws IOException {
// No activity means no context to record in — bail out.
if (getActivity() == null) {
return null;
}
// Mic permission is mandatory; returns null (not an exception) if denied.
if (!checkForPermission(Manifest.permission.RECORD_AUDIO, "This is required to record audio")) {
return null;
}
// Single-element arrays act as mutable result slots for the UI-thread runnable.
final Media[] record = new Media[1];
final IOException[] error = new IOException[1];
final Object lock = new Object();
synchronized (lock) {
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
synchronized (lock) {
if (redirectToAudioBuffer) {
// Map the requested channel count to an Android channel config; any
// value other than 1 or 2 falls back to mono.
final int channelConfig = audioChannels == 1 ? android.media.AudioFormat.CHANNEL_IN_MONO : audioChannels == 2 ? android.media.AudioFormat.CHANNEL_IN_STEREO : android.media.AudioFormat.CHANNEL_IN_MONO;
// 16-bit PCM capture sized to the platform's minimum buffer.
final AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT, AudioRecord.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT));
// Shared buffer that CN1-side callbacks listen on; released in cleanup().
final com.codename1.media.AudioBuffer audioBuffer = com.codename1.media.MediaManager.getAudioBuffer(path, true, 64);
final boolean[] stop = new boolean[1];
record[0] = new AbstractMedia() {
private int lastTime;
private boolean isRecording;
@Override
protected void playImpl() {
// play() starts recording; idempotent if already recording.
if (isRecording) {
return;
}
isRecording = true;
recorder.startRecording();
fireMediaStateChange(State.Playing);
// Capture thread: reads 16-bit samples and pushes normalized
// floats into the shared AudioBuffer until paused.
new Thread(new Runnable() {
public void run() {
float[] audioData = new float[audioBuffer.getMaxSize()];
short[] buffer = new short[AudioRecord.getMinBufferSize(recorder.getSampleRate(), recorder.getChannelCount(), AudioFormat.ENCODING_PCM_16BIT)];
int read = -1;
int index = 0;
while (isRecording && (read = recorder.read(buffer, 0, buffer.length)) >= 0) {
if (read > 0) {
for (int i = 0; i < read; i++) {
// Normalize signed 16-bit PCM to the [-1, 1) float range.
audioData[index] = ((float) buffer[i]) / 0x8000;
index++;
if (index >= audioData.length) {
audioBuffer.copyFrom(sampleRate, audioChannels, audioData, 0, index);
index = 0;
}
}
// Flush any partial chunk after every read so listeners
// receive data promptly rather than waiting for a full block.
if (index > 0) {
audioBuffer.copyFrom(sampleRate, audioChannels, audioData, 0, index);
index = 0;
}
}
}
}
}).start();
}
@Override
protected void pauseImpl() {
if (!isRecording) {
return;
}
// Clearing the flag ends the capture thread's loop.
isRecording = false;
recorder.stop();
fireMediaStateChange(State.Paused);
}
@Override
public void prepare() {
}
@Override
public void cleanup() {
// Stop capture, free the native AudioRecord and drop the CN1 buffer.
pauseImpl();
recorder.release();
com.codename1.media.MediaManager.releaseAudioBuffer(path);
}
@Override
public int getTime() {
if (isRecording) {
// AudioTimestamp is only available on API 24 (N) and above; on older
// devices the last computed value is returned unchanged.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
AudioTimestamp ts = new AudioTimestamp();
recorder.getTimestamp(ts, AudioTimestamp.TIMEBASE_MONOTONIC);
// Convert frame position to elapsed milliseconds.
lastTime = (int) (ts.framePosition / ((float) sampleRate / 1000f));
}
}
return lastTime;
}
@Override
public void setTime(int time) {
}
@Override
public int getDuration() {
// For a live recording, duration == elapsed time.
return getTime();
}
@Override
public void setVolume(int vol) {
}
@Override
public int getVolume() {
return 0;
}
@Override
public boolean isPlaying() {
return recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING;
}
@Override
public Component getVideoComponent() {
return null;
}
@Override
public boolean isVideo() {
return false;
}
@Override
public boolean isFullScreen() {
return false;
}
@Override
public void setFullScreen(boolean fullScreen) {
}
@Override
public void setNativePlayerMode(boolean nativePlayer) {
}
@Override
public boolean isNativePlayerMode() {
return false;
}
@Override
public void setVariable(String key, Object value) {
}
@Override
public Object getVariable(String key) {
return null;
}
};
// Wake the creating thread — the Media is ready.
lock.notify();
} else {
// File-based recording path: AMR for amr mimetypes, otherwise AAC in MP4.
MediaRecorder recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
if (mimeType.contains("amr")) {
recorder.setOutputFormat(MediaRecorder.OutputFormat.AMR_NB);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
} else {
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
recorder.setAudioSamplingRate(sampleRate);
recorder.setAudioEncodingBitRate(bitRate);
}
if (audioChannels > 0) {
recorder.setAudioChannels(audioChannels);
}
if (maxDuration > 0) {
recorder.setMaxDuration(maxDuration);
}
recorder.setOutputFile(removeFilePrefix(path));
try {
recorder.prepare();
record[0] = new AndroidRecorder(recorder);
} catch (IllegalStateException ex) {
Logger.getLogger(AndroidImplementation.class.getName()).log(Level.SEVERE, null, ex);
} catch (IOException ex) {
// Propagated to the caller through error[0] after the wait below.
error[0] = ex;
} finally {
lock.notify();
}
}
}
}
});
try {
// NOTE(review): wait() is not in a loop, so a spurious wakeup or an
// IllegalStateException path that skips notify() could return early with
// record[0] == null — TODO confirm this is acceptable here.
lock.wait();
} catch (InterruptedException ex) {
ex.printStackTrace();
}
if (error[0] != null) {
throw error[0];
}
return record[0];
}
}
Usage of com.codename1.media.AudioBuffer from the CodenameOne project (class JavaSEPort, method createMediaRecorder):
// Desktop (JavaSE simulator) recorder built on javax.sound.sampled. Two modes:
//  - redirectToAudioBuffer == true: captured PCM is normalized to floats and
//    pushed into the com.codename1.media.AudioBuffer identified by `path`.
//  - otherwise: audio is captured to a .wav file; if an MP3 mimetype was
//    requested and an encoder is available, the wav is transcoded on cleanup().
private Media createMediaRecorder(final String path, String mime, final int samplingRate, final int bitRate, final int audioChannels, final int maxDuration, final boolean redirectToAudioBuffer) throws IOException {
checkMicrophoneUsageDescription();
if (!checkForPermission("android.permission.READ_PHONE_STATE", "This is required to access the mic")) {
return null;
}
if (!redirectToAudioBuffer) {
// Infer the mimetype from the file extension when not given explicitly.
if (mime == null) {
if (path.endsWith(".wav") || path.endsWith(".WAV")) {
mime = "audio/wav";
} else if (path.endsWith(".mp3") || path.endsWith(".MP3")) {
mime = "audio/mp3";
}
}
if (mime == null) {
mime = getAvailableRecordingMimeTypes()[0];
}
// Reject mimetypes this platform cannot record.
boolean foundMimetype = false;
for (String mt : getAvailableRecordingMimeTypes()) {
if (mt.equalsIgnoreCase(mime)) {
foundMimetype = true;
break;
}
}
if (!foundMimetype) {
throw new IOException("Mimetype " + mime + " not supported on this platform. Use getAvailableMimetypes() to find out what is supported");
}
}
// In buffer mode no file is involved at all.
final File file = redirectToAudioBuffer ? null : new File(unfile(path));
if (!redirectToAudioBuffer) {
if (!file.getParentFile().exists()) {
throw new IOException("Cannot write file " + path + " because the parent directory does not exist.");
}
}
// Capture always goes through a .wav intermediate; if the target is not a
// wav file, append ".wav" to get the temp capture file.
File tmpFile = file;
if (!redirectToAudioBuffer) {
if (!"audio/wav".equalsIgnoreCase(mime) && !(tmpFile.getName().endsWith(".wav") || tmpFile.getName().endsWith(".WAV"))) {
tmpFile = new File(tmpFile.getParentFile(), tmpFile.getName() + ".wav");
}
}
final File fTmpFile = tmpFile;
final String fMime = mime;
return new AbstractMedia() {
java.io.File wavFile = fTmpFile;
File outFile = file;
AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
javax.sound.sampled.TargetDataLine line;
boolean recording;
// Buffer mode captures 16-bit samples (they are converted to floats);
// file mode captures 8-bit samples. Both are signed little-endian.
javax.sound.sampled.AudioFormat getAudioFormat() {
if (redirectToAudioBuffer) {
javax.sound.sampled.AudioFormat format = new javax.sound.sampled.AudioFormat(samplingRate, 16, audioChannels, true, false);
return format;
}
float sampleRate = samplingRate;
int sampleSizeInBits = 8;
int channels = audioChannels;
boolean signed = true;
boolean bigEndian = false;
javax.sound.sampled.AudioFormat format = new javax.sound.sampled.AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
return format;
}
@Override
protected void playImpl() {
// First play(): open the mic line and spawn the capture thread.
if (line == null) {
try {
final AudioFormat format = getAudioFormat();
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
if (!AudioSystem.isLineSupported(info)) {
// Fire Playing then Paused so listeners observe a state cycle
// before the failure is raised.
fireMediaStateChange(State.Playing);
fireMediaStateChange(State.Paused);
throw new RuntimeException("Failed to access microphone. Check that the microphone is connected and that the app has permission to use it.");
}
line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format);
// start capturing
line.start();
recording = true;
fireMediaStateChange(State.Playing);
// start recording
new Thread(new Runnable() {
public void run() {
try {
AudioInputStream ais = new AudioInputStream(line);
if (redirectToAudioBuffer) {
AudioBuffer buf = MediaManager.getAudioBuffer(path, true, 256);
int maxBufferSize = buf.getMaxSize();
float[] sampleBuffer = new float[maxBufferSize];
// NOTE(review): samplingRate * audioChannels bytes looks like
// roughly half a second of 16-bit audio per read — confirm sizing.
byte[] byteBuffer = new byte[samplingRate * audioChannels];
int bytesRead = -1;
while ((bytesRead = ais.read(byteBuffer)) >= 0) {
if (bytesRead > 0) {
int sampleBufferPos = 0;
for (int i = 0; i < bytesRead; i += 2) {
// Decode little-endian 16-bit PCM and normalize to [-1, 1).
sampleBuffer[sampleBufferPos] = ((float) ByteBuffer.wrap(byteBuffer, i, 2).order(ByteOrder.LITTLE_ENDIAN).getShort()) / 0x8000;
sampleBufferPos++;
if (sampleBufferPos >= sampleBuffer.length) {
buf.copyFrom(samplingRate, audioChannels, sampleBuffer, 0, sampleBuffer.length);
sampleBufferPos = 0;
}
}
// Flush the partial chunk at the end of each read.
if (sampleBufferPos > 0) {
buf.copyFrom(samplingRate, audioChannels, sampleBuffer, 0, sampleBufferPos);
}
}
}
} else {
// File mode: AudioSystem.write blocks, streaming the line to
// the wav file until the line is closed.
AudioSystem.write(ais, fileType, wavFile);
}
} catch (IOException ioe) {
fireMediaError(new MediaException(MediaErrorType.Unknown, ioe));
}
}
}).start();
} catch (LineUnavailableException ex) {
fireMediaError(new MediaException(MediaErrorType.LineUnavailable, ex));
}
} else {
// Subsequent play(): resume an already-opened, stopped line.
if (!line.isActive()) {
line.start();
recording = true;
fireMediaStateChange(State.Playing);
}
}
}
@Override
protected void pauseImpl() {
if (line == null) {
return;
}
if (!recording) {
return;
}
recording = false;
fireMediaStateChange(State.Paused);
line.stop();
}
@Override
public void prepare() {
}
@Override
public void cleanup() {
if (recording) {
pause();
}
recording = false;
if (redirectToAudioBuffer) {
MediaManager.releaseAudioBuffer(path);
}
if (line == null) {
return;
}
// Closing the line ends AudioSystem.write() in the capture thread.
line.close();
// If MP3 output was requested, transcode the intermediate wav now;
// invokeAndBlock keeps the EDT responsive during encoding.
if (!redirectToAudioBuffer && isMP3EncodingSupported() && "audio/mp3".equalsIgnoreCase(fMime)) {
final Throwable[] t = new Throwable[1];
CN.invokeAndBlock(new Runnable() {
public void run() {
try {
FileEncoder.getEncoder("audio/wav", "audio/mp3").encode(wavFile, outFile, getAudioFormat());
wavFile.delete();
} catch (Throwable ex) {
com.codename1.io.Log.e(ex);
t[0] = ex;
fireMediaError(new MediaException(MediaErrorType.Encode, ex));
}
}
});
// if (t[0] != null) {
// throw new RuntimeException(t[0]);
// }
}
line = null;
}
@Override
public int getTime() {
// NOTE(review): throws NPE if called before playImpl() opens the line
// or after cleanup() nulls it — confirm callers guard against this.
return (int) (line.getMicrosecondPosition() / 1000l);
}
@Override
public void setTime(int time) {
throw new RuntimeException("setTime() not supported on recordable Media");
}
@Override
public int getDuration() {
return (int) (line.getMicrosecondPosition() / 1000l);
}
@Override
public void setVolume(int vol) {
}
@Override
public int getVolume() {
return 100;
}
@Override
public boolean isPlaying() {
return recording;
}
@Override
public Component getVideoComponent() {
return null;
}
@Override
public boolean isVideo() {
return false;
}
@Override
public boolean isFullScreen() {
return false;
}
@Override
public void setFullScreen(boolean fullScreen) {
}
@Override
public void setNativePlayerMode(boolean nativePlayer) {
}
@Override
public boolean isNativePlayerMode() {
return false;
}
@Override
public void setVariable(String key, Object value) {
}
@Override
public Object getVariable(String key) {
return null;
}
};
}
Usage of com.codename1.media.AudioBuffer from the CodenameOne project (class AudioBufferSample, method start):
/**
 * Entry point of the sample: shows a form listing previous recordings and a
 * toolbar mic button that captures audio through an AudioBuffer, writes it to
 * a WAV file and adds it to the list.
 */
public void start() {
    // Resuming from background — just re-show the existing form.
    if (current != null) {
        current.show();
        return;
    }
    Form form = new Form("Capture", BoxLayout.y());
    form.setToolbar(new Toolbar());
    Style titleStyle = UIManager.getInstance().getComponentStyle("Title");
    FontImage micIcon = FontImage.createMaterial(FontImage.MATERIAL_MIC, titleStyle);
    FileSystemStorage storage = FileSystemStorage.getInstance();
    String recordingsDir = storage.getAppHomePath() + "recordings/";
    storage.mkdir(recordingsDir);
    try {
        // One play-button per previously saved recording.
        for (String existing : storage.listFiles(recordingsDir)) {
            MultiButton playButton = new MultiButton(existing.substring(existing.lastIndexOf("/") + 1));
            playButton.addActionListener(e -> {
                try {
                    MediaManager.createMedia(recordingsDir + existing, false).play();
                } catch (Throwable err) {
                    Log.e(err);
                }
            });
            form.add(playButton);
        }
        // Toolbar mic command: record via AudioBuffer redirection.
        form.getToolbar().addCommandToRightBar("", micIcon, ev -> {
            try {
                // Virtual buffer id (not a real file) and target wav settings.
                String path = "tmpBuffer.pcm";
                int wavSampleRate = 16000;
                WAVWriter wavFileWriter = new WAVWriter(new File("tmpBuffer.wav"), wavSampleRate, 1, 16);
                AudioBuffer audioBuffer = MediaManager.getAudioBuffer(path, true, 64);
                MediaRecorderBuilder options = new MediaRecorderBuilder()
                        .audioChannels(1)
                        .redirectToAudioBuffer(true)
                        .path(path);
                System.out.println("Builder isredirect? " + options.isRedirectToAudioBuffer());
                float[] pcmChunk = new float[audioBuffer.getMaxSize()];
                // Every buffer update: downsample if needed, then append the
                // samples to the wav file. Runs on the audio capture thread.
                audioBuffer.addCallback(buf -> {
                    if (buf.getSampleRate() > wavSampleRate) {
                        buf.downSample(wavSampleRate);
                    }
                    buf.copyTo(pcmChunk);
                    try {
                        wavFileWriter.write(pcmChunk, 0, buf.getSize());
                    } catch (Throwable t) {
                        Log.e(t);
                    }
                });
                // Blocks while the user records; samples flow through the callback.
                String captured = Capture.captureAudio(options);
                wavFileWriter.close();
                // Copy the finished wav into the recordings directory under a
                // timestamp-based name and add a play-button for it.
                String fileName = new SimpleDateFormat("yyyy-MMM-dd-kk-mm").format(new Date());
                String filePath = recordingsDir + fileName;
                Util.copy(storage.openInputStream(new File("tmpBuffer.wav").getAbsolutePath()), storage.openOutputStream(filePath));
                MultiButton recordedButton = new MultiButton(fileName);
                recordedButton.addActionListener(e -> {
                    try {
                        MediaManager.createMedia(filePath, false).play();
                    } catch (IOException err) {
                        Log.e(err);
                    }
                });
                form.add(recordedButton);
                form.revalidate();
                if (captured != null) {
                    System.out.println(captured);
                }
            } catch (Throwable err) {
                Log.e(err);
            }
        });
    } catch (Throwable err) {
        Log.e(err);
    }
    form.show();
}
Usage of com.codename1.media.AudioBuffer from the CodenameOne project (class IOSImplementation, method createMediaRecorder):
// iOS factory for a recording Media object. Two modes:
//  - redirectToAudioBuffer == true: a native Audio Unit streams PCM into the
//    com.codename1.media.AudioBuffer identified by `path`.
//  - otherwise: a native audio recorder writes a compressed file to `path`;
//    native creation is asynchronous, so invokeAndBlock waits for completion.
private Media createMediaRecorder(final String path, final String mimeType, final int sampleRate, final int bitRate, final int audioChannels, final int maxDuration, final boolean redirectToAudioBuffer) throws IOException {
// iOS requires the microphone usage description in the app's plist.
if (!nativeInstance.checkMicrophoneUsage()) {
throw new RuntimeException("Please add the ios.NSMicrophoneUsageDescription build hint");
}
if (redirectToAudioBuffer) {
// Register the shared buffer up front so it exists before the native
// audio unit starts feeding it; released again in cleanup() below.
AudioBuffer buf = MediaManager.getAudioBuffer(path, true, 4096);
return new AbstractMedia() {
// Native Audio Unit handle; 0 after cleanup().
long peer = nativeInstance.createAudioUnit(path, audioChannels, sampleRate, new float[64]);
boolean isPlaying;
@Override
protected void playImpl() {
isPlaying = true;
nativeInstance.startAudioUnit(peer);
fireMediaStateChange(State.Playing);
}
@Override
protected void pauseImpl() {
isPlaying = false;
nativeInstance.stopAudioUnit(peer);
fireMediaStateChange(State.Paused);
}
@Override
public void prepare() {
}
@Override
public void cleanup() {
// Idempotent: the peer guard prevents double-destroying the unit.
if (peer == 0) {
return;
}
if (isPlaying) {
pauseImpl();
}
MediaManager.releaseAudioBuffer(path);
nativeInstance.destroyAudioUnit(peer);
}
@Override
public int getTime() {
// Position tracking is not supported in audio-buffer mode.
return -1;
}
@Override
public void setTime(int time) {
}
@Override
public int getDuration() {
return -1;
}
@Override
public void setVolume(int vol) {
}
@Override
public int getVolume() {
return -1;
}
@Override
public boolean isPlaying() {
return isPlaying;
}
@Override
public Component getVideoComponent() {
return null;
}
@Override
public boolean isVideo() {
return false;
}
@Override
public boolean isFullScreen() {
return false;
}
@Override
public void setFullScreen(boolean fullScreen) {
}
@Override
public void setNativePlayerMode(boolean nativePlayer) {
}
@Override
public boolean isNativePlayerMode() {
return false;
}
@Override
public void setVariable(String key, Object value) {
}
@Override
public Object getVariable(String key) {
return null;
}
};
}
// File-based path: kick off native recorder creation, then block (without
// freezing the EDT) until the native side signals completion.
finishedCreatingAudioRecorder = false;
createAudioRecorderException = null;
final long[] peer = new long[] { nativeInstance.createAudioRecorder(path, mimeType, sampleRate, bitRate, audioChannels, maxDuration) };
Display.getInstance().invokeAndBlock(new Runnable() {
public void run() {
while (!finishedCreatingAudioRecorder) {
synchronized (createAudioRecorderLock) {
Util.wait(createAudioRecorderLock);
}
}
}
});
if (createAudioRecorderException != null) {
throw createAudioRecorderException;
}
return new AbstractMedia() {
private boolean playing;
@Override
protected void playImpl() {
if (peer[0] != 0) {
nativeInstance.startAudioRecord(peer[0]);
playing = true;
fireMediaStateChange(State.Playing);
}
}
@Override
protected void pauseImpl() {
if (peer[0] != 0) {
nativeInstance.pauseAudioRecord(peer[0]);
playing = false;
fireMediaStateChange(State.Paused);
}
}
// Safety net: release the native peer if the Media is GC'd without
// cleanup(). NOTE(review): finalize() is deprecated in modern Java.
protected void finalize() {
if (peer[0] != 0) {
cleanup();
}
}
@Override
public void cleanup() {
if (playing) {
nativeInstance.pauseAudioRecord(peer[0]);
fireMediaStateChange(State.Paused);
}
nativeInstance.cleanupAudioRecord(peer[0]);
// Zeroing the peer marks this instance as released for finalize().
peer[0] = 0;
}
@Override
public int getTime() {
return -1;
}
@Override
public void setTime(int time) {
}
@Override
public int getDuration() {
return -1;
}
@Override
public void setVolume(int vol) {
}
@Override
public int getVolume() {
return -1;
}
@Override
public boolean isPlaying() {
return playing;
}
@Override
public Component getVideoComponent() {
return null;
}
@Override
public boolean isVideo() {
return false;
}
@Override
public boolean isFullScreen() {
return false;
}
@Override
public void setFullScreen(boolean fullScreen) {
}
@Override
public void setNativePlayerMode(boolean nativePlayer) {
}
@Override
public boolean isNativePlayerMode() {
return false;
}
public void setVariable(String key, Object value) {
}
public Object getVariable(String key) {
return null;
}
public void prepare() {
}
};
}
End of aggregated AudioBuffer usage examples.