Use of com.codename1.media.AsyncMedia.MediaException in project CodenameOne by codenameone.
The class JavaSEPort, method createMediaRecorder.
/**
 * Creates a {@code Media} recorder that captures audio from the default system
 * microphone via {@code javax.sound.sampled}, either writing the recording to a
 * file at {@code path} or, when {@code redirectToAudioBuffer} is true, streaming
 * raw PCM samples into the {@code AudioBuffer} registered under {@code path}.
 *
 * NOTE(review): {@code bitRate} and {@code maxDuration} are accepted but never
 * referenced anywhere in this implementation — presumably honored only on real
 * device ports; confirm before relying on them in the simulator.
 *
 * @param path                  destination file path, or the AudioBuffer key when
 *                              {@code redirectToAudioBuffer} is true
 * @param mime                  requested mimetype ("audio/wav"/"audio/mp3"); if null it is
 *                              inferred from the file extension, else the first available type
 * @param samplingRate          capture sample rate in Hz
 * @param bitRate               unused in this implementation (see note above)
 * @param audioChannels         number of capture channels
 * @param maxDuration           unused in this implementation (see note above)
 * @param redirectToAudioBuffer if true, no file is written; samples go to an AudioBuffer
 * @return a Media whose play/pause start/stop capture, or null if permission was denied
 * @throws IOException if the mimetype is unsupported or the parent directory is missing
 */
private Media createMediaRecorder(final String path, String mime, final int samplingRate, final int bitRate, final int audioChannels, final int maxDuration, final boolean redirectToAudioBuffer) throws IOException {
    checkMicrophoneUsageDescription();
    // NOTE(review): checks READ_PHONE_STATE while the prompt text refers to the
    // microphone — this simulates the Android permission flow; confirm the
    // intended permission constant matches the device ports.
    if (!checkForPermission("android.permission.READ_PHONE_STATE", "This is required to access the mic")) {
        return null;
    }
    if (!redirectToAudioBuffer) {
        // Infer the mimetype from the file extension when the caller didn't supply one.
        if (mime == null) {
            if (path.endsWith(".wav") || path.endsWith(".WAV")) {
                mime = "audio/wav";
            } else if (path.endsWith(".mp3") || path.endsWith(".MP3")) {
                mime = "audio/mp3";
            }
        }
        // Still unknown: fall back to the platform's first supported recording type.
        if (mime == null) {
            mime = getAvailableRecordingMimeTypes()[0];
        }
        // Reject mimetypes this platform cannot record.
        boolean foundMimetype = false;
        for (String mt : getAvailableRecordingMimeTypes()) {
            if (mt.equalsIgnoreCase(mime)) {
                foundMimetype = true;
                break;
            }
        }
        if (!foundMimetype) {
            throw new IOException("Mimetype " + mime + " not supported on this platform. Use getAvailableMimetypes() to find out what is supported");
        }
    }
    // When redirecting to an AudioBuffer there is no destination file at all.
    final File file = redirectToAudioBuffer ? null : new File(unfile(path));
    if (!redirectToAudioBuffer) {
        if (!file.getParentFile().exists()) {
            throw new IOException("Cannot write file " + path + " because the parent directory does not exist.");
        }
    }
    // Capture always happens as WAV; for non-WAV targets (e.g. mp3) record to a
    // sibling ".wav" temp file first and transcode during cleanup().
    File tmpFile = file;
    if (!redirectToAudioBuffer) {
        if (!"audio/wav".equalsIgnoreCase(mime) && !(tmpFile.getName().endsWith(".wav") || tmpFile.getName().endsWith(".WAV"))) {
            tmpFile = new File(tmpFile.getParentFile(), tmpFile.getName() + ".wav");
        }
    }
    final File fTmpFile = tmpFile;
    final String fMime = mime;
    return new AbstractMedia() {
        // Intermediate WAV capture target (same as outFile when recording WAV directly).
        java.io.File wavFile = fTmpFile;
        // Final destination file requested by the caller (null when buffering).
        File outFile = file;
        AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
        // The open microphone line; null until play() is first called.
        javax.sound.sampled.TargetDataLine line;
        boolean recording;

        /**
         * Builds the capture format: 16-bit little-endian signed PCM when
         * redirecting to an AudioBuffer (the read loop below decodes 2-byte
         * samples), otherwise 8-bit signed PCM for file capture.
         * NOTE(review): the 8-bit sample size for file recording looks
         * intentional but low quality — confirm against the device ports.
         */
        javax.sound.sampled.AudioFormat getAudioFormat() {
            if (redirectToAudioBuffer) {
                javax.sound.sampled.AudioFormat format = new javax.sound.sampled.AudioFormat(samplingRate, 16, audioChannels, true, false);
                return format;
            }
            float sampleRate = samplingRate;
            int sampleSizeInBits = 8;
            int channels = audioChannels;
            boolean signed = true;
            boolean bigEndian = false;
            javax.sound.sampled.AudioFormat format = new javax.sound.sampled.AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
            return format;
        }

        /**
         * For a recorder, "play" means start (or resume) capturing.
         * First call opens the line and spawns a background thread that drains
         * it until the line is closed; later calls just restart a stopped line.
         */
        @Override
        protected void playImpl() {
            if (line == null) {
                try {
                    final AudioFormat format = getAudioFormat();
                    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
                    if (!AudioSystem.isLineSupported(info)) {
                        // Fire a Playing->Paused transition so listeners observe the
                        // failed start before the exception propagates.
                        fireMediaStateChange(State.Playing);
                        fireMediaStateChange(State.Paused);
                        throw new RuntimeException("Failed to access microphone. Check that the microphone is connected and that the app has permission to use it.");
                    }
                    line = (TargetDataLine) AudioSystem.getLine(info);
                    line.open(format);
                    // start capturing
                    line.start();
                    recording = true;
                    fireMediaStateChange(State.Playing);
                    // start recording
                    new Thread(new Runnable() {
                        public void run() {
                            try {
                                AudioInputStream ais = new AudioInputStream(line);
                                if (redirectToAudioBuffer) {
                                    AudioBuffer buf = MediaManager.getAudioBuffer(path, true, 256);
                                    int maxBufferSize = buf.getMaxSize();
                                    float[] sampleBuffer = new float[maxBufferSize];
                                    // NOTE(review): samplingRate * audioChannels bytes at 2 bytes
                                    // per sample is roughly half a second of audio per read.
                                    byte[] byteBuffer = new byte[samplingRate * audioChannels];
                                    int bytesRead = -1;
                                    // Blocks until the line is closed by cleanup().
                                    while ((bytesRead = ais.read(byteBuffer)) >= 0) {
                                        if (bytesRead > 0) {
                                            int sampleBufferPos = 0;
                                            // Decode 16-bit little-endian PCM into normalized
                                            // floats in [-1, 1), flushing whenever sampleBuffer fills.
                                            for (int i = 0; i < bytesRead; i += 2) {
                                                sampleBuffer[sampleBufferPos] = ((float) ByteBuffer.wrap(byteBuffer, i, 2).order(ByteOrder.LITTLE_ENDIAN).getShort()) / 0x8000;
                                                sampleBufferPos++;
                                                if (sampleBufferPos >= sampleBuffer.length) {
                                                    buf.copyFrom(samplingRate, audioChannels, sampleBuffer, 0, sampleBuffer.length);
                                                    sampleBufferPos = 0;
                                                }
                                            }
                                            // Flush the partial tail of this read.
                                            if (sampleBufferPos > 0) {
                                                buf.copyFrom(samplingRate, audioChannels, sampleBuffer, 0, sampleBufferPos);
                                            }
                                        }
                                    }
                                } else {
                                    // Streams the line to the WAV file; returns when the line closes.
                                    AudioSystem.write(ais, fileType, wavFile);
                                }
                            } catch (IOException ioe) {
                                fireMediaError(new MediaException(MediaErrorType.Unknown, ioe));
                            }
                        }
                    }).start();
                } catch (LineUnavailableException ex) {
                    fireMediaError(new MediaException(MediaErrorType.LineUnavailable, ex));
                }
            } else {
                // Line already opened previously: resume capture if it was paused.
                if (!line.isActive()) {
                    line.start();
                    recording = true;
                    fireMediaStateChange(State.Playing);
                }
            }
        }

        /** Pauses capture: stops the line but keeps it open so play() can resume. */
        @Override
        protected void pauseImpl() {
            if (line == null) {
                return;
            }
            if (!recording) {
                return;
            }
            recording = false;
            fireMediaStateChange(State.Paused);
            line.stop();
        }

        @Override
        public void prepare() {
        }

        /**
         * Stops and closes the line (which unblocks the capture thread), releases
         * the AudioBuffer when buffering, and — when an mp3 was requested — blocks
         * while transcoding the intermediate WAV to the final output file.
         */
        @Override
        public void cleanup() {
            if (recording) {
                pause();
            }
            recording = false;
            if (redirectToAudioBuffer) {
                MediaManager.releaseAudioBuffer(path);
            }
            if (line == null) {
                return;
            }
            // Closing the line ends the background read/write loop above.
            line.close();
            if (!redirectToAudioBuffer && isMP3EncodingSupported() && "audio/mp3".equalsIgnoreCase(fMime)) {
                final Throwable[] t = new Throwable[1];
                // invokeAndBlock keeps the EDT responsive while the encode runs.
                CN.invokeAndBlock(new Runnable() {
                    public void run() {
                        try {
                            FileEncoder.getEncoder("audio/wav", "audio/mp3").encode(wavFile, outFile, getAudioFormat());
                            // The intermediate WAV is no longer needed after a successful encode.
                            wavFile.delete();
                        } catch (Throwable ex) {
                            com.codename1.io.Log.e(ex);
                            t[0] = ex;
                            fireMediaError(new MediaException(MediaErrorType.Encode, ex));
                        }
                    }
                });
                // if (t[0] != null) {
                // throw new RuntimeException(t[0]);
                // }
            }
            line = null;
        }

        /** Elapsed capture time in milliseconds. NOTE(review): NPE if called before play(). */
        @Override
        public int getTime() {
            return (int) (line.getMicrosecondPosition() / 1000l);
        }

        @Override
        public void setTime(int time) {
            throw new RuntimeException("setTime() not supported on recordable Media");
        }

        /** For a live recording, duration equals the elapsed capture time. */
        @Override
        public int getDuration() {
            return (int) (line.getMicrosecondPosition() / 1000l);
        }

        @Override
        public void setVolume(int vol) {
        }

        @Override
        public int getVolume() {
            return 100;
        }

        /** "Playing" means actively recording. */
        @Override
        public boolean isPlaying() {
            return recording;
        }

        @Override
        public Component getVideoComponent() {
            return null;
        }

        @Override
        public boolean isVideo() {
            return false;
        }

        @Override
        public boolean isFullScreen() {
            return false;
        }

        @Override
        public void setFullScreen(boolean fullScreen) {
        }

        @Override
        public void setNativePlayerMode(boolean nativePlayer) {
        }

        @Override
        public boolean isNativePlayerMode() {
            return false;
        }

        @Override
        public void setVariable(String key, Object value) {
        }

        @Override
        public Object getVariable(String key) {
            return null;
        }
    };
}
Use of com.codename1.media.AsyncMedia.MediaException in project CodenameOne by codenameone.
The class AndroidImplementation, method createMediaException.
/**
 * Translates an Android {@code MediaPlayer} error code (the {@code extra} value
 * delivered to {@code MediaPlayer.OnErrorListener}) into a Codename One
 * {@link MediaException} with a matching {@link MediaErrorType} and a short
 * human-readable message. Unrecognized codes map to {@code Network}/"Unknown error".
 *
 * @param extra the {@code MediaPlayer.MEDIA_ERROR_*} constant reported by the platform
 * @return a MediaException describing the error; never null
 */
public static MediaException createMediaException(int extra) {
    MediaErrorType type;
    String message;
    switch(extra) {
        case MediaPlayer.MEDIA_ERROR_IO:
            type = MediaErrorType.Network;
            message = "IO error";
            break;
        case MediaPlayer.MEDIA_ERROR_MALFORMED:
            type = MediaErrorType.Decode;
            message = "Media was malformed";
            break;
        case MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
            type = MediaErrorType.SrcNotSupported;
            // Fixed typo: was "Not valie for progressive playback"
            message = "Not valid for progressive playback";
            break;
        case MediaPlayer.MEDIA_ERROR_SERVER_DIED:
            type = MediaErrorType.Network;
            message = "Server died";
            break;
        case MediaPlayer.MEDIA_ERROR_TIMED_OUT:
            type = MediaErrorType.Network;
            message = "Timed out";
            break;
        case MediaPlayer.MEDIA_ERROR_UNSUPPORTED:
            type = MediaErrorType.SrcNotSupported;
            message = "Unsupported media";
            break;
        case MediaPlayer.MEDIA_ERROR_UNKNOWN:
        default:
            // MEDIA_ERROR_UNKNOWN and unrecognized codes share the same fallback.
            type = MediaErrorType.Network;
            message = "Unknown error";
    }
    return new MediaException(type, message);
}
Aggregations