Use of com.flashphoner.fpwcsapi.handler.CameraSwitchHandler in project wcs-android-sdk-samples by flashphoner.
Class MediaDevicesActivity, method onCreate().
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_media_devices);
/**
* Initialization of the API.
*/
Flashphoner.init(this);
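/**
* The last used WCS URL is restored from SharedPreferences, so the value entered previously is shown again when the activity is recreated.
*/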
mWcsUrlView = (EditText) findViewById(R.id.wcs_url);
SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE);
mWcsUrlView.setText(sharedPref.getString("wcs_url", getString(R.string.wcs_url)));
mStatusView = (TextView) findViewById(R.id.status);
mSendAudio = (CheckBox) findViewById(R.id.send_audio);
/**
* The method getMediaDevices(), which returns a MediaDeviceList object, is used to request the list of all available media devices.
* The methods MediaDeviceList.getAudioList() and MediaDeviceList.getVideoList() are then used to list the available microphones and cameras.
*/
mMicSpinner = (LabelledSpinner) findViewById(R.id.microphone);
mMicSpinner.setItemsArray(Flashphoner.getMediaDevices().getAudioList());
mMicLevel = (TextView) findViewById(R.id.microphone_level);
mCameraSpinner = (LabelledSpinner) findViewById(R.id.camera);
mCameraSpinner.setItemsArray(Flashphoner.getMediaDevices().getVideoList());
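/**
* Spinners used to optionally exclude a codec (H264 or VP8) from the published and from the played stream; an empty selection means no codec is stripped.
*/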
mStripStreamerCodec = (LabelledSpinner) findViewById(R.id.strip_streamer_codec);
mStripStreamerCodec.setItemsArray(new String[] { "", "H264", "VP8" });
mStripPlayerCodec = (LabelledSpinner) findViewById(R.id.strip_player_codec);
mStripPlayerCodec.setItemsArray(new String[] { "", "H264", "VP8" });
mCameraFPS = (EditText) findViewById(R.id.camera_fps);
mWidth = (EditText) findViewById(R.id.camera_width);
mHeight = (EditText) findViewById(R.id.camera_height);
mDefaultPublishVideoBitrate = (CheckBox) findViewById(R.id.publish_video_bitrate_default);
mDefaultPublishVideoBitrate.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
mPublishVideoBitrate.setEnabled(!b);
}
});
mDefaultPublishAudioBitrate = (CheckBox) findViewById(R.id.publish_audio_bitrate_default);
mDefaultPublishAudioBitrate.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
mPublishAudioBitrate.setEnabled(!b);
}
});
mPublishVideoBitrate = (EditText) findViewById(R.id.publish_video_bitrate);
mPublishAudioBitrate = (EditText) findViewById(R.id.publish_audio_bitrate);
mSendVideo = (CheckBox) findViewById(R.id.send_video);
mUseStereo = (CheckBox) findViewById(R.id.use_stereo);
mUseFEC = (CheckBox) findViewById(R.id.use_fec);
mReceiveAudio = (CheckBox) findViewById(R.id.receive_audio);
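/**
* Playback volume is controlled with Flashphoner.setVolume(); the SeekBar range is taken from Flashphoner.getMaxVolume().
*/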
mPlayVolume = (SeekBar) findViewById(R.id.play_volume);
mPlayVolume.setMax(Flashphoner.getMaxVolume());
mPlayVolume.setProgress(Flashphoner.getVolume());
mPlayVolume.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
Flashphoner.setVolume(i);
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
mReceiveVideo = (CheckBox) findViewById(R.id.receive_video);
mDefaultPlayResolution = (CheckBox) findViewById(R.id.play_resolution_default);
mDefaultPlayResolution.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
mPlayWidth.setEnabled(!b);
mPlayHeight.setEnabled(!b);
}
});
mPlayWidth = (EditText) findViewById(R.id.play_width);
mPlayHeight = (EditText) findViewById(R.id.play_height);
mDefaultPlayBitrate = (CheckBox) findViewById(R.id.play_bitrate_default);
mDefaultPlayBitrate.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
mPlayBitrate.setEnabled(!b);
}
});
mPlayBitrate = (EditText) findViewById(R.id.play_bitrate);
mDefaultPlayQuality = (CheckBox) findViewById(R.id.play_quality_default);
mDefaultPlayQuality.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
mPlayQuality.setEnabled(!b);
}
});
mPlayQuality = (EditText) findViewById(R.id.play_quality);
mStartButton = (Button) findViewById(R.id.connect_button);
/**
* Connection to the server will be established and the stream will be published when the Start button is clicked.
*/
mStartButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
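/**
* The button tag tracks the current mode: Start (or no tag yet) means a new connection should be established, otherwise the existing session is disconnected.
*/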
if (mStartButton.getTag() == null || Integer.valueOf(R.string.action_start).equals(mStartButton.getTag())) {
String url;
final String streamName;
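/**
* The entered value is expected to be a URI of the form scheme://host:port/streamName; the server URL and the stream name are extracted from it.
*/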
try {
URI u = new URI(mWcsUrlView.getText().toString());
url = u.getScheme() + "://" + u.getHost() + ":" + u.getPort();
streamName = u.getPath().replaceAll("/", "");
} catch (URISyntaxException e) {
mStatusView.setText("Wrong uri");
return;
}
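/**
* The renderers are initialized; init() throws IllegalStateException if a renderer has already been initialized, so the exception is ignored here.
*/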
try {
localRender.init(null, new RendererCommon.RendererEvents() {
@Override
public void onFirstFrameRendered() {
}
@Override
public void onFrameResolutionChanged(final int i, final int i1, int i2) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mLocalResolutionView.setText(i + "x" + i1);
}
});
}
});
} catch (IllegalStateException e) {
// ignore
}
try {
remoteRender.init(null, new RendererCommon.RendererEvents() {
@Override
public void onFirstFrameRendered() {
}
@Override
public void onFrameResolutionChanged(final int i, final int i1, int i2) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mRemoteResolutionView.setText(i + "x" + i1);
}
});
}
});
} catch (IllegalStateException e) {
// ignore
}
/**
* The options for the connection session are set.
* The WCS server URL is passed when the SessionOptions object is created.
* The SurfaceViewRenderer used to display video from the camera is set with method SessionOptions.setLocalRenderer().
* The SurfaceViewRenderer used to display the preview stream video received from the server is set with method SessionOptions.setRemoteRenderer().
*/
SessionOptions sessionOptions = new SessionOptions(url);
sessionOptions.setLocalRenderer(localRender);
sessionOptions.setRemoteRenderer(remoteRender);
/**
* A session for connection to the WCS server is created with method createSession().
*/
session = Flashphoner.createSession(sessionOptions);
/**
* Callback functions for session status events are added to update the interface controls and to publish the stream when the connection is established.
*/
session.on(new SessionEvent() {
@Override
public void onAppData(Data data) {
}
@Override
public void onConnected(final Connection connection) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mStartButton.setText(R.string.action_stop);
mStartButton.setTag(R.string.action_stop);
mStartButton.setEnabled(true);
mTestButton.setEnabled(false);
mStatusView.setText(connection.getStatus());
/**
* The options for the stream to publish are set.
* The stream name is passed when the StreamOptions object is created.
* A VideoConstraints object is used to set the source camera, FPS and resolution.
* Stream constraints are set with method StreamOptions.setConstraints().
*/
StreamOptions streamOptions = new StreamOptions(streamName);
Constraints constraints = getConstraints();
streamOptions.setConstraints(constraints);
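/**
* The codec selected in the streamer spinner, if any, is excluded from the published stream with method StreamOptions.setStripCodecs().
*/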
String[] stripCodec = { (String) mStripStreamerCodec.getSpinner().getSelectedItem() };
streamOptions.setStripCodecs(stripCodec);
/**
* The stream is created with method Session.createStream().
*/
publishStream = session.createStream(streamOptions);
if (mMuteAudio.isChecked()) {
publishStream.muteAudio();
}
if (mMuteVideo.isChecked()) {
publishStream.muteVideo();
}
/**
* A callback function for stream status changes is added to start playing the stream once it is published.
*/
publishStream.on(new StreamStatusEvent() {
@Override
public void onStreamStatus(final Stream stream, final StreamStatus streamStatus) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (StreamStatus.PUBLISHING.equals(streamStatus)) {
/**
* The options for the stream to play are set.
* The stream name is passed when the StreamOptions object is created.
*/
StreamOptions streamOptions = new StreamOptions(streamName);
streamOptions.setConstraints(new Constraints(mReceiveAudio.isChecked(), mReceiveVideo.isChecked()));
VideoConstraints videoConstraints = null;
if (mReceiveVideo.isChecked()) {
videoConstraints = new VideoConstraints();
if (!mDefaultPlayResolution.isChecked() && mPlayWidth.getText().length() > 0 && mPlayHeight.getText().length() > 0) {
videoConstraints.setResolution(Integer.parseInt(mPlayWidth.getText().toString()), Integer.parseInt(mPlayHeight.getText().toString()));
}
if (!mDefaultPlayBitrate.isChecked() && mPlayBitrate.getText().length() > 0) {
videoConstraints.setBitrate(Integer.parseInt(mPlayBitrate.getText().toString()));
}
if (!mDefaultPlayQuality.isChecked() && mPlayQuality.getText().length() > 0) {
videoConstraints.setQuality(Integer.parseInt(mPlayQuality.getText().toString()));
}
}
AudioConstraints audioConstraints = null;
if (mReceiveAudio.isChecked()) {
audioConstraints = new AudioConstraints();
}
streamOptions.setConstraints(new Constraints(audioConstraints, videoConstraints));
String[] stripCodec = { (String) mStripPlayerCodec.getSpinner().getSelectedItem() };
streamOptions.setStripCodecs(stripCodec);
/**
* The stream is created with method Session.createStream().
*/
playStream = session.createStream(streamOptions);
/**
* A callback function for stream status changes is added to display the status.
*/
playStream.on(new StreamStatusEvent() {
@Override
public void onStreamStatus(final Stream stream, final StreamStatus streamStatus) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (!StreamStatus.PLAYING.equals(streamStatus)) {
Log.e(TAG, "Can not play stream " + stream.getName() + " " + streamStatus);
}
mStatusView.setText(streamStatus.toString());
}
});
}
});
/**
* Method Stream.play() is called to start playback of the stream.
*/
playStream.play();
if (mSendVideo.isChecked())
mSwitchCameraButton.setEnabled(true);
} else {
Log.e(TAG, "Can not publish stream " + stream.getName() + " " + streamStatus);
}
mStatusView.setText(streamStatus.toString());
}
});
}
});
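/**
* Audio and video capture permissions are requested with PUBLISH_REQUEST_CODE; publishing is expected to continue once the permissions are granted (handled outside this method, in onRequestPermissionsResult()).
*/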
ActivityCompat.requestPermissions(MediaDevicesActivity.this, new String[] { Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA }, PUBLISH_REQUEST_CODE);
}
});
}
@Override
public void onRegistered(Connection connection) {
}
@Override
public void onDisconnection(final Connection connection) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mStartButton.setText(R.string.action_start);
mStartButton.setTag(R.string.action_start);
mStartButton.setEnabled(true);
mSwitchCameraButton.setEnabled(false);
mStatusView.setText(connection.getStatus());
mTestButton.setEnabled(true);
}
});
}
});
mStartButton.setEnabled(false);
mTestButton.setEnabled(false);
/**
* The connection to the WCS server is established with method Session.connect().
*/
session.connect(new Connection());
SharedPreferences sharedPref = MediaDevicesActivity.this.getPreferences(Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPref.edit();
editor.putString("wcs_url", mWcsUrlView.getText().toString());
editor.apply();
} else {
mStartButton.setEnabled(false);
/**
* The connection to the WCS server is closed with method Session.disconnect().
*/
session.disconnect();
}
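/**
* The soft keyboard is hidden after the button press.
*/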
View currentFocus = getCurrentFocus();
if (currentFocus != null) {
InputMethodManager inputManager = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
inputManager.hideSoftInputFromWindow(currentFocus.getWindowToken(), InputMethodManager.HIDE_NOT_ALWAYS);
}
}
});
mTestButton = (Button) findViewById(R.id.test_button);
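/**
* The Test button requests capture permissions with TEST_REQUEST_CODE to test the selected microphone and camera locally; pressing it again releases the devices with Flashphoner.releaseLocalMediaAccess() and stops the sound meter.
*/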
mTestButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
if (mTestButton.getTag() == null || Integer.valueOf(R.string.action_test).equals(mTestButton.getTag())) {
ActivityCompat.requestPermissions(MediaDevicesActivity.this, new String[] { Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA }, TEST_REQUEST_CODE);
} else {
Flashphoner.releaseLocalMediaAccess();
soundMeter.stop();
mTestButton.setText(R.string.action_test);
mTestButton.setTag(R.string.action_test);
mStartButton.setEnabled(true);
}
}
});
mSwitchCameraButton = (Button) findViewById(R.id.switch_camera_button);
/**
* The camera used for the published stream is switched when the Switch camera button is clicked, using method Stream.switchCamera() with a CameraSwitchHandler callback.
*/
mSwitchCameraButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
if (publishStream != null) {
mSwitchCameraButton.setEnabled(false);
publishStream.switchCamera(new CameraSwitchHandler() {
@Override
public void onCameraSwitchDone(boolean var1) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mSwitchCameraButton.setEnabled(true);
}
});
}
@Override
public void onCameraSwitchError(String var1) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mSwitchCameraButton.setEnabled(true);
}
});
}
});
}
}
});
/**
* The MuteAudio switch is used to mute/unmute audio of the published stream.
* Audio is muted with method Stream.muteAudio() and unmuted with method Stream.unmuteAudio().
*/
mMuteAudio = (Switch) findViewById(R.id.mute_audio);
mMuteAudio.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (publishStream != null) {
if (isChecked) {
publishStream.muteAudio();
} else {
publishStream.unmuteAudio();
}
}
}
});
/**
* The MuteVideo switch is used to mute/unmute video of the published stream.
* Video is muted with method Stream.muteVideo() and unmuted with method Stream.unmuteVideo().
*/
mMuteVideo = (Switch) findViewById(R.id.mute_video);
mMuteVideo.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (publishStream != null) {
if (isChecked) {
publishStream.muteVideo();
} else {
publishStream.unmuteVideo();
}
}
}
});
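/**
* The local and remote renderers fill their layouts; the remote video is not mirrored, while the local camera preview is mirrored and drawn as a media overlay on top.
*/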
localRender = (FPSurfaceViewRenderer) findViewById(R.id.local_video_view);
mLocalResolutionView = (TextView) findViewById(R.id.local_resolution);
remoteRender = (FPSurfaceViewRenderer) findViewById(R.id.remote_video_view);
mRemoteResolutionView = (TextView) findViewById(R.id.remote_resolution);
localRenderLayout = (PercentFrameLayout) findViewById(R.id.local_video_layout);
remoteRenderLayout = (PercentFrameLayout) findViewById(R.id.remote_video_layout);
localRender.setZOrderMediaOverlay(true);
remoteRenderLayout.setPosition(0, 0, 100, 100);
remoteRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
remoteRender.setMirror(false);
remoteRender.requestLayout();
localRenderLayout.setPosition(0, 0, 100, 100);
localRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
localRender.setMirror(true);
localRender.requestLayout();
}
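For reference, the camera switch pattern from the listing above is shown below in isolation. This is a minimal sketch: the helper's signature, the parameter names and the names switchButton and publishStream are assumptions introduced for illustration; only Stream.switchCamera(), CameraSwitchHandler and its two callbacks come from the listing.

private void switchCamera(final Button switchButton, Stream publishStream) {
    // Disable the control until the switch finishes, as the activity above does.
    switchButton.setEnabled(false);
    publishStream.switchCamera(new CameraSwitchHandler() {
        @Override
        public void onCameraSwitchDone(boolean done) {
            // Switch completed; re-enable the control on the UI thread.
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    switchButton.setEnabled(true);
                }
            });
        }

        @Override
        public void onCameraSwitchError(String error) {
            // Switch failed; the string describes the error. Re-enable the control anyway.
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    switchButton.setEnabled(true);
                }
            });
        }
    });
}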