Use of `im.actor.runtime.webrtc.WebRTCMediaTrack` in the actor-platform project by actorapp.
Class `PeerNodeActor`, method `startIfNeeded`:
/**
 * Transitions this peer into the ACTIVE state once it is both enabled and
 * connected, notifying the callback of the state change and publishing the
 * remote media tracks. A no-op after the first successful start.
 */
private void startIfNeeded() {
    // Guard clause: start exactly once, and only when enabled + connected.
    if (!isEnabled || !isConnected || isStarted) {
        return;
    }
    isStarted = true;
    state = PeerState.ACTIVE;
    callback.onPeerStateChanged(deviceId, state);

    if (theirStream == null) {
        return;
    }
    // Apply the current audio/video flags to the remote tracks; only tracks
    // that end up enabled are announced to the callback.
    for (WebRTCMediaTrack audioTrack : theirStream.getAudioTracks()) {
        audioTrack.setEnabled(isAudioEnabled);
        if (isAudioEnabled) {
            callback.onTrackAdded(deviceId, audioTrack);
        }
    }
    for (WebRTCMediaTrack videoTrack : theirStream.getVideoTracks()) {
        videoTrack.setEnabled(isVideoEnabled);
        if (isVideoEnabled) {
            callback.onTrackAdded(deviceId, videoTrack);
        }
    }
}
Use of `im.actor.runtime.webrtc.WebRTCMediaTrack` in the actor-platform project by actorapp.
Class `PeerNodeActor`, method `onStreamAdded`:
/**
 * Handles arrival of a new remote media stream for this peer.
 * <p>
 * If the peer has already started, the new stream's tracks are enabled (per
 * the current audio/video flags) and announced before the replaced stream's
 * tracks are retired, so listeners never observe a track-less gap. The first
 * stream also marks the peer as connected; the resulting state notification
 * is emitted here only when the peer is disabled — otherwise
 * {@code startIfNeeded()} reports the jump straight to ACTIVE.
 *
 * @param stream the newly added remote media stream
 */
@Override
public void onStreamAdded(WebRTCMediaStream stream) {
    WebRTCMediaStream previous = theirStream;
    theirStream = stream;

    if (isStarted) {
        // Publish the incoming stream's tracks first.
        for (WebRTCMediaTrack audioTrack : stream.getAudioTracks()) {
            audioTrack.setEnabled(isAudioEnabled);
            if (isAudioEnabled) {
                callback.onTrackAdded(deviceId, audioTrack);
            }
        }
        for (WebRTCMediaTrack videoTrack : stream.getVideoTracks()) {
            videoTrack.setEnabled(isVideoEnabled);
            if (isVideoEnabled) {
                callback.onTrackAdded(deviceId, videoTrack);
            }
        }
        // Then retire the tracks of the stream being replaced
        // (video before audio, preserving the original ordering).
        if (previous != null) {
            for (WebRTCMediaTrack videoTrack : previous.getVideoTracks()) {
                callback.onTrackRemoved(deviceId, videoTrack);
            }
            for (WebRTCMediaTrack audioTrack : previous.getAudioTracks()) {
                callback.onTrackRemoved(deviceId, audioTrack);
            }
        }
    }

    if (!isConnected) {
        isConnected = true;
        if (!isEnabled) {
            state = PeerState.CONNECTED;
            callback.onPeerStateChanged(deviceId, state);
        }
        // When enabled, the CONNECTED->ACTIVE notification is produced by
        // startIfNeeded() below.
    }
    startIfNeeded();
}
Aggregations