refactor webrtc video source + capture code

This commit is contained in:
Daniel Gultsch 2022-11-12 13:37:18 +01:00
parent 5dbd86155f
commit 6ececb4d2b
3 changed files with 513 additions and 324 deletions

View file

@ -0,0 +1,31 @@
package eu.siacs.conversations.xmpp.jingle;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import org.webrtc.MediaStreamTrack;
import org.webrtc.PeerConnection;
import org.webrtc.RtpSender;
/**
 * Pairs a local {@link MediaStreamTrack} with the {@link RtpSender} that was created when the
 * track was added to a {@link PeerConnection}, so both can be handled as a single unit.
 */
class TrackWrapper<T extends MediaStreamTrack> {

    private final T track;
    private final RtpSender rtpSender;

    private TrackWrapper(final T track, final RtpSender rtpSender) {
        // checkNotNull returns its argument, letting us validate and assign in one step.
        this.track = Preconditions.checkNotNull(track);
        this.rtpSender = Preconditions.checkNotNull(rtpSender);
    }

    /**
     * Adds {@code mediaStreamTrack} to {@code peerConnection} and wraps it together with the
     * resulting {@link RtpSender}.
     */
    public static <T extends MediaStreamTrack> TrackWrapper<T> addTrack(
            final PeerConnection peerConnection, final T mediaStreamTrack) {
        final RtpSender sender = peerConnection.addTrack(mediaStreamTrack);
        return new TrackWrapper<>(mediaStreamTrack, sender);
    }

    /**
     * Null-safe accessor: returns the wrapped track, or {@link Optional#absent()} when the
     * wrapper itself is null (i.e. the track was never created).
     */
    public static <T extends MediaStreamTrack> Optional<T> get(
            final TrackWrapper<T> trackWrapper) {
        if (trackWrapper == null) {
            return Optional.absent();
        }
        return Optional.of(trackWrapper.track);
    }
}

View file

@ -0,0 +1,181 @@
package eu.siacs.conversations.xmpp.jingle;
import android.content.Context;
import android.util.Log;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.EglBase;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoSource;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Set;
import javax.annotation.Nullable;
import eu.siacs.conversations.Config;
/**
 * Wraps a camera-backed WebRTC {@link VideoSource}: owns the {@link CameraVideoCapturer}, the
 * capture format chosen for it, and the lifecycle (initialize / start / stop / dispose).
 *
 * <p>Instances are created via {@link Factory}, which prefers a front-facing camera and the
 * widest supported capture format not exceeding {@link #CAPTURING_RESOLUTION}.
 */
class VideoSourceWrapper {

    // Upper bound (in pixels of width) for the capture format we are willing to select.
    private static final int CAPTURING_RESOLUTION = 1920;
    // Desired frame rate cap; clamped into the format's supported range in getFrameRate().
    private static final int CAPTURING_MAX_FRAME_RATE = 30;

    private final CameraVideoCapturer cameraVideoCapturer;
    private final CameraEnumerationAndroid.CaptureFormat captureFormat;
    private final Set<String> availableCameras;
    private boolean isFrontCamera = false;
    private VideoSource videoSource;
    // Created in initialize(); must be released in dispose() alongside the capturer.
    private SurfaceTextureHelper surfaceTextureHelper;

    VideoSourceWrapper(
            final CameraVideoCapturer cameraVideoCapturer,
            final CameraEnumerationAndroid.CaptureFormat captureFormat,
            final Set<String> cameras) {
        this.cameraVideoCapturer = cameraVideoCapturer;
        this.captureFormat = captureFormat;
        this.availableCameras = cameras;
    }

    /** Clamps {@link #CAPTURING_MAX_FRAME_RATE} into the range supported by the capture format. */
    private int getFrameRate() {
        return Math.max(
                captureFormat.framerate.min,
                Math.min(CAPTURING_MAX_FRAME_RATE, captureFormat.framerate.max));
    }

    /**
     * Creates the {@link VideoSource} and binds the capturer to it. Must be called before
     * {@link #getVideoSource()} or {@link #startCapture()}.
     */
    public void initialize(
            final PeerConnectionFactory peerConnectionFactory,
            final Context context,
            final EglBase.Context eglBaseContext) {
        this.surfaceTextureHelper = SurfaceTextureHelper.create("webrtc", eglBaseContext);
        this.videoSource = peerConnectionFactory.createVideoSource(false);
        this.cameraVideoCapturer.initialize(
                this.surfaceTextureHelper, context, this.videoSource.getCapturerObserver());
    }

    /**
     * @return the video source created in {@link #initialize}
     * @throws IllegalStateException if {@link #initialize} has not been called yet
     */
    public VideoSource getVideoSource() {
        final VideoSource videoSource = this.videoSource;
        if (videoSource == null) {
            throw new IllegalStateException("VideoSourceWrapper was not initialized");
        }
        return videoSource;
    }

    /** Starts capturing at the selected format's resolution and the clamped frame rate. */
    public void startCapture() {
        final int frameRate = getFrameRate();
        Log.d(
                Config.LOGTAG,
                String.format(
                        "start capturing at %dx%d@%d",
                        captureFormat.width, captureFormat.height, frameRate));
        this.cameraVideoCapturer.startCapture(captureFormat.width, captureFormat.height, frameRate);
    }

    /** Stops capturing; blocks until the capturer has stopped. */
    public void stopCapture() throws InterruptedException {
        this.cameraVideoCapturer.stopCapture();
    }

    /** Releases the capturer, the video source, and the surface texture helper (if created). */
    public void dispose() {
        this.cameraVideoCapturer.dispose();
        if (this.videoSource != null) {
            this.videoSource.dispose();
        }
        // Release the helper created in initialize(); leaving it undisposed leaks its
        // texture/thread resources.
        if (this.surfaceTextureHelper != null) {
            this.surfaceTextureHelper.dispose();
        }
    }

    /**
     * Switches to the next camera. The returned future resolves to {@code true} when the new
     * camera is front-facing and fails with {@link IllegalStateException} on switch errors.
     */
    public ListenableFuture<Boolean> switchCamera() {
        final SettableFuture<Boolean> future = SettableFuture.create();
        this.cameraVideoCapturer.switchCamera(
                new CameraVideoCapturer.CameraSwitchHandler() {
                    @Override
                    public void onCameraSwitchDone(final boolean isFrontCamera) {
                        VideoSourceWrapper.this.isFrontCamera = isFrontCamera;
                        future.set(isFrontCamera);
                    }

                    @Override
                    public void onCameraSwitchError(final String message) {
                        future.setException(
                                new IllegalStateException(
                                        String.format("Unable to switch camera %s", message)));
                    }
                });
        return future;
    }

    public boolean isFrontCamera() {
        return this.isFrontCamera;
    }

    /** @return true when more than one camera is available to switch between */
    public boolean isCameraSwitchable() {
        return this.availableCameras.size() > 1;
    }

    /** Enumerates device cameras and builds a {@link VideoSourceWrapper} for the best candidate. */
    public static class Factory {
        final Context context;

        public Factory(final Context context) {
            this.context = context;
        }

        /**
         * Prefers a front-facing camera; falls back to the first enumerated camera. Returns
         * {@link Optional#absent()} when no camera (or no usable capture format) exists.
         */
        public Optional<VideoSourceWrapper> create() {
            final CameraEnumerator enumerator = new Camera2Enumerator(context);
            final Set<String> deviceNames = ImmutableSet.copyOf(enumerator.getDeviceNames());
            for (final String deviceName : deviceNames) {
                if (isFrontFacing(enumerator, deviceName)) {
                    final VideoSourceWrapper videoSourceWrapper =
                            of(enumerator, deviceName, deviceNames);
                    if (videoSourceWrapper == null) {
                        return Optional.absent();
                    }
                    videoSourceWrapper.isFrontCamera = true;
                    return Optional.of(videoSourceWrapper);
                }
            }
            if (deviceNames.isEmpty()) {
                return Optional.absent();
            } else {
                return Optional.fromNullable(
                        of(enumerator, Iterables.get(deviceNames, 0), deviceNames));
            }
        }

        /**
         * Creates a capturer for {@code deviceName} and picks the widest capture format that
         * does not exceed {@link #CAPTURING_RESOLUTION}. Returns null when the capturer cannot
         * be created or no format qualifies.
         */
        @Nullable
        private VideoSourceWrapper of(
                final CameraEnumerator enumerator,
                final String deviceName,
                final Set<String> availableCameras) {
            final CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null);
            if (capturer == null) {
                return null;
            }
            final ArrayList<CameraEnumerationAndroid.CaptureFormat> choices =
                    new ArrayList<>(enumerator.getSupportedFormats(deviceName));
            // Widest formats first; Integer.compare avoids subtraction-overflow pitfalls.
            Collections.sort(choices, (a, b) -> Integer.compare(b.width, a.width));
            for (final CameraEnumerationAndroid.CaptureFormat captureFormat : choices) {
                if (captureFormat.width <= CAPTURING_RESOLUTION) {
                    return new VideoSourceWrapper(capturer, captureFormat, availableCameras);
                }
            }
            // No usable format: release the capturer we just created so the camera is not leaked.
            capturer.dispose();
            return null;
        }

        /**
         * Some camera HALs throw NPE from isFrontFacing(); treat that as "not front-facing"
         * rather than crashing enumeration.
         */
        private static boolean isFrontFacing(
                final CameraEnumerator cameraEnumerator, final String deviceName) {
            try {
                return cameraEnumerator.isFrontFacing(deviceName);
            } catch (final NullPointerException e) {
                return false;
            }
        }
    }
}

View file

@ -9,7 +9,6 @@ import android.util.Log;
import com.google.common.base.Optional; import com.google.common.base.Optional;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
@ -17,10 +16,6 @@ import com.google.common.util.concurrent.SettableFuture;
import org.webrtc.AudioSource; import org.webrtc.AudioSource;
import org.webrtc.AudioTrack; import org.webrtc.AudioTrack;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.CandidatePairChangeEvent; import org.webrtc.CandidatePairChangeEvent;
import org.webrtc.DataChannel; import org.webrtc.DataChannel;
import org.webrtc.DefaultVideoDecoderFactory; import org.webrtc.DefaultVideoDecoderFactory;
@ -36,14 +31,10 @@ import org.webrtc.RtpReceiver;
import org.webrtc.RtpTransceiver; import org.webrtc.RtpTransceiver;
import org.webrtc.SdpObserver; import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription; import org.webrtc.SessionDescription;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack; import org.webrtc.VideoTrack;
import org.webrtc.audio.JavaAudioDeviceModule; import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.voiceengine.WebRtcAudioEffects; import org.webrtc.voiceengine.WebRtcAudioEffects;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Queue; import java.util.Queue;
@ -59,13 +50,15 @@ import eu.siacs.conversations.Config;
import eu.siacs.conversations.services.AppRTCAudioManager; import eu.siacs.conversations.services.AppRTCAudioManager;
import eu.siacs.conversations.services.XmppConnectionService; import eu.siacs.conversations.services.XmppConnectionService;
@SuppressWarnings("UnstableApiUsage")
public class WebRTCWrapper { public class WebRTCWrapper {
private static final String EXTENDED_LOGGING_TAG = WebRTCWrapper.class.getSimpleName(); private static final String EXTENDED_LOGGING_TAG = WebRTCWrapper.class.getSimpleName();
private final ExecutorService executorService = Executors.newSingleThreadExecutor(); private final ExecutorService executorService = Executors.newSingleThreadExecutor();
private static final Set<String> HARDWARE_AEC_BLACKLIST = new ImmutableSet.Builder<String>() private static final Set<String> HARDWARE_AEC_BLACKLIST =
new ImmutableSet.Builder<String>()
.add("Pixel") .add("Pixel")
.add("Pixel XL") .add("Pixel XL")
.add("Moto G5") .add("Moto G5")
@ -83,22 +76,24 @@ public class WebRTCWrapper {
.add("GT-I9505") // Samsung Galaxy S4 (jfltexx) .add("GT-I9505") // Samsung Galaxy S4 (jfltexx)
.build(); .build();
private static final int CAPTURING_RESOLUTION = 1920;
private static final int CAPTURING_MAX_FRAME_RATE = 30;
private final EventCallback eventCallback; private final EventCallback eventCallback;
private final AtomicBoolean readyToReceivedIceCandidates = new AtomicBoolean(false); private final AtomicBoolean readyToReceivedIceCandidates = new AtomicBoolean(false);
private final Queue<IceCandidate> iceCandidates = new LinkedList<>(); private final Queue<IceCandidate> iceCandidates = new LinkedList<>();
private final AppRTCAudioManager.AudioManagerEvents audioManagerEvents = new AppRTCAudioManager.AudioManagerEvents() { private final AppRTCAudioManager.AudioManagerEvents audioManagerEvents =
new AppRTCAudioManager.AudioManagerEvents() {
@Override @Override
public void onAudioDeviceChanged(AppRTCAudioManager.AudioDevice selectedAudioDevice, Set<AppRTCAudioManager.AudioDevice> availableAudioDevices) { public void onAudioDeviceChanged(
AppRTCAudioManager.AudioDevice selectedAudioDevice,
Set<AppRTCAudioManager.AudioDevice> availableAudioDevices) {
eventCallback.onAudioDeviceChanged(selectedAudioDevice, availableAudioDevices); eventCallback.onAudioDeviceChanged(selectedAudioDevice, availableAudioDevices);
} }
}; };
private final Handler mainHandler = new Handler(Looper.getMainLooper()); private final Handler mainHandler = new Handler(Looper.getMainLooper());
private VideoTrack localVideoTrack = null; private TrackWrapper<AudioTrack> localAudioTrack = null;
private TrackWrapper<VideoTrack> localVideoTrack = null;
private VideoTrack remoteVideoTrack = null; private VideoTrack remoteVideoTrack = null;
private final PeerConnection.Observer peerConnectionObserver = new PeerConnection.Observer() { private final PeerConnection.Observer peerConnectionObserver =
new PeerConnection.Observer() {
@Override @Override
public void onSignalingChange(PeerConnection.SignalingState signalingState) { public void onSignalingChange(PeerConnection.SignalingState signalingState) {
Log.d(EXTENDED_LOGGING_TAG, "onSignalingChange(" + signalingState + ")"); Log.d(EXTENDED_LOGGING_TAG, "onSignalingChange(" + signalingState + ")");
@ -113,8 +108,11 @@ public class WebRTCWrapper {
} }
@Override @Override
public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) { public void onIceConnectionChange(
Log.d(EXTENDED_LOGGING_TAG, "onIceConnectionChange(" + iceConnectionState + ")"); PeerConnection.IceConnectionState iceConnectionState) {
Log.d(
EXTENDED_LOGGING_TAG,
"onIceConnectionChange(" + iceConnectionState + ")");
} }
@Override @Override
@ -124,12 +122,11 @@ public class WebRTCWrapper {
} }
@Override @Override
public void onIceConnectionReceivingChange(boolean b) { public void onIceConnectionReceivingChange(boolean b) {}
}
@Override @Override
public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) { public void onIceGatheringChange(
PeerConnection.IceGatheringState iceGatheringState) {
Log.d(EXTENDED_LOGGING_TAG, "onIceGatheringChange(" + iceGatheringState + ")"); Log.d(EXTENDED_LOGGING_TAG, "onIceGatheringChange(" + iceGatheringState + ")");
} }
@ -143,30 +140,32 @@ public class WebRTCWrapper {
} }
@Override @Override
public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) { public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {}
}
@Override @Override
public void onAddStream(MediaStream mediaStream) { public void onAddStream(MediaStream mediaStream) {
Log.d(EXTENDED_LOGGING_TAG, "onAddStream(numAudioTracks=" + mediaStream.audioTracks.size() + ",numVideoTracks=" + mediaStream.videoTracks.size() + ")"); Log.d(
EXTENDED_LOGGING_TAG,
"onAddStream(numAudioTracks="
+ mediaStream.audioTracks.size()
+ ",numVideoTracks="
+ mediaStream.videoTracks.size()
+ ")");
} }
@Override @Override
public void onRemoveStream(MediaStream mediaStream) { public void onRemoveStream(MediaStream mediaStream) {}
}
@Override @Override
public void onDataChannel(DataChannel dataChannel) { public void onDataChannel(DataChannel dataChannel) {}
}
@Override @Override
public void onRenegotiationNeeded() { public void onRenegotiationNeeded() {
Log.d(EXTENDED_LOGGING_TAG, "onRenegotiationNeeded()"); Log.d(EXTENDED_LOGGING_TAG, "onRenegotiationNeeded()");
final PeerConnection.PeerConnectionState currentState = peerConnection == null ? null : peerConnection.connectionState(); final PeerConnection.PeerConnectionState currentState =
if (currentState != null && currentState != PeerConnection.PeerConnectionState.NEW) { peerConnection == null ? null : peerConnection.connectionState();
if (currentState != null
&& currentState != PeerConnection.PeerConnectionState.NEW) {
eventCallback.onRenegotiationNeeded(); eventCallback.onRenegotiationNeeded();
} }
} }
@ -174,7 +173,13 @@ public class WebRTCWrapper {
@Override @Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) { public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
final MediaStreamTrack track = rtpReceiver.track(); final MediaStreamTrack track = rtpReceiver.track();
Log.d(EXTENDED_LOGGING_TAG, "onAddTrack(kind=" + (track == null ? "null" : track.kind()) + ",numMediaStreams=" + mediaStreams.length + ")"); Log.d(
EXTENDED_LOGGING_TAG,
"onAddTrack(kind="
+ (track == null ? "null" : track.kind())
+ ",numMediaStreams="
+ mediaStreams.length
+ ")");
if (track instanceof VideoTrack) { if (track instanceof VideoTrack) {
remoteVideoTrack = (VideoTrack) track; remoteVideoTrack = (VideoTrack) track;
} }
@ -182,17 +187,21 @@ public class WebRTCWrapper {
@Override @Override
public void onTrack(RtpTransceiver transceiver) { public void onTrack(RtpTransceiver transceiver) {
Log.d(EXTENDED_LOGGING_TAG, "onTrack(mid=" + transceiver.getMid() + ",media=" + transceiver.getMediaType() + ")"); Log.d(
EXTENDED_LOGGING_TAG,
"onTrack(mid="
+ transceiver.getMid()
+ ",media="
+ transceiver.getMediaType()
+ ")");
} }
}; };
@Nullable @Nullable private PeerConnection peerConnection = null;
private PeerConnection peerConnection = null;
private AudioTrack localAudioTrack = null;
private AppRTCAudioManager appRTCAudioManager = null; private AppRTCAudioManager appRTCAudioManager = null;
private ToneManager toneManager = null; private ToneManager toneManager = null;
private Context context = null; private Context context = null;
private EglBase eglBase = null; private EglBase eglBase = null;
private CapturerChoice capturerChoice; private VideoSourceWrapper videoSourceWrapper;
WebRTCWrapper(final EventCallback eventCallback) { WebRTCWrapper(final EventCallback eventCallback) {
this.eventCallback = eventCallback; this.eventCallback = eventCallback;
@ -206,37 +215,15 @@ public class WebRTCWrapper {
} }
} }
@Nullable public void setup(
private static CapturerChoice of(CameraEnumerator enumerator, final String deviceName, Set<String> availableCameras) { final XmppConnectionService service,
final CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null); final AppRTCAudioManager.SpeakerPhonePreference speakerPhonePreference)
if (capturer == null) { throws InitializationException {
return null;
}
final ArrayList<CameraEnumerationAndroid.CaptureFormat> choices = new ArrayList<>(enumerator.getSupportedFormats(deviceName));
Collections.sort(choices, (a, b) -> b.width - a.width);
for (final CameraEnumerationAndroid.CaptureFormat captureFormat : choices) {
if (captureFormat.width <= CAPTURING_RESOLUTION) {
return new CapturerChoice(capturer, captureFormat, availableCameras);
}
}
return null;
}
private static boolean isFrontFacing(final CameraEnumerator cameraEnumerator, final String deviceName) {
try {
return cameraEnumerator.isFrontFacing(deviceName);
} catch (final NullPointerException e) {
return false;
}
}
public void setup(final XmppConnectionService service, final AppRTCAudioManager.SpeakerPhonePreference speakerPhonePreference) throws InitializationException {
try { try {
PeerConnectionFactory.initialize( PeerConnectionFactory.initialize(
PeerConnectionFactory.InitializationOptions.builder(service) PeerConnectionFactory.InitializationOptions.builder(service)
.setFieldTrials("WebRTC-BindUsingInterfaceName/Enabled/") .setFieldTrials("WebRTC-BindUsingInterfaceName/Enabled/")
.createInitializationOptions() .createInitializationOptions());
);
} catch (final UnsatisfiedLinkError e) { } catch (final UnsatisfiedLinkError e) {
throw new InitializationException("Unable to initialize PeerConnectionFactory", e); throw new InitializationException("Unable to initialize PeerConnectionFactory", e);
} }
@ -247,68 +234,93 @@ public class WebRTCWrapper {
} }
this.context = service; this.context = service;
this.toneManager = service.getJingleConnectionManager().toneManager; this.toneManager = service.getJingleConnectionManager().toneManager;
mainHandler.post(() -> { mainHandler.post(
() -> {
appRTCAudioManager = AppRTCAudioManager.create(service, speakerPhonePreference); appRTCAudioManager = AppRTCAudioManager.create(service, speakerPhonePreference);
toneManager.setAppRtcAudioManagerHasControl(true); toneManager.setAppRtcAudioManagerHasControl(true);
appRTCAudioManager.start(audioManagerEvents); appRTCAudioManager.start(audioManagerEvents);
eventCallback.onAudioDeviceChanged(appRTCAudioManager.getSelectedAudioDevice(), appRTCAudioManager.getAudioDevices()); eventCallback.onAudioDeviceChanged(
appRTCAudioManager.getSelectedAudioDevice(),
appRTCAudioManager.getAudioDevices());
}); });
} }
synchronized void initializePeerConnection(final Set<Media> media, final List<PeerConnection.IceServer> iceServers) throws InitializationException { synchronized void initializePeerConnection(
final Set<Media> media, final List<PeerConnection.IceServer> iceServers)
throws InitializationException {
Preconditions.checkState(this.eglBase != null); Preconditions.checkState(this.eglBase != null);
Preconditions.checkNotNull(media); Preconditions.checkNotNull(media);
Preconditions.checkArgument(media.size() > 0, "media can not be empty when initializing peer connection"); Preconditions.checkArgument(
final boolean setUseHardwareAcousticEchoCanceler = WebRtcAudioEffects.canUseAcousticEchoCanceler() && !HARDWARE_AEC_BLACKLIST.contains(Build.MODEL); media.size() > 0, "media can not be empty when initializing peer connection");
Log.d(Config.LOGTAG, String.format("setUseHardwareAcousticEchoCanceler(%s) model=%s", setUseHardwareAcousticEchoCanceler, Build.MODEL)); final boolean setUseHardwareAcousticEchoCanceler =
PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder() WebRtcAudioEffects.canUseAcousticEchoCanceler()
.setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext())) && !HARDWARE_AEC_BLACKLIST.contains(Build.MODEL);
.setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglBase.getEglBaseContext(), true, true)) Log.d(
.setAudioDeviceModule(JavaAudioDeviceModule.builder(context) Config.LOGTAG,
.setUseHardwareAcousticEchoCanceler(setUseHardwareAcousticEchoCanceler) String.format(
.createAudioDeviceModule() "setUseHardwareAcousticEchoCanceler(%s) model=%s",
) setUseHardwareAcousticEchoCanceler, Build.MODEL));
PeerConnectionFactory peerConnectionFactory =
PeerConnectionFactory.builder()
.setVideoDecoderFactory(
new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
.setVideoEncoderFactory(
new DefaultVideoEncoderFactory(
eglBase.getEglBaseContext(), true, true))
.setAudioDeviceModule(
JavaAudioDeviceModule.builder(context)
.setUseHardwareAcousticEchoCanceler(
setUseHardwareAcousticEchoCanceler)
.createAudioDeviceModule())
.createPeerConnectionFactory(); .createPeerConnectionFactory();
final PeerConnection.RTCConfiguration rtcConfig = buildConfiguration(iceServers); final PeerConnection.RTCConfiguration rtcConfig = buildConfiguration(iceServers);
final PeerConnection peerConnection = peerConnectionFactory.createPeerConnection(rtcConfig, peerConnectionObserver); final PeerConnection peerConnection =
peerConnectionFactory.createPeerConnection(rtcConfig, peerConnectionObserver);
if (peerConnection == null) { if (peerConnection == null) {
throw new InitializationException("Unable to create PeerConnection"); throw new InitializationException("Unable to create PeerConnection");
} }
final Optional<CapturerChoice> optionalCapturerChoice = media.contains(Media.VIDEO) ? getVideoCapturer() : Optional.absent(); final Optional<VideoSourceWrapper> optionalVideoSourceWrapper =
media.contains(Media.VIDEO)
? new VideoSourceWrapper.Factory(requireContext()).create()
: Optional.absent();
if (optionalCapturerChoice.isPresent()) { if (optionalVideoSourceWrapper.isPresent()) {
this.capturerChoice = optionalCapturerChoice.get(); this.videoSourceWrapper = optionalVideoSourceWrapper.get();
final CameraVideoCapturer capturer = this.capturerChoice.cameraVideoCapturer; this.videoSourceWrapper.initialize(
final VideoSource videoSource = peerConnectionFactory.createVideoSource(false); peerConnectionFactory, context, eglBase.getEglBaseContext());
SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("webrtc", eglBase.getEglBaseContext()); this.videoSourceWrapper.startCapture();
capturer.initialize(surfaceTextureHelper, requireContext(), videoSource.getCapturerObserver());
Log.d(Config.LOGTAG, String.format("start capturing at %dx%d@%d", capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate()));
capturer.startCapture(capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate());
this.localVideoTrack = peerConnectionFactory.createVideoTrack("my-video-track", videoSource); final VideoTrack videoTrack =
peerConnectionFactory.createVideoTrack(
"my-video-track", this.videoSourceWrapper.getVideoSource());
peerConnection.addTrack(this.localVideoTrack); this.localVideoTrack = TrackWrapper.addTrack(peerConnection, videoTrack);
} }
if (media.contains(Media.AUDIO)) { if (media.contains(Media.AUDIO)) {
// set up audio track // set up audio track
final AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints()); final AudioSource audioSource =
this.localAudioTrack = peerConnectionFactory.createAudioTrack("my-audio-track", audioSource); peerConnectionFactory.createAudioSource(new MediaConstraints());
peerConnection.addTrack(this.localAudioTrack); final AudioTrack audioTrack =
peerConnectionFactory.createAudioTrack("my-audio-track", audioSource);
this.localAudioTrack = TrackWrapper.addTrack(peerConnection, audioTrack);
} }
peerConnection.setAudioPlayout(true); peerConnection.setAudioPlayout(true);
peerConnection.setAudioRecording(true); peerConnection.setAudioRecording(true);
this.peerConnection = peerConnection; this.peerConnection = peerConnection;
} }
private static PeerConnection.RTCConfiguration buildConfiguration(final List<PeerConnection.IceServer> iceServers) { private static PeerConnection.RTCConfiguration buildConfiguration(
final PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers); final List<PeerConnection.IceServer> iceServers) {
rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; //XEP-0176 doesn't support tcp final PeerConnection.RTCConfiguration rtcConfig =
rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY; new PeerConnection.RTCConfiguration(iceServers);
rtcConfig.tcpCandidatePolicy =
PeerConnection.TcpCandidatePolicy.DISABLED; // XEP-0176 doesn't support tcp
rtcConfig.continualGatheringPolicy =
PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN; rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.NEGOTIATE; rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.NEGOTIATE;
rtcConfig.enableImplicitRollback = true; rtcConfig.enableImplicitRollback = true;
@ -332,7 +344,7 @@ public class WebRTCWrapper {
synchronized void close() { synchronized void close() {
final PeerConnection peerConnection = this.peerConnection; final PeerConnection peerConnection = this.peerConnection;
final CapturerChoice capturerChoice = this.capturerChoice; final VideoSourceWrapper videoSourceWrapper = this.videoSourceWrapper;
final AppRTCAudioManager audioManager = this.appRTCAudioManager; final AppRTCAudioManager audioManager = this.appRTCAudioManager;
final EglBase eglBase = this.eglBase; final EglBase eglBase = this.eglBase;
if (peerConnection != null) { if (peerConnection != null) {
@ -345,12 +357,13 @@ public class WebRTCWrapper {
} }
this.localVideoTrack = null; this.localVideoTrack = null;
this.remoteVideoTrack = null; this.remoteVideoTrack = null;
if (capturerChoice != null) { if (videoSourceWrapper != null) {
try { try {
capturerChoice.cameraVideoCapturer.stopCapture(); videoSourceWrapper.stopCapture();
} catch (InterruptedException e) { } catch (final InterruptedException e) {
Log.e(Config.LOGTAG, "unable to stop capturing"); Log.e(Config.LOGTAG, "unable to stop capturing");
} }
// TODO call dispose
} }
if (eglBase != null) { if (eglBase != null) {
eglBase.release(); eglBase.release();
@ -363,94 +376,95 @@ public class WebRTCWrapper {
|| this.eglBase != null || this.eglBase != null
|| this.localVideoTrack != null || this.localVideoTrack != null
|| this.remoteVideoTrack != null) { || this.remoteVideoTrack != null) {
final IllegalStateException e = new IllegalStateException("WebRTCWrapper hasn't been closed properly"); final IllegalStateException e =
new IllegalStateException("WebRTCWrapper hasn't been closed properly");
Log.e(Config.LOGTAG, "verifyClosed() failed. Going to throw", e); Log.e(Config.LOGTAG, "verifyClosed() failed. Going to throw", e);
throw e; throw e;
} }
} }
boolean isCameraSwitchable() { boolean isCameraSwitchable() {
final CapturerChoice capturerChoice = this.capturerChoice; final VideoSourceWrapper videoSourceWrapper = this.videoSourceWrapper;
return capturerChoice != null && capturerChoice.availableCameras.size() > 1; return videoSourceWrapper != null && videoSourceWrapper.isCameraSwitchable();
} }
boolean isFrontCamera() { boolean isFrontCamera() {
final CapturerChoice capturerChoice = this.capturerChoice; final VideoSourceWrapper videoSourceWrapper = this.videoSourceWrapper;
return capturerChoice == null || capturerChoice.isFrontCamera; return videoSourceWrapper == null || videoSourceWrapper.isFrontCamera();
} }
ListenableFuture<Boolean> switchCamera() { ListenableFuture<Boolean> switchCamera() {
final CapturerChoice capturerChoice = this.capturerChoice; final VideoSourceWrapper videoSourceWrapper = this.videoSourceWrapper;
if (capturerChoice == null) { if (videoSourceWrapper == null) {
return Futures.immediateFailedFuture(new IllegalStateException("CameraCapturer has not been initialized")); return Futures.immediateFailedFuture(
new IllegalStateException("VideoSourceWrapper has not been initialized"));
} }
final SettableFuture<Boolean> future = SettableFuture.create(); return videoSourceWrapper.switchCamera();
capturerChoice.cameraVideoCapturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
@Override
public void onCameraSwitchDone(boolean isFrontCamera) {
capturerChoice.isFrontCamera = isFrontCamera;
future.set(isFrontCamera);
}
@Override
public void onCameraSwitchError(final String message) {
future.setException(new IllegalStateException(String.format("Unable to switch camera %s", message)));
}
});
return future;
} }
boolean isMicrophoneEnabled() { boolean isMicrophoneEnabled() {
final AudioTrack audioTrack = this.localAudioTrack; final Optional<AudioTrack> audioTrack = TrackWrapper.get(this.localAudioTrack);
if (audioTrack == null) { if (audioTrack.isPresent()) {
throw new IllegalStateException("Local audio track does not exist (yet)");
}
try { try {
return audioTrack.enabled(); return audioTrack.get().enabled();
} catch (final IllegalStateException e) { } catch (final IllegalStateException e) {
//sometimes UI might still be rendering the buttons when a background thread has already ended the call // sometimes UI might still be rendering the buttons when a background thread has
// already ended the call
return false; return false;
} }
} else {
throw new IllegalStateException("Local audio track does not exist (yet)");
}
} }
boolean setMicrophoneEnabled(final boolean enabled) { boolean setMicrophoneEnabled(final boolean enabled) {
final AudioTrack audioTrack = this.localAudioTrack; final Optional<AudioTrack> audioTrack = TrackWrapper.get(this.localAudioTrack);
if (audioTrack == null) { if (audioTrack.isPresent()) {
throw new IllegalStateException("Local audio track does not exist (yet)");
}
try { try {
audioTrack.setEnabled(enabled); audioTrack.get().setEnabled(enabled);
return true; return true;
} catch (final IllegalStateException e) { } catch (final IllegalStateException e) {
Log.d(Config.LOGTAG, "unable to toggle microphone", e); Log.d(Config.LOGTAG, "unable to toggle microphone", e);
// ignoring race condition in case MediaStreamTrack has been disposed // ignoring race condition in case MediaStreamTrack has been disposed
return false; return false;
} }
} else {
throw new IllegalStateException("Local audio track does not exist (yet)");
}
} }
boolean isVideoEnabled() { boolean isVideoEnabled() {
final VideoTrack videoTrack = this.localVideoTrack; final Optional<VideoTrack> videoTrack = TrackWrapper.get(this.localVideoTrack);
if (videoTrack == null) { if (videoTrack.isPresent()) {
return false; return videoTrack.get().enabled();
} }
return videoTrack.enabled(); return false;
} }
void setVideoEnabled(final boolean enabled) { void setVideoEnabled(final boolean enabled) {
final VideoTrack videoTrack = this.localVideoTrack; final Optional<VideoTrack> videoTrack = TrackWrapper.get(this.localVideoTrack);
if (videoTrack == null) { if (videoTrack.isPresent()) {
throw new IllegalStateException("Local video track does not exist"); videoTrack.get().setEnabled(enabled);
return;
} }
videoTrack.setEnabled(enabled); throw new IllegalStateException("Local video track does not exist");
} }
synchronized ListenableFuture<SessionDescription> setLocalDescription() { synchronized ListenableFuture<SessionDescription> setLocalDescription() {
return Futures.transformAsync(getPeerConnectionFuture(), peerConnection -> { return Futures.transformAsync(
getPeerConnectionFuture(),
peerConnection -> {
if (peerConnection == null) {
return Futures.immediateFailedFuture(
new IllegalStateException("PeerConnection was null"));
}
final SettableFuture<SessionDescription> future = SettableFuture.create(); final SettableFuture<SessionDescription> future = SettableFuture.create();
peerConnection.setLocalDescription(new SetSdpObserver() { peerConnection.setLocalDescription(
new SetSdpObserver() {
@Override @Override
public void onSetSuccess() { public void onSetSuccess() {
final SessionDescription description = peerConnection.getLocalDescription(); final SessionDescription description =
peerConnection.getLocalDescription();
Log.d(EXTENDED_LOGGING_TAG, "set local description:"); Log.d(EXTENDED_LOGGING_TAG, "set local description:");
logDescription(description); logDescription(description);
future.set(description); future.set(description);
@ -458,25 +472,37 @@ public class WebRTCWrapper {
@Override @Override
public void onSetFailure(final String message) { public void onSetFailure(final String message) {
future.setException(new FailureToSetDescriptionException(message)); future.setException(
new FailureToSetDescriptionException(message));
} }
}); });
return future; return future;
}, MoreExecutors.directExecutor()); },
MoreExecutors.directExecutor());
} }
private static void logDescription(final SessionDescription sessionDescription) { private static void logDescription(final SessionDescription sessionDescription) {
for (final String line : sessionDescription.description.split(eu.siacs.conversations.xmpp.jingle.SessionDescription.LINE_DIVIDER)) { for (final String line :
sessionDescription.description.split(
eu.siacs.conversations.xmpp.jingle.SessionDescription.LINE_DIVIDER)) {
Log.d(EXTENDED_LOGGING_TAG, line); Log.d(EXTENDED_LOGGING_TAG, line);
} }
} }
synchronized ListenableFuture<Void> setRemoteDescription(final SessionDescription sessionDescription) { synchronized ListenableFuture<Void> setRemoteDescription(
final SessionDescription sessionDescription) {
Log.d(EXTENDED_LOGGING_TAG, "setting remote description:"); Log.d(EXTENDED_LOGGING_TAG, "setting remote description:");
logDescription(sessionDescription); logDescription(sessionDescription);
return Futures.transformAsync(getPeerConnectionFuture(), peerConnection -> { return Futures.transformAsync(
getPeerConnectionFuture(),
peerConnection -> {
if (peerConnection == null) {
return Futures.immediateFailedFuture(
new IllegalStateException("PeerConnection was null"));
}
final SettableFuture<Void> future = SettableFuture.create(); final SettableFuture<Void> future = SettableFuture.create();
peerConnection.setRemoteDescription(new SetSdpObserver() { peerConnection.setRemoteDescription(
new SetSdpObserver() {
@Override @Override
public void onSetSuccess() { public void onSetSuccess() {
future.set(null); future.set(null);
@ -484,11 +510,14 @@ public class WebRTCWrapper {
@Override @Override
public void onSetFailure(final String message) { public void onSetFailure(final String message) {
future.setException(new FailureToSetDescriptionException(message)); future.setException(
new FailureToSetDescriptionException(message));
} }
}, sessionDescription); },
sessionDescription);
return future; return future;
}, MoreExecutors.directExecutor()); },
MoreExecutors.directExecutor());
} }
@Nonnull @Nonnull
@ -513,26 +542,6 @@ public class WebRTCWrapper {
requirePeerConnection().addIceCandidate(iceCandidate); requirePeerConnection().addIceCandidate(iceCandidate);
} }
private Optional<CapturerChoice> getVideoCapturer() {
final CameraEnumerator enumerator = new Camera2Enumerator(requireContext());
final Set<String> deviceNames = ImmutableSet.copyOf(enumerator.getDeviceNames());
for (final String deviceName : deviceNames) {
if (isFrontFacing(enumerator, deviceName)) {
final CapturerChoice capturerChoice = of(enumerator, deviceName, deviceNames);
if (capturerChoice == null) {
return Optional.absent();
}
capturerChoice.isFrontCamera = true;
return Optional.of(capturerChoice);
}
}
if (deviceNames.size() == 0) {
return Optional.absent();
} else {
return Optional.fromNullable(of(enumerator, Iterables.get(deviceNames, 0), deviceNames));
}
}
PeerConnection.PeerConnectionState getState() { PeerConnection.PeerConnectionState getState() {
return requirePeerConnection().connectionState(); return requirePeerConnection().connectionState();
} }
@ -541,13 +550,12 @@ public class WebRTCWrapper {
return requirePeerConnection().signalingState(); return requirePeerConnection().signalingState();
} }
EglBase.Context getEglBaseContext() { EglBase.Context getEglBaseContext() {
return this.eglBase.getEglBaseContext(); return this.eglBase.getEglBaseContext();
} }
Optional<VideoTrack> getLocalVideoTrack() { Optional<VideoTrack> getLocalVideoTrack() {
return Optional.fromNullable(this.localVideoTrack); return TrackWrapper.get(this.localVideoTrack);
} }
Optional<VideoTrack> getRemoteVideoTrack() { Optional<VideoTrack> getRemoteVideoTrack() {
@ -575,12 +583,14 @@ public class WebRTCWrapper {
void onConnectionChange(PeerConnection.PeerConnectionState newState); void onConnectionChange(PeerConnection.PeerConnectionState newState);
void onAudioDeviceChanged(AppRTCAudioManager.AudioDevice selectedAudioDevice, Set<AppRTCAudioManager.AudioDevice> availableAudioDevices); void onAudioDeviceChanged(
AppRTCAudioManager.AudioDevice selectedAudioDevice,
Set<AppRTCAudioManager.AudioDevice> availableAudioDevices);
void onRenegotiationNeeded(); void onRenegotiationNeeded();
} }
private static abstract class SetSdpObserver implements SdpObserver { private abstract static class SetSdpObserver implements SdpObserver {
@Override @Override
public void onCreateSuccess(org.webrtc.SessionDescription sessionDescription) { public void onCreateSuccess(org.webrtc.SessionDescription sessionDescription) {
@ -591,22 +601,6 @@ public class WebRTCWrapper {
public void onCreateFailure(String s) { public void onCreateFailure(String s) {
throw new IllegalStateException("Not able to use SetSdpObserver"); throw new IllegalStateException("Not able to use SetSdpObserver");
} }
}
private static abstract class CreateSdpObserver implements SdpObserver {
@Override
public void onSetSuccess() {
throw new IllegalStateException("Not able to use CreateSdpObserver");
}
@Override
public void onSetFailure(String s) {
throw new IllegalStateException("Not able to use CreateSdpObserver");
}
} }
static class InitializationException extends Exception { static class InitializationException extends Exception {
@ -625,7 +619,6 @@ public class WebRTCWrapper {
private PeerConnectionNotInitialized() { private PeerConnectionNotInitialized() {
super("initialize PeerConnection first"); super("initialize PeerConnection first");
} }
} }
private static class FailureToSetDescriptionException extends IllegalArgumentException { private static class FailureToSetDescriptionException extends IllegalArgumentException {
@ -634,20 +627,4 @@ public class WebRTCWrapper {
} }
} }
private static class CapturerChoice {
private final CameraVideoCapturer cameraVideoCapturer;
private final CameraEnumerationAndroid.CaptureFormat captureFormat;
private final Set<String> availableCameras;
private boolean isFrontCamera = false;
CapturerChoice(CameraVideoCapturer cameraVideoCapturer, CameraEnumerationAndroid.CaptureFormat captureFormat, Set<String> cameras) {
this.cameraVideoCapturer = cameraVideoCapturer;
this.captureFormat = captureFormat;
this.availableCameras = cameras;
}
int getFrameRate() {
return Math.max(captureFormat.framerate.min, Math.min(CAPTURING_MAX_FRAME_RATE, captureFormat.framerate.max));
}
}
} }