rudimentary video capturing

commit 339bdaea06
parent bfb9a6267a
RtpSessionActivity.java

@@ -15,8 +15,12 @@ import android.view.View;
 import android.view.WindowManager;
 import android.widget.Toast;
 
+import com.google.common.base.Optional;
 import com.google.common.collect.ImmutableList;
 
+import org.webrtc.SurfaceViewRenderer;
+import org.webrtc.VideoTrack;
+
 import java.lang.ref.WeakReference;
 import java.util.Arrays;
 import java.util.Set;
@@ -37,8 +41,6 @@ import rocks.xmpp.addr.Jid;
 import static eu.siacs.conversations.utils.PermissionUtils.getFirstDenied;
 import static java.util.Arrays.asList;
 
-//TODO if last state was BUSY (or RETRY); we want to reset action to view or something so we don’t automatically call again on recreate
-
 public class RtpSessionActivity extends XmppActivity implements XmppConnectionService.OnJingleRtpConnectionUpdate {
 
     private static final String PROXIMITY_WAKE_LOCK_TAG = "conversations:in-rtp-session";
@@ -53,6 +55,7 @@ public class RtpSessionActivity extends XmppActivity implements XmppConnectionSe
     public static final String ACTION_MAKE_VOICE_CALL = "action_make_voice_call";
     public static final String ACTION_MAKE_VIDEO_CALL = "action_make_video_call";
 
+
     private WeakReference<JingleRtpConnection> rtpConnectionReference;
 
     private ActivityRtpSessionBinding binding;
@@ -284,6 +287,30 @@ public class RtpSessionActivity extends XmppActivity implements XmppConnectionSe
         setIntent(intent);
     }
 
+    private void updateVideoViews() {
+        final Optional<VideoTrack> localVideoTrack = requireRtpConnection().geLocalVideoTrack();
+        if (localVideoTrack.isPresent()) {
+            try {
+                binding.localVideo.init(requireRtpConnection().getEglBaseContext(), null);
+            } catch (IllegalStateException e) {
+                Log.d(Config.LOGTAG, "ignoring already init for now", e);
+            }
+            binding.localVideo.setEnableHardwareScaler(true);
+            binding.localVideo.setMirror(true);
+            localVideoTrack.get().addSink(binding.localVideo);
+        }
+        final Optional<VideoTrack> remoteVideoTrack = requireRtpConnection().getRemoteVideoTrack();
+        if (remoteVideoTrack.isPresent()) {
+            try {
+                binding.remoteVideo.init(requireRtpConnection().getEglBaseContext(), null);
+            } catch (IllegalStateException e) {
+                Log.d(Config.LOGTAG, "ignoring already init for now", e);
+            }
+            binding.remoteVideo.setEnableHardwareScaler(true);
+            remoteVideoTrack.get().addSink(binding.remoteVideo);
+        }
+    }
+
     private void updateStateDisplay(final RtpEndUserState state) {
         switch (state) {
             case INCOMING_CALL:
@@ -498,6 +525,7 @@ public class RtpSessionActivity extends XmppActivity implements XmppConnectionSe
             runOnUiThread(() -> {
                 updateStateDisplay(state);
                 updateButtonConfiguration(state);
+                updateVideoViews();
             });
         } else {
            Log.d(Config.LOGTAG, "received update for other rtp session");
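The new `updateVideoViews()` initializes each `SurfaceViewRenderer` with the EGL context shared by the WebRTC stack and then attaches the matching `VideoTrack` as a sink; the caught `IllegalStateException` papers over a second `init()` when the state callback fires again. A minimal sketch of that bind/unbind pattern (the `release()` counterpart is not part of this commit; class and method names here are illustrative):

```java
import org.webrtc.EglBase;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoTrack;

final class VideoViewBinder {

    // Attach a WebRTC VideoTrack to a SurfaceViewRenderer that shares the wrapper's EGL context.
    static void bind(final SurfaceViewRenderer renderer, final VideoTrack track, final EglBase.Context eglContext) {
        renderer.init(eglContext, null);        // must only run once per renderer instance
        renderer.setEnableHardwareScaler(true);
        track.addSink(renderer);                // frames start flowing into the view
    }

    // Teardown counterpart a later cleanup pass would need.
    static void unbind(final SurfaceViewRenderer renderer, final VideoTrack track) {
        track.removeSink(renderer);
        renderer.release();                     // frees the underlying EGL surface
    }
}
```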
JingleRtpConnection.java

@@ -3,13 +3,16 @@ package eu.siacs.conversations.xmpp.jingle;
 import android.os.SystemClock;
 import android.util.Log;
 
+import com.google.common.base.Optional;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.primitives.Ints;
 
+import org.webrtc.EglBase;
 import org.webrtc.IceCandidate;
 import org.webrtc.PeerConnection;
+import org.webrtc.VideoTrack;
 
 import java.util.ArrayDeque;
 import java.util.Arrays;
@@ -986,6 +989,19 @@ public class JingleRtpConnection extends AbstractJingleConnection implements Web
         return this.state;
     }
 
+    public Optional<VideoTrack> geLocalVideoTrack() {
+        return webRTCWrapper.getLocalVideoTrack();
+    }
+
+    public Optional<VideoTrack> getRemoteVideoTrack() {
+        return webRTCWrapper.getRemoteVideoTrack();
+    }
+
+
+    public EglBase.Context getEglBaseContext() {
+        return webRTCWrapper.getEglBaseContext();
+    }
+
     private interface OnIceServersDiscovered {
         void onIceServersDiscovered(List<PeerConnection.IceServer> iceServers);
     }
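The new accessors expose the tracks as Guava `Optional`s because either track can legitimately be missing, for instance before `initializePeerConnection()` has run or when no usable camera was found. A hypothetical caller-side check, mirroring what the activity does:

```java
import com.google.common.base.Optional;

import org.webrtc.VideoTrack;

import eu.siacs.conversations.xmpp.jingle.JingleRtpConnection;

final class VideoTrackChecks {

    // Hypothetical helper: true once a remote video track exists and is enabled.
    static boolean hasRemoteVideo(final JingleRtpConnection connection) {
        final Optional<VideoTrack> remote = connection.getRemoteVideoTrack();
        return remote.isPresent() && remote.get().enabled();
    }
}
```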
WebRTCWrapper.java

@@ -1,11 +1,13 @@
 package eu.siacs.conversations.xmpp.jingle;
 
 import android.content.Context;
+import android.os.Build;
 import android.os.Handler;
 import android.os.Looper;
 import android.util.Log;
 
-import com.google.common.collect.ImmutableList;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.MoreExecutors;
@@ -13,11 +15,15 @@ import com.google.common.util.concurrent.SettableFuture;
 
 import org.webrtc.AudioSource;
 import org.webrtc.AudioTrack;
-import org.webrtc.Camera1Capturer;
 import org.webrtc.Camera1Enumerator;
+import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraEnumerator;
 import org.webrtc.CameraVideoCapturer;
 import org.webrtc.CandidatePairChangeEvent;
 import org.webrtc.DataChannel;
+import org.webrtc.DefaultVideoDecoderFactory;
+import org.webrtc.DefaultVideoEncoderFactory;
+import org.webrtc.EglBase;
 import org.webrtc.IceCandidate;
 import org.webrtc.MediaConstraints;
 import org.webrtc.MediaStream;
@@ -26,7 +32,7 @@ import org.webrtc.PeerConnectionFactory;
 import org.webrtc.RtpReceiver;
 import org.webrtc.SdpObserver;
 import org.webrtc.SessionDescription;
-import org.webrtc.VideoCapturer;
+import org.webrtc.SurfaceTextureHelper;
 import org.webrtc.VideoSource;
 import org.webrtc.VideoTrack;
 
@@ -41,11 +47,16 @@ import eu.siacs.conversations.services.AppRTCAudioManager;
 
 public class WebRTCWrapper {
 
+    private final EventCallback eventCallback;
+    private final AppRTCAudioManager.AudioManagerEvents audioManagerEvents = new AppRTCAudioManager.AudioManagerEvents() {
+        @Override
+        public void onAudioDeviceChanged(AppRTCAudioManager.AudioDevice selectedAudioDevice, Set<AppRTCAudioManager.AudioDevice> availableAudioDevices) {
+            eventCallback.onAudioDeviceChanged(selectedAudioDevice, availableAudioDevices);
+        }
+    };
+    private final Handler mainHandler = new Handler(Looper.getMainLooper());
     private VideoTrack localVideoTrack = null;
     private VideoTrack remoteVideoTrack = null;
-
-    private final EventCallback eventCallback;
-
     private final PeerConnection.Observer peerConnectionObserver = new PeerConnection.Observer() {
         @Override
         public void onSignalingChange(PeerConnection.SignalingState signalingState) {
@@ -94,9 +105,6 @@ public class WebRTCWrapper {
         @Override
         public void onAddStream(MediaStream mediaStream) {
             Log.d(Config.LOGTAG, "onAddStream");
-            for (AudioTrack audioTrack : mediaStream.audioTracks) {
-                Log.d(Config.LOGTAG, "remote? - audioTrack enabled:" + audioTrack.enabled() + " state=" + audioTrack.state());
-            }
             final List<VideoTrack> videoTracks = mediaStream.videoTracks;
             if (videoTracks.size() > 0) {
                 Log.d(Config.LOGTAG, "more than zero remote video tracks found. using first");
@@ -125,17 +133,12 @@ public class WebRTCWrapper {
 
         }
     };
-    private final AppRTCAudioManager.AudioManagerEvents audioManagerEvents = new AppRTCAudioManager.AudioManagerEvents() {
-        @Override
-        public void onAudioDeviceChanged(AppRTCAudioManager.AudioDevice selectedAudioDevice, Set<AppRTCAudioManager.AudioDevice> availableAudioDevices) {
-            eventCallback.onAudioDeviceChanged(selectedAudioDevice, availableAudioDevices);
-        }
-    };
     @Nullable
     private PeerConnection peerConnection = null;
     private AudioTrack localAudioTrack = null;
     private AppRTCAudioManager appRTCAudioManager = null;
-    private final Handler mainHandler = new Handler(Looper.getMainLooper());
+    private Context context = null;
+    private EglBase eglBase = null;
 
     public WebRTCWrapper(final EventCallback eventCallback) {
         this.eventCallback = eventCallback;
@@ -145,6 +148,8 @@ public class WebRTCWrapper {
         PeerConnectionFactory.initialize(
                 PeerConnectionFactory.InitializationOptions.builder(context).createInitializationOptions()
         );
+        this.eglBase = EglBase.create();
+        this.context = context;
         mainHandler.post(() -> {
             appRTCAudioManager = AppRTCAudioManager.create(context, AppRTCAudioManager.SpeakerPhonePreference.EARPIECE);
             appRTCAudioManager.start(audioManagerEvents);
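The wrapper now owns a single `EglBase` (created during setup, right after `PeerConnectionFactory.initialize`) plus the `Context` it was set up with. Its `EglBase.Context` is the glue between hardware video encode/decode, the capture path, and the renderers in the activity. A minimal lifecycle sketch, assuming setup has already happened:

```java
import org.webrtc.EglBase;

final class EglLifecycleSketch {

    static void demo() {
        final EglBase eglBase = EglBase.create();                   // done once in the setup path
        final EglBase.Context shared = eglBase.getEglBaseContext();
        // 'shared' is what gets handed to DefaultVideoEncoderFactory/DefaultVideoDecoderFactory,
        // to SurfaceTextureHelper.create() for the camera, and to SurfaceViewRenderer.init() in the UI.
        eglBase.release();                                          // only once nothing references the context anymore
    }
}
```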
@@ -153,64 +158,35 @@ public class WebRTCWrapper {
     }
 
     public void initializePeerConnection(final List<PeerConnection.IceServer> iceServers) throws InitializationException {
-        PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+        Preconditions.checkState(this.eglBase != null);
+        PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
+                .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
+                .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglBase.getEglBaseContext(), true, true))
+                .createPeerConnectionFactory();
 
-        CameraVideoCapturer capturer = null;
-        Camera1Enumerator camera1Enumerator = new Camera1Enumerator();
-        for (String deviceName : camera1Enumerator.getDeviceNames()) {
-            Log.d(Config.LOGTAG, "camera device name: " + deviceName);
-            if (camera1Enumerator.isFrontFacing(deviceName)) {
-                capturer = camera1Enumerator.createCapturer(deviceName, new CameraVideoCapturer.CameraEventsHandler() {
-                    @Override
-                    public void onCameraError(String s) {
-
-                    }
-
-                    @Override
-                    public void onCameraDisconnected() {
-
-                    }
-
-                    @Override
-                    public void onCameraFreezed(String s) {
-
-                    }
-
-                    @Override
-                    public void onCameraOpening(String s) {
-                        Log.d(Config.LOGTAG, "onCameraOpening");
-                    }
-
-                    @Override
-                    public void onFirstFrameAvailable() {
-                        Log.d(Config.LOGTAG, "onFirstFrameAvailable");
-                    }
-
-                    @Override
-                    public void onCameraClosed() {
-
-                    }
-                });
-            }
-        }
-
-        /*if (capturer != null) {
-            capturer.initialize();
-            Log.d(Config.LOGTAG,"start capturing");
-            capturer.startCapture(800,600,30);
-        }*/
-
-        final VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
-        final VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("my-video-track", videoSource);
-
-        final AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
-
-        this.localAudioTrack = peerConnectionFactory.createAudioTrack("my-audio-track", audioSource);
         final MediaStream stream = peerConnectionFactory.createLocalMediaStream("my-media-stream");
-        stream.addTrack(this.localAudioTrack);
-        //stream.addTrack(videoTrack);
 
-        this.localVideoTrack = videoTrack;
+        final Optional<CameraVideoCapturer> optionalCapturer = getVideoCapturer();
+
+        if (optionalCapturer.isPresent()) {
+            final CameraVideoCapturer capturer = optionalCapturer.get();
+            final VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
+            SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("webrtc", eglBase.getEglBaseContext());
+            capturer.initialize(surfaceTextureHelper, requireContext(), videoSource.getCapturerObserver());
+            capturer.startCapture(320, 240, 30);
+
+            this.localVideoTrack = peerConnectionFactory.createVideoTrack("my-video-track", videoSource);
+
+            stream.addTrack(this.localVideoTrack);
+        }
+
+
+        //set up audio track
+        final AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
+        this.localAudioTrack = peerConnectionFactory.createAudioTrack("my-audio-track", audioSource);
+        stream.addTrack(this.localAudioTrack);
+
+
         final PeerConnection peerConnection = peerConnectionFactory.createPeerConnection(iceServers, peerConnectionObserver);
         if (peerConnection == null) {
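`initializePeerConnection()` now builds the whole local video path: pick a camera via `getVideoCapturer()` (added further down), feed it a `SurfaceTextureHelper` bound to the shared EGL context, start capturing at 320x240 and 30 fps, and wrap the resulting `VideoSource` in a `VideoTrack` that is added to the local `MediaStream`. For reference, a self-contained sketch of that pipeline plus the matching teardown (the teardown is not part of this commit; names are illustrative):

```java
import android.content.Context;

import org.webrtc.CameraVideoCapturer;
import org.webrtc.EglBase;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

final class CapturePipelineSketch {

    static VideoTrack startLocalVideo(final PeerConnectionFactory factory,
                                      final CameraVideoCapturer capturer,
                                      final EglBase eglBase,
                                      final Context appContext) {
        final VideoSource videoSource = factory.createVideoSource(capturer.isScreencast());
        final SurfaceTextureHelper helper = SurfaceTextureHelper.create("webrtc", eglBase.getEglBaseContext());
        capturer.initialize(helper, appContext, videoSource.getCapturerObserver());
        capturer.startCapture(320, 240, 30);   // width, height, frames per second
        return factory.createVideoTrack("my-video-track", videoSource);
    }

    static void stopLocalVideo(final CameraVideoCapturer capturer,
                               final SurfaceTextureHelper helper,
                               final VideoSource videoSource) throws InterruptedException {
        capturer.stopCapture();                // blocks until the camera session has stopped
        capturer.dispose();
        helper.dispose();
        videoSource.dispose();
    }
}
```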
@@ -225,7 +201,7 @@ public class WebRTCWrapper {
     public void close() {
         final PeerConnection peerConnection = this.peerConnection;
         if (peerConnection != null) {
-            peerConnection.close();
+            peerConnection.dispose();
         }
         final AppRTCAudioManager audioManager = this.appRTCAudioManager;
         if (audioManager != null) {
@@ -233,14 +209,6 @@
         }
     }
 
-    public void setMicrophoneEnabled(final boolean enabled) {
-        final AudioTrack audioTrack = this.localAudioTrack;
-        if (audioTrack == null) {
-            throw new IllegalStateException("Local audio track does not exist (yet)");
-        }
-        audioTrack.setEnabled(enabled);
-    }
-
     public boolean isMicrophoneEnabled() {
         final AudioTrack audioTrack = this.localAudioTrack;
         if (audioTrack == null) {
@@ -249,6 +217,13 @@
         return audioTrack.enabled();
     }
 
+    public void setMicrophoneEnabled(final boolean enabled) {
+        final AudioTrack audioTrack = this.localAudioTrack;
+        if (audioTrack == null) {
+            throw new IllegalStateException("Local audio track does not exist (yet)");
+        }
+        audioTrack.setEnabled(enabled);
+    }
 
     public ListenableFuture<SessionDescription> createOffer() {
         return Futures.transformAsync(getPeerConnectionFuture(), peerConnection -> {
@@ -261,6 +236,7 @@
 
                 @Override
                 public void onCreateFailure(String s) {
+                    Log.d(Config.LOGTAG, "create failure" + s);
                     future.setException(new IllegalStateException("Unable to create offer: " + s));
                 }
             }, new MediaConstraints());
@@ -297,6 +273,7 @@
 
                 @Override
                 public void onSetFailure(String s) {
+                    Log.d(Config.LOGTAG, "unable to set local " + s);
                     future.setException(new IllegalArgumentException("unable to set local session description: " + s));
 
                 }
@@ -338,10 +315,45 @@
         requirePeerConnection().addIceCandidate(iceCandidate);
     }
 
+    private CameraEnumerator getCameraEnumerator() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+            return new Camera2Enumerator(requireContext());
+        } else {
+            return new Camera1Enumerator();
+        }
+    }
+
+    private Optional<CameraVideoCapturer> getVideoCapturer() {
+        final CameraEnumerator enumerator = getCameraEnumerator();
+        final String[] deviceNames = enumerator.getDeviceNames();
+        for (String deviceName : deviceNames) {
+            if (enumerator.isFrontFacing(deviceName)) {
+                return Optional.fromNullable(enumerator.createCapturer(deviceName, null));
+            }
+        }
+        if (deviceNames.length == 0) {
+            return Optional.absent();
+        } else {
+            return Optional.fromNullable(enumerator.createCapturer(deviceNames[0], null));
+        }
+    }
+
     public PeerConnection.PeerConnectionState getState() {
         return requirePeerConnection().connectionState();
     }
 
+    public EglBase.Context getEglBaseContext() {
+        return this.eglBase.getEglBaseContext();
+    }
+
+    public Optional<VideoTrack> getLocalVideoTrack() {
+        return Optional.fromNullable(this.localVideoTrack);
+    }
+
+    public Optional<VideoTrack> getRemoteVideoTrack() {
+        return Optional.fromNullable(this.remoteVideoTrack);
+    }
+
     private PeerConnection requirePeerConnection() {
         final PeerConnection peerConnection = this.peerConnection;
         if (peerConnection == null) {
@@ -350,10 +362,26 @@
         return peerConnection;
     }
 
+    private Context requireContext() {
+        final Context context = this.context;
+        if (context == null) {
+            throw new IllegalStateException("call setup first");
+        }
+        return context;
+    }
+
     public AppRTCAudioManager getAudioManager() {
         return appRTCAudioManager;
     }
 
+    public interface EventCallback {
+        void onIceCandidate(IceCandidate iceCandidate);
+
+        void onConnectionChange(PeerConnection.PeerConnectionState newState);
+
+        void onAudioDeviceChanged(AppRTCAudioManager.AudioDevice selectedAudioDevice, Set<AppRTCAudioManager.AudioDevice> availableAudioDevices);
+    }
+
     private static abstract class SetSdpObserver implements SdpObserver {
 
         @Override
@@ -389,12 +417,4 @@
             super(message);
         }
     }
-
-    public interface EventCallback {
-        void onIceCandidate(IceCandidate iceCandidate);
-
-        void onConnectionChange(PeerConnection.PeerConnectionState newState);
-
-        void onAudioDeviceChanged(AppRTCAudioManager.AudioDevice selectedAudioDevice, Set<AppRTCAudioManager.AudioDevice> availableAudioDevices);
-    }
 }
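`EventCallback` itself is unchanged, only moved up from the bottom of the file; it is how `WebRTCWrapper` reports ICE candidates, connection-state changes and audio-device changes back to its owner (presumably `JingleRtpConnection`, whose truncated `implements Web…` header above points at this interface). A bare-bones implementation sketch for illustration:

```java
import java.util.Set;

import org.webrtc.IceCandidate;
import org.webrtc.PeerConnection;

import eu.siacs.conversations.services.AppRTCAudioManager;
import eu.siacs.conversations.xmpp.jingle.WebRTCWrapper;

// Illustrative only; the real handling lives in the owning connection class.
final class LoggingEventCallback implements WebRTCWrapper.EventCallback {

    @Override
    public void onIceCandidate(final IceCandidate iceCandidate) {
        System.out.println("local candidate: " + iceCandidate.sdp);
    }

    @Override
    public void onConnectionChange(final PeerConnection.PeerConnectionState newState) {
        System.out.println("connection state: " + newState);
    }

    @Override
    public void onAudioDeviceChanged(final AppRTCAudioManager.AudioDevice selectedAudioDevice,
                                     final Set<AppRTCAudioManager.AudioDevice> availableAudioDevices) {
        System.out.println("audio device: " + selectedAudioDevice + " out of " + availableAudioDevices);
    }
}
```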
activity_rtp_session.xml

@@ -9,6 +9,7 @@
     android:background="?color_background_secondary">
 
     <android.support.design.widget.AppBarLayout
+        android:id="@+id/app_bar_layout"
         android:layout_width="match_parent"
         android:layout_height="wrap_content"
         android:paddingLeft="16dp"
@@ -36,6 +37,20 @@
     </android.support.design.widget.AppBarLayout>
 
 
+    <org.webrtc.SurfaceViewRenderer
+        android:id="@+id/local_video"
+        android:layout_width="100dp"
+        android:layout_height="100dp"
+        android:layout_below="@+id/app_bar_layout"
+        android:layout_alignParentRight="true"/>
+
+    <org.webrtc.SurfaceViewRenderer
+        android:id="@+id/remote_video"
+        android:layout_width="100dp"
+        android:layout_height="100dp"
+        android:layout_below="@+id/app_bar_layout"
+        android:layout_alignParentLeft="true"/>
+
     <RelativeLayout
         android:layout_width="288dp"
         android:layout_height="wrap_content"
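The two `SurfaceViewRenderer`s get fixed 100dp boxes below the now id-tagged app bar, local preview on the right and remote video on the left. Assuming the usual data-binding setup for this screen, those ids surface on the generated `ActivityRtpSessionBinding` as `binding.localVideo` and `binding.remoteVideo`, which is exactly what `updateVideoViews()` above touches; a hypothetical sketch:

```java
import android.databinding.DataBindingUtil;
import android.os.Bundle;

import eu.siacs.conversations.R;
import eu.siacs.conversations.databinding.ActivityRtpSessionBinding;

// Sketch only: how the new view ids map onto the generated binding class.
public class BindingSketchActivity extends android.app.Activity {

    private ActivityRtpSessionBinding binding;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        this.binding = DataBindingUtil.setContentView(this, R.layout.activity_rtp_session);
        this.binding.localVideo.setMirror(true);                 // @+id/local_video
        this.binding.remoteVideo.setEnableHardwareScaler(true);  // @+id/remote_video
    }
}
```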