/// <summary>
/// Builds the local video track for this.selectedDevice, pinned to the
/// resolution and frame rate of this.selectedProfile.
/// MUST NOT BE CALLED FROM THE UI THREAD.
/// </summary>
/// <param name="factory">WebRTC factory used to create the track source.</param>
/// <returns>The newly created local video track.</returns>
private IMediaStreamTrack getLocalVideo(IWebRtcFactory factory)
{
    // Pin each capture dimension by giving min and max the same value.
    string width = this.selectedProfile.Width.ToString();
    string height = this.selectedProfile.Height.ToString();
    string frameRate = this.selectedProfile.FrameRate.ToString();

    IReadOnlyList<IConstraint> mandatory = new List<IConstraint>
    {
        new Constraint("maxWidth", width),
        new Constraint("minWidth", width),
        new Constraint("maxHeight", height),
        new Constraint("minHeight", height),
        new Constraint("maxFrameRate", frameRate),
        new Constraint("minFrameRate", frameRate)
    };
    IReadOnlyList<IConstraint> optional = new List<IConstraint>();

    // NOTE: this call throws a very unhelpful exception when invoked from
    // the UI thread — hence the warning in the summary above.
    var capturer = VideoCapturer.Create(this.selectedDevice.Name, this.selectedDevice.Id, false);

    var options = new VideoOptions()
    {
        Factory = factory,
        Capturer = capturer,
        Constraints = new MediaConstraints(mandatory, optional)
    };
    var source = VideoTrackSource.Create(options);
    return MediaStreamTrack.CreateVideoTrack("LocalVideo", source);
}
// Resolves the capture format from the call parameters (falling back to HD
// at 30 fps) and builds the audio and SDP media constraints.
private void CreateMediaConstraintsInternal()
{
    if (IsVideoCallEnabled)
    {
        _videoWidth = _parameters.VideoWidth;
        _videoHeight = _parameters.VideoHeight;
        _fps = _parameters.VideoFps;

        // No explicit resolution requested — default to HD capture.
        if (_videoWidth == 0 || _videoHeight == 0)
        {
            _videoWidth = HDVideoWidth;
            _videoHeight = HDVideoHeight;
        }

        // No explicit frame rate requested — default to 30 fps.
        if (_fps == 0)
        {
            _fps = 30;
        }

        _logger.Debug(TAG, $"Capturing format: {_videoWidth}x{_videoHeight}@{_fps}");
    }

    _audioConstraints = new MediaConstraints();
    if (_parameters.NoAudioProcessing)
    {
        _logger.Debug(TAG, "Disabling audio processing");
        _audioConstraints.Mandatory.Add(AudioEchoCancellationConstraint, "false");
        _audioConstraints.Mandatory.Add(AudioAutoGainControlConstraint, "false");
        _audioConstraints.Mandatory.Add(AudioHighPassFilterConstraint, "false");
        _audioConstraints.Mandatory.Add(AudioNoiseSuppressionConstraint, "false");
    }

    _sdpMediaConstraints = new MediaConstraints();
    _sdpMediaConstraints.Mandatory.Add("OfferToReceiveAudio", "true");
    // NOTE(review): the video block above is gated on IsVideoCallEnabled while
    // this line reads _parameters.VideoCallEnabled directly — presumably the
    // same flag; confirm they cannot diverge.
    _sdpMediaConstraints.Mandatory.Add("OfferToReceiveVideo",
        _parameters.VideoCallEnabled ? "true" : "false");
}
/// <summary>
/// Task-based wrapper over the callback-style <c>CreateOffer</c> API.
/// </summary>
/// <param name="self">The peer connection to create the offer on.</param>
/// <param name="mediaConstraints">Constraints to apply to the offer.</param>
/// <returns>A task that completes with the created session description.</returns>
public static Task<SessionDescription> CreateOfferAsync(this IPeerConnection self, MediaConstraints mediaConstraints)
{
    // The observer must be registered before the task is handed back so no
    // callback can be missed.
    SdpObserver observer = new SdpObserver();
    self.CreateOffer(mediaConstraints, observer);
    return observer.OnCreateAsync();
}
// Initialises empty audio constraints plus SDP constraints that request
// both incoming audio and incoming video.
private void CreateMediaConstraintsInternal()
{
    _audioConstraints = new MediaConstraints();

    var sdpConstraints = new MediaConstraints();
    sdpConstraints.Mandatory.Add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    sdpConstraints.Mandatory.Add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    _sdpMediaConstraints = sdpConstraints;
}
/// <summary>
/// Creates an audio source from the given constraints, or returns null when
/// the underlying factory cannot create one.
/// </summary>
public IAudioSource CreateAudioSource(MediaConstraints mediaConstraints)
{
    var nativeSource = _factory.AudioSourceWithConstraints(mediaConstraints.ToNative());
    return nativeSource == null ? null : new AudioSourceNative(nativeSource);
}
/// <summary>
/// Bundles everything needed to join an AppRTC room: ICE servers, channel
/// endpoints, the initiator flag, and the peer-connection/video constraints.
/// </summary>
public AppRTCSignalingParameters(AppRTCClient outerInstance, IList<PeerConnection.IceServer> iceServers, string gaeBaseHref, string channelToken, string postMessageUrl, bool initiator, MediaConstraints pcConstraints, MediaConstraints videoConstraints)
{
    this.outerInstance = outerInstance;
    this.initiator = initiator;
    this.iceServers = iceServers;
    this.gaeBaseHref = gaeBaseHref;
    this.channelToken = channelToken;
    this.postMessageUrl = postMessageUrl;
    this.pcConstraints = pcConstraints;
    this.videoConstraints = videoConstraints;
}
// Fetches |url| and fishes the signaling parameters out of the HTML via
// regular expressions.
//
// TODO(fischman): replace this hackery with a dedicated JSON-serving URL in
// apprtc so that this isn't necessary (here and in other future apps that
// want to interop with apprtc).
private AppRTCSignalingParameters getParametersForRoomUrl(string url)
{
    Java.Util.Regex.Pattern fullRoomPattern =
        Java.Util.Regex.Pattern.Compile(".*\n *Sorry, this room is full\\..*");

    // FIX: the connection's response stream was previously never closed
    // (resource leak); dispose it as soon as the HTML has been drained.
    string roomHtml;
    using (var responseStream = (new URL(url)).OpenConnection().InputStream)
    {
        roomHtml = drainStream(responseStream);
    }

    Matcher fullRoomMatcher = fullRoomPattern.Matcher(roomHtml);
    if (fullRoomMatcher.Find())
    {
        throw new IOException("Room is full!");
    }

    // Everything before the query string is the app-engine base href.
    string gaeBaseHref = url.Substring(0, url.IndexOf('?'));
    string token = getVarValue(roomHtml, "channelToken", true);
    string postMessageUrl = "/message?r=" +
        getVarValue(roomHtml, "roomKey", true) + "&u=" +
        getVarValue(roomHtml, "me", true);
    bool initiator = getVarValue(roomHtml, "initiator", false).Equals("1");
    List<PeerConnection.IceServer> iceServers =
        outerInstance.iceServersFromPCConfigJSON(getVarValue(roomHtml, "pcConfig", false));

    // The page may or may not list a TURN server; if it doesn't, fetch one
    // from the dedicated TURN URL.
    bool isTurnPresent = false;
    foreach (PeerConnection.IceServer server in iceServers)
    {
        if (server.Uri.StartsWith("turn:"))
        {
            isTurnPresent = true;
            break;
        }
    }
    if (!isTurnPresent)
    {
        iceServers.Add(requestTurnServer(getVarValue(roomHtml, "turnUrl", true)));
    }

    MediaConstraints pcConstraints = constraintsFromJSON(getVarValue(roomHtml, "pcConstraints", false));
    Log.Debug(TAG, "pcConstraints: " + pcConstraints);
    MediaConstraints videoConstraints = constraintsFromJSON(getVideoConstraints(getVarValue(roomHtml, "mediaConstraints", false)));
    Log.Debug(TAG, "videoConstraints: " + videoConstraints);

    return new AppRTCSignalingParameters(outerInstance, iceServers, gaeBaseHref,
        token, postMessageUrl, initiator, pcConstraints, videoConstraints);
}
// Parses a MediaConstraints object out of the AppRTC JSON blob, e.g.
// {"mandatory": {...}, "optional": [{...}, ...]}. Returns null for null input.
private MediaConstraints constraintsFromJSON(string jsonString)
{
    if (jsonString == null)
    {
        return null;
    }
    try
    {
        MediaConstraints constraints = new MediaConstraints();
        JSONObject json = new JSONObject(jsonString);

        // "mandatory" is a single object of key/value pairs.
        JSONObject mandatoryJSON = json.OptJSONObject("mandatory");
        if (mandatoryJSON != null)
        {
            JSONArray mandatoryKeys = mandatoryJSON.Names();
            if (mandatoryKeys != null)
            {
                for (int i = 0; i < mandatoryKeys.Length(); ++i)
                {
                    string key = mandatoryKeys.GetString(i);
                    string value = mandatoryJSON.GetString(key);
                    constraints.Mandatory.Add(new MediaConstraints.KeyValuePair(key, value));
                }
            }
        }

        // "optional" is an array of single-entry objects.
        // NOTE(review): only the first key of each entry is read — presumably
        // AppRTC always emits exactly one key per object; confirm.
        JSONArray optionalJSON = json.OptJSONArray("optional");
        if (optionalJSON != null)
        {
            for (int i = 0; i < optionalJSON.Length(); ++i)
            {
                JSONObject keyValueDict = optionalJSON.GetJSONObject(i);
                string key = keyValueDict.Names().GetString(0);
                string value = keyValueDict.GetString(key);
                constraints.Optional.Add(new MediaConstraints.KeyValuePair(key, value));
            }
        }
        return constraints;
    }
    catch (JSONException e)
    {
        // FIX: was `throw new Exception("Error", e)` — a bare Exception with a
        // useless message. InvalidOperationException still derives from
        // Exception, so existing catch blocks keep working.
        throw new InvalidOperationException("Failed to parse MediaConstraints from JSON.", e);
    }
}
// Activity entry point: sets up full-screen video rendering, routes audio to
// the speaker or headset, builds the SDP constraints, and either joins the
// room from a VIEW intent or shows the room-selection UI.
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    Java.Lang.Thread.DefaultUncaughtExceptionHandler = new UnhandledExceptionHandler(this);

    Window.AddFlags(WindowManagerFlags.Fullscreen);
    Window.AddFlags(WindowManagerFlags.KeepScreenOn);

    Point displaySize = new Point();
    WindowManager.DefaultDisplay.GetSize(displaySize);
    vsv = new VideoStreamsView(this, displaySize);
    SetContentView(vsv);

    abortUnless(PeerConnectionFactory.InitializeAndroidGlobals(this), "Failed to initializeAndroidGlobals");

    AudioManager audioManager = ((AudioManager)GetSystemService(AudioService));
    // TODO(fischman): figure out how to do this Right(tm) and remove the
    // suppression.
    bool isWiredHeadsetOn = audioManager.WiredHeadsetOn;
    audioManager.Mode = isWiredHeadsetOn ? Mode.InCall : Mode.InCommunication;
    audioManager.SpeakerphoneOn = !isWiredHeadsetOn;

    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.Mandatory.Add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    sdpMediaConstraints.Mandatory.Add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));

    Intent intent = Intent;
    // FIX: the action was compared against "Android.intent.action.VIEW"
    // (capital 'A'), which can never match — Android intent actions are
    // case-sensitive and the real value is "android.intent.action.VIEW".
    // Using the framework constant avoids the typo class entirely.
    if (Intent.ActionView.Equals(intent.Action))
    {
        connectToRoom(intent.Data.ToString());
        return;
    }
    showGetRoomUI();
}
// Selects the camera saved in local settings (falling back to the first
// available device), derives min/max capture constraints from that device's
// supported formats, and creates the local ("self") video and audio tracks.
private void GetUserMedia()
{
    Debug.WriteLine("Getting user media.");

    // Prefer the camera the user picked previously; default to device 0.
    // The original scan kept the LAST matching device — preserved here.
    string savedCameraName = (string)_localSettings.Values["SelectedCameraName"];
    MediaDevice _selectedVideoDevice = (MediaDevice)Devices.Instance.VideoMediaDevicesList[0];
    foreach (var device in Devices.Instance.VideoMediaDevicesList)
    {
        if (device.DisplayName == savedCameraName)
        {
            _selectedVideoDevice = (MediaDevice)device;
        }
    }

    // Collect every supported width/height/frame-rate so the constraints can
    // span the device's full capability range. (Replaces hand-rolled
    // accumulation loops with LINQ, which the original already used for
    // Min()/Max().)
    var formats = _selectedVideoDevice.VideoFormats;
    List<int> widths = formats.Select(f => f.Dimension.Width).ToList();
    List<int> heights = formats.Select(f => f.Dimension.Height).ToList();
    List<int> frameRates = formats.SelectMany(f => f.FrameRates).ToList();

    // Maximum and minimum values for the selected camera.
    IReadOnlyList<IConstraint> mandatoryConstraints = new List<IConstraint>()
    {
        new Constraint("maxWidth", widths.Max().ToString()),
        new Constraint("minWidth", widths.Min().ToString()),
        new Constraint("maxHeight", heights.Max().ToString()),
        new Constraint("minHeight", heights.Min().ToString()),
        new Constraint("maxFrameRate", frameRates.Max().ToString()),
        new Constraint("minFrameRate", frameRates.Min().ToString())
    };
    // No optional constraints.
    IReadOnlyList<IConstraint> optionalConstraints = new List<IConstraint>();
    IMediaConstraints mediaConstraints = new MediaConstraints(mandatoryConstraints, optionalConstraints);

    var videoCapturer = VideoCapturer.Create(_selectedVideoDevice.DisplayName, _selectedVideoDevice.Id, false);
    var videoOptions = new VideoOptions()
    {
        Factory = _factory,
        Capturer = videoCapturer,
        Constraints = mediaConstraints
    };
    var videoTrackSource = VideoTrackSource.Create(videoOptions);
    _selfVideoTrack = MediaStreamTrack.CreateVideoTrack("SELF_VIDEO", videoTrackSource);

    var audioOptions = new AudioOptions()
    {
        Factory = _factory
    };
    var audioTrackSource = AudioTrackSource.Create(audioOptions);
    _selfAudioTrack = MediaStreamTrack.CreateAudioTrack("SELF_AUDIO", audioTrackSource);
}
/// <summary>Creates an audio source by delegating to the underlying factory.</summary>
public IAudioSource CreateAudioSource(MediaConstraints mediaConstraints) =>
    _factory.CreateAudioSource(mediaConstraints);
/// <summary>
/// Requests an SDP answer from the native peer connection, bridging the
/// <see cref="ISdpObserver"/> callbacks through a helper delegate.
/// </summary>
public void CreateAnswer(MediaConstraints constraints, ISdpObserver observer)
{
    var callbackHelper = new SdpCallbackHelper(observer);
    _peerConnection.AnswerForConstraints(constraints.ToNative(), callbackHelper.CreateSdp);
}
/// <summary>
/// Requests an SDP offer from the native peer connection, bridging the
/// <see cref="ISdpObserver"/> callbacks through a helper delegate.
/// </summary>
public void OfferForConstraints(MediaConstraints mediaConstraints, ISdpObserver sdpObserver)
{
    var callbackHelper = new SdpCallbackHelper(sdpObserver);
    _peerConnection.OfferForConstraints(mediaConstraints.ToPlatformNative(), callbackHelper.CreateSdp);
}
/// <summary>
/// Creates a platform peer connection for the given configuration, or returns
/// null when the native factory fails to create one.
/// </summary>
/// <remarks>
/// NOTE(review): the <paramref name="constraints"/> parameter is ignored; the
/// native call is made with a locally built constraint set instead — confirm
/// this is intentional.
/// NOTE(review): DtlsSrtpKeyAgreement is set to "false" when
/// configuration.EnableDtlsSrtp is true (and vice versa), which looks
/// inverted — verify against the underlying WebRTC binding before changing.
/// </remarks>
public IPeerConnection PeerConnectionWithConfiguration(Core.RTCConfiguration configuration, MediaConstraints constraints, IPeerConnectionDelegate peerConnectionDelegate)
{
    var nativeConfiguration = configuration.ToPlatformNative();
    var keyAgreement = configuration.EnableDtlsSrtp ? "false" : "true";
    var nativeConstraints = new RTCMediaConstraints(
        null,
        new NSDictionary<NSString, NSString>(
            new NSString("DtlsSrtpKeyAgreement"),
            new NSString(keyAgreement)));
    var nativePeerConnection = _peerConnectionFactory.PeerConnectionWithConfiguration(
        nativeConfiguration,
        nativeConstraints,
        new PlatformPeerConnectionDelegate(peerConnectionDelegate));
    return nativePeerConnection == null
        ? null
        : new PlatformPeerConnection(nativePeerConnection, configuration, this);
}
/// <summary>
/// Creates a platform audio source for the given constraints, or returns null
/// when the native factory cannot create one.
/// </summary>
public IAudioSource AudioSourceWithConstraints(MediaConstraints mediaConstraints)
{
    var nativeSource = _peerConnectionFactory.AudioSourceWithConstraints(mediaConstraints.ToPlatformNative());
    if (nativeSource == null)
    {
        return null;
    }
    return new PlatformAudioSource(nativeSource);
}
/// <summary>
/// Converts cross-platform <see cref="MediaConstraints"/> to the native
/// <see cref="RTCMediaConstraints"/> type.
/// </summary>
public static RTCMediaConstraints ToNative(this MediaConstraints self) =>
    new RTCMediaConstraints(self.Mandatory.ToNative(), self.Optional.ToNative());