IEnumerator GetOffer()
{
    var op = signaling.GetOffer(sessionId, lastTimeGetOfferRequest);
    yield return op;

    if (op.webRequest.isNetworkError)
    {
        Debug.LogError($"Network Error: {op.webRequest.error}");
        yield break;
    }

    var date = DateTimeExtension.ParseHttpDate(op.webRequest.GetResponseHeader("Date"));
    lastTimeGetOfferRequest = date.ToJsMilliseconds();

    var obj = op.webRequest.DownloadHandlerJson<OfferResDataList>().GetObject();
    if (obj == null)
    {
        yield break;
    }

    foreach (var offer in obj.offers)
    {
        RTCSessionDescription _desc;
        _desc.type = RTCSdpType.Offer;
        _desc.sdp = offer.sdp;
        var connectionId = offer.connectionId;
        if (pcs.ContainsKey(connectionId))
        {
            continue;
        }

        var pc = new RTCPeerConnection();
        pc.OnStatsDelivered = (data) => Debug.Log(data);
        pcs.Add(offer.connectionId, pc);

        pc.OnDataChannel = new DelegateOnDataChannel(channel => { OnDataChannel(pc, channel); });
        pc.SetConfiguration(ref conf);
        pc.OnIceCandidate = new DelegateOnIceCandidate(candidate =>
        {
            StartCoroutine(OnIceCandidate(offer.connectionId, candidate));
        });
        pc.OnIceConnectionChange = new DelegateOnIceConnectionChange(state =>
        {
            if (state == RTCIceConnectionState.Disconnected)
            {
                pc.Close();
            }
        });

        // Make the video bitrate start at 16000 kbit/s and cap it at 160000 kbit/s.
        string pattern = @"(a=fmtp:\d+ .*level-asymmetry-allowed=.*)\r\n";
        _desc.sdp = Regex.Replace(_desc.sdp, pattern,
            "$1;x-google-start-bitrate=16000;x-google-max-bitrate=160000\r\n");

        pc.SetRemoteDescription(ref _desc);

        foreach (var track in videoStream.GetTracks())
        {
            pc.AddTrack(track);
        }
        foreach (var track in audioStream.GetTracks())
        {
            pc.AddTrack(track);
        }

        StartCoroutine(Answer(connectionId));
    }
}
private void AddTracks()
{
    pc1Senders.Add(_pc1.AddTrack(videoStreamTrack));
    pc1Senders.Add(_pc1.AddTrack(audioStreamTrack));

    addTracksButton.interactable = false;
    removeTracksButton.interactable = true;
}
private void AddLocalMediaTracks()
{
    Debug.WriteLine("Adding local media tracks.");
    PeerConnection.AddTrack(_selfVideoTrack);
    PeerConnection.AddTrack(_selfAudioTrack);

    OnAddLocalTrack?.Invoke(_selfVideoTrack);
    OnAddLocalTrack?.Invoke(_selfAudioTrack);
}
/// <summary>
/// WebRTC Establishment: Builds the Peer Connection.
/// </summary>
/// <param name="mediaOptions"></param>
/// <returns></returns>
private async Task<RTCPeerConnection> BuildPeerConnection(MediaOptions mediaOptions)
{
    return await Task.Run(() =>
    {
        var factory = new WebRtcFactory(new WebRtcFactoryConfiguration());
        var peerConnection = new RTCPeerConnection(
            new RTCConfiguration()
            {
                Factory = factory,
                BundlePolicy = RTCBundlePolicy.Balanced,
                IceTransportPolicy = RTCIceTransportPolicy.All
            });

        peerConnection.OnTrack += this.OnTrack;
        peerConnection.OnRemoveTrack += this.OnRemoveTrack;

        if ((mediaOptions.SendVideo || mediaOptions.LocalLoopback) && this.LocalVideoTrack == null)
        {
            this.LocalVideoTrack = this.GetLocalVideo(factory);
        }

        if (mediaOptions.SendAudio && this.LocalAudioTrack == null)
        {
            this.LocalAudioTrack = this.GetLocalAudio(factory);
        }

        if (mediaOptions.SendVideo)
        {
            peerConnection.AddTrack(this.LocalVideoTrack);
        }
        if (mediaOptions.SendAudio)
        {
            peerConnection.AddTrack(this.LocalAudioTrack);
        }
        if (mediaOptions.LocalLoopback)
        {
            this.LocalVideoTrack.Element = MediaElementMaker.Bind(this.LocalVideo);
        }

        return peerConnection;
    });
}
private void AddTracks()
{
    foreach (var track in videoStream.GetTracks())
    {
        pc1Senders.Add(_pc1.AddTrack(track, videoStream));
    }

    if (WebRTCSettings.UseVideoCodec != null)
    {
        var codecs = new[] { WebRTCSettings.UseVideoCodec };
        foreach (var transceiver in _pc1.GetTransceivers())
        {
            if (pc1Senders.Contains(transceiver.Sender))
            {
                transceiver.SetCodecPreferences(codecs);
            }
        }
    }

    if (!videoUpdateStarted)
    {
        StartCoroutine(WebRTC.Update());
        videoUpdateStarted = true;
    }
}
private void createPC(string id, bool isCaller = false)
{
    Debug.Log($"[Create PC] id:{id}");

    var pc = new RTCPeerConnection(ref conf);
    pcs.Add(id, pc);

    pc.OnIceCandidate = cand =>
    {
        //Debug.Log($"onIceCandidate: id:{id}");
        signalingServer.Send(id, cand);
    };

    pc.OnTrack = evt =>
    {
        Debug.Log($"onTrack: id:{id}");
        var vTrack = evt.Track as VideoStreamTrack;
        var ri = Instantiate(receiveImagePrefab);
        ri.transform.position = new Vector3(xPos++, 0, 0);
        ri.GetComponent<Renderer>().material.mainTexture = vTrack.InitializeReceiver(1920, 1080);
    };

    var sendTrack = cam.CaptureStreamTrack(1920, 1080, 2000000);
    pc.AddTrack(sendTrack);

    if (isCaller)
    {
        createDesc(id, RTCSdpType.Offer);
    }
}
private void AddTracks()
{
    var pc1VideoSenders = new List<RTCRtpSender>();
    foreach (var track in sendStream.GetTracks())
    {
        var sender = _pc1.AddTrack(track, sendStream);
        if (track.Kind == TrackKind.Video)
        {
            pc1VideoSenders.Add(sender);
        }
    }

    if (WebRTCSettings.UseVideoCodec != null)
    {
        var codecs = new[] { WebRTCSettings.UseVideoCodec };
        foreach (var transceiver in _pc1.GetTransceivers())
        {
            if (pc1VideoSenders.Contains(transceiver.Sender))
            {
                transceiver.SetCodecPreferences(codecs);
            }
        }
    }
}
public IEnumerator MediaStreamTrackThrowExceptionAfterPeerDisposed() { RTCConfiguration config = default; config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } }; var peer1 = new RTCPeerConnection(ref config); var peer2 = new RTCPeerConnection(ref config); peer1.OnIceCandidate = candidate => { peer2.AddIceCandidate(candidate); }; peer2.OnIceCandidate = candidate => { peer1.AddIceCandidate(candidate); }; AudioStreamTrack track = new AudioStreamTrack(); peer1.AddTrack(track); MediaStreamTrack track1 = null; peer2.OnTrack = e => { track1 = e.Track; }; yield return(SignalingOffer(peer1, peer2)); Assert.That(track1, Is.Not.Null); peer2.Dispose(); Assert.That(() => track1.Id, Throws.TypeOf <InvalidOperationException>()); track.Dispose(); track1.Dispose(); }
public IEnumerator TransceiverReturnsSender()
{
    RTCConfiguration config = default;
    config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };

    var peer1 = new RTCPeerConnection(ref config);
    var peer2 = new RTCPeerConnection(ref config);
    peer1.OnIceCandidate = candidate => { peer2.AddIceCandidate(candidate); };
    peer2.OnIceCandidate = candidate => { peer1.AddIceCandidate(candidate); };

    AudioStreamTrack track1 = new AudioStreamTrack();
    peer1.AddTrack(track1);

    yield return SignalingOffer(peer1, peer2);

    Assert.That(peer2.GetTransceivers().Count(), Is.EqualTo(1));
    RTCRtpSender sender1 = peer2.GetTransceivers().First().Sender;
    Assert.That(sender1, Is.Not.Null);

    AudioStreamTrack track2 = new AudioStreamTrack();
    RTCRtpSender sender2 = peer2.AddTrack(track2);
    Assert.That(sender2, Is.Not.Null);
    Assert.That(sender1, Is.EqualTo(sender2));

    track1.Dispose();
    track2.Dispose();
    peer1.Dispose();
    peer2.Dispose();
}
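// Several of these tests yield on a SignalingOffer(peer1, peer2) helper that is not shown in this
// listing. Below is a minimal sketch of such a coroutine, assuming the same offer/answer calls used
// in the UnitySetUp snippet further down (CreateOffer, SetLocalDescription, SetRemoteDescription,
// CreateAnswer). The helper name, the options style, and the final wait condition are assumptions,
// not the package's own test implementation.
using System.Collections;
using Unity.WebRTC;
using UnityEngine;

IEnumerator SignalingOffer(RTCPeerConnection offerer, RTCPeerConnection answerer)
{
    // Offer from the first peer, applied locally and then remotely.
    RTCOfferOptions offerOptions = default;
    var opOffer = offerer.CreateOffer(ref offerOptions);
    yield return opOffer;
    var offerDesc = opOffer.Desc;
    yield return offerer.SetLocalDescription(ref offerDesc);
    yield return answerer.SetRemoteDescription(ref offerDesc);

    // Answer from the second peer, applied in the opposite direction.
    RTCAnswerOptions answerOptions = default;
    var opAnswer = answerer.CreateAnswer(ref answerOptions);
    yield return opAnswer;
    var answerDesc = opAnswer.Desc;
    yield return answerer.SetLocalDescription(ref answerDesc);
    yield return offerer.SetRemoteDescription(ref answerDesc);

    // Give ICE a chance to connect before the calling test continues (timeout omitted for brevity).
    yield return new WaitUntil(() =>
        offerer.IceConnectionState == RTCIceConnectionState.Connected ||
        offerer.IceConnectionState == RTCIceConnectionState.Completed);
}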
private void AddTracks()
{
    foreach (var track in sendStream.GetTracks())
    {
        _pc1.AddTrack(track, sendStream);
    }
}
public IEnumerator MediaStreamTrackThrowExceptionAfterPeerDisposed() { RTCConfiguration config = default; config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } }; var peer1 = new RTCPeerConnection(ref config); var peer2 = new RTCPeerConnection(ref config); peer1.OnIceCandidate = candidate => { peer2.AddIceCandidate(candidate); }; peer2.OnIceCandidate = candidate => { peer1.AddIceCandidate(candidate); }; var obj = new GameObject("audio"); var source = obj.AddComponent <AudioSource>(); source.clip = AudioClip.Create("test", 480, 2, 48000, false); AudioStreamTrack track = new AudioStreamTrack(source); peer1.AddTrack(track); MediaStreamTrack track1 = null; peer2.OnTrack = e => { track1 = e.Track; }; yield return(SignalingOffer(peer1, peer2)); Assert.That(track1, Is.Not.Null); peer2.Dispose(); Assert.That(() => track1.Id, Throws.TypeOf <ObjectDisposedException>()); track.Dispose(); track1.Dispose(); Object.DestroyImmediate(source.clip); Object.DestroyImmediate(obj); }
private void AddTracks()
{
    foreach (var track in videoStream.GetTracks())
    {
        pc1Senders.Add(_pc1.AddTrack(track, videoStream));
    }

    if (WebRTCSettings.UseVideoCodec != null)
    {
        var codecs = new[] { WebRTCSettings.UseVideoCodec };
        foreach (var transceiver in _pc1.GetTransceivers())
        {
            if (pc1Senders.Contains(transceiver.Sender))
            {
                transceiver.SetCodecPreferences(codecs);
            }
        }
    }

    if (!videoUpdateStarted)
    {
        StartCoroutine(WebRTC.Update());
        StartCoroutine(LoopStatsCoroutine());
        videoUpdateStarted = true;
    }

    bandwidthSelector.interactable = false;
    scaleResolutionDownSelector.interactable = false;
}
public IEnumerator SetLocalDescriptionFailed()
{
    var peer = new RTCPeerConnection();
    var stream = new MediaStream();
    var track = new AudioStreamTrack("audio");
    var sender = peer.AddTrack(track, stream);

    RTCOfferOptions options = default;
    var op = peer.CreateOffer(ref options);
    yield return op;
    Assert.True(op.IsDone);
    Assert.False(op.IsError);

    var desc = op.Desc;
    // Mangle the SDP so that it cannot be parsed.
    desc.sdp = desc.sdp.Replace("m=audio", "m=audiable");

    var op2 = peer.SetLocalDescription(ref desc);
    yield return op2;
    Assert.True(op2.IsDone);
    Assert.True(op2.IsError);
    Assert.IsNotEmpty(op2.Error.message);

    peer.RemoveTrack(sender);
    track.Dispose();
    stream.Dispose();
    peer.Close();
    peer.Dispose();
}
private void AddTracks()
{
    foreach (var track in audioStream.GetTracks())
    {
        pc1Senders.Add(peerConnection.AddTrack(track, audioStream));
    }
    foreach (var track in videoStream.GetTracks())
    {
        pc1Senders.Add(peerConnection.AddTrack(track, videoStream));
    }

    if (!videoUpdateStarted)
    {
        StartCoroutine(WebRTC.Update());
        videoUpdateStarted = true;
    }
}
private void Call() { Debug.Log("Starting calls"); pcLocal = new RTCPeerConnection(ref configuration); pcRemote = new RTCPeerConnection(ref configuration); pcRemote.OnTrack = e => receiveVideoStream.AddTrack(e.Track); pcLocal.OnIceCandidate = candidate => pcRemote.AddIceCandidate(candidate); pcRemote.OnIceCandidate = candidate => pcLocal.AddIceCandidate(candidate); Debug.Log("pc1: created local and remote peer connection object"); foreach (var track in sourceVideoStream.GetTracks()) { pcLocal.AddTrack(track, sourceVideoStream); } Debug.Log("Adding local stream to pcLocal"); callButton.interactable = false; createOfferButton.interactable = true; createAnswerButton.interactable = true; setOfferButton.interactable = true; setAnswerButton.interactable = true; hangUpButton.interactable = true; }
private void Call() { Debug.Log("Starting calls"); pc1Local = new RTCPeerConnection(ref configuration); pc1Remote = new RTCPeerConnection(ref configuration); pc1Remote.OnTrack = e => receiveVideoStream1.AddTrack(e.Track); pc1Local.OnIceCandidate = candidate => pc1Remote.AddIceCandidate(ref candidate); pc1Remote.OnIceCandidate = candidate => pc1Local.AddIceCandidate(ref candidate); Debug.Log("pc1: created local and remote peer connection object"); pc2Local = new RTCPeerConnection(ref configuration); pc2Remote = new RTCPeerConnection(ref configuration); pc2Remote.OnTrack = e => receiveVideoStream2.AddTrack(e.Track); pc2Local.OnIceCandidate = candidate => pc2Remote.AddIceCandidate(ref candidate); pc2Remote.OnIceCandidate = candidate => pc2Local.AddIceCandidate(ref candidate); Debug.Log("pc2: created local and remote peer connection object"); foreach (var track in sourceVideoStream.GetTracks()) { pc1Local.AddTrack(track, sourceVideoStream); pc2Local.AddTrack(track, sourceVideoStream); } Debug.Log("Adding local stream to pc1Local/pc2Local"); StartCoroutine(NegotiationPeer(pc1Local, pc1Remote)); StartCoroutine(NegotiationPeer(pc2Local, pc2Remote)); callButton.interactable = false; hangUpButton.interactable = true; }
private void AddTracks()
{
    pc1Senders.Add(_pc1.AddTrack(track));

    if (WebRTCSettings.UseVideoCodec != null)
    {
        var codecs = new[] { WebRTCSettings.UseVideoCodec };
        foreach (var transceiver in _pc1.GetTransceivers())
        {
            if (pc1Senders.Contains(transceiver.Sender))
            {
                transceiver.SetCodecPreferences(codecs);
            }
        }
    }

    if (!videoUpdateStarted)
    {
        StartCoroutine(WebRTC.Update());
        videoUpdateStarted = true;
    }

    addTracksButton.interactable = false;
    removeTracksButton.interactable = true;
}
public IEnumerator UnitySetUp()
{
    WebRTC.WebRTC.Initialize();

    RTCConfiguration config = default;
    RTCIceCandidate candidate_ = null;
    config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };

    var peer1 = new RTCPeerConnection(ref config);
    var peer2 = new RTCPeerConnection(ref config);
    peer1.OnIceCandidate = candidate => { candidate_ = candidate; };

    MediaStream stream = WebRTC.Audio.CaptureStream();
    peer1.AddTrack(stream.GetTracks().First());

    RTCOfferOptions offerOptions = new RTCOfferOptions();
    var op1 = peer1.CreateOffer(ref offerOptions);
    yield return op1;
    m_DescOffer = op1.Desc;
    var op2 = peer1.SetLocalDescription(ref m_DescOffer);
    yield return op2;
    var op3 = peer2.SetRemoteDescription(ref m_DescOffer);
    yield return op3;

    RTCAnswerOptions answerOptions = new RTCAnswerOptions();
    var op4 = peer2.CreateAnswer(ref answerOptions);
    yield return op4;
    m_DescAnswer = op4.Desc;
    var op5 = peer2.SetLocalDescription(ref m_DescAnswer);
    yield return op5;
    var op6 = peer1.SetRemoteDescription(ref m_DescAnswer);
    yield return op6;

    yield return new WaitUntil(() => candidate_ != null);
    m_candidate = candidate_;

    stream.Dispose();
    peer1.Close();
    peer2.Close();

    m_Context = SynchronizationContext.Current;
    signaling1 = CreateSignaling(m_SignalingType, m_Context);
    signaling2 = CreateSignaling(m_SignalingType, m_Context);
}
private void AddTracks()
{
    foreach (var track in videoStream.GetTracks())
    {
        pc1Senders.Add(_pc1.AddTrack(track, videoStream));
    }

    if (!videoUpdateStarted)
    {
        StartCoroutine(WebRTC.Update());
        videoUpdateStarted = true;
    }

    RTCRtpCodecCapability[] codecs = null;
    if (codecSelector.value == 0)
    {
        codecs = RTCRtpSender.GetCapabilities(TrackKind.Video).codecs;
    }
    else
    {
        RTCRtpCodecCapability preferredCodec = availableCodecs[codecSelector.value - 1];
        codecs = new[] { preferredCodec };
    }

    RTCRtpTransceiver transceiver = _pc1.GetTransceivers().First();
    RTCErrorType error = transceiver.SetCodecPreferences(codecs);
    if (error != RTCErrorType.None)
    {
        Debug.LogErrorFormat("RTCRtpTransceiver.SetCodecPreferences failed. {0}", error);
    }
}
public void AddTrack()
{
    var peer = new RTCPeerConnection();
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var rt = new UnityEngine.RenderTexture(width, height, 0, format);
    rt.Create();

    var track = new VideoStreamTrack(rt);
    var sender = peer.AddTrack(track);
    Assert.That(sender, Is.Not.Null);
    Assert.That(track, Is.EqualTo(sender.Track));

    RTCRtpSendParameters parameters = sender.GetParameters();
    Assert.That(parameters, Is.Not.Null);
    Assert.That(parameters.encodings, Is.Empty);

    track.Dispose();
    peer.Dispose();
    Object.DestroyImmediate(rt);
}
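// The encodings array asserted empty above is only populated once negotiation has produced an
// m-line for this sender. A hedged sketch of adjusting those encodings afterwards, with example
// values and a hypothetical helper name, might look like this:
void LimitSenderBitrate(RTCRtpSender sender)
{
    RTCRtpSendParameters parameters = sender.GetParameters();
    foreach (var encoding in parameters.encodings)
    {
        encoding.maxBitrate = 1_000_000;        // cap at roughly 1 Mbps (example value, in bits per second)
        encoding.scaleResolutionDownBy = 2.0;   // send at half resolution (example value)
    }
    sender.SetParameters(parameters);           // apply the modified send parameters
}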
void AddTracks(string connectionId, RTCPeerConnection pc)
{
    // ToDo: need webrtc package version 2.3
    // foreach (var transceiver in pc.GetTransceivers()
    //     .Where(x => x.Receiver.Track.Kind == TrackKind.Video)
    //     .Select((x, index) => new {x, index})
    //     .Take(m_listVideoStreamTrack.Count))
    // {
    //     RTCRtpSender sender = transceiver.x.Sender;
    //     VideoStreamTrack track = m_listVideoStreamTrack[transceiver.index];
    //     transceiver.x.Sender.ReplaceTrack(track);
    //     transceiver.x.Direction = RTCRtpTransceiverDirection.SendOnly;
    //
    //     if (!m_mapTrackAndSenderList.TryGetValue(track, out List<RTCRtpSender> list))
    //     {
    //         list = new List<RTCRtpSender>();
    //         m_mapTrackAndSenderList.Add(track, list);
    //     }
    //
    //     list.Add(sender);
    // }

    foreach (var track in m_listVideoStreamTrack)
    {
        RTCRtpSender sender = pc.AddTrack(track);
        if (!m_mapTrackAndSenderList.TryGetValue(track, out List<RTCRtpSender> list))
        {
            list = new List<RTCRtpSender>();
            m_mapTrackAndSenderList.Add(track, list);
        }
        list.Add(sender);
    }

    foreach (var track in m_audioStream.GetTracks())
    {
        RTCRtpSender sender = pc.AddTrack(track);
        if (!m_mapTrackAndSenderList.TryGetValue(track, out List<RTCRtpSender> list))
        {
            list = new List<RTCRtpSender>();
            m_mapTrackAndSenderList.Add(track, list);
        }
        list.Add(sender);
    }
}
private void AddTracks()
{
    var videoSender = _pc1.AddTrack(videoStreamTrack);
    pc1Senders.Add(videoSender);
    pc1Senders.Add(_pc1.AddTrack(audioStreamTrack));

    if (WebRTCSettings.UseVideoCodec != null)
    {
        var codecs = new[] { WebRTCSettings.UseVideoCodec };
        var transceiver = _pc1.GetTransceivers().First(t => t.Sender == videoSender);
        transceiver.SetCodecPreferences(codecs);
    }

    addTracksButton.interactable = false;
    removeTracksButton.interactable = true;
}
private void OnTrack(RTCPeerConnection pc, RTCTrackEvent e)
{
    pc2Senders.Add(pc.AddTrack(e.Track, videoStream));
    trackInfos.Append($"{GetName(pc)} receives remote track:\r\n");
    trackInfos.Append($"Track kind: {e.Track.Kind}\r\n");
    trackInfos.Append($"Track id: {e.Track.Id}\r\n");
    infoText.text = trackInfos.ToString();
}
public void AddTrack()
{
    sender = offerPc.AddTrack(SendVideoTrack);

    if (videoCodec != null)
    {
        var transceiver = offerPc.GetTransceivers().First(t => t.Sender == sender);
        transceiver.SetCodecPreferences(new[] { videoCodec });
    }
}
public IEnumerator RemoteOnRemoveTrack()
{
    RTCConfiguration config = default;
    config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };

    var peer1 = new RTCPeerConnection(ref config);
    var peer2 = new RTCPeerConnection(ref config);
    peer1.OnIceCandidate = candidate => { peer2.AddIceCandidate(candidate); };
    peer2.OnIceCandidate = candidate => { peer1.AddIceCandidate(candidate); };

    var stream = new MediaStream();
    MediaStream receiveStream = null;
    var track = new AudioStreamTrack();
    stream.AddTrack(track);
    RTCRtpSender sender = peer1.AddTrack(track, stream);

    bool isInvokeNegotiationNeeded1 = false;
    peer1.OnNegotiationNeeded = () => isInvokeNegotiationNeeded1 = true;

    bool isInvokeOnRemoveTrack = false;
    peer2.OnTrack = e =>
    {
        Assert.That(e.Streams, Has.Count.EqualTo(1));
        receiveStream = e.Streams.First();
        receiveStream.OnRemoveTrack = ev => isInvokeOnRemoveTrack = true;
    };

    yield return SignalingOffer(peer1, peer2);

    peer1.RemoveTrack(sender);

    var op9 = new WaitUntilWithTimeout(() => isInvokeNegotiationNeeded1, 5000);
    yield return op9;
    Assert.That(op9.IsCompleted, Is.True);

    yield return SignalingOffer(peer1, peer2);

    var op10 = new WaitUntilWithTimeout(() => isInvokeOnRemoveTrack, 5000);
    yield return op10;
    Assert.That(op10.IsCompleted, Is.True);

    stream.Dispose();
    receiveStream.Dispose();
    track.Dispose();
    peer1.Dispose();
    peer2.Dispose();
}
void setupPeer(string clientId)
{
    pc = new RTCPeerConnection(ref conf);
    pc.OnIceCandidate = candidate =>
    {
        Debug.Log($"onIceCandidate: candidate:{candidate.candidate}, sdpMid:{candidate.sdpMid}, sdpMLineIndex:{candidate.sdpMLineIndex}");
        wsMessageHandler.SendIceCandidate(candidate.candidate, candidate.sdpMid, candidate.sdpMLineIndex, clientId);
    };

    foreach (var track in videoStream.GetTracks())
    {
        pc.AddTrack(track);
    }
    foreach (var track in audioStream.GetTracks())
    {
        pc.AddTrack(track);
    }

    StartCoroutine(proccessOffer(clientId));
}
private void AddTracks()
{
    foreach (var track in audioStream.GetTracks())
    {
        pc1Senders.Add(_pc1.AddTrack(track, audioStream));
    }
    foreach (var track in videoStream.GetTracks())
    {
        pc1Senders.Add(_pc1.AddTrack(track, videoStream));
    }

    if (!videoUpdateStarted)
    {
        StartCoroutine(WebRTC.Update());
        videoUpdateStarted = true;
    }

    addTracksButton.interactable = false;
    removeTracksButton.interactable = true;
}
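// Several of the AddTracks helpers above re-enable a removeTracksButton. A minimal counterpart,
// sketched against the same fields (_pc1, pc1Senders, the two buttons) and the
// RTCPeerConnection.RemoveTrack call used in the tests, could look like this; it is an assumed
// companion method, not code from the samples themselves.
private void RemoveTracks()
{
    foreach (var sender in pc1Senders)
    {
        // Stop sending; removing a sender fires OnNegotiationNeeded, as RemoteOnRemoveTrack checks below.
        _pc1.RemoveTrack(sender);
    }
    pc1Senders.Clear();

    addTracksButton.interactable = true;
    removeTracksButton.interactable = false;
}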
public IEnumerator VideoReceive()
{
    const int width = 256;
    const int height = 256;
    var config = new RTCConfiguration
    {
        iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } }
    };
    var pc1 = new RTCPeerConnection(ref config);
    var pc2 = new RTCPeerConnection(ref config);

    VideoStreamTrack receiveVideoTrack = null;
    Texture receiveImage = null;
    pc2.OnTrack = e =>
    {
        if (e.Track is VideoStreamTrack track && !track.IsDecoderInitialized)
        {
            receiveVideoTrack = track;
            receiveImage = track.InitializeReceiver(width, height);
        }
    };

    var camObj = new GameObject("Camera");
    var cam = camObj.AddComponent<Camera>();
    cam.backgroundColor = Color.red;
    var sendVideoTrack = cam.CaptureStreamTrack(width, height, 1000000);
    yield return new WaitForSeconds(0.1f);

    pc1.AddTrack(sendVideoTrack);

    yield return SignalingPeers(pc1, pc2);
    yield return new WaitUntil(() => receiveVideoTrack != null && receiveVideoTrack.IsDecoderInitialized);
    Assert.That(receiveImage, Is.Not.Null);

    sendVideoTrack.Update();
    yield return new WaitForSeconds(0.1f);
    receiveVideoTrack.UpdateReceiveTexture();
    yield return new WaitForSeconds(0.1f);

    receiveVideoTrack.Dispose();
    sendVideoTrack.Dispose();
    yield return 0;
    pc2.Dispose();
    pc1.Dispose();
    Object.DestroyImmediate(camObj);
}
public IEnumerator VideoReceive()
{
    var config = new RTCConfiguration
    {
        iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } }
    };
    var pc1 = new RTCPeerConnection(ref config);
    var pc2 = new RTCPeerConnection(ref config);

    var sendStream = new MediaStream();
    var receiveStream = new MediaStream();
    VideoStreamTrack receiveVideoTrack = null;
    RenderTexture receiveImage = null;
    receiveStream.OnAddTrack = e =>
    {
        if (e.Track is VideoStreamTrack track)
        {
            receiveVideoTrack = track;
            receiveImage = receiveVideoTrack.InitializeReceiver(640, 320);
        }
    };
    pc2.OnTrack = e => receiveStream.AddTrack(e.Track);

    var camObj = new GameObject("Camera");
    var cam = camObj.AddComponent<Camera>();
    cam.backgroundColor = Color.red;
    var sendVideoTrack = cam.CaptureStreamTrack(1280, 720, 1000000);
    yield return new WaitForSeconds(0.1f);

    pc1.AddTrack(sendVideoTrack, sendStream);
    pc2.AddTransceiver(TrackKind.Video);

    yield return SignalingPeers(pc1, pc2);
    yield return new WaitUntil(() => receiveVideoTrack != null && receiveVideoTrack.IsDecoderInitialized);
    Assert.NotNull(receiveImage);

    sendVideoTrack.Update();
    yield return new WaitForSeconds(0.1f);
    receiveVideoTrack.UpdateReceiveTexture();
    yield return new WaitForSeconds(0.1f);

    receiveVideoTrack.Dispose();
    receiveStream.Dispose();
    sendVideoTrack.Dispose();
    sendStream.Dispose();
    pc2.Dispose();
    pc1.Dispose();
    Object.DestroyImmediate(receiveImage);
}
private void AddTracks() { Debug.Log("Add not added tracks"); foreach (var track in audioStreamTrackList.Where(x => !sendingSenderList.Exists(y => y.Track.Id == x.Id))) { var sender = _pc1.AddTrack(track); sendingSenderList.Add(sender); } }