public void AccessAfterDisposed()
{
    // Reading the Id property of a disposed stream must raise
    // InvalidOperationException.
    var stream = new MediaStream();
    stream.Dispose();

    TestDelegate readId = () => { var id = stream.Id; };
    Assert.That(readId, Throws.TypeOf<InvalidOperationException>());
}
/// <summary>
/// Ends the call: stops the video-update loop, removes the outgoing tracks,
/// releases both peer connections and both media streams, and resets the UI
/// to the pre-call state.
/// </summary>
private void OnHangUp()
{
    videoUpdateStarted = false;
    DeleteTracks();

    // Close the native connections first, then release the managed wrappers.
    _pc1.Close();
    _pc2.Close();
    _pc1.Dispose();
    _pc2.Dispose();
    _pc1 = null;
    _pc2 = null;

    sendStream.Dispose();
    sendStream = null;
    receiveStream.Dispose();
    receiveStream = null;

    // Restore button/dropdown state so a new call can be configured.
    startButton.interactable = true;
    callButton.interactable = false;
    hangUpButton.interactable = false;
    dropDownResolution.interactable = true;
    dropDownFramerate.interactable = true;
    receiveImage.color = Color.black;
}
/// <summary>
/// Unregisters the receive stream from the RenderStreaming singleton and
/// releases it.
/// </summary>
void OnDisable()
{
    // Fix: OnDisable can run before the stream was ever created (e.g. the
    // component is disabled before it started receiving), which previously
    // threw NullReferenceException. Guard the stream the same way the
    // RenderStreaming.Instance access already is.
    if (m_receiveStream != null)
    {
        RenderStreaming.Instance?.RemoveVideoReceiveStream(m_receiveStream);
        m_receiveStream.OnAddTrack = null;
        m_receiveStream.Dispose();
        m_receiveStream = null;
    }
}
public void Construct()
{
    // A MediaStream can be created and released without error.
    using (var stream = new MediaStream())
    {
        Assert.NotNull(stream);
    }
}
public IEnumerator ReceiverGetStreams()
{
    // Send one audio track and verify each remote receiver reports exactly
    // one associated stream.
    var track = new AudioStreamTrack();
    var sendStream = new MediaStream();
    sendStream.AddTrack(track);
    yield return 0;

    var peers = new MonoBehaviourTest<SignalingPeers>();
    peers.component.AddStream(0, sendStream);
    yield return peers;

    foreach (var receiver in peers.component.GetPeerReceivers(1))
    {
        Assert.That(receiver.Streams, Has.Count.EqualTo(1));
    }

    // Tear down in reverse order of creation.
    peers.component.Dispose();
    foreach (var t in sendStream.GetTracks())
    {
        t.Dispose();
    }
    sendStream.Dispose();
    Object.DestroyImmediate(peers.gameObject);
}
/// <summary>
/// Ends the call: disposes the captured tracks and their source stream,
/// closes and disposes all four peer connections, clears the preview and
/// receive UI, and resets the buttons to the pre-call state.
/// </summary>
private void HangUp()
{
    // Dispose the tracks before the stream that holds them.
    foreach (var track in sourceStream.GetTracks())
    {
        track.Dispose();
    }
    sourceStream.Dispose();
    sourceStream = null;

    // Close the native connections first, then release the managed wrappers.
    pc1Local.Close();
    pc1Remote.Close();
    pc2Local.Close();
    pc2Remote.Close();
    pc1Local.Dispose();
    pc1Remote.Dispose();
    pc2Local.Dispose();
    pc2Remote.Dispose();
    pc1Local = null;
    pc1Remote = null;
    pc2Local = null;
    pc2Remote = null;

    // Clear video previews and stop audio playback on both receivers.
    sourceImage.texture = null;
    sourceAudio.Stop();
    sourceAudio.clip = null;
    receiveImage1.texture = null;
    receiveAudio1.Stop();
    receiveAudio1.clip = null;
    receiveImage2.texture = null;
    receiveAudio2.Stop();
    receiveAudio2.clip = null;

    startButton.interactable = true;
    callButton.interactable = false;
    hangUpButton.interactable = false;
}
/// <summary>
/// Ends the call: stops the frame-update coroutine, releases both video
/// streams and both peer connections, clears the SDP text fields, and resets
/// all buttons/inputs to the pre-call state.
/// </summary>
private void HangUp()
{
    // Stop pumping frames before tearing the streams down.
    StopCoroutine(updateCoroutine);
    updateCoroutine = null;

    sourceVideoStream.Dispose();
    sourceVideoStream = null;
    sourceImage.texture = null;
    receiveVideoStream.Dispose();
    receiveVideoStream = null;
    receiveImage.texture = null;

    // Clear the manually-exchanged SDP text.
    offerSdpInput.text = string.Empty;
    answerSdpInput.text = string.Empty;

    // Close the native connections first, then release the managed wrappers.
    pcLocal.Close();
    pcRemote.Close();
    pcLocal.Dispose();
    pcRemote.Dispose();
    pcLocal = null;
    pcRemote = null;

    startButton.interactable = true;
    callButton.interactable = false;
    createOfferButton.interactable = false;
    createAnswerButton.interactable = false;
    setOfferButton.interactable = false;
    setAnswerButton.interactable = false;
    hangUpButton.interactable = false;
    offerSdpInput.interactable = false;
    answerSdpInput.interactable = false;
}
public IEnumerator ReceiverGetStreams()
{
    // Stream an AudioSource-backed track and verify each remote receiver
    // reports exactly one associated stream.
    var audioObject = new GameObject("audio");
    var audioSource = audioObject.AddComponent<AudioSource>();
    audioSource.clip = AudioClip.Create("test", 480, 2, 48000, false);

    var track = new AudioStreamTrack(audioSource);
    var sendStream = new MediaStream();
    sendStream.AddTrack(track);
    yield return 0;

    var peers = new MonoBehaviourTest<SignalingPeers>();
    peers.component.AddStream(0, sendStream);
    yield return peers;

    foreach (var receiver in peers.component.GetPeerReceivers(1))
    {
        Assert.That(receiver.Streams, Has.Count.EqualTo(1));
    }

    // Tear down in reverse order of creation.
    peers.component.Dispose();
    foreach (var t in sendStream.GetTracks())
    {
        t.Dispose();
    }
    sendStream.Dispose();
    Object.DestroyImmediate(peers.gameObject);
    Object.DestroyImmediate(audioSource.clip);
    Object.DestroyImmediate(audioObject);
}
/// <summary>
/// Ends the call: stops the frame-update coroutine, releases the source
/// stream and both receive streams, closes and disposes all four peer
/// connections, and resets the UI to the pre-call state.
/// </summary>
private void HangUp()
{
    // Stop pumping frames before tearing the streams down.
    StopCoroutine(updateCoroutine);
    updateCoroutine = null;

    sourceVideoStream.Dispose();
    sourceVideoStream = null;
    sourceImage.texture = null;
    receiveVideoStream1.Dispose();
    receiveVideoStream1 = null;
    receiveImage1.texture = null;
    receiveVideoStream2.Dispose();
    receiveVideoStream2 = null;
    receiveImage2.texture = null;

    // Close the native connections first, then release the managed wrappers.
    pc1Local.Close();
    pc1Remote.Close();
    pc2Local.Close();
    pc2Remote.Close();
    pc1Local.Dispose();
    pc1Remote.Dispose();
    pc2Local.Dispose();
    pc2Remote.Dispose();
    pc1Local = null;
    pc1Remote = null;
    pc2Local = null;
    pc2Remote = null;

    startButton.interactable = true;
    callButton.interactable = false;
    hangUpButton.interactable = false;
}
public void CreateAndDeleteMediaStream()
{
    // Construction succeeds and the stream can be released immediately.
    var stream = new MediaStream();
    Assert.That(stream, Is.Not.Null);
    stream.Dispose();
}
public IEnumerator VideoStreamAddTrackAndRemoveTrack()
{
    // A video track can be added to and removed from a stream, and the
    // stream's track list reflects each operation.
    var textureFormat = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var renderTexture = new UnityEngine.RenderTexture(256, 256, 0, textureFormat);
    renderTexture.Create();

    var stream = new MediaStream();
    var videoTrack = new VideoStreamTrack("video", renderTexture);

    // Give the encoder a frame to finish initializing on the render thread.
    yield return 0;

    Assert.That(videoTrack.Kind, Is.EqualTo(TrackKind.Video));
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));

    Assert.That(stream.AddTrack(videoTrack), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
    Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);

    Assert.That(stream.RemoveTrack(videoTrack), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));

    videoTrack.Dispose();
    // Give the render thread a frame to release the track.
    yield return 0;
    stream.Dispose();
    Object.DestroyImmediate(renderTexture);
}
/// <summary>
/// Per-test setup: runs a complete offer/answer exchange between two
/// throwaway peer connections to harvest a real ICE candidate and SDP
/// descriptions for later use, then creates the two signaling clients under
/// test.
/// </summary>
public IEnumerator UnitySetUp()
{
    WebRTC.WebRTC.Initialize();
    RTCConfiguration config = default;
    RTCIceCandidate candidate_ = null;
    config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };
    var peer1 = new RTCPeerConnection(ref config);
    var peer2 = new RTCPeerConnection(ref config);
    // Capture the first candidate peer1 gathers; the WaitUntil below spins on it.
    peer1.OnIceCandidate = candidate => { candidate_ = candidate; };
    // An audio track is added so the offer contains a media section.
    MediaStream stream = WebRTC.Audio.CaptureStream();
    peer1.AddTrack(stream.GetTracks().First());
    RTCOfferOptions offerOptions = new RTCOfferOptions();
    var op1 = peer1.CreateOffer(ref offerOptions);
    yield return(op1);
    m_DescOffer = op1.Desc;
    var op2 = peer1.SetLocalDescription(ref m_DescOffer);
    yield return(op2);
    var op3 = peer2.SetRemoteDescription(ref m_DescOffer);
    yield return(op3);
    RTCAnswerOptions answerOptions = new RTCAnswerOptions();
    var op4 = peer2.CreateAnswer(ref answerOptions);
    yield return(op4);
    m_DescAnswer = op4.Desc;
    var op5 = peer2.SetLocalDescription(ref m_DescAnswer);
    yield return(op5);
    var op6 = peer1.SetRemoteDescription(ref m_DescAnswer);
    yield return(op6);
    // ICE gathering starts once the local description is set; wait for the
    // first candidate to arrive.
    yield return(new WaitUntil(() => candidate_ != null));
    m_candidate = candidate_;
    stream.Dispose();
    peer1.Close();
    peer2.Close();
    // NOTE(review): peer1/peer2 are Closed but never Disposed, and the
    // captured track is not disposed explicitly — confirm whether these
    // wrappers leak across tests.
    m_Context = SynchronizationContext.Current;
    signaling1 = CreateSignaling(m_SignalingType, m_Context);
    signaling2 = CreateSignaling(m_SignalingType, m_Context);
}
public IEnumerator ReceiverGetStreams()
{
    // Send an audio track over a named native stream and verify each remote
    // receiver reports exactly one associated stream.
    var track = new AudioStreamTrack("audio");
    var sendStream = new MediaStream(WebRTC.Context.CreateMediaStream("audiostream"));
    sendStream.AddTrack(track);
    yield return 0;

    var peers = new MonoBehaviourTest<SignalingPeers>();
    peers.component.SetStream(sendStream);
    yield return peers;

    foreach (var receiver in peers.component.GetReceivers(1))
    {
        Assert.That(receiver.Streams, Has.Count.EqualTo(1));
    }

    // Tear down in reverse order of creation.
    peers.component.Dispose();
    foreach (var t in sendStream.GetTracks())
    {
        t.Dispose();
    }
    sendStream.Dispose();
}
public IEnumerator SetLocalDescriptionFailed()
{
    // SetLocalDescription must report an error (with a message) when handed
    // an unparsable SDP.
    var peer = new RTCPeerConnection();
    var stream = new MediaStream();
    var audioTrack = new AudioStreamTrack("audio");
    var sender = peer.AddTrack(audioTrack, stream);

    RTCOfferOptions offerOptions = default;
    var createOp = peer.CreateOffer(ref offerOptions);
    yield return createOp;
    Assert.True(createOp.IsDone);
    Assert.False(createOp.IsError);

    // Corrupt the sdp so it cannot be parsed.
    var desc = createOp.Desc;
    desc.sdp = desc.sdp.Replace("m=audio", "m=audiable");

    var setOp = peer.SetLocalDescription(ref desc);
    yield return setOp;
    Assert.True(setOp.IsDone);
    Assert.True(setOp.IsError);
    Assert.IsNotEmpty(setOp.Error.message);

    peer.RemoveTrack(sender);
    audioTrack.Dispose();
    stream.Dispose();
    peer.Close();
    peer.Dispose();
}
public IEnumerator MediaStreamAddTrack()
{
    // A video track can be added to and removed from a stream, and the
    // stream's video-track list reflects each operation.
    var textureFormat = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var renderTexture = new UnityEngine.RenderTexture(256, 256, 0, textureFormat);
    renderTexture.Create();

    var stream = new MediaStream();
    var videoTrack = new VideoStreamTrack("video", renderTexture);

    // Let the encoder finish initializing on the render thread.
    yield return new WaitForSeconds(0.1f);

    Assert.AreEqual(TrackKind.Video, videoTrack.Kind);
    Assert.AreEqual(0, stream.GetVideoTracks().Count());

    Assert.True(stream.AddTrack(videoTrack));
    Assert.AreEqual(1, stream.GetVideoTracks().Count());
    Assert.NotNull(stream.GetVideoTracks().First());

    Assert.True(stream.RemoveTrack(videoTrack));
    Assert.AreEqual(0, stream.GetVideoTracks().Count());

    videoTrack.Dispose();
    // Let the render thread release the track before disposing the stream.
    yield return new WaitForSeconds(0.1f);
    stream.Dispose();
    Object.DestroyImmediate(renderTexture);
}
public void RegisterDelegate()
{
    // Assigning track-event callbacks on a fresh stream must not throw.
    var stream = new MediaStream();
    stream.OnAddTrack = _ => { };
    stream.OnRemoveTrack = _ => { };
    stream.Dispose();
}
/// <summary>
/// Removing a sender's track on the local peer must (a) trigger
/// OnNegotiationNeeded locally and (b), after renegotiation, fire
/// OnRemoveTrack on the remote peer's receive stream.
/// </summary>
public IEnumerator RemoteOnRemoveTrack()
{
    RTCConfiguration config = default;
    config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };
    var peer1 = new RTCPeerConnection(ref config);
    var peer2 = new RTCPeerConnection(ref config);
    // Trickle candidates directly between the two peers.
    peer1.OnIceCandidate = candidate => { peer2.AddIceCandidate(candidate); };
    peer2.OnIceCandidate = candidate => { peer1.AddIceCandidate(candidate); };
    var stream = new MediaStream();
    MediaStream receiveStream = null;
    var track = new AudioStreamTrack();
    stream.AddTrack(track);
    RTCRtpSender sender = peer1.AddTrack(track, stream);
    bool isInvokeNegotiationNeeded1 = false;
    peer1.OnNegotiationNeeded = () => isInvokeNegotiationNeeded1 = true;
    bool isInvokeOnRemoveTrack = false;
    // Capture the remote stream when the track arrives and hook its
    // OnRemoveTrack so we can observe the removal on the remote side.
    peer2.OnTrack = e => {
        Assert.That(e.Streams, Has.Count.EqualTo(1));
        receiveStream = e.Streams.First();
        receiveStream.OnRemoveTrack = ev => isInvokeOnRemoveTrack = true;
    };
    yield return(SignalingOffer(peer1, peer2));
    peer1.RemoveTrack(sender);
    // Removing the track must mark peer1 as needing renegotiation.
    var op9 = new WaitUntilWithTimeout(() => isInvokeNegotiationNeeded1, 5000);
    yield return(op9);
    Assert.That(op9.IsCompleted, Is.True);
    // Renegotiate so the removal propagates to peer2.
    yield return(SignalingOffer(peer1, peer2));
    var op10 = new WaitUntilWithTimeout(() => isInvokeOnRemoveTrack, 5000);
    yield return(op10);
    Assert.That(op10.IsCompleted, Is.True);
    stream.Dispose();
    receiveStream.Dispose();
    track.Dispose();
    peer1.Dispose();
    peer2.Dispose();
}
/// <summary>
/// Closes the streaming connection, unregisters this viewer, and releases the
/// receive stream and texture.
/// </summary>
void OnDisable()
{
    RenderStreaming.Instance?.CloseConnection(connectionId);
    RenderStreaming.Instance?.RemoveVideoReceiveViewer(this);
    // Fix: OnDisable can run before the stream was ever created, which
    // previously threw NullReferenceException. Guard the stream the same way
    // the RenderStreaming.Instance accesses already are.
    if (m_receiveStream != null)
    {
        m_receiveStream.OnAddTrack = null;
        m_receiveStream.Dispose();
        m_receiveStream = null;
    }
    m_receiveTexture = null;
}
public void EqualId()
{
    // The id handed to the native stream constructor is surfaced unchanged
    // through the Id property.
    var expectedId = Guid.NewGuid().ToString();
    var stream = new MediaStream(WebRTC.Context.CreateMediaStream(expectedId));

    Assert.That(stream, Is.Not.Null);
    Assert.That(stream.Id, Is.EqualTo(expectedId));

    stream.Dispose();
}
/// <summary>
/// End-to-end video test: captures a camera on one peer, signals to a second
/// peer, and verifies the receiving side initializes a decoder and produces a
/// receive texture.
/// </summary>
public IEnumerator VideoReceive()
{
    var config = new RTCConfiguration { iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } } };
    var pc1 = new RTCPeerConnection(ref config);
    var pc2 = new RTCPeerConnection(ref config);
    var sendStream = new MediaStream();
    var receiveStream = new MediaStream();
    VideoStreamTrack receiveVideoTrack = null;
    RenderTexture receiveImage = null;
    // When the remote video track lands in the receive stream, set up its
    // decoder output texture.
    receiveStream.OnAddTrack = e => {
        if (e.Track is VideoStreamTrack track)
        {
            receiveVideoTrack = track;
            receiveImage = receiveVideoTrack.InitializeReceiver(640, 320);
        }
    };
    pc2.OnTrack = e => receiveStream.AddTrack(e.Track);
    // Source: a camera rendering a solid red background.
    var camObj = new GameObject("Camera");
    var cam = camObj.AddComponent<Camera>();
    cam.backgroundColor = Color.red;
    var sendVideoTrack = cam.CaptureStreamTrack(1280, 720, 1000000);
    // Give the encoder time to initialize on the render thread.
    yield return(new WaitForSeconds(0.1f));
    pc1.AddTrack(sendVideoTrack, sendStream);
    pc2.AddTransceiver(TrackKind.Video);
    yield return(SignalingPeers(pc1, pc2));
    yield return(new WaitUntil(() => receiveVideoTrack != null && receiveVideoTrack.IsDecoderInitialized));
    Assert.NotNull(receiveImage);
    // Push one frame through the pipeline and let the receiver pick it up.
    sendVideoTrack.Update();
    yield return(new WaitForSeconds(0.1f));
    receiveVideoTrack.UpdateReceiveTexture();
    yield return(new WaitForSeconds(0.1f));
    // Tear down: tracks before streams, then the peer connections.
    receiveVideoTrack.Dispose();
    receiveStream.Dispose();
    sendVideoTrack.Dispose();
    sendStream.Dispose();
    pc2.Dispose();
    pc1.Dispose();
    Object.DestroyImmediate(receiveImage);
}
/// <summary>
/// Dispose of unmanaged resources.
/// </summary>
/// <param name="disposing">True when called from Dispose; false when called from the finalizer (GC).</param>
/// <remarks>
/// Implements the standard dispose pattern.
/// </remarks>
protected virtual void Dispose(bool disposing)
{
    if (!disposing)
    {
        return;
    }

    // Release the owned stream exactly once.
    MediaStream?.Dispose();
    MediaStream = null;
}
/// <summary>
/// Adding/removing a video track updates the stream's track list and fires
/// the OnAddTrack / OnRemoveTrack callbacks.
/// </summary>
public IEnumerator VideoStreamAddTrackAndRemoveTrack()
{
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var rt = new UnityEngine.RenderTexture(width, height, 0, format);
    rt.Create();
    var stream = new MediaStream();
    var track = new VideoStreamTrack(rt);
    bool isCalledOnAddTrack = false;
    bool isCalledOnRemoveTrack = false;
    stream.OnAddTrack = e =>
    {
        Assert.That(e.Track, Is.EqualTo(track));
        isCalledOnAddTrack = true;
    };
    stream.OnRemoveTrack = e =>
    {
        Assert.That(e.Track, Is.EqualTo(track));
        isCalledOnRemoveTrack = true;
    };
    // wait for the end of the initialization for encoder on the render thread.
    yield return(0);
    Assert.That(track.Kind, Is.EqualTo(TrackKind.Video));
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
    Assert.That(stream.AddTrack(track), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
    Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);
    Assert.That(stream.RemoveTrack(track), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
    var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);
    yield return(op1);
    // Fix: assert the waits actually completed — previously a timed-out
    // callback went unnoticed and the test passed vacuously.
    Assert.That(op1.IsCompleted, Is.True);
    var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);
    yield return(op2);
    Assert.That(op2.IsCompleted, Is.True);
    track.Dispose();
    stream.Dispose();
    Object.DestroyImmediate(rt);
}
public void AddAndRemoveAudioStreamTrack()
{
    // An audio track can be added to and removed from a stream, and the
    // stream's audio-track list reflects each operation.
    var mediaStream = new MediaStream();
    var audioTrack = new AudioStreamTrack("audio");

    Assert.AreEqual(TrackKind.Audio, audioTrack.Kind);
    Assert.AreEqual(0, mediaStream.GetAudioTracks().Count());

    Assert.True(mediaStream.AddTrack(audioTrack));
    Assert.AreEqual(1, mediaStream.GetAudioTracks().Count());
    Assert.NotNull(mediaStream.GetAudioTracks().First());

    Assert.True(mediaStream.RemoveTrack(audioTrack));
    Assert.AreEqual(0, mediaStream.GetAudioTracks().Count());

    audioTrack.Dispose();
    mediaStream.Dispose();
}
/// <summary>
/// An audio track can be added to and removed from a stream, and the
/// stream's audio-track list reflects each operation.
/// </summary>
public void AddAndRemoveAudioStreamTrack()
{
    var stream = new MediaStream();
    var track = new AudioStreamTrack("audio");
    // Fix: the constraint model takes the ACTUAL value first; the arguments
    // were swapped, which produces misleading failure messages.
    Assert.That(track.Kind, Is.EqualTo(TrackKind.Audio));
    Assert.That(stream.GetAudioTracks(), Has.Count.EqualTo(0));
    Assert.That(stream.AddTrack(track), Is.True);
    Assert.That(stream.GetAudioTracks(), Has.Count.EqualTo(1));
    Assert.That(stream.GetAudioTracks(), Has.All.Not.Null);
    Assert.That(stream.RemoveTrack(track), Is.True);
    Assert.That(stream.GetAudioTracks(), Has.Count.EqualTo(0));
    track.Dispose();
    stream.Dispose();
}
/// <summary>
/// Media pipeline processor: hands the pipeline's output stream to the image
/// optimizer and, when a distinct optimized stream comes back, swaps it into
/// the pipeline args (disposing the original).
/// </summary>
public void Process(GetMediaStreamPipelineArgs args)
{
    Assert.ArgumentNotNull(args, "args");
    // Thumbnails are not optimized.
    if (args.Options.Thumbnail)
    {
        return;
    }
    // Never optimize inside the Sitecore shell (authoring) site.
    if (Sitecore.Context.Site?.Name == "shell")
    {
        return;
    }
    MediaStream outputStream = args.OutputStream;
    if (outputStream == null)
    {
        return;
    }
    // Streams too large for in-memory processing are logged and left as-is.
    if (!outputStream.AllowMemoryLoading)
    {
        Tracer.Error("Could not resize image as it was larger than the maximum size allowed for memory processing. Media item: {0}", outputStream.MediaItem.Path);
        return;
    }
    MediaStream optimizedOutputStream = _optimizer.Process(outputStream, args.Options);
    // Only swap when the optimizer produced a genuinely different stream.
    if (optimizedOutputStream != null && outputStream.Stream != optimizedOutputStream.Stream)
    {
        outputStream.Dispose(); // Uses thread safe dispose helper that won't double dispose
        args.OutputStream = optimizedOutputStream;
        if (optimizedOutputStream.Extension == "webp")
        {
            // WebP processor has handled everything including resizing
            args.AbortPipeline();
        }
    }
    else
    {
        var mediaPath = outputStream.MediaItem.MediaPath;
        Log.Info($"Dianoga: {mediaPath} cannot be optimized due to media type or path exclusion", this);
    }
}
/// <summary>
/// RestartIce() on either peer of an established connection must trigger
/// that peer's OnNegotiationNeeded callback.
/// </summary>
public IEnumerator RestartIceInvokeOnNegotiationNeeded()
{
    RTCConfiguration config = default;
    config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };
    var peer1 = new RTCPeerConnection(ref config);
    var peer2 = new RTCPeerConnection(ref config);
    // Trickle candidates directly between the two peers.
    peer1.OnIceCandidate = candidate => { peer2.AddIceCandidate(candidate); };
    peer2.OnIceCandidate = candidate => { peer1.AddIceCandidate(candidate); };
    // An audio track is added so the offer contains a media section.
    MediaStream stream = Audio.CaptureStream();
    peer1.AddTrack(stream.GetTracks().First());
    yield return(SignalingOffer(peer1, peer2));
    bool isInvokeOnNegotiationNeeded1 = false;
    bool isInvokeOnNegotiationNeeded2 = false;
    peer1.OnNegotiationNeeded = () => isInvokeOnNegotiationNeeded1 = true;
    peer2.OnNegotiationNeeded = () => isInvokeOnNegotiationNeeded2 = true;
    peer1.RestartIce();
    var op9 = new WaitUntilWithTimeout(() => isInvokeOnNegotiationNeeded1, 5000);
    yield return(op9);
    Assert.That(op9.IsCompleted, Is.True);
    peer2.RestartIce();
    var op10 = new WaitUntilWithTimeout(() => isInvokeOnNegotiationNeeded2, 5000);
    yield return(op10);
    Assert.That(op10.IsCompleted, Is.True);
    stream.Dispose();
    peer1.Close();
    peer2.Close();
    // Fix: release the managed wrappers as the sibling tests do — Close()
    // alone leaks the native peer-connection handles across the test run.
    peer1.Dispose();
    peer2.Dispose();
}
public IEnumerator SetRemoteDescriptionFailed()
{
    // SetRemoteDescription must report an error (with a message) when the
    // remote SDP's mid does not match any transceiver.
    var config = GetDefaultConfiguration();
    var peer1 = new RTCPeerConnection(ref config);
    var peer2 = new RTCPeerConnection(ref config);
    var stream = new MediaStream();

    var audioObject = new GameObject("audio");
    var audioSource = audioObject.AddComponent<AudioSource>();
    audioSource.clip = AudioClip.Create("test", 480, 2, 48000, false);
    var audioTrack = new AudioStreamTrack(audioSource);
    var sender = peer1.AddTrack(audioTrack, stream);

    var offerOp = peer1.CreateOffer();
    yield return offerOp;
    var desc = offerOp.Desc;
    var setLocalOp = peer1.SetLocalDescription(ref desc);
    yield return setLocalOp;

    // Corrupt the sdp so it cannot be parsed.
    desc.sdp = desc.sdp.Replace("a=mid:0", "a=mid:10");
    var setRemoteOp = peer2.SetRemoteDescription(ref desc);
    yield return setRemoteOp;
    Assert.True(setRemoteOp.IsDone);
    Assert.True(setRemoteOp.IsError);
    Assert.IsNotEmpty(setRemoteOp.Error.message);

    // Tear down in reverse order of creation.
    peer1.RemoveTrack(sender);
    audioTrack.Dispose();
    stream.Dispose();
    peer1.Close();
    peer2.Close();
    peer1.Dispose();
    peer2.Dispose();
    Object.DestroyImmediate(audioSource.clip);
    Object.DestroyImmediate(audioObject);
}
/// <summary>
/// Ends the call: releases both media streams and both peer connections,
/// clears the receive preview, and resets the buttons to the pre-call state.
/// </summary>
private void HangUp()
{
    videoStream.Dispose();
    receiveStream.Dispose();
    videoStream = null;
    receiveStream = null;

    // Close the native connections first, then release the managed wrappers.
    _pc1.Close();
    _pc2.Close();
    Debug.Log("Close local/remote peer connection");
    _pc1.Dispose();
    _pc2.Dispose();
    _pc1 = null;
    _pc2 = null;

    receiveImage.texture = null;
    callButton.interactable = true;
    hangUpButton.interactable = false;
    addTracksButton.interactable = false;
    removeTracksButton.interactable = false;
}
public void AddAndRemoveAudioStreamTrack()
{
    // An AudioSource-backed track can be added to and removed from a stream,
    // and the stream's audio-track list reflects each operation.
    GameObject audioObject = new GameObject("audio");
    AudioSource audioSource = audioObject.AddComponent<AudioSource>();
    audioSource.clip = AudioClip.Create("test", 480, 2, 48000, false);

    var mediaStream = new MediaStream();
    var audioTrack = new AudioStreamTrack(audioSource);

    Assert.AreEqual(TrackKind.Audio, audioTrack.Kind);
    Assert.AreEqual(0, mediaStream.GetAudioTracks().Count());

    Assert.True(mediaStream.AddTrack(audioTrack));
    Assert.AreEqual(1, mediaStream.GetAudioTracks().Count());
    Assert.NotNull(mediaStream.GetAudioTracks().First());

    Assert.True(mediaStream.RemoveTrack(audioTrack));
    Assert.AreEqual(0, mediaStream.GetAudioTracks().Count());

    audioTrack.Dispose();
    mediaStream.Dispose();
    Object.DestroyImmediate(audioSource.clip);
    Object.DestroyImmediate(audioObject);
}
/// <summary>
/// An AudioSource-backed track can be added to and removed from a stream,
/// and the stream's audio-track list reflects each operation.
/// </summary>
public void AddAndRemoveAudioTrack()
{
    var obj = new GameObject("audio");
    var source = obj.AddComponent<AudioSource>();
    source.clip = AudioClip.Create("test", 480, 2, 48000, false);
    var stream = new MediaStream();
    var track = new AudioStreamTrack(source);
    // Fix: the constraint model takes the ACTUAL value first; the arguments
    // were swapped, which produces misleading failure messages.
    Assert.That(track.Kind, Is.EqualTo(TrackKind.Audio));
    Assert.That(stream.GetAudioTracks(), Has.Count.EqualTo(0));
    Assert.That(stream.AddTrack(track), Is.True);
    Assert.That(stream.GetAudioTracks(), Has.Count.EqualTo(1));
    Assert.That(stream.GetAudioTracks(), Has.All.Not.Null);
    Assert.That(stream.RemoveTrack(track), Is.True);
    Assert.That(stream.GetAudioTracks(), Has.Count.EqualTo(0));
    track.Dispose();
    stream.Dispose();
    Object.DestroyImmediate(source.clip);
    Object.DestroyImmediate(obj);
}