/// <summary>
/// Sets up the streaming camera on this GameObject: attaches a Camera and an active
/// ParentConstraint, allocates the RenderTexture used as the streaming target, and
/// binds to the current main camera — or starts watching for one if none exists yet.
/// </summary>
protected virtual void Awake()
{
    this._camera = this.gameObject.AddComponent<Camera>();
    this._cameraConstraint = this.gameObject.AddComponent<ParentConstraint>();
    this._cameraConstraint.constraintActive = true;

    // Pick a render-texture format supported by the active graphics API.
    var textureFormat = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var resolution = VideoStreamManager.Instance.streamingResolution;
    this._streamingTexture = new RenderTexture(resolution.x, resolution.y, this.depth, textureFormat)
    {
        antiAliasing = this.antiAliasing
    };
    this._streamingTexture.Create();

    var mainCamera = Camera.main;
    if (mainCamera == null)
    {
        // No main camera yet; flag so a later update can pick one up.
        this._lookingForNewCamera = true;
    }
    else
    {
        ConfigureForNewCamera(mainCamera);
    }
}
/// <summary>
/// Verifies the Enabled and ReadyState properties of a VideoStreamTrack
/// backed by a RenderTexture, then disposes the track and the texture.
/// </summary>
public IEnumerator MediaStreamTrackEnabled()
{
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var rt = new RenderTexture(width, height, 0, format);
    rt.Create();
    var track = new VideoStreamTrack("video", rt);
    Assert.NotNull(track);
    yield return new WaitForSeconds(0.1f);

    // Enabled property
    Assert.True(track.Enabled);
    track.Enabled = false;
    Assert.False(track.Enabled);

    // ReadyState property.
    // Fix: Assert.AreEqual takes (expected, actual); the arguments were swapped,
    // which produces a misleading failure message on regression.
    Assert.AreEqual(TrackState.Live, track.ReadyState);

    track.Dispose();
    // Give the render thread time to release the track before destroying the texture.
    yield return new WaitForSeconds(0.1f);
    Object.DestroyImmediate(rt);
}
/// <summary>
/// Verifies the Enabled and ReadyState properties of a VideoStreamTrack,
/// waiting a frame for the encoder to initialize on the render thread.
/// </summary>
public IEnumerator VideoStreamTrackEnabled()
{
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var rt = new RenderTexture(width, height, 0, format);
    rt.Create();
    var track = new VideoStreamTrack("video", rt);
    Assert.NotNull(track);

    // wait for the end of the initialization for encoder on the render thread.
    yield return 0;

    // todo:: returns always false.
    // Assert.True(track.IsInitialized);

    // Enabled property
    Assert.True(track.Enabled);
    track.Enabled = false;
    Assert.False(track.Enabled);

    // ReadyState property.
    // Fix: Assert.AreEqual takes (expected, actual); the arguments were swapped,
    // which produces a misleading failure message on regression.
    Assert.AreEqual(TrackState.Live, track.ReadyState);

    track.Dispose();
    // wait for disposing video track.
    yield return 0;
    Object.DestroyImmediate(rt);
}
/// <summary>
/// Adds a transceiver with an explicit RTCRtpTransceiverInit (direction, encodings,
/// streams) and checks the resulting transceiver and sender parameters.
/// </summary>
public void AddTransceiverWithInit()
{
    var peer = new RTCPeerConnection();
    var stream = new MediaStream();
    var direction = RTCRtpTransceiverDirection.SendOnly;
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var rt = new RenderTexture(width, height, 0, format);
    rt.Create();
    var track = new VideoStreamTrack(rt);
    var init = new RTCRtpTransceiverInit()
    {
        direction = direction,
        sendEncodings = new RTCRtpEncodingParameters[]
        {
            new RTCRtpEncodingParameters { maxFramerate = 30 }
        },
        streams = new MediaStream[] { stream }
    };
    var transceiver = peer.AddTransceiver(track, init);

    Assert.That(transceiver, Is.Not.Null);
    Assert.That(transceiver.CurrentDirection, Is.Null);
    Assert.That(transceiver.Direction, Is.EqualTo(RTCRtpTransceiverDirection.SendOnly));
    Assert.That(transceiver.Sender, Is.Not.Null);

    var parameters = transceiver.Sender.GetParameters();
    Assert.That(parameters, Is.Not.Null);
    Assert.That(parameters.codecs, Is.Not.Null.And.Empty);

    // Fix: the track, stream and texture were leaked; release them like the other
    // tests in this file (see AddTrack / AddTransceiver).
    track.Dispose();
    stream.Dispose();
    peer.Dispose();
    Object.DestroyImmediate(rt);
}
/// <summary>
/// Adds a video track to a MediaStream, confirms it appears in GetVideoTracks,
/// removes it again, and cleans everything up.
/// </summary>
public IEnumerator MediaStreamAddTrack()
{
    const int width = 256;
    const int height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var texture = new UnityEngine.RenderTexture(width, height, 0, format);
    texture.Create();

    var stream = new MediaStream();
    var track = new VideoStreamTrack("video", texture);
    yield return new WaitForSeconds(0.1f);

    Assert.AreEqual(TrackKind.Video, track.Kind);

    // The stream starts empty; adding the track makes it visible, removing hides it.
    Assert.AreEqual(0, stream.GetVideoTracks().Count());
    Assert.True(stream.AddTrack(track));
    Assert.AreEqual(1, stream.GetVideoTracks().Count());
    Assert.NotNull(stream.GetVideoTracks().First());
    Assert.True(stream.RemoveTrack(track));
    Assert.AreEqual(0, stream.GetVideoTracks().Count());

    track.Dispose();
    yield return new WaitForSeconds(0.1f);
    stream.Dispose();
    Object.DestroyImmediate(texture);
}
/// <summary>
/// Verifies that a video track can be added to and removed from a MediaStream,
/// using constraint-style assertions.
/// </summary>
public IEnumerator VideoStreamAddTrackAndRemoveTrack()
{
    const int width = 256;
    const int height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var texture = new UnityEngine.RenderTexture(width, height, 0, format);
    texture.Create();

    var stream = new MediaStream();
    var track = new VideoStreamTrack("video", texture);

    // wait for the end of the initialization for encoder on the render thread.
    yield return 0;

    Assert.That(track.Kind, Is.EqualTo(TrackKind.Video));

    // Track count should go 0 -> 1 -> 0 across add/remove.
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
    Assert.That(stream.AddTrack(track), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
    Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);
    Assert.That(stream.RemoveTrack(track), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));

    track.Dispose();
    // wait for disposing video track.
    yield return 0;
    stream.Dispose();
    Object.DestroyImmediate(texture);
}
/// <summary>
/// Adds a video track to a peer connection and checks the returned sender
/// and its (initially empty) send parameters.
/// </summary>
public void AddTrack()
{
    var peer = new RTCPeerConnection();
    const int width = 256;
    const int height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var texture = new UnityEngine.RenderTexture(width, height, 0, format);
    texture.Create();
    var track = new VideoStreamTrack(texture);

    var sender = peer.AddTrack(track);
    Assert.That(sender, Is.Not.Null);
    // The sender must report back the exact track we handed in.
    Assert.That(track, Is.EqualTo(sender.Track));

    RTCRtpSendParameters parameters = sender.GetParameters();
    Assert.That(parameters, Is.Not.Null);
    Assert.That(parameters.encodings, Is.Empty);

    track.Dispose();
    peer.Dispose();
    Object.DestroyImmediate(texture);
}
/// <summary>
/// Creates and initializes a RenderTexture of the given size in a format
/// supported by the current graphics device.
/// </summary>
private static RenderTexture CreateRenderTexture(int width, int height)
{
    var supportedFormat = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var texture = new RenderTexture(width, height, 0, supportedFormat);
    texture.Create();
    return texture;
}
/// <summary>
/// Verifies that MediaStream.OnAddTrack / OnRemoveTrack delegates fire when a
/// second video track is added to and removed from an established stream.
/// </summary>
public IEnumerator OnAddTrackDelegatesWithEvent()
{
    var camObj = new GameObject("Camera");
    var cam = camObj.AddComponent<Camera>();
    var videoStream = cam.CaptureStream(1280, 720, 1000000);
    yield return new WaitForSeconds(0.1f);

    var test = new MonoBehaviourTest<SignalingPeers>();
    test.component.SetStream(videoStream);
    yield return test;
    test.component.CoroutineUpdate();
    yield return new WaitForSeconds(0.1f);

    bool isCalledOnAddTrack = false;
    bool isCalledOnRemoveTrack = false;

    videoStream.OnAddTrack = e =>
    {
        Assert.That(e.Track, Is.Not.Null);
        isCalledOnAddTrack = true;
    };
    videoStream.OnRemoveTrack = e =>
    {
        Assert.That(e.Track, Is.Not.Null);
        isCalledOnRemoveTrack = true;
    };

    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var rt = new UnityEngine.RenderTexture(width, height, 0, format);
    rt.Create();
    var track2 = new VideoStreamTrack("video2", rt);

    videoStream.AddTrack(track2);
    var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);
    yield return op1;
    // Fix: without this assertion the test silently passed when the delegate
    // never fired and the wait merely timed out.
    Assert.That(isCalledOnAddTrack, Is.True);

    videoStream.RemoveTrack(track2);
    var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);
    yield return op2;
    Assert.That(isCalledOnRemoveTrack, Is.True);

    test.component.Dispose();
    track2.Dispose();
    // wait for disposing video track.
    yield return 0;
    videoStream.Dispose();
    Object.DestroyImmediate(camObj);
    Object.DestroyImmediate(rt);
}
/// <summary>
/// A texture below the minimum supported size must make the
/// VideoStreamTrack constructor throw ArgumentException.
/// </summary>
public void ConstructThrowsExceptionWhenSmallTexture()
{
    const int smallWidth = 50;
    const int smallHeight = 50;
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var texture = new RenderTexture(smallWidth, smallHeight, 0, format);
    texture.Create();

    Assert.That(() => new VideoStreamTrack("video", texture),
        Throws.TypeOf<ArgumentException>());

    Object.DestroyImmediate(texture);
}
/// <summary>
/// Disposing a VideoStreamTrack immediately after construction (before any
/// frame is produced) must not throw.
/// </summary>
public void VideoStreamTrackDisposeImmediately()
{
    const int width = 256;
    const int height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var texture = new UnityEngine.RenderTexture(width, height, 0, format);
    texture.Create();

    var track = new VideoStreamTrack("video", texture);
    track.Dispose();

    Object.DestroyImmediate(texture);
}
/// <summary>
/// A VideoStreamTrack can be constructed from a valid RenderTexture.
/// </summary>
public void Construct()
{
    const int width = 256;
    const int height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var texture = new RenderTexture(width, height, 0, format);
    texture.Create();

    var track = new VideoStreamTrack("video", texture);
    Assert.That(track, Is.Not.Null);

    track.Dispose();
    Object.DestroyImmediate(texture);
}
/// <summary>
/// Creates a native video track from a texture pointer through the Context
/// and deletes it again.
/// </summary>
public void CreateAndDeleteVideoTrack()
{
    var context = Context.Create();
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var rt = new UnityEngine.RenderTexture(width, height, 0, format);
    rt.Create();
    var track = context.CreateVideoTrack("video", rt.GetNativeTexturePtr());
    context.DeleteMediaStreamTrack(track);
    context.Dispose();
    // Fix: the RenderTexture was leaked; the sibling tests destroy theirs.
    UnityEngine.Object.DestroyImmediate(rt);
}
/// <summary>
/// Verifies add/remove of a video track on a MediaStream and that the
/// OnAddTrack / OnRemoveTrack delegates actually fire for that track.
/// </summary>
public IEnumerator VideoStreamAddTrackAndRemoveTrack()
{
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var rt = new UnityEngine.RenderTexture(width, height, 0, format);
    rt.Create();
    var stream = new MediaStream();
    var track = new VideoStreamTrack(rt);

    bool isCalledOnAddTrack = false;
    bool isCalledOnRemoveTrack = false;
    stream.OnAddTrack = e =>
    {
        Assert.That(e.Track, Is.EqualTo(track));
        isCalledOnAddTrack = true;
    };
    stream.OnRemoveTrack = e =>
    {
        Assert.That(e.Track, Is.EqualTo(track));
        isCalledOnRemoveTrack = true;
    };

    // wait for the end of the initialization for encoder on the render thread.
    yield return 0;

    Assert.That(track.Kind, Is.EqualTo(TrackKind.Video));
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
    Assert.That(stream.AddTrack(track), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
    Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);
    Assert.That(stream.RemoveTrack(track), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));

    var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);
    yield return op1;
    // Fix: without these assertions the test silently passed when a delegate
    // never fired and the wait merely timed out.
    Assert.That(isCalledOnAddTrack, Is.True);

    var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);
    yield return op2;
    Assert.That(isCalledOnRemoveTrack, Is.True);

    track.Dispose();
    stream.Dispose();
    Object.DestroyImmediate(rt);
}
/// <summary>
/// Accessing a member of a disposed VideoStreamTrack must throw
/// InvalidOperationException.
/// </summary>
public void AccessAfterDisposed()
{
    const int width = 256;
    const int height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var texture = new RenderTexture(width, height, 0, format);
    texture.Create();

    var track = new VideoStreamTrack("video", texture);
    Assert.That(track, Is.Not.Null);
    track.Dispose();

    // Reading any property after Dispose must fail.
    Assert.That(() => { var id = track.Id; },
        Throws.TypeOf<InvalidOperationException>());

    Object.DestroyImmediate(texture);
}
/// <summary>
/// Creates a native video track through a Context configured for hardware or
/// software encoding (depending on platform support) and deletes it again.
/// </summary>
public void CreateAndDeleteVideoTrack()
{
    var value = NativeMethods.GetHardwareEncoderSupport();
    var context = Context.Create(
        encoderType: value ? EncoderType.Hardware : EncoderType.Software);
    // Fix: the original also allocated a 256x256 RenderTexture that was never
    // passed to CreateVideoTrack — dead code removed.
    var track = context.CreateVideoTrack("video");
    context.DeleteMediaStreamTrack(track);
    context.Dispose();
}
/// <summary>
/// Creates a video track from a native video track source through the Context
/// and releases both in reverse order of creation.
/// </summary>
public void CreateAndDeleteVideoTrack()
{
    var context = Context.Create();
    // Fix: the original also allocated a 256x256 RenderTexture that was never
    // used — the track is built from the track source, not a texture. Dead code removed.
    var source = context.CreateVideoTrackSource();
    var track = context.CreateVideoTrack("video", source);
    context.DeleteRefPtr(track);
    context.DeleteRefPtr(source);
    context.Dispose();
}
/// <summary>
/// Adds a transceiver for a video track and checks the transceiver, its sender,
/// and the sender's initial parameters (encodings, codecs, rtcp, header extensions).
/// </summary>
public void AddTransceiver()
{
    var peer = new RTCPeerConnection();
    const int width = 256;
    const int height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var texture = new UnityEngine.RenderTexture(width, height, 0, format);
    texture.Create();
    var track = new VideoStreamTrack(texture);

    Assert.That(peer.GetTransceivers(), Is.Empty);

    var transceiver = peer.AddTransceiver(track);
    Assert.That(transceiver, Is.Not.Null);
    // Before negotiation neither the mid nor the current direction is set.
    Assert.That(transceiver.Mid, Is.Null);
    Assert.That(transceiver.CurrentDirection, Is.Null);

    RTCRtpSender sender = transceiver.Sender;
    Assert.That(sender, Is.Not.Null);
    Assert.That(track, Is.EqualTo(sender.Track));

    RTCRtpSendParameters parameters = sender.GetParameters();
    Assert.That(parameters, Is.Not.Null);
    Assert.That(parameters.encodings, Is.Empty);
    Assert.That(parameters.transactionId, Is.Not.Empty);
    Assert.That(peer.GetTransceivers(), Has.Count.EqualTo(1));
    Assert.That(peer.GetTransceivers().First(), Is.Not.Null);
    Assert.That(parameters.codecs, Is.Empty);
    Assert.That(parameters.rtcp, Is.Not.Null);

    // Some platforms return an empty list
    Assert.That(parameters.headerExtensions, Is.Not.Null);
    foreach (var extension in parameters.headerExtensions)
    {
        Assert.That(extension, Is.Not.Null);
        Assert.That(extension.uri, Is.Not.Empty);
    }

    track.Dispose();
    peer.Dispose();
    Object.DestroyImmediate(texture);
}
/// <summary>
/// The supported RenderTexture and Texture2D formats must both map to the
/// same supported GraphicsFormat for the current graphics device.
/// </summary>
public void GraphicsFormat()
{
    var deviceType = SystemInfo.graphicsDeviceType;
    var graphicsFormat = WebRTC.GetSupportedGraphicsFormat(deviceType);
    var renderTextureFormat = WebRTC.GetSupportedRenderTextureFormat(deviceType);
    var textureFormat = WebRTC.GetSupportedTextureFormat(deviceType);

    var renderTexture = new RenderTexture(10, 10, 0, renderTextureFormat);
    renderTexture.Create();
    Assert.That(renderTexture.graphicsFormat, Is.EqualTo(graphicsFormat));

    var texture = new Texture2D(10, 10, textureFormat, false);
    Assert.That(texture.graphicsFormat, Is.EqualTo(graphicsFormat));

    Object.DestroyImmediate(renderTexture);
    Object.DestroyImmediate(texture);
}
/// <summary>
/// RemoveTrack does not remove the transceiver itself: GetTransceivers still
/// reports one entry after the sender's track is removed.
/// </summary>
public void GetTransceiversReturnsNotEmptyAfterCallingRemoveTrack()
{
    // Also, `RTCPeerConnection.AddTrack` and `RTCPeerConnection.RemoveTrack` method is not intuitive.
    var peer = new RTCPeerConnection();
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var rt = new UnityEngine.RenderTexture(width, height, 0, format);
    rt.Create();
    var track = new VideoStreamTrack(rt);
    var sender = peer.AddTrack(track);

    Assert.That(peer.GetTransceivers(), Has.Count.EqualTo(1));
    Assert.That(peer.RemoveTrack(sender), Is.EqualTo(RTCErrorType.None));
    // The transceiver remains even though its track was removed.
    Assert.That(peer.GetTransceivers(), Has.Count.EqualTo(1));

    // Fix: the track and texture were leaked; release them like the other tests.
    track.Dispose();
    peer.Dispose();
    Object.DestroyImmediate(rt);
}
/// <summary>
/// Two VideoStreamTracks backed by separate RenderTextures can coexist
/// and be disposed cleanly.
/// </summary>
public IEnumerator VideoStreamTrackInstantiateMultiple()
{
    const int width = 256;
    const int height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);

    var firstTexture = new UnityEngine.RenderTexture(width, height, 0, format);
    firstTexture.Create();
    var firstTrack = new VideoStreamTrack("video1", firstTexture);

    var secondTexture = new UnityEngine.RenderTexture(width, height, 0, format);
    secondTexture.Create();
    var secondTrack = new VideoStreamTrack("video2", secondTexture);

    // wait for initialization encoder on render thread.
    yield return new WaitForSeconds(0.1f);

    firstTrack.Dispose();
    secondTrack.Dispose();
    Object.DestroyImmediate(firstTexture);
    Object.DestroyImmediate(secondTexture);
}