public static MediaStream CaptureStream(this Camera cam, int width, int height, RenderTextureDepth depth = RenderTextureDepth.DEPTH_24)
{
    if (camCopyRts.Count > 0)
    {
        throw new NotImplementedException("Multiple MediaStream instances are not supported yet");
    }
    switch (depth)
    {
        case RenderTextureDepth.DEPTH_16:
        case RenderTextureDepth.DEPTH_24:
        case RenderTextureDepth.DEPTH_32:
            break;
        default:
            throw new InvalidEnumArgumentException(nameof(depth), (int)depth, typeof(RenderTextureDepth));
    }
    int depthValue = (int)depth;
    RenderTexture[] rts = new RenderTexture[2];
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);

    // rts[0] is the camera's render target; rts[1] receives the flipped copy handed to WebRTC.
    rts[0] = new RenderTexture(width, height, depthValue, format);
    rts[1] = new RenderTexture(width, height, 0, format);
    rts[0].Create();
    rts[1].Create();
    camCopyRts.Add(rts);
    cam.targetTexture = rts[0];

    // Release and destroy both textures when the camera's GameObject is cleaned up.
    cam.gameObject.AddCleanerCallback(() =>
    {
        CameraExtension.RemoveRt(rts);
        rts[0].Release();
        rts[1].Release();
        UnityEngine.Object.Destroy(rts[0]);
        UnityEngine.Object.Destroy(rts[1]);
    });
    started = true;

    var stream = WebRTC.Context.CaptureVideoStream(rts[1].GetNativeTexturePtr(), width, height);
    // TODO: The encoder must be initialized after the stream instance is created.
    // This requirement will change in a future release.
    WebRTC.Context.InitializeEncoder();
    return new MediaStream(rts, stream);
}
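// Usage sketch (illustrative, not part of this file): capturing a Camera into a
// video MediaStream. It assumes WebRTC.Initialize() has already been called and
// that "cam" is a valid Camera reference; the 1280x720 resolution is an
// arbitrary example value.
//
//     MediaStream videoStream = cam.CaptureStream(1280, 720);
//     // Only DEPTH_16/24/32 are accepted; any other depth value throws
//     // InvalidEnumArgumentException.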
private void StopTrack(MediaStreamTrack track)
{
    if (track.Kind == TrackKind.Video)
    {
        WebRTC.Context.StopMediaStreamTrack(track.self);

        // Release and destroy the render texture pair that backed this video track.
        RenderTexture[] rts = VideoTrackToRts[track];
        if (rts != null)
        {
            CameraExtension.RemoveRt(rts);
            rts[0].Release();
            rts[1].Release();
            UnityEngine.Object.Destroy(rts[0]);
            UnityEngine.Object.Destroy(rts[1]);
        }
    }
    else
    {
        // Audio tracks have no render textures; stop the shared audio source instead.
        Audio.Stop();
    }
}
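// Illustrative sketch only: a public stop path would dispatch each registered
// video track through StopTrack. VideoTrackToRts is assumed here to be a
// Dictionary keyed by MediaStreamTrack; the Stop() method below is hypothetical,
// not a confirmed member of this class.
//
//     public void Stop()
//     {
//         foreach (MediaStreamTrack track in VideoTrackToRts.Keys)
//         {
//             StopTrack(track);
//         }
//     }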