/// <summary>
/// Creates a <see cref="VideoStreamTrack"/> that streams this camera's rendered output.
/// Allocates a <see cref="UnityEngine.RenderTexture"/> in a platform-supported format and
/// assigns it as the camera's target texture.
/// </summary>
/// <param name="cam">Camera whose output is captured.</param>
/// <param name="width">Texture width in pixels; must be greater than zero.</param>
/// <param name="height">Texture height in pixels; must be greater than zero.</param>
/// <param name="bitrate">Requested encoder bitrate. NOTE(review): currently unused in this method — presumably consumed elsewhere; kept for interface compatibility.</param>
/// <param name="depth">Depth-buffer precision; only 16/24/32-bit values are accepted.</param>
/// <returns>A new <see cref="VideoStreamTrack"/> backed by the created render texture.</returns>
/// <exception cref="InvalidEnumArgumentException">Thrown when <paramref name="depth"/> is not a supported value.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="width"/> or <paramref name="height"/> is not positive.</exception>
public static VideoStreamTrack CaptureStreamTrack(this Camera cam, int width, int height, int bitrate, RenderTextureDepth depth = RenderTextureDepth.DEPTH_24)
{
    switch (depth)
    {
        case RenderTextureDepth.DEPTH_16:
        case RenderTextureDepth.DEPTH_24:
        case RenderTextureDepth.DEPTH_32:
            break;
        default:
            throw new InvalidEnumArgumentException(nameof(depth), (int)depth, typeof(RenderTextureDepth));
    }

    // Fix: the original check (width == 0 || height == 0) allowed negative sizes through;
    // reject anything non-positive. The message grammar ("are should be") is also repaired.
    if (width <= 0 || height <= 0)
    {
        throw new ArgumentException("width and height should be greater than zero.");
    }

    int depthValue = (int)depth;
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    var rt = new UnityEngine.RenderTexture(width, height, depthValue, format);
    rt.Create();
    cam.targetTexture = rt;
    return new VideoStreamTrack(rt);
}
/// <summary>
/// Creates a video stream track that captures frames from an arbitrary <see cref="UnityEngine.Texture"/>.
/// Delegates to the main constructor, allocating an intermediate render texture sized to match
/// the source, in the render-texture format supported by the current graphics device.
/// </summary>
/// <param name="label">Identifier for the track (forwarded to the chained constructor).</param>
/// <param name="source">Texture to capture; its width/height determine the render texture size.
/// NOTE(review): a null <paramref name="source"/> will throw a NullReferenceException here
/// (dereferenced in the initializer) rather than an ArgumentNullException — confirm callers guard against null.</param>
public VideoStreamTrack(string label, UnityEngine.Texture source) : this(label, source, CreateRenderTexture(source.width, source.height, WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType)), source.width, source.height) { }
/// <summary>
/// Captures this camera's rendered output into a new <see cref="MediaStream"/>.
/// Allocates two render textures — one as the camera's render target, one as a flipped
/// copy used as the WebRTC source — registers a cleanup callback on the camera's
/// GameObject, and initializes the native video stream and encoder.
/// </summary>
/// <param name="cam">Camera whose output is captured.</param>
/// <param name="width">Texture width in pixels; must be greater than zero.</param>
/// <param name="height">Texture height in pixels; must be greater than zero.</param>
/// <param name="depth">Depth-buffer precision; only 16/24/32-bit values are accepted.</param>
/// <returns>A new <see cref="MediaStream"/> wrapping the allocated textures and native stream.</returns>
/// <exception cref="NotImplementedException">Thrown when a stream has already been captured; multiple streams are not supported.</exception>
/// <exception cref="InvalidEnumArgumentException">Thrown when <paramref name="depth"/> is not a supported value.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="width"/> or <paramref name="height"/> is not positive.</exception>
public static MediaStream CaptureStream(this Camera cam, int width, int height, RenderTextureDepth depth = RenderTextureDepth.DEPTH_24)
{
    if (camCopyRts.Count > 0)
    {
        throw new NotImplementedException("Currently not allowed multiple MediaStream");
    }

    switch (depth)
    {
        case RenderTextureDepth.DEPTH_16:
        case RenderTextureDepth.DEPTH_24:
        case RenderTextureDepth.DEPTH_32:
            break;
        default:
            throw new InvalidEnumArgumentException(nameof(depth), (int)depth, typeof(RenderTextureDepth));
    }

    // Consistency fix: validate dimensions the same way CaptureStreamTrack does;
    // previously invalid sizes reached RenderTexture construction unchecked.
    if (width <= 0 || height <= 0)
    {
        throw new ArgumentException("width and height should be greater than zero.");
    }

    int depthValue = (int)depth;
    RenderTexture[] rts = new RenderTexture[2];
    var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
    //rts[0] for render target, rts[1] for flip and WebRTC source
    rts[0] = new RenderTexture(width, height, depthValue, format);
    rts[1] = new RenderTexture(width, height, 0, format);
    rts[0].Create();
    rts[1].Create();
    camCopyRts.Add(rts);
    cam.targetTexture = rts[0];

    // Release both textures when the camera's GameObject is destroyed,
    // so the native resources do not leak.
    cam.gameObject.AddCleanerCallback(() =>
    {
        CameraExtension.RemoveRt(rts);
        rts[0].Release();
        rts[1].Release();
        UnityEngine.Object.Destroy(rts[0]);
        UnityEngine.Object.Destroy(rts[1]);
    });
    started = true;

    var stream = WebRTC.Context.CaptureVideoStream(rts[1].GetNativeTexturePtr(), width, height);
    // TODO::
    // You should initialize encoder after create stream instance.
    // This specification will change in the future.
    WebRTC.Context.InitializeEncoder();
    return new MediaStream(rts, stream);
}
/// <summary>
/// Prepares the receive-side textures for decoded video frames.
/// Creates a source texture and a same-sized destination texture (used after the flip pass),
/// and wires up the native video renderer.
/// </summary>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
/// <returns>The destination texture that decoded frames are rendered into.</returns>
/// <exception cref="InvalidOperationException">Thrown if the receiver was already initialized.</exception>
public UnityEngine.RenderTexture InitializeReceiver(int width, int height)
{
    if (IsDecoderInitialized)
    {
        throw new InvalidOperationException("Already initialized receiver");
    }

    m_needFlip = true;

    // Both textures use the render-texture format supported by the active graphics device.
    var supportedFormat = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    m_sourceTexture = CreateRenderTexture(width, height, supportedFormat);
    // Destination mirrors the source texture's dimensions.
    m_destTexture = CreateRenderTexture(m_sourceTexture.width, m_sourceTexture.height, supportedFormat);
    m_renderer = new UnityVideoRenderer(WebRTC.Context.CreateVideoRenderer(), this);

    return m_destTexture;
}
/// <summary>
/// Allocates and creates a <see cref="RenderTexture"/> (no depth buffer) in the given format,
/// after verifying the format matches the one supported by the active graphics device.
/// </summary>
/// <param name="width">Texture width in pixels.</param>
/// <param name="height">Texture height in pixels.</param>
/// <param name="format">Requested render-texture format; must equal the device-supported format.</param>
/// <returns>The created render texture.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="format"/> is not the supported streaming format.</exception>
private static RenderTexture CreateRenderTexture(int width, int height, RenderTextureFormat format)
{
    // todo::(kazuki) Increase the supported formats.
    var supportedFormat = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    if (format != supportedFormat)
    {
        throw new ArgumentException(
            $"This graphics format is not supported for streaming: {format} supportedFormat: {supportedFormat}");
    }

    var texture = new RenderTexture(width, height, 0, format);
    texture.Create();
    return texture;
}