/// <summary>
/// Creates a new VideoStreamTrack object.
/// The track is created with the source texture pointer <paramref name="texturePtr"/>.
/// Note that the streamed video might be flipped vertically if no corrective action is taken.
/// In most cases, using one of the other constructors avoids this problem.
///
/// See Also: Texture.GetNativeTexturePtr
/// </summary>
/// <param name="texturePtr"></param>
/// <param name="width"></param>
/// <param name="height"></param>
/// <param name="format"></param>
public VideoStreamTrack(IntPtr texturePtr, int width, int height, GraphicsFormat format)
    : this(Guid.NewGuid().ToString(), new VideoTrackSource())
{
    WebRTC.ValidateTextureSize(width, height, Application.platform, WebRTC.GetEncoderType());
    WebRTC.ValidateGraphicsFormat(format);
    WebRTC.Context.SetVideoEncoderParameter(GetSelfOrThrow(), width, height, format, texturePtr);
    WebRTC.Context.InitializeEncoder(GetSelfOrThrow());
}
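// A minimal usage sketch (illustrative only, not part of the class): constructing a track
// from a render texture's native pointer. The variable `rt` and the chosen GraphicsFormat
// are assumptions; the format must be one supported by the active encoder.
//
//     var rt = new RenderTexture(1280, 720, 0, GraphicsFormat.B8G8R8A8_SRGB);
//     rt.Create();
//     var track = new VideoStreamTrack(rt.GetNativeTexturePtr(), rt.width, rt.height, rt.graphicsFormat);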
private static RenderTexture CreateRenderTexture(int width, int height, GraphicsFormat format)
{
    WebRTC.ValidateGraphicsFormat(format);
    var tex = new RenderTexture(width, height, 0, format);
    tex.Create();
    return tex;
}
/// <summary>
/// Creates a new VideoStreamTrack object.
/// The track is created with the given label and the source texture pointer <paramref name="texturePtr"/>.
/// Note that the streamed video might be flipped vertically if no corrective action is taken.
/// In most cases, using one of the other constructors avoids this problem.
///
/// See Also: Texture.GetNativeTexturePtr
/// </summary>
/// <param name="label"></param>
/// <param name="texturePtr"></param>
/// <param name="width"></param>
/// <param name="height"></param>
/// <param name="format"></param>
public VideoStreamTrack(string label, IntPtr texturePtr, int width, int height, GraphicsFormat format)
    : base(WebRTC.Context.CreateVideoTrack(label))
{
    WebRTC.ValidateTextureSize(width, height, Application.platform);
    WebRTC.ValidateGraphicsFormat(format);
    WebRTC.Context.SetVideoEncoderParameter(GetSelfOrThrow(), width, height, format, texturePtr);
    WebRTC.Context.InitializeEncoder(GetSelfOrThrow());
    tracks.Add(this);
}
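// A brief sketch of the labelled overload (illustrative only, reusing the hypothetical `rt`
// from the sketch above): the explicit string is used as the track label in place of a
// randomly generated GUID.
//
//     var track = new VideoStreamTrack("camera-feed", rt.GetNativeTexturePtr(),
//         rt.width, rt.height, rt.graphicsFormat);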
/// <summary>
/// Creates a new VideoStreamTrack object.
/// The track is created with the source texture pointer <paramref name="texturePtr"/>.
/// Note that the streamed video might be flipped vertically if no corrective action is taken.
/// In most cases, using one of the other constructors avoids this problem.
///
/// See Also: Texture.GetNativeTexturePtr
/// </summary>
/// <param name="texturePtr"></param>
/// <param name="width"></param>
/// <param name="height"></param>
/// <param name="format"></param>
/// <param name="needFlip"></param>
public VideoStreamTrack(IntPtr texturePtr, int width, int height, GraphicsFormat format, bool needFlip)
    : this(Guid.NewGuid().ToString(), new VideoTrackSource(), needFlip)
{
    var error = WebRTC.ValidateTextureSize(width, height, Application.platform, WebRTC.GetEncoderType());
    if (error.errorType != RTCErrorType.None)
    {
        throw new ArgumentException(error.message);
    }
    WebRTC.ValidateGraphicsFormat(format);
    WebRTC.Context.SetVideoEncoderParameter(GetSelfOrThrow(), width, height, format, texturePtr);
    WebRTC.Context.InitializeEncoder(GetSelfOrThrow());
}
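// A minimal sketch of the flip-aware overload (illustrative only, `rt` is an assumed
// RenderTexture as above): passing needFlip as true requests a vertical flip of the source,
// which is typically what is wanted when the rendered texture would otherwise arrive
// upside down on the remote side.
//
//     var track = new VideoStreamTrack(rt.GetNativeTexturePtr(), rt.width, rt.height,
//         rt.graphicsFormat, needFlip: true);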
internal VideoStreamTrack(Texture texture, RenderTexture dest, string label, VideoTrackSource source, bool needFlip)
    : base(WebRTC.Context.CreateVideoTrack(label, source.self))
{
    var error = WebRTC.ValidateTextureSize(texture.width, texture.height, Application.platform);
    if (error.errorType != RTCErrorType.None)
    {
        throw new ArgumentException(error.message);
    }
    WebRTC.ValidateGraphicsFormat(texture.graphicsFormat);
    if (!s_tracks.TryAdd(self, new WeakReference<VideoStreamTrack>(this)))
    {
        throw new InvalidOperationException();
    }
    m_source = source;
    m_source.sourceTexture_ = texture;
    m_source.destTexture_ = dest;
    m_source.needFlip_ = needFlip;
}