protected void Awake()
{
    _frameQueue = new VideoFrameQueue<I420AVideoFrameStorage>(3);
    FrameQueue = _frameQueue;
    PeerConnection.OnInitialized.AddListener(OnPeerInitialized);
    PeerConnection.OnShutdown.AddListener(OnPeerShutdown);
}
private void VideoStreamStopped()
{
    FrameQueue = null;

    // Clear the video display to not confuse the user who could otherwise
    // think that the video is still playing but is lagging.
    CreateEmptyVideoTextures();
}
protected CustomVideoSource()
{
    _frameQueue = new VideoFrameQueue<T>(3);
    FrameQueue = _frameQueue;
    //PeerConnection.OnInitialized.AddListener(OnPeerInitialized);
    //PeerConnection.OnShutdown.AddListener(OnPeerShutdown);
    OnEnable();
}
private void StartNativeRendering(int width, int height)
{
    // The subscription is only used to get the frame dimensions needed to generate
    // the textures, so unsubscribe once that is done.
    _source.I420AVideoFrameReady -= I420AVideoFrameReady;
    _i420aFrameQueue = null;

    CreateEmptyVideoTextures(width, height, 128);
    _nativeVideo = new NativeVideo(_source.NativeHandle);
    RegisterRemoteTextures();
}
/// <summary>
/// Start rendering the passed source.
/// </summary>
/// <remarks>
/// Can be used to handle <see cref="VideoTrackSource.VideoStreamStarted"/> or <see cref="VideoReceiver.VideoStreamStarted"/>.
/// </remarks>
public void StartRendering(IVideoSource source)
{
    _source = source as RemoteVideoTrack;
    Debug.Assert(_source != null, "NativeVideoRender currently only supports RemoteVideoTrack");

    switch (source.FrameEncoding)
    {
        case VideoEncoding.I420A:
            _i420aFrameQueue = new VideoFrameQueue<I420AVideoFrameStorage>(2);
            _source.I420AVideoFrameReady += I420AVideoFrameReady;
            break;

        case VideoEncoding.Argb32:
            break;
    }
}
/// <summary>
/// Start rendering the passed source.
/// </summary>
/// <remarks>
/// Can be used to handle <see cref="VideoTrackSource.VideoStreamStarted"/> or <see cref="VideoReceiver.VideoStreamStarted"/>.
/// </remarks>
public void StartRendering(IVideoSource source)
{
    bool isRemote = (source is RemoteVideoTrack);
    int frameQueueSize = (isRemote ? 5 : 3);

    switch (source.FrameEncoding)
    {
        case VideoEncoding.I420A:
            _i420aFrameQueue = new VideoFrameQueue<I420AVideoFrameStorage>(frameQueueSize);
            source.I420AVideoFrameReady += I420AVideoFrameReady;
            break;

        case VideoEncoding.Argb32:
            _argb32FrameQueue = new VideoFrameQueue<Argb32VideoFrameStorage>(frameQueueSize);
            source.Argb32VideoFrameReady += Argb32VideoFrameReady;
            break;
    }
}
private void VideoStreamStarted(IVideoSource source)
{
    bool isRemote = (source is VideoReceiver);
    int frameQueueSize = (isRemote ? 5 : 3);

    var videoSrc = (IVideoSource)VideoSource;
    switch (videoSrc.FrameEncoding)
    {
        case VideoEncoding.I420A:
            _i420aFrameQueue = new VideoFrameQueue<I420AVideoFrameStorage>(frameQueueSize);
            videoSrc.RegisterCallback(I420AVideoFrameReady);
            break;

        case VideoEncoding.Argb32:
            _argb32FrameQueue = new VideoFrameQueue<Argb32VideoFrameStorage>(frameQueueSize);
            videoSrc.RegisterCallback(Argb32VideoFrameReady);
            break;
    }
}
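The snippets above only show how frames are enqueued when a stream starts. A minimal consumer sketch could drain the queue on the Unity render thread; it assumes the queue exposes TryDequeue/RecycleStorage and uses a hypothetical UploadToTextures helper, neither of which appears in the snippets above.

// Minimal consumer sketch (assumed queue API: TryDequeue/RecycleStorage;
// UploadToTextures is a hypothetical helper copying the Y/U/V/A planes).
protected void Update()
{
    if (_i420aFrameQueue != null
        && _i420aFrameQueue.TryDequeue(out I420AVideoFrameStorage frame))
    {
        // Copy the frame planes into the display textures.
        UploadToTextures(frame);

        // Return the storage to the queue's pool for reuse.
        _i420aFrameQueue.RecycleStorage(frame);
    }
}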
public Form1()
{
    InitializeComponent();

    var vframeQueue = new VideoFrameQueue(QUEUE_SIZE);
    decoder = new VideoDecoder(outputVframeQueue: vframeQueue);

    videoPlayer = new VideoPlayer(
        vframeViewer: pictureBoxVideo,
        selectedVFrameViewer: selectedFrameViewer,
        thumbnailsContainer: thumbPanel
    );
    videoPlayer.SetSourceQueue(vframeQueue);
    videoPlayer.ScrollBar = queueScroller;
    videoPlayer.Init();
}
private void VideoStreamStarted()
{
    FrameQueue = VideoSource.FrameQueue;
}
/// <summary>
/// Construct a new video bridge with the given frame queue capacity.
/// </summary>
/// <param name="queueCapacity">Video frame queue initial capacity</param>
public VideoBridge(int queueCapacity)
{
    _frameQueue = new VideoFrameQueue<I420VideoFrameStorage>(queueCapacity);
}
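A short wiring sketch for the bridge, assuming a hypothetical HandleIncomingVideoFrame producer entry point (the name is chosen for illustration and is not shown in the constructor above); a small capacity keeps latency bounded at the cost of dropping frames under load.

// Wiring sketch; HandleIncomingVideoFrame is a hypothetical method name for the
// bridge's producer side, invoked from the WebRTC frame-ready callback.
var videoBridge = new VideoBridge(queueCapacity: 3);
remoteTrack.I420VideoFrameReady += videoBridge.HandleIncomingVideoFrame;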
public void SetVideoFrameQueue(VideoFrameQueue frameQueue)
{
    this.frameQueue = frameQueue;
}
public VideoDecoder(VideoFrameQueue outputVframeQueue) : this()
{
    this.frameQueue = outputVframeQueue;
}