Example #1

internal void OnArgb32FrameReady(Argb32VideoFrame frame)
{
    MainEventSource.Log.Argb32LocalVideoFrameReady(frame.width, frame.height);
    lock (_videoFrameReadyLock)
    {
        _argb32videoFrameReady?.Invoke(frame);
    }
}
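
Invoking the delegate inside lock (_videoFrameReadyLock) only pays off if subscribing and unsubscribing go through the same lock. Below is a minimal sketch of matching event accessors; only the field and lock names come from the example above, and the Action<Argb32VideoFrame> delegate type is an assumption, not part of the original code.

// Sketch only: event accessors that pair with the locked invocation in Example #1.
// _argb32videoFrameReady and _videoFrameReadyLock are the names used above;
// the Action<Argb32VideoFrame> delegate type is an assumption.
private Action<Argb32VideoFrame> _argb32videoFrameReady;
private readonly object _videoFrameReadyLock = new object();

public event Action<Argb32VideoFrame> Argb32VideoFrameReady
{
    add { lock (_videoFrameReadyLock) { _argb32videoFrameReady += value; } }
    remove { lock (_videoFrameReadyLock) { _argb32videoFrameReady -= value; } }
}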

Example #2

/// <summary>
/// Try to enqueue a new video frame encoded in raw ARGB format.
/// If the internal queue reached its maximum capacity, do nothing and drop the frame.
/// </summary>
/// <param name="frame">The video frame to enqueue.</param>
/// <returns>Returns <c>true</c> if the frame was enqueued successfully, or <c>false</c> if it was dropped.</returns>
/// <remarks>This should only be used if the queue has storage for a compatible video frame encoding.</remarks>
public bool Enqueue(Argb32VideoFrame frame)
{
    MainEventSource.Log.VideoFrameQueueEnqueueArgb32(_traceId, (int)frame.width, (int)frame.height);

    // ARGB32 is 4 bytes per pixel, so each row must span at least (width * 4) bytes.
    Debug.Assert(frame.stride >= frame.width * 4);

    double curTime = _stopwatch.Elapsed.TotalMilliseconds;

    // Always update the queued time, which refers to calling Enqueue(), even
    // if the queue is full and the frame is dropped.
    float queuedDt = (float)(curTime - _lastQueuedTimeMs);
    _lastQueuedTimeMs = curTime;

    // Try to get some storage for the new frame
    ulong byteSize = (ulong)frame.stride * frame.height;
    T storage = GetStorageFor(byteSize);
    if (storage == null)
    {
        // Too many frames in queue, drop the current one
        MainEventSource.Log.VideoFrameQueueDropArgb32(_traceId, (int)frame.width, (int)frame.height);
        float droppedDt = (float)(curTime - _lastDroppedTimeMs);
        _lastDroppedTimeMs = curTime;
        _droppedFrameTimeAverage.Push(droppedDt);
        return false;
    }

    // Copy the new frame into its storage
    unsafe
    {
        fixed (void* dst = storage.Buffer)
        {
            void* src = (void*)frame.data;
            Utils.MemCpyStride(dst, frame.stride, src, frame.stride, (int)frame.width * 4, (int)frame.height);
        }
    }
    storage.Width = frame.width;
    storage.Height = frame.height;

    // Enqueue for later delivery
    _frameQueue.Enqueue(storage);
    _queuedFrameTimeAverage.Push(queuedDt);
    _droppedFrameTimeAverage.Push((float)(curTime - _lastDroppedTimeMs));
    return true;
}
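
A typical way to use Enqueue is to call it from a frame-ready callback like the ones shown here and drain the queue elsewhere, for example from a render loop. The sketch below assumes the surrounding library provides a VideoFrameQueue<T>(int maxCount) constructor, an Argb32VideoFrameStorage storage type and a TryDequeue method; none of these appear in the snippets above, so treat them as assumptions, and track stands in for any object exposing the Argb32VideoFrameReady event from Example #3.

// Producer side: copy each incoming ARGB32 frame into pooled storage.
// A small capacity keeps latency low; Enqueue drops frames when the queue is full.
var frameQueue = new VideoFrameQueue<Argb32VideoFrameStorage>(3);
track.Argb32VideoFrameReady += (Argb32VideoFrame frame) =>
{
    // Returns false when the queue was full and the frame was dropped.
    frameQueue.Enqueue(frame);
};

// Consumer side, e.g. on a render thread: take the next queued frame if any.
if (frameQueue.TryDequeue(out Argb32VideoFrameStorage storage))
{
    // Upload storage.Buffer (storage.Width x storage.Height pixels) to a texture, etc.
}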

Example #3

internal void OnArgb32FrameReady(Argb32VideoFrame frame)
{
    MainEventSource.Log.Argb32RemoteVideoFrameReady(frame.width, frame.height);
    Argb32VideoFrameReady?.Invoke(frame);
}

Example #4

void VideoTrackSourceInterop.IVideoSource.OnArgb32FrameReady(Argb32VideoFrame frame)
{
    MainEventSource.Log.Argb32RemoteVideoFrameReady(frame.width, frame.height);
    _argb32VideoFrameReady?.Invoke(frame);
}