コード例 #1
0
        /// <summary>
        /// Writes the raw frame into <paramref name="tex"/>, recreating the texture
        /// when it is null or its dimensions do not match the frame.
        /// </summary>
        /// <param name="tex">Target texture; replaced when null or wrongly sized.</param>
        /// <param name="frame">Source frame whose pixel buffer is uploaded.</param>
        /// <returns>True if a new texture object was created.</returns>
        protected bool UpdateTexture(ref Texture2D tex, RawFrame frame)
        {
            //drop a texture whose size no longer matches the incoming frame
            bool sizeMismatch = tex != null && (tex.width != frame.Width || tex.height != frame.Height);
            if (sizeMismatch)
            {
                Texture2D.Destroy(tex);
                tex = null;
            }

            bool created = tex == null;
            if (created)
            {
                Debug.Log("Creating new texture with resolution " + frame.Width + "x" + frame.Height + " Format:" + mMediaConfig.Format);
                //ABGR maps onto RGBA32; anything else falls back to YUY2 (not yet properly supported)
                TextureFormat texFormat = mMediaConfig.Format == FramePixelFormat.ABGR
                    ? TextureFormat.RGBA32
                    : TextureFormat.YUY2;
                tex = new Texture2D(frame.Width, frame.Height, texFormat, false);
                tex.wrapMode = TextureWrapMode.Clamp;
            }

            //copy image data into the texture and apply
            tex.LoadRawTextureData(frame.Buffer);
            tex.Apply();
            return created;
        }
コード例 #2
0
    /// <summary>
    /// Updates the local video. If the frame is null it will hide the video image
    /// and reset the related local-video state.
    /// </summary>
    /// <param name="frame">New local frame, or null to hide the local video.</param>
    /// <param name="format">Pixel format of the frame.</param>
    public virtual void UpdateLocalTexture(RawFrame frame, FramePixelFormat format)
    {
        if (uLocalVideoImage == null)
        {
            return;
        }

        if (frame == null)
        {
            //app shutdown. reset values
            mHasLocalVideo                      = false;
            uLocalVideoImage.texture            = null;
            uLocalVideoImage.transform.rotation = Quaternion.Euler(0, 0, 0);
            uLocalVideoImage.gameObject.SetActive(false);
            return;
        }

        UpdateTexture(ref mLocalVideoTexture, frame, format);
        uLocalVideoImage.texture = mLocalVideoTexture;

        //make sure the image is visible again
        if (!uLocalVideoImage.gameObject.activeSelf)
        {
            uLocalVideoImage.gameObject.SetActive(true);
        }

        //apply rotation
        //watch out uLocalVideoImage should be scaled -1 X to make the local camera appear mirrored
        //it should also be scaled -1 Y because Unity reads the image from bottom to top
        uLocalVideoImage.transform.rotation = Quaternion.Euler(0, 0, frame.Rotation);

        mHasLocalVideo = true;
        mLocalFrameCounter++;
        mLocalVideoWidth  = frame.Width;
        mLocalVideoHeight = frame.Height;
        mLocalVideoFormat = format;
    }
コード例 #3
0
        /// <summary>
        /// Polls the media network for new local and remote video frames and
        /// pushes any frame found into the texture update path.
        /// </summary>
        protected virtual void HandleMediaEvents()
        {
            //debug switches to disable either direction
            bool handleLocalFrames  = true;
            bool handleRemoteFrames = true;

            if (handleLocalFrames && mMediaNetwork != null)
            {
                //ConnectionId.INVALID addresses our own (local) stream
                var ownFrame = mMediaNetwork.TryGetFrame(ConnectionId.INVALID);
                if (ownFrame != null)
                {
                    UpdateTexture(ownFrame);
                }
            }

            if (handleRemoteFrames && mMediaNetwork != null)
            {
                //so far the loop shouldn't be needed. we only expect one connection
                foreach (var connectionId in mConnectionIds)
                {
                    //re-check: handling a frame might have torn the network down
                    if (mMediaNetwork == null)
                    {
                        continue;
                    }
                    var incoming = mMediaNetwork.TryGetFrame(connectionId);
                    if (incoming != null)
                    {
                        UpdateTexture(incoming);
                    }
                }
            }
        }
コード例 #4
0
        /// <summary>
        /// Blocking receive loop: reads from the socket into a reusable buffer,
        /// deserializes every complete frame and forwards it to the parent actor.
        /// Runs until cancellation, a socket error, or an OperationCanceledException.
        /// </summary>
        /// <param name="state">A boxed (CancellationToken, IContext) tuple.</param>
        void PollThreadMain(Object state)
        {
            var (cancellationToken, context) = ((CancellationToken, IContext))state;

            try
            {
                while (cancellationToken.IsCancellationRequested == false)
                {
                    //append incoming bytes behind the data already buffered
                    var bytesReceived = _socket.Receive(_frameBuffer.Slice(_frameBufferSize).Span);

                    if (bytesReceived > 0)
                    {
                        _log.LogDebug("Received {bytes} bytes", bytesReceived);

                        _frameBufferSize += (UInt16)bytesReceived;

                        //drain every complete frame currently in the buffer
                        while (RawFrame.Deserialize(_frameBuffer.Slice(0, _frameBufferSize).Span, out var frame, out var surplus))
                        {
                            context.Send(context.Parent, (Inbound, frame));

                            //compact: move the undecoded tail (surplus) to the front.
                            //BUGFIX: the old code sliced with length _frameBufferSize,
                            //which copied past the end of the valid data; only
                            //surplus.Length bytes remain after `consumed` bytes.
                            var consumed = _frameBufferSize - surplus.Length;
                            _frameBuffer.Slice(consumed, surplus.Length).CopyTo(_frameBuffer);
                            _frameBufferSize -= (UInt16)consumed;
                        }
                    }
                }
            }
            catch (SocketException)
            {
                // TODO: Publish a disconnected event, or just let the actor die?
                // Or both?
            }
            catch (OperationCanceledException)
            {
                // Let the thread terminate...
            }
        }
コード例 #5
0
        /// <summary>
        /// Writes the raw frame into the given texture or creates it if null or
        /// of the wrong width/height.
        /// </summary>
        /// <param name="tex">Target texture; replaced when null or wrongly sized.</param>
        /// <param name="frame">Source frame whose pixel buffer is uploaded.</param>
        /// <param name="format">Pixel format of the incoming frame.</param>
        /// <returns>True if a new texture object was created.</returns>
        public static bool UpdateTexture(ref Texture2D tex, RawFrame frame, FramePixelFormat format)
        {
            bool newTextureCreated = false;

            //texture exists but has the wrong height /width? -> destroy it and set the value to null
            if (tex != null && (tex.width != frame.Width || tex.height != frame.Height))
            {
                Texture2D.Destroy(tex);
                tex = null;
            }
            //no texture? create a new one first
            if (tex == null)
            {
                newTextureCreated = true;
                //current default format for compatibility reasons
                if (format == FramePixelFormat.ABGR)
                {
                    tex = new Texture2D(frame.Width, frame.Height, TextureFormat.RGBA32, false);
                }
                else
                {
                    //BUGFIX: previously tex stayed null for non-ABGR formats and
                    //LoadRawTextureData below threw a NullReferenceException.
                    //YUY2 is a best-effort fallback, matching the other overloads
                    //of this method elsewhere in the codebase.
                    tex = new Texture2D(frame.Width, frame.Height, TextureFormat.YUY2, false);
                }
                tex.wrapMode = TextureWrapMode.Clamp;
            }
            //copy image data into the texture and apply
            tex.LoadRawTextureData(frame.Buffer);
            tex.Apply();
            return(newTextureCreated);
        }
コード例 #6
0
ファイル: MediaStream.cs プロジェクト: Grandbrain/RTSPPlayer
        /// <summary>
        /// Raises an appropriate event when a media frame is received, deciding
        /// whether codec metadata must accompany the frame. Metadata is attached
        /// at most once per _metadataFrequency interval, tracked separately for
        /// the audio and the video stream.
        /// </summary>
        /// <param name="sender">Sender object.</param>
        /// <param name="mediaFrame">Media frame.</param>
        private void OnFrameReceived(object?sender, RawFrame mediaFrame)
        {
            var metadataRequired = false;
            var now = DateTime.UtcNow;

            switch (mediaFrame)
            {
            //audio: attach metadata whenever the audio interval has elapsed
            case RawAudioFrame _ when now >= _audioMetadataTime + _metadataFrequency:
                metadataRequired   = true;
                _audioMetadataTime = now;
                break;

            //H264: metadata only makes sense on an I-frame, so the timer only
            //advances when one was actually seen.
            //NOTE(review): this case must stay before the RawVideoFrame case —
            //presumably RawH264Frame derives from RawVideoFrame, so reordering
            //would change which branch matches. TODO confirm the type hierarchy.
            case RawH264Frame _ when now >= _videoMetadataTime + _metadataFrequency:
                metadataRequired   = mediaFrame is RawH264IFrame;
                _videoMetadataTime = metadataRequired ? now : _videoMetadataTime;
                break;

            //any other video: attach metadata whenever the video interval has elapsed
            case RawVideoFrame _ when now >= _videoMetadataTime + _metadataFrequency:
                metadataRequired   = true;
                _videoMetadataTime = now;
                break;

            case null: return;
            }

            //frames that fall outside the intervals are still counted and
            //forwarded, just without metadata
            Interlocked.Increment(ref _totalFramesReceived);
            FrameReceived?.Invoke(this, mediaFrame, metadataRequired);
        }
コード例 #7
0
        /// <summary>
        /// Decodes an incoming raw audio frame into the reusable buffer and
        /// raises FrameReceived with the decoded result. Non-audio frames are ignored.
        /// </summary>
        private void OnFrameReceived(object sender, RawFrame rawFrame)
        {
            //only audio frames are handled here
            if (rawFrame is RawAudioFrame audioFrame)
            {
                FFmpegAudioDecoder decoder = GetDecoderForFrame(audioFrame);

                if (decoder.TryDecode(audioFrame, out int decodedFrameSize))
                {
                    //grow the shared buffer when the decoded frame does not fit
                    if (_decodedFrameBuffer.Length < decodedFrameSize)
                    {
                        _decodedFrameBuffer = new byte[decodedFrameSize];
                    }

                    var segment = new ArraySegment <byte>(_decodedFrameBuffer, 0, decodedFrameSize);

                    FrameReceived?.Invoke(this, decoder.GetDecodedFrame(segment));
                }
            }
        }
コード例 #8
0
 /// <summary>
 /// Updates the remote video. If the frame is null it will switch back to the
 /// "no camera" placeholder image and reset the rotation.
 /// </summary>
 /// <param name="frame">New remote frame, or null when no remote video exists.</param>
 /// <param name="format">Pixel format of the frame.</param>
 public virtual void UpdateRemoteTexture(RawFrame frame, FramePixelFormat format)
 {
     if (uRemoteVideoImage == null)
     {
         return;
     }

     if (frame == null)
     {
         //no remote video -> show the placeholder and reset state
         mHasRemoteVideo                      = false;
         uRemoteVideoImage.texture            = uNoCameraTexture;
         uRemoteVideoImage.transform.rotation = Quaternion.Euler(0, 0, 0);
         return;
     }

     UpdateTexture(ref mRemoteVideoTexture, frame, format);
     uRemoteVideoImage.texture = mRemoteVideoTexture;

     //watch out: due to conversion from WebRTC to Unity format the image is flipped
     //(top to bottom) which also inverts the rotation
     uRemoteVideoImage.transform.rotation = Quaternion.Euler(0, 0, frame.Rotation * -1);

     mHasRemoteVideo    = true;
     mRemoteVideoWidth  = frame.Width;
     mRemoteVideoHeight = frame.Height;
     mRemoteVideoFormat = format;
     mRemoteFrameCounter++;
 }
コード例 #9
0
        /// <summary>
        /// Converts the frame to BGR24, wraps it in a bitmap and forwards it,
        /// together with the detection boxes, to the markup service.
        /// </summary>
        private async Task MarkupImage(int frameIndex, RawFrame frame, IEnumerable <SSDProcessor.BoundingBox> boxes, CancellationToken ct)
        {
            var pixelConverter = new ImageConvert();

            using var bgrFrame = pixelConverter.Convert(frame, frame.Width, frame.Height, FFmpeg.AutoGen.AVPixelFormat.AV_PIX_FMT_BGR24);
            using var image    = GetBitmap(bgrFrame);

            await _markup.MarkupImage(frameIndex, image, boxes, ct);
        }
コード例 #10
0
 /// <summary>
 /// Captures the per-slot decoding state (slot index, frame handler, the raw
 /// frame, its decoder and the native buffer pointer) in a single snapshot.
 /// </summary>
 public State(int index, RawFrameHandler rawFrameHandler, RawFrame frame, FrameDecoder decoder, IntPtr buffer)
 {
     Index = index;
     RawFrameHandler = rawFrameHandler;
     Frame = frame;
     Decoder = decoder;
     Buffer = buffer;
 }
コード例 #11
0
ファイル: program.cs プロジェクト: windygu/JenkinsVMSTest
        /// <summary>
        /// Decodes a received video frame to BGRA32 at STREAM_WIDTH x STREAM_HEIGHT
        /// and attempts to save it as a JPG, using an OS-specific output path.
        /// NOTE(review): this snippet appears truncated/redacted by the code-example
        /// scraper — `im` is never declared and the `if` that the brace below closes
        /// (presumably a Windows-path branch) is missing. Do not compile as-is.
        /// </summary>
        private static void RtspClient_FrameReceived(object sender, RawFrame rawFrame)
        {
            //only video frames are decoded here
            if (!(rawFrame is RawVideoFrame rawVideoFrame))
            {
                return;
            }

            FFmpegVideoDecoder decoder      = GetDecoderForFrame(rawVideoFrame);
            IDecodedVideoFrame decodedFrame = decoder.TryDecode(rawVideoFrame);

            if (decodedFrame != null)
            {
                var _FrameType = rawFrame is RawH264IFrame ? "IFrame" : "PFrame";
                TransformParameters _transformParameters = new TransformParameters(RectangleF.Empty,
                                                                                   new Size(STREAM_WIDTH, STREAM_HEIGHT),
                                                                                   ScalingPolicy.Stretch, PixelFormat.Bgra32, ScalingQuality.FastBilinear);

                //copy the transformed picture out of native memory (4 bytes per BGRA pixel)
                var    pictureSize      = STREAM_WIDTH * STREAM_HEIGHT;
                IntPtr unmanagedPointer = Marshal.AllocHGlobal(pictureSize * 4);

                decodedFrame.TransformTo(unmanagedPointer, STREAM_WIDTH * 4, _transformParameters);
                byte[] managedArray = new byte[pictureSize * 4];
                Marshal.Copy(unmanagedPointer, managedArray, 0, pictureSize * 4);
                Marshal.FreeHGlobal(unmanagedPointer);
                Console.WriteLine($"Frame was successfully decoded! {_FrameType } Trying to save to JPG file...");
                try
                {
                    string mypath = "empty";//initialize


                    //NOTE(review): the lines between here and the closing brace below
                    //belong to a redacted block; `im` (the bitmap to save) and the
                    //opening `if` are missing from the captured source.
                    //request.AddFile("f1", "http://*****:*****@"E:\learning\testphoto\image21.jpg", ImageFormat.Jpeg);
                        im.Save(@mypath, ImageFormat.Jpeg);
                        return;
                    }
                    if (isLinux)
                    {
                        // Change to your path
                        mypath = Path.Combine(WebHostEnvironment.WebRootPath, "uploads/", "1", "image21.jpg");//linux path
                        im.Save(@mypath, ImageFormat.Jpeg);
                        return;
                    }
                    throw new PlatformNotSupportedException("Not supported OS platform!!");
                }
                catch (Exception e)
                {
                    Console.WriteLine($"Error saving to file: {e.Message}");
                    Debug.WriteLine($"Error saving to file: {e.Message}");
                    Debug.WriteLine($"Stack trace: {e.StackTrace}");
                }
            }
        }
コード例 #12
0
 /// <summary>
 /// Updates the frame for a connection id. If the id is new it will create a
 /// visible image for it. The frame can be null for connections that
 /// don't sent frames.
 /// </summary>
 /// <param name="id">Connection the frame belongs to.</param>
 /// <param name="frame">The received frame.</param>
 private void UpdateFrame(ConnectionId id, RawFrame frame)
 {
     //ids without a registered ui element are ignored
     if (mVideoUiElements.ContainsKey(id) == false)
     {
         return;
     }
     VideoData videoData = mVideoUiElements[id];
     UpdateTexture(ref videoData.texture, frame);
     videoData.image.texture = videoData.texture;
 }
コード例 #13
0
        /// <summary>
        /// Decodes an incoming raw video frame into the reusable BGR24 buffer
        /// at the decoded resolution and raises FrameReceived.
        /// Non-video frames and undecodable frames are ignored.
        /// </summary>
        private void OnFrameReceived(object sender, RawFrame rawFrame)
        {
            if (!(rawFrame is RawVideoFrame rawVideoFrame))
            {
                return;
            }

            FFmpegVideoDecoder decoder = GetDecoderForFrame(rawVideoFrame);

            if (!decoder.TryDecode(rawVideoFrame, out DecodedVideoFrameParameters decodedFrameParameters))
            {
                return;
            }

            //always render at the decoded resolution (the commented-out
            //"desired size" experiment was removed as dead code)
            int targetWidth  = decodedFrameParameters.Width;
            int targetHeight = decodedFrameParameters.Height;

            int bufferSize = decodedFrameParameters.Height *
                             ImageUtils.GetStride(decodedFrameParameters.Width, PixelFormat.Bgr24);

            //reuse the buffer; reallocate only when the frame size changes
            if (_decodedFrameBuffer.Length != bufferSize)
            {
                _decodedFrameBuffer = new byte[bufferSize];
            }

            var bufferSegment = new ArraySegment <byte>(_decodedFrameBuffer);

            var postVideoDecodingParameters = new PostVideoDecodingParameters(RectangleF.Empty,
                                                                              new Size(targetWidth, targetHeight),
                                                                              ScalingPolicy.Stretch, PixelFormat.Bgr24, ScalingQuality.Bicubic);

            IDecodedVideoFrame decodedFrame = decoder.GetDecodedFrame(bufferSegment, postVideoDecodingParameters);

            FrameReceived?.Invoke(this, decodedFrame);
        }
コード例 #14
0
 /// <summary>
 /// Updates the ui with the new raw frame.
 /// Does nothing when no output image exists or the frame is null.
 /// </summary>
 /// <param name="frame">The received frame, may be null.</param>
 private void UpdateTexture(RawFrame frame)
 {
     if (uVideoOutput == null || frame == null)
     {
         return;
     }
     UpdateTexture(ref mVideoTexture, frame);
     uVideoOutput.texture = mVideoTexture;
 }
コード例 #15
0
ファイル: MediaSystem.cs プロジェクト: Grandbrain/RTSPPlayer
        /// <summary>
        /// Called when a media frame is received; wraps it into a network frame
        /// and sends it to the configured endpoint.
        /// </summary>
        /// <param name="sender">Sender object; must be an IMediaStream.</param>
        /// <param name="mediaFrame">Media frame.</param>
        /// <param name="metadataRequired">Indicates whether to include metadata.</param>
        private void OnFrameReceived(object sender, RawFrame mediaFrame, bool metadataRequired)
        {
            //only forward frames from a known media stream, and only when a target exists
            if (sender is IMediaStream mediaStream && _endPoint != null)
            {
                var networkFrame = CreateNetworkFrame(Name, mediaStream.Name, mediaStream.TotalFramesReceived,
                                                      mediaFrame, metadataRequired);

                _networkStream.TrySend(networkFrame, _endPoint);
            }
        }
コード例 #16
0
ファイル: MediaSystem.cs プロジェクト: Grandbrain/RTSPPlayer
        /// <summary>
        /// Creates media frame data segments. Without metadata the payload is a
        /// single 0 marker byte followed by the frame bytes; with metadata a
        /// 19-byte header (marker 1, 10-byte codec name, bits per coded unit,
        /// config length) and an optional codec config segment are prepended.
        /// </summary>
        /// <param name="mediaFrame">Media frame.</param>
        /// <param name="metadataRequired">Indicates whether to include metadata.</param>
        /// <returns>An array of data segments.</returns>
        private static ArraySegment <byte>[] CreateDataSegments(RawFrame mediaFrame, bool metadataRequired)
        {
            if (!metadataRequired)
            {
                //marker byte 0 -> "no metadata follows"
                return new[] { new byte[] { 0 }, mediaFrame.FrameSegment };
            }

            var codecName = mediaFrame switch
            {
                RawAACFrame _ => "AAC",
                RawG711AFrame _ => "G711A",
                RawG711UFrame _ => "G711U",
                RawG726Frame _ => "G726",
                RawPCMFrame _ => "PCM",
                RawH264IFrame _ => "H264",
                RawH264PFrame _ => "H264",
                RawJpegFrame _ => "MJPEG",
                _ => string.Empty
            };

            //only G726 carries a per-sample bit count
            var bitsPerCodedUnit = mediaFrame switch
            {
                RawG726Frame rawG726Frame => rawG726Frame.BitsPerCodedSample,
                _ => 0
            };

            //codec-specific out-of-band configuration (AAC config / H264 SPS+PPS)
            var configSegment = mediaFrame switch
            {
                RawAACFrame rawAacFrame => rawAacFrame.ConfigSegment,
                RawH264IFrame rawH264IFrame => rawH264IFrame.SpsPpsSegment,
                _ => default
            };

            //codec name is padded/truncated to exactly 10 bytes
            var codecBytes = Encoding.UTF8.GetBytes(codecName);
            Array.Resize(ref codecBytes, 10);

            //header layout: 1 marker + 10 codec + 4 bits-per-unit + 4 config length = 19
            var metaSegment = new byte[19];

            using var stream = new MemoryStream(metaSegment);
            using var writer = new EndianBinaryWriter(stream);

            writer.Write((byte)1);
            writer.Write(codecBytes);
            writer.Write(bitsPerCodedUnit);
            writer.Write(configSegment.Count);

            return configSegment.Count > 0
                ? new[] { metaSegment, configSegment, mediaFrame.FrameSegment }
                : new[] { metaSegment, mediaFrame.FrameSegment };
        }
コード例 #17
0
        /// <summary>
        /// Decodes an incoming raw video frame and raises FrameReceived with the
        /// decoded result. Non-video and undecodable frames are ignored.
        /// </summary>
        private void OnFrameReceived(object sender, RawFrame rawFrame)
        {
            //video frames only; everything else is ignored
            if (rawFrame is RawVideoFrame videoFrame)
            {
                FFmpegVideoDecoder decoder = GetDecoderForFrame(videoFrame);
                IDecodedVideoFrame decoded = decoder.TryDecode(videoFrame);

                if (decoded != null)
                {
                    FrameReceived?.Invoke(this, decoded);
                }
            }
        }
コード例 #18
0
ファイル: AnimFrame.cs プロジェクト: VividMaster/StarKnights
 /// <summary>
 /// Serializes this animation frame: width, height, then the R/G/B/A bytes of
 /// every pixel in row-major order, and finally the frame duration.
 /// </summary>
 public void Write(BinaryWriter w)
 {
     w.Write(RawFrame.Width);
     w.Write(RawFrame.Height);
     //row-major traversal: y outer, x inner — must match the reader
     for (int row = 0; row < RawFrame.Height; row++)
     {
         for (int col = 0; col < RawFrame.Width; col++)
         {
             var color = RawFrame.GetPixel(col, row);
             w.Write(color.R);
             w.Write(color.G);
             w.Write(color.B);
             w.Write(color.A);
         }
     }
     w.Write(Duration);
 }
コード例 #19
0
 /// <summary>
 /// Copies the raw frame into the texture, recreating the texture when it is
 /// null or its size no longer matches the frame.
 /// </summary>
 /// <param name="tex">Target texture; replaced when null or wrongly sized.</param>
 /// <param name="frame">Source frame whose pixel buffer is uploaded.</param>
 private void UpdateTexture(ref Texture2D tex, RawFrame frame)
 {
     //size changed -> throw the old texture away
     bool wrongSize = tex != null && (tex.width != frame.Width || tex.height != frame.Height);
     if (wrongSize)
     {
         Texture2D.Destroy(tex);
         tex = null;
     }
     //create a fresh RGBA32 texture on demand
     if (tex == null)
     {
         tex          = new Texture2D(frame.Width, frame.Height, TextureFormat.RGBA32, false);
         tex.wrapMode = TextureWrapMode.Clamp;
     }
     //upload the new pixel data and apply
     tex.LoadRawTextureData(frame.Buffer);
     tex.Apply();
 }
コード例 #20
0
        /// <summary>
        /// Decodes an incoming raw video frame and raises FrameReceived with the
        /// decoded result, tracing every incoming video frame to the console.
        /// </summary>
        private void OnFrameReceived(object sender, RawFrame rawFrame)
        {
            //video frames only
            if (rawFrame is RawVideoFrame videoFrame)
            {
                Console.WriteLine($"OnFrameReceived sender : {sender.GetType()}, rawFrame : {rawFrame.GetType()}");

                FFmpegVideoDecoder decoder = GetDecoderForFrame(videoFrame);
                IDecodedVideoFrame decoded = decoder.TryDecode(videoFrame);

                if (decoded != null)
                {
                    FrameReceived?.Invoke(this, decoded);
                }
            }
        }
コード例 #21
0
        /// <summary>
        /// Invokes the FrameReceived delegate under the hybrid lock so that
        /// subscribers observe frames serially.
        /// </summary>
        /// <param name="frame">The generated frame passed to subscribers.</param>
        private void OnFrameGeneratedThreadSafe(RawFrame frame)
        {
            //BUGFIX: snapshot the delegate. The previous code re-read FrameReceived
            //after the null check, so a concurrent unsubscribe could turn it null
            //in between and cause a NullReferenceException inside the lock.
            var handler = FrameReceived;

            if (handler == null)
            {
                return;
            }

            _hybridLock.Enter();

            try
            {
                handler.Invoke(frame);
            }
            finally
            {
                _hybridLock.Leave();
            }
        }
コード例 #22
0
ファイル: MediaSystem.cs プロジェクト: Grandbrain/RTSPPlayer
        /// <summary>
        /// Creates a new network frame from a media frame.
        /// </summary>
        /// <param name="systemName">System name.</param>
        /// <param name="streamName">Stream name.</param>
        /// <param name="totalFramesReceived">Total number of frames received by the media stream so far.</param>
        /// <param name="mediaFrame">Media frame.</param>
        /// <param name="metadataRequired">Indicates whether to include metadata.</param>
        /// <returns>A new network frame.</returns>
        private static NetworkFrame CreateNetworkFrame(string systemName, string streamName, int totalFramesReceived,
                                                       RawFrame mediaFrame, bool metadataRequired)
        {
            //1 = audio, 2 = video, 0 = unknown frame kind
            byte interpretation = mediaFrame switch
            {
                RawAudioFrame _ => 1,
                RawVideoFrame _ => 2,
                _ => 0
            };

            var dataSegments = CreateDataSegments(mediaFrame, metadataRequired);

            return new NetworkFrame(
                (ulong)Chrono.GetUniqueTimestamp64(),
                (uint)totalFramesReceived,
                interpretation,
                systemName,
                streamName,
                dataSegments);
        }
コード例 #23
0
    /// <summary>
    /// Updates the texture based on the given frame update, recreating the
    /// texture when it is null or of the wrong size.
    /// </summary>
    /// <param name="tex">Target texture; replaced when null or wrongly sized.</param>
    /// <param name="frame">Source frame whose pixel buffer is uploaded.</param>
    /// <param name="format">Pixel format of the incoming frame.</param>
    /// <returns>True if a complete new texture was created.</returns>
    protected bool UpdateTexture(ref Texture2D tex, RawFrame frame, FramePixelFormat format)
    {
        //recreate the texture when the incoming frame size differs
        if (tex != null && (tex.width != frame.Width || tex.height != frame.Height))
        {
            Texture2D.Destroy(tex);
            tex = null;
        }

        bool createdNew = tex == null;
        if (createdNew)
        {
            Debug.Log("Creating new texture with resolution " + frame.Width + "x" + frame.Height + " Format:" + format);

            //so far only ABGR is really supported. this will change later
            if (format == FramePixelFormat.ABGR)
            {
                tex = new Texture2D(frame.Width, frame.Height, TextureFormat.RGBA32, false);
            }
            else
            {
                Debug.LogWarning("YUY2 texture is set. This is only for testing");
                tex = new Texture2D(frame.Width, frame.Height, TextureFormat.YUY2, false);
            }

            tex.wrapMode = TextureWrapMode.Clamp;
        }

        //copy image data into the texture and apply.
        //Watch out: the RawImage has the top pixels in the top row but unity has
        //them in the bottom row, so the result is flipped. Fixing that here would
        //waste CPU, so the UI sets scale.Y of the element to -1 instead.
        tex.LoadRawTextureData(frame.Buffer);
        tex.Apply();
        return createdNew;
    }
コード例 #24
0
        /// <summary>
        /// Converts a raw audio frame to a decoded frame and dispatches it to the
        /// UI thread. Non-audio and undecodable frames are ignored.
        /// </summary>
        private void RtspClientOnFrameReceived(object sender, RawFrame rawFrame)
        {
            if (rawFrame is RawAudioFrame rawAudioFrame)
            {
                FFmpegAudioDecoder audioDecoder = GetDecoderForFrame(rawAudioFrame);

                if (audioDecoder.TryDecode(rawAudioFrame))
                {
                    //OutBitsPerSample may need to be set by user or the program?
                    var decoded = audioDecoder.GetDecodedFrame(new AudioConversionParameters
                    {
                        OutBitsPerSample = 16
                    });

                    System.Windows.Application.Current.Dispatcher.Invoke(_invalidateAction, DispatcherPriority.Send, decoded);
                }
            }
        }
コード例 #25
0
        /// <summary>
        /// Decodes an incoming raw video frame into the reusable BGR24 buffer and
        /// raises FrameReceived. Buffer and transform parameters are cached and
        /// rebuilt only when the decoded frame size changes.
        /// </summary>
        private void OnFrameReceived(object sender, RawFrame rawFrame)
        {
            if (!(rawFrame is RawVideoFrame videoFrame))
            {
                return;
            }

            FFmpegVideoDecoder decoder = GetDecoderForFrame(videoFrame);

            if (!decoder.TryDecode(videoFrame, out DecodedVideoFrameParameters frameParams))
            {
                return;
            }

            int width  = frameParams.Width;
            int height = frameParams.Height;

            //reuse the decode buffer; reallocate only on a size change
            int requiredBytes = height * ImageUtils.GetStride(width, PixelFormat.Bgr24);
            if (_decodedFrameBuffer.Length != requiredBytes)
            {
                _decodedFrameBuffer = new byte[requiredBytes];
            }

            var target = new ArraySegment <byte>(_decodedFrameBuffer);

            //rebuild the cached transform parameters only on a size change
            bool sizeChanged = _postVideoDecodingParameters.TargetFrameSize.Width != width ||
                               _postVideoDecodingParameters.TargetFrameSize.Height != height;
            if (sizeChanged)
            {
                _postVideoDecodingParameters = new PostVideoDecodingParameters(RectangleF.Empty,
                                                                               new Size(width, height),
                                                                               ScalingPolicy.Stretch, PixelFormat.Bgr24, ScalingQuality.Nearest);
            }

            IDecodedVideoFrame decoded = decoder.GetDecodedFrame(target, _postVideoDecodingParameters);

            FrameReceived?.Invoke(this, decoded);
        }
コード例 #26
0
        /// <summary>
        /// Decodes an incoming raw video frame, scales it to the externally
        /// requested size (or the decoded size when none is set) and raises
        /// FrameReceived.
        /// </summary>
        private void OnFrameReceived(object sender, RawFrame rawFrame)
        {
            if (!(rawFrame is RawVideoFrame rawVideoFrame))
            {
                return;
            }

            FFmpegVideoDecoder decoder = GetDecoderForFrame(rawVideoFrame);

            if (!decoder.TryDecode(rawVideoFrame, out DecodedVideoFrameParameters decodedFrameParameters))
            {
                return;
            }

            //_desiredSize packs the target size into one long: width in the high
            //32 bits, height in the low 32 bits; 0 means "use the decoded size"
            long desiredSize = Interlocked.Read(ref _desiredSize);

            int targetWidth  = desiredSize == 0 ? decodedFrameParameters.Width : (int)(desiredSize >> 32);
            int targetHeight = desiredSize == 0 ? decodedFrameParameters.Height : (int)desiredSize;

            var postParams = new PostVideoDecodingParameters(RectangleF.Empty,
                                                             new Size(targetWidth, targetHeight),
                                                             ScalingPolicy.Stretch, PixelFormat.Bgr24, ScalingQuality.Bicubic);

            IDecodedVideoFrame decodedFrame = decoder.GetDecodedFrame(postParams);

            FrameReceived?.Invoke(this, decodedFrame);
        }
コード例 #27
0
        /// <summary>
        /// Decodes an incoming raw audio frame to 16-bit samples and raises
        /// FrameReceived. Non-audio and undecodable frames are ignored.
        /// </summary>
        private void OnFrameReceived(object sender, RawFrame rawFrame)
        {
            //only audio frames can be decoded here
            if (rawFrame is RawAudioFrame rawAudioFrame)
            {
                FFmpegAudioDecoder decoder = GetDecoderForFrame(rawAudioFrame);

                if (decoder.TryDecode(rawAudioFrame))
                {
                    //request 16 bit output samples
                    var conversion = new AudioConversionParameters
                    {
                        OutBitsPerSample = 16
                    };

                    IDecodedAudioFrame decodedFrame = decoder.GetDecodedFrame(conversion);

                    FrameReceived?.Invoke(this, decodedFrame);
                }
            }
        }
コード例 #28
0
ファイル: Charting.cs プロジェクト: wenjie891/UnrealVN
        /// <summary>
        /// Parses the captured network stream into the given chart: clears all existing series,
        /// configures scroll bars, then adds one data point per frame to each named series
        /// (both raw totals and per-second rates), applies a 30-sample moving average to the
        /// bandwidth series, and rescales the axes.
        /// </summary>
        /// <param name="NetworkStream">Captured stream whose Frames are charted.</param>
        /// <param name="NetworkChart">Chart control to populate; must contain all referenced series and a "DefaultChartArea".</param>
        /// <param name="ActorFilter">Actor name filter applied to each frame.</param>
        /// <param name="PropertyFilter">Property name filter applied to each frame.</param>
        /// <param name="RPCFilter">RPC name filter applied to each frame.</param>
        /// <exception cref="InvalidOperationException">Thrown when a filtered frame has zero duration, which would make per-second rates undefined.</exception>
        public static void ParseStreamIntoChart(NetworkStream NetworkStream, Chart NetworkChart, string ActorFilter, string PropertyFilter, string RPCFilter)
        {
            var StartTime = DateTime.UtcNow;

            NetworkChart.BeginInit();

            // Reset existing data.
            foreach (var Series in NetworkChart.Series)
            {
                Series.Points.Clear();
            }
            NetworkChart.ResetAutoValues();
            NetworkChart.Invalidate();

            NetworkChart.ChartAreas[0].AxisX.ScrollBar.IsPositionedInside = false;
            NetworkChart.ChartAreas[0].AxisX.ScrollBar.ButtonStyle        = ScrollBarButtonStyles.All;
            NetworkChart.ChartAreas[0].AxisX.ScrollBar.Size        = 15;
            NetworkChart.ChartAreas[0].AxisX.ScrollBar.ButtonColor = Color.LightGray;

            NetworkChart.ChartAreas[0].AxisY.ScrollBar.IsPositionedInside = false;
            NetworkChart.ChartAreas[0].AxisY.ScrollBar.ButtonStyle        = ScrollBarButtonStyles.All;
            NetworkChart.ChartAreas[0].AxisY.ScrollBar.Size        = 15;
            NetworkChart.ChartAreas[0].AxisY.ScrollBar.ButtonColor = Color.LightGray;

            int FrameCounter = 0;

            foreach (PartialNetworkStream RawFrame in NetworkStream.Frames)
            {
                PartialNetworkStream Frame = RawFrame.Filter(ActorFilter, PropertyFilter, RPCFilter);

                if (Frame.EndTime == Frame.StartTime)
                {
                    // A zero-length frame would divide by zero below.
                    throw new InvalidOperationException("Frame has zero duration (EndTime == StartTime); cannot compute per-second rates.");
                }
                float OneOverDeltaTime = 1 / (Frame.EndTime - Frame.StartTime);

                NetworkChart.Series["ActorCount"].Points.AddXY(FrameCounter, Frame.ActorCount);
                NetworkChart.Series["ActorCountSec"].Points.AddXY(FrameCounter, Frame.ActorCount * OneOverDeltaTime);
                NetworkChart.Series["PropertyCount"].Points.AddXY(FrameCounter, Frame.PropertyCount);
                NetworkChart.Series["PropertyCountSec"].Points.AddXY(FrameCounter, Frame.PropertyCount * OneOverDeltaTime);
                NetworkChart.Series["PropertySize"].Points.AddXY(FrameCounter, Frame.ReplicatedSizeBits / 8);
                NetworkChart.Series["PropertySizeSec"].Points.AddXY(FrameCounter, Frame.ReplicatedSizeBits / 8 * OneOverDeltaTime);
                NetworkChart.Series["RPCCount"].Points.AddXY(FrameCounter, Frame.RPCCount);
                NetworkChart.Series["RPCCountSec"].Points.AddXY(FrameCounter, Frame.RPCCount * OneOverDeltaTime);
                NetworkChart.Series["RPCSize"].Points.AddXY(FrameCounter, Frame.RPCSizeBits / 8);
                NetworkChart.Series["RPCSizeSec"].Points.AddXY(FrameCounter, Frame.RPCSizeBits / 8 * OneOverDeltaTime);
                NetworkChart.Series["ExportBunchCount"].Points.AddXY(FrameCounter, Frame.ExportBunchCount);
                NetworkChart.Series["ExportBunchSize"].Points.AddXY(FrameCounter, Frame.ExportBunchSizeBits / 8);
                // BUGFIX: MustBeMappedGuidCount is a count, not a bit size — it was previously
                // divided by 8 (copy/paste from the neighboring *SizeBits series).
                NetworkChart.Series["MustBeMappedGuidsCount"].Points.AddXY(FrameCounter, Frame.MustBeMappedGuidCount);
                NetworkChart.Series["MustBeMappedGuidsSize"].Points.AddXY(FrameCounter, Frame.MustBeMappedGuidSizeBits / 8);
                NetworkChart.Series["SendAckCount"].Points.AddXY(FrameCounter, Frame.SendAckCount);
                NetworkChart.Series["SendAckCountSec"].Points.AddXY(FrameCounter, Frame.SendAckCount * OneOverDeltaTime);
                NetworkChart.Series["SendAckSize"].Points.AddXY(FrameCounter, Frame.SendAckSizeBits / 8);
                NetworkChart.Series["SendAckSizeSec"].Points.AddXY(FrameCounter, Frame.SendAckSizeBits / 8 * OneOverDeltaTime);
                NetworkChart.Series["ContentBlockHeaderSize"].Points.AddXY(FrameCounter, Frame.ContentBlockHeaderSizeBits / 8);
                NetworkChart.Series["ContentBlockFooterSize"].Points.AddXY(FrameCounter, Frame.ContentBlockFooterSizeBits / 8);
                NetworkChart.Series["PropertyHandleSize"].Points.AddXY(FrameCounter, Frame.PropertyHandleSizeBits / 8);
                NetworkChart.Series["SendBunchCount"].Points.AddXY(FrameCounter, Frame.SendBunchCount);
                NetworkChart.Series["SendBunchCountSec"].Points.AddXY(FrameCounter, Frame.SendBunchCount * OneOverDeltaTime);
                NetworkChart.Series["SendBunchSize"].Points.AddXY(FrameCounter, Frame.SendBunchSizeBits / 8);
                NetworkChart.Series["SendBunchSizeSec"].Points.AddXY(FrameCounter, Frame.SendBunchSizeBits / 8 * OneOverDeltaTime);
                NetworkChart.Series["SendBunchHeaderSize"].Points.AddXY(FrameCounter, Frame.SendBunchHeaderSizeBits / 8);
                NetworkChart.Series["GameSocketSendSize"].Points.AddXY(FrameCounter, Frame.UnrealSocketSize);
                NetworkChart.Series["GameSocketSendSizeSec"].Points.AddXY(FrameCounter, Frame.UnrealSocketSize * OneOverDeltaTime);
                NetworkChart.Series["GameSocketSendCount"].Points.AddXY(FrameCounter, Frame.UnrealSocketCount);
                NetworkChart.Series["GameSocketSendCountSec"].Points.AddXY(FrameCounter, Frame.UnrealSocketCount * OneOverDeltaTime);
                NetworkChart.Series["MiscSocketSendSize"].Points.AddXY(FrameCounter, Frame.OtherSocketSize);
                NetworkChart.Series["MiscSocketSendSizeSec"].Points.AddXY(FrameCounter, Frame.OtherSocketSize * OneOverDeltaTime);
                NetworkChart.Series["MiscSocketSendCount"].Points.AddXY(FrameCounter, Frame.OtherSocketCount);
                NetworkChart.Series["MiscSocketSendCountSec"].Points.AddXY(FrameCounter, Frame.OtherSocketCount * OneOverDeltaTime);
                // Total outgoing bandwidth = payload bytes plus fixed per-packet overhead for every socket send.
                int OutgoingBandwidth = Frame.UnrealSocketSize + Frame.OtherSocketSize + NetworkStream.PacketOverhead * (Frame.UnrealSocketCount + Frame.OtherSocketCount);
                NetworkChart.Series["OutgoingBandwidthSize"].Points.AddXY(FrameCounter, OutgoingBandwidth);
                NetworkChart.Series["OutgoingBandwidthSizeSec"].Points.AddXY(FrameCounter, OutgoingBandwidth * OneOverDeltaTime);
                NetworkChart.Series["ActorReplicateTimeInMS"].Points.AddXY(FrameCounter, Frame.ActorReplicateTimeInMS);

                // Event markers are plotted at Y=0 so they appear along the baseline.
                if (Frame.NumEvents > 0)
                {
                    NetworkChart.Series["Events"].Points.AddXY(FrameCounter, 0);
                }

                FrameCounter++;
            }

            // Smooth the per-second bandwidth series with a 30-sample moving average into the *AvgSec series.
            NetworkChart.DataManipulator.FinancialFormula(FinancialFormula.MovingAverage, "30", "GameSocketSendSizeSec", "GameSocketSendSizeAvgSec");
            NetworkChart.DataManipulator.FinancialFormula(FinancialFormula.MovingAverage, "30", "OutgoingBandwidthSizeSec", "OutgoingBandwidthSizeAvgSec");

            NetworkChart.ChartAreas["DefaultChartArea"].RecalculateAxesScale();

            NetworkChart.EndInit();

            Console.WriteLine("Adding data to chart took {0} seconds", (DateTime.UtcNow - StartTime).TotalSeconds);
        }
コード例 #29
0
 /// <summary>
 /// Handles a raw frame arriving from the RTSP client by re-raising it to this
 /// instance's own FrameReceived subscribers.
 /// </summary>
 /// <param name="sender">The RTSP client that produced the frame (unused; this instance is forwarded as the sender instead).</param>
 /// <param name="rawFrame">The received frame, forwarded unchanged.</param>
 private void RtspClientOnFrameReceived(object sender, RawFrame rawFrame)
 {
     // Snapshot the delegate so the null check and invocation see the same subscriber list.
     var handler = FrameReceived;

     if (handler != null)
     {
         handler(this, rawFrame);
     }
 }
コード例 #30
0
ファイル: Charting.cs プロジェクト: nicklinesla/4.21-arcore
        /// <summary>
        /// Parses the captured network stream into the chart via the main window: clears all
        /// existing series (reporting progress), preserves the current horizontal scroll position,
        /// adds one data point per frame to each series (raw totals and per-second rates),
        /// applies a 30-sample moving average to the outgoing-bandwidth series, and rescales the axes.
        /// </summary>
        /// <param name="InMainWindow">Window used for progress reporting and for adding chart points by series type.</param>
        /// <param name="NetworkStream">Captured stream whose Frames are charted.</param>
        /// <param name="NetworkChart">Chart control to populate; must contain a "DefaultChartArea".</param>
        /// <param name="InFilterValues">Filter applied to each frame before charting.</param>
        /// <exception cref="InvalidOperationException">Thrown when a filtered frame has zero duration, which would make per-second rates undefined.</exception>
        public static void ParseStreamIntoChart(MainWindow InMainWindow, NetworkStream NetworkStream, Chart NetworkChart, FilterValues InFilterValues)
        {
            var StartTime = DateTime.UtcNow;

            InMainWindow.ShowProgress(true);

            // Save old scroll position so the view doesn't jump after repopulating.
            double OldPosition = NetworkChart.ChartAreas["DefaultChartArea"].AxisX.ScaleView.Position;

            NetworkChart.BeginInit();

            // Reset existing data, reporting progress per series.
            for (int i = 0; i < NetworkChart.Series.Count; i++)
            {
                float Percent = ( float )i / ( float )NetworkChart.Series.Count;
                InMainWindow.UpdateProgress(( int )(Percent * 100));

                NetworkChart.Series[i].Points.Clear();
            }

            InMainWindow.ShowProgress(true);

            NetworkChart.ResetAutoValues();
            NetworkChart.Invalidate();

            NetworkChart.ChartAreas[0].AxisX.ScrollBar.IsPositionedInside = false;
            NetworkChart.ChartAreas[0].AxisX.ScrollBar.ButtonStyle        = ScrollBarButtonStyles.All;
            NetworkChart.ChartAreas[0].AxisX.ScrollBar.Size        = 15;
            NetworkChart.ChartAreas[0].AxisX.ScrollBar.ButtonColor = Color.LightGray;

            NetworkChart.ChartAreas[0].AxisY.ScrollBar.IsPositionedInside = false;
            NetworkChart.ChartAreas[0].AxisY.ScrollBar.ButtonStyle        = ScrollBarButtonStyles.All;
            NetworkChart.ChartAreas[0].AxisY.ScrollBar.Size        = 15;
            NetworkChart.ChartAreas[0].AxisY.ScrollBar.ButtonColor = Color.LightGray;

            int FrameCounter = 0;

            foreach (PartialNetworkStream RawFrame in NetworkStream.Frames)
            {
                // Progress updates are throttled to every 1000 frames to keep the loop fast.
                if (FrameCounter % 1000 == 0)
                {
                    float Percent = ( float )FrameCounter / ( float )NetworkStream.Frames.Count;
                    InMainWindow.UpdateProgress(( int )(Percent * 100));
                }

                PartialNetworkStream Frame = RawFrame.Filter(InFilterValues);

                if (Frame.EndTime == Frame.StartTime)
                {
                    // A zero-length frame would divide by zero below.
                    throw new InvalidOperationException("Frame has zero duration (EndTime == StartTime); cannot compute per-second rates.");
                }

                float OneOverDeltaTime = 1 / (Frame.EndTime - Frame.StartTime);

                // Total outgoing bandwidth = payload bytes plus fixed per-packet overhead for every socket send.
                int OutgoingBandwidth = Frame.UnrealSocketSize + Frame.OtherSocketSize + NetworkStream.PacketOverhead * (Frame.UnrealSocketCount + Frame.OtherSocketCount);

                InMainWindow.AddChartPoint(SeriesType.OutgoingBandwidthSize, FrameCounter, OutgoingBandwidth);
                InMainWindow.AddChartPoint(SeriesType.OutgoingBandwidthSizeSec, FrameCounter, OutgoingBandwidth * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.ActorCount, FrameCounter, Frame.ActorCount);
                InMainWindow.AddChartPoint(SeriesType.PropertySize, FrameCounter, Frame.ReplicatedSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.PropertySizeSec, FrameCounter, Frame.ReplicatedSizeBits / 8 * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.RPCSize, FrameCounter, Frame.RPCSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.RPCSizeSec, FrameCounter, Frame.RPCSizeBits / 8 * OneOverDeltaTime);

                // (was wrapped in a no-op "#if true" block — flattened, identical compilation result)
                InMainWindow.AddChartPoint(SeriesType.ActorCountSec, FrameCounter, Frame.ActorCount * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.PropertyCount, FrameCounter, Frame.PropertyCount);
                InMainWindow.AddChartPoint(SeriesType.PropertyCountSec, FrameCounter, Frame.PropertyCount * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.RPCCount, FrameCounter, Frame.RPCCount);
                InMainWindow.AddChartPoint(SeriesType.RPCCountSec, FrameCounter, Frame.RPCCount * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.ExportBunchCount, FrameCounter, Frame.ExportBunchCount);
                InMainWindow.AddChartPoint(SeriesType.ExportBunchSize, FrameCounter, Frame.ExportBunchSizeBits / 8);
                // BUGFIX: MustBeMappedGuidCount is a count, not a bit size — it was previously
                // divided by 8 (copy/paste from the neighboring *SizeBits series).
                InMainWindow.AddChartPoint(SeriesType.MustBeMappedGuidsCount, FrameCounter, Frame.MustBeMappedGuidCount);
                InMainWindow.AddChartPoint(SeriesType.MustBeMappedGuidsSize, FrameCounter, Frame.MustBeMappedGuidSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.SendAckCount, FrameCounter, Frame.SendAckCount);
                InMainWindow.AddChartPoint(SeriesType.SendAckCountSec, FrameCounter, Frame.SendAckCount * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.SendAckSize, FrameCounter, Frame.SendAckSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.SendAckSizeSec, FrameCounter, Frame.SendAckSizeBits / 8 * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.ContentBlockHeaderSize, FrameCounter, Frame.ContentBlockHeaderSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.ContentBlockFooterSize, FrameCounter, Frame.ContentBlockFooterSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.PropertyHandleSize, FrameCounter, Frame.PropertyHandleSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.SendBunchCount, FrameCounter, Frame.SendBunchCount);
                InMainWindow.AddChartPoint(SeriesType.SendBunchCountSec, FrameCounter, Frame.SendBunchCount * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.SendBunchSize, FrameCounter, Frame.SendBunchSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.SendBunchSizeSec, FrameCounter, Frame.SendBunchSizeBits / 8 * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.SendBunchHeaderSize, FrameCounter, Frame.SendBunchHeaderSizeBits / 8);
                InMainWindow.AddChartPoint(SeriesType.GameSocketSendSize, FrameCounter, Frame.UnrealSocketSize);
                InMainWindow.AddChartPoint(SeriesType.GameSocketSendSizeSec, FrameCounter, Frame.UnrealSocketSize * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.GameSocketSendCount, FrameCounter, Frame.UnrealSocketCount);
                InMainWindow.AddChartPoint(SeriesType.GameSocketSendCountSec, FrameCounter, Frame.UnrealSocketCount * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.ActorReplicateTimeInMS, FrameCounter, Frame.ActorReplicateTimeInMS);

                // NOTE(review): the misc-socket series below are compiled out; re-enable by
                // changing the condition if they are ever wanted again.
#if false
                InMainWindow.AddChartPoint(SeriesType.MiscSocketSendSize, FrameCounter, Frame.OtherSocketSize);
                InMainWindow.AddChartPoint(SeriesType.MiscSocketSendSizeSec, FrameCounter, Frame.OtherSocketSize * OneOverDeltaTime);
                InMainWindow.AddChartPoint(SeriesType.MiscSocketSendCount, FrameCounter, Frame.OtherSocketCount);
                InMainWindow.AddChartPoint(SeriesType.MiscSocketSendCountSec, FrameCounter, Frame.OtherSocketCount * OneOverDeltaTime);
#endif

                // Event markers are plotted at Y=0 so they appear along the baseline.
                if (Frame.NumEvents > 0)
                {
                    InMainWindow.AddChartPoint(SeriesType.Events, FrameCounter, 0);
                }

                FrameCounter++;
            }

            // Smooth the per-second bandwidth series with a 30-sample moving average into the *AvgSec series.
            //NetworkChart.DataManipulator.FinancialFormula( FinancialFormula.MovingAverage, "30", SeriesType.GameSocketSendSizeSec, SeriesType.GameSocketSendSizeAvgSec );
            NetworkChart.DataManipulator.FinancialFormula(FinancialFormula.MovingAverage, "30", SeriesType.OutgoingBandwidthSizeSec.ToString(), SeriesType.OutgoingBandwidthSizeAvgSec.ToString());

            NetworkChart.ChartAreas["DefaultChartArea"].RecalculateAxesScale();

            // Restore the scroll position saved before the rebuild.
            NetworkChart.ChartAreas["DefaultChartArea"].AxisX.ScaleView.Position = OldPosition;

            NetworkChart.EndInit();

            InMainWindow.ShowProgress(false);

            Console.WriteLine("Adding data to chart took {0} seconds", (DateTime.UtcNow - StartTime).TotalSeconds);
        }