// Example #1
 /// <summary>
 /// Applies the hue colour onto the specified buffer.
 /// </summary>
 /// <param name="videoMediaBuffer">The video media buffer.</param>
 /// <param name="hueColor">Color of the hue.</param>
 /// <returns>The media buffer byte array.</returns>
 public static byte[] ApplyHue(this VideoMediaBuffer videoMediaBuffer, CallHandler.HueColor hueColor)
 {
     var format = videoMediaBuffer.VideoFormat;

     // NV12 frames carry 12 bits per pixel, hence width * height * 12 / 8 bytes.
     var frame = new byte[format.Width * format.Height * 12 / 8];

     Marshal.Copy(videoMediaBuffer.Data, frame, 0, frame.Length);
     ApplyHue(frame, hueColor, format.Width, format.Height);

     return frame;
 }
        /// <summary>
        /// Receives one raw video frame for a participant, converts it from NV12 to BGR,
        /// and posts it (keyed by participant id) on the <c>Video</c> emitter, dropping
        /// zero-timestamp, undersized, unconvertible, and out-of-order frames.
        /// </summary>
        /// <param name="videoFrame">The raw video media buffer received from the media platform.</param>
        /// <param name="participantId">Identifier of the participant the frame belongs to.</param>
        public void ReceiveFrame(VideoMediaBuffer videoFrame, string participantId)
        {
            // Timestamp is interpreted as ticks since 1900-01-01 UTC (NTP-style epoch,
            // presumably the media platform's convention — TODO confirm against sender docs).
            var originatingTime = new DateTime(1900, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddTicks(videoFrame.Timestamp);
            var frames          = new Dictionary <string, Shared <Image> >();

            // Rent a pooled image; the using block releases this component's reference on exit.
            using (var sharedImage = ImagePool.GetOrCreate(
                       videoFrame.VideoFormat.Width,
                       videoFrame.VideoFormat.Height,
                       PixelFormat.BGR_24bpp))
            {
                // A zero sender timestamp means we cannot place the frame in time — drop it.
                var timestamp = (long)videoFrame.Timestamp;
                if (timestamp == 0)
                {
                    this.logger.Warn($"Original sender timestamp is zero: {participantId}");
                    return;
                }
                var length = videoFrame.VideoFormat.Width * videoFrame.VideoFormat.Height * 12 / 8; // This is how to calculate NV12 buffer size
                if (length > videoFrame.Length)
                {
                    // Buffer is smaller than a full NV12 frame of the advertised size — drop it.
                    return;
                }
                byte[] data = new byte[length];

                try
                {
                    // Copy the unmanaged frame into managed memory, then convert NV12 -> BGR.
                    Marshal.Copy(videoFrame.Data, data, 0, length);
                    var bgrImage = NV12toBGR(data, videoFrame.VideoFormat.Width, videoFrame.VideoFormat.Height);
                    sharedImage.Resource.CopyFrom(bgrImage);
                }
                catch (Exception ex)
                {
                    // Conversion/copy failed — log the frame geometry for diagnosis and drop the frame.
                    this.logger.Warn($"ON FAILURE: length: {videoFrame.Length}, height: {videoFrame.VideoFormat.Height}, width: {videoFrame.VideoFormat.Width}");
                    this.logger.Error(ex);
                    return;
                }
                // Serialize posts so the LastEnvelope comparison and Post happen atomically;
                // frames older than the last posted envelope are discarded to keep time monotonic.
                lock (this.Video)
                {
                    if (originatingTime > this.Video.LastEnvelope.OriginatingTime)
                    {
                        frames.Add(participantId, sharedImage);
                        this.Video.Post(frames, originatingTime);
                    }
                    else
                    {
                        this.logger.Warn("Out of order frame");
                    }
                }
            }
        }
// Example #3
        /// <summary>
        /// Callback from the media platform when raw video is received. This is loopbacked to the user after adding the hue of the user's choice.
        /// </summary>
        /// <param name="sender">The video socket raising the event.</param>
        /// <param name="e">Event args carrying the received video buffer.</param>
        private void OnVideoMediaReceived(object sender, VideoMediaReceivedEventArgs e)
        {
            try
            {
                CorrelationId.SetCurrentId(_correlationId);

                Log.Verbose(
                    new CallerInfo(),
                    LogContext.Media,
                    "[{0}] [VideoMediaReceivedEventArgs(Data=<{1}>, Length={2}, Timestamp={3}, Width={4}, Height={5}, ColorFormat={6}, FrameRate={7})]",
                    this.Id,
                    e.Buffer.Data.ToString(),
                    e.Buffer.Length,
                    e.Buffer.Timestamp,
                    e.Buffer.VideoFormat.Width,
                    e.Buffer.VideoFormat.Height,
                    e.Buffer.VideoFormat.VideoColorFormat,
                    e.Buffer.VideoFormat.FrameRate);

                // Copy the unmanaged frame into a managed buffer so the hue can be applied in place.
                byte[] buffer = new byte[e.Buffer.Length];
                Marshal.Copy(e.Buffer.Data, buffer, 0, (int)e.Buffer.Length);

                AddHue(DefaultHueColor, buffer, e.Buffer.VideoFormat.Width, e.Buffer.VideoFormat.Height);

                // Loop the tinted frame back out on the video socket.
                VideoFormat sendVideoFormat = GetSendVideoFormat(e.Buffer.VideoFormat);
                var videoSendBuffer = new VideoSendBuffer(buffer, (uint)buffer.Length, sendVideoFormat);
                _videoSocket.Send(videoSendBuffer);
            }
            catch (Exception ex)
            {
                Log.Error(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Exception in VideoMediaReceived {ex}");
            }
            finally
            {
                // The received buffer must be disposed by the consumer once processed.
                e.Buffer.Dispose();
            }
        }
        /// <summary>
        /// Callback from the media platform when raw video is received. Captures at most one
        /// frame per <c>VideoCaptureFrequency</c> interval, converts it from NV12 to a bitmap,
        /// and caches it; the first captured frame also kicks off sending a chat message.
        /// </summary>
        /// <param name="sender">The video socket raising the event.</param>
        /// <param name="e">Event args carrying the received video buffer.</param>
        private void OnVideoMediaReceived(object sender, VideoMediaReceivedEventArgs e)
        {
            try
            {
                CorrelationId.SetCurrentId(_correlationId);

                // Throttle capture to the configured frequency. UtcNow matches the UTC
                // semantics implied by the _lastVideoCapturedTimeUtc field name.
                if (DateTime.UtcNow > this._lastVideoCapturedTimeUtc + this.VideoCaptureFrequency)
                {
                    // Update the last capture timestamp
                    this._lastVideoCapturedTimeUtc = DateTime.UtcNow;

                    Log.Info(
                        new CallerInfo(),
                        LogContext.Media,
                        "[{0}]: Capturing image: [VideoMediaReceivedEventArgs(Data=<{1}>, Length={2}, Timestamp={3}, Width={4}, Height={5}, ColorFormat={6}, FrameRate={7})]",
                        this.Id,
                        e.Buffer.Data.ToString(),
                        e.Buffer.Length,
                        e.Buffer.Timestamp,
                        e.Buffer.VideoFormat.Width,
                        e.Buffer.VideoFormat.Height,
                        e.Buffer.VideoFormat.VideoColorFormat,
                        e.Buffer.VideoFormat.FrameRate);

                    // Make a copy of the media buffer into managed memory, timing the copy.
                    Stopwatch watch = Stopwatch.StartNew();

                    byte[] buffer = new byte[e.Buffer.Length];
                    Marshal.Copy(e.Buffer.Data, buffer, 0, (int)e.Buffer.Length);

                    watch.Stop();
                    Log.Info(new CallerInfo(), LogContext.Media, $"{this.Id} Took {watch.ElapsedMilliseconds} ms to copy buffer");

                    // Transform to bitmap object
                    Bitmap bmpObject = MediaUtils.TransformNV12ToBmpFaster(buffer, e.Buffer.VideoFormat.Width, e.Buffer.VideoFormat.Height);

                    // Only the very first captured frame triggers the chat message.
                    bool sendChatMessage = CurrentVideoImage == null;
                    Log.Info(new CallerInfo(), LogContext.Media, $"{this.Id} send chat message {sendChatMessage}");

                    // Update the bitmap cache
                    CurrentVideoImage = bmpObject;

                    if (sendChatMessage)
                    {
                        // Fire-and-forget so the media callback is never blocked on messaging.
                        Task.Run(async () =>
                        {
                            try
                            {
                                await RealTimeMediaCall.SendMessageForCall(RealTimeMediaCall);
                            }
                            catch (Exception ex)
                            {
                                Log.Info(new CallerInfo(), LogContext.FrontEnd, $"Exception in SendingChatMessage {ex}");
                            }
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                Log.Error(new CallerInfo(), LogContext.Media, $"{this.Id} Exception in VideoMediaReceived {ex}");
            }
            finally
            {
                // Dispose in finally so the platform buffer is released on every code path.
                e.Buffer.Dispose();
            }
        }
 /// <summary>
 /// Forwards a received video frame to the frame source component.
 /// </summary>
 /// <param name="videoFrame">The received video media buffer.</param>
 /// <param name="participantId">Identifier of the participant the frame came from.</param>
 public void OnVideoMediaReceived(VideoMediaBuffer videoFrame, string participantId) =>
     frameSourceComponent.ReceiveFrame(videoFrame, participantId);