public static List<VideoMediaBuffer> CreateVideoMediaBuffers(long currentTick)
{
    List<VideoMediaBuffer> videoMediaBuffers = new List<VideoMediaBuffer>();
    int frameSize = GetFrameSize(VideoFormat.NV12_1280x720_30Fps);
    var referenceTime = currentTick;

    // Duration of one frame in 100-ns ticks (1000 ms / fps, times 10,000 ticks per ms).
    var frameDurationInTicks = (long)((1000.0 / (double)VideoFormat.NV12_1280x720_30Fps.FrameRate) * 10000.0);

    using (FileStream fs = File.Open(Service.Instance.Configuration.VideoFileLocation, FileMode.Open))
    {
        byte[] bytesToRead = new byte[frameSize];

        // Read one full frame per iteration; a trailing partial frame is dropped.
        while (fs.Read(bytesToRead, 0, bytesToRead.Length) >= frameSize)
        {
            IntPtr unmanagedBuffer = Marshal.AllocHGlobal(frameSize);
            Marshal.Copy(bytesToRead, 0, unmanagedBuffer, frameSize);

            referenceTime += frameDurationInTicks;
            var videoSendBuffer = new VideoSendBuffer(unmanagedBuffer, (uint)frameSize, VideoFormat.NV12_1280x720_30Fps, referenceTime);
            videoMediaBuffers.Add(videoSendBuffer);
        }
    }

    Log.Info(new CallerInfo(), LogContext.Media, "created {0} VideoMediaBuffers", videoMediaBuffers.Count);
    return videoMediaBuffers;
}
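// Note: GetFrameSize is a helper from the surrounding project, not the media SDK. A minimal
// sketch, assuming it only needs to handle NV12 (12 bits per pixel, i.e. width * height * 3 / 2 bytes):
private static int GetFrameSize(VideoFormat format)
{
    // NV12 stores a full-resolution Y plane followed by a half-resolution interleaved UV plane.
    return format.Width * format.Height * 3 / 2;
}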
public List<VideoMediaBuffer> GetVideoMediaBuffers(long currentTick)
{
    Stopwatch watch = new Stopwatch();
    watch.Start();

    // 1. Download _nbSecondToLoad seconds of video content from the storage account.
    long bufferSize = _frameSize * _videoFormat.FrameRate * _nbSecondToLoad;
    byte[] bytesToRead = new byte[bufferSize];
    var nbByteRead = _videoBlob.DownloadRangeToByteArray(bytesToRead, 0, _videoOffset, bytesToRead.Length, null, null);

    // 2. Extract each video frame into a VideoMediaBuffer object.
    List<VideoMediaBuffer> videoMediaBuffers = new List<VideoMediaBuffer>();
    long referenceTime = currentTick;
    for (int index = 0; index < nbByteRead; index += _frameSize)
    {
        IntPtr unmanagedBuffer = Marshal.AllocHGlobal(_frameSize);
        Marshal.Copy(bytesToRead, index, unmanagedBuffer, _frameSize);

        referenceTime += _frameDurationInTicks;
        var videoSendBuffer = new VideoSendBuffer(unmanagedBuffer, (uint)_frameSize, _videoFormat, referenceTime);
        videoMediaBuffers.Add(videoSendBuffer);

        _videoOffset += _frameSize;
    }

    watch.Stop();
    Log.Info(new CallerInfo(), LogContext.FrontEnd,
        $"Loading {_nbSecondToLoad}s video took {watch.ElapsedMilliseconds}ms ({bufferSize} bytes)");

    return videoMediaBuffers;
}
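// Note: the fields consumed above (_videoBlob, _videoFormat, _frameSize, _frameDurationInTicks,
// _videoOffset, _nbSecondToLoad) are declared elsewhere in the class. A minimal sketch of
// plausible declarations, assuming an NV12 720p30 stream and .NET's 100-ns ticks; the concrete
// values and the CloudBlockBlob type are assumptions for illustration:
private readonly CloudBlockBlob _videoBlob;                            // Azure Storage blob holding the raw NV12 stream
private readonly VideoFormat _videoFormat = VideoFormat.NV12_1280x720_30Fps;
private readonly int _frameSize = 1280 * 720 * 3 / 2;                  // NV12: 12 bits per pixel
private readonly long _frameDurationInTicks = 10_000_000 / 30;         // ticks per frame at 30 fps
private readonly int _nbSecondToLoad = 5;                              // how much video to prefetch per call
private long _videoOffset;                                             // current read position within the blob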
/// <summary>
/// Save screenshots when we receive video from the subscribed participant.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The video media received arguments.</param>
private void OnVideoMediaReceived(object sender, VideoMediaReceivedEventArgs e)
{
    try
    {
        if (Interlocked.Decrement(ref this.maxIngestFrameCount) > 0)
        {
            this.logger.Info(
                $"[{this.Call.Id}]: Capturing image: [VideoMediaReceivedEventArgs(Data=<{e.Buffer.Data.ToString()}>, " +
                $"Length={e.Buffer.Length}, Timestamp={e.Buffer.Timestamp}, Width={e.Buffer.VideoFormat.Width}, " +
                $"Height={e.Buffer.VideoFormat.Height}, ColorFormat={e.Buffer.VideoFormat.VideoColorFormat}, FrameRate={e.Buffer.VideoFormat.FrameRate})]");
        }

        // Throttle to one frame every 33 ms ~ 30 fps.
        if (DateTime.Now > this.lastVideoSentTimeUtc + TimeSpan.FromMilliseconds(33))
        {
            this.lastVideoSentTimeUtc = DateTime.Now;

            // Step 1: Send the video back with the hue applied.
            byte[] buffer = e.Buffer.ApplyHue(this.hueColor);

            // Use the real length of the data (Media may send us a larger buffer).
            VideoFormat sendVideoFormat = e.Buffer.VideoFormat.GetSendVideoFormat();
            var videoSendBuffer = new VideoSendBuffer(buffer, (uint)buffer.Length, sendVideoFormat);
            this.Call.GetLocalMediaSession().VideoSocket.Send(videoSendBuffer);

            if (DateTime.Now > this.lastVideoCapturedTimeUtc + this.videoCaptureFrequency)
            {
                // Step 2: Update the screenshot with the hue applied.

                // Update the last capture timestamp.
                this.lastVideoCapturedTimeUtc = DateTime.Now;

                // Transform to a bitmap object.
                Bitmap bmpObject = MediaUtils.TransformNv12ToBmpFaster(
                    buffer,
                    e.Buffer.VideoFormat.Width,
                    e.Buffer.VideoFormat.Height,
                    this.logger);

                // Update the bitmap cache.
                this.LatestScreenshotImage = bmpObject;
            }
        }
    }
    catch (Exception ex)
    {
        this.logger.Error(ex, $"[{this.Call.Id}] Exception in VideoMediaReceived");
    }

    e.Buffer.Dispose();
}
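// Note: GetSendVideoFormat is an extension method defined by the sample, not by the SDK.
// A minimal sketch, assuming it maps the received resolution onto one of the SDK's predefined
// NV12 send formats; apart from NV12_1280x720_30Fps (used elsewhere in this listing), the
// format names below are assumptions:
public static VideoFormat GetSendVideoFormat(this VideoFormat format)
{
    switch (format.Width)
    {
        case 1920: return VideoFormat.NV12_1920x1080_15Fps;
        case 1280: return VideoFormat.NV12_1280x720_30Fps;
        case 640:  return VideoFormat.NV12_640x360_15Fps;
        default:   return VideoFormat.NV12_424x240_15Fps;
    }
}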
/// <summary>
/// Callback from the media platform when raw video is received. The frame is looped back to the
/// user after adding the hue of the user's choice.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The video media received arguments.</param>
private void OnVideoMediaReceived(object sender, VideoMediaReceivedEventArgs e)
{
    try
    {
        CorrelationId.SetCurrentId(_correlationId);

        Log.Verbose(
            new CallerInfo(),
            LogContext.Media,
            "[{0}] [VideoMediaReceivedEventArgs(Data=<{1}>, Length={2}, Timestamp={3}, Width={4}, Height={5}, ColorFormat={6}, FrameRate={7})]",
            this.Id,
            e.Buffer.Data.ToString(),
            e.Buffer.Length,
            e.Buffer.Timestamp,
            e.Buffer.VideoFormat.Width,
            e.Buffer.VideoFormat.Height,
            e.Buffer.VideoFormat.VideoColorFormat,
            e.Buffer.VideoFormat.FrameRate);

        // Copy the unmanaged frame into a managed buffer before modifying it.
        byte[] buffer = new byte[e.Buffer.Length];
        Marshal.Copy(e.Buffer.Data, buffer, 0, (int)e.Buffer.Length);

        AddHue(DefaultHueColor, buffer, e.Buffer.VideoFormat.Width, e.Buffer.VideoFormat.Height);

        VideoFormat sendVideoFormat = GetSendVideoFormat(e.Buffer.VideoFormat);
        var videoSendBuffer = new VideoSendBuffer(buffer, (uint)buffer.Length, sendVideoFormat);
        _videoSocket.Send(videoSendBuffer);
    }
    catch (Exception ex)
    {
        Log.Error(new CallerInfo(), LogContext.Media, $"[{this.Id}]: Exception in VideoMediaReceived {ex}");
    }
    finally
    {
        e.Buffer.Dispose();
    }
}
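// Note: AddHue is sample-specific and not shown in this listing. A minimal sketch, assuming an
// NV12 layout (full-resolution Y plane followed by interleaved U/V bytes) and a simple chroma
// shift toward the requested color; the mapping from Color to a single chroma channel is an
// assumption for illustration:
private static void AddHue(Color color, byte[] nv12Buffer, int width, int height)
{
    int uvStart = width * height; // the interleaved UV plane begins after the Y plane

    for (int i = uvStart; i + 1 < nv12Buffer.Length; i += 2)
    {
        if (color.B >= color.R)
        {
            nv12Buffer[i] = (byte)Math.Min(nv12Buffer[i] + 50, 255);         // U: push chroma toward blue
        }
        else
        {
            nv12Buffer[i + 1] = (byte)Math.Min(nv12Buffer[i + 1] + 50, 255); // V: push chroma toward red
        }
    }
}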
public void OnClientVideoReceived(I420AVideoFrame frame)
{
    // Throttle to one frame every 33 ms ~ 30 fps.
    if (DateTime.Now > this.lastVideoSentToClientTimeUtc + TimeSpan.FromMilliseconds(33))
    {
        try
        {
            this.lastVideoSentToClientTimeUtc = DateTime.Now;

            // I420 is 12 bits per pixel, so a frame is width * height * 12 / 8 bytes.
            byte[] i420Frame = new byte[frame.width * frame.height * 12 / 8];
            frame.CopyTo(i420Frame);

            // The media platform expects NV12, so convert the chroma layout.
            byte[] nv12Frame = VideoConverter.I420ToNV12(i420Frame);

            VideoFormat sendVideoFormat = VideoFormatUtil.GetSendVideoFormat((int)frame.height, (int)frame.width);
            var videoSendBuffer = new VideoSendBuffer(nv12Frame, (uint)nv12Frame.Length, sendVideoFormat);
            this.Call.GetLocalMediaSession().VideoSocket.Send(videoSendBuffer);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
    }
}
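// Note: VideoConverter.I420ToNV12 is project-specific. A minimal sketch of the conversion:
// I420 stores the Y plane followed by separate U and V planes, while NV12 interleaves U and V,
// so the Y plane is copied verbatim and the two chroma planes are zipped together:
public static byte[] I420ToNV12(byte[] i420)
{
    byte[] nv12 = new byte[i420.Length];
    int ySize = i420.Length * 2 / 3; // the Y plane is 2/3 of a 12-bpp frame
    int uvSize = i420.Length / 6;    // the U and V planes are each 1/6 of the frame

    Buffer.BlockCopy(i420, 0, nv12, 0, ySize); // the Y plane is identical in both layouts

    for (int i = 0; i < uvSize; i++)
    {
        nv12[ySize + 2 * i] = i420[ySize + i];              // U sample
        nv12[ySize + 2 * i + 1] = i420[ySize + uvSize + i]; // V sample
    }

    return nv12;
}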
/// <summary>
/// Create video buffers from a viseme timeline and populate the given list starting at the reference time tick.
/// </summary>
/// <param name="visemesTimeline">The viseme timeline</param>
/// <param name="videoBuffers">The list of video buffers to be populated</param>
/// <param name="referenceTimeTick">The reference starting time tick</param>
private void CreateVideoBuffers(VisemesTimeline visemesTimeline, List<VideoMediaBuffer> videoBuffers, long referenceTimeTick)
{
    // Compute the frame buffer size in bytes for the current video format.
    var frameSize = (int)(_videoFormat.Width * _videoFormat.Height * Helper.GetBitsPerPixel(_videoFormat.VideoColorFormat) / 8);

    // Compute the frame duration for the current frame rate.
    var frameDurationInMs = (int)(1000.0 / (double)_videoFormat.FrameRate);
    var durationInMs = 0;

    // Create video frames for the whole viseme timeline length.
    while (durationInMs < visemesTimeline.Length)
    {
        // Get the bitmap for the viseme active at the current time.
        byte[] visemeBitmap = _visemeBitmaps[visemesTimeline.Get(durationInMs)];

        // Copy the bitmap into an unmanaged buffer.
        IntPtr unmanagedBuffer = Marshal.AllocHGlobal(frameSize);
        Marshal.Copy(visemeBitmap, 0, unmanagedBuffer, frameSize);

        // Advance the current duration by one frame.
        durationInMs += frameDurationInMs;

        // Create the video buffer and add it to the list (10,000 ticks per millisecond; the
        // multiplication is done in long to avoid overflowing int on long timelines).
        var videoSendBuffer = new VideoSendBuffer(unmanagedBuffer, (uint)frameSize, _videoFormat, referenceTimeTick + durationInMs * 10000L);
        videoBuffers.Add(videoSendBuffer);
    }

    Log.Info(new CallerInfo(), LogContext.Media, "created {0} VideoMediaBuffer frames", videoBuffers.Count);
}
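// Note: Helper.GetBitsPerPixel is project-specific. A minimal sketch, assuming the usual bit
// depths for raw color formats; the VideoColorFormat member names other than NV12 (used above)
// are assumptions, and compressed formats such as H264 have no fixed per-pixel size:
public static int GetBitsPerPixel(VideoColorFormat colorFormat)
{
    switch (colorFormat)
    {
        case VideoColorFormat.NV12: return 12; // 4:2:0: 8-bit Y plane plus quarter-resolution interleaved UV
        case VideoColorFormat.Yuy2: return 16; // 4:2:2 packed
        default: throw new ArgumentOutOfRangeException(nameof(colorFormat), "Unsupported raw color format");
    }
}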