/// <summary>
/// Callback invoked when a remote I420A video frame arrives. On the very first
/// frame it lazily creates the remote video source and media player on the UI
/// thread; every frame is then forwarded to the remote video bridge.
/// </summary>
/// <param name="frame">The newly received remote video frame.</param>
private void Peer_RemoteI420AFrameReady(I420AVideoFrame frame)
{
    lock (_remoteVideoLock)
    {
        if (!_remoteVideoPlaying)
        {
            _remoteVideoPlaying = true;

            // Capture the resolution into locals so the lambda below does not
            // touch the frame object from the UI thread.
            uint frameWidth = frame.width;
            uint frameHeight = frame.height;

            RunOnMainThread(() =>
            {
                // Bridge the remote video track with the remote media player UI.
                // The actual framerate is not known at this point, so guess 30 FPS.
                int assumedFramerate = 30;
                _remoteVideoSource = CreateI420VideoStreamSource(frameWidth, frameHeight, assumedFramerate);
                var player = new MediaPlayer();
                player.Source = MediaSource.CreateFromMediaStreamSource(_remoteVideoSource);
                remoteVideoPlayerElement.SetMediaPlayer(player);
                player.Play();
            });
        }
    }
    _remoteVideoBridge.HandleIncomingVideoFrame(frame);
}
/// <summary>
/// Fulfill a pending Media Foundation video sample request with an incoming
/// video frame packet, short-circuiting the internal frame queue.
/// </summary>
/// <param name="frame">The incoming video frame to consume.</param>
/// <remarks>
/// This must be called with the <see cref="_deferralLock"/> acquired.
/// </remarks>
private void MakeSampleForPendingRequest(I420AVideoFrame frame)
{
    // Calculate frame timestamp
    // NOTE(review): assumes a constant 30 FPS; the real framerate is unknown here.
    TimeSpan timestamp = TimeSpan.FromSeconds(_frameCount / 30.0);
    ++_frameCount;

    // Get a sample
    // FIXME - There are some wrong assumptions around strides here, see MemCpyStride
    uint pixelSize = frame.width * frame.height;
    uint byteSize = (pixelSize / 2 * 3); // I420 = 12 bits per pixel
    //Debug.Assert(byteSize == frame.Size);
    var sample = _streamSamplePool.Pop(byteSize, timestamp);
    sample.Duration = TimeSpan.FromSeconds(1.0 / 30.0);

    // Copy the frame data into the sample's buffer.
    // Unfortunately the C# interface to Windows.Storage.Streams.Buffer seems to
    // only offer a copy from a byte[] buffer, so need to copy first into a temporary
    // one (packed YUV) before copying into the sample's Buffer object.
    byte[] buffer = new byte[byteSize];
    frame.CopyTo(buffer);
    buffer.CopyTo(0, sample.Buffer, 0, (int)byteSize);

    // Assign the sample, complete the pending request, and clear the pending
    // state so subsequent frames go through the regular queue again.
    _request.Sample = sample;
    _request.ReportSampleProgress(100);
    _deferral.Complete();
    _request = null;
    _deferral = null;
}
/// <summary>
/// Callback invoked when a local I420A video frame has been captured. On the
/// first frame it lazily creates the local video source and media player on the
/// UI thread; every frame is then forwarded to the local video bridge.
/// </summary>
/// <param name="frame">The newly captured local video frame.</param>
private void Peer_LocalI420AFrameReady(I420AVideoFrame frame)
{
    lock (_localVideoLock)
    {
        if (!_localVideoPlaying)
        {
            _localVideoPlaying = true;
            // Capture the resolution into local variable useable from the lambda below
            uint width = frame.width;
            uint height = frame.height;
            // Defer UI-related work to the main UI thread
            RunOnMainThread(() =>
            {
                // Bridge the local video track with the local media player UI
                int framerate = 30; // for lack of an actual value
                _localVideoSource = CreateI420VideoStreamSource(
                    width, height, framerate);
                var localVideoPlayer = new MediaPlayer();
                localVideoPlayer.Source = MediaSource.CreateFromMediaStreamSource(
                    _localVideoSource);
                // NOTE(review): the LOCAL player is attached to the REMOTE player
                // element, while the local element line is commented out. This looks
                // like a debug/copy-paste leftover — confirm which XAML element the
                // local feed should actually render into.
                //localVideoPlayerElement.SetMediaPlayer(localVideoPlayer);
                remoteVideoPlayerElement.SetMediaPlayer(localVideoPlayer);
                localVideoPlayer.Play();
            });
        }
    }
    _localVideoBridge.HandleIncomingVideoFrame(frame);
}
/// <summary>
/// Callback invoked on each remote I420 video frame. Lazily starts the remote
/// media player when the first frame arrives, then hands every frame to the
/// remote video bridge.
/// </summary>
/// <param name="frame">The incoming remote video frame.</param>
private void Peer_RemoteI420FrameReady(I420AVideoFrame frame)
{
    // Lazily start the remote video media player when receiving
    // the first video frame from the remote peer. Currently there
    // is no exposed API to tell once connected that the remote peer
    // will be sending some video track.
    //< TODO - See if we can add an API to enumerate the remote channels,
    // or an On(Audio|Video|Data)Channel(Added|Removed) event?
    lock (_isRemoteVideoPlayingLock)
    {
        if (!_isRemoteVideoPlaying)
        {
            _isRemoteVideoPlaying = true;

            // Snapshot the resolution so the UI lambda does not touch the frame.
            uint frameWidth = frame.width;
            uint frameHeight = frame.height;

            RunOnMainThread(() =>
            {
                remoteVideoSource = CreateVideoStreamSource(frameWidth, frameHeight);
                remoteVideoPlayer.Source = MediaSource.CreateFromMediaStreamSource(remoteVideoSource);
                remoteVideoPlayer.Play();
            });
        }
    }
    remoteVideoBridge.HandleIncomingVideoFrame(frame);
}
/// <summary>
/// Construct event args by snapshotting the frame's dimensions and per-plane
/// strides into properties and copying its pixel data into <c>frameBuf</c>.
/// </summary>
/// <param name="frame">The source video frame to copy from.</param>
public WebRTCFrameEventArgs(I420AVideoFrame frame)
{
    Width = frame.width;
    Height = frame.height;
    StrideY = frame.strideY;
    StrideU = frame.strideU;
    StrideV = frame.strideV;
    StrideA = frame.strideA;
    // NOTE(review): frameBuf is declared elsewhere; assumes it is already
    // allocated large enough to hold this frame's data — TODO confirm.
    frame.CopyTo(frameBuf);
}
/// <summary>
/// Construct event args by copying the frame's pixel data into <c>frameBuf</c>.
/// </summary>
/// <param name="frame">The source video frame to copy from.</param>
/// <remarks>
/// The original implementation read width/height/strideY/strideU/strideV/strideA
/// into local variables that were never used (dead stores); those have been
/// removed. If the intent was to populate corresponding properties on this type,
/// that still needs to be wired up — review against callers.
/// </remarks>
public WebRTCFrameEventArgs(I420AVideoFrame frame)
{
    // NOTE(review): frameBuf is declared elsewhere; assumes it is already
    // allocated large enough to hold this frame's data — TODO confirm.
    frame.CopyTo(frameBuf);
}
/// <summary>
/// Callback on video frame received from the local video capture device,
/// for local rendering before (or in parallel of) being sent to the remote peer.
/// </summary>
/// <param name="frame">The newly captured video frame.</param>
private void VideoTrack_I420AFrameReady(I420AVideoFrame frame)
{
    // Lazily start the video media player when receiving the first video frame from
    // the video track. Currently there is no exposed API to tell once connected that
    // the remote peer will be sending some video track, so handle local and remote
    // video tracks the same for simplicity.
    bool needNewSource = false;
    uint width = frame.width;
    uint height = frame.height;
    lock (_mediaPlaybackLock)
    {
        if (!_isVideoPlaying)
        {
            // First frame ever: record the resolution and request a new source.
            _isVideoPlaying = true;
            _videoWidth = width;
            _videoHeight = height;
            needNewSource = true;
        }
        else if ((width != _videoWidth) || (height != _videoHeight))
        {
            // Resolution changed mid-stream: the stream source must be recreated
            // at the new size.
            _videoWidth = width;
            _videoHeight = height;
            needNewSource = true;
        }
    }
    if (needNewSource)
    {
        // We don't know the remote video framerate yet, so use a default.
        uint framerate = 30;
        // NOTE(review): the RunOnMainThread wrapper is commented out, so the
        // player/source teardown and re-creation below run on the frame callback
        // thread — confirm these MediaPlayer/MediaStreamSource calls are safe
        // off the UI thread.
        //RunOnMainThread(() =>
        {
            Logger.Log($"Creating new video source: {width}x{height}@{framerate}");
            _videoPlayer.Pause();
            //_videoPlayer.Source = null;
            // Tear down the previous source (if any) before building the new one.
            _videoStreamSource?.NotifyError(MediaStreamSourceErrorStatus.Other);
            _videoSource?.Dispose();
            _videoStreamSource = CreateVideoStreamSource(width, height, framerate);
            _videoSource = MediaSource.CreateFromMediaStreamSource(_videoStreamSource);
            _videoPlayer.Source = _videoSource;
        }//);
        // Property-change notifications must still be raised on the main thread.
        ThreadHelper.RunOnMainThread(() =>
        {
            RaisePropertyChanged("FrameWidth");
            RaisePropertyChanged("FrameHeight");
        });
    }
    _videoBridge.HandleIncomingVideoFrame(frame);
}
/// <summary>
/// Handle an incoming raw video frame by either enqueuing it for serving
/// a later request, or immediately serving a pending request.
/// </summary>
/// <param name="frame">The incoming video frame</param>
public void HandleIncomingVideoFrame(I420AVideoFrame frame)
{
    // Serve any pending sample request right away, while still holding the
    // deferral lock (required by MakeSampleForPendingRequest).
    bool servedPendingRequest = false;
    lock (_deferralLock)
    {
        if (_deferral != null)
        {
            //_frameQueue.TrackLateFrame();
            MakeSampleForPendingRequest(frame);
            servedPendingRequest = true;
        }
    }

    // Otherwise queue the frame for later pulling by the MediaFoundation framework.
    if (!servedPendingRequest)
    {
        _frameQueue.Enqueue(frame);
    }
}
/// <summary>
/// Native interop callback for remote I420A frames: wraps the raw plane pointers
/// and strides into an <c>I420AVideoFrame</c> and dispatches it to the managed
/// <c>PeerConnection</c> wrapper identified by <paramref name="userData"/>.
/// </summary>
public static void I420RemoteVideoFrameCallback(IntPtr userData,
    IntPtr dataY, IntPtr dataU, IntPtr dataV, IntPtr dataA,
    int strideY, int strideU, int strideV, int strideA,
    int width, int height)
{
    // Recover the managed wrapper from the opaque native handle.
    var peerWrapper = Utils.ToWrapper<PeerConnection>(userData);

    // Repackage the raw native arguments into the managed frame struct.
    var videoFrame = new I420AVideoFrame
    {
        width = (uint)width,
        height = (uint)height,
        dataY = dataY,
        dataU = dataU,
        dataV = dataV,
        dataA = dataA,
        strideY = strideY,
        strideU = strideU,
        strideV = strideV,
        strideA = strideA
    };
    peerWrapper.OnI420RemoteVideoFrameReady(videoFrame);
}
/// <summary>
/// Native interop callback for local-track I420A frames: wraps the raw plane
/// pointers and strides into an <c>I420AVideoFrame</c> and dispatches it to the
/// managed <c>LocalVideoTrack</c> wrapper identified by <paramref name="userData"/>.
/// </summary>
public static void I420AFrameCallback(IntPtr userData,
    IntPtr dataY, IntPtr dataU, IntPtr dataV, IntPtr dataA,
    int strideY, int strideU, int strideV, int strideA,
    int width, int height)
{
    // Recover the managed wrapper from the opaque native handle.
    var videoTrack = Utils.ToWrapper<LocalVideoTrack>(userData);

    // Repackage the raw native arguments into the managed frame struct.
    var videoFrame = new I420AVideoFrame
    {
        width = (uint)width,
        height = (uint)height,
        dataY = dataY,
        dataU = dataU,
        dataV = dataV,
        dataA = dataA,
        strideY = strideY,
        strideU = strideU,
        strideV = strideV,
        strideA = strideA
    };
    videoTrack.OnI420AFrameReady(videoFrame);
}
/// <summary>
/// Forwards a received client video frame to the call's video socket, throttled
/// to at most one frame every 33 ms (~30 FPS). The I420 frame is converted to
/// NV12 before sending; send failures are logged and swallowed (best-effort).
/// </summary>
/// <param name="frame">The received I420A video frame.</param>
public void OnClientVideoReceived(I420AVideoFrame frame)
{
    // Fixed: the original compared against DateTime.Now even though the field
    // stores a UTC timestamp ("...TimeUtc"). UtcNow matches the field's intent
    // and is immune to DST transitions where local time can jump backwards.
    if (DateTime.UtcNow > this.lastVideoSentToClientTimeUtc + TimeSpan.FromMilliseconds(33))
    {
        try
        {
            this.lastVideoSentToClientTimeUtc = DateTime.UtcNow;

            // I420 is 12 bits per pixel, hence width * height * 12 / 8 bytes.
            byte[] i420Frame = new byte[frame.width * frame.height * 12 / 8];
            frame.CopyTo(i420Frame);
            byte[] nv12Frame = VideoConverter.I420ToNV12(i420Frame);

            VideoFormat sendVideoFormat = VideoFormatUtil.GetSendVideoFormat((int)frame.height, (int)frame.width);
            var videoSendBuffer = new VideoSendBuffer(nv12Frame, (uint)nv12Frame.Length, sendVideoFormat);
            this.Call.GetLocalMediaSession().VideoSocket.Send(videoSendBuffer);
        }
        catch (Exception e)
        {
            // Best-effort send: log and continue so one bad frame does not
            // break the video pipeline.
            Console.WriteLine(e.Message);
        }
    }
}
/// <summary>
/// Internal helper callback on remote video frame ready. Enqueues the newly-available
/// video frame into the internal <see cref="VideoSource.FrameQueue"/> for later
/// consumption by a video renderer.
/// </summary>
/// <param name="frame">The newly-available video frame from the remote peer</param>
private void I420ARemoteVideoFrameReady(I420AVideoFrame frame) =>
    // FrameQueue is thread-safe and does not touch Unity objects, so there is
    // no need to marshal this call onto the main thread.
    _frameQueue.Enqueue(frame);
/// <summary>
/// Native interop callback for remote I420A frames; resolves the managed
/// <c>PeerConnection</c> wrapper from <paramref name="userData"/> and forwards
/// the frame to it.
/// </summary>
public static void I420ARemoteVideoFrameCallback(IntPtr userData, ref I420AVideoFrame frame)
{
    // Chain lookup and dispatch; the frame is forwarded by readonly reference (in).
    Utils.ToWrapper<PeerConnection>(userData).OnI420ARemoteVideoFrameReady(in frame);
}
/// <summary>
/// Callback invoked when an I420A video frame is ready; queues the frame for
/// later processing on the main thread.
/// </summary>
/// <param name="frame">The newly available video frame.</param>
protected void I420AVideoFrameReady(I420AVideoFrame frame) =>
    // This callback generally fires on a non-UI thread, but Unity object access
    // is only allowed on the main UI thread, so just enqueue and defer the work.
    _i420aFrameQueue.Enqueue(frame);
/// <summary>
/// Callback invoked when an I420A video frame is ready; enqueues the frame if
/// a frame queue is currently attached (no-op otherwise).
/// </summary>
/// <param name="frame">The newly available video frame.</param>
private void I420AVideoFrameReady(I420AVideoFrame frame) =>
    _i420aFrameQueue?.Enqueue(frame);
/// <summary>
/// Diagnostic helper: logs the dimensions of the video frame being written.
/// </summary>
/// <param name="frame">The frame whose width and height are logged.</param>
private static void WritingVideo(I420AVideoFrame frame)
{
    // Idiom: string interpolation instead of a composite format string;
    // produces exactly the same output as the original {0}/{1} format call.
    Console.WriteLine($"Writing video for async working: width: {frame.width}, height {frame.height}");
}
/// <summary>
/// Native interop callback for local-track I420A frames; resolves the managed
/// <c>LocalVideoTrack</c> wrapper from <paramref name="userData"/> and forwards
/// the frame to it.
/// </summary>
public static void I420AFrameCallback(IntPtr userData, I420AVideoFrame frame)
{
    // Chain lookup and dispatch without an intermediate local.
    Utils.ToWrapper<LocalVideoTrack>(userData).OnI420AFrameReady(frame);
}
/// <summary>
/// Callback invoked on each locally captured I420 frame; forwards it to the
/// local video bridge for rendering.
/// </summary>
/// <param name="frame">The newly captured local video frame.</param>
private void Peer_LocalI420FrameReady(I420AVideoFrame frame) =>
    localVideoBridge.HandleIncomingVideoFrame(frame);