/// <summary>
/// Ensures the target bitmap and its data buffer match the given block's dimensions,
/// recreating the bitmap when the size or stride has changed.
/// </summary>
/// <param name="block">The video block whose dimensions drive (re)allocation.</param>
/// <returns>True when a valid target bitmap data buffer is available; otherwise false.</returns>
private bool PrepareVideoFrameBuffer(VideoBlock block)
{
    // Figure out what we need to do
    var needsCreation = (TargetBitmapData == null || TargetBitmap == null) && MediaElement.HasVideo;
    var needsModification = MediaElement.HasVideo
        && TargetBitmap != null
        && TargetBitmapData != null
        && (TargetBitmapData.PixelWidth != block.PixelWidth ||
            TargetBitmapData.PixelHeight != block.PixelHeight ||
            TargetBitmapData.Stride != block.PictureBufferStride);

    var hasValidDimensions = block.PixelWidth > 0 && block.PixelHeight > 0;

    // Fast path: current buffers already match the block.
    if ((!needsCreation && !needsModification) && hasValidDimensions)
    {
        return(TargetBitmapData != null);
    }

    if (!hasValidDimensions)
    {
        // Invalid dimensions: drop the rendering surface entirely.
        TargetBitmap = null;
        return(false);
    }

    // Instantiate or update the target bitmap
    // NOTE(review): TargetBitmapData is not assigned here; presumably the TargetBitmap
    // property setter refreshes it — confirm, otherwise the return below reads a stale value.
    TargetBitmap = new WriteableBitmap(
        block.PixelWidth,
        block.PixelHeight,
        DpiX,
        DpiY,
        MediaPixelFormats[Constants.VideoPixelFormat],
        null);

    return(TargetBitmapData != null);
}
/// <summary>
/// Parses the semicolon-delimited attribute text into a video block control
/// and renders it to its markup representation.
/// </summary>
/// <param name="texts">The raw attribute strings; the first entry is parsed.</param>
/// <param name="attrib3">Additional parameters (unused here).</param>
/// <returns>The rendered markup for the video block.</returns>
public override string Process(string[] texts, Nparams attrib3)
{
    // At least one attribute string is required to build the control.
    if (texts.Length < 1)
        throw new InvalidOperationException();

    var parameters = Nparams.Parse(texts[0].Split(';'));

    // Build the control, defaulting the ID when none was supplied.
    var block = new VideoBlock
    {
        ID = parameters.Slice("id", "Video" + CoreEx.GetNextID()),
    };

    if (parameters.Exists("uri"))
        block.Uri = parameters.Slice<string>("uri");

    if (parameters.Exists("width"))
        block.Width = new Unit(parameters.Slice<string>("width"));

    if (parameters.Exists("height"))
        block.Height = new Unit(parameters.Slice<string>("height"));

    string markup;
    HtmlTextWriterEx.RenderControl(block, out markup);
    return markup;
}
/// <summary>
/// Copies the given block's pixel data into the memory-mapped back buffer and
/// returns a descriptor pointing at the written data.
/// </summary>
/// <param name="block">The video block to copy from.</param>
/// <returns>The bitmap data buffer for the back buffer, or null when disposed.</returns>
public unsafe BitmapDataBuffer Write(VideoBlock block)
{
    lock (SyncLock)
    {
        if (IsDisposed)
        {
            return(null);
        }

        // (Re)allocate the memory-mapped buffers to fit this block if needed.
        EnsureBuffers(block);

        // Compute a safe number of bytes to copy
        // At this point, it is assumed the strides are equal
        var bufferLength = Math.Min(block.BufferLength, BackBufferView.Capacity);
        var scan0 = BackBufferView.SafeMemoryMappedViewHandle.DangerousGetHandle();

        // Copy the block data into the back buffer of the target bitmap.
        Buffer.MemoryCopy(
            block.Buffer.ToPointer(),
            scan0.ToPointer(),
            bufferLength,
            bufferLength);

        // Rebuild the descriptor only when the underlying pointer changed.
        if (BitmapData == null || BitmapData.Scan0 != scan0)
        {
            BitmapData = new BitmapDataBuffer(
                scan0,
                block.PictureBufferStride,
                block.PixelWidth,
                block.PixelHeight,
                Parent.DpiX,
                Parent.DpiY);
        }

        BackBufferView.Flush();
        return(BitmapData);
    }
}
/// <summary>
/// Raises the rendering event (when configured for the GUI thread) and commits
/// the updated bitmap region to the UI; must run on the GUI thread since it
/// unlocks the target bitmap.
/// </summary>
/// <param name="block">The rendered video block.</param>
/// <param name="bitmapData">The bitmap data describing the dirty region.</param>
/// <param name="clockPosition">The clock position for this frame.</param>
private void RenderTarget(VideoBlock block, BitmapDataBuffer bitmapData, TimeSpan clockPosition)
{
    try
    {
        if (RaiseVideoEventOnGui)
        {
            MediaElement.RaiseRenderingVideoEvent(block, bitmapData, clockPosition);
        }

        // Signal an update on the rendering surface
        TargetBitmap.AddDirtyRect(bitmapData.UpdateRect);
        TargetBitmap.Unlock();
        ApplyScaleTransform(block);
    }
    catch (Exception ex)
    {
        // Rendering failures are logged rather than propagated.
        MediaElement?.MediaCore?.Log(
            MediaLogMessageType.Error,
            $"{nameof(VideoRenderer)} {ex.GetType()}: {ex.Message}. Stack Trace:\r\n{ex.StackTrace}");
    }
    finally
    {
        // Always clear the in-progress flag so rendering can resume.
        IsRenderingInProgress.Value = false;
    }
}
/// <summary>
/// Renders the packets. The previous closed-caption packet accumulation logic
/// was fully commented out; this method is intentionally a no-op and is kept
/// only for interface compatibility with its callers.
/// </summary>
/// <param name="currentBlock">The current block.</param>
/// <param name="mediaCore">The media core.</param>
public void RenderPacket(VideoBlock currentBlock, MediaEngine mediaCore)
{
    // Intentionally empty: the disabled closed-caption collection code that
    // previously lived here was removed as dead (commented-out) code.
}
/// <summary>
/// Initializes the target bitmap. Pass a null block to initialize with the default video properties.
/// </summary>
/// <param name="block">The block.</param>
private void InitializeTargetBitmap(VideoBlock block)
{
    Runner.UIInvoke(DispatcherPriority.Normal, () =>
    {
        var visual = PresentationSource.FromVisual(MediaElement);

        // NOTE: '*' binds tighter than '??', so these read as (96.0 * M??) ?? 96.0.
        // Multiplying by a null nullable-double yields null, which then falls back
        // to 96.0 — i.e. the default DPI applies whenever the visual chain is null.
        var dpiX = 96.0 * visual?.CompositionTarget?.TransformToDevice.M11 ?? 96.0;
        var dpiY = 96.0 * visual?.CompositionTarget?.TransformToDevice.M22 ?? 96.0;

        // Fall back to the element's natural video dimensions when no block is given.
        var pixelWidth = block?.PixelWidth ?? MediaElement.NaturalVideoWidth;
        var pixelHeight = block?.PixelHeight ?? MediaElement.NaturalVideoHeight;

        if (MediaElement.HasVideo && pixelWidth > 0 && pixelHeight > 0)
        {
            TargetBitmap = new WriteableBitmap(
                block?.PixelWidth ?? MediaElement.NaturalVideoWidth,
                block?.PixelHeight ?? MediaElement.NaturalVideoHeight,
                dpiX,
                dpiY,
                PixelFormats.Bgr24,
                null);
        }
        else
        {
            // No video or invalid dimensions: clear the rendering surface.
            TargetBitmap = null;
        }

        MediaElement.ViewBox.Source = TargetBitmap;
    });
}
/// <summary>
/// Verifies that a freshly constructed video block with no media
/// falls back to the default title.
/// </summary>
public void VideoBlockNoTitle()
{
    // Arrange
    var sut = new VideoBlock();

    // Act
    var result = sut.GetTitle();

    // Assert
    Assert.Equal("No video selected", result);
}
/// <summary>
/// Applies a scale transform to the video view so the rendered frame honors the
/// block's display aspect ratio; resets the scale when the aspect is square.
/// </summary>
/// <param name="b">The block providing the aspect dimensions.</param>
private void ApplyScaleTransform(VideoBlock b)
{
    var scaleTransform = MediaElement.VideoView.LayoutTransform as ScaleTransform;

    // Process Aspect Ratio according to block.
    if (b.AspectWidth != b.AspectHeight)
    {
        // Scale up the smaller dimension so the larger one stays at 1.0.
        var scaleX = b.AspectWidth > b.AspectHeight ? (double)b.AspectWidth / b.AspectHeight : 1d;
        var scaleY = b.AspectHeight > b.AspectWidth ? (double)b.AspectHeight / b.AspectWidth : 1d;

        if (scaleTransform == null)
        {
            scaleTransform = new ScaleTransform(scaleX, scaleY);
            MediaElement.VideoView.LayoutTransform = scaleTransform;
        }

        // FIX: compare with a tolerance instead of exact floating-point equality
        // (consistent with ApplyLayoutTransforms elsewhere in this renderer).
        if (Math.Abs(scaleTransform.ScaleX - scaleX) > double.Epsilon ||
            Math.Abs(scaleTransform.ScaleY - scaleY) > double.Epsilon)
        {
            scaleTransform.ScaleX = scaleX;
            scaleTransform.ScaleY = scaleY;
        }
    }
    else
    {
        // Square aspect: undo any previously applied scaling.
        if (scaleTransform != null &&
            (Math.Abs(scaleTransform.ScaleX - 1d) > double.Epsilon ||
             Math.Abs(scaleTransform.ScaleY - 1d) > double.Epsilon))
        {
            scaleTransform.ScaleX = 1d;
            scaleTransform.ScaleY = 1d;
        }
    }
}
/// <summary>
/// Timer tick handler: dequeues the next pending video block, renders it,
/// and returns the block to the pool for reuse.
/// </summary>
/// <param name="sender">The timer raising the event.</param>
/// <param name="e">The event arguments.</param>
private void _timer_Tick(object sender, EventArgs e)
{
    // BUGFIX: guard on TryDequeue's return value. Checking Count > 0 first is
    // racy with other consumers — a failed dequeue previously led to rendering
    // and pooling a null block.
    if (PlaybackVideoBlock.TryDequeue(out VideoBlock videoBlock))
    {
        Render(videoBlock);
        PoolVideoBlock.Enqueue(videoBlock);
    }
}
/// <summary>
/// Applies scale (pixel aspect ratio) and rotation layout transforms to the
/// video view according to the given block and the engine's rotation state.
/// </summary>
/// <param name="b">The block providing pixel aspect dimensions.</param>
private void ApplyLayoutTransforms(VideoBlock b)
{
    if (MediaElement == null || MediaElement.VideoView == null)
    {
        return;
    }

    var layoutTransforms = MediaElement.VideoView.LayoutTransform as TransformGroup;
    ScaleTransform scaleTransform = null;
    RotateTransform rotateTransform = null;

    if (layoutTransforms == null)
    {
        // First use: build the scale + rotate group and install it on the view.
        layoutTransforms = new TransformGroup();
        scaleTransform = new ScaleTransform(1, 1);
        rotateTransform = new RotateTransform(0, 0.5, 0.5);
        layoutTransforms.Children.Add(scaleTransform);
        layoutTransforms.Children.Add(rotateTransform);
        MediaElement.VideoView.LayoutTransform = layoutTransforms;
    }
    else
    {
        scaleTransform = layoutTransforms.Children[0] as ScaleTransform;
        rotateTransform = layoutTransforms.Children[1] as RotateTransform;
    }

    // BUGFIX: an externally supplied TransformGroup may not contain the expected
    // children; the unguarded dereferences below previously threw a
    // NullReferenceException in that case.
    if (scaleTransform == null || rotateTransform == null)
    {
        return;
    }

    // Process Aspect Ratio according to block.
    if (b.PixelAspectWidth != b.PixelAspectHeight)
    {
        var scaleX = b.PixelAspectWidth > b.PixelAspectHeight ?
            Convert.ToDouble(b.PixelAspectWidth) / Convert.ToDouble(b.PixelAspectHeight) : 1d;
        var scaleY = b.PixelAspectHeight > b.PixelAspectWidth ?
            Convert.ToDouble(b.PixelAspectHeight) / Convert.ToDouble(b.PixelAspectWidth) : 1d;

        if (scaleTransform.ScaleX != scaleX || scaleTransform.ScaleY != scaleY)
        {
            scaleTransform.ScaleX = scaleX;
            scaleTransform.ScaleY = scaleY;
        }
    }
    else
    {
        if (scaleTransform.ScaleX != 1d || scaleTransform.ScaleY != 1d)
        {
            scaleTransform.ScaleX = 1d;
            scaleTransform.ScaleY = 1d;
        }
    }

    // Process Rotation
    if (MediaCore.State.VideoRotation != rotateTransform.Angle)
    {
        rotateTransform.Angle = MediaCore.State.VideoRotation;
    }
}
/// <summary>
/// Renders closed captions for the current clock position and refreshes the
/// layout transforms for the given block; failures are logged, not thrown.
/// </summary>
/// <param name="block">The block driving the layout transforms.</param>
/// <param name="clockPosition">The clock position for caption rendering.</param>
private void UpdateLayout(VideoBlock block, TimeSpan clockPosition)
{
    try
    {
        // Captions first, then geometry.
        var captionsView = MediaElement?.CaptionsView;
        captionsView?.Render(MediaElement.ClosedCaptionsChannel, clockPosition);
        ApplyLayoutTransforms(block);
    }
    catch (Exception ex)
    {
        this.LogError(Aspects.VideoRenderer, $"{nameof(VideoRenderer)}.{nameof(Render)} layout/CC failed.", ex);
    }
}
/// <summary>
/// Initializes the target bitmap if not available and locks it for loading the back-buffer.
/// </summary>
/// <param name="block">The block.</param>
/// <param name="priority">The priority.</param>
/// <returns>
/// The locking result. Returns a null pointer on back buffer for invalid.
/// </returns>
private BitmapDataBuffer LockTarget(VideoBlock block, DispatcherPriority priority)
{
    // Result will be set on the GUI thread
    BitmapDataBuffer result = null;

    WindowsPlatform.Instance.Gui?.Invoke(priority, () =>
    {
        // Skip the locking if scrubbing is not enabled
        if (MediaElement.ScrubbingEnabled == false && MediaElement.IsPlaying == false)
        {
            return;
        }

        // Figure out what we need to do
        var needsCreation = TargetBitmap == null && MediaElement.HasVideo;

        // BUGFIX: guard TargetBitmap against null before dereferencing it.
        // Previously 'needsCreation == false' let TargetBitmap == null through
        // when HasVideo was false, throwing a NullReferenceException here.
        var needsModification = MediaElement.HasVideo && TargetBitmap != null &&
            (TargetBitmap.PixelWidth != block.PixelWidth ||
             TargetBitmap.PixelHeight != block.PixelHeight);

        var hasValidDimensions = block.PixelWidth > 0 && block.PixelHeight > 0;

        // Instantiate or update the target bitmap
        if ((needsCreation || needsModification) && hasValidDimensions)
        {
            TargetBitmap = new WriteableBitmap(
                block.PixelWidth,
                block.PixelHeight,
                DpiX,
                DpiY,
                MediaPixelFormats[Constants.Video.VideoPixelFormat],
                null);
        }
        else if (hasValidDimensions == false)
        {
            TargetBitmap = null;
        }

        // Update the target ViewBox image if not already set
        if (MediaElement.VideoView.Source != TargetBitmap)
        {
            MediaElement.VideoView.Source = TargetBitmap;
        }

        // Don't set the result
        if (TargetBitmap == null)
        {
            return;
        }

        // Lock the back-buffer and create a pointer to it
        TargetBitmap.Lock();
        result = BitmapDataBuffer.FromWriteableBitmap(TargetBitmap);

        if (LoadBlockBufferOnGui)
        {
            LoadTarget(result, block);
        }
    });

    return(result);
}
/// <summary>
/// Creates a sample set of article blocks (text, image, gallery, video) for the
/// given article, with sequential sort indexes 0 through 5.
/// </summary>
/// <param name="articleId">The article identifier embedded into the sample texts.</param>
/// <returns>The set of created blocks.</returns>
private HashSet<ArticleBlock> CreateBlocks(long articleId)
{
    HashSet<ArticleBlock> result = new HashSet<ArticleBlock>();

    TextBlock textBlock = new TextBlock();
    textBlock.Text = "Some Text for " + articleId;
    textBlock.SortIndex = 0;
    result.Add(textBlock);

    ImageBlock imageBlock = new ImageBlock();
    imageBlock.Image = CreateImage(1L);
    // BUGFIX: was 'textBlock.SortIndex = 1', leaving the image block's index unset.
    imageBlock.SortIndex = 1;
    result.Add(imageBlock);

    TextBlock secondTextBlock = new TextBlock();
    secondTextBlock.Text = "Second Text for " + articleId;
    secondTextBlock.SortIndex = 2;
    result.Add(secondTextBlock);

    GalleryBlock galleryBlock = new GalleryBlock();
    // BUGFIX: was 'secondTextBlock.SortIndex = 3', leaving the gallery block's index unset.
    galleryBlock.SortIndex = 3;
    List<Image> galleryImages = new List<Image>();
    galleryImages.Add(CreateImage(2L));
    galleryImages.Add(CreateImage(3L));
    galleryBlock.Images = galleryImages;
    result.Add(galleryBlock);

    TextBlock thirdTextBlock = new TextBlock();
    thirdTextBlock.Text = "Third Text for " + articleId;
    thirdTextBlock.SortIndex = 4;
    result.Add(thirdTextBlock);

    VideoBlock videoBlock = new VideoBlock();
    videoBlock.Type = VideoBlockType.YOUTUBE;
    videoBlock.Url = "https://youtu.be/myvideo";
    videoBlock.SortIndex = 5;
    result.Add(videoBlock);

    return(result);
}
/// <summary>
/// Loads the target data buffer with the block's pixel data, holding the
/// block's reader lock for the duration of the copy.
/// </summary>
/// <param name="target">The target buffer to write into.</param>
/// <param name="source">The source video block; ignored when null or unlockable.</param>
private void LoadTargetBitmapBuffer(BitmapDataBuffer target, VideoBlock source)
{
    if (source == null)
        return;

    if (!source.TryAcquireReaderLock(out var readLock))
        return;

    using (readLock)
    {
        // Copy no more bytes than the smaller of the two buffers
        // (the strides are assumed to match at this point).
        var byteCount = Convert.ToUInt32(Math.Min(source.BufferLength, target.BufferLength));
        WindowsNativeMethods.Instance.CopyMemory(target.Scan0, source.Buffer, byteCount);
    }
}
/// <summary>
/// Renders the target bitmap.
/// </summary>
/// <param name="block">The block.</param>
/// <param name="bitmapData">The bitmap data.</param>
/// <param name="clockPosition">The clock position.</param>
private void RenderTargetBitmap(VideoBlock block, BitmapDataBuffer bitmapData, TimeSpan clockPosition)
{
    try
    {
        // Signal an update on the rendering surface
        TargetBitmap?.AddDirtyRect(bitmapData.UpdateRect);
        TargetBitmap?.Unlock();
    }
    catch (Exception ex)
    {
        // Rendering failures are logged rather than propagated to the caller.
        MediaElement?.MediaCore?.Log(
            MediaLogMessageType.Error,
            $"{nameof(VideoRenderer)} {ex.GetType()}: {ex.Message}. Stack Trace:\r\n{ex.StackTrace}");
    }
}
/// <summary>
/// Parses the semicolon-delimited attribute text into a video block control
/// and renders it to markup.
/// </summary>
/// <param name="texts">The raw attribute strings; the first entry is parsed.</param>
/// <param name="attrib3">Additional parameters (unused here).</param>
/// <returns>The rendered markup.</returns>
public override string Process(string[] texts, Nparams attrib3)
{
    if (texts.Length < 1)
    {
        throw new InvalidOperationException();
    }

    var parsed = Nparams.Parse(texts[0].Split(';'));

    // Default the control ID when one is not provided.
    var control = new VideoBlock
    {
        ID = parsed.Slice("id", "Video" + CoreEx.GetNextID()),
    };

    if (parsed.Exists("uri"))
    {
        control.Uri = parsed.Slice<string>("uri");
    }

    if (parsed.Exists("width"))
    {
        control.Width = new Unit(parsed.Slice<string>("width"));
    }

    if (parsed.Exists("height"))
    {
        control.Height = new Unit(parsed.Slice<string>("height"));
    }

    HtmlTextWriterEx.RenderControl(control, out string rendered);
    return rendered;
}
/// <summary>
/// Ensures the memory-mapped back buffer matches the block's buffer length,
/// recreating the file and view when the capacity differs, and adopts the
/// block's dimensions and stride.
/// </summary>
/// <param name="block">The block whose buffer size and dimensions are adopted.</param>
private void EnsureBuffers(VideoBlock block)
{
    if (BackBufferView == null || BackBufferView.Capacity != block.BufferLength)
    {
        // Dispose the view before the file that backs it.
        BackBufferView?.Dispose();
        BackBufferFile?.Dispose();

        BackBufferFile = MemoryMappedFile.CreateNew(null, block.BufferLength);
        BackBufferView = BackBufferFile.CreateViewAccessor();

        // Signal consumers that the front image must be rebuilt.
        NeedsNewImage = true;
    }

    Width = block.PixelWidth;
    Height = block.PixelHeight;
    Stride = block.PictureBufferStride;
}
/// <summary>
/// Verifies that a video block whose media field carries a filename
/// uses that filename as its title.
/// </summary>
public void VideoBlockHasTitle()
{
    // Arrange
    var sut = new VideoBlock
    {
        Body = new Extend.Fields.VideoField
        {
            Media = new Models.Media
            {
                Filename = "Lorem_ipsum.mp4"
            }
        }
    };

    // Act
    var result = sut.GetTitle();

    // Assert
    Assert.Equal("Lorem_ipsum.mp4", result);
}
/// <summary>
/// Materializes the given media frame into a pooled video block and queues
/// it for playback; the block returns to the pool when materialization fails.
/// </summary>
/// <param name="mediaFrame">The decoded media frame to convert.</param>
public virtual void AddVideoFrame(MediaFrame mediaFrame)
{
    if (PoolVideoBlock.Count == 0)
    {
        return;
    }

    if (!PoolVideoBlock.TryDequeue(out VideoBlock target))
    {
        return;
    }

    if (MaterializeVideoFrame(mediaFrame, ref target))
    {
        PlaybackVideoBlock.Enqueue(target);
    }
    else
    {
        // BUGFIX: return the block to the pool on failure; it was previously
        // dropped, which slowly exhausted the pool until playback starved.
        PoolVideoBlock.Enqueue(target);
    }
}
/// <summary>
/// Raises the RenderingVideo event with the block's stream, caption, timing
/// and picture metadata.
/// </summary>
/// <param name="videoBlock">The block being rendered.</param>
/// <param name="bitmap">The bitmap data carrying the rendered pixels.</param>
/// <param name="clock">The clock position of the frame.</param>
internal void RaiseRenderingVideoEvent(VideoBlock videoBlock, BitmapDataBuffer bitmap, TimeSpan clock)
{
    // Snapshot the delegate so an unsubscribe between the null check and the
    // invocation cannot slip through (standard thread-safe event pattern).
    var handler = RenderingVideo;
    if (handler == null)
    {
        return;
    }

    var e = new RenderingVideoEventArgs(
        bitmap,
        MediaCore.MediaInfo.Streams[videoBlock.StreamIndex],
        videoBlock.ClosedCaptions,
        videoBlock.SmtpeTimecode,
        videoBlock.DisplayPictureNumber,
        videoBlock.StartTime,
        videoBlock.Duration,
        clock);

    handler.Invoke(this, e);
}
/// <summary>
/// Loads that target data buffer with block data, copying in bulk when the
/// strides match or row by row (in parallel) when they differ.
/// </summary>
/// <param name="target">The target.</param>
/// <param name="source">The source.</param>
private void LoadTargetBitmapBuffer(BitmapDataBuffer target, VideoBlock source)
{
    // Copy the block data into the back buffer of the target bitmap.
    if (target.Stride == source.BufferStride)
    {
        // Fast path: identical strides allow a single bulk copy.
        WindowsNativeMethods.Instance.CopyMemory(target.Scan0, source.Buffer, Convert.ToUInt32(source.BufferLength));
    }
    else
    {
        var format = MediaPixelFormats[Constants.Video.VideoPixelFormat];

        // NOTE(review): bytesPerPixel is computed but never used below.
        var bytesPerPixel = format.BitsPerPixel / 8;

        // Per-row copy length is capped at the narrower of the two strides.
        var copyLength = Convert.ToUInt32(Math.Min(target.Stride, source.BufferStride));
        Parallel.For(0, source.PixelHeight, (i) =>
        {
            var sourceOffset = source.Buffer + (i * source.BufferStride);
            var targetOffset = target.Scan0 + (i * target.Stride);
            WindowsNativeMethods.Instance.CopyMemory(targetOffset, sourceOffset, copyLength);
        });
    }
}
/// <summary>
/// Loads that target data buffer with block data
/// </summary>
/// <param name="target">The target.</param>
/// <param name="source">The source.</param>
private unsafe void LoadTargetBitmapBuffer(BitmapDataBuffer target, VideoBlock source)
{
    // Nothing to do when there is no source or its reader lock is unavailable.
    if (source == null || !source.TryAcquireReaderLock(out var readLock))
    {
        return;
    }

    using (readLock)
    {
        // Compute a safe number of bytes to copy
        // At this point, it is assumed the strides are equal
        var bufferLength = Math.Min(source.BufferLength, target.BufferLength);

        // Copy the block data into the back buffer of the target bitmap.
        Buffer.MemoryCopy(
            source.Buffer.ToPointer(),
            target.Scan0.ToPointer(),
            bufferLength,
            bufferLength);
    }
}
/// <summary>
/// Renders the block through SDL: reinitializes the texture when the frame
/// dimensions or the target window handle change, then uploads and presents.
/// </summary>
/// <param name="block">The video block to present.</param>
private void Render(VideoBlock block)
{
    lock (_sdlLocker)
    {
        // Reinitialize SDL when not yet set up, the frame size changed,
        // or rendering was redirected to a different window handle.
        if (_thisSdlInit == false || block.PixelWidth != _rect.w || block.PixelHeight != _rect.h || _newVideoHandle != _curVideoHandle)
        {
            _rect.w = block.PixelWidth;
            _rect.h = block.PixelHeight;
            InitThisSDL(_newVideoHandle, block.PixelWidth, block.PixelHeight);
        }

        unsafe
        {
            if (block.BufferLength > 0)
            {
                // Upload the frame, copy it to the renderer, and flip the buffers.
                SDL2.SDL.SDL_UpdateTexture(_texture, ref _rect, block.Buffer, block.PictureBufferStride);
                SDL2.SDL.SDL_RenderCopy(_rendererPtr, _texture, IntPtr.Zero, IntPtr.Zero);
                SDL2.SDL.SDL_RenderPresent(_rendererPtr);
            }
        }
    }
}
/// <summary>
/// Copies the block's pixel data into the target bitmap's back buffer and
/// raises the rendering event while the block is read-locked.
/// </summary>
/// <param name="block">The video block to copy from.</param>
/// <param name="clockPosition">The clock position associated with this frame.</param>
private unsafe void WriteVideoFrameBuffer(VideoBlock block, TimeSpan clockPosition)
{
    var bitmap = TargetBitmap;
    var target = TargetBitmapData;

    // Bail out unless both surfaces exist and the block can be read-locked.
    if (bitmap == null || target == null || block == null || block.IsDisposed || !block.TryAcquireReaderLock(out var readLock))
    {
        return;
    }

    // Lock the video block for reading
    try
    {
        // Lock the bitmap
        bitmap.Lock();

        // Compute a safe number of bytes to copy
        // At this point, it is assumed the strides are equal
        var bufferLength = Math.Min(block.BufferLength, target.BufferLength);

        // Copy the block data into the back buffer of the target bitmap.
        Buffer.MemoryCopy(
            block.Buffer.ToPointer(),
            target.Scan0.ToPointer(),
            bufferLength,
            bufferLength);

        // with the locked video block, raise the rendering video event.
        MediaElement?.RaiseRenderingVideoEvent(block, target, clockPosition);

        // Mark the region as dirty so it's updated on the UI
        bitmap.AddDirtyRect(target.UpdateRect);
    }
    finally
    {
        // Release the reader lock and the bitmap's back buffer in all cases.
        readLock.Dispose();
        bitmap.Unlock();
    }
}
/// <summary>
/// Copies the block's pixels into the target bitmap's back buffer, raising the
/// rendering event while the block is read-locked, then commits the dirty region.
/// </summary>
/// <param name="block">The video block to copy from.</param>
/// <param name="clockPosition">The clock position associated with this frame.</param>
private unsafe void WriteVideoFrameBuffer(VideoBlock block, TimeSpan clockPosition)
{
    var bitmap = TargetBitmap;
    var target = TargetBitmapData;

    // Bail out unless both surfaces exist and the block can be read-locked.
    if (bitmap == null || target == null || block == null || block.IsDisposed || !block.TryAcquireReaderLock(out var readLock))
    {
        return;
    }

    // Lock the video block for reading
    using (readLock)
    {
        // Try a timed lock first; on timeout, log and fall back to a
        // blocking lock so the frame is never dropped.
        if (!bitmap.TryLock(BitmapLockTimeout))
        {
            this.LogDebug(Aspects.VideoRenderer, $"{nameof(VideoRenderer)} bitmap lock timed out at {clockPosition}");
            bitmap.Lock();
        }

        // Compute a safe number of bytes to copy
        // At this point, it is assumed the strides are equal
        var bufferLength = Math.Min(block.BufferLength, target.BufferLength);

        // Copy the block data into the back buffer of the target bitmap.
        Buffer.MemoryCopy(
            block.Buffer.ToPointer(),
            target.Scan0.ToPointer(),
            bufferLength,
            bufferLength);

        // with the locked video block, raise the rendering video event.
        MediaElement?.RaiseRenderingVideoEvent(block, TargetBitmapData, clockPosition);
    }

    // Commit the updated region and release the back buffer.
    bitmap.AddDirtyRect(TargetBitmapData.UpdateRect);
    bitmap.Unlock();
}
/// <summary>
/// Initializes the target bitmap if not available and locks it for loading the back-buffer.
/// This method needs to be called from the GUI thread.
/// </summary>
/// <param name="block">The block.</param>
/// <returns>
/// The locking result. Returns a null pointer on back buffer for invalid.
/// </returns>
private BitmapDataBuffer LockTargetBitmap(VideoBlock block)
{
    // TODO: Evaluate if we need to skip the locking if scrubbing is not enabled
    // Example: if (!MediaElement.ScrubbingEnabled && (!MediaElement.IsPlaying || MediaElement.IsSeeking)) return result

    // Figure out what we need to do
    var needsCreation = TargetBitmap == null && MediaElement.HasVideo;
    var needsModification = MediaElement.HasVideo
        && TargetBitmap != null
        && (TargetBitmap.PixelWidth != block.PixelWidth || TargetBitmap.PixelHeight != block.PixelHeight);
    var hasValidDimensions = block.PixelWidth > 0 && block.PixelHeight > 0;

    // Instantiate or update the target bitmap
    if ((needsCreation || needsModification) && hasValidDimensions)
    {
        TargetBitmap = new WriteableBitmap(
            block.PixelWidth, block.PixelHeight, DpiX, DpiY, MediaPixelFormats[Constants.Video.VideoPixelFormat], null);
    }
    else if (hasValidDimensions == false)
    {
        // Invalid dimensions: clear the rendering surface.
        TargetBitmap = null;
    }

    // Update the target ViewBox image if not already set
    if (MediaElement.VideoView.Source != TargetBitmap)
    {
        MediaElement.VideoView.Source = TargetBitmap;
    }

    // Lock the back-buffer and create a pointer to it
    TargetBitmap?.Lock();

    // Return the appropriate buffer result
    return(TargetBitmap != null ? new BitmapDataBuffer(TargetBitmap) : null);
}
/// <summary>
/// Applies scale (pixel aspect ratio) and rotation layout transforms to the
/// video view according to the given block and the engine's rotation state.
/// </summary>
/// <param name="b">The block providing pixel aspect dimensions.</param>
private void ApplyLayoutTransforms(VideoBlock b)
{
    if (MediaElement?.VideoView == null)
    {
        return;
    }

    ScaleTransform scaleTransform;
    RotateTransform rotateTransform;

    if (MediaElement.VideoView.LayoutTransform is TransformGroup layoutTransforms)
    {
        // Expect the group in the order we create it: [scale, rotate].
        scaleTransform = layoutTransforms.Children[0] as ScaleTransform;
        rotateTransform = layoutTransforms.Children[1] as RotateTransform;
    }
    else
    {
        // First use: build the scale + rotate group and install it on the view.
        layoutTransforms = new TransformGroup();
        scaleTransform = new ScaleTransform(1, 1);
        rotateTransform = new RotateTransform(0, 0.5, 0.5);
        layoutTransforms.Children.Add(scaleTransform);
        layoutTransforms.Children.Add(rotateTransform);
        MediaElement.VideoView.LayoutTransform = layoutTransforms;
    }

    // return if no proper transforms were found
    if (scaleTransform == null || rotateTransform == null)
    {
        return;
    }

    // Process Aspect Ratio according to block.
    if (b.PixelAspectWidth != b.PixelAspectHeight)
    {
        var scaleX = b.PixelAspectWidth > b.PixelAspectHeight ?
            Convert.ToDouble(b.PixelAspectWidth) / Convert.ToDouble(b.PixelAspectHeight) : 1d;
        var scaleY = b.PixelAspectHeight > b.PixelAspectWidth ?
            Convert.ToDouble(b.PixelAspectHeight) / Convert.ToDouble(b.PixelAspectWidth) : 1d;

        // Tolerance-based comparison avoids redundant WPF property churn.
        if (Math.Abs(scaleTransform.ScaleX - scaleX) > double.Epsilon ||
            Math.Abs(scaleTransform.ScaleY - scaleY) > double.Epsilon)
        {
            scaleTransform.ScaleX = scaleX;
            scaleTransform.ScaleY = scaleY;
        }
    }
    else
    {
        // Square pixels: reset any previously applied scaling.
        if (Math.Abs(scaleTransform.ScaleX - 1d) > double.Epsilon ||
            Math.Abs(scaleTransform.ScaleY - 1d) > double.Epsilon)
        {
            scaleTransform.ScaleX = 1d;
            scaleTransform.ScaleY = 1d;
        }
    }

    // Process Rotation
    if (Math.Abs(MediaCore.State.VideoRotation - rotateTransform.Angle) > double.Epsilon)
    {
        rotateTransform.Angle = MediaCore.State.VideoRotation;
    }
}
/// <summary>
/// Sends the packets to the CC packet buffer for state management.
/// </summary>
/// <param name="currentBlock">The current block.</param>
/// <param name="mediaCore">The media core.</param>
public void SendPackets(VideoBlock currentBlock, MediaEngine mediaCore) =>
    Buffer.Write(currentBlock, mediaCore);
/// <summary>
/// Sets the main video block for the page being built.
/// </summary>
/// <param name="v">The video block to use.</param>
/// <returns>This builder, enabling call chaining.</returns>
public StandardPageBuilder WithMainVideo(VideoBlock v)
{
    _mainVideo = v;
    return this;
}
/// <summary>
/// Handles a decoded video frame: updates throughput statistics and, unless
/// benchmarking, copies the frame into a shared bitmap and optionally saves a
/// PNG snapshot per whole second of media time.
/// </summary>
/// <param name="e">The decoded video block.</param>
private static void HandleVideoFrame(VideoBlock e)
{
    // Accumulate global throughput statistics for reporting.
    TotalBytes += (ulong)e.BufferLength;
    TotalDurationSeconds += e.Duration.TotalSeconds;
    PrintFrameInfo(e);

    // In benchmark mode only decode speed is measured; skip rendering entirely.
    if (IsBenchmarking)
    {
        return;
    }

    if (DecompressDispatcher == null)
    {
        return;
    }

    DecompressDispatcher.Invoke(() =>
    {
        // Lazily create the bitmap with the first frame's dimensions (96 DPI, BGR24).
        if (TargetBitmap == null)
        {
            TargetBitmap = new WriteableBitmap(e.PixelWidth, e.PixelHeight, 96, 96, PixelFormats.Bgr24, null);
        }

        TargetBitmap.Dispatcher.Invoke(() =>
        {
            TargetBitmap.Lock();

            // When strides differ, copy row by row using the source stride;
            // otherwise a single bulk copy suffices.
            if (TargetBitmap.BackBufferStride != e.BufferStride)
            {
                var sourceBase = e.Buffer;
                var targetBase = TargetBitmap.BackBuffer;

                for (var y = 0; y < TargetBitmap.PixelHeight; y++)
                {
                    var sourceAddress = sourceBase + (e.BufferStride * y);
                    var targetAddress = targetBase + (TargetBitmap.BackBufferStride * y);
                    Utils.CopyMemory(targetAddress, sourceAddress, (uint)e.BufferStride);
                }
            }
            else
            {
                Utils.CopyMemory(TargetBitmap.BackBuffer, e.Buffer, (uint)e.BufferLength);
            }

            TargetBitmap.AddDirtyRect(new Int32Rect(0, 0, e.PixelWidth, e.PixelHeight));
            TargetBitmap.Unlock();

            if (SaveSnapshots == false)
            {
                return;
            }

            // Persist one PNG per whole second of media time; skip existing files.
            var fileSequence = Math.Round(e.StartTime.TotalSeconds, 0);
            var outputFile = Path.Combine(OutputPath, $"{fileSequence:0000}.png");
            if (File.Exists(outputFile))
            {
                return;
            }

            var bitmapFrame = BitmapFrame.Create(TargetBitmap);
            using (var stream = File.OpenWrite(outputFile))
            {
                var bitmapEncoder = new PngBitmapEncoder();
                bitmapEncoder.Frames.Clear();
                bitmapEncoder.Frames.Add(bitmapFrame);
                bitmapEncoder.Save(stream);
            }
        });
    });
}
/// <summary>
/// Converts the given media frame into a BGRA video block using a cached
/// software scaler, allocating the block's picture buffer as needed.
/// </summary>
/// <param name="input">The source frame; must be a VideoFrame.</param>
/// <param name="output">The target block; created when null.</param>
/// <returns>True when the frame was scaled into the block; otherwise false.</returns>
public bool MaterializeVideoFrame(MediaFrame input, ref VideoBlock output)
{
    if (output == null)
    {
        output = new VideoBlock();
    }

    // NOTE(review): this guard rejects a wrong *type* yet throws
    // ArgumentNullException with a garbled message — ArgumentException would be
    // more accurate (left unchanged in case callers catch the current type).
    // Also, 'output is VideoBlock' is always true after the assignment above.
    if (input is VideoFrame == false || output is VideoBlock == false)
    {
        throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null");
    }

    var source = (VideoFrame)input;
    var target = (VideoBlock)output;

    // Retrieve a suitable scaler or create it on the fly
    var newScaler = ffmpeg.sws_getCachedContext(
        Scaler,
        source.Pointer->width,
        source.Pointer->height,
        NormalizePixelFormat(source.Pointer),
        source.Pointer->width,
        source.Pointer->height,
        AVPixelFormat.AV_PIX_FMT_BGRA,
        ffmpeg.SWS_POINT,
        null,
        null,
        null);

    // if it's the first time we set the scaler, simply assign it.
    if (Scaler == null)
    {
        Scaler = newScaler;
        RC.Current.Add(Scaler);
    }

    // Reassign to the new scaler and remove the reference to the existing one
    // The get cached context function automatically frees the existing scaler.
    if (Scaler != newScaler)
    {
        RC.Current.Remove(Scaler);
        Scaler = newScaler;
    }

    // Perform scaling and save the data to our unmanaged buffer pointer
    if (target.Allocate(source, AVPixelFormat.AV_PIX_FMT_BGRA) && target.TryAcquireWriterLock(out var writeLock))
    {
        using (writeLock)
        {
            var targetStride = new[] { target.PictureBufferStride };
            var targetScan = default(byte_ptrArray8);
            targetScan[0] = (byte *)target.Buffer;

            // The scaling is done here
            var outputHeight = ffmpeg.sws_scale(
                Scaler,
                source.Pointer->data,
                source.Pointer->linesize,
                0,
                source.Pointer->height,
                targetScan,
                targetStride);

            // sws_scale returns the output slice height; <= 0 indicates failure.
            if (outputHeight <= 0)
            {
                return(false);
            }
        }
    }
    else
    {
        return(false);
    }

    return(true);
}