/// <summary>
///     libvlc video format callback. Negotiates the video buffer format (chroma, size,
///     pitch) and lazily creates the shared <see cref="VideoDisplayContext"/> on the
///     display thread the first time it is called.
/// </summary>
/// <param name="opaque">User data pointer supplied to libvlc (unused here).</param>
/// <param name="chroma">In: suggested chroma; out: chroma actually used (RV32).</param>
/// <param name="width">In: native video width; out: buffer width.</param>
/// <param name="height">In: native video height; out: buffer height.</param>
/// <param name="pitches">Out: bytes per buffer row.</param>
/// <param name="lines">Out: number of buffer rows.</param>
/// <returns>The size in bytes of one video buffer.</returns>
/// <exception cref="InvalidOperationException">No display-thread dispatcher is available.</exception>
private uint VideoFormatCallback(ref IntPtr opaque, ref uint chroma, ref uint width, ref uint height, ref uint pitches, ref uint lines)
{
    Debug.WriteLine(String.Format("Initialize Video Content : {0}x{1}", width, height));

    if (_context == null)
    {
        // Copy the ref parameters to locals — ref parameters cannot be captured by a lambda.
        uint tmpWidth = width;
        uint tmpHeight = height;

        if (DisplayThreadDispatcher == null)
        {
            // FIX: corrected the broken grammar of the original message
            // ("VlcPlayer not be ready ... no in XAML").
            throw new InvalidOperationException("VlcPlayer is not ready. If you want to use VlcPlayer outside of XAML, please read this Wiki: \"https://github.com/higankanshi/Meta.Vlc/wiki/Use-VlcPlayer-with-other-controls\".");
        }

        // The display context (InteropBitmap) must be created on the thread that owns the Image.
        DisplayThreadDispatcher.Invoke(DispatcherPriority.Normal,
            new Action(() => { _context = new VideoDisplayContext(tmpWidth, tmpHeight, ChromaType.RV32); }));
    }

    // Report the negotiated format back to libvlc.
    chroma = (uint)_context.ChromaType;
    width = (uint)_context.Width;
    height = (uint)_context.Height;
    pitches = (uint)_context.Stride;
    lines = (uint)_context.Height;
    VideoSource = _context.Image;

    return (uint)_context.Size;
}
/// <summary>
///     Saves the pending snapshot request (if any) on the display thread at background
///     priority, then clears the request so it runs at most once.
/// </summary>
private void TakeSnapshot()
{
    DisplayThreadDispatcher.BeginInvoke(DispatcherPriority.Background, new Action(() =>
    {
        // Nothing to do unless a snapshot has been requested.
        if (_snapshotContext == null)
        {
            return;
        }

        _snapshotContext.Save(this, this.VideoSource);
        _snapshotContext = null;
    }));
}
/// <summary>
///     Encodes the current video frame to the file requested by the pending snapshot
///     context (BMP, JPG or PNG) on the display thread, then clears the request.
/// </summary>
private void TakeSnapshot()
{
    DisplayThreadDispatcher.BeginInvoke(DispatcherPriority.Background, new Action(() =>
    {
        if (_snapshotContext == null)
        {
            return;
        }

        _snapshotContext.GetName(this);

        // FIX: the encode/open/save sequence was copy-pasted three times; it is now
        // shared in _SaveSnapshotFrame, with only the encoder and extension varying.
        switch (_snapshotContext.Format)
        {
            case SnapshotFormat.BMP:
                _SaveSnapshotFrame(new BmpBitmapEncoder(), "bmp");
                break;

            case SnapshotFormat.JPG:
                // JPEG is the only format with an adjustable quality setting.
                _SaveSnapshotFrame(new JpegBitmapEncoder { QualityLevel = _snapshotContext.Quality }, "jpg");
                break;

            case SnapshotFormat.PNG:
                _SaveSnapshotFrame(new PngBitmapEncoder(), "png");
                break;
        }

        _snapshotContext = null;
    }));
}

/// <summary>
///     Encodes the current <see cref="VideoSource"/> frame with <paramref name="encoder"/>
///     and writes it to "&lt;Path&gt;\&lt;Name&gt;.&lt;extension&gt;".
/// </summary>
/// <param name="encoder">The configured bitmap encoder to use.</param>
/// <param name="extension">The file extension without a leading dot.</param>
private void _SaveSnapshotFrame(BitmapEncoder encoder, string extension)
{
    encoder.Frames.Add(BitmapFrame.Create(VideoSource));
    using (Stream stream = File.Create(String.Format("{0}\\{1}.{2}", _snapshotContext.Path, _snapshotContext.Name, extension)))
    {
        encoder.Save(stream);
    }
}
/// <summary>
///     libvlc lock callback: called from the decode thread before each frame is written.
///     Also piggybacks per-frame maintenance — volume synchronization and a one-time
///     aspect-ratio check — before handing libvlc the frame buffer.
/// </summary>
/// <param name="opaque">User data pointer supplied to libvlc (unused here).</param>
/// <param name="planes">Out: the buffer address libvlc should decode into.</param>
/// <returns>The picture identifier for the unlock/display callbacks (here: the buffer pointer itself).</returns>
private IntPtr VideoLockCallback(IntPtr opaque, ref IntPtr planes)
{
    // Keep the native player volume in sync with the managed Volume property.
    if (VlcMediaPlayer.Volume != Volume)
    {
        VlcMediaPlayer.Volume = Volume;
    }

    if (!_context.IsAspectRatioChecked)
    {
        var tracks = VlcMediaPlayer.Media.GetTracks();
        var videoMediaTracks = tracks.OfType <VideoTrack>().ToList();
        var videoTrack = videoMediaTracks.FirstOrDefault();

        if (videoTrack != null)
        {
            _context.CheckDisplaySize(videoTrack);
            var scale = GetScaleTransform();

            if (Math.Abs(scale.Width - 1.0) + Math.Abs(scale.Height - 1.0) > 0.0000001)
            {
                // A non-identity scale means the track reported a usable aspect ratio:
                // mark the check done and log the resulting display size.
                _context.IsAspectRatioChecked = true;
                Debug.WriteLine(String.Format("Scale:{0}x{1}", scale.Width, scale.Height));
                Debug.WriteLine(String.Format("Resize Image to {0}x{1}", _context.DisplayWidth, _context.DisplayHeight));
            }
            else
            {
                // Identity scale: the track info may not be ready yet. Retry on later
                // frames, but give up (accept 1:1) after 5 attempts.
                _checkCount++;
                if (_checkCount > 5)
                {
                    _context.IsAspectRatioChecked = true;
                }
            }

            // Apply the scale on the display thread; fire-and-forget is fine here.
            if (DisplayThreadDispatcher != null)
            {
                DisplayThreadDispatcher.BeginInvoke(
                    new Action(() => { ScaleTransform = new ScaleTransform(scale.Width, scale.Height); }));
            }
        }
    }

    // Hand libvlc the mapped view of the shared frame buffer.
    return (planes = _context.MapView);
}
/// <summary>
///     libvlc video format callback. Raises <see cref="VideoFormatChanging"/> so
///     subscribers can override the requested size/chroma, (re)creates the display
///     context when the format changed, and reports the negotiated format to libvlc.
/// </summary>
/// <param name="opaque">User data pointer supplied to libvlc (unused here).</param>
/// <param name="chroma">In: suggested chroma; out: chroma actually used.</param>
/// <param name="width">In: native video width; out: buffer width.</param>
/// <param name="height">In: native video height; out: buffer height.</param>
/// <param name="pitches">Out: bytes per buffer row.</param>
/// <param name="lines">Out: number of buffer rows.</param>
/// <returns>The size in bytes of one video buffer.</returns>
/// <exception cref="InvalidOperationException">No display-thread dispatcher is available.</exception>
private uint VideoFormatCallback(ref IntPtr opaque, ref uint chroma, ref uint width, ref uint height, ref uint pitches, ref uint lines)
{
    Debug.WriteLine(String.Format("Initialize Video Content : {0}x{1}", width, height));

    // Let subscribers adjust the requested format before the buffer is (re)allocated.
    var videoFormatChangingArgs = new VideoFormatChangingEventArgs(width, height, ChromaType.RV32);
    if (VideoFormatChanging != null)
    {
        VideoFormatChanging(this, videoFormatChangingArgs);
    }

    if (_context == null || videoFormatChangingArgs.Width != _context.Width || videoFormatChangingArgs.Height != _context.Height)
    {
        if (DisplayThreadDispatcher == null)
        {
            // FIX: was "throw new NullReferenceException(...)" — never throw
            // NullReferenceException directly; it falsely implies a runtime null
            // dereference. The diagnostic message is preserved.
            throw new InvalidOperationException(String.Format("Image = {0}, Image.SeparateThreadDispatcher = {1}, ThreadSeparatedImage.CommonDispatcher = {2}", Image, Image.SeparateThreadDispatcher, ThreadSeparatedImage.CommonDispatcher));
        }

        DisplayThreadDispatcher.Invoke(DispatcherPriority.Normal, new Action(() =>
        {
            // Dispose the old buffer before allocating one with the new format.
            if (_context != null)
            {
                _context.Dispose();
            }
            _context = new VideoDisplayContext(videoFormatChangingArgs.Width, videoFormatChangingArgs.Height, videoFormatChangingArgs.ChromaType);
            VideoSource = null;
        }));
    }

    // Force the aspect ratio to be re-evaluated for the (possibly new) media.
    _context.IsAspectRatioChecked = false;

    // Report the negotiated format back to libvlc.
    chroma = (uint)_context.ChromaType;
    width = (uint)_context.Width;
    height = (uint)_context.Height;
    pitches = (uint)_context.Stride;
    lines = (uint)_context.Height;

    return (uint)_context.Size;
}
/// <summary>
///     libvlc video format callback (raw-pointer variant). Raises
///     <see cref="VideoFormatChanging"/> so subscribers can override the requested
///     size/chroma, (re)creates the display context when the format changed, and
///     writes the negotiated format back through the out pointers.
/// </summary>
/// <param name="opaque">User data pointer supplied to libvlc (unused here).</param>
/// <param name="chroma">In/out: four-byte chroma FourCC, written as a packed uint.</param>
/// <param name="width">In: native video width; out: buffer width.</param>
/// <param name="height">In: native video height; out: buffer height.</param>
/// <param name="pitches">Out: bytes per buffer row.</param>
/// <param name="lines">Out: number of buffer rows.</param>
/// <returns>The size in bytes of one video buffer.</returns>
/// <exception cref="InvalidOperationException">No display-thread dispatcher is available.</exception>
private uint VideoFormatCallback(void **opaque, byte *chroma, uint *width, uint *height, uint *pitches, uint *lines)
{
    Debug.WriteLine($"Initialize Video Content : {*width}x{*height}");

    // Let subscribers adjust the requested format before the buffer is (re)allocated.
    var videoFormatChangingArgs = new VideoFormatChangingEventArgs(*width, *height, ChromaType.RV32);
    VideoFormatChanging?.Invoke(this, videoFormatChangingArgs);

    if (_context == null || videoFormatChangingArgs.Width != _context.Width || videoFormatChangingArgs.Height != _context.Height)
    {
        if (DisplayThreadDispatcher == null)
        {
            // FIX: was "throw new NullReferenceException(...)" — never throw
            // NullReferenceException directly; it falsely implies a runtime null
            // dereference. The diagnostic message is preserved.
            throw new InvalidOperationException(
                $"Image = {Image}, Image.SeparateThreadDispatcher = {Image.SeparateThreadDispatcher}, ThreadSeparatedImage.CommonDispatcher = {ThreadSeparatedImage.CommonDispatcher}");
        }

        DisplayThreadDispatcher.Invoke(DispatcherPriority.Normal, new Action(() =>
        {
            // Dispose the old buffer before allocating one with the new format.
            if (_context != null)
            {
                _context.Dispose();
            }
            _context = new VideoDisplayContext(videoFormatChangingArgs.Width, videoFormatChangingArgs.Height, videoFormatChangingArgs.ChromaType);
            VideoSource = null;
        }));
    }

    // Force the aspect ratio to be re-evaluated for the (possibly new) media.
    _context.IsAspectRatioChecked = false;

    // Report the negotiated format back to libvlc through the out pointers.
    *(uint *)chroma = (uint)_context.ChromaType;
    *width = (uint)_context.Width;
    *height = (uint)_context.Height;
    *pitches = (uint)_context.Stride;
    *lines = (uint)_context.Height;

    return (uint)_context.Size;
}
/// <summary>
///     libvlc video format callback. Lazily creates the shared
///     <see cref="VideoDisplayContext"/> on the display thread, then reports the
///     negotiated buffer format back to libvlc.
/// </summary>
/// <param name="opaque">User data pointer supplied to libvlc (unused here).</param>
/// <param name="chroma">In: suggested chroma; out: chroma actually used (RV32).</param>
/// <param name="width">In: native video width; out: buffer width.</param>
/// <param name="height">In: native video height; out: buffer height.</param>
/// <param name="pitches">Out: bytes per buffer row.</param>
/// <param name="lines">Out: number of buffer rows.</param>
/// <returns>The size in bytes of one video buffer.</returns>
/// <exception cref="InvalidOperationException">No display-thread dispatcher is available.</exception>
private uint VideoFormatCallback(ref IntPtr opaque, ref uint chroma, ref uint width, ref uint height, ref uint pitches, ref uint lines)
{
    Debug.WriteLine(String.Format("Initialize Video Content : {0}x{1}", width, height));

    if (_context == null)
    {
        // Copy the ref parameters to locals — ref parameters cannot be captured by a lambda.
        uint tmpWidth = width;
        uint tmpHeight = height;

        // BUG FIX: the original silently skipped context creation when the dispatcher
        // was null, then dereferenced the still-null _context below and crashed with
        // an uninformative NullReferenceException. Fail fast with a clear message.
        if (DisplayThreadDispatcher == null)
        {
            throw new InvalidOperationException("VlcPlayer is not ready: no display thread dispatcher is available, so the video display context cannot be created.");
        }

        // The display context (InteropBitmap) must be created on the thread that owns the Image.
        DisplayThreadDispatcher.Invoke(DispatcherPriority.Normal,
            new Action(() => { _context = new VideoDisplayContext(tmpWidth, tmpHeight, ChromaType.RV32); }));
    }

    // Report the negotiated format back to libvlc.
    chroma = (uint)_context.ChromaType;
    width = (uint)_context.Width;
    height = (uint)_context.Height;
    pitches = (uint)_context.Stride;
    lines = (uint)_context.Height;
    VideoSource = _context.Image;

    return (uint)_context.Size;
}
/// <summary>
///     Raises <see cref="PropertyChanged"/> for the property named by
///     <paramref name="expr"/>, marshalled onto the display thread.
///     No-op while the player is stopping or disposing.
/// </summary>
/// <typeparam name="T">The property's type.</typeparam>
/// <param name="expr">A lambda of the form <c>() =&gt; SomeProperty</c>.</param>
/// <exception cref="ArgumentException"><paramref name="expr"/> is not a property access.</exception>
protected virtual void OnPropertyChanged <T>(Expression <Func <T> > expr)
{
    if (_disposing)
    {
        return;
    }
    if (_isStopping)
    {
        return;
    }

    // FIX: snapshot the delegate. The original re-read the PropertyChanged field
    // inside the deferred lambda, so an unsubscribe between the null check and the
    // dispatcher invocation could cause a NullReferenceException.
    var handler = PropertyChanged;
    if (handler == null)
    {
        return;
    }

    // FIX: guard the casts. The original used bare "as" casts and would throw
    // NullReferenceException for any expression that is not a property access.
    var bodyExpr = expr.Body as MemberExpression;
    var propInfo = bodyExpr != null ? bodyExpr.Member as PropertyInfo : null;
    if (propInfo == null)
    {
        throw new ArgumentException("Expression must be a property access of the form () => SomeProperty.", "expr");
    }

    var propName = propInfo.Name;

    if (DisplayThreadDispatcher != null)
    {
        DisplayThreadDispatcher.BeginInvoke(
            new Action(() => { handler(this, new PropertyChangedEventArgs(propName)); }));
    }
}
/// <summary>
///     Performs the one-time aspect-ratio check against the media's first video track
///     and pushes the resulting scale transform to the display thread. Retries on
///     subsequent calls (up to 5 times) while the track reports an identity scale.
/// </summary>
private void CheckAspectRatio()
{
    if (_context.IsAspectRatioChecked)
    {
        return;
    }

    var firstVideoTrack = VlcMediaPlayer.Media.GetTrackInfo().OfType<VideoTrack>().FirstOrDefault();
    if (firstVideoTrack == null)
    {
        return;
    }

    _context.CheckDisplaySize(firstVideoTrack);
    var scale = GetScaleTransform();

    var deviatesFromIdentity = Math.Abs(scale.Width - 1.0) + Math.Abs(scale.Height - 1.0) > 0.0000001;
    if (deviatesFromIdentity)
    {
        // The track reported a usable aspect ratio — the check is done.
        _context.IsAspectRatioChecked = true;
        Debug.WriteLine($"Scale:{scale.Width}x{scale.Height}");
        Debug.WriteLine($"Resize Image to {_context.DisplayWidth}x{_context.DisplayHeight}");
    }
    else
    {
        // Identity scale: the track info may not be ready yet; give up after 5 tries.
        _checkCount++;
        if (_checkCount > 5)
        {
            _context.IsAspectRatioChecked = true;
        }
    }

    if (DisplayThreadDispatcher != null)
    {
        DisplayThreadDispatcher.BeginInvoke(
            new Action(() => { ScaleTransform = new ScaleTransform(scale.Width, scale.Height); }));
    }
}