/// <summary>
/// Static initializer: clears any stale "Temp" directory, then decodes every frame of the
/// bundled logo movie into WIC bitmaps stored in <c>logo_frames</c> for later playback.
/// </summary>
static GamingTheatre()
{
    if (Directory.Exists("Temp"))
    {
        Directory.Delete("Temp", true);
    }

    VideoStreamDecoder vsd = new VideoStreamDecoder(@"assets.shine:movie/LOGO_32.mov");

    // The ImagingFactory is loop-invariant; the original allocated and disposed a new one
    // for every frame. Create it once and guarantee disposal even if decoding throws.
    var imagingFactory = new ImagingFactory();
    try
    {
        while (vsd.TryDecodeNextFrame(out IntPtr dataPoint, out int pitch))
        {
            // Wrap the decoded BGRA frame in a WIC bitmap. The original kept the bitmaps
            // after disposing the factory, which implies the bitmap owns its pixel copy —
            // presumably WICBitmap copies from the DataRectangle; confirm against SharpDX.
            var frameBitmap = new WICBitmap(
                imagingFactory,
                vsd.FrameSize.Width,
                vsd.FrameSize.Height,
                SharpDX.WIC.PixelFormat.Format32bppPBGRA,
                new DataRectangle(dataPoint, pitch));
            logo_frames.Add(frameBitmap);
        }
    }
    finally
    {
        imagingFactory.Dispose();
        vsd.Dispose();
    }
}
/// <summary>
/// Decodes every frame of <c>device</c> to BGR24 and hands each to
/// <c>BitmapToImageSource</c>, until the stream ends or <c>activeThread</c> goes false.
/// </summary>
private unsafe void DecodeAllFramesToImages()
{
    using (var vsd = new VideoStreamDecoder(device))
    {
        var info = vsd.GetContextInfo();
        info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

        var sourceSize = vsd.FrameSize;
        var sourcePixelFormat = vsd.PixelFormat;
        var destinationSize = sourceSize;
        var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

        using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
        {
            var frameNumber = 0;
            while (vsd.TryDecodeNextFrame(out var frame) && activeThread)
            {
                var convertedFrame = vfc.Convert(frame);

                // Dispose the GDI+ wrapper each iteration — the original leaked one
                // Bitmap per decoded frame. The Bitmap wraps the converter's buffer
                // (scan0 ctor), so disposal does not free the pixel memory.
                // NOTE(review): assumes BitmapToImageSource copies the pixels before
                // returning — confirm, since the buffer is reused on the next Convert.
                using (var bitmap = new Bitmap(
                    convertedFrame.width,
                    convertedFrame.height,
                    convertedFrame.linesize[0],
                    System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                    (IntPtr)convertedFrame.data[0]))
                {
                    BitmapToImageSource(bitmap);
                }

                frameNumber++;
            }
        }
    }
}
/// <summary>
/// Opens a video source at <paramref name="path"/> and captures its pixel dimensions and
/// average frame rate from the underlying FFmpeg stream.
/// </summary>
/// <param name="path">Path or URL handed to the stream decoder.</param>
// NOTE(review): res_decode is signalled before the decoder exists — presumably to release
// a waiting decode loop; confirm the consumer tolerates this ordering.
// NOTE(review): if avg_frame_rate.den is 0, the double division yields Infinity/NaN
// rather than throwing — verify upstream guarantees a valid rate.
unsafe public VideoSource(string path) { res_decode.Set(); Stream = new VideoStreamDecoder(path); PixelSize = new Size(Stream.FrameSize.Width, Stream.FrameSize.Height); FrameRate = (double)Stream.pStream->avg_frame_rate.num / (double)Stream.pStream->avg_frame_rate.den; }
/// <summary>
/// Opens <paramref name="url"/> with software decoding and returns the decoder's
/// context/codec information as a read-only dictionary.
/// </summary>
public static IReadOnlyDictionary <string, string> DecodeInfo(string url)
{
    _EnsureBinariesAreSet();

    // Software decode only; hardware paths go through the HWDevice overloads.
    using var vsd = new VideoStreamDecoder(url, AVHWDeviceType.AV_HWDEVICE_TYPE_NONE);
    return GetDecoderInfo(vsd);
}
/// <summary>
/// Streams a raw AVC elementary file through the decoder in 16 KiB chunks.
/// </summary>
public void VideoDecode()
{
    VideoStreamDecoder dec = new VideoStreamDecoder();

    // Dispose the file handle deterministically — the original leaked the FileStream.
    using (FileStream fs = File.OpenRead(@"D:\Repos\LupoCV\src\LupoCV.CLI\bin\Debug\netcoreapp3.1\frames.avc"))
    {
        byte[] buffer = new byte[1024 * 16];
        int bytesRead;
        while ((bytesRead = fs.Read(buffer, 0, buffer.Length)) > 0)
        {
            if (bytesRead == buffer.Length)
            {
                dec.Decode(buffer);
            }
            else
            {
                // Final chunk is usually short; the original passed the whole buffer,
                // feeding stale trailing bytes from the previous read into the decoder.
                byte[] chunk = new byte[bytesRead];
                Array.Copy(buffer, chunk, bytesRead);
                dec.Decode(chunk);
            }
        }
    }
}
/// <summary>
/// Detaches this display from the replay pipeline: removes the cache registration,
/// signals the worker to stop, and tears down the decoder under the lock.
/// </summary>
public void Dispose()
{
    _replayProcess.RemoveCache(_guid);
    _disposeEvent.Set();

    lock (_obj)
    {
        var decoder = _decoder;
        if (decoder != null)
        {
            // Unhook the frame event first so no callback fires into a disposed decoder.
            decoder.VideoFrameEvent -= onVideoFrame;
            decoder.Dispose();
        }
        _decoder = null;
    }
}
/// <summary>
/// Snapshots a decoder's codec name plus its context key/value pairs into a new
/// dictionary.
/// </summary>
private static IReadOnlyDictionary <string, string> GetDecoderInfo(VideoStreamDecoder vsd)
{
    _EnsureBinariesAreSet();

    // Seed with the codec name, then overlay every context entry.
    var result = new Dictionary <string, string>
    {
        ["CodecName"] = vsd.CodecName
    };

    foreach (var entry in vsd.GetContextInfo())
    {
        result[entry.Key] = entry.Value;
    }

    return result;
}
/// <summary>
/// Decodes <paramref name="filename"/> frame-by-frame until <paramref name="frameIndex"/>
/// is reached and returns that frame as a Texture2D. Every converted frame along the way
/// is also delivered to <c>OnFrameRendered</c> subscribers as a byte array. Returns a
/// blank 4x4 texture if the stream ends (or <c>_isRunning</c> goes false) first.
/// </summary>
/// <param name="filename">Media path or URL opened by the decoder.</param>
/// <param name="frameIndex">Zero-based index of the frame to capture (default 10).</param>
/// <param name="HWDevice">Hardware-acceleration device; NONE selects software decoding.</param>
// NOTE(review): the converted AVFrame presumably points at the converter's internal
// buffer, valid only until the next Convert — confirm AVFrameToTexture2D and
// AvFrameToImageByteArray copy the pixels before returning.
private Texture2D DecodeFrameToTexture2D(String filename, int frameIndex = 10, AVHWDeviceType HWDevice = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE) { using (var vsd = new VideoStreamDecoder(filename, HWDevice)) { Debug.Log($"codec name: {vsd.CodecName}"); var info = vsd.GetContextInfo(); info.ToList().ForEach(x => Debug.Log($"{x.Key} = {x.Value}")); var sourceSize = vsd.FrameSize; var sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice); var destinationSize = sourceSize; var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24; using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat)) { var currentFrame = 0; while (vsd.TryDecodeNextFrame(out var frame) && _isRunning) { Debug.Log($"Processing frame: {currentFrame}"); var avframe = vfc.Convert(frame); if (OnFrameRendered != null) { byte[] imageData; vsd.AvFrameToImageByteArray(avframe, out imageData); OnFrameRendered(imageData); } if (currentFrame == frameIndex) { Debug.Log($"Saving frame: {frameIndex}"); return(vsd.AVFrameToTexture2D(avframe)); } currentFrame++; } return(new Texture2D(4, 4)); } } }
/// <summary>
/// Background decode loop: converts each frame to BGR24, optionally queues it for the
/// encoding thread, and displays it via <c>BitmapToImageSource</c>. Stops when the stream
/// ends or <c>isDecodingEvent</c> is no longer signalled.
/// </summary>
/// <param name="state">Thread-pool state object (unused).</param>
private unsafe void DecodeAllFramesToImages(object state)
{
    try
    {
        using (var decoder = new VideoStreamDecoder(url, videoInputType))
        {
            videoInfo = decoder.GetVideoInfo();
            var info = decoder.GetContextInfo();
            info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

            var sourceSize = decoder.FrameSize;
            var sourcePixelFormat = hwDeviceType == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE
                ? decoder.PixelFormat
                : GetHWPixelFormat(hwDeviceType);
            var destinationSize = sourceSize;
            var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

            using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
            {
                while (decoder.TryDecodeNextFrame(out var frame) && isDecodingEvent.WaitOne())
                {
                    var convertedFrame = vfc.Convert(frame);

                    if (isEncodingThreadRunning)
                    {
                        // NOTE(review): the queued AVFrame points at the converter's
                        // buffer, which the next Convert overwrites — verify the encoder
                        // copies the data before then.
                        decodedFrameQueue.Enqueue(convertedFrame);
                    }

                    // Dispose the GDI+ wrapper per frame — the original leaked one Bitmap
                    // per decoded frame (the scan0 ctor does not own the pixel memory).
                    using (var bt = new Bitmap(
                        convertedFrame.width,
                        convertedFrame.height,
                        convertedFrame.linesize[0],
                        System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                        (IntPtr)convertedFrame.data[0]))
                    {
                        BitmapToImageSource(bt);
                    }
                }
            }
        }
    }
    catch (ApplicationException e)
    {
        Console.WriteLine(e.Message);
    }
    catch (ObjectDisposedException e)
    {
        Console.WriteLine(e.Message);
    }
}
/// <summary>
/// Lazily decodes <paramref name="url"/>, converting each frame to BGR24 and yielding it
/// as a pointer bitmap paired with per-frame timing metadata.
/// </summary>
/// <param name="url">Media path or URL to open.</param>
/// <param name="HWDevice">Hardware decode device; NONE selects software decoding.</param>
// NOTE(review): the single `state` dictionary (wrapped by `context`) is mutated on every
// iteration, so consumers must read the metadata before advancing the enumerator.
// NOTE(review): the yielded bitmap presumably points at the converter's reusable buffer,
// valid only until the next MoveNext — confirm AsPointerBitmap does not copy.
public static IEnumerable <(PointerBitmap bitmap, VideoFrameState state)> DecodeFrames(string url, AVHWDeviceType HWDevice) { _EnsureBinariesAreSet(); using (var vsd = new VideoStreamDecoder(url, HWDevice)) { var info = GetDecoderInfo(vsd); var state = new Dictionary <string, long>(); var context = new VideoFrameState(info, state); var sourceSize = vsd.FrameSize; var sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice); var destinationSize = sourceSize; var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24; long index = 0; using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat)) { while (vsd.TryDecodeNextFrame(out var frame)) { var convertedFrame = vfc.Convert(frame); state["index"] = index; state["pts"] = frame.pts; // state["pkt_pts"] = frame.pkt_pts; state["pkt_dts"] = frame.pkt_dts; state["best_effort_timestamp"] = frame.best_effort_timestamp; state["display_picture_number"] = frame.display_picture_number; state["coded_picture_number"] = frame.coded_picture_number; state["decode_error_flags"] = frame.decode_error_flags; yield return(AsPointerBitmap(convertedFrame), context); ++index; } } } }
/// <summary>
/// Wires a video display to the replay timeline: subscribes to jump/speed events,
/// registers with the play process and cache, creates the D3D render target and stream
/// decoder, then starts the background pump thread and applies the current jump/speed
/// state.
/// </summary>
/// <param name="cache">Cache whose property changes drive this display.</param>
/// <param name="playProcess">Timeline source for jump and fast-forward events.</param>
// NOTE(review): onJump()/onFastTimes() run after the worker thread is started, so run()
// may briefly observe pre-jump state — confirm it tolerates that ordering.
public VideoDisplayManager(VideoCacheManager cache, ITimeProcess playProcess) { _replayProcess = playProcess; _replayProcess.JumpEvent += onJump; _replayProcess.FastTimesEvent += onFastTimes; _replayProcess.AddCache(_guid); _render = new D3DImageSource(); _render.ImageSourceChanged += render_ImageSourceChanged; _decoder = new VideoStreamDecoder(); _decoder.VideoFrameEvent += onVideoFrame; _cache = cache; _cache.PropertyChanged += _cache_PropertyChanged; _disposeEvent.Reset(); new Thread(run) { IsBackground = true }.Start(); onJump(); onFastTimes(); }
/// <summary>
/// Creates the target textures for a streaming camera and initializes the FFmpeg
/// wrapper, retrying until the stream at <paramref name="address"/> can be opened.
/// </summary>
/// <param name="address">Stream address handed to <c>VideoStreamDecoder.Initialize</c>.</param>
/// <param name="d3dDevice">Direct3D device used to allocate the 1280x720 textures.</param>
/// <param name="streams">When 2, also subscribes to the secondary frame callback.</param>
public StreamingCamera(String address, Device d3dDevice, int streams)
{
    this.d3dDevice = d3dDevice;
    texture = new Texture(d3dDevice, 1280, 720, 1, Usage.Dynamic, Format.A8R8G8B8, Pool.Default);
    texture2 = new Texture(d3dDevice, 1280, 720, 1, Usage.Dynamic, Format.A8R8G8B8, Pool.Default);

    ffWrapper = new VideoStreamDecoder();

    // Retry until the decoder comes up, pausing between attempts — the original spun in
    // a hot while(true) loop, pegging a core whenever the stream was unavailable.
    // (Retry-forever semantics are preserved; a commented-out variant threw instead.)
    while (ffWrapper.Initialize(address) <= 0)
    {
        System.Threading.Thread.Sleep(100);
    }

    ffWrapper.frameDone += ffWrapper_frameDone;
    if (streams == 2)
        ffWrapper.frame2Done += ffWrapper_frame2Done;
}
/// <summary>
/// Decodes a single frame from <paramref name="url"/>, converts it to BGR24, and assigns
/// it to <c>this.Image</c> as a WPF BitmapSource.
/// </summary>
/// <param name="url">Media path or URL to open.</param>
private unsafe void DecodeAllFramesToImages(string url)
{
    using (VideoStreamDecoder vsd = new VideoStreamDecoder(url))
    {
        IReadOnlyDictionary <string, string> info = vsd.GetContextInfo();
        System.Drawing.Size sourceSize = vsd.FrameSize;
        AVPixelFormat sourcePixelFormat = vsd.PixelFormat;
        Size destinationSize = sourceSize;
        AVPixelFormat destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

        using (VideoFrameConverter vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
        {
            // Bail out if no frame could be decoded — the original ignored the result
            // and would wrap a default (zero-sized) AVFrame in a Bitmap.
            if (!vsd.TryDecodeNextFrame(out AVFrame frame))
            {
                return;
            }

            AVFrame convertedFrame = vfc.Convert(frame);
            using (Bitmap bitmap = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0u], PixelFormat.Format24bppRgb, (IntPtr)(void *)convertedFrame.data[0u]))
            {
                // NOTE(review): GetHbitmap() allocates a GDI handle that
                // CreateBitmapSourceFromHBitmap does not free; it should be released
                // with gdi32 DeleteObject. Left as-is to avoid adding a P/Invoke here.
                this.Image = Imaging.CreateBitmapSourceFromHBitmap(bitmap.GetHbitmap(), IntPtr.Zero, System.Windows.Int32Rect.Empty, System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
            }
        }
    }
}
/// <summary>
/// Decodes every frame of the hard-coded test file ("test.mxf") to BGR24, wrapping each
/// in a Bitmap, until the stream ends or <c>activeThread</c> goes false. The frames are
/// not consumed further here.
/// </summary>
private unsafe void DecodeAllFramesToImages()
{
    _filepath = "test.mxf";
    if (String.IsNullOrEmpty(_filepath))
    {
        return;
    }
    String filepath = _filepath;

    using (var vsd = new VideoStreamDecoder(filepath))
    {
        var info = vsd.GetContextInfo();
        info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

        var sourceSize = vsd.FrameSize;
        var sourcePixelFormat = vsd.PixelFormat;
        var destinationSize = sourceSize;
        var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

        // NOTE: "VideoFrameConveter" (sic) is the project type's actual spelling.
        using (var vfc = new VideoFrameConveter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
        {
            var frameNumber = 0;
            while (vsd.TryDecodeNextFrame(out var frame) && activeThread)
            {
                var convertedFrame = vfc.Convert(frame);

                // Dispose the wrapper each iteration — the original leaked one GDI+
                // Bitmap per decoded frame.
                using (var bitmap = new Bitmap(
                    convertedFrame.width,
                    convertedFrame.height,
                    convertedFrame.linesize[0],
                    System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                    (IntPtr)convertedFrame.data[0]))
                {
                    // Frame is decoded and wrapped but intentionally not consumed here.
                }

                frameNumber++;
            }
        }
    }
}
/// <summary>
/// Decodes frames from <c>url</c> (webcam or RTSP source) to BGR24, optionally queuing
/// each converted frame for the encoding thread, and displays every frame via
/// <c>BitmapToImageSource</c>, until the stream ends or <c>activeThread</c> goes false.
/// </summary>
private unsafe void DecodeAllFramesToImages()
{
    //video="웹캠 디바이스 이름"
    //string url = "video=AVerMedia GC550 Video Capture";
    //sample rtsp source
    //string url = "rtsp://184.72.239.149/vod/mp4:BigBuckBunny_115k.mov";
    using (var vsd = new VideoStreamDecoder(url, type))
    {
        var info = vsd.GetContextInfo();
        enCodecInfo = vsd.GetCodecInfo();
        info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

        sourceSize = vsd.FrameSize;
        destinationSize = sourceSize;
        var sourcePixelFormat = vsd.PixelFormat;
        var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

        using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
        {
            while (vsd.TryDecodeNextFrame(out var frame) && activeThread)
            {
                var convertedFrame = vfc.Convert(frame);

                if (activeEncodingThread)
                {
                    // NOTE(review): the queued AVFrame points at the converter's buffer,
                    // overwritten by the next Convert — verify the encoder copies it.
                    decodedFrameQueue.Enqueue(convertedFrame);
                }

                // Dispose the GDI+ wrapper per frame — the original leaked one Bitmap
                // per decoded frame (the scan0 ctor does not own the pixel memory).
                using (Bitmap bitmap = new Bitmap(
                    convertedFrame.width,
                    convertedFrame.height,
                    convertedFrame.linesize[0],
                    System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                    (IntPtr)convertedFrame.data[0]))
                {
                    //display video image
                    BitmapToImageSource(bitmap);
                }
            }
        }
    }
}
/// <summary>
/// Extracts a thumbnail (JPEG bytes) or, when <c>settings.GrayScale == 1</c>, a 16x16
/// 256-byte grayscale signature from the video at <c>settings.Position</c>. Tries the
/// native FFmpeg binding first; on any failure falls back to spawning the ffmpeg
/// process. Returns null when both paths fail or produce invalid output.
/// </summary>
/// <param name="settings">File, seek position, and grayscale flag.</param>
/// <param name="extendedLogging">Enables ffmpeg stderr capture and verbose messages.</param>
public static unsafe byte[]? GetThumbnail(FfmpegSettings settings, bool extendedLogging)
{
    try
    {
        if (UseNativeBinding)
        {
            bool isGrayByte = settings.GrayScale == 1;

            AVHWDeviceType HWDevice = HardwareAccelerationMode switch
            {
                FFHardwareAccelerationMode.vdpau => AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU,
                FFHardwareAccelerationMode.dxva2 => AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
                FFHardwareAccelerationMode.vaapi => AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI,
                FFHardwareAccelerationMode.qsv => AVHWDeviceType.AV_HWDEVICE_TYPE_QSV,
                FFHardwareAccelerationMode.cuda => AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
                _ => AVHWDeviceType.AV_HWDEVICE_TYPE_NONE
            };

            using var vsd = new VideoStreamDecoder(settings.File, HWDevice);
            if (vsd.PixelFormat < 0 || vsd.PixelFormat >= AVPixelFormat.AV_PIX_FMT_NB)
            {
                throw new Exception($"Invalid source pixel format");
            }

            Size sourceSize = vsd.FrameSize;
            // Gray signature is fixed 16x16; thumbnails are 100px wide, height scaled.
            Size destinationSize = isGrayByte
                ? new Size(16, 16)
                : new Size(100, Convert.ToInt32(sourceSize.Height * (100 / (double)sourceSize.Width)));
            AVPixelFormat destinationPixelFormat = isGrayByte
                ? AVPixelFormat.AV_PIX_FMT_GRAY8
                : AVPixelFormat.AV_PIX_FMT_BGRA;
            using var vfc = new VideoFrameConverter(sourceSize, vsd.PixelFormat, destinationSize, destinationPixelFormat);

            if (!vsd.TryDecodeFrame(out var srcFrame, settings.Position))
            {
                throw new Exception($"Failed decoding frame at {settings.Position}");
            }
            AVFrame convertedFrame = vfc.Convert(srcFrame);

            if (isGrayByte)
            {
                int length = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, convertedFrame.width,
                    convertedFrame.height, 1).ThrowExceptionIfError();
                byte[] data = new byte[length];
                Marshal.Copy((IntPtr)convertedFrame.data[0], data, 0, length);
                return (data);
            }
            else
            {
                int width = convertedFrame.width;
                int height = convertedFrame.height;
                var totalBytes = width * height * 4;
                var rgbaBytes = new byte[totalBytes];
                int stride = convertedFrame.linesize[0];
                if (stride == width * 4)
                {
                    // Tightly packed: one bulk copy.
                    Marshal.Copy((IntPtr)convertedFrame.data[0], rgbaBytes, 0, totalBytes);
                }
                else
                {
                    // Row padding present: copy row by row, skipping the stride padding.
                    var sourceOffset = 0;
                    var destOffset = 0;
                    var byteWidth = width * 4;
                    for (var y = 0; y < height; y++)
                    {
                        Marshal.Copy((IntPtr)convertedFrame.data[0] + sourceOffset, rgbaBytes, destOffset, byteWidth);
                        sourceOffset += stride;
                        destOffset += byteWidth;
                    }
                }

                // Dispose the ImageSharp image (the original leaked it) and drop the dead
                // `rgbaBytes.SequenceEqual(stream.ToArray())` check the original computed:
                // raw pixels can never equal JPEG-encoded bytes, and its result was unused.
                using var image = Image.LoadPixelData<SixLabors.ImageSharp.PixelFormats.Bgra32>(rgbaBytes, width, height);
                using MemoryStream stream = new();
                image.Save(stream, new SixLabors.ImageSharp.Formats.Jpeg.JpegEncoder());
                return (stream.ToArray());
            }
        }
    }
    catch (Exception e)
    {
        Logger.Instance.Info($"Failed using native FFmpeg binding on '{settings.File}', try switching to process mode. Exception: {e}");
    }

    // Process fallback.
    //https://docs.microsoft.com/en-us/dotnet/csharp/how-to/concatenate-multiple-strings#string-literals
    string ffmpegArguments = $" -hide_banner -loglevel {(extendedLogging ? "error" : "quiet")}" +
        $" -y -hwaccel {HardwareAccelerationMode} -ss {settings.Position} -i \"{settings.File}\"" +
        $" -t 1 -f {(settings.GrayScale == 1 ? "rawvideo -pix_fmt gray" : "mjpeg")} -vframes 1" +
        $" {(settings.GrayScale == 1 ? "-s 16x16" : "-vf scale=100:-1")} {CustomFFArguments} \"-\"";

    using var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            Arguments = ffmpegArguments,
            FileName = FFmpegPath,
            CreateNoWindow = true,
            RedirectStandardInput = false,
            RedirectStandardOutput = true,
            WorkingDirectory = Path.GetDirectoryName(FFmpegPath) !,
            RedirectStandardError = extendedLogging,
            WindowStyle = ProcessWindowStyle.Hidden
        }
    };

    string errOut = string.Empty;
    byte[]? bytes = null;
    try
    {
        process.EnableRaisingEvents = true;
        process.Start();
        if (extendedLogging)
        {
            process.ErrorDataReceived += new DataReceivedEventHandler((sender, e) =>
            {
                if (e.Data?.Length > 0)
                {
                    errOut += Environment.NewLine + e.Data;
                }
            });
            process.BeginErrorReadLine();
        }

        using var ms = new MemoryStream();
        process.StandardOutput.BaseStream.CopyTo(ms);

        if (!process.WaitForExit(TimeoutDuration))
        {
            throw new TimeoutException($"FFmpeg timed out on file: {settings.File}");
        }
        else if (extendedLogging)
        {
            process.WaitForExit(); // Because of asynchronous event handlers, see: https://github.com/dotnet/runtime/issues/18789
        }

        if (process.ExitCode != 0)
        {
            throw new FFInvalidExitCodeException($"FFmpeg exited with: {process.ExitCode}");
        }

        bytes = ms.ToArray();
        if (bytes.Length == 0)
        {
            bytes = null; // Makes subsequent checks easier
        }
        else if (settings.GrayScale == 1 && bytes.Length != 256)
        {
            bytes = null;
            errOut += $"{Environment.NewLine}graybytes length != 256";
        }
    }
    catch (Exception e)
    {
        errOut += $"{Environment.NewLine}{e.Message}";
        try
        {
            if (process.HasExited == false)
            {
                process.Kill();
            }
        }
        catch { }
        bytes = null;
    }

    if (bytes == null || errOut.Length > 0)
    {
        string message = $"{((bytes == null) ? "ERROR: Failed to retrieve" : "WARNING: Problems while retrieving")} {(settings.GrayScale == 1 ? "graybytes" : "thumbnail")} from: {settings.File}";
        if (extendedLogging)
        {
            message += $":{Environment.NewLine}{FFmpegPath} {ffmpegArguments}";
        }
        Logger.Instance.Info($"{message}{errOut}");
    }
    return (bytes);
}