Code Example #1
        private Task ConvertStream(HttpContent httpContent, Stream outputStream)
        {
            Task convertTask = new Task(() => {

                var convertSettings = new ConvertSettings {
                    CustomOutputArgs = "-map 0",
                    CustomInputArgs = "-vcodec h264"
                };

                var ffMpeg = new FFMpegConverter();
                ffMpeg.ConvertProgress += FfMpeg_ConvertProgress;
                ffMpeg.LogReceived += FfMpeg_LogReceived;

                //var task = ffMpeg.ConvertLiveMedia(Format.h264, "C:\\Work\\Test\\converted.avi", Format.avi, convertSettings);
                var task = ffMpeg.ConvertLiveMedia(Format.h264, outputStream, Format.mpeg, convertSettings);

                task.Start();

                var ffmpegStream = new FFMPegStream(task);
                var copyTask = httpContent.CopyToAsync(ffmpegStream);
                copyTask.Wait();
                ffmpegStream.Close();

                task.Wait();

                //                ffMpeg.ConvertMedia(@"C:\Work\Test\MyHomeSecureNode\devices\test\video.h264", "C:\\Work\\Test\\converted.avi", Format.avi);

                outputStream.Close();
            });

            convertTask.Start();
            return convertTask;
        }
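
FFMPegStream is not an NReco.VideoConverter type; in this example it is evidently a small user-defined wrapper that lets HttpContent.CopyToAsync pump the incoming h264 bytes into the running conversion. A minimal sketch of such a wrapper, assuming ConvertLiveMediaTask's Write and Stop members behave as documented (hypothetical helper, not the original project's code):

        // Hypothetical wrapper: forwards Stream writes to ffmpeg's stdin via ConvertLiveMediaTask.
        public class FFMPegStream : System.IO.Stream
        {
            private readonly NReco.VideoConverter.ConvertLiveMediaTask _task;

            public FFMPegStream(NReco.VideoConverter.ConvertLiveMediaTask task)
            {
                _task = task;
            }

            public override void Write(byte[] buffer, int offset, int count)
            {
                _task.Write(buffer, offset, count); // feed the bytes to ffmpeg
            }

            public override void Close()
            {
                _task.Stop(); // signal end of input so ffmpeg can finalize the output
                base.Close();
            }

            public override bool CanRead  { get { return false; } }
            public override bool CanSeek  { get { return false; } }
            public override bool CanWrite { get { return true; } }
            public override long Length   { get { throw new System.NotSupportedException(); } }
            public override long Position
            {
                get { throw new System.NotSupportedException(); }
                set { throw new System.NotSupportedException(); }
            }
            public override void Flush() { }
            public override int Read(byte[] buffer, int offset, int count) { throw new System.NotSupportedException(); }
            public override long Seek(long offset, System.IO.SeekOrigin origin) { throw new System.NotSupportedException(); }
            public override void SetLength(long value) { throw new System.NotSupportedException(); }
        }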
Code Example #2
        public ActionResult Index()
        {
            //Download Video From Youtube
            string url     = "https://www.youtube.com/watch?v=bDuzU4XLEEs";
            var    youTube = YouTube.Default;
            var    video   = youTube.GetVideo(url);
            //------------
            //Save Video
            //System.IO.File.WriteAllBytes(@"C:\Downloads\" + video.FullName, video.GetBytes());



            var ffmpeg = new FFMpegConverter();

            var videoBytes = video.GetBytes(); // fetch the bytes once instead of calling GetBytes() repeatedly

            using (System.IO.Stream stream = new System.IO.MemoryStream(videoBytes, 0, videoBytes.Length))
            {
                var result = ffmpeg.ConvertLiveMedia(stream, Format.mp4, @"C:\Downloads\result2.gif", Format.gif, new ConvertSettings()
                {
                    VideoFrameRate = 1, MaxDuration = 10
                });
                result.Start();
                result.Wait(); // keep the input stream open until ffmpeg has finished reading it
            }

            //var videoName = @"C:\Downloads\" + video.FullName;
            //ffmpeg.ConvertMedia(videoName, null, @"C:\Downloads\result.gif", null, new ConvertSettings() { VideoFrameRate = 1, MaxDuration = 10 });

            return(View());
        }
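
Note that piping MP4 bytes through stdin like this can fail when the file's moov atom sits at the end, because ffmpeg cannot seek backwards in a pipe. The commented-out lines in the method hint at the safer file-based route; spelled out (same illustrative paths as above):

            // Write the download to disk first, then let ffmpeg read (and seek in) the file directly.
            var videoName = @"C:\Downloads\" + video.FullName;
            System.IO.File.WriteAllBytes(videoName, video.GetBytes());

            ffmpeg.ConvertMedia(videoName, null, @"C:\Downloads\result.gif", Format.gif,
                                new ConvertSettings() { VideoFrameRate = 1, MaxDuration = 10 });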
Code Example #3
        static async Task <MemoryStream> getFrameFromVideo(float startTime)
        {
            Stream rawBmpOutputStream = new MemoryStream();
            var    ffProbe            = new NReco.VideoInfo.FFProbe();
            var    videoInfo          = ffProbe.GetMediaInfo(path);
            var    convertSettings    = new ConvertSettings()
            {
                VideoFrameSize = "1280*720", // lets resize to exact frame size
                VideoFrameRate = 24,         // lets consume 24 frames per second
                MaxDuration    = 1           // lets consume live stream for first 5 seconds
            };

            convertSettings.Seek = startTime;
            var videoConv  = new FFMpegConverter();
            var ffMpegTask = videoConv.ConvertLiveMedia(
                path,
                null,               // autodetect live stream format
                rawBmpOutputStream, // this is your special stream that will capture bitmaps
                Format.gif,
                convertSettings);

            ffMpegTask.Start();
            ffMpegTask.Wait();
            return((MemoryStream)rawBmpOutputStream);
        }
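
If the goal is really a single frame rather than a one-second GIF, NReco.VideoConverter also offers a thumbnail helper. A minimal sketch, assuming the GetVideoThumbnail overload that writes one JPEG frame at a given time offset into a stream:

        static MemoryStream GetFrameAsJpeg(string path, float startTime)
        {
            var output    = new MemoryStream();
            var converter = new FFMpegConverter();

            // Extract a single frame at startTime (seconds) and encode it as JPEG.
            converter.GetVideoThumbnail(path, output, startTime);

            output.Position = 0;
            return output;
        }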
Code Example #4
        //public static byte[] ConvertVideo(Bitmap[] frames, int frameRate, string videoFormat = Format.h264)
        //{
        //    if (frames.Length == 0) throw new ArgumentOutOfRangeException("The number of frames used to build the video cannot be 0");
        //    var first = frames[0];
        //    ValidatePixelFormat(first.PixelFormat);

        //    var width = first.Width;
        //    var height = first.Height;

        //    var ff = new FFMpegConverter();
        //    using (var output = new MemoryStream())
        //    {
        //        var task = ff.ConvertLiveMedia(
        //                    "rawvideo",
        //                    output,
        //                    videoFormat,
        //                    new ConvertSettings()
        //                    {
        //                        // bgr24 = windows bitmap pixel format
        //                        // framerate = set frame rate of _input_ (in this example 5 = 5 bitmaps for 1 second of the video)
        //                        CustomInputArgs = string.Format(" -pix_fmt bgr24 -video_size {0}x{1} -framerate {2} ",
        //                            width, height, frameRate)
        //                    });

        //        task.Start();

        //        foreach (var frame in frames)
        //        {
        //            task.Write(frame);
        //        }

        //        // wait until convert is finished and stop live streaming
        //        task.Stop();

        //        //var data = output.ToArray();
        //        //using (var h264stream = new FileStream("out.h264", FileMode.Create, FileAccess.Write))
        //        //{
        //        //    h264stream.Write(data, 0, data.Length);
        //        //    ff.ConvertMedia("out.h264", "h264", "out.avi", null, new ConvertSettings());
        //        //}
        //        //return data;

        //        return output.ToArray();
        //    }
        //}

        public static Bitmap[] ConvertVideo(byte[] data, int frameRate, int outputWidth, int outputHeight)
        {
            Bitmap[] frames = null;
            using (MemoryStream input = new MemoryStream(data))
            {
                var ff = new FFMpegConverter();
                using (var output = new BitmapStream(outputWidth, outputHeight))
                {
                    var task = ff.ConvertLiveMedia(
                        input, // read from the in-memory stream
                        null,  // autodetect the format from the stream
                        output,
                        "rawvideo",
                        new ConvertSettings()
                    {
                        VideoFrameSize   = String.Format("{0}x{1}", outputWidth, outputHeight), // the output frame size can be set here
                        CustomOutputArgs = " -pix_fmt bgr24 ",                                  // windows bitmap pixel format
                        VideoFrameRate   = frameRate
                    });


                    task.Start();
                    task.Wait(); // blocks until ffmpeg has consumed the whole input stream and finished
                    frames = output.GetBitmaps();
                }
            }
            return(frames);
        }
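
BitmapStream is not a library class; judging by its use above it is a user-defined, write-only Stream that slices the rawvideo/bgr24 output into width*height*3-byte frames and turns each one into a Bitmap. A rough sketch of such a class (hypothetical, with deliberately simple buffering; the real implementation may differ):

        public class BitmapStream : System.IO.Stream
        {
            private readonly int _width, _height, _frameSize;
            private readonly System.Collections.Generic.List<Bitmap> _bitmaps = new System.Collections.Generic.List<Bitmap>();
            private readonly MemoryStream _pending = new MemoryStream();

            public BitmapStream(int width, int height)
            {
                _width     = width;
                _height    = height;
                _frameSize = width * height * 3; // bgr24 = 3 bytes per pixel
            }

            public override void Write(byte[] buffer, int offset, int count)
            {
                _pending.Write(buffer, offset, count);

                // Convert every complete frame that has accumulated so far.
                while (_pending.Length >= _frameSize)
                {
                    var bytes = _pending.ToArray();
                    _bitmaps.Add(ToBitmap(bytes));

                    // Keep whatever belongs to the next (still incomplete) frame.
                    _pending.SetLength(0);
                    _pending.Write(bytes, _frameSize, bytes.Length - _frameSize);
                }
            }

            public Bitmap[] GetBitmaps()
            {
                return _bitmaps.ToArray();
            }

            private Bitmap ToBitmap(byte[] frame)
            {
                var bmp  = new Bitmap(_width, _height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                var data = bmp.LockBits(new Rectangle(0, 0, _width, _height),
                                        System.Drawing.Imaging.ImageLockMode.WriteOnly,
                                        System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                // Copy row by row because Bitmap rows are padded to 4-byte boundaries.
                for (int y = 0; y < _height; y++)
                {
                    System.Runtime.InteropServices.Marshal.Copy(
                        frame, y * _width * 3, data.Scan0 + y * data.Stride, _width * 3);
                }
                bmp.UnlockBits(data);
                return bmp;
            }

            public override bool CanRead  { get { return false; } }
            public override bool CanSeek  { get { return false; } }
            public override bool CanWrite { get { return true; } }
            public override long Length   { get { throw new System.NotSupportedException(); } }
            public override long Position
            {
                get { throw new System.NotSupportedException(); }
                set { throw new System.NotSupportedException(); }
            }
            public override void Flush() { }
            public override int Read(byte[] buffer, int offset, int count) { throw new System.NotSupportedException(); }
            public override long Seek(long offset, System.IO.SeekOrigin origin) { throw new System.NotSupportedException(); }
            public override void SetLength(long value) { throw new System.NotSupportedException(); }
        }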
Code Example #5
        /// <summary>
        /// Begin processing the request.
        /// </summary>
        /// <param name="request">The request object.</param>
        /// <param name="callback">The callback used to Continue or Cancel the request (async).</param>
        /// <returns>To handle the request return true and call
        /// <see cref="ICallback.Continue"/> once the response header information is available
        /// (<see cref="ICallback.Continue"/> can also be called from inside this method if
        /// header information is available immediately).
        /// To cancel the request return false.</returns>
        public bool ProcessRequest(IRequest request, ICallback callback)
        {
            Task.Run(() =>
            {
                using (callback)
                {
                    var httpWebRequest = (HttpWebRequest)WebRequest.Create(request.Url);

                    var httpWebResponse = (HttpWebResponse)httpWebRequest.GetResponse();

                    // Get the stream associated with the response.
                    var receiveStream = httpWebResponse.GetResponseStream();
                    var mime          = httpWebResponse.ContentType;

                    var stream = new MemoryStream();
                    receiveStream.CopyTo(stream);
                    httpWebResponse.Close();
                    stream.Position = 0;

                    //FFmpeg =
                    _converter.ConvertLiveMedia(stream, Format.mp4, outputStream, Format.webm, new ConvertSettings()
                    {
                        VideoCodec      = "libvpx",
                        AudioCodec      = "libvorbis",
                        CustomInputArgs = "-preset ultrafast"
                    });
                    //FFmpeg.Start();
                    //FFmpeg.Wait();

                    callback.Continue();
                }
            });
            return(true);
        }
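
As written, the ConvertLiveMediaTask returned by ConvertLiveMedia above is never started (the Start/Wait calls are commented out), so no WebM data is ever produced before callback.Continue() runs. A hedged sketch of the missing piece, assuming outputStream is a seekable stream that this resource handler later serves to CEF (note that "-preset ultrafast" is an x264 option, so it likely has no effect on a libvpx encode and is dropped here):

                    var ffmpegTask = _converter.ConvertLiveMedia(stream, Format.mp4, outputStream, Format.webm,
                        new ConvertSettings()
                        {
                            VideoCodec = "libvpx",
                            AudioCodec = "libvorbis"
                        });
                    ffmpegTask.Start();
                    ffmpegTask.Wait();          // block this worker until ffmpeg has written the whole WebM stream
                    outputStream.Position = 0;  // rewind before signalling that the response is ready

                    callback.Continue();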
Code Example #6
        private void Init()
        {
            _age       = new FaceAge(AppId, AgeKey);
            _gender    = new FaceGender(AppId, GenderKey);
            _traking   = LocatorFactory.GetTrackingLocator(AppId, FtKey, _age, _gender) as FaceTracking;
            _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
            _recognize = new FaceRecognize(AppId, FrKey);
            _processor = new FaceProcessor(_traking, _recognize);

            //init cache
            if (Directory.Exists(FaceLibraryPath))
            {
                var files = Directory.GetFiles(FaceLibraryPath);
                foreach (var file in files)
                {
                    var info = new FileInfo(file);
                    _cache.Add(info.Name.Replace(info.Extension, ""), File.ReadAllBytes(file));
                }
            }

            stride     = width * pixelSize;
            bufferSize = stride * height;

            _pImage = Marshal.AllocHGlobal(bufferSize);
            _image  = new Bitmap(width, height, stride, PixelFormat.Format24bppRgb, _pImage);

            var ffmpeg = new FFMpegConverter();

            outputStream = new MemoryStream();

            var setting =
                new ConvertSettings
            {
                CustomOutputArgs = "-s 1920x1080", //根据业务需求-r参数可以调整,取决于摄像机的FPS
            };                                     //-s 1920x1080 -q:v 2 -b:v 64k

            //-an -r 15 -pix_fmt bgr24 -updatefirst 1
            //task = ffmpeg.ConvertLiveMedia("rtsp://*****:*****@192.168.1.64:554/h264/ch1/main/av_stream", null,
            //    outputStream, Format.raw_video, setting);

            /*
             * USB camera capture
             * ffmpeg can also capture from a USB camera, as the code below shows.
             * First list the USB capture devices present in the system with: ffmpeg -list_devices true -f dshow -i dummy
             * (or look the device name up in Device Manager); on my machine, for example, it is called "USB2.0 PC CAMERA".
             * Then adjust the video frame information (width and height) to the capture resolution; the pixel size normally
             * does not need to change. To see which resolutions the device supports, run:
             * ffmpeg -list_options true -f dshow -i video="USB2.0 PC CAMERA"
             */
            task = ffmpeg.ConvertLiveMedia("video=Logitech HD Webcam C270", "dshow",
                                           outputStream, Format.raw_video, setting);

            task.OutputDataReceived += DataReceived;
            task.Start();

            _renderTask = new Task(Render);
            _renderTask.Start();
        }
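
The Render method started at the end of Init is not shown. With these settings ffmpeg appends raw frames to outputStream, each frame being bufferSize (= width * height * pixelSize) bytes, so a render loop essentially has to lift the most recent complete frame out of that stream and copy it into the pre-allocated _pImage buffer that backs _image. A rough, hypothetical sketch (thread synchronization is deliberately simplified and _running is an assumed flag; the real project may differ):

        private void Render()
        {
            var frame = new byte[bufferSize];

            while (_running) // hypothetical flag, cleared when the window closes
            {
                bool haveFrame = false;
                lock (outputStream)
                {
                    if (outputStream.Length >= bufferSize)
                    {
                        // Read the last complete frame and drop everything before it.
                        long start = (outputStream.Length / bufferSize - 1) * bufferSize;
                        outputStream.Position = start;
                        outputStream.Read(frame, 0, bufferSize);
                        outputStream.SetLength(0);
                        haveFrame = true;
                    }
                }

                if (haveFrame)
                {
                    Marshal.Copy(frame, 0, _pImage, bufferSize); // _image now shows the newest frame
                    // ...run face tracking / update the preview with _image here...
                }

                Thread.Sleep(15); // roughly one frame interval
            }
        }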
Code Example #7
        private Task ConvertStream(/*Stream input, */ Stream outputStream)
        {
            Task convertTask = new Task(() => {
                try
                {
                    var convertSettings = new ConvertSettings
                    {
                        CustomOutputArgs = "-map 0",
                        CustomInputArgs  = "-vcodec h264"
                    };
                    //convertSettings.SetVideoFrameSize(360, 360);
                    var ffMpeg              = new FFMpegConverter();
                    ffMpeg.ConvertProgress += FfMpeg_ConvertProgress;
                    ffMpeg.LogReceived     += FfMpeg_LogReceived;

                    //task = ffMpeg.ConvertLiveMedia(Format.h264, outStream, Format.mjpeg, convertSettings);
                    //task = ffMpeg.ConvertLiveMedia(inStream, Format.h264, outStream, Format.mjpeg, convertSettings);
                    //            task = ffMpeg.ConvertLiveMedia(inStream, Format.h264, $@"Files\video2.mpeg", Format.mjpeg, convertSettings);
                    //            task.Start();
                    //ffMpeg.ConvertMedia($@"Files\video.mpeg4", Format.h264, $@"Files\video3.mpeg", Format.mjpeg, convertSettings);
                    //var task = ffMpeg.ConvertLiveMedia(Format.h264, "C:\\Work\\Test\\converted.avi", Format.avi, convertSettings);
                    //ffMpeg.ConvertMedia($@"Files\video.mpeg4", $@"Files\video2.flv", Format.flv);
                    //var setting = new NReco.VideoConverter.ConvertSettings();
                    //setting.SetVideoFrameSize(360, 360);
                    //setting.VideoCodec = "h264";
                    //ffMpeg.ConvertMedia($@"Files\video.mpeg4", Format.h264, $@"Files\video2.mjpeg", Format.mjpeg, setting);
                    //task.Stop();
                    var task = ffMpeg.ConvertLiveMedia(Format.h264, outputStream, Format.mjpeg, convertSettings);

                    task.Start();

                    ffinStream = new FFMPegStream(task);
                    //var copyTask = input.CopyToAsync(ffmpegStream);
                    //copyTask.Wait();
                    //ffmpegStream.Close();

                    task.Wait();
                    //ffMpeg.ConvertMedia(@"C:\Work\Test\MyHomeSecureNode\devices\test\video.h264", "C:\\Work\\Test\\converted.avi", Format.avi);

                    outputStream.Close();
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                }
            });

            convertTask.Start();
            return(convertTask);
        }
Code Example #8
File: Main.cs  Project: vebin/FaceRecognization
        private void Init()
        {
            _traking   = LocatorFactory.GetTrackingLocator(AppId, FtKey) as FaceTracking;
            _detection = LocatorFactory.GetDetectionLocator(AppId, FdKey) as FaceDetection;
            _recognize = new FaceRecognize(AppId, FrKey);
            _processor = new FaceProcessor(_traking, _recognize);

            //init cache
            if (Directory.Exists(FaceLibraryPath))
            {
                var files = Directory.GetFiles(FaceLibraryPath);
                foreach (var file in files)
                {
                    var info = new FileInfo(file);
                    _cache.Add(info.Name.Replace(info.Extension, ""), File.ReadAllBytes(file));
                }
            }

            _pImage = Marshal.AllocHGlobal(1920 * 1080 * 3);
            _image  = new Bitmap(1920, 1080, 1920 * 3, PixelFormat.Format24bppRgb, _pImage);

            var ffmpeg = new FFMpegConverter();

            outputStream = new MemoryStream();

            var setting =
                new ConvertSettings
            {
                CustomOutputArgs = "-an -r 15 -pix_fmt bgr24 -updatefirst 1" //根据业务需求-r参数可以调整,取决于摄像机的FPS
            };                                                               //-s 1920x1080 -q:v 2 -b:v 64k

            task = ffmpeg.ConvertLiveMedia("rtsp://*****:*****@192.168.1.64:554/h264/ch1/main/av_stream", null,
                                           outputStream, Format.raw_video, setting);

            task.OutputDataReceived += DataReceived;
            task.Start();

            _renderTask = new Task(Render);
            _renderTask.Start();
        }
Code Example #9
File: VideoDecoder.cs  Project: kiersten11/SwarmSight
        public void Start(bool open = true, string customArgs = "", int?FrameBufferCapacity = null, int?MinimumWorkingFrames = null)
        {
            if (open)
            {
                Open(VideoPath);
            }

            _filereader = new FFMpegConverter();

            if (FrameDecoder != null)
            {
                FrameDecoder.ClearBuffer();
            }

            //Set the format of the output bitmap
            FrameDecoder = new FrameDecoder
                           (
                PlayerOutputWidth,
                PlayerOutputHeight,
                PixelFormat.Format24bppRgb
                           );

            if (FrameBufferCapacity != null)
            {
                FrameDecoder.FrameBufferCapacity = (int)FrameBufferCapacity;
            }

            if (MinimumWorkingFrames != null)
            {
                FrameDecoder.MinimumWorkingFrames = (int)MinimumWorkingFrames;
            }

            BufferInitialized?.Invoke(this, null);

            FrameDecoder.Processor       = Processor;
            FrameDecoder.FrameReady     += OnFrameReady;
            _filereader.LogReceived     += OnLogReceived;
            _filereader.ConvertProgress += _filereader_ConvertProgress;

            //Set conversion settings
            _settings = new ConvertSettings
            {
                VideoFrameSize   = PlayerOutputWidth + "x" + PlayerOutputHeight,
                CustomOutputArgs = " -pix_fmt bgr24 " + customArgs,
            };

            //Set start time
            if (PlayStartTimeInSec > 0)
            {
                //Adjust frame index for seeking
                CurrentFrame = (int)Math.Round(PlayStartTimeInSec * VideoInfo.FPS, 0);

                _settings.Seek = (float?)PlayStartTimeInSec;
            }

            //Set end time (if valid)
            if (PlayEndTimeInSec != null && PlayEndTimeInSec > PlayStartTimeInSec)
            {
                _settings.MaxDuration = (float?)(PlayEndTimeInSec - PlayStartTimeInSec);
            }

            //Setup to convert from the file into the bitmap intercepting stream
            _readingTask = _filereader.ConvertLiveMedia
                           (
                VideoPath,
                null,     // autodetect stream format
                FrameDecoder,
                "rawvideo",
                _settings
                           );

            _readingThread = new Thread(StartDecoding)
            {
                IsBackground = true
            };
            _readingThread.Start();
        }
Code Example #10
        private void stream_Click(object x, EventArgs y)
        {
            // Get the IDeckLinkInput and IDeckLinkOutput interfaces
            var input  = (IDeckLinkInput)_DeckLink;
            var output = (IDeckLinkOutput)_DeckLink;

            if (_Streaming)
            {
                KillStream();
            }
            else
            {
                IDeckLinkDisplayModeIterator displayIterator;
                input.GetDisplayModeIterator(out displayIterator);

                var supportedModes = new List <IDeckLinkDisplayMode>();

                input.SetCallback(this);
                input.SetScreenPreviewCallback(this);

                var flags   = _BMDVideoInputFlags.bmdVideoInputFlagDefault | _BMDVideoInputFlags.bmdVideoInputEnableFormatDetection;
                var format  = _BMDPixelFormat.bmdFormat8BitYUV;
                var display = _BMDDisplayMode.bmdModeHD1080i50;

                _BMDDisplayModeSupport support;
                IDeckLinkDisplayMode   tmp;
                input.DoesSupportVideoMode(display, format, flags, out support, out tmp);

                if (support != _BMDDisplayModeSupport.bmdDisplayModeSupported)
                {
                    throw new Exception("display mode not working: " + support);
                }

                if (writeRaw.Checked)
                {
                    _VideoWriter = new BinaryWriter(File.Open(_RawVideoFile, FileMode.OpenOrCreate));
                    _AudioWriter = new BinaryWriter(File.Open(_RawAudioFile, FileMode.OpenOrCreate));
                }

                if (writeEncoded.Checked)
                {
                    _EncodedStream = new FileStream(_EncodedVideoFile, FileMode.Create, FileAccess.Write);

                    _EncodeTask = _VideoConverter.ConvertLiveMedia(
                        "rawvideo",
                        _EncodedStream,
                        "h264",
                        new ConvertSettings()
                    {
                        CustomInputArgs = " -pix_fmt uyvy422 -video_size 1920x1080 -framerate 25",
                    });

                    _EncodeTask.Start();
                }

                input.EnableVideoInput(display, format, flags);
                input.EnableAudioInput(_AudioSampleRate, _AudioSampleType, _AudioChannels);

                input.StartStreams();

                stream.Text = "Kill";
                _Streaming  = true;
            }
        }
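
The encoder task only produces output once raw frames are written into it. This form passes itself to input.SetCallback, so it implements the DeckLink capture callback; a hedged sketch of what the video branch of IDeckLinkInputCallback.VideoInputFrameArrived might do (the method and member names come from the DeckLink SDK interop, and buffer handling is simplified):

        public void VideoInputFrameArrived(IDeckLinkVideoInputFrame videoFrame, IDeckLinkAudioInputPacket audioPacket)
        {
            if (videoFrame == null || _EncodeTask == null)
            {
                return;
            }

            // Copy the raw uyvy422 frame out of the DeckLink buffer...
            System.IntPtr framePtr;
            videoFrame.GetBytes(out framePtr);
            int length = videoFrame.GetRowBytes() * videoFrame.GetHeight();
            var buffer = new byte[length];
            System.Runtime.InteropServices.Marshal.Copy(framePtr, buffer, 0, length);

            // ...and pipe it into ffmpeg's stdin via the live-media task.
            _EncodeTask.Write(buffer, 0, length);
        }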
Code Example #11
        static void Main(string[] args)
        {
            GifImage a;

            Console.CursorSize = 8;
            path       = Console.ReadLine();
            imageList  = new List <byte[]> {
            };
            frameCount = 0;
            //check if file is gif or video
            string ext = Path.GetExtension(path);

            if (ext.Equals(".gif"))
            {
                try
                {
                    a = new GifImage(path)
                    {
                        ReverseAtEnd = false
                    };
                    Timer timer = new Timer((object o) => TimerTick(o, ref a), null, 0, 60);
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                }
            }
            //gif
            else
            {
                Stream rawBmpOutputStream = new MemoryStream();
                var    ffProbe            = new NReco.VideoInfo.FFProbe();
                var    videoInfo          = ffProbe.GetMediaInfo(path);

                var videoConv  = new FFMpegConverter();
                var ffMpegTask = videoConv.ConvertLiveMedia(
                    path,
                    null,               // autodetect live stream format
                    rawBmpOutputStream, // this is your special stream that will capture bitmaps
                    Format.gif,
                    new ConvertSettings()
                {
                    VideoFrameSize = "1280*720", // lets resize to exact frame size
                    VideoFrameRate = 24,         // lets consume 24 frames per second
                    MaxDuration    = 10          // lets consume live stream for first 5 seconds
                });
                ffMpegTask.Start();
                ffMpegTask.Wait();
                //var sr = new BinaryReader(rawBmpOutputStream);
                //sr.BaseStream.Position = 0;

                //using (var fileStream = File.Create(@"D:\test.gif"))
                //{
                //    rawBmpOutputStream.Seek(0, SeekOrigin.Begin);
                //    rawBmpOutputStream.CopyTo(fileStream);
                //}

                try
                {
                    a = new GifImage(rawBmpOutputStream)
                    {
                        ReverseAtEnd = false
                    };
                    Timer timer = new Timer((object o) => TimerTick(o, ref a), null, 0, 60);
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                }
            }
            //video
            Console.WindowWidth  = Console.LargestWindowWidth;
            Console.WindowHeight = Console.LargestWindowHeight;
            // var t = new Timer((object o) => TimerTick(o, ref a), null, 0, 100);

            Console.ReadKey();
        }