public CameraEncoder(VideoEncodeCfg cfgVideo, Action<MediaFrame> callBack, Canvas canvas = null)
{
    _cfgVideo = cfgVideo;
    _fps = cfgVideo.frameRate;
    this.Canvas = canvas;
    _capturer = CreateCapturer();

    // Base x264 parameters come from the encode config; "X264Encode" and "KeyFrameRate" are optional overrides.
    var @params = new X264Params(_cfgVideo.width, _cfgVideo.height, _fps, cfgVideo.videoBitRate);
    if (cfgVideo.Params.ContainsKey("X264Encode"))
    {
        @params.method = (int)cfgVideo.Params["X264Encode"];
    }
    if (cfgVideo.Params.ContainsKey("KeyFrameRate"))
    {
        @params.key_frame_max = (int)cfgVideo.Params["KeyFrameRate"];
    }
    _x264 = new X264Native(@params);
    _x264.Init();

    _ffscale = new FFScale(_cfgVideo.width, _cfgVideo.height, 0, 12, _cfgVideo.width, _cfgVideo.height, 12, 12);
    _draw = cfgVideo.Draw;
    _draw.SetSize(_cfgVideo.width, _cfgVideo.height);
    _callBack = callBack;
}
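// Hedged usage sketch (not part of the original source): shows how a caller might supply the
// optional "X264Encode" and "KeyFrameRate" entries consumed by the CameraEncoder constructor
// above. The VideoEncodeCfg initialization (settable width/height/frameRate/videoBitRate and a
// Params dictionary taking boxed ints) is an assumption; only the key names and how they map to
// X264Params are taken from the constructor itself.
public static CameraEncoder CreateCameraEncoderSketch(Action<MediaFrame> onEncodedFrame)
{
    var cfg = new VideoEncodeCfg          // hypothetical initialization
    {
        width = 640,
        height = 480,
        frameRate = 15,
        videoBitRate = 320000,
    };
    cfg.Params["X264Encode"] = 1;         // copied into X264Params.method
    cfg.Params["KeyFrameRate"] = 30;      // copied into X264Params.key_frame_max
    // cfg.Draw must also be assigned before constructing the encoder; the constructor calls
    // cfg.Draw.SetSize(...) without a null check.
    return new CameraEncoder(cfg, onEncodedFrame);
}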
public ScreenCapturer(int width, int height, int fps, Action<byte[]> callBack)
{
    _width = width;
    _height = height;
    _fps = fps;
    _callBack = callBack;
    _ffscale = new FFScale(width, height, 3, 24, width, height, 0, 12);
}
public MixerVideoCapturer(Mixer.Video.Canvas canvas, int width, int height, int fps, Action<byte[]> callBack)
{
    _width = width;
    _height = height;
    _fps = fps;
    _callBack = callBack;
    this.Canvas = canvas;
    _ffscale = new FFScale(width, height, 3, 24, width, height, 0, 12);
}
public static void Test()
{
    try
    {
        FFScale s = new FFScale(320, 240, 29, 32, 320, 240, 0, 12);
        byte[] @in = new byte[320 * 240 * 4];
        byte[] @out = s.FormatS(@in);
        @out = s.FormatS(@in);
    }
    catch (Exception)
    {
        // Smoke test only; exceptions are intentionally swallowed.
    }
}
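// Hedged note (inferred from the call sites in this section, not from FFScale itself): the
// constructor appears to take (srcWidth, srcHeight, srcFormat, srcBitsPerPixel, dstWidth,
// dstHeight, dstFormat, dstBitsPerPixel), where 0/12 is used on the planar YUV420 side of a
// conversion and 3/24 on the 24-bit RGB side (see the ScreenCapturer and CameraAForge
// constructors in this section). The exact meaning of the numeric format codes is an assumption.
// A minimal RGB24 -> YUV420 sketch under that assumption:
public static byte[] Rgb24ToYuv420Sketch(byte[] rgb24, int width, int height)
{
    // 3/24 = assumed RGB24 input, 0/12 = assumed YUV420 output, as in the capturer constructors.
    var scale = new FFScale(width, height, 3, 24, width, height, 0, 12);
    return scale.FormatS(rgb24); // FormatS is the conversion call used in Test() above
}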
public static void Test()
{
    int width = 320, height = 240;
    X264Native x264 = new X264Native(new X264Params(width, height, 10, 320));
    //x264.SetIKeyIntMax(10);
    x264.Init();
    var ls = MediaServer.Media.ReadFile.GetBuffByFile1(@"D:\video_monitor\v2v_super_exchange\branch\branch_V3.16.0.0(dongtaifuyong)\bin\Debug\1237.yuv");
    AVCodecCfg cf = AVCodecCfg.CreateVideo(width, height, (int)SLW.Media.AVCode.CODEC_ID_H264, 100000);
    FFImp ffimp = new FFImp(cf, true);
    //FFScale ffscale = new FFScale(width, height, 26, 12, width, height, 12, 12);
    FFScale ffscale = new FFScale(width, height, 0, 12, width, height, 3, 24);
    foreach (var item1 in ls)
    {
        var item = ffscale.FormatS(item1);
        // Left over from the commented-out P/Invoke encode path below; unused by x264.Encode.
        var in_buf = FunctionEx.BytesToIntPtr(item);
        var out_buf = Marshal.AllocHGlobal(item.Length);
        //var bKeyFrame = false;
        //var nOutLen = 0;
        var nInLen = item.Length;
        // var size = X264Encode(x264.obj, in_buf, ref nInLen, out_buf, ref nOutLen, ref bKeyFrame);
        // var buf = FunctionEx.IntPtrToBytes(out_buf, 0, size);
        var buf = x264.Encode(item);
        Console.WriteLine(buf.To16Strs(0, 16));
        var size = buf.Length;
        if (w == null) //OK
        {
            w = new BinaryWriter(new FileStream("4567.es", FileMode.Create));
        }
        w.Write(buf);
        ////Media.MediaFrame mf = new Media.MediaFrame();
        ////mf.nIsKeyFrame = (byte)(x264.IsKeyFrame() ? 1 : 0);
        ////mf.nWidth = width;
        ////mf.nHeight = height;
        ////mf.nEncoder = Media.MediaFrame.H264Encoder;
        ////mf.nTimetick = 0;
        ////mf.nSize = size;
        ////mf.Data = buf;
        ////buf = mf.GetBytes();
        ////fs.Write(BitConverter.GetBytes(buf.Length), 0, 4);
        //fs.Write(buf, 0, buf.Length);
        //fs.Flush();
        //IntPtr intt = IntPtr.Zero;
        //var sssss = ffimp.VideoDec(buf, ref intt);
        ////Console.WriteLine(buf.Take(32).ToArray().To16Strs());
        //// var size = Encode1(ii, in_buf, ref nInLen, out_buf);
    }
    //fs.Close();
}
public void SetSize(int width, int height)
{
    _width = width;
    _height = height;
    _scale = new FFScale(width, height, 0, 12, width, height, 3, 24);
    if (_image != null)
    {
        _image.Dispose();
    }
    _image = null;
    lock (_sync)
    {
        _buffer = null;
    }
}
private void FFScaleSetup(AMMediaType mt)
{
    // DirectShow media subtype GUIDs:
    //   e436eb7d-524f-11ce-9f53-0020af0ba770  RGB24
    //   32595559-0000-0010-8000-00aa00389b71  YUY2
    if (mt.subType.ToString() == "32595559-0000-0010-8000-00aa00389b71")
    {
        _ffscale = new FFScale(_width, _height, 1, 12, _width, _height, 0, 12);
    }
    else if (mt.subType.ToString() == "e436eb7d-524f-11ce-9f53-0020af0ba770")
    {
        _ffscale = new FFScale(_width, _height, 2, 24, _width, _height, 0, 12);
    }
    else
    {
        throw new Exception("FFScaleSetup: unsupported media subtype " + mt.subType);
    }
}
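// Hedged alternative sketch (not part of the original source): the same dispatch written against
// Guid constants instead of string comparison, assuming mt.subType is a System.Guid. The Guid
// values are the YUY2/RGB24 subtypes already listed in the comments above; the FFScale arguments
// are copied verbatim from FFScaleSetup.
private static readonly Guid MediaSubTypeYuy2 = new Guid("32595559-0000-0010-8000-00aa00389b71");
private static readonly Guid MediaSubTypeRgb24 = new Guid("e436eb7d-524f-11ce-9f53-0020af0ba770");

private void FFScaleSetupSketch(AMMediaType mt)
{
    if (mt.subType == MediaSubTypeYuy2)
    {
        _ffscale = new FFScale(_width, _height, 1, 12, _width, _height, 0, 12);
    }
    else if (mt.subType == MediaSubTypeRgb24)
    {
        _ffscale = new FFScale(_width, _height, 2, 24, _width, _height, 0, 12);
    }
    else
    {
        throw new Exception("FFScaleSetup: unsupported media subtype " + mt.subType);
    }
}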
public CameraAForge(int camera, int width, int height, Action<byte[]> callBack)
{
    _callBack = callBack;
    _ffscale = new FFScale(width, height, 3, 24, width, height, 0, 12);
    videoDevices = new AForge.Video.DirectShow.FilterInfoCollection(FilterCategory.VideoInputDevice);
    videoDevice = new AForge.Video.DirectShow.VideoCaptureDevice(videoDevices[camera].MonikerString);
    foreach (var item in videoDevice.VideoCapabilities)
    {
        if (item.FrameSize.Width == width && item.FrameSize.Height == height)
        {
            videoDevice.VideoResolution = item;
        }
    }
    if (videoDevice.VideoResolution == null)
    {
        throw new Exception(string.Format("The camera does not support the resolution {0}*{1}", width, height));
    }
    videoDevice.NewFrame += NewFrame;
    videoDevice.RGBRawFrame += YUVFrame;
}
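// Hedged usage sketch (not from the original source): constructing a CameraAForge capturer and
// attaching a callback. With the FFScale arguments above (3/24 -> 0/12), the callback is assumed
// to receive one YUV420 frame per captured image, i.e. roughly width * height * 3 / 2 bytes.
// How the surrounding class starts and stops the underlying AForge device is not shown here and
// is not assumed.
public static CameraAForge OpenFirstCameraSketch()
{
    return new CameraAForge(
        camera: 0,               // first DirectShow video input device
        width: 640,
        height: 480,
        callBack: yuvFrame => Console.WriteLine("frame bytes: " + yuvFrame.Length));
}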
public FFImp(AVCodecCfg cfg, bool isDec, bool isvideo = true)
{
    this.isDec = isDec;
    this.cfg = cfg;
    var pcfg = FunctionEx.StructToIntPtr(cfg);
    pAVObj = FunctionEx.StructToIntPtr(new AVModel());
    lock (_lock)
    {
        if (!ffimp_init())
        {
            throw new Exception("ffimp init error");
        }
    }
    if (isvideo)
    {
        int pOutBufSize = cfg.width * cfg.height * 3;
        if (pOutBufSize == 0)
        {
            pOutBufSize = 1920 * 1080 * 4;
        }
        pOutBuf = Marshal.AllocHGlobal(pOutBufSize);
        int init_r = 0;
        lock (_lock)
        {
            if (this.isDec)
            {
                init_r = ffimp_video_decode_init(ref pAVObj, pcfg, pOutBuf, pOutBufSize);
            }
            else
            {
                init_r = ffimp_video_encode_init(ref pAVObj, pcfg, pOutBuf, pOutBufSize);
            }
            _ffscale = new FFScale(cfg.width, cfg.height, 0, 12, cfg.width, cfg.height, 0, 12);
        }
    }
    else
    {
        int pOutBufSize = cfg.width * cfg.height * 3;
        if (pOutBufSize == 0)
        {
            pOutBufSize = 2048 * 2;
        }
        pOutBuf = Marshal.AllocHGlobal(pOutBufSize);
        int init_r = 0;
        lock (_lock)
        {
            if (this.isDec)
            {
                init_r = ffimp_audio_decode_init(ref pAVObj, pcfg, pOutBuf, pOutBufSize);
            }
            else
            {
                // NOTE: the encode branch also calls ffimp_audio_decode_init. This looks like a
                // copy-paste bug (an ffimp_audio_encode_init counterpart would be expected, as in
                // the video branch), but it is left as-is because no such import is visible here.
                init_r = ffimp_audio_decode_init(ref pAVObj, pcfg, pOutBuf, pOutBufSize);
            }
            // _ffscale = new FFScale(cfg.width, cfg.height, 0, 12, cfg.width, cfg.height, 0, 12);
        }
    }
    Marshal.FreeHGlobal(pcfg);
}
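// Hedged usage sketch based on the commented-out call in the X264 Test() method above: decode a
// single H.264 frame with FFImp. The semantics of VideoDec's return value and of the output
// pointer are assumptions drawn from that commented code, not verified against FFImp itself.
public static void DecodeOneFrameSketch(byte[] h264Frame, int width, int height)
{
    // AVCodecCfg.CreateVideo and the CODEC_ID_H264 constant are taken from the Test() method above.
    var cfg = AVCodecCfg.CreateVideo(width, height, (int)SLW.Media.AVCode.CODEC_ID_H264, 100000);
    var decoder = new FFImp(cfg, true);                     // isDec = true, isvideo defaults to true
    IntPtr decoded = IntPtr.Zero;
    var result = decoder.VideoDec(h264Frame, ref decoded);  // result/decoded semantics assumed
    Console.WriteLine(result);
}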