/// <summary>
/// Smoke test: reads raw YUV frames from .\test.yuv, encodes each with x264 and
/// appends the resulting H.264 elementary stream to "4567.es".
/// </summary>
public void Test264File()
{
    int width = 320, height = 240;
    var x264 = new X264Native(new X264Params(width, height, 10, 320));
    //x264.SetIKeyIntMax(10);
    x264.Init();
    var ls = StreamingKit.Media.ReadFile.GetBuffByFile1(@".\test.yuv");
    AVCodecCfg cf = AVCodecCfg.CreateVideo(width, height, (int)StreamingKit.AVCode.CODEC_ID_H264, 100000);
    FFImp ffimp = new FFImp(cf, true);
    //FFScale ffscale = new FFScale(width, height, 26, 12, width, height, 12, 12);
    FFScale ffscale = new FFScale(width, height, 0, 12, width, height, 3, 24);
    foreach (var item1 in ls)
    {
        var item = ffscale.FormatS(item1);
        // BUGFIX: the old in_buf/out_buf AllocHGlobal pair belonged to the
        // commented-out raw X264Encode path; it was never used by the managed
        // Encode call and leaked once per frame — removed.
        var buf = x264.Encode(item);
        Console.WriteLine(buf.To16Strs(0, 16));
        if (w == null) // lazily create the output writer on the first frame
        {
            w = new BinaryWriter(new FileStream("4567.es", FileMode.Create));
        }
        w.Write(buf);
    }
}
/// <summary>
/// Encodes one block of raw PCM audio through the native ffimp encoder.
/// </summary>
/// <param name="inData">Raw input samples.</param>
/// <param name="outData">Receives the encoded frame on success.</param>
/// <returns>Encoded byte count, -1 on encoder failure, 0 if already released.</returns>
public int AudioEnc(byte[] inData, ref byte[] outData)
{
    // NOTE(review): lock(this) is fragile, but kept — callers may rely on
    // locking the instance for external synchronization.
    lock (this)
    {
        if (_isReleased)
        {
            return(0);
        }
        // BUGFIX: the original allocated an unused duplicate copy of inData
        // (pInData) and leaked bframe.Buff; it also leaked encFrameBuff when
        // taking the encSize == -1 early return.
        BFrame bframe = new BFrame();
        bframe.Buff = FunctionEx.BytesToIntPtr(inData);
        bframe.Size = inData.Length;
        IntPtr pbframe = FunctionEx.StructToIntPtr(bframe);
        var encFrameBuffLen = 192000;
        var encFrameBuff = Marshal.AllocHGlobal(encFrameBuffLen);
        try
        {
            var encSize = ffimp_audio_encode(pAVObj, encFrameBuff, encFrameBuffLen, pbframe);
            if (encSize == -1)
            {
                return(encSize);
            }
            outData = FunctionEx.IntPtrToBytes(encFrameBuff, 0, encSize);
            return(encSize);
        }
        finally
        {
            // bframe.Buff comes from BytesToIntPtr, which is freed with
            // FreeHGlobal elsewhere in this file, so this pairing is safe.
            Marshal.FreeHGlobal(bframe.Buff);
            Marshal.FreeHGlobal(encFrameBuff);
            // NOTE(review): pbframe (StructToIntPtr) is presumably also an
            // HGlobal allocation — confirm FunctionEx.StructToIntPtr before
            // freeing it here as well; left unfreed to match the original.
        }
    }
}
/// <summary>
/// Decodes one encoded audio frame through the native ffimp decoder.
/// The native side returns a pointer to a BFrame; its Buff/Size describe the PCM output.
/// </summary>
/// <param name="inData">Encoded audio frame.</param>
/// <param name="outData">Receives the decoded PCM bytes.</param>
/// <returns>Decoded byte count (bframe.Size); 0 when already released.</returns>
public int AudioDec(byte[] inData, ref byte[] outData)
{
    //lock (this) {
    if (_isReleased)
    {
        return(0);
    }
    // var size = System.Diagnostics.Process.GetCurrentProcess().PrivateMemorySize64;
    BFrame bframe = new BFrame();
    //bframe.Buff = FunctionEx.BytesToIntPtr(inData);
    //bframe.Size = inData.Length;
    // IntPtr pbframe = FunctionEx.StructToIntPtr(bframe);
    // The native call fills pbframe with a pointer to its result struct.
    IntPtr pbframe = IntPtr.Zero;
    // decSize return value is intentionally unused; the size is read from the
    // marshalled BFrame below.
    var decSize = ffimp_audio_decode(pAVObj, inData, inData.Length, ref pbframe);
    bframe = FunctionEx.IntPtrToStruct <BFrame>(pbframe, 0, Marshal.SizeOf(bframe));
    outData = FunctionEx.IntPtrToBytes(bframe.Buff, 0, bframe.Size);
    // NOTE(review): pbframe/bframe.Buff come from native code and are not freed
    // here — presumably the native decoder owns/reuses these buffers; TODO
    // confirm, otherwise this leaks per call.
    return(bframe.Size);
    //}
}
/// <summary>
/// Tears down the native codec state: frees the codec context and the codec
/// separately (via partially-populated AVModel copies), releases the scaler
/// and the output buffer. Safe to call even if parts fail — errors are logged.
/// </summary>
public void Release()
{
    //lock (this)
    //{
    // Mark released first so concurrent Enc/Dec calls bail out early.
    _isReleased = true;
    try
    {
        // NOTE(review): each ffimp_free_avobj call receives an AVModel with
        // only ONE field (context, then codec) copied from pAVObj — presumably
        // the native free routine frees whichever members are non-null,
        // allowing the two halves to be freed independently; TODO confirm.
        var avmode = new AVModel()
        {
            context = FunctionEx.IntPtrToStruct <AVModel>(pAVObj).context,
        };
        lock (this) ffimp_free_avobj(FunctionEx.StructToIntPtr(avmode));
        avmode = new AVModel()
        {
            codec = FunctionEx.IntPtrToStruct <AVModel>(pAVObj).codec,
        };
        lock (this) ffimp_free_avobj(FunctionEx.StructToIntPtr(avmode));
        //ffimp_free_avobj(pAVObj);
        if (_ffscale != null)
        {
            _ffscale.Release();
        }
        Marshal.FreeHGlobal(pOutBuf);
    }
    catch (Exception e)
    {
        Console.WriteLine("ffimp error:{0}", e);
    }
    //}
}
/// <summary>
/// Renders one video frame. In DD mode the buffer is treated as planar YUV420
/// (Y plane followed by quarter-size U and V planes) and handed to DD_Draw;
/// otherwise the raw buffer goes to DirectDraw as-is.
/// </summary>
/// <param name="buffer">Frame pixel data.</param>
/// <returns>Always true in DD mode; the DirectDraw result otherwise.</returns>
public unsafe virtual bool Draw(byte[] buffer)
{
    if (_DDMODE)
    {
        fixed(void *pBuff = buffer)
        {
            IntPtr p = new IntPtr(pBuff);
            ImageProperties a = new ImageProperties()
            {
                dwWidth = _width,
                dwHeight = _height,
                dwImageFormat = 0,
            };
            // NOTE(review): plane addresses are stored as 32-bit values; on a
            // 64-bit process p.ToInt32() can overflow — the native struct
            // would need widening to support x64.
            a.lpY = (uint)p.ToInt32();
            a.lpU = (uint)(a.lpY + (_width * _height));      // U follows the Y plane
            a.lpV = (uint)(a.lpU + (_width * _height >> 2)); // V follows the quarter-size U plane
            try
            {
                var aaa = DD_Draw(_obj, _hwnd, a);
            }
            catch (Exception xe)
            {
                // BUGFIX: was an empty catch that silently swallowed draw
                // failures; keep best-effort behavior but surface the error.
                Console.WriteLine("DD_Draw error:{0}", xe);
            }
            return(true);
        }
    }
    else
    {
        IntPtr ptr = FunctionEx.BytesToIntPtr(buffer);
        try
        {
            bool result = DirectDraw(_obj, _hwnd, ptr, buffer.Length);
            return(result);
        }
        finally
        {
            Marshal.FreeHGlobal(ptr);
        }
    }
}
/// <summary>
/// Creates a native x264 encoder wrapper from the given parameter struct and
/// records this instance as the most recently constructed encoder.
/// </summary>
/// <param name="p">Encoder parameters (dimensions, bitrate, ...).</param>
public X264Native(X264Params p)
{
    Bitrate = p.bitrate;
    // Worst-case managed output buffer: 4 bytes per pixel.
    _outBuf = new byte[p.width * p.height * 4];
    var paramBytes = FunctionEx.StructToBytes(p);
    obj = X264Native_New(paramBytes);
    _DLastX264Native = this;
}
// Camera frame event handler: copies the raw frame out of unmanaged memory,
// runs it through the scaler and forwards the result to the registered callback.
private void YUVFrame(object sender, AForge.Video.DirectShow.RGBRawFrameEventArgs e)
{
    byte[] frame = _ffscale.Convert(FunctionEx.IntPtrToBytes(e.Buffer, 0, e.Len));
    if (_callBack != null)
    {
        _callBack(frame);
    }
}
/// <summary>
/// Smoke test: encodes .\test.yuv with x264, appends the raw elementary stream
/// to "4567.es" and writes a length-prefixed MediaFrame stream to "./testfile.test".
/// </summary>
public void Test264FileFrame()
{
    int width = 320, height = 240;
    var x264 = new X264Native(new X264Params(width, height, 10, 320));
    //x264.SetIKeyIntMax(10);
    x264.Init();
    var ls = StreamingKit.Media.ReadFile.GetBuffByFile1(@".\test.yuv");
    AVCodecCfg cf = AVCodecCfg.CreateVideo(width, height, (int)StreamingKit.AVCode.CODEC_ID_H264, 100000);
    FFImp ffimp = new FFImp(cf, true);
    //FFScale ffscale = new FFScale(width, height, 26, 12, width, height, 12, 12);
    FFScale ffscale = new FFScale(width, height, 0, 12, width, height, 3, 24);
    // BUGFIX: the FileStream is now disposed even when encoding throws (the
    // original only ran fs.Close() on the success path), and the per-frame
    // in_buf/out_buf AllocHGlobal leak was removed (those buffers belonged to
    // the commented-out raw X264Encode path and were never used).
    using (var fs = new FileStream("./testfile.test", FileMode.CreateNew))
    {
        foreach (var item1 in ls)
        {
            var item = ffscale.FormatS(item1);
            var buf = x264.Encode(item);
            Console.WriteLine(buf.To16Strs(0, 16));
            var size = buf.Length;
            if (w == null) // lazily open the elementary-stream writer
            {
                w = new BinaryWriter(new FileStream("4567.es", FileMode.Create));
            }
            w.Write(buf);
            // Wrap the encoded frame in a MediaFrame header and length-prefix it.
            var mf = new MediaFrame();
            mf.IsKeyFrame = (byte)(x264.IsKeyFrame() ? 1 : 0);
            mf.Width = width;
            mf.Height = height;
            mf.Encoder = MediaFrame.H264Encoder;
            //mf.Timetick = 0;
            mf.Size = size;
            mf.SetData(buf);
            buf = mf.GetBytes();
            fs.Write(BitConverter.GetBytes(buf.Length), 0, 4);
            fs.Write(buf, 0, buf.Length);
            fs.Flush();
        }
    }
}
/// <summary>
/// Smoke test: encodes a hard-coded YUV capture file with x264 and appends the
/// elementary stream to "4567.es". (MediaFrame wrapping is left commented out.)
/// </summary>
public static void Test()
{
    int width = 320, height = 240;
    X264Native x264 = new X264Native(new X264Params(width, height, 10, 320));
    //x264.SetIKeyIntMax(10);
    x264.Init();
    var ls = MediaServer.Media.ReadFile.GetBuffByFile1(@"D:\video_monitor\v2v_super_exchange\branch\branch_V3.16.0.0(dongtaifuyong)\bin\Debug\1237.yuv");
    AVCodecCfg cf = AVCodecCfg.CreateVideo(width, height, (int)SLW.Media.AVCode.CODEC_ID_H264, 100000);
    FFImp ffimp = new FFImp(cf, true);
    //FFScale ffscale = new FFScale(width, height, 26, 12, width, height, 12, 12);
    FFScale ffscale = new FFScale(width, height, 0, 12, width, height, 3, 24);
    foreach (var item1 in ls)
    {
        var item = ffscale.FormatS(item1);
        // BUGFIX: removed the unused in_buf/out_buf AllocHGlobal pair that
        // leaked unmanaged memory once per frame (leftover from the
        // commented-out raw X264Encode path).
        var buf = x264.Encode(item);
        Console.WriteLine(buf.To16Strs(0, 16));
        if (w == null) // lazily create the output writer on the first frame
        {
            w = new BinaryWriter(new FileStream("4567.es", FileMode.Create));
        }
        w.Write(buf);
    }
}
/// <summary>
/// Encodes one PCM frame with Speex.
/// </summary>
/// <param name="data">Raw PCM input (one Speex frame).</param>
/// <returns>Encoded bytes, or null when the codec is disposing/disposed.</returns>
public byte[] Encode(byte[] data)
{
    if (_isDisoseing || _isDisosed)
    {
        return(null);
    }
    var pData = FunctionEx.BytesToIntPtr(data);
    // NOTE(review): 200 bytes assumes a Speex frame never encodes larger —
    // TODO confirm against the frame size this codec is configured with.
    var pOut = Marshal.AllocHGlobal(200);
    try
    {
        var encSize = Speex.SpeexEncode(this.pSpx, pData, pOut);
        return FunctionEx.IntPtrToBytes(pOut, 0, encSize);
    }
    finally
    {
        // BUGFIX: both buffers leaked if SpeexEncode threw.
        Marshal.FreeHGlobal(pOut);
        Marshal.FreeHGlobal(pData);
    }
}
/// <summary>
/// Decodes one Speex frame back to PCM.
/// </summary>
/// <param name="data">Encoded Speex frame.</param>
/// <returns>Decoded PCM bytes (up to Samples * 2), or null when disposing/disposed.</returns>
public byte[] Decode(byte[] data)
{
    if (_isDisoseing || _isDisosed)
    {
        return(null);
    }
    var pData = FunctionEx.BytesToIntPtr(data);
    // Output capacity: Samples 16-bit samples.
    var pOut = Marshal.AllocHGlobal(Samples * 2);
    try
    {
        var decSize = Speex.SpeexDecode(this.pSpx, data.Length, pData, pOut);
        return FunctionEx.IntPtrToBytes(pOut, 0, decSize);
    }
    finally
    {
        // BUGFIX: both buffers leaked if SpeexDecode threw.
        Marshal.FreeHGlobal(pOut);
        Marshal.FreeHGlobal(pData);
    }
}
/// <summary>
/// Extracts the VideoInfoHeader from an AMMediaType's format block.
/// </summary>
/// <param name="amm">Media type to inspect; may be null.</param>
/// <returns>The marshalled header, or null when there is no media type or no format data.</returns>
public static VideoInfoHeader QueryVideoInfoHeader(AMMediaType amm)
{
    // BUGFIX: the original null check had an empty body and fell through to a
    // NullReferenceException on amm.formatSize; treat null like "no format".
    if (amm == null || amm.formatSize == 0)
    {
        return(null);
    }
    var ptr = amm.formatPtr;
    var buff = new byte[amm.formatSize];
    Marshal.Copy(ptr, buff, 0, amm.formatSize);
    return(FunctionEx.BytesToStruct <VideoInfoHeader>(buff));
}
/// <summary>
/// Decodes one AAC frame with FAAD2, lazily initializing the decoder from the
/// first frame. Stereo output is downmixed to mono by taking the left channel.
/// </summary>
/// <param name="data">One encoded AAC frame.</param>
/// <returns>Mono 16-bit PCM bytes, or an empty array when nothing was decoded.</returns>
public byte[] Decode(byte[] data)
{
    if (!_inited)
    {
        uint samplerate = 0;
        byte channels = 0;
        lock (_lock)
        {
            var r = NeAACDecInit(_handle, data, (uint)data.Length, ref samplerate, ref channels);
            if (r == 0)
            {
                _inited = true;
            }
        }
    }
    if (_inited)
    {
        NeAACDecFrameInfo info = new NeAACDecFrameInfo();
        var pcm = NeAACDecDecode(_handle, ref info, data, data.Length);
        try
        {
            // NOTE(review): samples * channels is treated as a BYTE count here —
            // confirm against the faad2 build in use (upstream reports samples
            // as a total sample count, not bytes).
            int bufferLenth = info.samples * info.channels;
            if (bufferLenth > 0 && bufferLenth <= 4096)
            {
                byte[] pcm_data = FunctionEx.IntPtrToBytes(pcm, 0, bufferLenth);
                if (info.channels == 1)
                {
                    return(pcm_data);
                }
                else if (info.channels == 2)
                {
                    // Extract the left channel from interleaved 16-bit stereo.
                    // BUGFIX: loop bounds now follow the actual decoded length
                    // instead of hard-coded 4096/2048 — short frames previously
                    // read past pcm_data, and the resulting exception (masked
                    // by the catch below) returned an empty frame.
                    byte[] frame_mono = new byte[pcm_data.Length / 2];
                    for (int i = 0, j = 0; i + 1 < pcm_data.Length && j + 1 < frame_mono.Length; i += 4, j += 2)
                    {
                        frame_mono[j] = pcm_data[i];
                        frame_mono[j + 1] = pcm_data[i + 1];
                    }
                    return(frame_mono);
                }
            }
        }
        catch (Exception ex)
        {
            // Keep best-effort behavior but surface the failure instead of
            // silently swallowing it.
            Console.WriteLine("aac decode error:{0}", ex);
        }
    }
    return(new byte[0]);
}
/// <summary>
/// Runs Speex acoustic echo cancellation: subtracts the played-back signal
/// from the microphone capture.
/// </summary>
/// <param name="play">PCM frame that was sent to the speakers.</param>
/// <param name="mic">PCM frame captured from the microphone (output is the same length).</param>
/// <returns>Echo-cancelled PCM, or null when disposing/disposed.</returns>
public byte[] Cancellation(byte[] play, byte[] mic)
{
    if (_isDisoseing || _isDisosed)
    {
        return(null);
    }
    var pPlay = FunctionEx.BytesToIntPtr(play);
    var pMic = FunctionEx.BytesToIntPtr(mic);
    var pOut = Marshal.AllocHGlobal(mic.Length);
    try
    {
        Speex.SpeexEchoCancellation(pSpx, pPlay, pMic, pOut);
        return FunctionEx.IntPtrToBytes(pOut, 0, mic.Length);
    }
    finally
    {
        // BUGFIX: all three buffers leaked if the native call threw.
        Marshal.FreeHGlobal(pPlay);
        Marshal.FreeHGlobal(pMic);
        Marshal.FreeHGlobal(pOut);
    }
}
// Pulls the encoder's current faac configuration, overwrites it with our fixed
// settings and pushes it back. The numeric values map to faac enum constants
// (presumably 16-bit input / ADTS output / LC object type — confirm against
// the faac headers).
private void InitConfiguration()
{
    var pConfig = faacEncGetCurrentConfiguration(_handle);
    var config = FunctionEx.IntPtrToStruct <faacEncConfiguration>(pConfig);
    config.inputFormat = 1;
    config.outputFormat = 1;
    config.useTns = 1;
    config.useLfe = 0;
    config.aacObjectType = 2;
    config.shortctl = 0;
    config.quantqual = 80;
    config.bandWidth = 0;
    config.bitRate = (uint)_bitrate;
    FunctionEx.IntPtrSetValue(pConfig, config);
    faacEncSetConfiguration(_handle, pConfig);
}
/// <summary>
/// Encodes an already-prepared native AVFrame and copies the compressed output
/// into <paramref name="outData"/> (left untouched when nothing was produced).
/// </summary>
/// <param name="frame">Pointer to the native frame to encode.</param>
/// <param name="outData">Receives the encoded bytes when the size is positive.</param>
/// <returns>Encoded byte count from the native encoder; 0 if already released.</returns>
public int VideoEnc(IntPtr frame, ref byte[] outData)
{
    lock (this)
    {
        if (_isReleased)
        {
            return 0;
        }
        int capacity = cfg.width * cfg.height * 3;
        IntPtr nativeBuf = Marshal.AllocHGlobal(capacity);
        int written = ffimp_video_encode(pAVObj, nativeBuf, capacity, frame);
        if (written > 0)
        {
            outData = FunctionEx.IntPtrToBytes(nativeBuf, 0, written);
        }
        Marshal.FreeHGlobal(nativeBuf);
        return written;
    }
}
/// <summary>
/// Opens the SDL audio device with the requested frequency, buffer size and
/// channel count, and registers the audio-fill callback.
/// </summary>
/// <param name="freq">Sample rate in Hz.</param>
/// <param name="samples">Audio buffer size in samples.</param>
/// <param name="channels">Channel count.</param>
public void InitAudio(int freq, int samples, int channels)
{
    // Keep the delegate in a field so the GC cannot collect it while SDL still
    // holds the native function pointer.
    audioCallBack = new Sdl.AudioSpecCallbackDelegate(AudioCallBack);
    Sdl.SDL_AudioSpec s1 = new Sdl.SDL_AudioSpec()
    {
        freq = freq,
        samples = (short)samples,
        channels = (byte)channels,
        format = 0x0010, // sample format flag — presumably 16-bit PCM; confirm against Sdl constants
        callback = Marshal.GetFunctionPointerForDelegate(audioCallBack),
    };
    Sdl.SDL_AudioSpec s2 = new Sdl.SDL_AudioSpec();
    var p1 = FunctionEx.StructToIntPtr(s1);
    var p2 = FunctionEx.StructToIntPtr(s2);
    try
    {
        var cs = Sdl.SDL_OpenAudio(p1, p2);
    }
    finally
    {
        // BUGFIX: both spec buffers leaked. SDL_OpenAudio copies the desired
        // spec and fills the obtained one, so freeing after the call is safe.
        // NOTE(review): assumes FunctionEx.StructToIntPtr allocates with
        // AllocHGlobal, matching the FreeHGlobal pairing used elsewhere — confirm.
        Marshal.FreeHGlobal(p1);
        Marshal.FreeHGlobal(p2);
    }
}
// DirectShow sample-grabber callback: ignores frames while disposed or idle,
// otherwise copies the grabbed frame out of unmanaged memory, converts it
// through the scaler and forwards it to the registered callback.
private void SampleGrabber_Callback(double SampleTime, IntPtr pBuf, int len)
{
    if (_isDisoseing || _isDisosed)
    {
        return;
    }
    if (!_isworking)
    {
        return;
    }
    byte[] frame = FunctionEx.IntPtrToBytes(pBuf, 0, len);
    frame = _ffscale.Convert(frame);
    if (_callBack != null)
    {
        _callBack(frame);
    }
}
/// <summary>
/// Converts a raw picture buffer to a native AVFrame (conversion path chosen
/// by the configured pixel format) and encodes it.
/// </summary>
/// <param name="inData">Raw picture bytes in cfg.pix_fmt layout.</param>
/// <param name="outData">Receives the encoded bytes when the size is positive.</param>
/// <returns>Encoded byte count from the native encoder; 0 if already released.</returns>
public int VideoEnc(byte[] inData, ref byte[] outData)
{
    lock (this)
    {
        if (_isReleased)
        {
            return(0);
        }
        var pInData = FunctionEx.BytesToIntPtr(inData);
        var encFrameBuffLen = cfg.width * cfg.height * 3;
        var encFrameBuff = Marshal.AllocHGlobal(encFrameBuffLen);
        try
        {
            IntPtr frame;
            if (cfg.pix_fmt == (int)PixelFormat.PIX_FMT_YUV420P)
            {
                frame = ffimp_YUVBuff2YUVAVFrame1(pAVObj, pInData, inData.Length);
            }
            else if (cfg.pix_fmt == (int)PixelFormat.PIX_FMT_RGB565)
            {
                frame = ffimp_RGBBuff2YUVAVFrame1(pAVObj, pInData, inData.Length);
            }
            else
            {
                frame = ffimp_FMTBuff2YUVAVFrame1(pAVObj, cfg.pix_fmt, pInData, inData.Length);
            }
            // NOTE(review): the returned frame pointer is never freed here —
            // presumably owned/reused by the native side; TODO confirm.
            var encSize = ffimp_video_encode(pAVObj, encFrameBuff, encFrameBuffLen, frame);
            // BUGFIX: only copy the output on success (consistent with the
            // IntPtr overload); the original called IntPtrToBytes even for
            // non-positive sizes.
            if (encSize > 0)
            {
                outData = FunctionEx.IntPtrToBytes(encFrameBuff, 0, encSize);
            }
            return(encSize);
        }
        finally
        {
            // BUGFIX: both buffers leaked if a native call threw.
            Marshal.FreeHGlobal(pInData);
            Marshal.FreeHGlobal(encFrameBuff);
        }
    }
}
/// <summary>
/// Initializes the native ffimp codec wrapper: allocates the shared output
/// buffer, runs the video or audio encode/decode init, and (for video)
/// creates a pass-through scaler.
/// </summary>
/// <param name="cfg">Codec configuration (dimensions, pixel format, codec id).</param>
/// <param name="isDec">True to initialize a decoder, false for an encoder.</param>
/// <param name="isvideo">True for video, false for audio.</param>
public FFImp(AVCodecCfg cfg, bool isDec, bool isvideo = true)
{
    this.isDec = isDec;
    this.cfg = cfg;
    var pcfg = FunctionEx.StructToIntPtr(cfg);
    pAVObj = FunctionEx.StructToIntPtr(new AVModel());
    lock (_lock)
    {
        if (!ffimp_init())
        {
            throw new Exception("ffimp init error");
        }
    }
    if (isvideo)
    {
        // Output buffer sized for 24bpp; fall back to full-HD RGBA when the
        // cfg carries no dimensions.
        int pOutBufSize = cfg.width * cfg.height * 3;
        if (pOutBufSize == 0)
        {
            pOutBufSize = 1920 * 1080 * 4;
        }
        pOutBuf = Marshal.AllocHGlobal(pOutBufSize);
        int init_r = 0;
        lock (_lock)
        {
            if (this.isDec)
            {
                init_r = ffimp_video_decode_init(ref pAVObj, pcfg, pOutBuf, pOutBufSize);
            }
            else
            {
                init_r = ffimp_video_encode_init(ref pAVObj, pcfg, pOutBuf, pOutBufSize);
            }
            // Same in/out geometry and format — scaler acts as a copy/convert stage.
            _ffscale = new FFScale(cfg.width, cfg.height, 0, 12, cfg.width, cfg.height, 0, 12);
        }
    }
    else
    {
        // NOTE(review): width*height is presumably 0 for audio configs, so the
        // 2048*2 fallback is the effective audio buffer size — confirm.
        int pOutBufSize = cfg.width * cfg.height * 3;
        if (pOutBufSize == 0)
        {
            pOutBufSize = 2048 * 2;
        }
        pOutBuf = Marshal.AllocHGlobal(pOutBufSize);
        int init_r = 0;
        lock (_lock)
        {
            if (this.isDec)
            {
                init_r = ffimp_audio_decode_init(ref pAVObj, pcfg, pOutBuf, pOutBufSize);
            }
            else
            {
                // NOTE(review): this branch also calls ffimp_audio_decode_init —
                // likely a copy-paste bug (the video branch calls a distinct
                // *_encode_init); an audio encode-init entry point is not
                // visible from here, so this is flagged rather than changed.
                init_r = ffimp_audio_decode_init(ref pAVObj, pcfg, pOutBuf, pOutBufSize);
            }
            // _ffscale = new FFScale(cfg.width, cfg.height, 0, 12, cfg.width, cfg.height, 0, 12);
        }
    }
    Marshal.FreeHGlobal(pcfg);
}