/// <summary>
/// Creates a libswscale transform context for converting between two raw video formats
/// (scaling and/or pixel-format conversion, bicubic interpolation).
/// </summary>
/// <param name="cFormatSource">source geometry and pixel format</param>
/// <param name="cFormatTarget">target geometry and pixel format</param>
/// <exception cref="Exception">thrown when sws_getContext fails (returns NULL)</exception>
public TransformContext(Format.Video cFormatSource, Format.Video cFormatTarget)
{
    ////lock (helper._oSyncRootGlobal)
    pContext = Functions.sws_getContext(cFormatSource.nWidth, cFormatSource.nHeight, cFormatSource.ePixelFormat, cFormatTarget.nWidth, cFormatTarget.nHeight, cFormatTarget.ePixelFormat, Constants.SWS_BICUBIC, NULL, NULL, NULL);
    if (NULL == pContext)
        throw new Exception("can't init transform context"); // fixed typo: "trasform" -> "transform"
    _cFormatSource = cFormatSource;
    _cFormatTarget = cFormatTarget;
}
/// <summary>
/// Creates a libswscale transform context for converting between two raw video formats
/// (scaling and/or pixel-format conversion, bicubic interpolation). Also initializes
/// the lock object used to guard disposal.
/// </summary>
/// <param name="cFormatSource">source geometry and pixel format</param>
/// <param name="cFormatTarget">target geometry and pixel format</param>
/// <exception cref="Exception">thrown when sws_getContext fails (returns NULL)</exception>
public TransformContext(Format.Video cFormatSource, Format.Video cFormatTarget)
{
    _oDisposeLock = new object();
    ////lock (helper._oSyncRootGlobal)
    pContext = Functions.sws_getContext(cFormatSource.nWidth, cFormatSource.nHeight, cFormatSource.ePixelFormat, cFormatTarget.nWidth, cFormatTarget.nHeight, cFormatTarget.ePixelFormat, Constants.SWS_BICUBIC, NULL, NULL, NULL);
    if (NULL == pContext)
    {
        throw new Exception("can't init transform context"); // fixed typo: "trasform" -> "transform"
    }
    _cFormatSource = cFormatSource;
    _cFormatTarget = cFormatTarget;
}
/// <summary>
/// Returns true when the other format is a video format with the same pixel format,
/// dimensions and buffer size as this one; false otherwise (including non-video formats).
/// </summary>
public override bool IsAlikeTo(Format cFormat)
{
    Format.Video cOther = cFormat as Format.Video;
    if (null == cOther)
    {
        return false;
    }
    return ePixelFormat == cOther.ePixelFormat
        && nWidth == cOther.nWidth
        && nHeight == cOther.nHeight
        && nBufferSize == cOther.nBufferSize;
}
/// <summary>
/// Pins the managed byte buffer and binds it to the native AVFrame: fills picture
/// planes/linesizes for video, or sample data for audio, according to the given format.
/// </summary>
/// <param name="cFormat">format describing the buffer layout; when null, only pinning is done</param>
/// <exception cref="Exception">thrown when avpicture_fill / avcodec_fill_audio_frame fails</exception>
private void Init(Format cFormat)
{
    // Pin aBuffer so native FFmpeg code can hold a stable pointer into it.
    // NOTE(review): pinning happens even when aBuffer is null (GCHandle.Alloc(null) is legal
    // but _pBytes is then a handle to nothing) — confirm callers never use _pBytes in that case.
    _cGCHandle = GCHandle.Alloc(aBuffer, GCHandleType.Pinned);
    _pBytes = _cGCHandle.AddrOfPinnedObject();
    if (null == cFormat)
    {
        return;
    }
    AVFrame cAVFrame;
    if (null != aBuffer)
    {
        int nResult;
        if (cFormat is Format.Video)
        {
            Format.Video cFormatVideo = (Format.Video)cFormat;
            //lock (helper._oSyncRootGlobal)
            // Point the AVFrame's data/linesize arrays at the pinned buffer for this picture geometry.
            if (0 > (nResult = Functions.avpicture_fill(_pAVFrame, aBuffer, cFormatVideo.ePixelFormat, cFormatVideo.nWidth, cFormatVideo.nHeight)))
            {
                throw new Exception("Frame.AVFrameInit.avpicture_fill = " + nResult);
            }
            // Read the native frame back, tweak quality/pts, and write it back in place.
            cAVFrame = (AVFrame)Marshal.PtrToStructure(_pAVFrame, typeof(AVFrame));
            cAVFrame.quality = 1;
            cAVFrame.pts = 0;
            Marshal.StructureToPtr(cAVFrame, _pAVFrame, true);
        }
        else
        {
            Format.Audio cFormatAudio = (Format.Audio)cFormat;
            cAVFrame = (AVFrame)Marshal.PtrToStructure(_pAVFrame, typeof(AVFrame));
            // When the codec does not dictate a frame size (frame_size < 1), derive the sample
            // count from the buffer length and the per-sample byte size across all channels.
            if (1 > (cAVFrame.nb_samples = cFormatAudio.stAVCodecContext.frame_size))
            {
                cAVFrame.nb_samples = aBuffer.Length / ((cFormatAudio.nBitsPerSample / 8) * cFormatAudio.nChannelsQty);
            }
            cAVFrame.channel_layout = cFormatAudio.stAVCodecContext.channel_layout;
            cAVFrame.format = (int)cFormatAudio.stAVCodecContext.sample_fmt;
            Marshal.StructureToPtr(cAVFrame, _pAVFrame, true);
            //lock (helper._oSyncRootGlobal)
            // Attach the pinned buffer as the frame's audio data (align = 1).
            if (0 > (nResult = Functions.avcodec_fill_audio_frame(_pAVFrame, cFormatAudio.nChannelsQty, cFormatAudio.eSampleFormat, aBuffer, nLengthBuffer, 1)))
            {
                throw new Exception("Frame.AVFrameInit.avcodec_fill_audio_frame = " + nResult);
            }
        }
    }
}
/// <summary>
/// Converts a raw source frame (whose bytes are laid out in THIS format) to the target
/// video format and encodes it. The encoder may buffer internally, so the result can be
/// empty for a given call; on internal failure the error is logged and an empty array returned.
/// </summary>
/// <param name="cFormatTarget">target format; must be a Format.Video</param>
/// <param name="cFrameSource">source frame; bytes are in this (source) format</param>
/// <returns>zero or more encoded frames</returns>
/// <exception cref="Exception">thrown when the target format is null or not a video format</exception>
override public Frame[] Convert(Format cFormatTarget, Frame cFrameSource)
{
    List<Frame> aRetVal = new List<Frame>();
    if (null == cFormatTarget || !(cFormatTarget is Format.Video))
        throw new Exception("target format is null or has a wrong type");
    Format.Video cFormatVideoTarget = (Format.Video)cFormatTarget;
    try
    {
        // Same pixel format and geometry: no transform needed, pass a byte copy through.
        // FIX: the terminating ';' was misplaced outside the if-block, leaving this return
        // statement unterminated; moved it to its proper place.
        if (ePixelFormat == cFormatVideoTarget.ePixelFormat && nHeight == cFormatVideoTarget.nHeight && nWidth == cFormatVideoTarget.nWidth)
        {
            return new Frame[] { new Frame(cFrameSource.aBytesCopy) { nPTS = cFrameSource.nPTS, bKeyframe = cFrameSource.bKeyframe } };
        }
        // NOTE(review): '||' looks suspicious here (possibly '&&' intended); kept as-is.
        if (eCodecID == cFormatTarget.eCodecID || NULL != _pCodec)
            throw new NotImplementedException(); //TODO: implement conversion from encoded to raw
        cFrameSource = Transform(cFormatVideoTarget, cFrameSource);
        int nSize;
        // NOTE(review): Frame compared against NULL (IntPtr) — presumably Frame defines
        // such an equality operator; verify.
        if (NULL == cFrameSource)
            (new Logger()).WriteWarning("Format.Video.Convert: IntPtr.Zero == cFrameSource.AVFrameGet()");
        if (NULL == cFormatVideoTarget.pAVCodecContext)
            (new Logger()).WriteWarning("Format.Video.Convert: IntPtr.Zero == cFormatVideoTarget.pAVCodecContext");
        // Lazily allocate the reusable encode buffer frame; its nPTS doubles as the running PTS counter.
        if (null == _cFrame)
        {
            _cFrame = new Frame(cFormatVideoTarget.nBufferSize);
            _cFrame.nPTS = 0;
        }
        cFrameSource.nPTS = _cFrame.nPTS;
        nSize = Functions.avcodec_encode_video(cFormatVideoTarget.pAVCodecContext, _cFrame.aBuffer, _cFrame.nLengthBuffer, cFrameSource.pBytes);
        if (0 > nSize)
            throw new Exception("video encoding failed:" + nSize);
        if (0 < nSize)
        {
            // Copy only the bytes actually produced by the encoder.
            aRetVal.Add(new Frame(null, _cFrame.aBuffer.Take(nSize).ToArray()));
            AVCodecContext stAVCodecContext = (AVCodecContext)Marshal.PtrToStructure(cFormatVideoTarget.pAVCodecContext, typeof(AVCodecContext));
            if (NULL != stAVCodecContext.coded_frame)
            {
                // Propagate PTS and keyframe flag from the encoder's coded frame.
                AVFrame cAVFrame = (AVFrame)Marshal.PtrToStructure(stAVCodecContext.coded_frame, typeof(AVFrame));
                aRetVal[0].nPTS = cAVFrame.pts;
                aRetVal[0].bKeyframe = 0 < cAVFrame.key_frame;
            }
        }
        _cFrame.nPTS++;
    }
    catch (Exception ex)
    {
        (new Logger()).WriteError(ex);
    }
    return aRetVal.ToArray();
}
/// <summary>
/// Starts the background frame-writer thread, pre-decodes a small batch of frames
/// into the queue, then starts the decode-and-cache thread.
/// </summary>
/// <param name="cFormatVideo">target video format for decoded frames</param>
/// <param name="cFormatAudio">target audio format for decoded frames</param>
public void Prepare(Format.Video cFormatVideo, Format.Audio cFormatAudio)
{
    _bDoWritingFrames = false;
    _aqWritingFrames = new Queue<byte[]>();

    // Background worker that flushes queued frame bytes.
    _cThreadWritingFramesWorker = new System.Threading.Thread(WritingFramesWorker)
    {
        IsBackground = true,
        Priority = System.Threading.ThreadPriority.Normal
    };
    _cThreadWritingFramesWorker.Start();

    _cFormatVideoTarget = cFormatVideo;
    _cFormatAudioTarget = cFormatAudio;

    // Pre-decode at most five frames (fewer when the cache is smaller).
    _nDecodedFramesInPrepare = nCacheSize > 5 ? 5 : nCacheSize;
    _nPreparedFramesIndx = _nDecodedFramesInPrepare;

    lock (_cCloseLock)
    {
        for (int nFrame = 0; nFrame < _nDecodedFramesInPrepare && !_bFileEnd; nFrame++)
        {
            AddFrameToQueue();
            System.Threading.Thread.Sleep(0); // yield between decodes
        }
    }

    // Decoder thread that keeps the frame cache filled from here on.
    _cThreadDecodeAndCache = new Thread(DecodeAndCache)
    {
        IsBackground = true,
        Priority = Thread.CurrentThread.Priority
    };
    _cThreadDecodeAndCache.Start();
}
/// <summary>
/// Probes the opened container's streams, picks the video stream and the first audio
/// stream, seeks to the requested start frame, builds the source formats, and computes
/// the total frame count from stream durations. Disposes this instance on any failure.
/// </summary>
/// <param name="nFrameStart">zero-based frame index to seek to before decoding</param>
/// <exception cref="Exception">thrown when no suitable video or audio stream is found</exception>
private void Init(ulong nFrameStart)
{
    try
    {
        AVStream stStream;
        AVCodecContext stCodecCtx;
        float nVideoDuration, nAudioDuration;
        nVideoDuration = nAudioDuration = float.MaxValue;
        AVMediaType eAVMediaType;
        for (int nIndx = 0; nIndx < _cFormatCtx.nStreamsQty; nIndx++)
        {
            stStream = _cFormatCtx.aStreams[nIndx];
            stCodecCtx = (AVCodecContext)Marshal.PtrToStructure(stStream.codec, typeof(AVCodecContext));
            eAVMediaType = (AVMediaType)stCodecCtx.codec_type;
            if (AVMediaType.AVMEDIA_TYPE_VIDEO == eAVMediaType)
            {
                #region VIDEO
                _nVideoStreamIndx = stStream.index;
                long nFrameTarget;
                // Convert frame index to stream time base units.
                // NOTE(review): the *40/1000 factor assumes 40 ms per frame, i.e. 25 fps — confirm.
                nFrameTarget = Functions.av_rescale((long)(nFrameStart * 40), stStream.time_base.den, stStream.time_base.num) / 1000; //FPS
                if (0 < nFrameStart)
                    _cFormatCtx.Seek(_nVideoStreamIndx, nFrameTarget);
                (new Logger()).WriteDebug("init: seek [file_start_fr:" + nFrameStart + "] [frame_target:" + nFrameTarget + "]"); //logging
                _cFormatVideo = new Format.Video((ushort)stCodecCtx.width, (ushort)stCodecCtx.height, stCodecCtx.codec_id, stCodecCtx.pix_fmt, stStream.codec);
                //nFramesPerSecond = (ushort)stStream.r_frame_rate.num;
                //nFramesPerSecond = (ushort)stStream.time_base.den;
                nFramesPerSecond = (ushort)(stStream.avg_frame_rate.num);
                // Prefer duration from the stream time base when all its numbers are sane;
                // otherwise fall back to frame-count or raw-duration heuristics.
                if (0 < stStream.time_base.num && 0 < stStream.time_base.den && 0 < stStream.duration)
                    nVideoDuration = stStream.duration * stStream.time_base.num / (float)stStream.time_base.den;
                else
                {
                    (new Logger()).WriteWarning("init: wrong duration numbers");
                    if (0 < stStream.nb_frames)
                        nVideoDuration = stStream.nb_frames / (float)nFramesPerSecond; // for mov DvPal, hdv
                    else
                        nVideoDuration = stStream.duration / (float)nFramesPerSecond; // for HD MXF only this works
                }
                _aqVideoPackets = new Queue<IntPtr>();
                _aqVideoFrames = new Queue<Frame>();
                _aqVideoFramesFree = new Queue<Frame>();
                #endregion
            }
            else if (AVMediaType.AVMEDIA_TYPE_AUDIO == eAVMediaType && 0 > _nAudioStreamIndx)
            {
                #region AUDIO
                _nAudioStreamIndx = stStream.index;
                // NOTE(review): this ignores time_base.num (cf. the video branch above) and
                // time_base.den is also passed as the sample rate below — verify both.
                nAudioDuration = stStream.duration / (float)stStream.time_base.den;
                _cFormatAudio = new Format.Audio(stStream.time_base.den, stCodecCtx.channels, stCodecCtx.codec_id, (AVSampleFormat)stCodecCtx.sample_fmt, stStream.codec);
                //_cFormatAudio = new Format.Audio(stCodecCtx.sample_rate, stCodecCtx.channels, stCodecCtx.codec_id, (AVSampleFormat)stCodecCtx.sample_fmt, stStream.codec);
                _pPacketAudio = NULL;
                _aqAudioPackets = new Queue<IntPtr>();
                _aqAudioFrames = new Queue<Frame>();
                _aqAudioFramesFree = new Queue<Frame>();
                #endregion
            }
        }
        if (0 > _nVideoStreamIndx && 0 > _nAudioStreamIndx)
            throw new Exception("can't find suitable streams");
        if (nVideoDuration < float.MaxValue || nAudioDuration < float.MaxValue)
        {
            // Estimate total frames from whichever durations are known; when video leads
            // audio by 1-2 frames, trust the video count, otherwise take the smaller.
            ulong nVideoFramesQty = nVideoDuration < float.MaxValue ? (ulong)(nVideoDuration * nFramesPerSecond) : ulong.MaxValue;
            ulong nAudioFramesQty = nAudioDuration < float.MaxValue ? (ulong)(nAudioDuration * nFramesPerSecond) : ulong.MaxValue;
            //(new Logger()).WriteWarning("Video and audio frames quantity doesn't match!! [video=" + nVideoFramesQty + "] [audio=" + nAudioFramesQty + "]");
            if (1 == nVideoFramesQty - nAudioFramesQty || 2 == nVideoFramesQty - nAudioFramesQty)
                nFramesQty = nVideoFramesQty - nFrameStart;
            else
                nFramesQty = (nVideoFramesQty < nAudioFramesQty ? nVideoFramesQty : nAudioFramesQty) - nFrameStart;
        }
    }
    catch
    {
        Dispose();
        throw;
    }
}
/// <summary>
/// Releases the audio format, video format and the format context (in that order),
/// then warns if any frames are still locked by consumers.
/// </summary>
virtual public void Dispose()
{
    if (null != _cFormatAudio)
    {
        _cFormatAudio.Dispose();
        _cFormatAudio = null;
    }
    if (null != _cFormatVideo)
    {
        _cFormatVideo.Dispose();
        _cFormatVideo = null;
    }
    if (null != _cFormatCtx)
    {
        _cFormatCtx.Dispose();
        _cFormatCtx = null;
    }
    lock (_aFramesLocked)
    {
        // The original body held a no-op self-assignment here (likely a debugger anchor);
        // replaced with an explicit warning so leaked frames become visible in the log.
        if (0 < _aFramesLocked.Count)
            (new Logger()).WriteWarning("dispose: " + _aFramesLocked.Count + " frame(s) still locked");
    }
}
/// <summary>
/// Adds a video stream to the output container and builds the target video format
/// bound to that stream's codec context. No-op when the container has no video codec.
/// </summary>
/// <param name="cFormat">source video format used as the template for the new stream</param>
private void VideoStreamCreate(Format.Video cFormat)
{
    AVOutputFormat stAVOutputFormat = (AVOutputFormat)Marshal.PtrToStructure(_pFormatOutput, typeof(AVOutputFormat));
    if (stAVOutputFormat.video_codec == CodecID.CODEC_ID_NONE)
        return;
    _pStreamVideo = _cFormatCtx.StreamAdd();
    AVStream stAVStream = (AVStream)Marshal.PtrToStructure(_pStreamVideo, typeof(AVStream));
    // NOTE: removed an unused local that marshaled cFormat.pAVCodecContext and never read it.
    // Force the codec id dictated by the output container onto the template's codec context.
    cFormat.stAVCodecContext.codec_id = stAVOutputFormat.video_codec;
    Marshal.StructureToPtr(cFormat.stAVCodecContext, cFormat.pAVCodecContext, true);
    _cFormatVideo = new Format.Video(cFormat, stAVStream.codec);
    _cFormatVideo.stAVCodecContext = (AVCodecContext)Marshal.PtrToStructure(_cFormatVideo.pAVCodecContext, typeof(AVCodecContext));
    // Some formats want stream headers to be separate.
    if (0 < (stAVOutputFormat.flags & Constants.AVFMT_GLOBALHEADER))
        _cFormatVideo.stAVCodecContext.flags |= (int)CodecFlags.CODEC_FLAG_GLOBAL_HEADER;
    // Write the updated codec context and stream struct back to native memory.
    Marshal.StructureToPtr(_cFormatVideo.stAVCodecContext, stAVStream.codec, true);
    Marshal.StructureToPtr(stAVStream, _pStreamVideo, true);
}
/// <summary>
/// Opens the media file, registers FFmpeg once globally, probes its streams, seeks to
/// the requested start frame, builds source formats, and estimates the total frame count.
/// Disposes this instance and rethrows on any failure.
/// </summary>
/// <param name="sFile">path of the media file to open</param>
/// <param name="nFrameStart">zero-based frame index to seek to before decoding</param>
/// <exception cref="Exception">thrown when no suitable video or audio stream is found</exception>
public Input(string sFile, ulong nFrameStart)
{
    try
    {
        // One-time global FFmpeg registration, guarded by the shared lock.
        lock (helper._cSyncRootGlobal)
        {
            if (!helper._bInitialized)
            {
                Functions.av_register_all();
                helper._bInitialized = true;
            }
        }
        //Functions.av_log_set_level(Constants.AV_LOG_DEBUG * 10);
        //System.IO.File.WriteAllText("c:/ffmpeg.log", "");
        //System.IO.File.WriteAllText("c:/ffmpeg1.log", "");
        //Functions.av_log_set_callback(new Functions.av_log_callback(av_log));
        //Functions.av_log_set_callback(Functions.av_log_callback);
        //pLogCallback = Functions.av_log_get_callback();
        //Functions.av_log_set_callback(pLogCallback);
        _cSyncRoot = new object();
        _nPacketIndx = 0;
        //logging
        _nTotalVideoPackets = 0;
        _nTotalAudioPackets = 0;
        _bClose = false;
        _cCloseLock = new object();
        _bFileEnd = false;
        _nFPS = 25; //FPS
        _sFile = sFile;
        nCacheSize = 100;
        tsTimeout = TimeSpan.FromSeconds(10);
        bPrepared = false;
        AVStream stStream;
        AVCodecContext stCodecCtx;
        _cFormatCtx = AVFormatContext.OpenInput(_sFile);// Functions.avformat_open_input(_sFile);
        _cFormatCtx.StreamInfoFind();
        _nVideoStreamIndx = -1;
        _nAudioStreamIndx = -1;
        nFramesPerSecond = _nFPS;
        float nVideoDuration, nAudioDuration;
        nVideoDuration = nAudioDuration = float.MaxValue;
        AVMediaType eAVMediaType;
        // Probe every stream: take the video stream and the FIRST audio stream only.
        for (int nIndx = 0; nIndx < _cFormatCtx.nStreamsQty; nIndx++)
        {
            stStream = _cFormatCtx.aStreams[nIndx];
            stCodecCtx = (AVCodecContext)Marshal.PtrToStructure(stStream.codec, typeof(AVCodecContext));
            eAVMediaType = (AVMediaType)stCodecCtx.codec_type;
            if (AVMediaType.AVMEDIA_TYPE_VIDEO == eAVMediaType)
            {
                #region VIDEO
                _nVideoStreamIndx = nIndx;
                long nFrameTarget;
                // Convert frame index to stream time base units.
                // NOTE(review): *40/1000 assumes 40 ms per frame, i.e. 25 fps — confirm.
                nFrameTarget = Functions.av_rescale((long)(nFrameStart * 40), stStream.time_base.den, stStream.time_base.num) / 1000;
                if (0 < nFrameStart)
                    _cFormatCtx.Seek(_nVideoStreamIndx, nFrameTarget);
                _cFormatVideo = new Format.Video((ushort)stCodecCtx.width, (ushort)stCodecCtx.height, stCodecCtx.codec_id, stCodecCtx.pix_fmt, stStream.codec);
                nFramesPerSecond = (ushort)stStream.r_frame_rate.num;
                //nFramesPerSecond = (ushort)stStream.time_base.den;
                // NOTE(review): integer division of time base before the float divide may lose
                // precision for non-integral time bases — verify against expected containers.
                nVideoDuration = stStream.duration / (ushort)(stStream.time_base.den / stStream.time_base.num); // (float)nFramesPerSecond;
                _aqVideoPackets = new Queue<IntPtr>();
                _aqVideoFrames = new Queue<Frame>();
                _aqVideoFramesFree = new Queue<Frame>();
                #endregion
            }
            else if (AVMediaType.AVMEDIA_TYPE_AUDIO == eAVMediaType && 0 > _nAudioStreamIndx)
            {
                #region AUDIO
                _nAudioStreamIndx = nIndx;
                // NOTE(review): ignores time_base.num, and time_base.den is also passed
                // as the sample rate below — verify both.
                nAudioDuration = stStream.duration / (float)stStream.time_base.den;
                _cFormatAudio = new Format.Audio(stStream.time_base.den, stCodecCtx.channels, stCodecCtx.codec_id, (AVSampleFormat)stCodecCtx.sample_fmt, stStream.codec);
                _pPacketAudio = NULL;
                _aqAudioPackets = new Queue<IntPtr>();
                _aqAudioFrames = new Queue<Frame>();
                _aqAudioFramesFree = new Queue<Frame>();
                #endregion
            }
        }
        if (0 > _nVideoStreamIndx && 0 > _nAudioStreamIndx)
            throw new Exception("can't find suitable streams");
        if (nVideoDuration < float.MaxValue || nAudioDuration < float.MaxValue)
        {
            // Estimate total frames from whichever durations are known; when video leads
            // audio by 1-2 frames, trust the video count, otherwise take the smaller.
            ulong nVideoFramesQty = nVideoDuration < float.MaxValue ? (ulong)(nVideoDuration * nFramesPerSecond) : ulong.MaxValue;
            ulong nAudioFramesQty = nAudioDuration < float.MaxValue ? (ulong)(nAudioDuration * nFramesPerSecond) : ulong.MaxValue;
            if (1 == nVideoFramesQty - nAudioFramesQty || 2 == nVideoFramesQty - nAudioFramesQty)
                nFramesQty = nVideoFramesQty - nFrameStart;
            else
                nFramesQty = (nVideoFramesQty < nAudioFramesQty ? nVideoFramesQty : nAudioFramesQty) - nFrameStart;
        }
    }
    catch
    {
        Dispose();
        throw;
    }
}