Example #1
 // Two audio formats are alike only when every field that affects decoding,
 // resampling and buffering matches; a frame produced for one can then be
 // consumed by the other without conversion.
 public override bool IsAlikeTo(Format cFormat)
 {
     if (!(cFormat is Format.Audio))
     {
         return false;
     }
     Format.Audio cFV = (Format.Audio)cFormat;
     return stAVCodecContext.frame_size == cFV.stAVCodecContext.frame_size &&
            nBitsPerSample == cFV.nBitsPerSample &&
            nChannelsQty == cFV.nChannelsQty &&
            nBufferSize == cFV.nBufferSize &&
            stAVCodecContext.channel_layout == cFV.stAVCodecContext.channel_layout &&
            eSampleFormat == cFV.eSampleFormat &&
            nSamplesRate == cFV.nSamplesRate &&
            eCodecID == cFV.eCodecID &&
            stAVCodecContext.time_base.den == cFV.stAVCodecContext.time_base.den &&
            stAVCodecContext.time_base.num == cFV.stAVCodecContext.time_base.num &&
            stAVCodecContext.pkt_timebase.den == cFV.stAVCodecContext.pkt_timebase.den &&
            stAVCodecContext.pkt_timebase.num == cFV.stAVCodecContext.pkt_timebase.num &&
            stAVCodecContext.sample_aspect_ratio.den == cFV.stAVCodecContext.sample_aspect_ratio.den &&
            stAVCodecContext.sample_aspect_ratio.num == cFV.stAVCodecContext.sample_aspect_ratio.num;
 }
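
A minimal usage sketch (not from the source): IsAlikeTo is the natural gate for deciding whether a frame can be passed through untouched or must be resampled. Route is a hypothetical name; only IsAlikeTo and the Transform call shape (see Example #6) come from this listing.

 // Hypothetical caller: pass frames through when formats already match.
 Frame Route(Format.Audio cFormatIn, Format.Audio cFormatOut, Frame cFrame)
 {
     if (cFormatIn.IsAlikeTo(cFormatOut))
     {
         return cFrame; // formats match field-for-field: no conversion needed
     }
     return cFormatIn.Transform(cFormatOut, cFrame); // resample, as in Example #6
 }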
Example #2
 public TransformContext(Format.Audio cFormatSource, Format.Audio cFormatTarget)
 {
     _oDisposeLock = new object();
     pContext      = NULL;
     // Allocate a libswresample context converting from the source channel
     // layout / sample format / rate to the target ones, then initialize it.
     pContext = Functions.swr_alloc_set_opts(pContext,
         (long)cFormatTarget.stAVCodecContext.channel_layout, cFormatTarget.eSampleFormat, cFormatTarget.nSamplesRate,
         (long)cFormatSource.stAVCodecContext.channel_layout, cFormatSource.eSampleFormat, cFormatSource.nSamplesRate,
         0, NULL);
     if (NULL == pContext || 0 > Functions.swr_init(pContext))
     {
         throw new Exception("can't init audio transform context");
     }
     _cFormatSource = cFormatSource;
     _cFormatTarget = cFormatTarget;
 }
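
The constructor allocates a native SwrContext, so the owning type presumably releases it under _oDisposeLock. A minimal sketch of that counterpart, assuming the wrapper exposes FFmpeg's swr_free(SwrContext**) as Functions.swr_free (that binding is an assumption):

 // Sketch only: Functions.swr_free is assumed to mirror FFmpeg's swr_free.
 public void Dispose()
 {
     lock (_oDisposeLock)
     {
         if (NULL != pContext)
         {
             Functions.swr_free(ref pContext); // frees the context and clears the pointer
             pContext = NULL;
         }
     }
 }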
Example #3
        private void Init(Format cFormat)
        {
            _cGCHandle = GCHandle.Alloc(aBuffer, GCHandleType.Pinned);
            _pBytes    = _cGCHandle.AddrOfPinnedObject();
            if (null == cFormat)
            {
                return;
            }
            AVFrame cAVFrame;

            if (null != aBuffer)
            {
                int nResult;
                if (cFormat is Format.Video)
                {
                    Format.Video cFormatVideo = (Format.Video)cFormat;
                    // Attach the pinned managed buffer to the frame's data planes and line sizes.
                    if (0 > (nResult = Functions.avpicture_fill(_pAVFrame, aBuffer, cFormatVideo.ePixelFormat, cFormatVideo.nWidth, cFormatVideo.nHeight)))
                    {
                        throw new Exception("Frame.AVFrameInit.avpicture_fill = " + nResult);
                    }
                    cAVFrame         = (AVFrame)Marshal.PtrToStructure(_pAVFrame, typeof(AVFrame));
                    cAVFrame.quality = 1;
                    cAVFrame.pts     = 0;
                    Marshal.StructureToPtr(cAVFrame, _pAVFrame, true);
                }
                else
                {
                    Format.Audio cFormatAudio = (Format.Audio)cFormat;
                    cAVFrame = (AVFrame)Marshal.PtrToStructure(_pAVFrame, typeof(AVFrame));
                    // When the codec does not dictate a frame size, derive the sample count
                    // from the buffer length: bytes / (bytes-per-sample * channels).
                    if (1 > (cAVFrame.nb_samples = cFormatAudio.stAVCodecContext.frame_size))
                    {
                        cAVFrame.nb_samples = aBuffer.Length / ((cFormatAudio.nBitsPerSample / 8) * cFormatAudio.nChannelsQty);
                    }
                    cAVFrame.channel_layout = cFormatAudio.stAVCodecContext.channel_layout;
                    cAVFrame.format         = (int)cFormatAudio.stAVCodecContext.sample_fmt;
                    Marshal.StructureToPtr(cAVFrame, _pAVFrame, true);
                    // Point the frame's data planes at the pinned managed audio buffer.
                    if (0 > (nResult = Functions.avcodec_fill_audio_frame(_pAVFrame, cFormatAudio.nChannelsQty, cFormatAudio.eSampleFormat, aBuffer, nLengthBuffer, 1)))
                    {
                        throw new Exception("Frame.AVFrameInit.avcodec_fill_audio_frame = " + nResult);
                    }
                }
            }
        }
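
The fallback in the audio branch derives nb_samples from the raw buffer size. A worked example with illustrative numbers, assuming 16-bit stereo and a 4096-byte buffer:

            // Worked example of the nb_samples fallback above.
            int nBitsPerSample = 16, nChannelsQty = 2;
            int nBytesPerSampleFrame = (nBitsPerSample / 8) * nChannelsQty; // 2 * 2 = 4 bytes
            int nSamples = 4096 / nBytesPerSampleFrame;                     // 1024 samples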
Example #4
        public Frame(Format.Audio cFormat, Frame cFrame)
            : this()
        {
            if (null != cFrame._aBuffer)
            {
                throw new NotImplementedException();
            }
            AVFrame cAVFrame = (AVFrame)Marshal.PtrToStructure(cFrame._pAVFrame, typeof(AVFrame));

            if (0 < cAVFrame.width || 0 < cAVFrame.height || 1 > cAVFrame.nb_samples)
            {
                throw new NotImplementedException();
            }
            int nLineSize = cFormat.nBitsPerSample / 8 * cAVFrame.nb_samples;

            //int nReminder = nLineSize % 64;
            //if(0 < nReminder)
            //	nLineSize += 64 - nReminder;
            _nLength = cFormat.nChannelsQty * nLineSize;
            _aBuffer = new byte[_nLength];
            (new Logger()).WriteDebug4("frame format_frame: - new bytes processed! [size=" + _nLength + "]");
            // More than one non-NULL data plane means planar audio (one channel per
            // plane); a single plane is packed/interleaved, so copy the whole buffer at once.
            bool bPlanar = (1 < cAVFrame.data.Count(o => NULL != o));

            if (!bPlanar)
            {
                nLineSize = nLength;
            }
            for (int nIndx = 0; cAVFrame.data.Length > nIndx; nIndx++)
            {
                if (NULL == cAVFrame.data[nIndx])
                {
                    break;
                }
                Marshal.Copy(cAVFrame.data[nIndx], _aBuffer, nIndx * nLineSize, nLineSize);
            }
            Init(cFormat);
        }
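
The bPlanar test relies on FFmpeg's AVFrame layout: packed sample formats (e.g. AV_SAMPLE_FMT_S16) interleave all channels in data[0], while planar formats (e.g. AV_SAMPLE_FMT_S16P) give each channel its own data[i] plane. The resulting copy geometry for 1024 16-bit stereo samples, as a sketch with illustrative sizes:

            // Packed (S16):  data[0] = L R L R ... -> one copy of 4096 bytes (nLineSize == nLength).
            // Planar (S16P): data[0] = L L L ..., data[1] = R R R ...
            //                -> two copies of 2048 bytes at offsets 0 and 2048 in _aBuffer.
            int nLineSize = 16 / 8 * 1024;        // 2048 bytes per channel plane
            int nPlanes   = 2;                    // non-NULL entries in cAVFrame.data
            int nTotal    = nPlanes * nLineSize;  // 4096 bytes in the managed buffer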
Example #5
        public void PassSamples(Format.Audio cFormat)
        {
            if (1 > _nLength)
            {
                throw new NotImplementedException();
            }

            AVFrame cAVFrame = (AVFrame)Marshal.PtrToStructure(_pAVFrame, typeof(AVFrame));

            if (0 < cAVFrame.width || 0 < cAVFrame.height || 1 > cAVFrame.nb_samples)
            {
                throw new NotImplementedException();
            }
            int nLineSize = cFormat.nBitsPerSample / 8 * cAVFrame.nb_samples;

            _nLength = cFormat.nChannelsQty * nLineSize;
            if (null == _aBuffer)
            {
                _aBuffer = new byte[_nLength];
                (new Logger()).WriteDebug("passSamples: - new bytes processed! [size=" + _nLength + "]");
            }
            bool bPlanar = (1 < cAVFrame.data.Count(o => NULL != o));

            if (!bPlanar)
            {
                nLineSize = nLength;
            }
            for (int nIndx = 0; cAVFrame.data.Length > nIndx; nIndx++)
            {
                if (NULL == cAVFrame.data[nIndx])
                {
                    break;
                }
                Marshal.Copy(cAVFrame.data[nIndx], _aBuffer, nIndx * nLineSize, nLineSize);
            }
        }
Example #6
            override public Frame[] Convert(Format cFormatTarget, Frame cFrameSource)             // cFrameSource holds bytes in this object's format!
            {
                List <Frame> aRetVal = new List <Frame>();

                if (null == cFormatTarget || !(cFormatTarget is Format.Audio))
                {
                    throw new Exception("target format is null or has a wrong type");
                }
                Format.Audio cFormatAudioTarget = (Format.Audio)cFormatTarget;
                IntPtr       pPacket            = NULL;
                Frame        cFrameConverted;
                AVFrame      cAVFrame;
                int          nIndx = 0, nFrameSize, nSize, nPacketGot = 0, nOffset = 0;

                try
                {
                    if (eCodecID == cFormatTarget.eCodecID)
                    {
                        if (nSamplesRate == cFormatAudioTarget.nSamplesRate && eSampleFormat == cFormatAudioTarget.eSampleFormat && nChannelsQty == cFormatAudioTarget.nChannelsQty)
                        {
                            return new Frame[] { new Frame(null, cFrameSource.aBytesCopy)
                                                 {
                                                     nPTS = cFrameSource.nPTS, bKeyframe = cFrameSource.bKeyframe
                                                 } };
                        }
                        if (NULL != _pCodec)
                        {
                            throw new NotImplementedException(); //TODO finish the conversion from encoded to raw
                        }
                    }
                    if (nBufferSize < cFrameSource.nLength)
                    {
                        throw new Exception("wrong bytes qty for specified audio format. Should be less than " + nBufferSize + " but got " + cFrameSource.nLength);
                    }

                    // Drain the transform: after the first pass cFrameSource is set to null,
                    // so later iterations flush whatever the resampler still buffers.
                    while (true)
                    {
                        cFrameConverted = Transform(cFormatAudioTarget, cFrameSource);
                        if (null == cFrameConverted || 1 > cFrameConverted.nLength)
                        {
                            break;
                        }
                        cFrameSource = null;
                        cAVFrame     = (AVFrame)Marshal.PtrToStructure(cFrameConverted, typeof(AVFrame));
                        if (null == aByteStream)
                        {
                            aByteStream = new List <List <byte> >();
                            for (nIndx = 0; cAVFrame.data.Length > nIndx; nIndx++)
                            {
                                if (NULL == cAVFrame.data[nIndx])
                                {
                                    break;
                                }
                                aByteStream.Add(new List <byte>());
                            }
                            if (1 > aByteStream.Count)
                            {
                                aByteStream.Add(new List <byte>());
                            }
                        }
                        int nLineSize = cFrameConverted.nLength / aByteStream.Count;
                        for (nIndx = 0; aByteStream.Count > nIndx; nIndx++)
                        {
                            aByteStream[nIndx].AddRange(cFrameConverted.aBuffer.Skip((int)((long)cAVFrame.data[nIndx] - (long)cAVFrame.data[0])).Take(nLineSize));
                        }
                    }
                    pPacket = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
                    Functions.av_init_packet(pPacket);
                    AVPacket stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                    stPacket.size = 0;
                    if (null == _cFrame)
                    {
                        _cFrame = new Frame(cFormatAudioTarget.nBufferSize);
                    }
                    if (1 > (nFrameSize = cFormatAudioTarget.stAVCodecContext.frame_size))
                    {
                        nFrameSize = cFrameConverted.nLength / ((cFormatAudioTarget.nBitsPerSample / 8) * cFormatAudioTarget.nChannelsQty);
                    }
                    nFrameSize *= (cFormatAudioTarget.nBitsPerSample / 8);
                    if (null == cFormatAudioTarget._cFrame)
                    {
                        cFormatTarget._cFrame = new Frame(this, nFrameSize * cFormatAudioTarget.nChannelsQty);
                    }
                    if (2 > aByteStream.Count)
                    {
                        nFrameSize *= cFormatAudioTarget.nChannelsQty;
                    }
                    while (nFrameSize <= aByteStream[0].Count && _cFrame.nLengthBuffer > (nOffset + stPacket.size))
                    {
                        for (nIndx = 0; aByteStream.Count > nIndx; nIndx++)
                        {
                            aByteStream[nIndx].CopyTo(0, cFormatTarget._cFrame.aBuffer, nIndx * nFrameSize, nFrameSize);
                            aByteStream[nIndx].RemoveRange(0, nFrameSize);
                        }
                        stPacket.data = _cFrame.pBytes + nOffset;
                        stPacket.size = _cFrame.nLengthBuffer - nOffset;
                        Marshal.StructureToPtr(stPacket, pPacket, true);

                        //lock (helper._oSyncRootGlobal)
                        nSize = Functions.avcodec_encode_audio2(cFormatAudioTarget.pAVCodecContext, pPacket, cFormatTarget._cFrame, ref nPacketGot);
                        if (0 > nSize)
                        {
                            throw new Exception("audio encoding failed\n");
                        }
                        if (0 < nPacketGot)
                        {
                            stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                            if (0 < stPacket.size)
                            {
                                aRetVal.Add(new Frame(_cFrame.aBuffer.Skip(nOffset).Take(stPacket.size).ToArray())
                                {
                                    nPTS = stPacket.pts
                                });                                                                                                          //TODO implement "inheriting" one frame from another (a single aBytes shared by both Frames)
                                nOffset += stPacket.size;
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    (new Logger()).WriteError(ex);
                }
                finally
                {
                    if (NULL != pPacket)
                    {
                        Functions.av_free_packet(pPacket);
                        Functions.av_freep(ref pPacket);
                    }
                    //if (NULL != pAVFrame)
                    //    Functions.avcodec_free_frame(ref pAVFrame);
                }
                return(aRetVal.ToArray());
            }
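
One thing the loop above does not show: avcodec_encode_audio2 may hold samples back, so FFmpeg encoders are normally drained at end of stream by encoding a NULL frame until got_packet stays zero. A hedged sketch of that flush, reusing the listing's bindings (whether Functions.avcodec_encode_audio2 accepts NULL for the frame argument is an assumption):

                int nPacketGot;
                do
                {
                    nPacketGot = 0;
                    // A NULL frame asks the encoder to emit whatever it still buffers.
                    if (0 > Functions.avcodec_encode_audio2(cFormatAudioTarget.pAVCodecContext, pPacket, NULL, ref nPacketGot))
                        throw new Exception("audio encoder flush failed");
                    if (0 < nPacketGot)
                    {
                        // ... hand the drained packet to the muxer, then reuse pPacket ...
                    }
                } while (0 < nPacketGot);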
Example #7
                public TransformContext(Format.Audio cFormatSource, Format.Audio cFormatTarget)
                {
                    pContext = NULL;
                    pContext = Functions.swr_alloc_set_opts(pContext,
                        (long)cFormatTarget.stAVCodecContext.channel_layout, cFormatTarget.eSampleFormat, cFormatTarget.nSamplesRate,
                        (long)cFormatSource.stAVCodecContext.channel_layout, cFormatSource.eSampleFormat, cFormatSource.nSamplesRate,
                        0, NULL);
                    if (NULL == pContext || 0 > Functions.swr_init(pContext))
                        throw new Exception("can't init audio transform context");
                    _cFormatSource = cFormatSource;
                    _cFormatTarget = cFormatTarget;
                }
Example #8
			public void Prepare(Format.Video cFormatVideo, Format.Audio cFormatAudio)
			{
				_bDoWritingFrames = false;
				_aqWritingFrames = new Queue<byte[]>();
				_cThreadWritingFramesWorker = new System.Threading.Thread(WritingFramesWorker);
				_cThreadWritingFramesWorker.IsBackground = true;
				_cThreadWritingFramesWorker.Priority = System.Threading.ThreadPriority.Normal;
				_cThreadWritingFramesWorker.Start();

				_cFormatVideoTarget = cFormatVideo;
				_cFormatAudioTarget = cFormatAudio;
				// Pre-decode a handful of frames: at most five, never more than the cache holds.
				if (5 < nCacheSize)
					_nDecodedFramesInPrepare = 5;
				else
					_nDecodedFramesInPrepare = nCacheSize;
				_nPreparedFramesIndx = _nDecodedFramesInPrepare;
				int nIndx = 0;

				lock (_cCloseLock)
					while (_nDecodedFramesInPrepare > nIndx++ && !_bFileEnd)
					{
						AddFrameToQueue();
						System.Threading.Thread.Sleep(0);
					}
				_cThreadDecodeAndCache = new Thread(DecodeAndCache);
				_cThreadDecodeAndCache.IsBackground = true;
				_cThreadDecodeAndCache.Priority = Thread.CurrentThread.Priority;
				_cThreadDecodeAndCache.Start();
			}
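
WritingFramesWorker itself is not part of this listing; a plausible shape, purely illustrative, is a background loop that drains _aqWritingFrames whenever _bDoWritingFrames is switched on:

				// Hypothetical consumer; the real WritingFramesWorker is not shown here.
				private void WritingFramesWorker()
				{
					while (true) // background thread: it dies with the process
					{
						byte[] aFrame = null;
						if (_bDoWritingFrames)
							lock (_aqWritingFrames)
								if (0 < _aqWritingFrames.Count)
									aFrame = _aqWritingFrames.Dequeue();
						if (null == aFrame)
						{
							System.Threading.Thread.Sleep(1); // idle until frames arrive
							continue;
						}
						// ... write aFrame to the output target here ...
					}
				}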
Example #9
			private void Init(ulong nFrameStart)
			{
				try
				{
                    AVStream stStream;
                    AVCodecContext stCodecCtx;

                    float nVideoDuration, nAudioDuration;
                    nVideoDuration = nAudioDuration = float.MaxValue;
                    AVMediaType eAVMediaType;
                    for (int nIndx = 0; nIndx < _cFormatCtx.nStreamsQty; nIndx++)
                    {
                        stStream = _cFormatCtx.aStreams[nIndx];
                        stCodecCtx = (AVCodecContext)Marshal.PtrToStructure(stStream.codec, typeof(AVCodecContext));
                        eAVMediaType = (AVMediaType)stCodecCtx.codec_type;
                        if (AVMediaType.AVMEDIA_TYPE_VIDEO == eAVMediaType)
                        {
                            #region VIDEO
                            _nVideoStreamIndx = stStream.index;
                            long nFrameTarget;
                            nFrameTarget = Functions.av_rescale((long)(nFrameStart * 40), stStream.time_base.den, stStream.time_base.num) / 1000; // 40 ms per frame at the assumed 25 fps
                            if (0 < nFrameStart)
                                _cFormatCtx.Seek(_nVideoStreamIndx, nFrameTarget);
							(new Logger()).WriteDebug("init: seek [file_start_fr:" + nFrameStart + "] [frame_target:" + nFrameTarget + "]"); //logging

                            _cFormatVideo = new Format.Video((ushort)stCodecCtx.width, (ushort)stCodecCtx.height, stCodecCtx.codec_id, stCodecCtx.pix_fmt, stStream.codec);
                            //nFramesPerSecond = (ushort)stStream.r_frame_rate.num;
                            //nFramesPerSecond = (ushort)stStream.time_base.den;
							nFramesPerSecond = (ushort)(stStream.avg_frame_rate.num);

							if (0 < stStream.time_base.num && 0 < stStream.time_base.den && 0 < stStream.duration)
								nVideoDuration = stStream.duration * stStream.time_base.num / (float)stStream.time_base.den;
							else
							{
								(new Logger()).WriteWarning("init: wrong duration numbers");
								if (0 < stStream.nb_frames)
									nVideoDuration = stStream.nb_frames / (float)nFramesPerSecond;  // for mov DvPal, hdv
								else
									nVideoDuration = stStream.duration / (float)nFramesPerSecond;   // for HD MXF only this variant works
							}
                            _aqVideoPackets = new Queue<IntPtr>();
                            _aqVideoFrames = new Queue<Frame>();
                            _aqVideoFramesFree = new Queue<Frame>();
                            #endregion
                        }
                        else if (AVMediaType.AVMEDIA_TYPE_AUDIO == eAVMediaType && 0 > _nAudioStreamIndx)
                        {
                            #region AUDIO
                            _nAudioStreamIndx = stStream.index;
                            nAudioDuration = stStream.duration / (float)stStream.time_base.den;
                            _cFormatAudio = new Format.Audio(stStream.time_base.den, stCodecCtx.channels, stCodecCtx.codec_id, (AVSampleFormat)stCodecCtx.sample_fmt, stStream.codec);
                            //_cFormatAudio = new Format.Audio(stCodecCtx.sample_rate, stCodecCtx.channels, stCodecCtx.codec_id, (AVSampleFormat)stCodecCtx.sample_fmt, stStream.codec);

                            _pPacketAudio = NULL;
                            _aqAudioPackets = new Queue<IntPtr>();
                            _aqAudioFrames = new Queue<Frame>();
                            _aqAudioFramesFree = new Queue<Frame>();
                            #endregion
                        }
                    }
                    if (0 > _nVideoStreamIndx && 0 > _nAudioStreamIndx)
                        throw new Exception("can't find suitable streams");
                    if (nVideoDuration < float.MaxValue || nAudioDuration < float.MaxValue)
                    {
                        ulong nVideoFramesQty = nVideoDuration < float.MaxValue ? (ulong)(nVideoDuration * nFramesPerSecond) : ulong.MaxValue;
                        ulong nAudioFramesQty = nAudioDuration < float.MaxValue ? (ulong)(nAudioDuration * nFramesPerSecond) : ulong.MaxValue;
						//(new Logger()).WriteWarning("Video and audio frames quantity doesn't match!! [video=" + nVideoFramesQty + "] [audio=" + nAudioFramesQty + "]");
                        if (1 == nVideoFramesQty - nAudioFramesQty || 2 == nVideoFramesQty - nAudioFramesQty)
                            nFramesQty = nVideoFramesQty - nFrameStart;
                        else
                            nFramesQty = (nVideoFramesQty < nAudioFramesQty ? nVideoFramesQty : nAudioFramesQty) - nFrameStart;
                    }
                }
				catch
				{
					Dispose();
					throw;
				}
			}
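
The seek math above is easier to see with numbers: nFrameStart * 40 converts a frame index into milliseconds at the assumed 25 fps, and av_rescale(a, b, c) computes a * b / c with 64-bit intermediates to move it into stream time-base ticks. For nFrameStart = 100 and a 1/90000 time base (illustrative values):

					// av_rescale(4000, 90000, 1) = 360,000,000; / 1000 -> 360,000 ticks = 4.0 s at 90 kHz.
					long nMilliseconds = 100 * 40;                  // 4000 ms into the stream
					long nFrameTarget  = 4000L * 90000 / 1 / 1000;  // 360000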
Example #10
		virtual public void Dispose()
		{
            if (null != _cFormatAudio)
            {
                _cFormatAudio.Dispose();
                _cFormatAudio = null;
            }
            if (null != _cFormatVideo)
            {
                _cFormatVideo.Dispose();
                _cFormatVideo = null;
            }
            if (null != _cFormatCtx)
            {
                _cFormatCtx.Dispose();
                _cFormatCtx = null;
            }
            lock (_aFramesLocked)
            {
                if (0 < _aFramesLocked.Count)
                    _aFramesLocked = _aFramesLocked; // self-assignment is a no-op; apparently left as a breakpoint anchor for still-locked frames
            }
        }
Example #11
			public Input(string sFile, ulong nFrameStart)
			{
				try
				{
					lock (helper._cSyncRootGlobal)
					{
						if (!helper._bInitialized)
						{
							Functions.av_register_all();
							helper._bInitialized = true;
						}
					}

					//Functions.av_log_set_level(Constants.AV_LOG_DEBUG * 10);
					//System.IO.File.WriteAllText("c:/ffmpeg.log", "");
					//System.IO.File.WriteAllText("c:/ffmpeg1.log", "");

					//Functions.av_log_set_callback(new Functions.av_log_callback(av_log));
					//Functions.av_log_set_callback(Functions.av_log_callback);
					//pLogCallback = Functions.av_log_get_callback();
					//Functions.av_log_set_callback(pLogCallback);


					_cSyncRoot = new object();
					_nPacketIndx = 0; //logging
					_nTotalVideoPackets = 0;
					_nTotalAudioPackets = 0;
					_bClose = false;
					_cCloseLock = new object();
					_bFileEnd = false;
					_nFPS = 25; //FPS
					_sFile = sFile;


					nCacheSize = 100;
					tsTimeout = TimeSpan.FromSeconds(10);
					bPrepared = false;

					AVStream stStream;
					AVCodecContext stCodecCtx;
					_cFormatCtx = AVFormatContext.OpenInput(_sFile);// Functions.avformat_open_input(_sFile);
					_cFormatCtx.StreamInfoFind();
					_nVideoStreamIndx = -1;
					_nAudioStreamIndx = -1;


					nFramesPerSecond = _nFPS;
					float nVideoDuration, nAudioDuration;
					nVideoDuration = nAudioDuration = float.MaxValue;
					AVMediaType eAVMediaType;
					for (int nIndx = 0; nIndx < _cFormatCtx.nStreamsQty; nIndx++)
					{
						stStream = _cFormatCtx.aStreams[nIndx];
						stCodecCtx = (AVCodecContext)Marshal.PtrToStructure(stStream.codec, typeof(AVCodecContext));
						eAVMediaType = (AVMediaType)stCodecCtx.codec_type;
						if (AVMediaType.AVMEDIA_TYPE_VIDEO == eAVMediaType)
						{
							#region VIDEO
							_nVideoStreamIndx = nIndx;
							long nFrameTarget;
							nFrameTarget = Functions.av_rescale((long)(nFrameStart * 40), stStream.time_base.den, stStream.time_base.num) / 1000; // 40 ms per frame at 25 fps
							if (0 < nFrameStart)
								_cFormatCtx.Seek(_nVideoStreamIndx, nFrameTarget);

							_cFormatVideo = new Format.Video((ushort)stCodecCtx.width, (ushort)stCodecCtx.height, stCodecCtx.codec_id, stCodecCtx.pix_fmt, stStream.codec);
							nFramesPerSecond = (ushort)stStream.r_frame_rate.num;
							//nFramesPerSecond = (ushort)stStream.time_base.den;
							nVideoDuration = stStream.duration / (ushort)(stStream.time_base.den / stStream.time_base.num);  // (float)nFramesPerSecond;

							_aqVideoPackets = new Queue<IntPtr>();
							_aqVideoFrames = new Queue<Frame>();
							_aqVideoFramesFree = new Queue<Frame>();
							#endregion
						}
						else if (AVMediaType.AVMEDIA_TYPE_AUDIO == eAVMediaType && 0 > _nAudioStreamIndx)
						{
							#region AUDIO
							_nAudioStreamIndx = nIndx;
							nAudioDuration = stStream.duration / (float)stStream.time_base.den;
							_cFormatAudio = new Format.Audio(stStream.time_base.den, stCodecCtx.channels, stCodecCtx.codec_id, (AVSampleFormat)stCodecCtx.sample_fmt, stStream.codec);

							_pPacketAudio = NULL;
							_aqAudioPackets = new Queue<IntPtr>();
							_aqAudioFrames = new Queue<Frame>();
							_aqAudioFramesFree = new Queue<Frame>();
							#endregion
						}
					}
					if (0 > _nVideoStreamIndx && 0 > _nAudioStreamIndx)
						throw new Exception("can't find suitable streams");
					if (nVideoDuration < float.MaxValue || nAudioDuration < float.MaxValue)
					{
						ulong nVideoFramesQty = nVideoDuration < float.MaxValue ? (ulong)(nVideoDuration * nFramesPerSecond) : ulong.MaxValue;
						ulong nAudioFramesQty = nAudioDuration < float.MaxValue ? (ulong)(nAudioDuration * nFramesPerSecond) : ulong.MaxValue;
						if (1 == nVideoFramesQty - nAudioFramesQty || 2 == nVideoFramesQty - nAudioFramesQty)
							nFramesQty = nVideoFramesQty - nFrameStart;
						else
							nFramesQty = (nVideoFramesQty < nAudioFramesQty ? nVideoFramesQty : nAudioFramesQty) - nFrameStart;
					}
				}
				catch
				{
					Dispose();
					throw;
				}
			}
Example #12
			private void AudioStreamCreate(Format.Audio cFormat)
			{
				AVOutputFormat stAVOutputFormat = (AVOutputFormat)Marshal.PtrToStructure(_pFormatOutput, typeof(AVOutputFormat));
				if (stAVOutputFormat.audio_codec == CodecID.CODEC_ID_NONE)
					return;
				_pStreamAudio = _cFormatCtx.StreamAdd();
				AVStream stAVStream = (AVStream)Marshal.PtrToStructure(_pStreamAudio, typeof(AVStream));
				_cFormatAudio = new Format.Audio(cFormat, stAVStream.codec);
				AVCodecContext stAVCodecContext = (AVCodecContext)Marshal.PtrToStructure(_cFormatAudio.pAVCodecContext, typeof(AVCodecContext));
				// Muxers that set AVFMT_GLOBALHEADER (e.g. MP4/MOV) carry codec extradata in the
				// container header, so the encoder must be told to emit a global header.
				if (0 < (stAVOutputFormat.flags & Constants.AVFMT_GLOBALHEADER))
					stAVCodecContext.flags |= (int)CodecFlags.CODEC_FLAG_GLOBAL_HEADER;
				Marshal.StructureToPtr(stAVCodecContext, stAVStream.codec, true);
				Marshal.StructureToPtr(stAVStream, _pStreamAudio, true);
			}
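
This mirrors standard FFmpeg muxing practice: the flag must reach the codec context before the encoder is opened, because the encoder decides at open time whether to place its headers in extradata or in-band. In the C API the same step reads:

				// Equivalent C-API step (for reference; the method above is the C# port):
				//     if (ofmt->flags & AVFMT_GLOBALHEADER)
				//         codec_ctx->flags |= CODEC_FLAG_GLOBAL_HEADER;
				// Setting the flag after avcodec_open2() would be too late.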