Beispiel #1
0
            /// <summary>
            /// Reads the next packet from the input format context and routes it by stream
            /// index into the video or audio packet queue; packets of any other stream are
            /// freed immediately. On a read failure (other than in endless mode) sets
            /// _bFileEnd. Ownership of the enqueued native packet passes to the consumer.
            /// NOTE(review): callers appear to hold _oSyncRoot when invoking this (see the
            /// decode methods) — confirm this method is not meant to be called unlocked.
            /// </summary>
            private void PacketNext()
			{
				Logger.Timings cTimings = new Logger.Timings("ffmpeg:file");
				_nPacketIndx++; // logging
				IntPtr pPacket;
				AVPacket stPacket;
				if (!_bFileEnd)
				{
                    do
                    {
                        // Allocate a zeroed native AVPacket and initialize it.
                        pPacket = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
                        helpers.WinAPI.memset(pPacket, 0, Marshal.SizeOf(typeof(AVPacket)));
                        Functions.av_init_packet(pPacket);
                        //stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                        //stPacket.data = NULL;
                        //stPacket.size = 0;
                        //Marshal.StructureToPtr(stPacket, pPacket, true);
                        cTimings.Restart("allocation");
                        int nResult;
                        if (-1 < (nResult = _cFormatCtx.PacketRead(pPacket)))
                        {
							cTimings.Restart("reeding packet");
                            // Marshal a managed copy only to inspect stream_index.
                            stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                            if (_nVideoStreamIndx == stPacket.stream_index)
                            {
                                _aqVideoPackets.Enqueue(pPacket);
                                _nTotalVideoPackets++;
                            }
                            else if (_nAudioStreamIndx == stPacket.stream_index)
                            {
                                _aqAudioPackets.Enqueue(pPacket);
                                _nTotalAudioPackets++;
                            }
                            else
                            {
                                // Packet belongs to a stream we do not consume — release it.
                                Functions.av_free_packet(pPacket);
                                Functions.av_freep(ref pPacket);
                            }
                            break;
                        }
                        else if (!bFileEndless)
                        {
                            // -541478725 is FFmpeg's AVERROR_EOF ('EOF '); anything else is a real error.
                            if (-541478725 != nResult)
                                (new Logger()).WriteError("File.Input.GetAndSortNextPacket.PacketRead = " + nResult);
                            _bFileEnd = true;
							(new Logger()).WriteDebug("_bFileEnd = true");
                        }
                        else
                            Thread.Sleep(20); // endless mode: wait for more data, then retry the read
                    } while (bFileEndless);
				}
				cTimings.Stop("packets: > 40ms", 40);
			}
Beispiel #2
0
			/// <summary>
			/// Decodes audio packets until one target-format frame buffer (cFrame.aBuffer) is
			/// completely filled, then enqueues the frame into _aqAudioFrames. Decoded bytes
			/// that overflow the buffer are stashed in _aBytesRemainder and consumed first on
			/// the next call. Pulls packets from _aqAudioPackets, driving PacketNext() when
			/// the packet queue is empty.
			/// </summary>
			/// <exception cref="NotImplementedException">No target audio format is configured.</exception>
			/// <exception cref="Exception">A packet yielded no decodable audio data.</exception>
			private void FrameDecodeAudio()
			{
				(new Logger()).WriteDebug4("in");
				if (null == _cFormatAudioTarget)
					throw new NotImplementedException("null == cFormatTarget"); //UNDONE need to finish implementing return of raw packets
				Logger.Timings cTimings = new Logger.Timings("ffmpeg:file");
                bool bFrameDecoded = false;
				Frame cSamplesTarget = null;
				Frame cFrame;
                if (1 > _aqAudioFramesFree.Count) //frame recycling: no free frame available, allocate a new one
                {
                    cFrame = new Frame(_cFormatAudioTarget);
                    cFrame.Disposing += cFrameAudio_Disposing;
                    (new Logger()).WriteDebug3("audio frame added. total:" + nFramesQueueAudio++);
                }
                else
                    lock (_aqAudioFramesFree)
                        cFrame = _aqAudioFramesFree.Dequeue();

				int nBytesCapacity = 0;
				int nBytesOffset = 0;
				byte[] aPacketBytes;
				int nLength = 0;
				AVPacket stPacket;
				// First drain bytes left over from the previous call, if any.
				if (null != _aBytesRemainder)
				{
                    if (_aBytesRemainder.Length > cFrame.aBuffer.Length)
                    {
                        //(new Logger()).WriteWarning("_aBytesRemainder.Length > cFrame.aBuffer.Length : " + _aBytesRemainder.Length + ":" + cFrame.aBuffer.Length);
                        // Remainder alone overfills the frame: copy what fits, keep the rest.
                        Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, cFrame.aBuffer.Length);
                        _aBytesRemainder = _aBytesRemainder.Skip(cFrame.aBuffer.Length).Take(_aBytesRemainder.Length - cFrame.aBuffer.Length).ToArray();
                        nBytesOffset += cFrame.aBuffer.Length;
                    }
                    else
                    {
                        Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, _aBytesRemainder.Length);
                        nBytesOffset += _aBytesRemainder.Length;
                        _aBytesRemainder = null;
                    }
				}
				// Keep decoding packets until the frame buffer is full.
				while (cFrame.nLength > nBytesOffset)
				{
					aPacketBytes = null;
					if (NULL == _pPacketAudioDub)
					{
						// Lazily allocate the reusable native packet used as the decode cursor;
						// _stPacketAudio is its managed mirror (data/size advance as bytes are consumed).
                        _pPacketAudioDub = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
						helpers.WinAPI.memset(_pPacketAudioDub, 0, Marshal.SizeOf(typeof(AVPacket)));
                        Functions.av_init_packet(_pPacketAudioDub);
						_stPacketAudio = (AVPacket)Marshal.PtrToStructure(_pPacketAudioDub, typeof(AVPacket));

					}
					cTimings.Restart("allocation");
					while (true)
					{
						// NOTE: the audio packet can contain several frames 
						while (_stPacketAudio.size > 0)
						{
                            if (null == _cFrameAudio)
                                _cFrameAudio = new Frame();
                            // Push the managed cursor back into native memory before decoding.
                            Marshal.StructureToPtr(_stPacketAudio, _pPacketAudioDub, true);
                            nLength = Functions.avcodec_decode_audio4(_cFormatAudio.pAVCodecContext, _cFrameAudio, ref bFrameDecoded, _pPacketAudioDub);
							cTimings.CheckIn("decode");
							if (nLength < 0)
							{
								// Decode error: discard the rest of this packet.
								_stPacketAudio.size = 0;
								break;
							}
							// Advance past the bytes the decoder consumed.
							_stPacketAudio.data += nLength;
							_stPacketAudio.size -= nLength;
                            if (!bFrameDecoded)
								continue;
							cTimings.Restart("frame");
							// Convert the decoded frame to the target audio format.
                            cSamplesTarget = _cFormatAudio.Transform(_cFormatAudioTarget, new Frame(_cFormatAudio, _cFrameAudio));
                            
                            
                            //cSamplesTarget = new Frame(_cFormatAudio);
                            //aPacketBytes = cSamplesTarget.aBytes;
                            //cSamplesTarget.Dispose();
                            
                            
                            aPacketBytes = cSamplesTarget.aBytes;
							cTimings.Restart("transform");
							break;
						}
						if (null != aPacketBytes)
							break;
						// Current queue packet is exhausted — free it before fetching the next one.
						if (NULL != _pPacketAudio)
						{
                            Functions.av_free_packet(_pPacketAudio);
							Functions.av_freep(ref _pPacketAudio);
                            cTimings.Restart("packet free");
						}
						// Drive the demuxer until an audio packet arrives or the file ends.
						while (!_bFileEnd && 1 > _aqAudioPackets.Count)
						{
							lock (_oSyncRoot)
                                PacketNext();
						}
						if (_bFileEnd && 1 > _aqAudioPackets.Count)
							break;
						lock (_oSyncRoot)
							_pPacketAudio = _aqAudioPackets.Dequeue();
						// Point the decode cursor at the new packet's payload.
						stPacket = (AVPacket)Marshal.PtrToStructure(_pPacketAudio, typeof(AVPacket));

						_stPacketAudio.data = stPacket.data;
						_stPacketAudio.size = stPacket.size;
						cTimings.Restart("packets");
					}
					if (null == aPacketBytes)
						throw new Exception("audio packet is null");
					nBytesCapacity = aPacketBytes.Length;
					// Overflow: keep what fits, stash the surplus for the next call.
                    if (cFrame.nLength < nBytesOffset + nBytesCapacity)
					{
						nBytesCapacity = cFrame.nLength - nBytesOffset;
						_aBytesRemainder = new byte[aPacketBytes.Length - nBytesCapacity];
						Array.Copy(aPacketBytes, nBytesCapacity, _aBytesRemainder, 0, _aBytesRemainder.Length);
					}
					Array.Copy(aPacketBytes, 0, cFrame.aBuffer, nBytesOffset, nBytesCapacity);
					nBytesOffset += nBytesCapacity;
					cTimings.Restart("accumulation");
				}
				cTimings.Stop("frame:decode:audio: >40ms", 40);//FPS
				lock (_aqAudioFrames)
					_aqAudioFrames.Enqueue(cFrame);
				(new Logger()).WriteDebug4("return");
			}
Beispiel #3
0
			/// <summary>
			/// Returns the next decoded audio frame, waiting (with a timeout of tsTimeout)
			/// for one to appear while the file has not ended. Returns null when the frame
			/// queue is gone, when running in endless mode with an empty queue, or when the
			/// queue stays empty after the file has ended.
			/// </summary>
			/// <returns>The dequeued audio frame, or null.</returns>
			/// <exception cref="TimeoutException">No frame arrived within tsTimeout.</exception>
			public Frame FrameNextAudioGet()
			{
				(new Logger()).WriteDebug4("in");
				if (null == _aqAudioFrames)
					return null;
				Logger.Timings cTimings = new Logger.Timings("ffmpeg:file");
				Frame cResult = null;
				if (!_bFileEnd)
				{
					// Deadline is armed lazily on the first empty poll.
					DateTime dtDeadline = DateTime.MaxValue;
					while (!_bFileEnd && _aqAudioFrames.Count < 1)
					{
						if (bFileEndless)
							return null;
						if (dtDeadline == DateTime.MaxValue)
						{
							try
							{
								dtDeadline = DateTime.Now.Add(tsTimeout);
							}
							catch
							{
								// tsTimeout overflowed DateTime — clamp just below MaxValue
								// so the "not armed yet" sentinel stays distinct.
								dtDeadline = DateTime.MaxValue.AddTicks(-1);
							}
							(new Logger()).WriteDebug("frame:next:audio: queue is empty"); //logging
						}
						System.Threading.Thread.Sleep(5);
						if (DateTime.Now > dtDeadline)
							throw new TimeoutException("audio queue is empty");
					}
				}
				cTimings.Restart("frame waiting");
				lock (_aqAudioFrames)
				{
					if (_aqAudioFrames.Count > 0)
						cResult = _aqAudioFrames.Dequeue();
					cTimings.CheckIn("dequeue"); //logging
				}
				cTimings.Stop("frame:next:audio: >20ms", 20);
				(new Logger()).WriteDebug4("return");
				return cResult;
			}
Beispiel #4
0
			/// <summary>
			/// Decodes video packets into target-format frames and enqueues them into
			/// _aqVideoFrames. While the file is open it consumes packets from
			/// _aqVideoPackets (driving PacketNext() as needed) and returns after one frame;
			/// after _bFileEnd it flushes the decoder with an empty packet until no more
			/// frames come out, then throws to signal exhaustion.
			/// </summary>
			/// <exception cref="Exception">Thrown ("file ended") when the flush produces no further frames.</exception>
			private void FrameDecodeVideo()
			{
				(new Logger()).WriteDebug4("in");
				if (null == _cFormatVideoTarget)
					throw new NotImplementedException("null == cFormatTarget"); //UNDONE need to finish implementing return of raw packets
				Logger.Timings cTimings = new Logger.Timings("ffmpeg:file");
                
                int nVideoFrameFinished = 0;
				IntPtr pPacketNext = NULL;

				while (true)
				{
					// Obtain a packet: a queued one normally, or a zeroed flush packet at EOF.
					while (NULL == pPacketNext)
					{
						while (1 > _aqVideoPackets.Count)
						{
							if (_bFileEnd)
							{
								// EOF: build an empty packet to flush the decoder's buffered frames.
								pPacketNext = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
								helpers.WinAPI.memset(pPacketNext, 0, Marshal.SizeOf(typeof(AVPacket)));
								Functions.av_init_packet(pPacketNext);
								break;
							}
							lock (_oSyncRoot)
                                PacketNext();
						}
						if (!_bFileEnd)
							pPacketNext = _aqVideoPackets.Peek(); // dequeued only after a successful decode below
					}
					cTimings.Restart("packets");
					if (null == _cFrameVideo)
						_cFrameVideo = new Frame(_cFormatVideo);
					try
					{
						int nError = Functions.avcodec_decode_video2(_cFormatVideo.pAVCodecContext, _cFrameVideo, ref nVideoFrameFinished, pPacketNext);
						if (!_bFileEnd)
						{
							// Normal path: the queued packet is consumed — free and dequeue it.
							// (The EOF flush packet is reused across iterations instead.)
							Functions.av_free_packet(pPacketNext);
							Functions.av_freep(ref pPacketNext);
							_aqVideoPackets.Dequeue();
						}
					}
					catch (Exception ex)
					{
						(new Logger()).WriteError(ex);
					}
						
					cTimings.Restart("decode");
					if (0 < nVideoFrameFinished)
					{
						Frame cFrame;
                        if (1 > _aqVideoFramesFree.Count) //frame recycling: no free frame available, allocate a new one
                        {
                            cFrame = new Frame(_cFormatVideoTarget);
                            cFrame.Disposing += cFrameVideo_Disposing;
							(new Logger()).WriteDebug3("video frame added. total:" + nFramesQueueVideo++);
                        }
                        else
                            lock (_aqVideoFramesFree)
                                cFrame = _aqVideoFramesFree.Dequeue();
						// Convert to the target format, preserving keyframe flag and PTS.
						_cFormatVideo.Transform(_cFormatVideoTarget, _cFrameVideo, cFrame);
                        cFrame.bKeyframe = _cFrameVideo.bKeyframe;
                        cFrame.nPTS = _cFrameVideo.nPTS;
                        lock (_aqVideoFrames)
							_aqVideoFrames.Enqueue(cFrame);


						if (_bDoWritingFrames)
						{
							// Mirror the raw frame bytes into the writing queue.
							if (null != cFrame)
							{
								byte[] aBytes = new byte[_cFormatVideoTarget.nBufferSize];
								System.Runtime.InteropServices.Marshal.Copy(cFrame.pBytes, aBytes, 0, (int)_cFormatVideoTarget.nBufferSize);
								lock (_aqWritingFrames)
									_aqWritingFrames.Enqueue(aBytes);
							}
						}

						cTimings.Restart("transform");
						// At EOF keep looping to drain all buffered frames; otherwise one frame is enough.
						if (!_bFileEnd)
							break;
					}
					else if (_bFileEnd)
					{
						// Flush produced nothing more — release the flush packet and signal exhaustion.
						if (NULL != pPacketNext)
						{
							Functions.av_free_packet(pPacketNext);
							Functions.av_freep(ref pPacketNext);
						}
						throw new Exception("file ended");
					}
				}
				cTimings.Stop("frame:decode:video: >40ms", 40); //FPS
				(new Logger()).WriteDebug4("return");
			}
Beispiel #5
0
			/// <summary>
			/// Tears down the input: stops the frame-writing worker, disposes every queued
			/// and recycled frame (unhooking their Disposing handlers first), frees all
			/// queued native packets plus the two decode cursor packets, disposes the decode
			/// frames, and calls base.Dispose(). Idempotent via the _bClose flag; all
			/// exceptions are caught and logged so disposal never throws.
			/// NOTE(review): Thread.Abort is obsolete on modern .NET — confirm the target
			/// runtime still supports it before upgrading.
			/// </summary>
			override public void Dispose()
			{
				if (_bClose)
				{
					(new Logger()).WriteDebug2("in: already disposed");
					return;
				}
				_bClose = true;
				(new Logger()).WriteDebug2("in [hc = " + GetHashCode() + "][v_frames: " + (_aqVideoFrames == null ? "null" : "" + _aqVideoFrames.Count) + " + " + (_aqVideoFramesFree == null ? "null" : "" + _aqVideoFramesFree.Count) + " ][a_frames: " + (_aqAudioFrames == null ? "null" : "" + _aqAudioFrames.Count) + " + " + (_aqAudioFramesFree == null ? "null" : "" + _aqAudioFramesFree.Count) + " ]");
				Logger.Timings cTimings = new Logger.Timings("ffmpeg:file:input:dispose:");
				try
				{
                    if (null != _cThreadWritingFramesWorker)
                        _cThreadWritingFramesWorker.Abort();
                    lock (_cCloseLock)
                    {
                        Frame cFrame;
                        // Dispose all managed frames; unhook Disposing first so the
                        // recycling handlers don't re-enqueue frames mid-teardown.
                        if (null != _aqVideoFrames)
                        {
                            while (0 < _aqVideoFrames.Count)
                            {
                                cFrame = _aqVideoFrames.Dequeue();
                                cFrame.Disposing -= cFrameVideo_Disposing;
                                cFrame.Dispose();
                            }
                            while (0 < _aqVideoFramesFree.Count)
                            {
                                cFrame = _aqVideoFramesFree.Dequeue();
                                cFrame.Disposing -= cFrameVideo_Disposing;
                                cFrame.Dispose();
                            }
                            _aqVideoFrames = null;
                            _aqVideoFramesFree = null;
                        }
                        if (null != _aqAudioFrames)
                        {
                            while (0 < _aqAudioFrames.Count)
                            {
                                cFrame = _aqAudioFrames.Dequeue();
                                cFrame.Disposing -= cFrameAudio_Disposing;
                                cFrame.Dispose();
                            }
                            while (0 < _aqAudioFramesFree.Count)
                            {
                                cFrame = _aqAudioFramesFree.Dequeue();
                                cFrame.Disposing -= cFrameAudio_Disposing;
                                cFrame.Dispose();
                            }
                            _aqAudioFrames = null;
                            _aqAudioFramesFree = null;
                        }
                    }
                    // Free every native packet still queued.
                    IntPtr pPacket;
					if (null != _aqAudioPackets)
					{
						while (0 < _aqAudioPackets.Count)
						{
							pPacket = _aqAudioPackets.Dequeue();
							if (NULL != pPacket)
							{
								Functions.av_free_packet(pPacket);
								Functions.av_freep(ref pPacket);
							}
						}
					}
					if (null != _aqVideoPackets)
					{
						while (0 < _aqVideoPackets.Count)
						{
							pPacket = _aqVideoPackets.Dequeue();
							if (NULL != pPacket)
							{
								Functions.av_free_packet(pPacket);
								Functions.av_freep(ref pPacket);
							}
						}
					}
					// Free the in-flight audio packet and the reusable decode cursor packet.
					if (NULL != _pPacketAudio)
					{
						Functions.av_free_packet(_pPacketAudio);
						Functions.av_freep(ref _pPacketAudio);
					}
					if (NULL != _pPacketAudioDub)
					{
						Functions.av_free_packet(_pPacketAudioDub);
						Functions.av_freep(ref _pPacketAudioDub);
					}
                    if (null != _cFrameVideo)
                        _cFrameVideo.Dispose();
                    if (null != _cFrameAudio)
                        _cFrameAudio.Dispose();
                    base.Dispose();
				}
				catch (Exception ex) 
				{
					(new Logger()).WriteError(ex);
				}
				cTimings.Stop("disposing > 20", 20);
				(new Logger()).WriteDebug3("out [hc: " + GetHashCode() + "]");
			}