/// <summary>
/// Decodes audio packets until exactly one target-format frame buffer
/// (<c>cFrame.aBuffer</c>, <c>cFrame.nLength</c> bytes) has been filled, then
/// enqueues that frame into <c>_aqAudioFrames</c>. Decoded bytes that overflow
/// the frame are stashed in <c>_aBytesRemainder</c> and consumed first on the
/// next call. Throws if no target audio format has been configured, and throws
/// a generic <c>Exception</c> if the file ends before a packet could be decoded.
/// NOTE(review): frames obtained here appear to be recycled via
/// <c>_aqAudioFramesFree</c> through the <c>Disposing</c> event — confirm against
/// <c>cFrameAudio_Disposing</c>, which is outside this view.
/// </summary>
private void FrameDecodeAudio()
{
    (new Logger()).WriteDebug4("in");
    if (null == _cFormatAudioTarget)
        throw new NotImplementedException("null == cFormatTarget"); //UNDONE need to finish support for returning raw packets
    Logger.Timings cTimings = new Logger.Timings("ffmpeg:file");
    bool bFrameDecoded = false;
    Frame cSamplesTarget = null;
    Frame cFrame;
    if (1 > _aqAudioFramesFree.Count) //frame rotation: no free frame available — allocate a new one
    {
        cFrame = new Frame(_cFormatAudioTarget);
        // Disposing handler presumably returns the frame to _aqAudioFramesFree — TODO confirm
        cFrame.Disposing += cFrameAudio_Disposing;
        (new Logger()).WriteDebug3("audio frame added. total:" + nFramesQueueAudio++);
    }
    else
        lock (_aqAudioFramesFree)
            cFrame = _aqAudioFramesFree.Dequeue();
    int nBytesCapacity = 0;
    int nBytesOffset = 0;
    byte[] aPacketBytes;
    int nLength = 0;
    AVPacket stPacket;
    // First drain any bytes left over from the previous call's overflow.
    if (null != _aBytesRemainder)
    {
        if (_aBytesRemainder.Length > cFrame.aBuffer.Length)
        {
            // Remainder alone overfills the whole frame: copy what fits, keep the rest.
            //(new Logger()).WriteWarning("_aBytesRemainder.Length > cFrame.aBuffer.Length : " + _aBytesRemainder.Length + ":" + cFrame.aBuffer.Length);
            Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, cFrame.aBuffer.Length);
            _aBytesRemainder = _aBytesRemainder.Skip(cFrame.aBuffer.Length).Take(_aBytesRemainder.Length - cFrame.aBuffer.Length).ToArray();
            nBytesOffset += cFrame.aBuffer.Length;
        }
        else
        {
            Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, _aBytesRemainder.Length);
            nBytesOffset += _aBytesRemainder.Length;
            _aBytesRemainder = null;
        }
    }
    // Keep decoding packets until the frame buffer is full.
    while (cFrame.nLength > nBytesOffset)
    {
        aPacketBytes = null;
        // Lazily allocate the native scratch AVPacket used to feed the decoder.
        // Order matters: av_malloc -> memset -> av_init_packet -> marshal a managed copy.
        if (NULL == _pPacketAudioDub)
        {
            _pPacketAudioDub = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
            helpers.WinAPI.memset(_pPacketAudioDub, 0, Marshal.SizeOf(typeof(AVPacket)));
            Functions.av_init_packet(_pPacketAudioDub);
            _stPacketAudio = (AVPacket)Marshal.PtrToStructure(_pPacketAudioDub, typeof(AVPacket));
        }
        cTimings.Restart("allocation");
        while (true)
        {
            // NOTE: the audio packet can contain several frames
            while (_stPacketAudio.size > 0)
            {
                if (null == _cFrameAudio)
                    _cFrameAudio = new Frame();
                // Push the managed packet state (data/size advanced below) back to native memory.
                Marshal.StructureToPtr(_stPacketAudio, _pPacketAudioDub, true);
                nLength = Functions.avcodec_decode_audio4(_cFormatAudio.pAVCodecContext, _cFrameAudio, ref bFrameDecoded, _pPacketAudioDub);
                cTimings.CheckIn("decode");
                if (nLength < 0)
                {
                    // Decode error: drop the rest of this packet and fetch the next one.
                    _stPacketAudio.size = 0;
                    break;
                }
                // Advance past the bytes the decoder consumed.
                _stPacketAudio.data += nLength;
                _stPacketAudio.size -= nLength;
                if (!bFrameDecoded)
                    continue; // consumed bytes but no full frame yet
                cTimings.Restart("frame");
                // Convert the decoded frame to the caller's target format.
                // NOTE(review): the wrapper Frame created here does not appear to be
                // disposed explicitly — confirm Transform/GC handles its lifetime.
                cSamplesTarget = _cFormatAudio.Transform(_cFormatAudioTarget, new Frame(_cFormatAudio, _cFrameAudio));
                //cSamplesTarget = new Frame(_cFormatAudio);
                //aPacketBytes = cSamplesTarget.aBytes;
                //cSamplesTarget.Dispose();
                aPacketBytes = cSamplesTarget.aBytes;
                cTimings.Restart("transform");
                break;
            }
            if (null != aPacketBytes)
                break; // got decoded bytes — go accumulate them into the frame
            // Current packet exhausted: release it and pull the next from the queue.
            if (NULL != _pPacketAudio)
            {
                Functions.av_free_packet(_pPacketAudio);
                Functions.av_freep(ref _pPacketAudio);
                cTimings.Restart("packet free");
            }
            // Block until the demuxer thread supplies a packet or the file ends.
            while (!_bFileEnd && 1 > _aqAudioPackets.Count)
            {
                lock (_oSyncRoot)
                    PacketNext();
            }
            if (_bFileEnd && 1 > _aqAudioPackets.Count)
                break; // nothing left to decode
            lock (_oSyncRoot)
                _pPacketAudio = _aqAudioPackets.Dequeue();
            stPacket = (AVPacket)Marshal.PtrToStructure(_pPacketAudio, typeof(AVPacket));
            _stPacketAudio.data = stPacket.data;
            _stPacketAudio.size = stPacket.size;
            cTimings.Restart("packets");
        }
        if (null == aPacketBytes)
            throw new Exception("audio packet is null"); // file ended mid-frame
        nBytesCapacity = aPacketBytes.Length;
        if (cFrame.nLength < nBytesOffset + nBytesCapacity)
        {
            // Decoded bytes overflow the frame: fill it and save the excess for the next call.
            nBytesCapacity = cFrame.nLength - nBytesOffset;
            _aBytesRemainder = new byte[aPacketBytes.Length - nBytesCapacity];
            Array.Copy(aPacketBytes, nBytesCapacity, _aBytesRemainder, 0, _aBytesRemainder.Length);
        }
        Array.Copy(aPacketBytes, 0, cFrame.aBuffer, nBytesOffset, nBytesCapacity);
        nBytesOffset += nBytesCapacity;
        cTimings.Restart("accumulation");
    }
    cTimings.Stop("frame:decode:audio: >40ms", 40); //FPS
    lock (_aqAudioFrames)
        _aqAudioFrames.Enqueue(cFrame);
    (new Logger()).WriteDebug4("return");
}
/// <summary>
/// Dequeues the next decoded audio frame. Returns null when the frame queue
/// has not been created, or when the source is endless and no frame is ready.
/// While the file is still being read and the queue is empty, polls every 5 ms;
/// throws <see cref="TimeoutException"/> once the wait exceeds <c>tsTimeout</c>.
/// </summary>
public Frame FrameNextAudioGet()
{
    (new Logger()).WriteDebug4("in");
    if (_aqAudioFrames == null)
        return null;

    Logger.Timings cWatch = new Logger.Timings("ffmpeg:file");
    if (!_bFileEnd)
    {
        bool bWaitStarted = false;
        DateTime dtDeadline = DateTime.MaxValue;
        while (!_bFileEnd && _aqAudioFrames.Count < 1)
        {
            if (bFileEndless)
                return null; // endless source: never block the caller

            if (!bWaitStarted)
            {
                // First empty poll: arm the timeout and log once.
                bWaitStarted = true;
                try
                {
                    dtDeadline = DateTime.Now.Add(tsTimeout);
                }
                catch
                {
                    // tsTimeout overflows DateTime — wait effectively forever.
                    dtDeadline = DateTime.MaxValue.AddTicks(-1);
                }
                (new Logger()).WriteDebug("frame:next:audio: queue is empty"); //logging
            }

            System.Threading.Thread.Sleep(5);
            if (DateTime.Now > dtDeadline)
                throw new TimeoutException("audio queue is empty");
        }
    }
    cWatch.Restart("frame waiting");

    Frame cResult = null;
    lock (_aqAudioFrames)
    {
        if (_aqAudioFrames.Count > 0)
            cResult = _aqAudioFrames.Dequeue();
        cWatch.CheckIn("dequeue"); // logging
    }
    cWatch.Stop("frame:next:audio: >20ms", 20);
    (new Logger()).WriteDebug4("return");
    return cResult;
}