Beispiel #1
0
            /// <summary>
            /// Drains the encoder: calls avcodec_encode_audio2 with a NULL frame (flush mode)
            /// and collects any buffered packet bytes into new Frame objects.
            /// </summary>
            /// <returns>Zero or more encoded frames flushed out of the codec.</returns>
            public Frame[] Flush()
            {
                if (null == _cFrame)
                {
                    _cFrame = new Frame(nBufferSize);
                }
                List <Frame> aRetVal = new List <Frame>();
                IntPtr       pPacket = NULL;

                try
                {
                    int nPacketGot = 0, nOffset = 0;
                    // Allocate and initialize a native AVPacket; data/size are set per iteration.
                    pPacket = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
                    Functions.av_init_packet(pPacket);
                    AVPacket stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                    stPacket.size = 0;
                    while (true)
                    {
                        // Point the packet at the unconsumed tail of the staging buffer.
                        stPacket.data = _cFrame.pBytes + nOffset;
                        stPacket.size = _cFrame.nLengthBuffer - nOffset;
                        Marshal.StructureToPtr(stPacket, pPacket, true);
                        //lock (helper._oSyncRootGlobal)
                        // NULL frame => flush: the encoder emits any delayed packets it holds.
                        Functions.avcodec_encode_audio2(pAVCodecContext, pPacket, NULL, ref nPacketGot);
                        stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                        if (0 < nPacketGot && 0 < stPacket.size)
                        {
                            aRetVal.Add(new Frame(_cFrame.aBuffer.Skip(nOffset).Take(stPacket.size).ToArray())
                            {
                                nPTS = stPacket.pts
                            });                                                                                                          //TODO: make one Frame "inherit" from another (share a single aBytes between both Frames)
                            nOffset += stPacket.size;
                        }
//                        else
                        // NOTE(review): with the "else" above commented out this break is
                        // unconditional, so at most ONE flushed packet is collected per call —
                        // confirm that is intended; a full drain would loop until nPacketGot == 0.
                        break;
                    }
                }
                catch (Exception ex)
                {
                    (new Logger()).WriteError(ex);
                }
                finally
                {
                    // Release the native packet on every path (success or exception).
                    if (NULL != pPacket)
                    {
                        Functions.av_free_packet(pPacket);
                        Functions.av_freep(ref pPacket);
                    }
                }
                return(aRetVal.ToArray());
            }
Beispiel #2
0
            /// <summary>
            /// Converts the source frame's audio bytes (laid out in THIS format) into the
            /// target audio format: resamples via Transform, accumulates the planar/packed
            /// byte streams, slices them into encoder-sized frames and encodes each one.
            /// </summary>
            /// <param name="cFormatTarget">Target format; must be a Format.Audio.</param>
            /// <param name="cFrameSource">Frame holding bytes in this (source) format.</param>
            /// <returns>Zero or more encoded frames (empty on caught errors).</returns>
            /// <exception cref="Exception">Target is null/not audio, source exceeds the buffer, or encoding fails.</exception>
            override public Frame[] Convert(Format cFormatTarget, Frame cFrameSource)
            {
                List <Frame> aRetVal = new List <Frame>();

                if (null == cFormatTarget || !(cFormatTarget is Format.Audio))
                {
                    throw new Exception("target format is null or has a wrong type");
                }
                Format.Audio cFormatAudioTarget = (Format.Audio)cFormatTarget;
                IntPtr       pPacket            = NULL;
                Frame        cFrameConverted;
                AVFrame      cAVFrame;
                int          nIndx = 0, nFrameSize, nSize, nPacketGot = 0, nOffset = 0;

                try
                {
                    if (eCodecID == cFormatTarget.eCodecID)
                    {
                        // Same codec and identical sample parameters: pass the bytes through.
                        // BUGFIX: the return statement's terminating semicolon was misplaced
                        // after the enclosing if-brace (leaving an unterminated return plus a
                        // stray empty statement); it now terminates the return correctly.
                        if (nSamplesRate == cFormatAudioTarget.nSamplesRate && eSampleFormat == cFormatAudioTarget.eSampleFormat && nChannelsQty == cFormatAudioTarget.nChannelsQty)
                        {
                            return new Frame[] { new Frame(null, cFrameSource.aBytesCopy)
                                                 {
                                                     nPTS = cFrameSource.nPTS, bKeyframe = cFrameSource.bKeyframe
                                                 } };
                        }
                        if (NULL != _pCodec)
                        {
                            throw new NotImplementedException(); //TODO: implement conversion from encoded to raw
                        }
                    }
                    if (nBufferSize < cFrameSource.nLength)
                    {
                        throw new Exception("wrong bytes qty for specified audio format. Should be less than " + nBufferSize + " but got " + cFrameSource.nLength);
                    }

                    // Resample the source and append each plane's bytes to aByteStream.
                    while (true)
                    {
                        cFrameConverted = Transform(cFormatAudioTarget, cFrameSource);
                        if (null == cFrameConverted || 1 > cFrameConverted.nLength)
                        {
                            break;
                        }
                        // Feed the source only once; further iterations drain the resampler.
                        cFrameSource = null;
                        // NOTE(review): relies on Frame's implicit conversion to IntPtr — confirm.
                        cAVFrame     = (AVFrame)Marshal.PtrToStructure(cFrameConverted, typeof(AVFrame));
                        if (null == aByteStream)
                        {
                            // One byte list per populated data plane (planar audio); at least one.
                            aByteStream = new List <List <byte> >();
                            for (nIndx = 0; cAVFrame.data.Length > nIndx; nIndx++)
                            {
                                if (NULL == cAVFrame.data[nIndx])
                                {
                                    break;
                                }
                                aByteStream.Add(new List <byte>());
                            }
                            if (1 > aByteStream.Count)
                            {
                                aByteStream.Add(new List <byte>());
                            }
                        }
                        int nLineSize = cFrameConverted.nLength / aByteStream.Count;
                        for (nIndx = 0; aByteStream.Count > nIndx; nIndx++)
                        {
                            // Planes are assumed contiguous in the converted buffer; offset of
                            // plane n is data[n] - data[0].
                            aByteStream[nIndx].AddRange(cFrameConverted.aBuffer.Skip((int)((long)cAVFrame.data[nIndx] - (long)cAVFrame.data[0])).Take(nLineSize));
                        }
                    }
                    pPacket = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
                    Functions.av_init_packet(pPacket);
                    AVPacket stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                    stPacket.size = 0;
                    if (null == _cFrame)
                    {
                        _cFrame = new Frame(cFormatAudioTarget.nBufferSize);
                    }
                    // Encoder frame size in samples; when the codec reports none, derive it
                    // from the converted payload. NOTE(review): cFrameConverted may be null
                    // here if Transform produced nothing on the first pass — confirm callers
                    // guarantee at least one converted frame when frame_size < 1.
                    if (1 > (nFrameSize = cFormatAudioTarget.stAVCodecContext.frame_size))
                    {
                        nFrameSize = cFrameConverted.nLength / ((cFormatAudioTarget.nBitsPerSample / 8) * cFormatAudioTarget.nChannelsQty);
                    }
                    nFrameSize *= (cFormatAudioTarget.nBitsPerSample / 8);
                    if (null == cFormatAudioTarget._cFrame)
                    {
                        cFormatTarget._cFrame = new Frame(this, nFrameSize * cFormatAudioTarget.nChannelsQty);
                    }
                    if (2 > aByteStream.Count)
                    {
                        // Packed (non-planar) layout: a frame spans all channels in one plane.
                        nFrameSize *= cFormatAudioTarget.nChannelsQty;
                    }
                    // Encode while a full frame's worth of bytes is buffered and the staging
                    // buffer still has room.
                    while (nFrameSize <= aByteStream[0].Count && _cFrame.nLengthBuffer > (nOffset + stPacket.size))
                    {
                        for (nIndx = 0; aByteStream.Count > nIndx; nIndx++)
                        {
                            aByteStream[nIndx].CopyTo(0, cFormatTarget._cFrame.aBuffer, nIndx * nFrameSize, nFrameSize);
                            aByteStream[nIndx].RemoveRange(0, nFrameSize);
                        }
                        stPacket.data = _cFrame.pBytes + nOffset;
                        stPacket.size = _cFrame.nLengthBuffer - nOffset;
                        Marshal.StructureToPtr(stPacket, pPacket, true);

                        //lock (helper._oSyncRootGlobal)
                        nSize = Functions.avcodec_encode_audio2(cFormatAudioTarget.pAVCodecContext, pPacket, cFormatTarget._cFrame, ref nPacketGot);
                        if (0 > nSize)
                        {
                            throw new Exception("audio encoding failed\n");
                        }
                        if (0 < nPacketGot)
                        {
                            stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                            if (0 < stPacket.size)
                            {
                                aRetVal.Add(new Frame(_cFrame.aBuffer.Skip(nOffset).Take(stPacket.size).ToArray())
                                {
                                    nPTS = stPacket.pts
                                });                                                                                                          //TODO: make one Frame "inherit" from another (share a single aBytes between both Frames)
                                nOffset += stPacket.size;
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    (new Logger()).WriteError(ex);
                }
                finally
                {
                    // Release the native packet on every path.
                    if (NULL != pPacket)
                    {
                        Functions.av_free_packet(pPacket);
                        Functions.av_freep(ref pPacket);
                    }
                    //if (NULL != pAVFrame)
                    //    Functions.avcodec_free_frame(ref pAVFrame);
                }
                return(aRetVal.ToArray());
            }
Beispiel #3
0
			/// <summary>
			/// Decodes queued audio packets (avcodec_decode_audio4 path), transforms the
			/// decoded samples into the target format and accumulates bytes until one full
			/// target frame is filled, which is then queued into _aqAudioFrames.
			/// </summary>
			/// <exception cref="NotImplementedException">No target audio format is set.</exception>
			/// <exception cref="Exception">A packet yielded no bytes (decode starved).</exception>
			private void FrameDecodeAudio()
			{
				(new Logger()).WriteDebug4("in");
				if (null == _cFormatAudioTarget)
					throw new NotImplementedException("null == cFormatTarget"); //UNDONE: returning raw packets still needs to be implemented
 
				Logger.Timings cTimings = new Logger.Timings("ffmpeg:file");
                bool bFrameDecoded = false;
				Frame cSamplesTarget = null;
				Frame cFrame;
                if (1 > _aqAudioFramesFree.Count) // frame recycling: allocate only when the free queue is empty
                {
                    cFrame = new Frame(_cFormatAudioTarget);
                    cFrame.Disposing += cFrameAudio_Disposing;
                    (new Logger()).WriteDebug3("audio frame added. total:" + nFramesQueueAudio++);
                }
                else
                    lock (_aqAudioFramesFree)
                        cFrame = _aqAudioFramesFree.Dequeue();

				int nBytesCapacity = 0;
				int nBytesOffset = 0;
				byte[] aPacketBytes;
				int nLength = 0;
				AVPacket stPacket;
				// Bytes left over from the previous call are copied in first; a remainder
				// larger than the frame buffer is split and the tail carried forward again.
				if (null != _aBytesRemainder)
				{
                    if (_aBytesRemainder.Length > cFrame.aBuffer.Length)
                    {
                        //(new Logger()).WriteWarning("_aBytesRemainder.Length > cFrame.aBuffer.Length : " + _aBytesRemainder.Length + ":" + cFrame.aBuffer.Length);
                        Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, cFrame.aBuffer.Length);
                        _aBytesRemainder = _aBytesRemainder.Skip(cFrame.aBuffer.Length).Take(_aBytesRemainder.Length - cFrame.aBuffer.Length).ToArray();
                        nBytesOffset += cFrame.aBuffer.Length;
                    }
                    else
                    {
                        Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, _aBytesRemainder.Length);
                        nBytesOffset += _aBytesRemainder.Length;
                        _aBytesRemainder = null;
                    }
				}
				// Accumulate decoded bytes until the output frame is full.
				while (cFrame.nLength > nBytesOffset)
				{
					aPacketBytes = null;
					// Lazily allocate the native scratch packet reused across calls.
					if (NULL == _pPacketAudioDub)
					{
                        _pPacketAudioDub = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
						helpers.WinAPI.memset(_pPacketAudioDub, 0, Marshal.SizeOf(typeof(AVPacket)));
                        Functions.av_init_packet(_pPacketAudioDub);
						_stPacketAudio = (AVPacket)Marshal.PtrToStructure(_pPacketAudioDub, typeof(AVPacket));

					}
					cTimings.Restart("allocation");
					while (true)
					{
						// NOTE: the audio packet can contain several frames 
						while (_stPacketAudio.size > 0)
						{
                            if (null == _cFrameAudio)
                                _cFrameAudio = new Frame();
                            Marshal.StructureToPtr(_stPacketAudio, _pPacketAudioDub, true);
                            nLength = Functions.avcodec_decode_audio4(_cFormatAudio.pAVCodecContext, _cFrameAudio, ref bFrameDecoded, _pPacketAudioDub);
							cTimings.CheckIn("decode");
							// Negative return = decode error: drop the rest of this packet.
							if (nLength < 0)
							{
								_stPacketAudio.size = 0;
								break;
							}
							// Advance past the consumed bytes within the packet.
							_stPacketAudio.data += nLength;
							_stPacketAudio.size -= nLength;
                            if (!bFrameDecoded)
								continue;
							cTimings.Restart("frame");
                            cSamplesTarget = _cFormatAudio.Transform(_cFormatAudioTarget, new Frame(_cFormatAudio, _cFrameAudio));
                            
                            
                            //cSamplesTarget = new Frame(_cFormatAudio);
                            //aPacketBytes = cSamplesTarget.aBytes;
                            //cSamplesTarget.Dispose();
                            
                            
                            aPacketBytes = cSamplesTarget.aBytes;
							cTimings.Restart("transform");
							break;
						}
						if (null != aPacketBytes)
							break;
						// Current packet exhausted: free it and fetch the next one.
						if (NULL != _pPacketAudio)
						{
                            Functions.av_free_packet(_pPacketAudio);
							Functions.av_freep(ref _pPacketAudio);
                            cTimings.Restart("packet free");
						}
						// NOTE(review): busy-waits for the demuxer to queue audio packets.
						while (!_bFileEnd && 1 > _aqAudioPackets.Count)
						{
							lock (_oSyncRoot)
                                PacketNext();
						}
						if (_bFileEnd && 1 > _aqAudioPackets.Count)
							break;
						lock (_oSyncRoot)
							_pPacketAudio = _aqAudioPackets.Dequeue();
						stPacket = (AVPacket)Marshal.PtrToStructure(_pPacketAudio, typeof(AVPacket));

						_stPacketAudio.data = stPacket.data;
						_stPacketAudio.size = stPacket.size;
						cTimings.Restart("packets");
					}
					if (null == aPacketBytes)
						throw new Exception("audio packet is null");
					nBytesCapacity = aPacketBytes.Length;
					// More bytes than fit: fill the frame and carry the tail to the next call.
                    if (cFrame.nLength < nBytesOffset + nBytesCapacity)
					{
						nBytesCapacity = cFrame.nLength - nBytesOffset;
						_aBytesRemainder = new byte[aPacketBytes.Length - nBytesCapacity];
						Array.Copy(aPacketBytes, nBytesCapacity, _aBytesRemainder, 0, _aBytesRemainder.Length);
					}
					Array.Copy(aPacketBytes, 0, cFrame.aBuffer, nBytesOffset, nBytesCapacity);
					nBytesOffset += nBytesCapacity;
					cTimings.Restart("accumulation");
				}
				cTimings.Stop("frame:decode:audio: >40ms", 40);//FPS
				lock (_aqAudioFrames)
					_aqAudioFrames.Enqueue(cFrame);
				(new Logger()).WriteDebug4("return");
			}
Beispiel #4
0
			/// <summary>
			/// Decodes queued audio packets (avcodec_decode_audio3 path), transforms the
			/// decoded samples into the target format and accumulates bytes until one full
			/// target frame is filled, which is then queued into _aqAudioFrames.
			/// Fixes: the scratch packet was allocated with avcodec_alloc_frame() (an AVFrame
			/// — wrong structure) and then marshaled as an AVPacket with uninitialized
			/// data/size; it is now a properly allocated, zeroed AVPacket. The remainder copy
			/// is also bounds-checked so a leftover larger than the buffer cannot overflow.
			/// </summary>
			/// <exception cref="NotImplementedException">No target audio format is set.</exception>
			/// <exception cref="Exception">A packet yielded no bytes (decode starved).</exception>
			private void FrameDecodeAudio()
			{
				if (null == _cFormatAudioTarget)
					throw new NotImplementedException("null == cFormatTarget"); //UNDONE: returning raw packets still needs to be implemented
				Logger.Timings cTimings = new Logger.Timings();
				int nBufferSizeSource;
				if(null == _cFrameAudio)
					_cFrameAudio = new Frame(_cFormatAudio, _cFormatAudio.nBufferSize);
				Frame cSamplesTarget = null;
				Frame cFrame;
				if (1 > _aqAudioFramesFree.Count) // frame recycling: allocate only when the free queue is empty
				{
					cFrame = new Frame(_cFormatAudioTarget, _cFormatAudioTarget.nBufferSize / _nFPS);
					cFrame.Disposing += new Frame.DisposingDelegate(cFrameAudio_Disposing);
				}
				else
					lock (_aqAudioFramesFree)
						cFrame = _aqAudioFramesFree.Dequeue();

				int nBytesCapacity = 0;
				int nBytesOffset = 0;
				byte[] aPacketBytes;
				int nLength = 0;
				AVPacket stPacket;
				if (null != _aBytesRemainder)
				{
					// BUGFIX: guard against a remainder larger than the frame buffer, which
					// previously overflowed Array.Copy; the tail is carried forward instead.
					if (_aBytesRemainder.Length > cFrame.aBuffer.Length)
					{
						Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, cFrame.aBuffer.Length);
						_aBytesRemainder = _aBytesRemainder.Skip(cFrame.aBuffer.Length).ToArray();
						nBytesOffset += cFrame.aBuffer.Length;
					}
					else
					{
						Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, _aBytesRemainder.Length);
						nBytesOffset += _aBytesRemainder.Length;
						_aBytesRemainder = null;
					}
				}
				// Accumulate decoded bytes until the output frame is full.
				while (cFrame.nLength > nBytesOffset)
				{
					aPacketBytes = null;
					if (NULL == _pPacketAudioDub)
					{
						// BUGFIX: was Functions.avcodec_alloc_frame(), which allocates an
						// AVFrame — the wrong structure and size — to back an AVPacket.
						// Allocate and initialize a real AVPacket and start with an empty
						// payload so the inner loop does not read garbage size/data.
						_pPacketAudioDub = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
						Functions.av_init_packet(_pPacketAudioDub);
						_stPacketAudio = (AVPacket)Marshal.PtrToStructure(_pPacketAudioDub, typeof(AVPacket));
						_stPacketAudio.data = NULL;
						_stPacketAudio.size = 0;
					}
					cTimings.Restart("allocation");
					while (true)
					{
						// NOTE: the audio packet can contain several frames 
						while (_stPacketAudio.size > 0)
						{
							nBufferSizeSource = _cFrameAudio.nLengthBuffer;
							Marshal.StructureToPtr(_stPacketAudio, _pPacketAudioDub, true);
							nLength = Functions.avcodec_decode_audio3(_cFormatAudio.pAVCodecContext, _cFrameAudio.aBuffer, ref nBufferSizeSource, _pPacketAudioDub);
							cTimings.CheckIn("decode");
							// Negative return = decode error: drop the rest of this packet.
							if (nLength < 0)
							{
								_stPacketAudio.size = 0;
								break;
							}
							// Advance past the consumed bytes within the packet.
							_stPacketAudio.data += nLength;
							_stPacketAudio.size -= nLength;
							// nBufferSizeSource now holds the decoded byte count.
							nLength = nBufferSizeSource;
							if (nLength <= 0)
								continue;
							cTimings.Restart("frame");
							_cFrameAudio.nLength = nLength;
							cSamplesTarget = _cFormatAudio.Transform(_cFormatAudioTarget, _cFrameAudio);
							aPacketBytes = cSamplesTarget.aBytes;
							cTimings.Restart("transform");
							break;
						}
						if (null != aPacketBytes)
							break;
						// Current packet exhausted: free it and fetch the next one.
						if (NULL != _pPacketAudio)
						{
							Functions.av_free_packet(_pPacketAudio);
							Functions.av_freep(ref _pPacketAudio);
							cTimings.Restart("packet free");
						}
						while (!_bFileEnd && 1 > _aqAudioPackets.Count)
						{
							lock (_cSyncRoot)
								GetAndSortNextPacket();
						}
						if (_bFileEnd && 1 > _aqAudioPackets.Count)
							break;
						lock (_cSyncRoot)
							_pPacketAudio = _aqAudioPackets.Dequeue();
						stPacket = (AVPacket)Marshal.PtrToStructure(_pPacketAudio, typeof(AVPacket));

						_stPacketAudio.data = stPacket.data;
						_stPacketAudio.size = stPacket.size;
						cTimings.Restart("packets");
					}
					if (null == aPacketBytes)
						throw new Exception("audio packet is null");
					nBytesCapacity = aPacketBytes.Length;
					// More bytes than fit: fill the frame and carry the tail to the next call.
					if (cFrame.nLength < nBytesOffset + aPacketBytes.Length)
					{
						nBytesCapacity = cFrame.nLength - nBytesOffset;
						_aBytesRemainder = new byte[aPacketBytes.Length - nBytesCapacity];
						Array.Copy(aPacketBytes, nBytesCapacity, _aBytesRemainder, 0, _aBytesRemainder.Length);
					}
					Array.Copy(aPacketBytes, 0, cFrame.aBuffer, nBytesOffset, nBytesCapacity);
					nBytesOffset += nBytesCapacity;
					cTimings.Restart("accumulation");
				}
				cTimings.Stop("frame:decode:audio: >40ms", 40);
				lock (_aqAudioFrames)
					_aqAudioFrames.Enqueue(cFrame);
			}
Beispiel #5
0
			/// <summary>
			/// Converts one source audio frame into the muxer's audio format and writes the
			/// resulting packet(s) to the output audio stream, rescaling PTS into the
			/// stream's time base. Each converted frame is disposed after being written.
			/// </summary>
			/// <param name="cFormatSource">Format describing the bytes in <paramref name="cFrameSource"/>.</param>
			/// <param name="cFrameSource">Source audio frame; a null frame is silently skipped.</param>
			/// <exception cref="Exception">The file has no audio stream.</exception>
			public void AudioFrameNext(Format.Audio cFormatSource, Frame cFrameSource)
			{
				if (NULL == _pStreamAudio)
					throw new Exception("there is no audio stream in file");
				if (null == cFrameSource)
					return;
				AVStream stStream = (AVStream)Marshal.PtrToStructure(_pStreamAudio, typeof(AVStream));
				if (null == _cFrameAudio)
					_cFrameAudio = new Frame(_cFormatAudio, _cFormatAudio.nBufferSize);
				foreach (Frame cConverted in cFormatSource.Convert(_cFormatAudio, cFrameSource, _cFrameAudio))
				{
					// One native packet per converted frame.
					IntPtr pPacket = Functions.av_malloc((uint)Marshal.SizeOf(typeof(AVPacket)));
					Functions.av_init_packet(pPacket);
					AVPacket stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
					if (cConverted.nPTS != Constants.AV_NOPTS_VALUE)
						stPacket.pts = Functions.av_rescale_q(cConverted.nPTS, _cFormatAudio.stAVCodecContext.time_base, stStream.time_base);
					// Audio packets are always flagged as keyframes.
					stPacket.flags |= Constants.AV_PKT_FLAG_KEY;
					stPacket.stream_index = stStream.index;
					stPacket.size = cConverted.nLength;
					stPacket.data = cConverted.p;
					Marshal.StructureToPtr(stPacket, pPacket, true);

					_cFormatCtx.PacketWrite(pPacket);
					Functions.av_free_packet(pPacket);
					Functions.av_freep(ref pPacket);
					cConverted.Dispose();
				}
			}
Beispiel #6
0
			/// <summary>
			/// Converts one source video frame into the muxer's video format and writes the
			/// resulting packet(s) to the output video stream, rescaling PTS into the
			/// stream's time base and carrying the keyframe flag through.
			/// </summary>
			/// <param name="cFormatSource">Format describing the bytes in <paramref name="cFrameSource"/>.</param>
			/// <param name="cFrameSource">Source video frame.</param>
			/// <exception cref="Exception">The file has no video stream.</exception>
			public void VideoFrameNext(Format.Video cFormatSource, Frame cFrameSource)
			{
				if (NULL == _pStreamVideo)
					throw new Exception("there is no video stream in file");
				AVStream stStream = (AVStream)Marshal.PtrToStructure(_pStreamVideo, typeof(AVStream));
				if (null == _cFrameVideo)
					_cFrameVideo = new Frame(_cFormatVideo, _cFormatVideo.nBufferSize);
				foreach (Frame cConverted in cFormatSource.Convert(_cFormatVideo, cFrameSource, _cFrameVideo))
				{
					// One native packet per converted frame.
					IntPtr pPacket = Functions.av_malloc((uint)Marshal.SizeOf(typeof(AVPacket)));
					Functions.av_init_packet(pPacket);
					AVPacket stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
					if (cConverted.nPTS != Constants.AV_NOPTS_VALUE)
						stPacket.pts = Functions.av_rescale_q(cConverted.nPTS, _cFormatVideo.stAVCodecContext.time_base, stStream.time_base);
					if (cConverted.bKeyframe)
						stPacket.flags |= Constants.AV_PKT_FLAG_KEY;
					stPacket.stream_index = stStream.index;
					stPacket.size = cConverted.nLength;
					stPacket.data = cConverted.p;
					Marshal.StructureToPtr(stPacket, pPacket, true);
					_cFormatCtx.PacketWrite(pPacket);
					Functions.av_free_packet(pPacket);
					Functions.av_freep(ref pPacket);
				}
			}