Exemplo n.º 1
0
				/// <summary>Converts a source frame into the target format, reusing one cached output frame.</summary>
				/// <param name="cFrameSource">Frame to convert.</param>
				/// <returns>The cached target frame holding the converted data (overwritten on every call).</returns>
				public Frame Process(Frame cFrameSource)
				{
					Frame cFrameResult = cFrame;
					// Lazily allocate the reusable output frame on first use.
					if (null == cFrameResult)
						cFrame = cFrameResult = new Frame(_cFormatTarget);
					Process(cFrameSource, cFrameResult);
					return cFrameResult;
				}
Exemplo n.º 2
0
				/// <summary>Rescales/converts the source picture into the target frame via sws_scale.</summary>
				/// <param name="cFrameSource">Source frame (read).</param>
				/// <param name="cFrameTarget">Destination frame receiving the converted picture (written).</param>
				public void Process(Frame cFrameSource, Frame cFrameTarget)
				{
					// Marshal the native AVFrame structs to reach the plane pointers and strides.
					AVFrame cAVFrameSource = (AVFrame)Marshal.PtrToStructure(cFrameSource, typeof(AVFrame));
					AVFrame cAVFrameTarget = (AVFrame)Marshal.PtrToStructure(cFrameTarget, typeof(AVFrame));
					// Fixed dead store: the original assigned the sws_scale result to an unused local.
					Functions.sws_scale(pContext, cAVFrameSource.data, cAVFrameSource.linesize, 0, _cFormatSource.nHeight, cAVFrameTarget.data, cAVFrameTarget.linesize);
				}
Exemplo n.º 3
0
            /// <summary>Scales/converts a video frame to the given target format, caching one TransformContext per target format.</summary>
            /// <param name="cFormatVideoTarget">Target video format.</param>
            /// <param name="cFrameSource">Source frame to convert.</param>
            /// <param name="cFrameTarget">Optional destination frame; when null a context-owned reusable frame is returned.</param>
            /// <returns>The converted frame (cFrameTarget when supplied, otherwise the context's cached frame).</returns>
            public Frame Transform(Video cFormatVideoTarget, Frame cFrameSource, Frame cFrameTarget)
            {
                TransformContext cContext;
                // Single lookup instead of ContainsKey + indexer; also tolerates a stale null entry.
                if (!_ahTransformContexts.TryGetValue(cFormatVideoTarget, out cContext) || null == cContext)
                {
                    // Drop cached contexts whose format has already released its codec context.
                    foreach (Video cFormatVideo in _ahTransformContexts.Keys.Where(o => NULL == o.pAVCodecContext).ToArray())
                    {
                        if (null != _ahTransformContexts[cFormatVideo])
                            _ahTransformContexts[cFormatVideo].Dispose();
                        _ahTransformContexts.Remove(cFormatVideo);
                    }
                    cContext = new TransformContext(this, cFormatVideoTarget);
                    // Indexer assignment: Add() would throw if the key was still present with a null value.
                    _ahTransformContexts[cFormatVideoTarget] = cContext;
                }
                if (null == cFrameTarget)
                    return cContext.Process(cFrameSource);
                cContext.Process(cFrameSource, cFrameTarget);
                return cFrameTarget;
            }
Exemplo n.º 4
0
            /// <summary>Disposing handler for pooled audio frames: recycles the frame unless the pool is already full.</summary>
            /// <param name="cFrame">Frame being disposed.</param>
            /// <returns>true when the frame should really be destroyed; false when it was returned to the pool.</returns>
            private bool cFrameAudio_Disposing(Frame cFrame)
            {
                bool bDestroy = false;
                lock (_aqAudioFramesFree)
                {
                    // Cap the pool at 20 frames; beyond that, let frames die for real.
                    if (_aqAudioFramesFree.Count > 20)
                    {
                        (new Logger()).WriteDebug3("audio frame removed. total:" + nFramesQueueAudio--);
                        bDestroy = true;
                    }
                    else
                        _aqAudioFramesFree.Enqueue(cFrame);
                }
                return bDestroy;
            }
Exemplo n.º 5
0
			/// <summary>Converts a video frame to the target format using a context-owned reusable target frame.</summary>
			/// <param name="cFormatVideoTarget">Target video format.</param>
			/// <param name="cFrameSource">Source frame to convert.</param>
			/// <returns>The converted frame (owned by the cached transform context).</returns>
			public Frame Transform(Video cFormatVideoTarget, Frame cFrameSource)
			{
                return Transform(cFormatVideoTarget, cFrameSource, null);
			}
Exemplo n.º 6
0
			/// <summary>
			/// Decodes audio packets into one target-format audio frame and enqueues it in _aqAudioFrames.
			/// Accumulates decoded/converted bytes until the frame's buffer is full; overflow bytes are
			/// kept in _aBytesRemainder for the next call.
			/// </summary>
			private void FrameDecodeAudio()
			{
				if (null == _cFormatAudioTarget)
					throw new NotImplementedException("null == cFormatTarget"); //UNDONE: still need to implement returning raw packets
				Logger.Timings cTimings = new Logger.Timings();
				int nBufferSizeSource;
				// Lazily allocate the reusable decode frame in the source format.
				if(null == _cFrameAudio)
					_cFrameAudio = new Frame(_cFormatAudio, _cFormatAudio.nBufferSize);
				Frame cSamplesTarget = null;
				Frame cFrame;
				if (1 > _aqAudioFramesFree.Count) //frame pooling: allocate a new frame only when the free queue is empty
				{
					cFrame = new Frame(_cFormatAudioTarget, _cFormatAudioTarget.nBufferSize / _nFPS);
					// The Disposing handler returns frames to the pool instead of destroying them.
					cFrame.Disposing += new Frame.DisposingDelegate(cFrameAudio_Disposing);
				}
				else
					lock (_aqAudioFramesFree)
						cFrame = _aqAudioFramesFree.Dequeue();

				int nBytesCapacity = 0;
				int nBytesOffset = 0;
				byte[] aPacketBytes;
				int nLength = 0;
				AVPacket stPacket;
				// Start the frame with bytes left over from the previous call, if any.
				if (null != _aBytesRemainder)
				{
					Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, _aBytesRemainder.Length);
					nBytesOffset += _aBytesRemainder.Length;
					_aBytesRemainder = null;
				}
				// Keep decoding packets until the target frame's buffer is filled.
				while (cFrame.nLength > nBytesOffset)
				{
					aPacketBytes = null;
					// NOTE(review): avcodec_alloc_frame is used to allocate packet storage here —
					// looks odd (frame allocator for an AVPacket); confirm it is intentional.
					if (NULL == _pPacketAudioDub)
					{
						_pPacketAudioDub = Functions.avcodec_alloc_frame();
						_stPacketAudio = (AVPacket)Marshal.PtrToStructure(_pPacketAudioDub, typeof(AVPacket));
					}
					cTimings.Restart("allocation");
					while (true)
					{
						// NOTE: the audio packet can contain several frames 
						while (_stPacketAudio.size > 0)
						{
							nBufferSizeSource = _cFrameAudio.nLengthBuffer;
							Marshal.StructureToPtr(_stPacketAudio, _pPacketAudioDub, true);
							nLength = Functions.avcodec_decode_audio3(_cFormatAudio.pAVCodecContext, _cFrameAudio.aBuffer, ref nBufferSizeSource, _pPacketAudioDub);
							cTimings.CheckIn("decode");
							// Negative return means a decode error: discard the rest of this packet.
							if (nLength < 0)
							{
								_stPacketAudio.size = 0;
								break;
							}
							// Advance past the consumed bytes; the packet may hold more frames.
							_stPacketAudio.data += nLength;
							_stPacketAudio.size -= nLength;
							nLength = nBufferSizeSource;
							if (nLength <= 0)
								continue;
							cTimings.Restart("frame");
							_cFrameAudio.nLength = nLength;
							// Resample/convert the decoded samples into the target audio format.
							cSamplesTarget = _cFormatAudio.Transform(_cFormatAudioTarget, _cFrameAudio);
							aPacketBytes = cSamplesTarget.aBytes;
							cTimings.Restart("transform");
							break;
						}
						if (null != aPacketBytes)
							break;
						// Release the packet we just finished with before fetching the next one.
						if (NULL != _pPacketAudio)
						{
							Functions.av_free_packet(_pPacketAudio);
							Functions.av_freep(ref _pPacketAudio);
							cTimings.Restart("packet free");
						}
						// Pull packets from the file until an audio packet is queued (or the file ends).
						while (!_bFileEnd && 1 > _aqAudioPackets.Count)
						{
							lock (_cSyncRoot)
								GetAndSortNextPacket();
						}
						if (_bFileEnd && 1 > _aqAudioPackets.Count)
							break;
						lock (_cSyncRoot)
							_pPacketAudio = _aqAudioPackets.Dequeue();
						stPacket = (AVPacket)Marshal.PtrToStructure(_pPacketAudio, typeof(AVPacket));

						_stPacketAudio.data = stPacket.data;
						_stPacketAudio.size = stPacket.size;
						cTimings.Restart("packets");
					}
					if (null == aPacketBytes)
						throw new Exception("audio packet is null");
					nBytesCapacity = aPacketBytes.Length;
					// If the converted bytes overflow the frame, stash the excess for the next call.
					if (cFrame.nLength < nBytesOffset + aPacketBytes.Length)
					{
						nBytesCapacity = cFrame.nLength - nBytesOffset;
						_aBytesRemainder = new byte[aPacketBytes.Length - nBytesCapacity];
						Array.Copy(aPacketBytes, nBytesCapacity, _aBytesRemainder, 0, _aBytesRemainder.Length);
					}
					Array.Copy(aPacketBytes, 0, cFrame.aBuffer, nBytesOffset, nBytesCapacity);
					nBytesOffset += nBytesCapacity;
					cTimings.Restart("accumulation");
				}
				cTimings.Stop("frame:decode:audio: >40ms", 40);
				lock (_aqAudioFrames)
					_aqAudioFrames.Enqueue(cFrame);
			}
Exemplo n.º 7
0
			/// <summary>
			/// Converts a source audio frame to the muxer's audio format and writes the resulting
			/// frames to the output file as packets on the audio stream.
			/// </summary>
			/// <param name="cFormatSource">Format describing cFrameSource.</param>
			/// <param name="cFrameSource">Audio frame to write; ignored when null.</param>
			public void AudioFrameNext(Format.Audio cFormatSource, Frame cFrameSource)
			{
				if (NULL == _pStreamAudio)
					throw new Exception("there is no audio stream in file");
				if (null == cFrameSource)
					return;
				AVStream stAVStream = (AVStream)Marshal.PtrToStructure(_pStreamAudio, typeof(AVStream));
				//_nPTSAudio = (double)stAVStream.pts.val * stAVStream.time_base.num / stAVStream.time_base.den;
				Frame cFrame;
				AVPacket stAVPacket = new AVPacket();
				IntPtr pAVPacket;
				// Lazily allocate the intermediate frame used by the conversion.
				if (null == _cFrameAudio)
					_cFrameAudio = new Frame(_cFormatAudio, _cFormatAudio.nBufferSize);
				Frame[] aFrames = cFormatSource.Convert(_cFormatAudio, cFrameSource, _cFrameAudio);
				// One native packet per converted frame: allocate, fill, write, free.
				for (int nIndx = 0; aFrames.Length > nIndx; nIndx++)
				{
					cFrame = aFrames[nIndx];
					pAVPacket = Functions.av_malloc((uint)(Marshal.SizeOf(stAVPacket)));
					Functions.av_init_packet(pAVPacket);
					stAVPacket = (AVPacket)Marshal.PtrToStructure(pAVPacket, typeof(AVPacket));
					// Rescale the frame PTS from codec time base to stream time base.
					if (cFrame.nPTS != Constants.AV_NOPTS_VALUE)
						stAVPacket.pts = Functions.av_rescale_q(cFrame.nPTS, _cFormatAudio.stAVCodecContext.time_base, stAVStream.time_base);
					//stAVPacket.pts = stAVPacket.dts = Functions.av_rescale_q((long)_nPTSAudio, _cFormatVideo.stAVCodecContext.time_base, stAVStream.time_base);
					stAVPacket.flags |= Constants.AV_PKT_FLAG_KEY;
					stAVPacket.stream_index = stAVStream.index;
					stAVPacket.size = cFrame.nLength;
					stAVPacket.data = cFrame.p;
					Marshal.StructureToPtr(stAVPacket, pAVPacket, true);

					_cFormatCtx.PacketWrite(pAVPacket);
					Functions.av_free_packet(pAVPacket);
					Functions.av_freep(ref pAVPacket);
					cFrame.Dispose();
					//_nPTSAudio++;
				}
			}
Exemplo n.º 8
0
				/// <summary>Resamples source samples into the target frame via swr_convert and records the produced byte count.</summary>
				/// <param name="cSamplesSource">Source samples; null flushes buffered samples out of the resampler.</param>
				/// <param name="cSamplesTarget">Destination frame; its nLength is set to the converted byte count.</param>
				public void Process(Frame cSamplesSource, Frame cSamplesTarget)
				{
					// Sample counts are per sample-frame across all channels.
					int nSamplesSource = 0;
					if (cSamplesSource != null)
						nSamplesSource = cSamplesSource.nLength / (_cFormatSource.nChannelsQty * _cFormatSource.nBitsPerSample / 8);
					int nSamplesConverted = Functions.swr_convert(pContext, cSamplesTarget, cSamplesTarget.nLengthBuffer / (_cFormatTarget.nChannelsQty * _cFormatTarget.nBitsPerSample / 8), cSamplesSource, nSamplesSource);
					cSamplesTarget.nLength = nSamplesConverted * _cFormatTarget.nChannelsQty * _cFormatTarget.nBitsPerSample / 8;
				}
Exemplo n.º 9
0
			/// <summary>Resamples audio to the target format when it differs; otherwise returns the source unchanged.</summary>
			/// <param name="cFormatAudioTarget">Target audio format.</param>
			/// <param name="cSamplesSource">Samples to convert.</param>
			/// <param name="cSamplesTarget">Optional destination frame; when null a context-owned reusable frame is returned.</param>
			/// <returns>Converted samples, or cSamplesSource itself when the formats already match.</returns>
			public Frame Transform(Audio cFormatAudioTarget, Frame cSamplesSource, Frame cSamplesTarget)
			{
				// No conversion needed when rate, sample format and channel count already match.
				if (nSamplesRate != cFormatAudioTarget.nSamplesRate || eSampleFormat != cFormatAudioTarget.eSampleFormat || nChannelsQty != cFormatAudioTarget.nChannelsQty)
				{
					TransformContext cContext;
					// Single lookup instead of ContainsKey + indexer; also tolerates a stale null entry.
					if (!_ahTransformContexts.TryGetValue(cFormatAudioTarget, out cContext) || null == cContext)
					{
						// Drop cached contexts whose format has already released its codec context.
						foreach (Audio cFormatAudio in _ahTransformContexts.Keys.Where(o => NULL == o.pAVCodecContext).ToArray())
						{
							if (null != _ahTransformContexts[cFormatAudio])
								_ahTransformContexts[cFormatAudio].Dispose();
							_ahTransformContexts.Remove(cFormatAudio);
						}
						cContext = new TransformContext(this, cFormatAudioTarget);
						// Indexer assignment: Add() would throw if the key was still present with a null value.
						_ahTransformContexts[cFormatAudioTarget] = cContext;
					}
					if (null == cSamplesTarget)
						return cContext.Process(cSamplesSource);
					cContext.Process(cSamplesSource, cSamplesTarget);
					return cSamplesTarget;
				}
				return cSamplesSource;
			}
Exemplo n.º 10
0
            /// <summary>Writes an audio frame that is already in the muxer's audio format (no source-format conversion).</summary>
            /// <param name="cFrameSource">Audio frame to write.</param>
            public void FrameNextAudio(Frame cFrameSource)
			{
                FrameNextAudio(null, cFrameSource);
			}
Exemplo n.º 11
0
            /// <summary>Converts (when cFormatSource is set) and writes an audio frame to the file's audio stream.</summary>
            /// <param name="cFormatSource">Format of cFrameSource; null when no conversion is required.</param>
            /// <param name="cFrameSource">Audio frame to write; silently ignored when null.</param>
            public void FrameNextAudio(Format.Audio cFormatSource, Frame cFrameSource)
			{
                if (null == cFrameSource)
                    return;
                if (NULL == _pStreamAudio)
					throw new Exception("there is no audio stream in file");
                FrameNext(_pStreamAudio, cFormatSource, cFrameSource, _cFormatAudio, _pBitStreamFilterAudio);
			}
Exemplo n.º 12
0
            /// <summary>Writes a video frame that is already in the muxer's video format (no source-format conversion).</summary>
            /// <param name="cFrameSource">Video frame to write.</param>
            public void FrameNextVideo(Frame cFrameSource)
			{
                FrameNextVideo(null, cFrameSource);
			}
Exemplo n.º 13
0
            /// <summary>Converts (when cFormatSource is set) and writes a video frame to the file's video stream.</summary>
            /// <param name="cFormatSource">Format of cFrameSource; null when no conversion is required.</param>
            /// <param name="cFrameSource">Video frame to write; silently ignored when null.</param>
            public void FrameNextVideo(Format.Video cFormatSource, Frame cFrameSource)
			{
                if (null == cFrameSource)
                    return;
                if (NULL == _pStreamVideo)
					throw new Exception("there is no video stream in file");
                FrameNext(_pStreamVideo, cFormatSource, cFrameSource, _cFormatVideo, _pBitStreamFilterVideo);
			}
Exemplo n.º 14
0
        /// <summary>
        /// Builds a managed frame by copying audio sample bytes out of another frame's native
        /// AVFrame into a managed buffer (planar data is laid out plane after plane).
        /// </summary>
        /// <param name="cFrame">Source frame wrapping a native AVFrame with audio samples.</param>
        public Frame(Format.Audio cFormat, Frame cFrame)
            : this()
        {
            // Only frames whose data still lives in native memory are supported here.
            if (null != cFrame._aBuffer)
                throw new NotImplementedException();
            AVFrame cAVFrame = (AVFrame)Marshal.PtrToStructure(cFrame._pAVFrame, typeof(AVFrame));
            // Must be an audio frame: no picture dimensions, at least one sample.
            if (0 < cAVFrame.width || 0 < cAVFrame.height || 1 > cAVFrame.nb_samples)
                throw new NotImplementedException();
            int nLineSize = cFormat.nBitsPerSample / 8 * cAVFrame.nb_samples;
			//int nReminder = nLineSize % 64;
			//if(0 < nReminder)
			//	nLineSize += 64 - nReminder;
            _nLength = cFormat.nChannelsQty * nLineSize;
            _aBuffer = new byte[_nLength];
            // Planar audio keeps each channel in its own data[] plane; interleaved uses one plane.
            bool bPlanar = (1 < cAVFrame.data.Count(o => NULL != o));
            if (!bPlanar)
                nLineSize = nLength;
            // Copy each populated plane into consecutive regions of the managed buffer.
            for (int nIndx = 0; cAVFrame.data.Length > nIndx; nIndx++)
            {
                if (NULL == cAVFrame.data[nIndx])
                    break;
                Marshal.Copy(cAVFrame.data[nIndx], _aBuffer, nIndx * nLineSize, nLineSize);
            }
            Init(cFormat);
        }
Exemplo n.º 15
0
			/// <summary>Disposing handler: returns the audio frame to the free-frame pool instead of destroying it.</summary>
			/// <param name="cFrame">Frame being disposed.</param>
			/// <returns>Always false — the frame must not be destroyed.</returns>
			bool cFrameAudio_Disposing(Frame cFrame)
			{
				lock (_aqAudioFramesFree)
				{
					_aqAudioFramesFree.Enqueue(cFrame);
				}
				return false;
			}
Exemplo n.º 16
0
			/// <summary>Disposing handler: returns the video frame to the free-frame pool instead of destroying it.</summary>
			/// <param name="cFrame">Frame being disposed.</param>
			/// <returns>Always false — the frame must not be destroyed.</returns>
			bool cFrameVideo_Disposing(Frame cFrame)
			{
				lock (_aqVideoFramesFree)
				{
					_aqVideoFramesFree.Enqueue(cFrame);
				}
				return false;
			}
Exemplo n.º 17
0
			/// <summary>
			/// Converts a raw source video frame to the target format and encodes it, returning the
			/// encoded frames (an empty array when the encoder buffered the input or on error).
			/// </summary>
			/// <param name="cFormatTarget">Target format; must be a Format.Video.</param>
			/// <param name="cFrameSource">Source frame with bytes in this (source) format.</param>
			override public Frame[] Convert(Format cFormatTarget, Frame cFrameSource) //pAVFrameSource holds bytes in this format!!!
			{
				List<Frame> aRetVal = new List<Frame>();
				if (null == cFormatTarget || !(cFormatTarget is Format.Video))
					throw new Exception("target format is null or has a wrong type");
				Format.Video cFormatVideoTarget = (Format.Video)cFormatTarget;
				try
				{
					// Same pixel format and dimensions: pass the bytes through unchanged.
                    if (ePixelFormat == cFormatVideoTarget.ePixelFormat && nHeight == cFormatVideoTarget.nHeight && nWidth == cFormatVideoTarget.nWidth)
                        return new Frame[] { new Frame(cFrameSource.aBytes) { nPTS = cFrameSource.nPTS, bKeyframe = cFrameSource.bKeyframe } };
                    if (eCodecID == cFormatTarget.eCodecID || NULL != _pCodec)
                        throw new NotImplementedException(); //TODO: implement conversion from encoded to raw
					
					// Scale/convert pixels into the target format before encoding.
					cFrameSource = Transform(cFormatVideoTarget, cFrameSource);

                    int nSize;
                    if (NULL == cFrameSource)
                        (new Logger()).WriteWarning("Format.Video.Convert: IntPtr.Zero == cFrameSource.AVFrameGet()");
                    if (NULL == cFormatVideoTarget.pAVCodecContext)
                        (new Logger()).WriteWarning("Format.Video.Convert: IntPtr.Zero == cFormatVideoTarget.pAVCodecContext");
                    // Lazily allocate the reusable encode buffer; its nPTS doubles as a frame counter.
                    if (null == _cFrame)
                    {
                        _cFrame = new Frame(cFormatVideoTarget.nBufferSize);
                        _cFrame.nPTS = 0;
                    }
                    cFrameSource.nPTS = _cFrame.nPTS;
                    nSize = Functions.avcodec_encode_video(cFormatVideoTarget.pAVCodecContext, _cFrame.aBuffer, _cFrame.nLengthBuffer, cFrameSource);
					if (0 > nSize)
						throw new Exception("video encoding failed:" + nSize);
					// nSize == 0 means the encoder buffered the frame and produced no output yet.
					if (0 < nSize)
					{
                        aRetVal.Add(new Frame(null, _cFrame.aBuffer.Take(nSize).ToArray()));

						// Take PTS/keyframe flags from the encoder's coded_frame when available.
						AVCodecContext stAVCodecContext = (AVCodecContext)Marshal.PtrToStructure(cFormatVideoTarget.pAVCodecContext, typeof(AVCodecContext));
						if (NULL != stAVCodecContext.coded_frame)
						{
							AVFrame cAVFrame = (AVFrame)Marshal.PtrToStructure(stAVCodecContext.coded_frame, typeof(AVFrame));
							aRetVal[0].nPTS = cAVFrame.pts;
							aRetVal[0].bKeyframe = 0 < cAVFrame.key_frame;
						}
                    }
                    _cFrame.nPTS++;
				}
				catch (Exception ex)
				{
					(new Logger()).WriteError(ex);
				}
				return aRetVal.ToArray();
			}
Exemplo n.º 18
0
            /// <summary>
            /// Converts a frame to the stream's target format and writes the resulting packets to the
            /// output container, optionally running them through a bitstream filter.
            /// </summary>
            /// <param name="pStream">Native AVStream the packets are written to.</param>
            /// <param name="cFormatSource">Source format; null means the frame is already in target format (or, with a null frame, triggers an audio encoder flush).</param>
            /// <param name="cFrameSource">Frame to write.</param>
            /// <param name="cFormatTarget">Target (encoding) format of the stream.</param>
            /// <param name="pBitStreamFilter">Optional native bitstream filter (NULL to skip filtering).</param>
            private void FrameNext(IntPtr pStream, Format cFormatSource, Frame cFrameSource, Format cFormatTarget, IntPtr pBitStreamFilter)
			{
                AVStream stAVStream = (AVStream)Marshal.PtrToStructure(pStream, typeof(AVStream));
                Frame[] aFrames;
                if (null == cFormatSource)
                {
                    // No source format: either flush the audio encoder or pass the bytes through unchanged.
                    if (null == cFrameSource && cFormatTarget is Format.Audio)
                        aFrames = ((Format.Audio)cFormatTarget).Flush();
                    else
                        aFrames = new Frame[] { new Frame(null, cFrameSource.aBytes.ToArray()) { nPTS = cFrameSource.nPTS, bKeyframe = cFrameSource.bKeyframe } };
                }
                else
                    aFrames = cFormatSource.Convert(cFormatTarget, cFrameSource);
                IntPtr pAVPacket;
                AVPacket stAVPacket;
				for (int nIndx = 0; aFrames.Length > nIndx; nIndx++)
				{
                    pAVPacket = Functions.av_malloc((uint)(Marshal.SizeOf(typeof(AVPacket))));
                    Functions.av_init_packet(pAVPacket);
                    stAVPacket = (AVPacket)Marshal.PtrToStructure(pAVPacket, typeof(AVPacket));
					// Rescale PTS from codec time base to stream time base.
					if (aFrames[nIndx].nPTS != Constants.AV_NOPTS_VALUE)
                        stAVPacket.pts = Functions.av_rescale_q(aFrames[nIndx].nPTS, cFormatTarget.stAVCodecContext.time_base, stAVStream.time_base);
                    if (aFrames[nIndx].bKeyframe)
                        stAVPacket.flags |= Constants.AV_PKT_FLAG_KEY;
                    stAVPacket.stream_index = stAVStream.index;
                    stAVPacket.size = aFrames[nIndx].nLength;
                    stAVPacket.data = aFrames[nIndx].pBytes;
                    // Keep the frame alive until libav releases the packet buffer (_fFrameUnlock callback).
                    lock(_aFramesLocked)
                        _aFramesLocked.Add(aFrames[nIndx]);
                    stAVPacket.buf = Functions.av_buffer_create(stAVPacket.data, stAVPacket.size, Marshal.GetFunctionPointerForDelegate(_fFrameUnlock), aFrames[nIndx], 0);
                    //System.IO.File.AppendAllText("packets", stAVPacket.pts + "\t" + stAVPacket.size + Environment.NewLine);
                    if (NULL != pBitStreamFilter && 0 != Functions.av_bitstream_filter_filter(pBitStreamFilter, cFormatTarget.pAVCodecContext, NULL, ref stAVPacket.data, ref stAVPacket.size, stAVPacket.data, stAVPacket.size, aFrames[nIndx].bKeyframe))
                            throw new Exception("error while filter a frame");
                    
                    Marshal.StructureToPtr(stAVPacket, pAVPacket, true);

                    lock(_cFormatCtx)
                        _cFormatCtx.PacketWrite(pAVPacket);
                    //Functions.av_free_packet(pAVPacket);
                    //Functions.av_freep(ref pAVPacket);
                }
			}
Exemplo n.º 19
0
				/// <summary>Resamples source samples into the target format, reusing one cached output frame.</summary>
				/// <param name="cSamplesSource">Samples to convert.</param>
				/// <returns>The cached target frame with the converted samples (overwritten on every call).</returns>
				public Frame Process(Frame cSamplesSource)
				{
					Frame cSamplesResult = cSamples;
					// Lazily allocate the reusable output frame on first use.
					if (null == cSamplesResult)
						cSamples = cSamplesResult = new Frame(_cFormatTarget);
					Process(cSamplesSource, cSamplesResult);
					return cSamplesResult;
				}
Exemplo n.º 20
0
			/// <summary>
			/// Converts raw audio samples to the target format and encodes them, returning zero or
			/// more encoded frames (the encoder may buffer input across calls).
			/// </summary>
			/// <param name="cFormatTarget">Target format; must be a Format.Audio.</param>
			/// <param name="cFrameSource">Source frame with bytes in this (source) format.</param>
			override public Frame[] Convert(Format cFormatTarget, Frame cFrameSource) //cFrameSource holds bytes in this format!!!
			{
				List<Frame> aRetVal = new List<Frame>();
				if (null == cFormatTarget || !(cFormatTarget is Format.Audio))
					throw new Exception("target format is null or has a wrong type");
				Format.Audio cFormatAudioTarget = (Format.Audio)cFormatTarget;
                IntPtr pPacket = NULL;
                Frame cFrameConverted;
                AVFrame cAVFrame;
                int nIndx = 0, nFrameSize, nSize, nPacketGot = 0, nOffset = 0;
                try
                {
                    if (eCodecID == cFormatTarget.eCodecID)
                    {
                        // Same codec and identical sample parameters: pass the bytes through unchanged.
                        if (nSamplesRate == cFormatAudioTarget.nSamplesRate && eSampleFormat == cFormatAudioTarget.eSampleFormat && nChannelsQty == cFormatAudioTarget.nChannelsQty)
                            return new Frame[] { new Frame(null, cFrameSource.aBytes) { nPTS = cFrameSource.nPTS, bKeyframe = cFrameSource.bKeyframe } };
                        if (NULL != _pCodec)
                            throw new NotImplementedException(); //TODO: implement conversion from encoded to raw
                    }
                    if (nBufferSize < cFrameSource.nLength)
                        throw new Exception("wrong bytes qty for specified audio format. Should be less than " + nBufferSize + " but got " + cFrameSource.nLength);

                    // Resample until the transform context is drained, accumulating converted bytes
                    // per plane in aByteStream (planar formats get one byte list per channel).
                    while(true)
                    {
                        cFrameConverted = Transform(cFormatAudioTarget, cFrameSource);
                        if (null == cFrameConverted || 1 > cFrameConverted.nLength)
                            break;
                        // Feed the source only on the first pass; later passes flush buffered samples.
                        cFrameSource = null;
                        cAVFrame = (AVFrame)Marshal.PtrToStructure(cFrameConverted, typeof(AVFrame));
                        if (null == aByteStream)
                        {
                            aByteStream = new List<List<byte>>();
                            for (nIndx = 0; cAVFrame.data.Length > nIndx; nIndx++)
                            {
                                if (NULL == cAVFrame.data[nIndx])
                                    break;
                                aByteStream.Add(new List<byte>());
                            }
                            if(1 > aByteStream.Count)
                                aByteStream.Add(new List<byte>());
                        }
                        // Plane offsets are computed relative to data[0] within the converted buffer.
                        int nLineSize = cFrameConverted.nLength / aByteStream.Count;
                        for (nIndx = 0; aByteStream.Count > nIndx; nIndx++)
                            aByteStream[nIndx].AddRange(cFrameConverted.aBuffer.Skip((int)((long)cAVFrame.data[nIndx] - (long)cAVFrame.data[0])).Take(nLineSize));
                    }
                    pPacket = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
                    Functions.av_init_packet(pPacket);
                    AVPacket stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                    stPacket.size = 0;
                    if (null == _cFrame)
                        _cFrame = new Frame(cFormatAudioTarget.nBufferSize);
                    // Encoder frame size in samples; fall back to the whole converted buffer when the codec does not fix it.
                    if (1 > (nFrameSize = cFormatAudioTarget.stAVCodecContext.frame_size))
                        nFrameSize = cFrameConverted.nLength / ((cFormatAudioTarget.nBitsPerSample / 8) * cFormatAudioTarget.nChannelsQty);
                    nFrameSize *= (cFormatAudioTarget.nBitsPerSample / 8);
                    if (null == cFormatAudioTarget._cFrame)
                        cFormatTarget._cFrame = new Frame(this, nFrameSize * cFormatAudioTarget.nChannelsQty);
                    // Interleaved (single-plane) data carries all channels in one line.
                    if (2 > aByteStream.Count)
                        nFrameSize *= cFormatAudioTarget.nChannelsQty;
                    // Encode full frames while enough bytes are buffered and the packet buffer has room.
                    while (nFrameSize <= aByteStream[0].Count && _cFrame.nLengthBuffer > (nOffset + stPacket.size))
                    {
                        for (nIndx = 0; aByteStream.Count > nIndx; nIndx++)
                        {
                            aByteStream[nIndx].CopyTo(0, cFormatTarget._cFrame.aBuffer, nIndx * nFrameSize, nFrameSize);
                            aByteStream[nIndx].RemoveRange(0, nFrameSize);
                        }
                        stPacket.data = _cFrame.pBytes + nOffset;
                        stPacket.size = _cFrame.nLengthBuffer - nOffset;
                        Marshal.StructureToPtr(stPacket, pPacket, true);

                        //lock (helper._oSyncRootGlobal)
                            nSize = Functions.avcodec_encode_audio2(cFormatAudioTarget.pAVCodecContext, pPacket, cFormatTarget._cFrame, ref nPacketGot);
                        if (0 > nSize)
                            throw new Exception("audio encoding failed\n");
                        if (0 < nPacketGot)
                        {
                            stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                            if (0 < stPacket.size)
                            {
                                aRetVal.Add(new Frame(_cFrame.aBuffer.Skip(nOffset).Take(stPacket.size).ToArray()) { nPTS = stPacket.pts }); //TODO: implement frame "inheritance" (share one aBytes between both Frames)
                                nOffset += stPacket.size;
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    (new Logger()).WriteError(ex);
                }
                finally
                {
                    // Always release the native packet memory.
                    if (NULL != pPacket)
                    {
                        Functions.av_free_packet(pPacket);
                        Functions.av_freep(ref pPacket);
                    }
                    //if (NULL != pAVFrame)
                    //    Functions.avcodec_free_frame(ref pAVFrame);
                }
				return aRetVal.ToArray();
			}
Exemplo n.º 21
0
			/// <summary>Resamples audio samples to the target format using a context-owned reusable frame.</summary>
			/// <param name="cFormatAudioTarget">Target audio format.</param>
			/// <param name="cSamplesSource">Samples to convert.</param>
			/// <returns>The converted samples (or cSamplesSource itself when formats already match).</returns>
			public Frame Transform(Audio cFormatAudioTarget, Frame cSamplesSource)
			{
				return Transform(cFormatAudioTarget, cSamplesSource, null);
			}
Exemplo n.º 22
0
			/// <summary>
			/// Flushes the audio encoder's buffered samples by feeding it a NULL frame.
			/// </summary>
			/// <returns>Encoded frames drained from the encoder (possibly empty).</returns>
			public Frame[] Flush()
			{
                if (null == _cFrame)
                    _cFrame = new Frame(nBufferSize);
                List<Frame> aRetVal = new List<Frame>();
                IntPtr pPacket = NULL;
                try
                {
                    int nPacketGot = 0, nOffset = 0;
                    pPacket = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
                    Functions.av_init_packet(pPacket);
                    AVPacket stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                    stPacket.size = 0;
                    while (true)
                    {
                        stPacket.data = _cFrame.pBytes + nOffset;
                        stPacket.size = _cFrame.nLengthBuffer - nOffset;
                        Marshal.StructureToPtr(stPacket, pPacket, true);
                        // Passing NULL as the frame asks the encoder to drain its internal buffer.
                        //lock (helper._oSyncRootGlobal)
                            Functions.avcodec_encode_audio2(pAVCodecContext, pPacket, NULL, ref nPacketGot);
                        stPacket = (AVPacket)Marshal.PtrToStructure(pPacket, typeof(AVPacket));
                        if (0 < nPacketGot && 0 < stPacket.size)
                        {
                            aRetVal.Add(new Frame(_cFrame.aBuffer.Skip(nOffset).Take(stPacket.size).ToArray()) { nPTS = stPacket.pts }); //TODO: implement frame "inheritance" (share one aBytes between both Frames)
                            nOffset += stPacket.size;
                        }
                        // NOTE(review): the matching 'else' is commented out, so the loop always breaks
                        // after a single iteration — confirm whether a full drain loop was intended.
//                        else
                        break;
                    }
                }
                catch (Exception ex)
                {
                    (new Logger()).WriteError(ex);
                }
                finally
                {
                    // Always release the native packet memory.
                    if (NULL != pPacket)
                    {
                        Functions.av_free_packet(pPacket);
                        Functions.av_freep(ref pPacket);
                    }
                }
				return aRetVal.ToArray();
			}
Exemplo n.º 23
0
			/// <summary>
			/// Decodes one video frame and one audio frame into their queues, then throttles the
			/// producer while the frame cache is full.
			/// </summary>
			/// <returns>true to keep producing; false when the file ended, video decoding failed, or a close was requested.</returns>
			private bool AddFrameToQueue()
			{
				bool bVideo = true, bAudio = true;
				if (null != _aqVideoFrames)
				{
					// Retry video decode until a frame is produced or the file ends.
					while (!_bFileEnd)
					{
						try
						{
							FrameDecodeVideo();
							_nPreparedFramesIndx++;
                            break;
						}
						catch (Exception ex)
						{
							bVideo = false;
							if (!_bFileEnd)
								(new Logger()).WriteWarning(ex); //logging
						}
					}
				}
				if ((bVideo || _bFileEnd) && null != _aqAudioFrames)
				{
					while (!_bFileEnd && bVideo || !bVideo)
					{
						try
						{
							if (_bFileEnd)
								while (true)
									FrameDecodeAudio(); // drain the audio packets fetched earlier; this loop ends by falling into the catch
							else
								FrameDecodeAudio();
							break;
						}
						catch (Exception ex)
						{
							bAudio = false;
							if (!_bFileEnd)
								(new Logger()).WriteWarning(ex); //logging
							else
								break;
						}
					}
				}
				if (bVideo && !bAudio) // balance an incomplete last frame with a silent audio frame so A/V desync does not accumulate
				{
					(new Logger()).WriteWarning("queue: bad last audio frame. silenced frame added."); //logging
					Frame cFrame = new Frame(_cFormatAudioTarget);
					lock (_aqAudioFrames)
						_aqAudioFrames.Enqueue(cFrame);
				}

				if (_bFileEnd || !bVideo)
					return false;
				// Throttle: wait while the frame cache is full (40ms ≈ one frame at 25fps).
				while (nCacheSize < (null == _aqVideoFrames ? _aqAudioFrames.Count : _aqVideoFrames.Count))
				{
					if (_bClose)
						return false;
					System.Threading.Thread.Sleep(40);//FPS
				}
				return true;
			}
Exemplo n.º 24
0
			/// <summary>
			/// Decodes video packets until a complete picture is produced, converts it to the target
			/// format and enqueues it in _aqVideoFrames (throws when the file ends).
			/// </summary>
			private void FrameDecodeVideo()
			{
				if (null == _cFormatVideoTarget)
					throw new NotImplementedException("null == cFormatTarget"); //UNDONE: still need to implement returning raw packets
				Logger.Timings cTimings = new Logger.Timings();

				int nVideoFrameFinished = 0;
				IntPtr pPacketNext = NULL;

				while (true)
				{
					// Wait for the next video packet from the demuxer.
					while (NULL == pPacketNext)
					{
						while (1 > _aqVideoPackets.Count)
						{
							if (_bFileEnd)
								throw new Exception("file ended");
							lock (_cSyncRoot)
								GetAndSortNextPacket();
						}
						pPacketNext = _aqVideoPackets.Peek();
					}
					cTimings.Restart("packets");
					if (null == _cFrameVideo)
						_cFrameVideo = new Frame(_cFormatVideo, _cFormatVideo.nBufferSize);
					try
					{
						int nError = Functions.avcodec_decode_video2(_cFormatVideo.pAVCodecContext, _cFrameVideo.AVFrameGet(), ref nVideoFrameFinished, pPacketNext);
						Functions.av_free_packet(pPacketNext);
						Functions.av_freep(ref pPacketNext);
					}
					catch (Exception ex)
					{
						(new Logger()).WriteError(ex);
					}
					_aqVideoPackets.Dequeue();
					cTimings.Restart("decode");
					// nVideoFrameFinished > 0 means the decoder produced a complete picture.
					if (0 < nVideoFrameFinished)
					{
						Frame cFrame;
						if (1 > _aqVideoFramesFree.Count) //frame pooling: allocate a new frame only when the free queue is empty
						{
							cFrame = new Frame(_cFormatVideoTarget, _cFormatVideoTarget.nBufferSize);
							cFrame.Disposing += new Frame.DisposingDelegate(cFrameVideo_Disposing);
						}
						else
							lock (_aqVideoFramesFree)
								cFrame = _aqVideoFramesFree.Dequeue();
						//Functions.avpicture_fill(_pAVFrameTarget, cFrame.aBytes, _cFormatVideoTarget.ePixelFormat, _cFormatVideoTarget.nWidth, _cFormatVideoTarget.nHeight);
						_cFormatVideo.Transform(_cFormatVideoTarget, _cFrameVideo, cFrame);   // lock inside!
						lock (_aqVideoFrames)
							_aqVideoFrames.Enqueue(cFrame);


						// Optionally mirror the raw frame bytes into the writing queue.
						if (_bDoWritingFrames)
						{
							if (null != cFrame)
							{
								byte[] aBytes = new byte[_cFormatVideoTarget.nBufferSize];
								System.Runtime.InteropServices.Marshal.Copy(cFrame.p, aBytes, 0, (int)_cFormatVideoTarget.nBufferSize);
								lock (_aqWritingFrames)
									_aqWritingFrames.Enqueue(aBytes);
							}
						}

						cTimings.Restart("transform");
						break;
					}
				}
				cTimings.Stop("frame:decode:video: >40ms", 40);
			}
Exemplo n.º 25
0
			/// <summary>
			/// Decodes video packets until a complete picture is produced, converts it to the target
			/// format and enqueues it; at end of file, empty packets flush the decoder's delayed frames.
			/// </summary>
			private void FrameDecodeVideo()
			{
				(new Logger()).WriteDebug4("in");
				if (null == _cFormatVideoTarget)
					throw new NotImplementedException("null == cFormatTarget"); //UNDONE: still need to implement returning raw packets
				Logger.Timings cTimings = new Logger.Timings("ffmpeg:file");
                
                int nVideoFrameFinished = 0;
				IntPtr pPacketNext = NULL;

				while (true)
				{
					while (NULL == pPacketNext)
					{
						while (1 > _aqVideoPackets.Count)
						{
							if (_bFileEnd)
							{
								// File ended: feed a zeroed packet to flush delayed frames out of the decoder.
								pPacketNext = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
								helpers.WinAPI.memset(pPacketNext, 0, Marshal.SizeOf(typeof(AVPacket)));
								Functions.av_init_packet(pPacketNext);
								break;
							}
							lock (_oSyncRoot)
                                PacketNext();
						}
						if (!_bFileEnd)
							pPacketNext = _aqVideoPackets.Peek();
					}
					cTimings.Restart("packets");
					if (null == _cFrameVideo)
						_cFrameVideo = new Frame(_cFormatVideo);
					try
					{
						int nError = Functions.avcodec_decode_video2(_cFormatVideo.pAVCodecContext, _cFrameVideo, ref nVideoFrameFinished, pPacketNext);
						// Real (queued) packets are released here; flush packets are freed at the end.
						if (!_bFileEnd)
						{
							Functions.av_free_packet(pPacketNext);
							Functions.av_freep(ref pPacketNext);
							_aqVideoPackets.Dequeue();
						}
					}
					catch (Exception ex)
					{
						(new Logger()).WriteError(ex);
					}
						
					cTimings.Restart("decode");
					if (0 < nVideoFrameFinished)
					{
						Frame cFrame;
                        if (1 > _aqVideoFramesFree.Count) //frame pooling: allocate a new frame only when the free queue is empty
                        {
                            cFrame = new Frame(_cFormatVideoTarget);
                            cFrame.Disposing += cFrameVideo_Disposing;
							(new Logger()).WriteDebug3("video frame added. total:" + nFramesQueueVideo++);
                        }
                        else
                            lock (_aqVideoFramesFree)
                                cFrame = _aqVideoFramesFree.Dequeue();
						_cFormatVideo.Transform(_cFormatVideoTarget, _cFrameVideo, cFrame);
                        cFrame.bKeyframe = _cFrameVideo.bKeyframe;
                        cFrame.nPTS = _cFrameVideo.nPTS;
                        lock (_aqVideoFrames)
							_aqVideoFrames.Enqueue(cFrame);


						// Optionally mirror the raw frame bytes into the writing queue.
						if (_bDoWritingFrames)
						{
							if (null != cFrame)
							{
								byte[] aBytes = new byte[_cFormatVideoTarget.nBufferSize];
								System.Runtime.InteropServices.Marshal.Copy(cFrame.pBytes, aBytes, 0, (int)_cFormatVideoTarget.nBufferSize);
								lock (_aqWritingFrames)
									_aqWritingFrames.Enqueue(aBytes);
							}
						}

						cTimings.Restart("transform");
						// During the end-of-file flush keep looping to drain all delayed frames.
						if (!_bFileEnd)
							break;
					}
					else if (_bFileEnd)
					{
						// No more delayed frames: free the flush packet and signal end of stream.
						if (NULL != pPacketNext)
						{
							Functions.av_free_packet(pPacketNext);
							Functions.av_freep(ref pPacketNext);
						}
						throw new Exception("file ended");
					}
				}
				cTimings.Stop("frame:decode:video: >40ms", 40); //FPS
				(new Logger()).WriteDebug4("return");
			}
Exemplo n.º 26
0
			/// <summary>
			/// Decodes queued audio packets until exactly one target-format frame's worth of bytes
			/// has been accumulated, then enqueues that frame on <c>_aqAudioFrames</c>.
			/// Decoded bytes that overflow the frame are stashed in <c>_aBytesRemainder</c> and
			/// prepended on the next call. Frames are recycled via <c>_aqAudioFramesFree</c>
			/// (repopulated by <c>cFrameAudio_Disposing</c>) to limit allocations.
			/// </summary>
			/// <exception cref="NotImplementedException">Raw-packet passthrough (null target format) is not implemented.</exception>
			/// <exception cref="Exception">Thrown when the file ends before a full frame could be assembled.</exception>
			private void FrameDecodeAudio()
			{
				(new Logger()).WriteDebug4("in");
				if (null == _cFormatAudioTarget)
					throw new NotImplementedException("null == cFormatTarget"); //UNDONE: need to implement returning raw packets

				Logger.Timings cTimings = new Logger.Timings("ffmpeg:file");
                bool bFrameDecoded = false;
				Frame cSamplesTarget = null;
				Frame cFrame;
                if (1 > _aqAudioFramesFree.Count) // frame recycling: allocate only when the free pool is empty
                {
                    cFrame = new Frame(_cFormatAudioTarget);
                    cFrame.Disposing += cFrameAudio_Disposing;
                    (new Logger()).WriteDebug3("audio frame added. total:" + nFramesQueueAudio++);
                }
                else
                    lock (_aqAudioFramesFree)
                        cFrame = _aqAudioFramesFree.Dequeue();

				int nBytesCapacity = 0;
				int nBytesOffset = 0;
				byte[] aPacketBytes;
				int nLength = 0;
				AVPacket stPacket;
				// Prepend bytes left over from the previous call before decoding anything new.
				if (null != _aBytesRemainder)
				{
                    if (_aBytesRemainder.Length > cFrame.aBuffer.Length)
                    {
                        // Remainder alone already fills the whole frame; keep the overflow for the next call.
                        //(new Logger()).WriteWarning("_aBytesRemainder.Length > cFrame.aBuffer.Length : " + _aBytesRemainder.Length + ":" + cFrame.aBuffer.Length);
                        Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, cFrame.aBuffer.Length);
                        _aBytesRemainder = _aBytesRemainder.Skip(cFrame.aBuffer.Length).Take(_aBytesRemainder.Length - cFrame.aBuffer.Length).ToArray();
                        nBytesOffset += cFrame.aBuffer.Length;
                    }
                    else
                    {
                        Array.Copy(_aBytesRemainder, 0, cFrame.aBuffer, 0, _aBytesRemainder.Length);
                        nBytesOffset += _aBytesRemainder.Length;
                        _aBytesRemainder = null;
                    }
				}
				// Accumulate decoded bytes until the frame is full.
				// NOTE(review): assumes cFrame.nLength matches cFrame.aBuffer.Length — TODO confirm in Frame.
				while (cFrame.nLength > nBytesOffset)
				{
					aPacketBytes = null;
					// Lazily allocate the scratch AVPacket used to feed the decoder;
					// _stPacketAudio mirrors it on the managed side across iterations.
					if (NULL == _pPacketAudioDub)
					{
                        _pPacketAudioDub = Functions.av_malloc(Marshal.SizeOf(typeof(AVPacket)));
						helpers.WinAPI.memset(_pPacketAudioDub, 0, Marshal.SizeOf(typeof(AVPacket)));
                        Functions.av_init_packet(_pPacketAudioDub);
						_stPacketAudio = (AVPacket)Marshal.PtrToStructure(_pPacketAudioDub, typeof(AVPacket));

					}
					cTimings.Restart("allocation");
					while (true)
					{
						// NOTE: the audio packet can contain several frames 
						while (_stPacketAudio.size > 0)
						{
                            if (null == _cFrameAudio)
                                _cFrameAudio = new Frame();
                            // Push the managed packet state back to native memory before each decode call.
                            Marshal.StructureToPtr(_stPacketAudio, _pPacketAudioDub, true);
                            nLength = Functions.avcodec_decode_audio4(_cFormatAudio.pAVCodecContext, _cFrameAudio, ref bFrameDecoded, _pPacketAudioDub);
							cTimings.CheckIn("decode");
							if (nLength < 0)
							{
								// Decode error: drop the rest of this packet and fetch the next one.
								_stPacketAudio.size = 0;
								break;
							}
							// Advance past the consumed bytes; the packet may hold more frames.
							_stPacketAudio.data += nLength;
							_stPacketAudio.size -= nLength;
                            if (!bFrameDecoded)
								continue;
							cTimings.Restart("frame");
                            // Resample/convert the decoded frame into the target audio format.
                            cSamplesTarget = _cFormatAudio.Transform(_cFormatAudioTarget, new Frame(_cFormatAudio, _cFrameAudio));
                            
                            
                            //cSamplesTarget = new Frame(_cFormatAudio);
                            //aPacketBytes = cSamplesTarget.aBytes;
                            //cSamplesTarget.Dispose();
                            
                            
                            aPacketBytes = cSamplesTarget.aBytes;
							cTimings.Restart("transform");
							break;
						}
						if (null != aPacketBytes)
							break;
						// Current source packet exhausted — free it and pull the next from the queue.
						if (NULL != _pPacketAudio)
						{
                            Functions.av_free_packet(_pPacketAudio);
							Functions.av_freep(ref _pPacketAudio);
                            cTimings.Restart("packet free");
						}
						while (!_bFileEnd && 1 > _aqAudioPackets.Count)
						{
							lock (_oSyncRoot)
                                PacketNext();
						}
						if (_bFileEnd && 1 > _aqAudioPackets.Count)
							break;
						lock (_oSyncRoot)
							_pPacketAudio = _aqAudioPackets.Dequeue();
						stPacket = (AVPacket)Marshal.PtrToStructure(_pPacketAudio, typeof(AVPacket));

						// Point the scratch packet at the new packet's payload.
						_stPacketAudio.data = stPacket.data;
						_stPacketAudio.size = stPacket.size;
						cTimings.Restart("packets");
					}
					if (null == aPacketBytes)
						throw new Exception("audio packet is null");
					nBytesCapacity = aPacketBytes.Length;
                    // If the decoded chunk overshoots the frame, keep the excess for the next call.
                    if (cFrame.nLength < nBytesOffset + nBytesCapacity)
					{
						nBytesCapacity = cFrame.nLength - nBytesOffset;
						_aBytesRemainder = new byte[aPacketBytes.Length - nBytesCapacity];
						Array.Copy(aPacketBytes, nBytesCapacity, _aBytesRemainder, 0, _aBytesRemainder.Length);
					}
					Array.Copy(aPacketBytes, 0, cFrame.aBuffer, nBytesOffset, nBytesCapacity);
					nBytesOffset += nBytesCapacity;
					cTimings.Restart("accumulation");
				}
				cTimings.Stop("frame:decode:audio: >40ms", 40);//FPS
				lock (_aqAudioFrames)
					_aqAudioFrames.Enqueue(cFrame);
				(new Logger()).WriteDebug4("return");
			}
Exemplo n.º 27
0
		/// <summary>
		/// Converts a source frame into frames of the requested target format.
		/// NOTE(review): the array return suggests one source frame may yield zero, one or
		/// several target frames — confirm per concrete implementation.
		/// </summary>
		/// <param name="cFormatTarget">Format the returned frames must conform to.</param>
		/// <param name="cFrameSource">Frame to convert.</param>
		/// <returns>The converted frames in the target format.</returns>
		public abstract Frame[] Convert(Format cFormatTarget, Frame cFrameSource);
Exemplo n.º 28
0
			/// <summary>
			/// Converts a source video frame into the file's video format, wraps each resulting
			/// frame in an AVPacket (pts rescaled to the stream time base, keyframe flag carried
			/// over) and writes it to the output via <c>_cFormatCtx.PacketWrite</c>.
			/// </summary>
			/// <param name="cFormatVideoSource">Format describing <paramref name="cFrameSource"/>; performs the conversion to <c>_cFormatVideo</c>.</param>
			/// <param name="cFrameSource">Frame to encode and write.</param>
			/// <exception cref="Exception">Thrown when the file has no video stream.</exception>
			public void VideoFrameNext(Format.Video cFormatSource, Frame cFrameSource)
			{
				if (NULL == _pStreamVideo)
					throw new Exception("there is no video stream in file");
				AVStream stAVStream = (AVStream)Marshal.PtrToStructure(_pStreamVideo, typeof(AVStream));
				//_nPTSVideo = (double)stAVStream.pts.val * stAVStream.time_base.num / stAVStream.time_base.den;
				//(1 / FPS) * sample rate * frame number
				// Lazily allocate the reusable conversion target frame.
				if (null == _cFrameVideo)
					_cFrameVideo = new Frame(_cFormatVideo, _cFormatVideo.nBufferSize);
				// One source frame may produce several output frames (e.g. after format conversion).
				Frame[] aFrames = cFormatSource.Convert(_cFormatVideo, cFrameSource, _cFrameVideo);
				AVPacket stAVPacket = new AVPacket();
				IntPtr pAVPacket;
				for (int nIndx = 0; aFrames.Length > nIndx; nIndx++)
				{
					cFrameSource = aFrames[nIndx];
					// Allocate and init a native packet, then mirror it into the managed struct.
					pAVPacket = Functions.av_malloc((uint)(Marshal.SizeOf(stAVPacket)));
					Functions.av_init_packet(pAVPacket);
					stAVPacket = (AVPacket)Marshal.PtrToStructure(pAVPacket, typeof(AVPacket));
					// Rescale the frame pts from the codec time base to the stream time base.
					if (cFrameSource.nPTS != Constants.AV_NOPTS_VALUE)
						stAVPacket.pts = Functions.av_rescale_q(cFrameSource.nPTS, _cFormatVideo.stAVCodecContext.time_base, stAVStream.time_base);
					if (cFrameSource.bKeyframe)
						stAVPacket.flags |= Constants.AV_PKT_FLAG_KEY;
					stAVPacket.stream_index = stAVStream.index;
					stAVPacket.size = cFrameSource.nLength;
					stAVPacket.data = cFrameSource.p; // packet borrows the frame's buffer; frame must outlive the write
					// Push managed packet fields back to native memory before writing.
					Marshal.StructureToPtr(stAVPacket, pAVPacket, true);
					//System.IO.File.AppendAllText("packets", stAVPacket.pts + "\t" + stAVPacket.size + "\r\n");
					_cFormatCtx.PacketWrite(pAVPacket);
					// Release the packet's side data and the packet struct itself.
					Functions.av_free_packet(pAVPacket);
					Functions.av_freep(ref pAVPacket);
					//cFrameSource.Dispose();
				}
			}