Example 1
		private void ResponseCallback(IAsyncResult iAsynchronousResult)
		{
			try
			{
				_ahMediaSourceAttributes = null;
				_aMediaStreamDescriptions = null;
				_cMP3WaveFormat = null;
				_cStream = new MediaStream(((HttpWebResponse)_cHttpWebRequest.EndGetResponse(iAsynchronousResult)).GetResponseStream());

				_cStream.Position = 0;
				// Initialize data structures to pass to the Media pipeline via the MediaStreamSource
				_ahMediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
				_aMediaStreamDescriptions = new List<MediaStreamDescription>();

				Dictionary<MediaStreamAttributeKeys, string> ahMediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();

				ahMediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
				ahMediaStreamAttributes[MediaStreamAttributeKeys.Width] = _nVideoFrameWidth.ToString();
				ahMediaStreamAttributes[MediaStreamAttributeKeys.Height] = _nVideoFrameHeight.ToString();
				_nCurrentFrameVideo = 0;
				_nCurrentFrameAudio = 0;

				_cMediaStreamVideoDescription = new MediaStreamDescription(MediaStreamType.Video, ahMediaStreamAttributes);
				_aMediaStreamDescriptions.Add(_cMediaStreamVideoDescription);

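				// Note: the parsing below assumes the MP4 atoms arrive in the fixed order
				// ftyp, wide, mdat; any other layout would make these casts fail.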
				FileTypeCompatibility cFtyp = (FileTypeCompatibility)Atom.Read(_cStream);
				Wide cWide = (Wide)cFtyp.cNext;
				MovieData cMdat = (MovieData)cWide.cNext;

				nDataStart = cMdat.DataOffsetGet();
				nDataSize = cMdat.DataSizeGet();

				//1. When the next four bytes in the bitstream form the four-byte sequence 0x00000001, the next byte in the byte
				//stream (which is a zero_byte syntax element) is extracted and discarded and the current position in the byte
				//stream is set equal to the position of the byte following this discarded byte.
				// A chained ReadByte() comparison can miss overlapping start codes, so count zero bytes instead.
				int nByte, nZeroes = 0;
				while (true)
				{
					nByte = _cStream.ReadByte();
					if (0 > nByte) // guard against spinning forever at end of stream
						throw new WebException("start code 0x00000001 not found in the response stream");
					if (1 == nByte && 2 < nZeroes) // 0x00 0x00 0x00 0x01 found
						break;
					nZeroes = 0 == nByte ? nZeroes + 1 : 0;
				}

				//2. The next three-byte sequence in the byte stream (which is a start_code_prefix_one_3bytes) is extracted and
				//discarded and the current position in the byte stream is set equal to the position of the byte following this
				//three-byte sequence.
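				// In this implementation the scan above has already consumed that three-byte prefix
				// together with the zero_byte, so the stream now sits at the first byte of the NAL unit.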


				_cMP3WaveFormat = new MpegLayer3WaveFormat();
				_cMP3WaveFormat.WaveFormatEx = new WaveFormatEx();

				_cMP3WaveFormat.WaveFormatEx.FormatTag = WaveFormatEx.FormatMP3;
				//_cMP3WaveFormat.WaveFormatEx.Channels = (short)((mpegLayer3Frame.Channels == MediaParsers.Channel.SingleChannel) ? 1 : 2);
				_cMP3WaveFormat.WaveFormatEx.Channels = (short)2;
				//_cMP3WaveFormat.WaveFormatEx.SamplesPerSec = mpegLayer3Frame.SamplingRate;
				_cMP3WaveFormat.WaveFormatEx.SamplesPerSec = _nAudioSampleRate;
				//_cMP3WaveFormat.WaveFormatEx.AvgBytesPerSec = mpegLayer3Frame.Bitrate / 8;
				_cMP3WaveFormat.WaveFormatEx.AvgBytesPerSec = _nAudioBitRate / 8;
				_cMP3WaveFormat.WaveFormatEx.BlockAlign = 1;
				_cMP3WaveFormat.WaveFormatEx.BitsPerSample = 0;
				_cMP3WaveFormat.WaveFormatEx.Size = 12;

				_cMP3WaveFormat.Id = 1;
				_cMP3WaveFormat.BitratePaddingMode = 0;
				_cMP3WaveFormat.FramesPerBlock = 1;
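				// MPEG-1 Layer III frame size in bytes: 144 * bitrate / sample rate,
				// plus one byte when the frame's padding bit is set.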
				//_cMP3WaveFormat.BlockSize = (short)mpegLayer3Frame.FrameSize; //(short)(144 * nBitRate / _nAudioSampleRate + _cMP3WaveFormat.BitratePaddingMode);
				_cMP3WaveFormat.BlockSize = (short)(144 * _nAudioBitRate / _nAudioSampleRate + _cMP3WaveFormat.BitratePaddingMode);
				_cMP3WaveFormat.CodecDelay = 0;

				ahMediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
				ahMediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = _cMP3WaveFormat.ToHexString();
				_cMediaStreamAudioDescription = new MediaStreamDescription(MediaStreamType.Audio, ahMediaStreamAttributes);
				_aMediaStreamDescriptions.Add(_cMediaStreamAudioDescription);

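				// Presumably the third byte of an MPEG-1 Layer III frame header; its high nibble
				// is the bitrate index (0101 = 64 kbit/s, 1001 = 128 kbit/s for MPEG-1 Layer III).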
				switch (_nAudioBitRate)
				{
					case 64000:
						_nAudioMP3ControlByte = 0x54;
						break;
					case 128000:
						_nAudioMP3ControlByte = 0x94;
						break;
					default:
						throw new NotSupportedException("unsupported audio bit rate: " + _nAudioBitRate);
				}
				_aFramesOffsets.Add(new FrameOffset(0, 0));
				_nFrameDuration = TimeSpan.FromSeconds((double)1 / 25).Ticks; // 25 frames per second
				_nSampleDuration = TimeSpan.FromSeconds(((double)1 / _nAudioSampleRate) * 1152).Ticks; // 1152 samples per MPEG-1 Layer III frame

				try
				{
					long nBufferTicks = TimeSpan.FromSeconds(_nBufferSeconds).Ticks;
					while ((ulong)_nBufferSeconds > _nFramesQty || nBufferTicks > (_aAudioNALs.Count * _nSampleDuration)) // if the clip is shorter than the buffer, keep parsing until the end-of-data exception breaks us out of the loop
						NALUnitParse();
					_cThread = new System.Threading.Thread(NALUnitsReceive);
					_cThread.Start();
				}
				catch
				{
					_bCached = true;
				}

				TimeSpan tsDuration;
				if (1 > _nFramesQty)
				{
					lock (_cSyncRoot)
					{
						long nDurationAudio = (long)(_aAudioNALs.Count * _nSampleDuration);
						long nDurationVideo = (long)(_aVideoNALs.Count(row => row.bFrameStart) * _nFrameDuration);
						tsDuration = TimeSpan.FromTicks(nDurationAudio > nDurationVideo ? nDurationAudio : nDurationVideo);
					}
				}
				else
					tsDuration = TimeSpan.FromMilliseconds(_nFramesQty * 40); // 40 ms per frame at 25 FPS

				_ahMediaSourceAttributes[MediaSourceAttributesKeys.Duration] = tsDuration.Ticks.ToString(System.Globalization.CultureInfo.InvariantCulture);
				_ahMediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = true.ToString();

				ReportOpenMediaCompleted(_ahMediaSourceAttributes, _aMediaStreamDescriptions);
			}
			catch (WebException e)
			{
				// Report the failure to the media pipeline instead of swallowing it silently.
				ErrorOccurred(e.Message);
			}
		}
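
For context, here is a minimal sketch of how this callback is typically reached: the media pipeline calls OpenMediaAsync on the MediaStreamSource subclass once a MediaElement is handed the source, and that override starts the request whose completion lands in ResponseCallback above. The field _cUri and the class name Mp4MediaStreamSource are illustrative assumptions; only _cHttpWebRequest and ResponseCallback appear in the example itself.

		// Hypothetical wiring inside the same MediaStreamSource subclass.
		protected override void OpenMediaAsync()
		{
			_cHttpWebRequest = (HttpWebRequest)WebRequest.Create(_cUri); // _cUri: assumed Uri field
			_cHttpWebRequest.BeginGetResponse(new AsyncCallback(ResponseCallback), null);
		}

		// On the UI side the source is handed to a MediaElement, which triggers OpenMediaAsync:
		// _cMediaElement.SetSource(new Mp4MediaStreamSource(new Uri("http://example.com/clip.mp4")));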