protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;

            if (frameStreamOffset + dataSource.FrameBufferSize > dataSource.FrameStreamSize)
            {
                dataSource.FrameStream.Seek(0, SeekOrigin.Begin);
                frameStreamOffset = 0;
            }

            Task tsk = dataSource.CameraEffect.GetNewFrameAndApplyEffect().AsTask();

            // Wait until the asynchronous call completes, then report to the
            // MediaElement that a new sample is ready.
            tsk.ContinueWith((task) =>
            {
                dataSource.FrameStream.Position = 0;

                MediaStreamSample msSamp = new MediaStreamSample(
                    videoStreamDescription,
                    dataSource.FrameStream,
                    frameStreamOffset,
                    dataSource.FrameBufferSize,
                    currentTime,
                    emptySampleDict);

                ReportGetSampleCompleted(msSamp);
                currentTime += frameTime;
                frameStreamOffset += dataSource.FrameBufferSize;
            });
        }
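
All the samples in these examples carry timestamps in 100-nanosecond units, the unit MediaStreamSource expects. A minimal sketch (illustrative values, not taken from the example above) of how frameTime and currentTime are typically derived for a fixed-rate video source:

        // One second is 10,000,000 ticks of 100 ns each.
        const long TicksPerSecond = 10000000L;
        int frameRate = 30;                           // assumed preview frame rate
        long frameTime = TicksPerSecond / frameRate;  // ~333,333 ticks per frame
        long currentTime = 0;
        // After reporting each sample:
        currentTime += frameTime;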
Example #2
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            MediaStreamSample audioSample = null;

            if (!sample_enumerator.MoveNext ())
            {
                // At the end of the file, return a sample with a null stream, which
                // tells the MediaStreamSource and MediaElement to close down.
                audioSample = new MediaStreamSample(
                    this.audioStreamDescription,
                    null,
                    0,
                    0,
                    0,
                    emptyDict);
                this.ReportGetSampleCompleted(audioSample);
            }
            else
            {
                // FIXME: Stream should not be created every time.
                SampleBuffer buf = (SampleBuffer) sample_enumerator.Current;
                audioSample = new MediaStreamSample(
                    this.audioStreamDescription,
                    new MemoryStream (buf.Data, buf.Index, buf.Count, false),
                    buf.Index,
                    buf.Count,
                    timePosition,
                    emptyDict);
                timePosition += buf.Count * 10000000 / (44100 * 2 * 2);
                this.ReportGetSampleCompleted(audioSample);
            }
        }
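
The timestamp increment above converts a byte count into 100-nanosecond ticks for 44.1 kHz, 16-bit stereo PCM (44100 samples/s x 2 channels x 2 bytes per sample). The same arithmetic as a general helper, with the format spelled out (a sketch; the name and parameters are illustrative):

        // ticks = byteCount * 10,000,000 / (sampleRate * channels * bytesPerSample)
        // For 44.1 kHz stereo 16-bit audio this matches
        // buf.Count * 10000000 / (44100 * 2 * 2) in the example above.
        static long PcmBytesToTicks(long byteCount, int sampleRate, int channels, int bytesPerSample)
        {
            return byteCount * 10000000L / (sampleRate * channels * bytesPerSample);
        }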
Example #3
		protected override void GetSampleAsync (MediaStreamType mediaStreamType)
		{
			Mp3Frame frame;
			MediaStreamSample sample;
			Dictionary<MediaSampleAttributeKeys, string> attribs = new Dictionary<MediaSampleAttributeKeys, string> ();

			//string format = "HH:mm:ss.ffff";
			//if (opened == DateTime.MinValue)
			//    opened = DateTime.Now;
			//Debug.WriteLine ("{0} GetSampleAsync stamp: {1}", (DateTime.Now - opened).ToString (), TimeSpan.FromMilliseconds (current_pts / 10000).ToString ());

			try {
				if (this.frame != null) {
					frame = this.frame;
					this.frame = null;
				} else {
					frame = Mp3Frame.Read (stream);
				}

				sample = new MediaStreamSample (description, new MemoryStream (frame.data), 0, frame.data.Length, current_pts, attribs);

				current_pts += frame.Duration;

				ReportGetSampleCompleted (sample);
			} catch (System.IO.EndOfStreamException ex) {
				Console.WriteLine (ex);
				sample = new MediaStreamSample (description, null, 0, 0, 0, attribs);
				ReportGetSampleCompleted (sample);
			} catch (Exception ex) {
				Console.WriteLine (ex);
				ReportGetSampleCompleted (null);
			}
		}
Example #4
        protected override void GetSampleAsync(
            MediaStreamType mediaStreamType)
        {
            while (mediaStreamSamples_[mediaStreamType].Count == 0)
            {
                Sample sample;
                Error ec = demuxer_.get_sample(out sample);
                if (ec == Error.success)
                {
                    Dictionary<MediaSampleAttributeKeys, string> mediaSampleAttributes =
                        new Dictionary<MediaSampleAttributeKeys, string>();
                    if (sample.is_sync)
                    {
                        mediaSampleAttributes[MediaSampleAttributeKeys.KeyFrameFlag] = bool.TrueString;
                    }
                    MediaStreamType type = mediaStreamTypes_[(int)sample.index];
                    MediaStreamSample sample2 = new MediaStreamSample(
                        mediaStreamDescriptions_[type],
                        new System.IO.MemoryStream(sample.data),
                        0,
                        sample.data.Length,
                        (long)sample.time,
                        mediaSampleAttributes);
                    mediaStreamSamples_[type].Add(sample2);
                }
                else
                {
                    if (ec == Error.would_block)
                    {
                        if (pending_)
                        {
                            System.Threading.Thread.Sleep(100);
                            continue;
                        }
                        else
                        {
                            pending_ = true;
                            System.Threading.ThreadPool.QueueUserWorkItem(
                                GetSamplePending, mediaStreamType);
                        }
                    }
                    else if (ec == Error.stream_end)
                    {
                        ReportGetSampleCompleted(null);
                    }
                    else
                    {
                        ErrorOccurred(ec.ToString());
                    }
                    return;
                }
            }

            pending_ = false;

            MediaStreamSample sample3 = mediaStreamSamples_[mediaStreamType][0];
            mediaStreamSamples_[mediaStreamType].RemoveAt(0);
            ReportGetSampleCompleted(sample3);
        }
Example #5
	protected override void GetSampleAsync(MediaStreamType mediaStreamType)
	{
		int blocksPlayed = this.Asap.GetBlocksPlayed();
		int bufferLen = this.Asap.Generate(buffer, buffer.Length, BitsPerSample == 8 ? ASAPSampleFormat.U8 : ASAPSampleFormat.S16LE);
		Stream s = bufferLen == 0 ? null : new MemoryStream(buffer);
		MediaStreamSample mss = new MediaStreamSample(this.MediaStreamDescription, s, 0, bufferLen,
			blocksPlayed * 10000000L / ASAP.SampleRate, SampleAttributes);
		ReportGetSampleCompleted(mss);
	}
Example #6
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     if (mediaStreamType != MediaStreamType.Audio)
         throw new InvalidOperationException ("Only audio stream type is supported");
     q.BeginGetNextSample ((result) => {
         var sample = q.EndGetNextSample (result);
         ArraySegment<byte> buf = sample.Buffer;
         position += ToTick (sample.Duration);
         var s = new MediaStreamSample (media_desc, new MemoryStream (buf.Array), buf.Offset, buf.Count, position, empty_atts);
         this.ReportGetSampleCompleted (s);
         }, null);
 }
Example #7
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            MemoryStream frame = new MemoryStream ();
            org.diracvideo.Jirac.Picture p = dec.Pull ();
            MediaStreamSample sample;
            int [] pixels;

            p.Decode ();
            pixels = p.GetImage ();

            foreach (int i in pixels)
                frame.Write (BitConverter.GetBytes (i), 0, 4);

            sample = new MediaStreamSample (streamDescription, frame, 0, frame.Length, timestamp, empty_dict);

            timestamp += 50;

            ReportGetSampleCompleted(sample);
        }
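
Writing one pixel at a time through BitConverter allocates a 4-byte array per pixel; a single Buffer.BlockCopy over the whole int[] yields the same bytes (machine byte order in both cases) in one call. A sketch of that alternative, reusing the names from the example:

            // Copy all decoded pixels into one byte buffer instead of
            // calling BitConverter.GetBytes once per pixel.
            byte[] frameBytes = new byte[pixels.Length * 4];
            Buffer.BlockCopy(pixels, 0, frameBytes, 0, frameBytes.Length);
            MemoryStream frame = new MemoryStream(frameBytes);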
Example #8
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            if (_frameStreamOffset + _frameBufferSize > _frameStreamSize)
            {
                _frameStream.Seek(0, SeekOrigin.Begin);
                _frameStreamOffset = 0;
            }

            //_cameraBuffer.NewViewfinderFrame(_cameraData, _cameraFilteredData);

            _dev.GetPreviewBufferArgb(_cameraData);
            Buffer.BlockCopy(_cameraData, 0, _cameraFilteredData, 0, _cameraFilteredData.Length);

            _frameStream.Write(_cameraFilteredData, 0, _frameBufferSize);

            MediaStreamSample msSamp = new MediaStreamSample(_videoStreamDescription, _frameStream, _frameStreamOffset,_frameBufferSize, _currentTime, _emptySampleDict);

            ReportGetSampleCompleted(msSamp);

            _currentTime += _frameTime;
            _frameStreamOffset += _frameBufferSize;
        }
Example #9
 public void Shutdown()
 {
     shutdownEvent.Set();
     lock (lockObj)
     {
         if (_outstandingGetVideoSampleCount > 0)
         {
             // ReportGetSampleCompleted must be called once for each GetSampleAsync call
             // to avoid a memory leak, so send an empty MediaStreamSample here.
             MediaStreamSample msSamp = new MediaStreamSample(
                 _videoDesc,
                 null,
                 0,
                 0,
                 0,
                 0,
                 _emptySampleDict);
             ReportGetSampleCompleted(msSamp);
             _outstandingGetVideoSampleCount = 0;
         }
     }
 }
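
The Shutdown method above only makes sense next to bookkeeping that counts outstanding sample requests. A hypothetical sketch of the counting side (field names assumed to match the example; the SendSamples example further down shows the matching decrement):

     protected override void GetSampleAsync(MediaStreamType mediaStreamType)
     {
         if (mediaStreamType == MediaStreamType.Video)
         {
             // Each request raises the count; a worker later pairs it with
             // exactly one ReportGetSampleCompleted call.
             lock (lockObj)
             {
                 _outstandingGetVideoSampleCount++;
             }
         }
     }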
Example #10
		protected override void GetSampleAsync(MediaStreamType eMediaStreamType)
        {
			Dictionary<MediaSampleAttributeKeys, string> emptyDict = new Dictionary<MediaSampleAttributeKeys, string>();
			MediaStreamSample cMediaStreamSample = null;
			NALUnit cNALUnit = null;
			lock (_cSyncRoot)
			{
				if (!_bCached && _nCurrentFrameVideo >= _nFramesBuffered)
				{
					this.ReportGetSampleProgress(0);
					if(MediaStreamType.Audio == eMediaStreamType)
						_bAudioFrameNeeded = true;
					else
						_bVideoFrameNeeded = true;
					return;
				}
				if (MediaStreamType.Audio == eMediaStreamType)
					_bAudioFrameNeeded = false;
				else
					_bVideoFrameNeeded = false;
			}
			if (eMediaStreamType == MediaStreamType.Audio)
			{
				try
				{
					lock (_cSyncRoot)
					{
						if (0 < _aAudioNALs.Count && _aAudioNALs.Count > _nFrameOffsetAudio)
							cNALUnit = _aAudioNALs[_nFrameOffsetAudio++];
					}
					if (null != cNALUnit)
					{
						cMediaStreamSample = new MediaStreamSample(_cMediaStreamAudioDescription, new MemoryStream(_cStream.Read(cNALUnit.nStart, cNALUnit.nBytesQty)), 0, cNALUnit.nBytesQty, (long)(_nCurrentFrameAudio * _nSampleDuration), emptyDict); //(long)(_nCurrentFrameAudio * _nSampleDuration)
						_nCurrentFrameAudio++;
					}
				}
				catch { }
				if(null == cMediaStreamSample)
					cMediaStreamSample = new MediaStreamSample(_cMediaStreamAudioDescription, null, 0, 0, 0, emptyDict);
			}
			else
			{
				try
				{
					lock (_cSyncRoot)
						if (0 < _aVideoNALs.Count && _aVideoNALs.Count > _nFrameOffsetVideo)
							cNALUnit = _aVideoNALs[_nFrameOffsetVideo++];
					if (null != cNALUnit)
						cMediaStreamSample = new MediaStreamSample(_cMediaStreamVideoDescription, new MemoryStream(_cStream.Read(cNALUnit.nStart, cNALUnit.nBytesQty)), 0, cNALUnit.nBytesQty, (long)(_nCurrentFrameVideo * _nFrameDuration), emptyDict); //(long)(_nCurrentFrameVideo * _nFrameDuration)
					if (null == cNALUnit || cNALUnit.bFrameStart)
						_nCurrentFrameVideo++;
				}
				catch { }
				if (null == cMediaStreamSample)
					cMediaStreamSample = new MediaStreamSample(_cMediaStreamVideoDescription, null, 0, 0, 0, emptyDict);
			}
			this.ReportGetSampleCompleted(cMediaStreamSample);
		}
Example #11
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            for (var i = 0; i < NumberOfSamples; i++)
            {
                var sample = muteSamples ? new Sample() : pitchGenerator.GetSample();

                binaryWriter.Write(sample.LeftChannel);
                binaryWriter.Write(sample.RightChannel);
            }

            var mediaStreamSample = new MediaStreamSample(streamDescription, binaryWriter.BaseStream, currentPosition,
                                                          BufferByteLength, 0, OtherAttributes);

            currentPosition += BufferByteLength;

            ReportGetSampleCompleted(mediaStreamSample);
        }
Example #12
        protected void ReportGetSampleCompleted(MediaStreamSample mediaStreamSample)
        {
            IntPtr frame;
            IntPtr buffer;
            uint   buflen;

            byte [] buf;
            int     height;
            int     width;

            if (closed)
            {
                return;
            }

            // FIXME: wrong/overzealous validations wrt SL2 (see unit tests)
            if (media_element == null || demuxer == IntPtr.Zero)
            {
                throw new InvalidOperationException();
            }

            // A null value / stream means the end has been reached.
            if ((mediaStreamSample == null) || (mediaStreamSample.Stream == null))
            {
                NativeMethods.imedia_demuxer_report_get_frame_completed(demuxer, IntPtr.Zero);
                return;
            }

            if (mediaStreamSample.MediaStreamDescription.NativeStream == IntPtr.Zero)
            {
                throw new InvalidOperationException();
            }

            // TODO:
            // Fix this to not copy the data twice and have 3 managed/unmanaged switches.
            // The best would probably be to have the pipeline/mediaframe accept an IMediaStream as the
            // buffer, this however requires changes in every demuxer/codecs we have.

            buflen = (uint)mediaStreamSample.Count;
            buf    = new byte [buflen];
            mediaStreamSample.Stream.Seek(mediaStreamSample.Offset, System.IO.SeekOrigin.Begin);
            mediaStreamSample.Stream.Read(buf, 0, (int)buflen);

            // We pass a hardcoded true as the keyframe flag here. User code can lie and
            // not set the keyframe flag on any frame at all. Our pipeline doesn't work well
            // in that case (seeking in particular: we seek to keyframes, and when
            // there are no keyframes...). Since we can't rely on the keyframe
            // flag being set at all, just lie in the way that suits our pipeline best.
            frame = NativeMethods.media_frame_new(mediaStreamSample.MediaStreamDescription.NativeStream, IntPtr.Zero, buflen, (ulong)mediaStreamSample.Timestamp, true);

            if (NativeMethods.media_frame_allocate_buffer(frame, buflen, 16))
            {
                buffer = NativeMethods.media_frame_get_buffer(frame);
                Marshal.Copy(buf, 0, buffer, (int)buflen);
            }

            if (mediaStreamSample.Attributes.ContainsKey(MediaSampleAttributeKeys.FrameHeight))
            {
                if (int.TryParse(mediaStreamSample.Attributes [MediaSampleAttributeKeys.FrameHeight], out height))
                {
                    NativeMethods.media_frame_set_demuxer_height(frame, height);
                }
            }

            if (mediaStreamSample.Attributes.ContainsKey(MediaSampleAttributeKeys.FrameWidth))
            {
                if (int.TryParse(mediaStreamSample.Attributes [MediaSampleAttributeKeys.FrameWidth], out width))
                {
                    NativeMethods.media_frame_set_demuxer_width(frame, width);
                }
            }

            NativeMethods.imedia_demuxer_report_get_frame_completed(demuxer, frame);

            NativeMethods.event_object_unref(frame);
        }
Example #13
		protected void ReportGetSampleCompleted (MediaStreamSample mediaStreamSample)
		{
			IntPtr frame;
			IntPtr buffer;
			uint buflen;
			byte [] buf;
			
			// FIXME: wrong/overzealous validations wrt SL2 (see unit tests)
			if (closed || media_element == null || demuxer == IntPtr.Zero)
				throw new InvalidOperationException ();
			
			// A null value / stream means the end has been reached.
			if ((mediaStreamSample == null) || (mediaStreamSample.Stream == null)) {
				NativeMethods.imedia_demuxer_report_get_frame_completed (demuxer, IntPtr.Zero);
				return;
			}

			if (mediaStreamSample.MediaStreamDescription.NativeStream == IntPtr.Zero)
				throw new InvalidOperationException ();
			
			// TODO:
			// Fix this to not copy the data twice and have 3 managed/unmanaged switches.
			// The best would probably be to have the pipeline/mediaframe accept an IMediaStream as the 
			// buffer, this however requires changes in every demuxer/codecs we have.
			
			buflen = (uint) mediaStreamSample.Count;
			buf = new byte [buflen];
			mediaStreamSample.Stream.Seek (mediaStreamSample.Offset, System.IO.SeekOrigin.Begin);
			mediaStreamSample.Stream.Read (buf, 0, (int) buflen);
			
			buffer = Marshal.AllocHGlobal ((int) buflen);
			Marshal.Copy (buf, 0, buffer, (int) buflen);
			
			// We pass a hardcoded true as the keyframe flag here. User code can lie and
			// not set the keyframe flag on any frame at all. Our pipeline doesn't work well
			// in that case (seeking in particular: we seek to keyframes, and when
			// there are no keyframes...). Since we can't rely on the keyframe
			// flag being set at all, just lie in the way that suits our pipeline best.
			frame = NativeMethods.media_frame_new (mediaStreamSample.MediaStreamDescription.NativeStream, buffer, buflen, (ulong) mediaStreamSample.Timestamp, true);
			
			NativeMethods.imedia_demuxer_report_get_frame_completed (demuxer, frame);
			
			NativeMethods.event_object_unref (frame);
		}
Example #14
        private void SendSamples()
        {
            while (_sampleQueue.Count() > 0 && _outstandingGetVideoSampleCount > 0)
            {
                if (!(shutdownEvent.WaitOne(0)))
                {
                    VideoSample vs = _sampleQueue.Dequeue();
                    Stream s = System.Runtime.InteropServices.WindowsRuntime.WindowsRuntimeBufferExtensions.AsStream(vs.buffer);

                    // Send out the next sample
                    MediaStreamSample msSamp = new MediaStreamSample(
                        _videoDesc,
                        s,
                        0,
                        s.Length,
                        (long)vs.hnsPresentationTime,
                        (long)vs.hnsSampleDuration,
                        _emptySampleDict);

                    ReportGetSampleCompleted(msSamp);
                    _outstandingGetVideoSampleCount--;
                }
                else
                {
                    // If video rendering is shutting down we should no longer deliver frames
                    return;
                }
            }
        }
Example #15
        /// <summary>
        /// Processes camera frameBuffer using the set effect and provides media element with a filtered frameBuffer.
        /// </summary>
        protected async override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            if (m_initFPSComputation)
            {
                FPS = -1;
                m_startTime = DateTime.Now;
                m_nbPicture = 0;
                m_initFPSComputation = false;
            }

            if (_camera != null)
            {
                _camera.GetPreviewBufferYCbCr(_cameraFrameBuffer);
            }

            if (_updateEffect)
            {
                if (_effect != null && _effect is IDisposable)
                {
                    (_effect as IDisposable).Dispose();
                    _effect = null;
                }
                _effect = RecipeFactory.Current.CreatePipeline(_source);
                _renderer.Source = _effect;
            }
            _updateEffect = false;

            await _renderer.RenderAsync();

            if (_frameStream != null)
            {
                _frameStream.Position = 0;
                _currentTime += _frameTime;

                var sample = new MediaStreamSample(_videoStreamDescription, _frameStream, _frameStreamOffset, _frameBufferSize, _currentTime, _emptyAttributes);

                ReportGetSampleCompleted(sample);
            }

            ++m_nbPicture;
            var t = DateTime.Now;
            if (m_nbPicture > 10 && t.Subtract(m_startTime).TotalMilliseconds > 1000)
            {
                FPS = m_nbPicture / t.Subtract(m_startTime).TotalSeconds;
                m_startTime = t;
                m_nbPicture = 0;

            }

        }
Example #16
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            for (int i = 0; i < numSamples; i++)
            {

                if (this.index < this.StartPoint ||
                    this.index > this.EndPoint)
                    this.index = this.StartPoint;

                memoryStream.WriteByte(this.sourceData[index]);
                memoryStream.WriteByte(this.sourceData[index + 1]);
                memoryStream.WriteByte(this.sourceData[index + 2]);
                memoryStream.WriteByte(this.sourceData[index + 3]);

                index += 4;
            }

            MediaStreamSample mediaStreamSample =
                new MediaStreamSample(
                    mediaStreamDescription,
                    memoryStream,
                    currentPosition,
                    bufferByteCount,
                    currentTimeStamp,
                    emptySampleDict);

            currentTimeStamp += bufferByteCount * 10000000L / byteRate;
            currentPosition += bufferByteCount;

            ReportGetSampleCompleted(mediaStreamSample);
        }
Example #17
        private void SubmitAudioSample(AudioSample audioSample)
        {
            Stream sampleStream = WindowsRuntimeBufferExtensions.AsStream(audioSample.buffer);

            // Send out the next LPCM sample
            MediaStreamSample mediaStreamSamp = new MediaStreamSample(
                audioDesc,
                sampleStream,
                0,
                sampleStream.Length,
                (long)frameNumber, // FIXME?
                emptySampleDict);

            Debug.WriteLine("Submitting audio samples");
            ReportGetSampleCompleted(mediaStreamSamp);
        }
Example #18
        /// <summary>
        /// Parses the next sample from the requested stream and then calls ReportGetSampleCompleted
        /// to inform its parent MediaElement of the next sample.
        /// </summary>
        /// <param name="mediaStreamType">
        /// Should always be Audio for this MediaStreamSource.
        /// </param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            System.Diagnostics.Debug.WriteLine(System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + ":  GetSampleAsync()");

            // If the MSS has been disposed, but the player has not been stopped, this will force a stop by returning an empty sample.
            if (this.audioStream == null)
            {
                System.Diagnostics.Debug.WriteLine(System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + ":  Race condition #1 handled!");
                this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, ShoutcastMediaStreamSource.emptyDict));
                return;
            }

            if (this.workerException != null)
            {
                System.Diagnostics.Debug.WriteLine(System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + ":  Error #1 handled!");
                this.CleanupAudioStream();
                this.ErrorOccurred(this.workerException.Message);
                return;
            }

            // See if we need to report buffering.
            int bufferingPercentage = this.audioStream.BufferingPercentage;
            while (bufferingPercentage < 100)
            {
                System.Diagnostics.Debug.WriteLine(System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + ":  Buffering percentage less than 100");
                this.ReportGetSampleProgress(bufferingPercentage / 100.0d);

                // DANGER WILL ROBINSON!!! DANGER!!!
                // This line causes a race condition, as Thread.Sleep() causes the current thread to give up its time slice.  If the next thread scheduled to run is a thread that
                // is calling Dispose, our audio stream can be null, so we need to check after we wake up.  If so, we need to return an empty audio sample to shut everything down
                // properly.
                System.Threading.Thread.Sleep(10);

                // If the MSS has been disposed, but the player has not been stopped, this will force a stop by returning an empty sample.
                if (this.audioStream == null)
                {
                    System.Diagnostics.Debug.WriteLine(System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + ":  Race condition #2 handled!");
                    this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, ShoutcastMediaStreamSource.emptyDict));
                    return;
                }

                if (this.workerException != null)
                {
                    System.Diagnostics.Debug.WriteLine(System.Threading.Thread.CurrentThread.ManagedThreadId.ToString() + ":  Error #2 handled!");
                    this.ErrorOccurred(this.workerException.Message);
                    return;
                }

                bufferingPercentage = this.audioStream.BufferingPercentage;
            }

            try
            {
                System.Diagnostics.Debug.WriteLine("ReportGetSampleCompleted()");
                MediaStreamSample audioSample = new MediaStreamSample(
                        this.audioStreamDescription,
                        this.audioStream,
                        0,
                        this.audioStream.CurrentFrameSize,
                        this.currentTimestamp,
                        ShoutcastMediaStreamSource.emptyDict);

                this.currentTimestamp += this.audioStream.WaveFormat.AudioDurationFromBufferSize((uint)this.audioStream.CurrentFrameSize);
                this.ReportGetSampleCompleted(audioSample);
            }
            catch (Exception ex)
            {
                this.ErrorOccurred(ex.Message);
            }
        }
Example #19
        void DemuxBuffer_VideoSamplesAvailable(object sender, EventArgs e)
        {
            GenericMediaFrame frame = null;

            lock (DemuxBuffer.VideoFrames) {
                if (DemuxBuffer.VideoFrames.Count > 0) {
                    while (frame == null) {
                        frame = DemuxBuffer.VideoFrames.Dequeue();
                    }
                } else {
                    ReportEndOfMedia(MediaStreamType.Video);
                    return;
                }
            }

            TimeSpan when = TimeSpan.FromTicks((long)frame.FramePresentationTime);

            var flags = new Dictionary<MediaSampleAttributeKeys, string>();
            var ms = new MemoryStream(frame.FrameData);

            try {
                var samp = new MediaStreamSample(
                        this.videoStreamDescription,
                        ms, 0, frame.FrameData.Length,
                        frame.FramePresentationTime,
                        flags);

                ReportGetSampleProgress(1.0);
                ReportGetSampleCompleted(samp);
            } catch { }
        }
Example #20
        private void SubmitVideoSample(VideoSample videoSample)
        {
            Stream sampleStream = WindowsRuntimeBufferExtensions.AsStream(videoSample.buffer);

            // Send out the next NAL
            MediaStreamSample mediaStreamSamp = new MediaStreamSample(
                videoDesc,
                sampleStream,
                0,
                sampleStream.Length,
                (long)videoSample.frameNumber,
                emptySampleDict);

            Debug.WriteLine("Submitting video samples");
            ReportGetSampleCompleted(mediaStreamSamp);
        }
Example #21
        /// <summary>
        /// Parses the next sample from the requested stream and then calls ReportGetSampleCompleted
        /// to inform its parent MediaElement of the next sample.
        /// </summary>
        /// <param name="mediaStreamType">
        /// Should always be Audio for this MediaStreamSource.
        /// </param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            Dictionary<MediaSampleAttributeKeys, string> emptyDict = new Dictionary<MediaSampleAttributeKeys, string>();
            MediaStreamSample audioSample = null;

            if (this.currentFrame != null)
            {
                // Calculate our current position based on the stream's length
                //// double ratio = (double)this.currentFrameStartPosition / (double)this.audioStreamLength;
                //// TimeSpan currentPosition = new TimeSpan((long)(this.trackDuration.Ticks * ratio));

                // Calculate our current position instead based on the bitrate of the stream (more accurate?)
                double position = (double)this.currentFrameStartPosition / (double)this.currentFrame.Bitrate;
                TimeSpan currentPosition = TimeSpan.FromSeconds(position * 8 /* bits per Byte */);

                // Create a MemoryStream to hold the bytes
                // FrameSize includes the frame header which we've already read from the previous iteration, so just copy the
                // header, and then read the remaining bytes
                this.currentFrame.CopyHeader(buffer);
                int audioSampleSize = this.currentFrame.FrameSize - MpegFrame.FrameHeaderSize;
                int c = this.audioStream.Read(buffer, MpegFrame.FrameHeaderSize, audioSampleSize);
                if (c != audioSampleSize)
                {
                    // Ran out of bytes trying to read MP3 frame.
                    this.currentFrame = null;
                    audioSample = new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, emptyDict);
                    this.ReportGetSampleCompleted(audioSample);
                    return;
                }

                this.currentFrameStartPosition += c;
                using (MemoryStream audioFrameStream = new MemoryStream(buffer))
                {
                    // Return the next sample in the stream
                    audioSample = new MediaStreamSample(this.audioStreamDescription, audioFrameStream, 0, this.currentFrame.FrameSize, currentPosition.Ticks, emptyDict);
                    this.ReportGetSampleCompleted(audioSample);

                    // Grab the next frame
                    MpegFrame nextFrame = new MpegFrame(this.audioStream);
                    if ( (nextFrame.Version == 1 || nextFrame.Version == 2) && nextFrame.Layer == 3)
                    {
                        this.currentFrameStartPosition += MpegFrame.FrameHeaderSize;
                        this.currentFrame = nextFrame;
                    }
                    else
                    {
                        this.currentFrame = null;
                    }
                }
            }
            else
            {
                // We're near the end of the file, or we got an irrecoverable error.
                // Return a null stream which tells the MediaStreamSource & MediaElement to shut down
                audioSample = new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, emptyDict);
                this.ReportGetSampleCompleted(audioSample);
            }
        }
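
The position arithmetic above divides the bytes read so far by the bitrate and multiplies by 8 bits per byte to get seconds. A worked example with illustrative numbers:

            // At 128,000 bits/s, 16,000 bytes = 128,000 bits, i.e. exactly one second.
            long frameStartPosition = 16000;   // bytes read so far (illustrative)
            int bitrate = 128000;              // bits per second
            double seconds = (double)frameStartPosition / bitrate * 8;  // 1.0
            TimeSpan currentPosition = TimeSpan.FromSeconds(seconds);   // 00:00:01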
Example #22
        private MediaStreamSample GetAudioSample2()
        {
            MediaStreamSample sample = new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, emptyDict);

            if (m_AmediaStreamEnd)
                return sample;

            if (aIdx > (aNum - 1))
            {
                // free current video block
                m_curAudioBlk = null;
                m_abuffer.WaitForWorkItem();

                m_curAudioBlk = (MSF.AudioBlock)m_abuffer.Dequeue().CommandParameter ;
                if (m_curAudioBlk != null)
                {
                    if (m_abuffer.Count() == 0)
                        m_aBufferFullEvent.Reset();

                    aIdx = 0;
                    aNum = (int)m_curAudioBlk.AudioFrameNum;
                }
                else
                {
                    m_AmediaStreamEnd = true;
                    return sample;
                }
            }

            MemoryStream aStream = new MemoryStream();
            // LATM framing: the first byte is the length, so skip it and shorten the length by 1.
            int len = (int)m_curAudioBlk.aFrames[aIdx].Length - 1;
            aStream.Write(m_curAudioBlk.aFrames[aIdx]._data, 1, len);

            MediaStreamSample mediaStreamSample = new MediaStreamSample
                (
                this.audioStreamDescription,
                aStream,
                0,
                len,
                m_curAudioBlk.aFrames[aIdx].RelativeTime,   // in 100-nanosecond units
                emptyDict
                );
            aIdx++;

            return mediaStreamSample;
        }
Example #23
        /// <summary>
        /// Processes camera frameBuffer using the set effect and provides media element with a filtered frameBuffer.
        /// </summary>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            var task = _cameraEffect.GetNewFrameAndApplyEffect(_frameBuffer.AsBuffer(), _frameSize);

            // When the asynchronous call completes, report that the sample is ready.

            task.ContinueWith((action) =>
            {
                if (_frameStream != null)
                {
                    _frameStream.Position = 0;
                    _currentTime += _frameTime;
                    _frameCount++;

                    var sample = new MediaStreamSample(_videoStreamDescription, _frameStream, _frameStreamOffset, _frameBufferSize, _currentTime, _emptyAttributes);

                    ReportGetSampleCompleted(sample);
                }
            });
        }
Example #24
        private void GetAudioSample(bool bDirect)
        {
            lock (GetAudioSampleFunctionLock)
            {
                if ( (bDirect == false) && (HostWaitingCount <= 0))
                    return;

                int nPackets = 1;
                MemoryStream ReceivedAudioStream = new MemoryStream();

                lock (AudioPacketLock)
                {
                    while (AudioPackets.Count > 0)
                    {
                        byte[] bPacket = AudioPackets.Dequeue();
                        ReceivedAudioStream.Write(bPacket, 0, bPacket.Length);
                        nPackets++;
                    }

                }

                if (ReceivedAudioStream.Length <= 0) // nothing available; complete this request when the next sample arrives
                {
                    if (bDirect == true)
                        HostWaitingCount++;
                    return;
                }

                if (bDirect == false)
                    HostWaitingCount--;

                ReceivedAudioStream.Seek(0, SeekOrigin.Begin);

                // Send out the next sample
                MediaStreamSample msSamp = new MediaStreamSample(
                    MediaStreamDescription,
                    ReceivedAudioStream,
                    0,
                    ReceivedAudioStream.Length,
                    m_nTimeStamp,
                    EmptyDictionary);

                m_nTimeStamp += 400000 * nPackets; // render time in 100-nanosecond units
                ReportGetSampleCompleted(msSamp);
            }
        }
Example #25
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            uint bufferSize = (uint)AlignUp(
                this.wavParser.WaveFormatEx.AvgBytesPerSec,
                this.wavParser.WaveFormatEx.BlockAlign);

            bufferSize = Math.Min(bufferSize, (uint)this.wavParser.BytesRemainingInChunk);
            if (bufferSize > 0)
            {
                this.wavParser.ProcessDataFromChunk(bufferSize);

                MediaStreamSample sample = new MediaStreamSample(
                    this.audioDesc,
                    this.stream,
                    this.currentPosition,
                    bufferSize,
                    this.currentTimeStamp,
                    this.emptySampleDict);

                this.currentTimeStamp += this.wavParser.WaveFormatEx.AudioDurationFromBufferSize(bufferSize);
                this.currentPosition += bufferSize;

                ReportGetSampleCompleted(sample);
            }
            else
            {
                ReportGetSampleCompleted(new MediaStreamSample(this.audioDesc, null, 0, 0, 0, this.emptySampleDict));
            }
        }
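
AlignUp is used but not shown in the WAV examples; a typical implementation rounds a byte count up to the next multiple of the block alignment. A sketch under that assumption (not the original helper):

        // Round value up to the next multiple of alignment.
        private static int AlignUp(int value, int alignment)
        {
            return (value + alignment - 1) / alignment * alignment;
        }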
Example #26
        private MediaStreamSample GetVideoSample2()
        {
            //Utility.Trace(" GetVideoSample2 ");

            MediaStreamSample sample = new MediaStreamSample(this.videoStreamDescription, null, 0, 0, 0, emptyDict);
            if (m_VmediaStreamEnd)
            {
                Utility.Trace(" GetVideoSample2 NULL stream has been sent!");
                return sample;
            }
            if (vIdx > (fNum - 1))
            {
                // free current video block and request for next video block
                m_curVideoBlk = null;

                Utility.Trace(" GetVideoSample,m_vbuffer WaitForWorkItem ");
                m_vbuffer.WaitForWorkItem();
                Utility.Trace(" GetVideoSample,m_vbuffer Got an Item ");

                m_curVideoBlk = (MSF.VideoBlock)m_vbuffer.Dequeue().CommandParameter;

                if (m_curVideoBlk != null)
                {
                    Utility.Trace(String.Format(" GetVideoSample,m_vbuffer.count = {0} ", m_vbuffer.Count()));

                    if (m_vbuffer.Count() == 0)
                        m_vBufferFullEvent.Reset();

                    vIdx = 0;
                    fNum = (int)m_curVideoBlk.VideoFrameNum;
                }
                else
                {
                    m_VmediaStreamEnd = true;
                    Utility.Trace(" GetVideoSample2 NULL stream has been sent!");
                    return sample;
                }

            }

            MemoryStream vStream = new MemoryStream();
            int len = (int)m_curVideoBlk.vFrames[vIdx].Length;
            vStream.Write(m_curVideoBlk.vFrames[vIdx]._data, 0, len);

            MediaStreamSample mediaStreamSample = new MediaStreamSample
                (
                this.videoStreamDescription,
                vStream,
                0,
                len,
                m_curVideoBlk.vFrames[vIdx].RelativeTime,   // in 100-nanosecond units
                emptyDict
                );
            vIdx++;

            return mediaStreamSample;
        }
Example #27
        /// <summary>
        /// Return the next sample requested
        /// </summary>
        /// <param name="mediaStreamType">The stream type that we are getting a sample for</param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Start with one second of data, rounded up to the nearest block.
            uint bufferSize = (uint)AlignUp(
                this.wavParser.WaveFormatEx.AvgBytesPerSec,
                this.wavParser.WaveFormatEx.BlockAlign);

            // Figure out how much data we have left in the chunk compared to the
            // data that we need.
            bufferSize = Math.Min(bufferSize, this.wavParser.BytesRemainingInChunk);
            if(bufferSize > 0)
            {
                this.wavParser.ProcessDataFromChunk(bufferSize);

                // Send out the next sample
                MediaStreamSample sample = new MediaStreamSample(
                    this.audioDesc,
                    this.stream,
                    this.currentPosition,
                    bufferSize,
                    this.currentTimeStamp,
                    this.emptySampleDict);

                // Move our timestamp and position forward
                this.currentTimeStamp += this.wavParser.WaveFormatEx.AudioDurationFromBufferSize(bufferSize);
                this.currentPosition += bufferSize;

                // Loop forever
                if(Loop && this.wavParser.BytesRemainingInChunk == 0)
                {
                    this.wavParser.MoveToStartOfChunk();
                    this.currentPosition = this.startPosition;
                }

                ReportGetSampleCompleted(sample);
            }
            else if(sendFakeEnding)
            {
                // Required to work around a bug in Silverlight with short samples that would otherwise only send
                // ReportGetSampleCompleted once (actually twice, due to a weird, non-debuggable thread!)
                // (Add Debug.WriteLine or Assert to see this happen)

                sendFakeEnding = false;
                MediaStreamSample sample = new MediaStreamSample(
                        this.audioDesc,
                        this.stream,
                        this.currentPosition,
                        bufferSize,
                        this.currentTimeStamp,
                        this.emptySampleDict);
                ReportGetSampleCompleted(sample);
            }
            else
            {
                // Report EOS
                ReportGetSampleCompleted(new MediaStreamSample(this.audioDesc, null, 0, 0, 0, this.emptySampleDict));
            }
        }
        /// <summary>
        /// Return the next sample requested
        /// </summary>
        /// <param name="mediaStreamType">The stream type that we are getting a sample for</param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Start with one second of data, rounded up to the nearest block.
            uint bufferSize = (uint)AlignUp(
                this.wavParser.WaveFormatEx.AvgBytesPerSec,
                this.wavParser.WaveFormatEx.BlockAlign);

            // Figure out how much data we have left in the chunk compared to the
            // data that we need.
            bufferSize = Math.Min(bufferSize, (uint)this.wavParser.BytesRemainingInChunk);
            if (bufferSize > 0)
            {
                this.wavParser.ProcessDataFromChunk(bufferSize);

                // Send out the next sample
                MediaStreamSample sample = new MediaStreamSample(
                    this.audioDesc,
                    this.stream,
                    this.currentPosition,
                    bufferSize,
                    this.currentTimeStamp,
                    this.emptySampleDict);

                // Move our timestamp and position forward
                this.currentTimeStamp += this.wavParser.WaveFormatEx.AudioDurationFromBufferSize(bufferSize);
                this.currentPosition += bufferSize;

                /* Uncomment to loop forever
                // If there are no more bytes in the chunk, start again from the beginning
                if (this.wavParser.BytesRemainingInChunk == 0)
                {
                    this.wavParser.MoveToStartOfChunk();
                    this.currentPosition = this.startPosition;
                }
                */

                ReportGetSampleCompleted(sample);
            }
            else
            {
                // Report EOS
                ReportGetSampleCompleted(new MediaStreamSample(this.audioDesc, null, 0, 0, 0, this.emptySampleDict));
            }
        }
        /// <summary>
        /// Do the actual ReportGetSampleCompleted work. Fires an event in case derived classes
        /// want to listen in.
        /// </summary>
        /// <param name="chunk">the chunk with our sample</param>
        /// <param name="mediaStreamSample">the sample we are reporting</param>
        private void DoReportGetSampleCompleted(MediaChunk chunk, MediaStreamSample mediaStreamSample)
        {
            ReportGetSampleCompleted(mediaStreamSample);
            GetSampleCompletedEventArgs args = new GetSampleCompletedEventArgs();
            args.Sample = mediaStreamSample;
            args.ChunkId = (chunk != null) ? chunk.ChunkId : -1;
            args.Bitrate = (chunk != null) ? chunk.Bitrate : 0;
            args.StreamId = (chunk != null) ? chunk.StreamId : -1;

            if (GetSampleCompleted != null && chunk != null)
            {
                GetSampleCompleted(this, args);
            }
        }
        private void ReportEndOfMedia(MediaStreamType mediaStreamType)
        {
            var flags = new Dictionary<MediaSampleAttributeKeys, string>();

            MediaStreamDescription msd = this.videoStreamDescription;
            if (mediaStreamType == MediaStreamType.Audio) msd = this.audioStreamDescription;
            try {
                var samp = new MediaStreamSample(msd, null, 0, 0, 0, flags);
                ReportGetSampleCompleted(samp);
            } catch { }
        }
        /// <summary>
        /// Handle a sample request
        /// </summary>
        /// <param name="elem">the work command we are handling</param>
        private void DoSample(WorkQueueElement elem)
        {
            DateTime enter = DateTime.Now;

            // Give the sample if available, nothing otherwise
            MediaStreamType mediaStreamType = (MediaStreamType)elem.CommandParameter;
            int mediaTypeIndex = (int)mediaStreamType;

            // Get the queue of chunks for this stream
            MediaChunkQueue mediaQueue = m_manifestInfo.GetStreamInfoForStreamType(mediaStreamType).Queue;
            int mediaTypeMask = mediaStreamType == MediaStreamType.Video ? 0x01 : 0x02;

            MediaChunk chunk = null;
            GetFrameData frameData = null;

            bool flag = true;
            while (flag)
            {
                // Get the current chunk from our media queue
                flag = false;
                chunk = mediaQueue.Current;
                if (chunk == null)
                {
                    // This is redundant code for the case where the native side calls twice after the end of
                    // media. It should not happen, but because we played with serialization on/off we keep it
                    // in place as defensive code.
                    DoReportGetSampleCompleted(null, new MediaStreamSample(m_manifestInfo.GetStreamInfoForStreamType(mediaStreamType).Description, null, 0, 0, 0, m_nullAttributes));
                    return;
                }

                // If we have not finished parsing the chunk yet, then we can't send the sample
                if (chunk.State == MediaChunk.ChunkState.Pending || chunk.State == MediaChunk.ChunkState.Loaded)
                {
                    // If the chunk is pending but hasn't been downloaded yet, then force a download
                    if (chunk.State == MediaChunk.ChunkState.Pending && chunk.Downloader == null)
                    {
                        if (chunk.Bitrate == 0)
                        {
                            // Silverlight failed to load the previous chunk (or to tell us it failed), and as a result we did not even start a new one
                            chunk.Bitrate = m_manifestInfo.GetStreamInfoForStreamType(chunk.MediaType).Bitrates[0];
                        }

                        Tracer.Trace(TraceChannel.Error, "Lost {0} in state {1} trying to load again.", chunk.Sid, chunk.State);
                        m_heuristics.ForceNextDownload(chunk.StreamId, chunk.ChunkId);
                        m_heuristics.ScheduleDownloads();
                    }

                    // Media chunk is not yet available, try again later
                    m_workQueue.Enqueue(elem);
                    ReportGetSampleProgress(chunk.DownloadPercent / 100.0f);

                    if ((m_bufferingStateMask & mediaTypeMask) == 0)
                    {
                        if (m_bufferingStateMask == 0)
                        {
                            FireBufferingStarted();
                        }

                        m_bufferingStateMask |= mediaTypeMask;
                    }

                    // Take a nap to give us some time to download. If it's already downloaded,
                    // then it just hasn't been parsed yet
                    if (chunk.DownloadPercent < 100)
                    {
                        Thread.Sleep(10);
                    }

                    if (chunk.SampleRequestsMissed++ == 0)
                    {
                        Tracer.Trace(
                            TraceChannel.Error,
                            "Chunk {0} is not available on sample request, chunk state {1}, downloader is {2}",
                            chunk.Sid,
                            chunk.State,
                            chunk.Downloader == null ? "null" : "not null");
                    }
                    else if (chunk.SampleRequestsMissed % 100 == 0)
                    {
                        Tracer.Trace(
                            TraceChannel.Error,
                            "Chunk {0} is not available for {3} seconds, chunk state {1}, downloader is {2}",
                            chunk.Sid,
                            chunk.State,
                            chunk.Downloader == null ? "null" : "not null",
                            chunk.SampleRequestsMissed / 100);
                    }
                    else if (chunk.SampleRequestsMissed >= (m_playbackInfo.IsPlaying ? 500 : 1500))
                    {
                        // After 5 seconds delay during play or 15 seconds while paused or stopped, move on to the next chunk.
                        if (chunk.Downloader != null)
                        {
                            chunk.Downloader.CancelDownload();
                        }

                        chunk.SampleRequestsMissed = 0;

                        m_consecutiveMissedChunks[mediaTypeIndex]++;
                        string msg = String.Format(CultureInfo.InvariantCulture, "Failed to load in time media chunk {0} ({1},{2}, #{3} in a row)", chunk.Sid, chunk.Bitrate, chunk.State, m_consecutiveMissedChunks[mediaTypeIndex]);

                        // If we have missed too many, then throw an error
                        if (m_consecutiveMissedChunks[mediaTypeIndex] >= Configuration.Playback.MaxMissingOrCorruptedChunks)
                        {
                            throw new AdaptiveStreamingException(msg);
                        }
                        else
                        {
                            Tracer.Trace(TraceChannel.Error, msg);
                        }

                        mediaQueue.MoveNext();

                        // No need to verify flag, if we hit end of stream we'll know in 10 milliseconds
                    }

                    DateTime exit2 = DateTime.Now;
                    if ((exit2 - enter).TotalSeconds > 20e-3)
                    {
                        Tracer.Trace(TraceChannel.Timing, "DoSample: long time: {0}", (exit2 - enter).TotalSeconds);
                    }

                    return;
                }
                else if (chunk.State != MediaChunk.ChunkState.Parsed)
                {
                    // We are not parsed, so flag us as missed
                    m_consecutiveMissedChunks[mediaTypeIndex]++;
                    string msg = String.Format(
                        CultureInfo.InvariantCulture,
                        "Failed to {0} media chunk {1} ({2}), #{3} in a row.",
                        chunk.State == MediaChunk.ChunkState.Error ? "download" : "parse",
                        chunk.Sid,
                        chunk.Bitrate,
                        m_consecutiveMissedChunks[mediaTypeIndex]);

                    if (m_consecutiveMissedChunks[mediaTypeIndex] >= Configuration.Playback.MaxMissingOrCorruptedChunks)
                    {
                        throw new AdaptiveStreamingException(msg);
                    }
                    else
                    {
                        Tracer.Trace(TraceChannel.Error, msg);
                    }

                    mediaQueue.MoveNext();
                    m_workQueue.Enqueue(elem);
                    return;
                }

                // If we get here, then we should have a frame. Try to get it from our parser
                try
                {
                    if ((frameData = chunk.GetNextFrame()) == null)
                    {
                        // We could not get a frame from our parser, so move to the next chunk
                        flag = mediaQueue.MoveNext();
                        if (!flag)
                        {
                            // Signal end of the stream
                            DoReportGetSampleCompleted(null, new MediaStreamSample(m_manifestInfo.GetStreamInfoForStreamType(mediaStreamType).Description, null, 0, 0, 0, m_nullAttributes));
                            return;
                        }
                    }
                }
                catch (ChunkNotParsedException)
                {
                    // We could not get a frame from our parser, so move to the next chunk
                    flag = mediaQueue.MoveNext();
                    if (!flag)
                    {
                        // Signal end of the stream
                        DoReportGetSampleCompleted(null, new MediaStreamSample(m_manifestInfo.GetStreamInfoForStreamType(mediaStreamType).Description, null, 0, 0, 0, m_nullAttributes));
                        return;
                    }
                }
            }

            if (chunk.SampleRequestsMissed > 0)
            {
                Tracer.Trace(TraceChannel.Error, "Chunk {0} was not available for {1} milliseconds", chunk.Sid, chunk.SampleRequestsMissed * 10);
            }

            // Since we have a chunk here, we can reset our missed requests
            chunk.SampleRequestsMissed = 0;
            m_consecutiveMissedChunks[mediaTypeIndex] = 0;

            // Update our buffering state
            if ((m_bufferingStateMask & mediaTypeMask) != 0)
            {
                m_bufferingStateMask &= ~mediaTypeMask;

                if (m_bufferingStateMask == 0)
                {
                    FireBufferingDone();
                }
            }

            // Notify everyone about the bitrate we are using
            FireOnPlayBitrateChange(chunk.MediaType, chunk.Bitrate, DateTime.Now);

            // Check to see if we have any DRM attributes
            Dictionary<MediaSampleAttributeKeys, string> sampleAttributes = new Dictionary<MediaSampleAttributeKeys, string>();
            if (frameData.DrmData != null)
            {
                sampleAttributes.Add(/*"XCP_MS_SAMPLE_DRM"*/ MediaSampleAttributeKeys.DRMInitializationVector, Convert.ToBase64String(frameData.DrmData));
            }

            // Create the sample that we send to the media element
            MediaStreamSample sample = new MediaStreamSample(
                m_manifestInfo.GetStreamInfoForStreamType(mediaStreamType).Description,
                chunk.DownloadedPiece,
                frameData.StartOffset,
                frameData.FrameSize,
                frameData.Timestamp,
                sampleAttributes);

            // Must call if chunk.GetNextFrame is called, which happens only here above
            mediaQueue.UpdateBufferSizes();

            // Report this sample to our heuristics module
            if (mediaStreamType == MediaStreamType.Video)
            {
                m_playbackInfo.SourceFramesPerSecond = chunk.FrameRate;
            }

            m_heuristics.OnSampleDelivered(mediaStreamType, chunk.ChunkId, chunk.Bitrate, frameData.Timestamp);

            // Respond to the media element
            DoReportGetSampleCompleted(chunk, sample);

            DateTime exit = DateTime.Now;
            if ((exit - enter).TotalSeconds > 20e-3)
            {
                Tracer.Trace(TraceChannel.Timing, "DoSample: long time: {0}", (exit - enter).TotalSeconds);
            }
        }