protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Supplies the MediaElement with the next camera frame: rewinds the
            // shared frame stream when appending one more frame would overflow
            // it, asks the camera effect for a fresh frame, and reports the
            // sample once the effect's task completes.
            CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;

            // Wrap around to the start of the backing stream when the next
            // frame would run past its end.
            if (frameStreamOffset + dataSource.FrameBufferSize > dataSource.FrameStreamSize)
            {
                dataSource.FrameStream.Seek(0, SeekOrigin.Begin);
                frameStreamOffset = 0;
            }

            Task tsk = dataSource.CameraEffect.GetNewFrameAndApplyEffect().AsTask();

            // Wait until the asynchronous call completes, then report to the
            // MediaElement that a new sample is ready.
            // NOTE(review): this continuation also runs when the task faults or
            // is cancelled — consider TaskContinuationOptions.OnlyOnRanToCompletion.
            tsk.ContinueWith((task) =>
            {
                dataSource.FrameStream.Position = 0;

                // NOTE(review): the stream position is reset to 0 but the sample
                // still advertises frameStreamOffset as its offset into the
                // stream — confirm this mismatch is intentional.
                MediaStreamSample msSamp = new MediaStreamSample(
                    videoStreamDescription,
                    dataSource.FrameStream,
                    frameStreamOffset,
                    dataSource.FrameBufferSize,
                    currentTime,
                    emptySampleDict);

                ReportGetSampleCompleted(msSamp);
                currentTime += frameTime;
                frameStreamOffset += dataSource.FrameBufferSize;
            });
        }
예제 #2
0
        /// <summary>
        /// Creates the frame reader for the target format and subscribes
        /// <see cref="OnFrameArrived"/>. The reported frame width is padded up
        /// to a multiple of 64.
        /// </summary>
        /// <returns>true when the reader was initialized, false on any failure.</returns>
        private async Task <bool> CreateFrameReader()
        {
            const MediaStreamType mediaStreamType = MediaStreamType.VideoRecord;
            CameraParameters cameraParameters = new CameraParameters(_cameraProfile);

            try
            {
                // Exactly one frame source is expected to expose the record stream.
                MediaFrameSource frameSource =
                    _mediaCapture.FrameSources.Values.Single(s => s.Info.MediaStreamType == mediaStreamType);

                MediaFrameFormat format = GetTargetFormat(frameSource, cameraParameters);
                await frameSource.SetFormatAsync(format);

                _frameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource, format.Subtype);
                _frameReader.FrameArrived += OnFrameArrived;

                // Record the negotiated dimensions; only the width is padded.
                FrameHeight = Convert.ToInt32(format.VideoFormat.Height);
                FrameWidth  = PadTo64(Convert.ToInt32(format.VideoFormat.Width));

                _logger.Log($"FrameReader initialized using {FrameWidth} x {FrameHeight}, frame rate: {format.FrameRate.Numerator} / {format.FrameRate.Denominator}, color format: {_format}");
            }
            catch (Exception exception)
            {
                _logger.LogError("Frame Reader could not be initialized");
                _logger.LogException(exception);
                return false;
            }

            return true;
        }
예제 #3
0
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Pull PCM samples from the upstream source (silence when no input
            // is attached) and emit them as little-endian 16-bit stereo.
            for (int i = 0; i < numSamples; i++)
            {
                StereoSample sample = (this.Input != null) ? this.Input.GetSample() : new StereoSample();

                // Left channel: low byte, then high byte.
                memoryStream.WriteByte((byte)(sample.LeftSample & 0xFF));
                memoryStream.WriteByte((byte)(sample.LeftSample >> 8));

                // Right channel: low byte, then high byte.
                memoryStream.WriteByte((byte)(sample.RightSample & 0xFF));
                memoryStream.WriteByte((byte)(sample.RightSample >> 8));
            }

            MediaStreamSample mediaStreamSample = new MediaStreamSample(
                mediaStreamDescription,
                memoryStream,
                currentPosition,
                bufferByteCount,
                currentTimeStamp,
                emptySampleDict);

            // Advance the timestamp (100-ns ticks) and the byte position.
            currentTimeStamp += bufferByteCount * 10000000L / byteRate;
            currentPosition  += bufferByteCount;

            ReportGetSampleCompleted(mediaStreamSample);
        }
예제 #4
0
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Deliver the oldest buffered video packet, or report progress so
            // the pipeline asks again once a packet arrives.
            lock (_syncRoot)
            {
                if (_pendingPackets.Count == 0)
                {
                    ++_waitingForPacket;

                    Debug.WriteLine("GetSample progress");
                    ReportGetSampleProgress(0);
                    return;
                }

                var frame = _pendingPackets.First.Value;
                _pendingPackets.RemoveFirst();

                // Scale the packet timestamp by 10,000 (presumably ms -> 100-ns ticks).
                long ts = frame.getTimeStamp() * 10000;

                // I-frames and P-frames carry different attribute dictionaries.
                var attrs = frame.isFrameTypeI() ? _iSampleAttrs : _pSampleAttrs;
                attrs[MediaSampleAttributeKeys.FrameWidth]  = frame.getWidth().ToString();
                attrs[MediaSampleAttributeKeys.FrameHeight] = frame.getHeight().ToString();

                Debug.WriteLine("GetSample completed");
                ReportGetSampleCompleted(new MediaStreamSample(_mediaStreamDescription,
                                                               new ARDroneVideoPacketStream(frame),
                                                               0,
                                                               frame.getDataLength(),
                                                               ts, attrs));
            }
        }
예제 #5
0
        //private int AlignUp(int a, int b)
        //{
        //    int tmp = a + b - 1;
        //    return tmp - (tmp % b);
        //}
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Generate one buffer of random noise and report it as the next
            // audio sample.
            int numSamples      = ChannelCount * 256;
            int bufferByteCount = BitsPerSample / 8 * numSamples;

            // Fill the stream with noise, one 16-bit sample at a time.
            for (int i = 0; i < numSamples; i++)
            {
                short noise = (short)_random.Next(short.MinValue, short.MaxValue);
                _stream.Write(BitConverter.GetBytes(noise), 0, sizeof(short));
            }

            // Package the freshly written bytes as the next sample.
            MediaStreamSample msSamp = new MediaStreamSample(
                _audioDesc,
                _stream,
                _currentPosition,
                bufferByteCount,
                _currentTimeStamp,
                _emptySampleDict);

            // Advance the timestamp and byte position past this buffer.
            _currentTimeStamp += _waveFormat.AudioDurationFromBufferSize((uint)bufferByteCount);
            _currentPosition  += bufferByteCount;

            ReportGetSampleCompleted(msSamp);
        }
예제 #6
0
        /// <summary>
        /// Selects the widest available resolution for the given stream type on
        /// the device. No-op when the device is null or exposes no video
        /// properties for that stream type.
        /// </summary>
        /// <param name="device">the capture device to configure (may be null)</param>
        /// <param name="type">the stream type whose properties are inspected</param>
        private async Task SetLargestResolution(MediaCapture device, MediaStreamType type)
        {
            if (device == null)
            {
                return;
            }

            // BUG FIX: the original did `resolutions[0] as VideoEncodingProperties`
            // (null when the first entry is e.g. ImageEncodingProperties, causing
            // an NRE below) and a hard cast in the foreach. OfType<> keeps only
            // the entries that actually carry Width/Height.
            var resolutions = device.VideoDeviceController
                                    .GetAvailableMediaStreamProperties(type)
                                    .OfType<VideoEncodingProperties>()
                                    .ToList();

            if (resolutions.Count == 0)
            {
                return;
            }

            // Pick the entry with the greatest width.
            VideoEncodingProperties largest = resolutions[0];
            foreach (VideoEncodingProperties candidate in resolutions)
            {
                if (largest.Width < candidate.Width)
                {
                    largest = candidate;
                }
            }

            await device.VideoDeviceController.SetMediaStreamPropertiesAsync(type, largest);
        }
        //#############################################################################################
        //###################################      private     ########################################


        //#############################################################################################
        /// <summary>
        /// Finds a source corresponding to the parameters of
        /// <see cref="Init(MediaStreamType, MediaFrameSourceKind)"/>. Sets
        /// <c>_selectedSourceInfo</c>/<c>_selectedGroup</c> to the first match,
        /// or leaves both null when nothing matches.
        /// </summary>
        /// <param name="streamType"> MediaStreamType object property </param>
        /// <param name="sourceKind"> MediaFrameSourceKind object property </param>
        private async Task FindSource(MediaStreamType streamType, MediaFrameSourceKind sourceKind)
        {
            var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync(); // list available sources

            // Indicate that no source is selected yet.
            _selectedGroup      = null;
            _selectedSourceInfo = null;

            foreach (var sourceGroup in frameSourceGroups)
            {
                foreach (var sourceInfo in sourceGroup.SourceInfos)
                {
                    // Take the first source matching both requested properties.
                    if (sourceInfo.MediaStreamType == streamType && sourceInfo.SourceKind == sourceKind)
                    {
                        _selectedSourceInfo = sourceInfo;
                        break;
                    }
                }

                // Remember the group the matching source belongs to.
                if (_selectedSourceInfo != null)
                {
                    _selectedGroup = sourceGroup;
                    break;
                }
            }

            // In case no source was found.
            // BUG FIX: corrected the debug message ("Source not find").
            if (_selectedSourceInfo == null)
            {
                System.Diagnostics.Debug.WriteLine("Source not found");
            }
        }
예제 #8
0
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Streams decoded audio buffers to the MediaElement; a sample with a
            // null stream signals end of stream.
            MediaStreamSample audioSample = null;

            if (!sample_enumerator.MoveNext ())
            {
                // If you are near the end of the file, return a null stream, which
                // tells the MediaStreamSource and MediaElement to close down.
                audioSample = new MediaStreamSample(
                    this.audioStreamDescription,
                    null,
                    0,
                    0,
                    0,
                    emptyDict);
                this.ReportGetSampleCompleted(audioSample);
            }
            else
            {
                // FIXME: Stream should not be created every time.
                SampleBuffer buf = (SampleBuffer) sample_enumerator.Current;
                // NOTE(review): the MemoryStream already starts at buf.Index, yet
                // buf.Index is also passed as the sample's offset into that stream
                // — confirm this double offset is intentional.
                audioSample = new MediaStreamSample(
                    this.audioStreamDescription,
                    new MemoryStream (buf.Data, buf.Index, buf.Count, false),
                    buf.Index,
                    buf.Count,
                    timePosition,
                    emptyDict);
                // Advance the timestamp (100-ns ticks) by the buffer's duration at
                // 44100 Hz, 2 channels, 2 bytes per sample.
                timePosition += buf.Count * 10000000 / (44100 * 2 * 2);
                this.ReportGetSampleCompleted(audioSample);
            }
        }
예제 #9
0
파일: Mp3Demuxer.cs 프로젝트: dfr0/moon
		protected override void GetSampleAsync (MediaStreamType mediaStreamType)
		{
			// Demux and report the next MP3 frame. On end of stream a null-stream
			// sample is reported so the pipeline shuts down cleanly; any other
			// failure reports null outright.
			Dictionary<MediaSampleAttributeKeys, string> attribs = new Dictionary<MediaSampleAttributeKeys, string> ();

			try {
				// Use a frame buffered by a previous call if one exists,
				// otherwise read the next frame from the stream.
				Mp3Frame frame = this.frame;
				if (frame != null)
					this.frame = null;
				else
					frame = Mp3Frame.Read (stream);

				MediaStreamSample sample = new MediaStreamSample (description, new MemoryStream (frame.data), 0, frame.data.Length, current_pts, attribs);

				// Advance the presentation timestamp by this frame's duration.
				current_pts += frame.Duration;

				ReportGetSampleCompleted (sample);
			} catch (System.IO.EndOfStreamException ex) {
				Console.WriteLine (ex);
				ReportGetSampleCompleted (new MediaStreamSample (description, null, 0, 0, 0, attribs));
			} catch (Exception ex) {
				Console.WriteLine (ex);
				ReportGetSampleCompleted (null);
			}
		}
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Copies numSamples 4-byte frames from the source buffer into the
            // output stream, looping within [StartPoint, EndPoint], then reports
            // the filled buffer as the next media sample.
            for (int i = 0; i < numSamples; i++)
            {
                // Wrap back to the loop start when the cursor leaves the window.
                // BUG FIX: use short-circuit || instead of bitwise | for the
                // boolean condition.
                if (this.index < this.StartPoint ||
                    this.index > this.EndPoint)
                {
                    this.index = this.StartPoint;
                }

                memoryStream.WriteByte(this.sourceData[index]);
                memoryStream.WriteByte(this.sourceData[index + 1]);
                memoryStream.WriteByte(this.sourceData[index + 2]);
                memoryStream.WriteByte(this.sourceData[index + 3]);

                index += 4;
            }

            MediaStreamSample mediaStreamSample =
                new MediaStreamSample(
                    mediaStreamDescription,
                    memoryStream,
                    currentPosition,
                    bufferByteCount,
                    currentTimeStamp,
                    emptySampleDict);

            // Advance the timestamp (100-ns ticks) and byte position.
            currentTimeStamp += bufferByteCount * 10000000L / byteRate;
            currentPosition  += bufferByteCount;

            ReportGetSampleCompleted(mediaStreamSample);
        }
예제 #11
0
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Report the next captured frame — or a canned empty frame when the
            // source has nothing new — stamped with elapsed wall-clock ticks.
            try
            {
                byte[] rawSample = frameSource.GetNextFrame();

                MemoryStream rawSampleStream;
                if (rawSample == null)
                {
                    rawSampleStream = new MemoryStream(emptyFrame);
                }
                else
                {
                    rawSampleStream      = new MemoryStream(rawSample);
                    LastPulseSubmittedAt = DateTime.Now;
                }

                var sample = new MediaStreamSample(
                    mediaStreamDescription,
                    rawSampleStream,
                    0,
                    rawSampleStream.Length,
                    (DateTime.Now - startTime).Ticks,
                    emptySampleDict);

                ReportGetSampleCompleted(sample);
            }
            catch (Exception ex)
            {
                ClientLogger.Debug(ex.ToString());
            }
        }
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     // Only video is served here; other stream types are ignored. The frame is
     // fetched asynchronously and reported from the controller's callback.
     try
     {
         if (mediaStreamType != MediaStreamType.Video)
         {
             return;
         }

         videoController.GetNextVideoFrame(ssrcId, frameStream =>
         {
             if (frameStream == null)
             {
                 // Nothing available this round; report nothing.
                 return;
             }

             // Send out the next sample
             frameStream.Position = 0;
             var msSamp = new MediaStreamSample(
                 videoDesc,
                 frameStream,
                 0,
                 frameStream.Length,
                 (DateTime.Now - startTime).Ticks,
                 emptySampleDict);
             ReportGetSampleCompleted(msSamp);
         });
     }
     catch (Exception ex)
     {
         ClientLogger.Debug(ex.ToString());
     }
 }
예제 #13
0
 // private DateTime _firstSampleRequestedAt = DateTime.MinValue;
 // private int _samplesRequested;
 /// <summary>
 /// Supplies the next audio sample: an empty frame when no audio controller is
 /// attached, otherwise whatever the controller produces next (delivered via
 /// the ReportSample callback).
 /// </summary>
 /// <param name="mediaStreamType">only MediaStreamType.Audio is handled</param>
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     try
     {
         // This source only serves audio requests.
         if (mediaStreamType != MediaStreamType.Audio)
         {
             return;
         }

         _logger.LogSampleRequested();

         if (AudioController == null)
         {
             // No controller yet: report an empty sample so playback keeps polling.
             ReportSample(new MemoryStream(0));
         }
         else
         {
             AudioController.GetNextAudioFrame(ReportSample);
         }
     }
     catch (Exception ex)
     {
         // BUG FIX: the original passed the method group `ex.ToString` instead
         // of invoking it, so the exception text was never formatted.
         ClientLogger.Debug(ex.ToString());
     }
 }
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Produce 512 stereo frames from the sample maker as little-endian
            // 16-bit PCM and report them as one media sample.
            const int numSamples = 512;
            int bufferByteCount  = channelCount * BitsPerSample / 8 * numSamples;

            for (int i = 0; i < numSamples; i++)
            {
                StereoSample stereoSample = this.SampleMaker.GetSample();

                // Left then right channel, low byte first.
                memoryStream.WriteByte((byte)(stereoSample.LeftSample & 0xFF));
                memoryStream.WriteByte((byte)(stereoSample.LeftSample >> 8));
                memoryStream.WriteByte((byte)(stereoSample.RightSample & 0xFF));
                memoryStream.WriteByte((byte)(stereoSample.RightSample >> 8));
            }

            // Send out the next sample
            var mediaStreamSample = new MediaStreamSample(
                mediaStreamDescription,
                memoryStream,
                currentPosition,
                bufferByteCount,
                currentTimeStamp,
                emptySampleDict);

            // Move the timestamp (100-ns ticks) and byte position forward.
            currentTimeStamp += bufferByteCount * 10000000L / byteRate;
            currentPosition  += bufferByteCount;

            ReportGetSampleCompleted(mediaStreamSample);
        }
예제 #15
0
        private void PopulateVideoDeviceProperties(MediaStreamType streamType, ComboBox comboBox, bool showFrameRate = true)
        {
            // Fills the combo box with the device's stream property sets for the
            // given stream type, highest resolution (then frame rate) first, and
            // pre-selects the entry whose text matches the cached webcam-quality
            // setting.

            // query all properties of the specified video stream type
            IEnumerable <StreamPropertiesHelper> allStreamProperties =
                mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(streamType)
                .Select(x => new StreamPropertiesHelper(x));

            // order them by resolution then frame rate
            allStreamProperties = allStreamProperties
                                  .OrderByDescending(x => x.Height * x.Width)
                                  .ThenByDescending(x => x.FrameRate);

            // populate the combo box with the entries
            foreach (var property in allStreamProperties)
            {
                var comboBoxItem = new ComboBoxItem();
                comboBoxItem.Content = property.GetFriendlyName(showFrameRate);
                comboBoxItem.Tag     = property;
                comboBox.Items.Add(comboBoxItem);
            }

            var settings = AppSettingsContainer.GetCachedSettings();

            // NOTE(review): the selection is restored from WebcamComboBox.Items
            // rather than the comboBox parameter populated above — confirm this
            // is intentional and not a copy-paste slip. The unchecked `as` cast
            // would also throw an NRE if a non-ComboBoxItem entry were present.
            comboBox.SelectedItem = WebcamComboBox.Items.Where(x => (x as ComboBoxItem).Content.ToString() == settings.WebcamQuality).FirstOrDefault();
        }
예제 #16
0
 /// <summary>
 /// Gets the list of media stream types supported by the given device.
 /// </summary>
 /// <param name="deviceIndex">index into the known device list</param>
 /// <returns>the supported stream type names, or null for an invalid index</returns>
 public async Task<string[]> GetMediaStreamTypes(int deviceIndex)
 {
     if (deviceIndex >= _Devices.Count)
     {
         // Reject an invalid device index.
         return null;
     }

     // Select the device and initialize capture against it.
     selectedId              = _Devices[deviceIndex].Id;
     _Settings.VideoDeviceId = selectedId;
     await _MediaCapture.InitializeAsync(_Settings);

     // A stream type counts as supported when the controller exposes at least
     // one property set for it. (VideoEncodingProperty vs. ImageEncodingProperty
     // still needs to be distinguished by the caller.)
     var candidates = new MediaStreamType[] { MediaStreamType.Photo, MediaStreamType.VideoPreview, MediaStreamType.VideoRecord };
     _MediaStreamTypes = new List<MediaStreamType>();
     foreach (var candidate in candidates)
     {
         if (_MediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(candidate).Count > 0)
         {
             _MediaStreamTypes.Add(candidate);
         }
     }

     // Project the supported types to their display names.
     var names = new List<string>();
     foreach (var supported in _MediaStreamTypes)
     {
         names.Add(supported.ToString());
     }
     return names.ToArray();
 }
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Copies numSamples 4-byte frames from the source buffer into the
            // output stream, looping within [StartPoint, EndPoint], then reports
            // the filled buffer as the next media sample.
            for (int i = 0; i < numSamples; i++)
            {
                // Wrap back to the loop start when the cursor leaves the window.
                // BUG FIX: use short-circuit || instead of bitwise | for the
                // boolean condition, and brace the single-statement body.
                if (this.index < this.StartPoint ||
                    this.index > this.EndPoint)
                {
                    this.index = this.StartPoint;
                }

                memoryStream.WriteByte(this.sourceData[index]);
                memoryStream.WriteByte(this.sourceData[index + 1]);
                memoryStream.WriteByte(this.sourceData[index + 2]);
                memoryStream.WriteByte(this.sourceData[index + 3]);

                index += 4;
            }

            MediaStreamSample mediaStreamSample =
                new MediaStreamSample(
                    mediaStreamDescription,
                    memoryStream,
                    currentPosition,
                    bufferByteCount,
                    currentTimeStamp,
                    emptySampleDict);

            // Advance the timestamp (100-ns ticks) and byte position.
            currentTimeStamp += bufferByteCount * 10000000L / byteRate;
            currentPosition += bufferByteCount;

            ReportGetSampleCompleted(mediaStreamSample);
        }
예제 #18
0
 /// <summary>
 /// Initializes a new instance of the MediaChunk class.
 /// </summary>
 /// <param name="chunkId">the id of this chunk</param>
 /// <param name="mediaType">the media type of this chunk</param>
 /// <param name="streamId">the id of the stream this chunk is contained in</param>
 public MediaChunk(int chunkId, MediaStreamType mediaType, int streamId)
 {
     // Nothing downloaded for this chunk yet.
     DownloadedPiece = null;

     StreamId  = streamId;
     MediaType = mediaType;
     ChunkId   = chunkId;
 }
예제 #19
0
        protected override void GetSampleAsync(
            MediaStreamType mediaStreamType)
        {
            // Pumps demuxed samples into per-stream queues until one is available
            // for the requested stream, then reports it. The ordering of the
            // queue check, the pending_ flag and the ThreadPool re-issue is
            // load-bearing; do not reorder.
            while (mediaStreamSamples_[mediaStreamType].Count == 0)
            {
                Sample sample;
                Error ec = demuxer_.get_sample(out sample);
                if (ec == Error.success)
                {
                    Dictionary<MediaSampleAttributeKeys, string> mediaSampleAttributes =
                        new Dictionary<MediaSampleAttributeKeys, string>();
                    if (sample.is_sync)
                    {
                        // Mark sync points so the pipeline can seek to key frames.
                        mediaSampleAttributes[MediaSampleAttributeKeys.KeyFrameFlag] = bool.TrueString;
                    }
                    // The demuxer tags each sample with its stream index; queue it
                    // under that stream (it may not be the requested stream).
                    MediaStreamType type = mediaStreamTypes_[(int)sample.index];
                    MediaStreamSample sample2 = new MediaStreamSample(
                        mediaStreamDescriptions_[type],
                        new System.IO.MemoryStream(sample.data),
                        0,
                        sample.data.Length,
                        (long)sample.time,
                        mediaSampleAttributes);
                    mediaStreamSamples_[type].Add(sample2);
                }
                else
                {
                    if (ec == Error.would_block)
                    {
                        if (pending_)
                        {
                            // Already in the deferred retry path: poll with a short
                            // sleep. NOTE(review): this busy-waits the worker thread.
                            System.Threading.Thread.Sleep(100);
                            continue;
                        }
                        else
                        {
                            // First would_block: retry the whole request later on a
                            // ThreadPool thread instead of blocking the caller.
                            pending_ = true;
                            System.Threading.ThreadPool.QueueUserWorkItem(
                                GetSamplePending, mediaStreamType);
                        }
                    }
                    else if (ec == Error.stream_end)
                    {
                        // A null sample signals end of stream to the MediaElement.
                        ReportGetSampleCompleted(null);
                    }
                    else
                    {
                        ErrorOccurred(ec.ToString());
                    }
                    return;
                }
            }

            pending_ = false;

            // Deliver the oldest queued sample for the requested stream.
            MediaStreamSample sample3 = mediaStreamSamples_[mediaStreamType][0];
            mediaStreamSamples_[mediaStreamType].RemoveAt(0);
            ReportGetSampleCompleted(sample3);
        }
예제 #20
0
        protected override void GetSampleAsync(
            MediaStreamType mediaStreamType)
        {
            // Pumps demuxed samples into per-stream queues until one is available
            // for the requested stream, then reports it. The ordering of the
            // queue check, the pending_ flag and the ThreadPool re-issue is
            // load-bearing; do not reorder.
            while (mediaStreamSamples_[mediaStreamType].Count == 0)
            {
                Sample sample;
                Error ec = demuxer_.get_sample(out sample);
                if (ec == Error.success)
                {
                    Dictionary<MediaSampleAttributeKeys, string> mediaSampleAttributes =
                        new Dictionary<MediaSampleAttributeKeys, string>();
                    if (sample.is_sync)
                    {
                        // Mark sync points so the pipeline can seek to key frames.
                        mediaSampleAttributes[MediaSampleAttributeKeys.KeyFrameFlag] = bool.TrueString;
                    }
                    // The demuxer tags each sample with its stream index; queue it
                    // under that stream (it may not be the requested stream).
                    MediaStreamType type = mediaStreamTypes_[(int)sample.index];
                    MediaStreamSample sample2 = new MediaStreamSample(
                        mediaStreamDescriptions_[type],
                        new System.IO.MemoryStream(sample.data),
                        0,
                        sample.data.Length,
                        (long)sample.time,
                        mediaSampleAttributes);
                    mediaStreamSamples_[type].Add(sample2);
                }
                else
                {
                    if (ec == Error.would_block)
                    {
                        if (pending_)
                        {
                            // Already in the deferred retry path: poll with a short
                            // sleep. NOTE(review): this busy-waits the worker thread.
                            System.Threading.Thread.Sleep(100);
                            continue;
                        }
                        else
                        {
                            // First would_block: retry the whole request later on a
                            // ThreadPool thread instead of blocking the caller.
                            pending_ = true;
                            System.Threading.ThreadPool.QueueUserWorkItem(
                                GetSamplePending, mediaStreamType);
                        }
                    }
                    else if (ec == Error.stream_end)
                    {
                        // A null sample signals end of stream to the MediaElement.
                        ReportGetSampleCompleted(null);
                    }
                    else
                    {
                        ErrorOccurred(ec.ToString());
                    }
                    return;
                }
            }

            pending_ = false;

            // Deliver the oldest queued sample for the requested stream.
            MediaStreamSample sample3 = mediaStreamSamples_[mediaStreamType][0];
            mediaStreamSamples_[mediaStreamType].RemoveAt(0);
            ReportGetSampleCompleted(sample3);
        }
예제 #21
0
        public MSF GetCurrentBuffer(int channel, MediaStreamType mediatype, out int f)
        {
            // Rotates over the six muxer buffers for the given channel, keeping a
            // separate cursor per media type (aidx for audio, vidx for video)
            // persisted in instance fields across calls.
            // NOTE(review): 'idx' is itself an instance field, so this method is
            // not thread-safe; for media types other than Audio/Video it reuses
            // whatever cursor the previous call left — confirm callers only pass
            // Audio or Video.
            MSF msf;

            // 'f' flags the wrap to buffer 0 (set only in case 0 below).
            f = 0;

            if (mediatype == MediaStreamType.Audio)
            {
                idx = aidx;
            }
            else if (mediatype == MediaStreamType.Video)
            {
                idx = vidx;
            }

            switch (idx)
            {
            case 0:
                msf = _muxs1[channel].msf;
                f   = 1;
                break;

            case 1:
                msf = _muxs2[channel].msf;
                break;

            case 2:
                msf = _muxs3[channel].msf;
                break;

            case 3:
                msf = _muxs4[channel].msf;
                break;

            case 4:
                msf = _muxs5[channel].msf;
                break;

            case 5:
                msf = _muxs6[channel].msf;
                break;

            default:
                // NOTE(review): idx is incremented below without wrapping past 5,
                // so once the cursor exceeds 5 this returns null until something
                // external resets aidx/vidx — confirm a reset happens elsewhere.
                msf = null;
                break;
            }
            idx++;

            // Persist the advanced cursor for the matching media type.
            if (mediatype == MediaStreamType.Audio)
            {
                aidx = idx;
            }
            else if (mediatype == MediaStreamType.Video)
            {
                vidx = idx;
            }

            return(msf);
        }
예제 #22
0
 public List <MediaStream> GetMediaStreams(MediaStreamType type)
 {
     // Query the source manager for this item's streams of the given type.
     var query = new MediaStreamQuery
     {
         ItemId = Id,
         Type   = type
     };

     return MediaSourceManager.GetMediaStreams(query);
 }
예제 #23
0
 /// <summary>
 /// Initializes a new instance of the MediaChunkQueue class, pre-allocating
 /// one MediaChunk per slot.
 /// </summary>
 /// <param name="count">the number of chunks in the queue</param>
 /// <param name="mediaType">the media type of the queue</param>
 /// <param name="streamId">the id of the stream this queue belongs to</param>
 public MediaChunkQueue(int count, MediaStreamType mediaType, int streamId)
 {
     m_dataQueue = new MediaChunk[count];
     for (int slot = 0; slot < count; slot++)
     {
         m_dataQueue[slot] = new MediaChunk(slot, mediaType, streamId);
     }
 }
예제 #24
0
        /// <summary>
        /// Return the next sample requested: up to one second of WAV data (block
        /// aligned), a zero-length "fake ending" sample when the workaround flag
        /// is set, or an end-of-stream sample when the chunk is exhausted.
        /// </summary>
        /// <param name="mediaStreamType">The stream type that we are getting a sample for</param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Start with one second of data, rounded up to the nearest block.
            uint bufferSize = (uint)AlignUp(
                this.wavParser.WaveFormatEx.AvgBytesPerSec,
                this.wavParser.WaveFormatEx.BlockAlign);

            // Figure out how much data we have left in the chunk compared to the
            // data that we need.
            bufferSize = Math.Min(bufferSize, this.wavParser.BytesRemainingInChunk);
            if (bufferSize > 0)
            {
                // Advance the parser past the bytes we are about to report.
                this.wavParser.ProcessDataFromChunk(bufferSize);

                // Send out the next sample
                MediaStreamSample sample = new MediaStreamSample(
                    this.audioDesc,
                    this.stream,
                    this.currentPosition,
                    bufferSize,
                    this.currentTimeStamp,
                    this.emptySampleDict);

                // Move our timestamp and position forward
                this.currentTimeStamp += this.wavParser.WaveFormatEx.AudioDurationFromBufferSize(bufferSize);
                this.currentPosition  += bufferSize;

                // Loop forever: rewind to the start of the chunk once exhausted.
                if (Loop && this.wavParser.BytesRemainingInChunk == 0)
                {
                    this.wavParser.MoveToStartOfChunk();
                    this.currentPosition = this.startPosition;
                }

                ReportGetSampleCompleted(sample);
            }
            else if (sendFakeEnding)
            {
                // Required to work around bug in Silverlight with short samples that would otherwise only send
                // ReportGetSampleCompleted once (actually twice due to weird, non-debuggable thread!)
                // (Add Debug.WriteLine or Assert to see this happen)
                // NOTE(review): bufferSize is 0 here, so this reports a zero-length
                // sample at the current position — apparently what the workaround
                // relies on.
                sendFakeEnding = false;
                MediaStreamSample sample = new MediaStreamSample(
                    this.audioDesc,
                    this.stream,
                    this.currentPosition,
                    bufferSize,
                    this.currentTimeStamp,
                    this.emptySampleDict);
                ReportGetSampleCompleted(sample);
            }
            else
            {
                // Report EOS via a null-stream sample.
                ReportGetSampleCompleted(new MediaStreamSample(this.audioDesc, null, 0, 0, 0, this.emptySampleDict));
            }
        }
예제 #25
0
        /// <summary>
        /// Parses the next sample from the requested stream and then calls ReportGetSampleCompleted
        /// to inform its parent MediaElement of the next sample.
        /// </summary>
        /// <param name="mediaStreamType">
        /// Should always be Audio for this MediaStreamSource.
        /// </param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            Dictionary <MediaSampleAttributeKeys, string> emptyDict = new Dictionary <MediaSampleAttributeKeys, string>();
            MediaStreamSample audioSample = null;

            if (this.currentFrame != null)
            {
                // Calculate our current position based on the stream's length
                //// double ratio = (double)this.currentFrameStartPosition / (double)this.audioStreamLength;
                //// TimeSpan currentPosition = new TimeSpan((long)(this.trackDuration.Ticks * ratio));

                // Calculate our current position instead based on the bitrate of the stream (more accurate?)
                // offset [bytes] / bitrate [bits/s] * 8 [bits/byte] = seconds elapsed
                double   position        = (double)this.currentFrameStartPosition / (double)this.currentFrame.Bitrate;
                TimeSpan currentPosition = TimeSpan.FromSeconds(position * 8 /* bits per Byte */);

                // Create a MemoryStream to hold the bytes
                // FrameSize includes the frame header which we've already read from the previous iteration, so just copy the
                // header, and then read the remaining bytes
                this.currentFrame.CopyHeader(buffer);
                int audioSampleSize = this.currentFrame.FrameSize - MpegFrame.FrameHeaderSize;
                int c = this.audioStream.Read(buffer, MpegFrame.FrameHeaderSize, audioSampleSize);
                if (c != audioSampleSize)
                {
                    // Ran out of bytes trying to read MP3 frame.
                    // Report a null-stream (end-of-stream) sample and stop parsing.
                    this.currentFrame = null;
                    audioSample       = new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, emptyDict);
                    this.ReportGetSampleCompleted(audioSample);
                    return;
                }

                // Only the payload bytes are counted here; the header bytes of the
                // *next* frame are added after it is successfully parsed below.
                this.currentFrameStartPosition += c;
                using (MemoryStream audioFrameStream = new MemoryStream(buffer))
                {
                    // Return the next sample in the stream
                    // NOTE(review): the MemoryStream wraps the whole reusable buffer (sample
                    // length is limited by FrameSize) and is disposed as soon as this using
                    // block exits — assumes ReportGetSampleCompleted consumes the data
                    // synchronously. TODO confirm.
                    audioSample = new MediaStreamSample(this.audioStreamDescription, audioFrameStream, 0, this.currentFrame.FrameSize, currentPosition.Ticks, emptyDict);
                    this.ReportGetSampleCompleted(audioSample);

                    // Grab the next frame
                    MpegFrame nextFrame = new MpegFrame(this.audioStream);
                    // Accept only MPEG version 1/2, Layer III frames; anything else ends the stream.
                    if ((nextFrame.Version == 1 || nextFrame.Version == 2) && nextFrame.Layer == 3)
                    {
                        this.currentFrameStartPosition += MpegFrame.FrameHeaderSize;
                        this.currentFrame = nextFrame;
                    }
                    else
                    {
                        this.currentFrame = null;
                    }
                }
            }
            else
            {
                // We're near the end of the file, or we got an irrecoverable error.
                // Return a null stream which tells the MediaStreamSource & MediaElement to shut down
                audioSample = new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, emptyDict);
                this.ReportGetSampleCompleted(audioSample);
            }
        }
예제 #26
0
 /// <summary>
 /// Initializes a new instance of the <see cref="MixedRealityCaptureVideoEffect"/> class.
 /// </summary>
 /// <param name="streamType">The capture stream to which this effect is to be applied.</param>
 /// <param name="globalOpacityCoefficient">The opacity of the holograms in range from 0.0 (fully transparent) to 1.0 (fully opaque).</param>
 /// <param name="preferredHologramPerspective">
 /// Value used to indicate which holographic camera view configuration should be captured:
 /// 0 (Display) means that the app won't be asked to render from the photo/video camera,
 /// 1 (PhotoVideoCamera) will ask the app to render from the photo/video camera (if the app supports it).
 /// Only supported on HoloLens 2.
 /// </param>
 public MixedRealityCaptureVideoEffect(
     MediaStreamType streamType     = MediaStreamType.VideoRecord,
     float globalOpacityCoefficient = 0.9f,
     MixedRealityCapturePerspective preferredHologramPerspective = MixedRealityCapturePerspective.PhotoVideoCamera)
 {
     this.properties = new ()
     {
         { "StreamType", streamType },
         { "HologramCompositionEnabled", true },
예제 #27
0
	/// <summary>
	/// Decodes the next chunk of audio from ASAP and reports it as a sample.
	/// </summary>
	/// <param name="mediaStreamType">Ignored; this source serves a single audio stream.</param>
	protected override void GetSampleAsync(MediaStreamType mediaStreamType)
	{
		int blocksPlayed = this.Asap.GetBlocksPlayed();
		ASAPSampleFormat format = BitsPerSample == 8 ? ASAPSampleFormat.U8 : ASAPSampleFormat.S16LE;
		int generated = this.Asap.Generate(buffer, buffer.Length, format);

		// Zero generated bytes means the song ended; a null stream reports end-of-stream.
		Stream payload = generated == 0 ? null : new MemoryStream(buffer);

		// Timestamp in 100-ns ticks derived from the blocks played so far.
		long timestampTicks = blocksPlayed * 10000000L / ASAP.SampleRate;

		ReportGetSampleCompleted(new MediaStreamSample(this.MediaStreamDescription, payload, 0, generated, timestampTicks, SampleAttributes));
	}
예제 #28
0
        /// <summary>
        /// Initializes a new instance of the StreamInfo class
        /// </summary>
        /// <param name="baseUrl">the base url for chunks in this stream</param>
        /// <param name="language">the language of this stream</param>
        /// <param name="numberOfChunks">the number of chunks in this stream</param>
        /// <param name="mediaType">the MediaStreamType of this stream</param>
        /// <param name="streamId">the id of this stream</param>
        public StreamInfo(string baseUrl, string language, int numberOfChunks, MediaStreamType mediaType, int streamId)
        {
            m_baseUrl = baseUrl;
            m_language = language;
            m_mediaType = mediaType;
            m_numberOfChunksInStream = numberOfChunks;

            // Pre-build the queue that tracks each chunk of this stream.
            m_chunksQueue = new MediaChunkQueue(numberOfChunks, MediaType, streamId);
        }
        /// <summary>
        /// Refreshes the shared buffer via the update callback and reports one audio sample.
        /// </summary>
        /// <param name="mediaStreamType">Ignored; this source produces a single audio stream.</param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Let the producer rewrite the buffer contents in place.
            _updater(_buffer, _bufferSize);

            var audioSample = new MediaStreamSample(
                _audioDescription,
                _bufferStream,
                0,
                _bufferSize,
                _timestamp,
                _emptySampleDict);

            // Advance the clock by the buffer's duration, expressed in 100-ns ticks.
            _timestamp += _bufferSize * 10000000L / _waveFormat.AverageBytesPerSec;

            ReportGetSampleCompleted(audioSample);
        }
예제 #30
0
    /// <summary>
    /// Generates the next block of ASAP-decoded audio and hands it to the pipeline.
    /// </summary>
    /// <param name="mediaStreamType">Ignored; only audio is produced.</param>
    protected override void GetSampleAsync(MediaStreamType mediaStreamType)
    {
        // Playback clock position, converted to 100-ns ticks.
        long ticks = this.Asap.GetBlocksPlayed() * 10000000L / ASAP.SampleRate;

        ASAPSampleFormat sampleFormat = BitsPerSample == 8
            ? ASAPSampleFormat.U8
            : ASAPSampleFormat.S16LE;
        int byteCount = this.Asap.Generate(buffer, buffer.Length, sampleFormat);

        // An empty generate means playback finished; null signals end-of-stream.
        Stream sampleStream = byteCount == 0 ? null : new MemoryStream(buffer);

        ReportGetSampleCompleted(new MediaStreamSample(
            this.MediaStreamDescription, sampleStream, 0, byteCount, ticks, SampleAttributes));
    }
예제 #31
0
        /// <summary>
        /// Counts the streams of the given type, capped at <paramref name="limit"/>.
        /// </summary>
        /// <param name="type">The stream type to count.</param>
        /// <param name="limit">Upper bound applied to the returned count.</param>
        /// <returns>The clamped count, or null when the source reports no count.</returns>
        private int? GetMediaStreamCount(MediaStreamType type, int limit)
        {
            int? streamCount = MediaSource.GetStreamCount(type);

            // Preserve null ("unknown") from the underlying source.
            if (!streamCount.HasValue)
            {
                return null;
            }

            return Math.Min(streamCount.Value, limit);
        }
예제 #32
0
        /// <summary> Gets a stream. </summary>
        /// <param name="item"> The item. </param>
        /// <param name="streamType"> Type of the stream. </param>
        /// <returns> The first media stream of the requested type, or null when unavailable. </returns>
        protected MediaStream GetStream(BaseItem item, MediaStreamType streamType)
        {
            var sourceInfo = GetMediaSourceInfo(item);

            // No media source info means no streams to search.
            if (sourceInfo == null)
            {
                return null;
            }

            return sourceInfo.MediaStreams.FirstOrDefault(stream => stream.Type == streamType);
        }
예제 #33
0
 /// <summary>
 /// Native-to-managed trampoline: resolves the instance behind the pointer and
 /// forwards the sample request. No exception may escape to the native caller.
 /// </summary>
 static void GetSampleAsyncInternal(IntPtr instance, MediaStreamType mediaStreamType)
 {
     try
     {
         FromIntPtr(instance).GetSampleAsyncInternal(mediaStreamType);
     }
     catch (Exception ex)
     {
         // Best-effort logging only; even logging can fail during teardown.
         try
         {
             Console.WriteLine("Unhandled exception in MediaStreamSource.GetSampleAsyncInternal: {0}", ex);
         }
         catch
         {
             // Deliberately swallowed.
         }
     }
 }
예제 #34
0
 /// <summary>
 /// Parses and reports the next MP3 frame. While the download buffer is still
 /// filling (or a pending seek cannot be satisfied), reports progress and
 /// retries this same request after a one-second delay on a worker thread.
 /// </summary>
 /// <param name="mediaStreamType">Expected to be Audio for this source.</param>
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     if (this._isClosed)
     {
         return;
     }
     // All parsing state (audioStream position, currentFrame) is guarded by _streamLock.
     lock (this._streamLock)
     {
         Dictionary <MediaSampleAttributeKeys, string> dictionary = new Dictionary <MediaSampleAttributeKeys, string>();
         if (this.audioStream.Position < this.audioStream.Length && (!this.HaveEnoughDataInBuffer() || !this.SeekToTimeIfNeeded()))
         {
             // Not ready yet: tell the pipeline we are half-way, then re-issue
             // this request one second later from the thread pool.
             this.ReportGetSampleProgress(0.5);
             ThreadPool.QueueUserWorkItem((WaitCallback)(obj =>
             {
                 Thread.Sleep(1000);
                 this.GetSampleAsync(mediaStreamType);
             }));
         }
         else if (this.currentFrame != null)
         {
             // Timestamp from byte offset and bitrate: offset / bitrate * 8 seconds.
             TimeSpan timeSpan = TimeSpan.FromSeconds((double)this.currentFrameStartPosition / (double)this.currentFrame.Bitrate * 8.0);
             // The 4 header bytes were consumed when the frame was parsed; copy
             // them into the shared buffer, then read the remaining payload.
             this.currentFrame.CopyHeader(Mp3MediaStreamSourceX.buffer);
             int count = this.currentFrame.FrameSize - 4;
             int num   = this.audioStream.Read(Mp3MediaStreamSourceX.buffer, 4, count);
             if (num != count)
             {
                 // Truncated frame: report a null-stream sample (end-of-stream).
                 this.currentFrame = null;
                 this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, (IDictionary <MediaSampleAttributeKeys, string>)dictionary));
             }
             else
             {
                 this.currentFrameStartPosition = this.currentFrameStartPosition + (long)num;
                 // NOTE(review): the MemoryStream wraps the whole reusable buffer and is
                 // disposed when this using block exits — assumes ReportGetSampleCompleted
                 // consumes the data synchronously. TODO confirm.
                 using (MemoryStream memoryStream = new MemoryStream(Mp3MediaStreamSourceX.buffer))
                 {
                     this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, (Stream)memoryStream, 0, (long)this.currentFrame.FrameSize, timeSpan.Ticks, (IDictionary <MediaSampleAttributeKeys, string>)dictionary));
                     // Pre-parse the next frame header; only MPEG 1/2 Layer III continues playback.
                     MpegFrame mpegFrame = new MpegFrame(this.audioStream);
                     if ((mpegFrame.Version == 1 || mpegFrame.Version == 2) && mpegFrame.Layer == 3)
                     {
                         // Account for the 4 header bytes just consumed.
                         this.currentFrameStartPosition = this.currentFrameStartPosition + 4L;
                         this.currentFrame = mpegFrame;
                     }
                     else
                     {
                         this.currentFrame = null;
                     }
                 }
             }
         }
         else
         {
             // No current frame: end of stream (or unrecoverable parse error).
             this.ReportGetSampleCompleted(new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, (IDictionary <MediaSampleAttributeKeys, string>)dictionary));
         }
     }
 }
예제 #35
0
        /// <summary>
        /// Queues a sample request for the background worker, starting the worker
        /// on first use.
        /// </summary>
        /// <param name="mediaStreamType">The stream the sample is requested for.</param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            EnsureWorkerThreadStarted();

            m_commands.Enqueue(new WorkQueueElement(WorkQueueElement.Command.Sample, mediaStreamType));
        }

        // Starts the worker thread the first time it is needed; later calls are no-ops.
        private void EnsureWorkerThreadStarted()
        {
            if (m_WorkerThread == null)
            {
                m_WorkerThread = new Thread(WorkerThreadRun);
                m_WorkerThread.Start();
            }
        }
예제 #36
0
 /// <summary>
 /// Dispatches a sample request by stream type. Video sampling is currently disabled.
 /// </summary>
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     switch (mediaStreamType)
     {
         case MediaStreamType.Audio:
             GetAudioSample(true);
             break;

         case MediaStreamType.Video:
             // Video path intentionally disabled.
             //GetVideoSample();
             break;
     }
 }
예제 #37
0
 /// <summary>
 /// Records an outstanding video sample request and pushes any buffered frames out.
 /// Non-video requests are ignored.
 /// </summary>
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     if (mediaStreamType != MediaStreamType.Video)
     {
         return;
     }

     lock (lockObj)
     {
         this.outstandingSamplesCount++;
         FeedSamples();
     }
 }
 /// <summary>
 /// Serves video sample requests; audio requests are unexpected and only logged.
 /// </summary>
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     if (mediaStreamType == MediaStreamType.Video)
     {
         GetVideoSample();
     }
     else if (mediaStreamType == MediaStreamType.Audio)
     {
         // This source is video-only; an audio request indicates a wiring problem.
         Debug.WriteLine("audio?!");
     }
 }
 /// <summary>
 /// Asynchronously pulls the next sample from the queue source and reports it,
 /// advancing the running timestamp by each sample's duration.
 /// </summary>
 /// <exception cref="InvalidOperationException">Thrown for non-audio requests.</exception>
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     if (mediaStreamType != MediaStreamType.Audio)
         throw new InvalidOperationException ("Only audio stream type is supported");

     q.BeginGetNextSample ((asyncResult) => {
         var nextSample = q.EndGetNextSample (asyncResult);
         ArraySegment<byte> payload = nextSample.Buffer;

         // Running timestamp accumulates each sample's duration, in ticks.
         position += ToTick (nextSample.Duration);

         var mediaSample = new MediaStreamSample (media_desc, new MemoryStream (payload.Array), payload.Offset, payload.Count, position, empty_atts);
         this.ReportGetSampleCompleted (mediaSample);
     }, null);
 }
예제 #40
0
        /// <summary>
        /// Finds the media stream matching both the given type and index.
        /// </summary>
        /// <param name="type">The stream type to match.</param>
        /// <param name="index">The stream index to match.</param>
        /// <returns>The matching stream, or null when none matches.</returns>
        public MediaStream GetMediaStream(MediaStreamType type, int index)
        {
            foreach (MediaStream candidate in MediaStreams)
            {
                bool matches = candidate.Type == type && candidate.Index == index;
                if (matches)
                {
                    return candidate;
                }
            }

            return null;
        }
예제 #41
0
파일: Format.cs 프로젝트: As-You-Like/Media
        /// <summary>
        /// Returns the first stream of the requested type, or null when absent.
        /// </summary>
        /// <param name="type">The stream type to look for.</param>
        public MediaStream GetStream(MediaStreamType type)
        {
            foreach (var candidate in Streams)
            {
                if (candidate.Type != type)
                {
                    continue;
                }

                return candidate;
            }

            return null;
        }
예제 #42
0
    /// <summary>
    /// Return the next sample requested
    /// </summary>
    /// <param name="mediaStreamType">The stream type that we are getting a sample for</param>
    protected override void GetSampleAsync(MediaStreamType mediaStreamType) {

      // Bytes remaining in the PCM data, capped at one sample-sized chunk.
      long chunkBytes = Math.Min(sampleSize, startPosition + pcmDataLen - currentPosition);

      if (chunkBytes <= 0) {
        // Report EOS: a null stream / zero-length sample ends playback.
        ReportGetSampleCompleted(new MediaStreamSample(audioDesc, null, 0, 0, 0, this.emptySampleDict));
        return;
      }

      // Send out the next sample, then advance the clock and read position.
      MediaStreamSample sample = new MediaStreamSample(audioDesc, stream, currentPosition, chunkBytes, currentTimeStamp, emptySampleDict);
      currentTimeStamp += header.AudioDurationFromDataLen(chunkBytes);
      currentPosition += chunkBytes;
      ReportGetSampleCompleted(sample);
    }
예제 #43
0
        /// <summary>
        /// Sets encoding properties on a camera stream. Ensures CaptureElement and preview stream are stopped before setting properties.
        /// </summary>
        /// <param name="streamType">The camera stream (preview/record/photo) whose properties are being set.</param>
        /// <param name="encodingProperties">The encoding properties to apply to that stream.</param>
        public async Task SetMediaStreamPropertiesAsync(MediaStreamType streamType, IMediaEncodingProperties encodingProperties)
        {
            // Stop preview and unlink the CaptureElement from the MediaCapture object
            await MediaCapture.StopPreviewAsync();
            _previewControl.Source = null;

            // Apply desired stream properties.
            // BUG FIX: this previously hardcoded MediaStreamType.VideoPreview,
            // silently ignoring the streamType parameter the caller passed in.
            await MediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(streamType, encodingProperties);

            // Recreate the CaptureElement pipeline and restart the preview
            _previewControl.Source = MediaCapture;
            await MediaCapture.StartPreviewAsync();
        }
        /// <summary>
        /// Serves video sample requests; audio requests are unexpected and only logged.
        /// </summary>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            if (mediaStreamType == MediaStreamType.Video)
            {
                GetVideoSample();
                return;
            }

            if (mediaStreamType == MediaStreamType.Audio)
            {
                // This source is video-only; an audio request indicates a wiring problem.
                Debug.WriteLine("audio?!");
            }
        }
예제 #45
0
        /// <summary>
        /// Collects every stream of the given type from the media source.
        /// </summary>
        /// <param name="type">The stream type to select.</param>
        /// <returns>A (possibly empty) list of matching streams.</returns>
        public List <MediaStream> GetSelectableStreams(MediaStreamType type)
        {
            var matches = new List <MediaStream>();

            foreach (MediaStream candidate in MediaSource.MediaStreams)
            {
                if (candidate.Type == type)
                {
                    matches.Add(candidate);
                }
            }

            return matches;
        }
예제 #46
0
 /// <summary>
 /// Flags that a video sample was requested; actual delivery happens elsewhere.
 /// Non-video requests are ignored.
 /// </summary>
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     try
     {
         if (mediaStreamType != MediaStreamType.Video)
         {
             return;
         }

         _sampleRequested = true;
     }
     catch (Exception ex)
     {
         // Never propagate out of the media pipeline callback; just log.
         ClientLogger.ErrorException(ex, "Get sample failed");
     }
 }
예제 #47
0
        /// <summary>
        /// Filters streams to the given type and orders them: preferred languages first
        /// (unmatched languages sink), then default / external-text-subtitle friendly
        /// streams, finally by index.
        /// </summary>
        private static IEnumerable<MediaStream> GetSortedStreams(IEnumerable<MediaStream> streams, MediaStreamType type, List<string> languagePreferences)
        {
            // Rank of a stream's language within the preference list; 100 = not preferred.
            Func<MediaStream, int> languageRank = stream =>
            {
                var rank = languagePreferences.FindIndex(l => string.Equals(stream.Language, l, StringComparison.OrdinalIgnoreCase));
                return rank == -1 ? 100 : rank;
            };

            // Give some preference to external text subs for better performance.
            return streams.Where(s => s.Type == type)
                .OrderBy(languageRank)
                .ThenBy(s => GetBooleanOrderBy(s.IsDefault))
                .ThenBy(s => GetBooleanOrderBy(s.SupportsExternalStream))
                .ThenBy(s => GetBooleanOrderBy(s.IsTextSubtitleStream))
                .ThenBy(s => GetBooleanOrderBy(s.IsExternal))
                .ThenBy(s => s.Index);
        }
 /// <summary>
 /// Serves audio sample requests from a generator-fed queue. When the queue is
 /// empty, the request is "banked" in stuck_count and satisfied directly by the
 /// next BufferArrived callback.
 /// </summary>
 /// <exception cref="InvalidOperationException">Thrown for non-audio requests.</exception>
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     if (mediaStreamType != MediaStreamType.Audio)
         throw new InvalidOperationException ("Only audio stream type is supported");
     // Lazily wire up the generator on the first request.
     if (samples == null) {
         samples = new Queue<IMediaSample> ();
         gen.BufferArrived += sample => {
             // A positive stuck_count means requests are waiting with no data:
             // deliver immediately; otherwise buffer the sample for later.
             // NOTE(review): stuck_count is decremented even when it is <= 0 and
             // re-incremented below — the balance looks intentional but is subtle;
             // also no lock guards samples/stuck_count across threads. TODO confirm.
             if (stuck_count-- > 0)
                 this.ReportGetSampleCompleted (sample);
             else
                 samples.Enqueue (sample);
         };
     }
     // Serve from the buffer when possible; otherwise record an unmet request.
     if (samples.Count > 0)
         ReportGetSampleCompleted (samples.Dequeue ());
     else
         stuck_count++;
 }
예제 #49
0
        /// <summary>
        /// Pulls and decodes the next Dirac picture, serializes its pixels into a
        /// frame stream, and reports it as a video sample.
        /// </summary>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Pull and decode the next picture from the Dirac decoder.
            org.diracvideo.Jirac.Picture picture = dec.Pull ();
            picture.Decode ();
            int [] pixelData = picture.GetImage ();

            // Serialize each 32-bit pixel into the frame buffer, 4 bytes at a time.
            MemoryStream frameStream = new MemoryStream ();
            foreach (int pixel in pixelData)
                frameStream.Write (BitConverter.GetBytes (pixel), 0, 4);

            MediaStreamSample videoSample = new MediaStreamSample (streamDescription, frameStream, 0, frameStream.Length, timestamp, empty_dict);

            // Fixed increment of 50 per frame (units determined by the stream setup).
            timestamp += 50;

            ReportGetSampleCompleted(videoSample);
        }
        /// <summary>
        /// Copies the next block of 16-bit PCM bytes (2 bytes per channel per sample)
        /// from sourceData into the shared memory stream and reports it as a sample;
        /// closes the media when the source data is exhausted.
        /// </summary>
        /// <param name="mediaStreamType">Ignored; this source produces a single audio stream.</param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            if (index < this.sourceData.Length)
            {
                // Bytes required per sample frame: 2 per channel (16-bit PCM).
                int bytesPerFrame = this.Channels == 2 ? 4 : 2;

                for (int i = 0; i < numSamples; i++)
                {
                    // BUG FIX: the original checked only `index` before reading
                    // sourceData[index + 1] (and up to index + 3 for stereo), which
                    // could throw IndexOutOfRangeException near the end of the data.
                    if (index + bytesPerFrame > this.sourceData.Length)
                        break;

                    memoryStream.WriteByte(
                        this.sourceData[index]);
                    memoryStream.WriteByte(
                        this.sourceData[index + 1]);
                    index += 2;

                    if (this.Channels == 2)
                    {
                        memoryStream.WriteByte(
                            this.sourceData[index]);
                        memoryStream.WriteByte(
                            this.sourceData[index + 1]);
                        index += 2;
                    }
                }

                MediaStreamSample mediaStreamSample =
                    new MediaStreamSample(mediaStreamDescription, memoryStream, currentPosition,
                                          bufferByteCount, currentTimeStamp, emptySampleDict);

                // Advance the timestamp (100-ns ticks) by the buffer's duration.
                currentTimeStamp += bufferByteCount * 10000000L / byteRate;
                currentPosition += bufferByteCount;

                ReportGetSampleCompleted(mediaStreamSample);
            }
            else
            {
                // All source data consumed: shut the stream down.
                this.CloseMedia();
            }
        }
        /// <summary>
        /// Captures the current camera preview frame into the circular frame stream
        /// and reports it as a video sample.
        /// </summary>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Wrap to the start of the circular stream when the next frame
            // would run past its end.
            if (_frameStreamOffset + _frameBufferSize > _frameStreamSize)
            {
                _frameStream.Seek(0, SeekOrigin.Begin);
                _frameStreamOffset = 0;
            }

            // Grab the current viewfinder frame and copy it across unmodified.
            _dev.GetPreviewBufferArgb(_cameraData);
            Buffer.BlockCopy(_cameraData, 0, _cameraFilteredData, 0, _cameraFilteredData.Length);

            _frameStream.Write(_cameraFilteredData, 0, _frameBufferSize);

            MediaStreamSample videoSample = new MediaStreamSample(
                _videoStreamDescription,
                _frameStream,
                _frameStreamOffset,
                _frameBufferSize,
                _currentTime,
                _emptySampleDict);

            ReportGetSampleCompleted(videoSample);

            // Advance the presentation clock and the circular-stream offset.
            _currentTime += _frameTime;
            _frameStreamOffset += _frameBufferSize;
        }
예제 #52
0
        /// <summary>
        /// This function takes care of keeping track of dropped frame
        /// rate, deciding when that information is meaningful, and
        /// validating the bit rates that the network heuristics can
        /// use
        /// </summary>
        /// <param name="mediaStreamType">The type of the stream we are processing</param>
        /// <param name="bitRate">Media bit rate in bps</param>
        internal void ProcessFrameRateHeuristcs(MediaStreamType mediaStreamType, ulong bitRate)
        {
            // Frame-rate heuristics only apply to video streams.
            if (mediaStreamType != MediaStreamType.Video)
            {
                return;
            }

            StreamInfo videoStream = m_manifestInfo.GetStreamInfoForStreamType(mediaStreamType);
            NetworkMediaInfo networkMediaInfo = m_networkMediaInfo[videoStream.StreamId];

            // Let the heuristics decide whether any bit rates must be suspended
            // given the recent dropped/rendered frame statistics.
            int suspended = networkMediaInfo.TestFrameRate(
                bitRate,
                m_playbackInfo.IsMinimizedBrowser,
                m_playbackInfo.CumulativeTics,
                m_playbackInfo.GetDroppedFpsHistory(),
                m_playbackInfo.RenderedFramesPerSecond,
                m_playbackInfo.SourceFramesPerSecond);

            // Something was suspended: re-plan the chunks already queued.
            if (suspended > 0)
            {
                RequestChunkReplacement();
            }
        }
예제 #53
0
        /// <summary>
        /// Gets the total time in the buffer for the given stream type
        /// </summary>
        /// <param name="mediaStreamType">the type of the stream to look at</param>
        /// <returns>the total time of chunks buffered; 0 when playback has not
        /// started or no such stream exists</returns>
        private ulong GetTimeBufferedForStreamType(MediaStreamType mediaStreamType)
        {
            // No manifest yet means playback hasn't started: nothing is buffered.
            if (m_manifestInfo == null)
            {
                return 0;
            }

            StreamInfo streamInfo = m_manifestInfo.GetStreamInfoForStreamType(mediaStreamType);

            // A missing stream also buffers nothing.
            return streamInfo == null ? 0 : streamInfo.Queue.BufferTime;
        }
예제 #54
0
        /// <summary>
        /// Determines which stream will be used for playback
        /// </summary>
        /// <param name="allStream">All stream.</param>
        /// <param name="desiredIndex">Index of the desired.</param>
        /// <param name="type">The type.</param>
        /// <param name="returnFirstIfNoIndex">if set to <c>true</c> [return first if no index].</param>
        /// <returns>MediaStream.</returns>
        private MediaStream GetMediaStream(IEnumerable<MediaStream> allStream, int? desiredIndex, MediaStreamType type, bool returnFirstIfNoIndex = true)
        {
            var candidates = allStream
                .Where(s => s.Type == type)
                .OrderBy(s => s.Index)
                .ToList();

            // An explicitly requested index wins whenever it exists.
            if (desiredIndex.HasValue)
            {
                var requested = candidates.FirstOrDefault(s => s.Index == desiredIndex.Value);
                if (requested != null)
                {
                    return requested;
                }
            }

            // Exclude mjpeg video streams from the fallback choice
            // (presumably embedded still images rather than playable video — verify).
            if (type == MediaStreamType.Video)
            {
                candidates = candidates
                    .Where(s => !string.Equals(s.Codec, "mjpeg", StringComparison.OrdinalIgnoreCase))
                    .ToList();
            }

            if (returnFirstIfNoIndex && type == MediaStreamType.Audio)
            {
                // Prefer an audio stream that actually reports a channel count.
                return candidates.FirstOrDefault(s => s.Channels.HasValue && s.Channels.Value > 0) ??
                       candidates.FirstOrDefault();
            }

            // Just return the first one
            return returnFirstIfNoIndex ? candidates.FirstOrDefault() : null;
        }
예제 #55
0
        /// <summary>
        /// Counts streams of the given type.
        /// </summary>
        /// <param name="type">The stream type to count.</param>
        /// <returns>The number of matching streams, or null when there are no streams at all.</returns>
        public int? GetStreamCount(MediaStreamType type)
        {
            int total = 0;
            int matching = 0;

            foreach (MediaStream stream in MediaStreams)
            {
                total++;
                if (stream.Type == type)
                {
                    matching++;
                }
            }

            // Distinguish "no streams at all" (null) from "none of this type" (0).
            return total == 0 ? (int?)null : matching;
        }
예제 #56
0
        /// <summary>
        /// Finds the media stream matching both the given type and index.
        /// </summary>
        /// <param name="type">The stream type to match.</param>
        /// <param name="index">The stream index to match.</param>
        /// <returns>The matching stream, or null when none matches.</returns>
        public MediaStream GetMediaStream(MediaStreamType type, int index)
        {
            foreach (MediaStream candidate in MediaStreams)
            {
                if (candidate.Type != type)
                {
                    continue;
                }

                if (candidate.Index == index)
                {
                    return candidate;
                }
            }

            return null;
        }
예제 #57
0
 /// <summary>
 /// Gets the information for the particular stream type. If there are multiple
 /// streams of the given type, it will return the first one it finds.
 /// </summary>
 /// <param name="type">the type of stream to retrieve</param>
 /// <returns>A StreamInfo object with the information for the stream, or null if none matches</returns>
 public StreamInfo GetStreamInfoForStreamType(MediaStreamType type)
 {
     foreach (StreamInfo candidate in m_activeStreams)
     {
         // Slots in m_activeStreams may be null; skip them.
         if (candidate != null && candidate.MediaType == type)
         {
             return candidate;
         }
     }

     return null;
 }
        /// <summary>
        /// Reports end-of-stream for the given stream type by sending a null-stream,
        /// zero-length sample with the matching stream description.
        /// </summary>
        private void ReportEndOfMedia(MediaStreamType mediaStreamType)
        {
            var emptyAttributes = new Dictionary<MediaSampleAttributeKeys, string>();

            // Video description is the default; audio requests use the audio description.
            MediaStreamDescription description = mediaStreamType == MediaStreamType.Audio
                ? this.audioStreamDescription
                : this.videoStreamDescription;

            try
            {
                var endSample = new MediaStreamSample(description, null, 0, 0, 0, emptyAttributes);
                ReportGetSampleCompleted(endSample);
            }
            catch
            {
                // Deliberately swallowed: reporting EOS after shutdown can throw.
            }
        }
        /// <summary>
        /// Asks the demux buffer to cue the next chunk for the requested stream;
        /// completion is signaled later via the sample-available event.
        /// </summary>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            if (mediaStreamType == MediaStreamType.Audio)
            {
                DemuxBuffer.RequestAudioSample();
            }
            else if (mediaStreamType == MediaStreamType.Video)
            {
                DemuxBuffer.RequestVideoSample();
            }
        }
        /// <summary>
        /// Return the next sample requested
        /// </summary>
        /// <param name="mediaStreamType">The stream type that we are getting a sample for</param>
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            // Target roughly one second of audio, rounded up to a whole block.
            uint oneSecondBytes = (uint)AlignUp(
                this.wavParser.WaveFormatEx.AvgBytesPerSec,
                this.wavParser.WaveFormatEx.BlockAlign);

            // Clamp to what is actually left in the data chunk.
            uint sampleBytes = Math.Min(oneSecondBytes, (uint)this.wavParser.BytesRemainingInChunk);

            if (sampleBytes == 0)
            {
                // Report EOS: nothing left in the chunk.
                ReportGetSampleCompleted(new MediaStreamSample(this.audioDesc, null, 0, 0, 0, this.emptySampleDict));
                return;
            }

            this.wavParser.ProcessDataFromChunk(sampleBytes);

            // Send out the next sample
            MediaStreamSample sample = new MediaStreamSample(
                this.audioDesc,
                this.stream,
                this.currentPosition,
                sampleBytes,
                this.currentTimeStamp,
                this.emptySampleDict);

            // Move our timestamp and position forward
            this.currentTimeStamp += this.wavParser.WaveFormatEx.AudioDurationFromBufferSize(sampleBytes);
            this.currentPosition += sampleBytes;

            // (To loop forever instead of ending: when BytesRemainingInChunk reaches 0,
            // call this.wavParser.MoveToStartOfChunk() and reset currentPosition to startPosition.)

            ReportGetSampleCompleted(sample);
        }