private MediaStreamSample CreateVideoSample(byte[] buf)
{
    if (videoStart.Ticks == 0)
    {
        videoStart = DateTime.Now;
    }

    // Copy this buffer so we can safely queue it without worrying about
    // reuse of the memory backing it.
    byte[] bufCopy = new byte[buf.Length];
    Array.Copy(buf, bufCopy, buf.Length);

    MediaStreamSample sample = MediaStreamSample.CreateFromBuffer(bufCopy.AsBuffer(), DateTime.Now - videoStart);
    sample.Duration = TimeSpan.Zero;

    // HACK: Marking all frames as keyframes seems to keep the decoder
    // from dying after the first few seconds.
    sample.KeyFrame = true;

    // With the hack above this check is redundant, but it shows the real test:
    // NAL type 5 (the low five bits of the byte after the 4-byte Annex B start
    // code) is an IDR slice, i.e. a true keyframe.
    if ((buf[4] & 0x1F) == 0x5)
    {
        sample.KeyFrame = true;
    }

    return sample;
}
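The keyframe test above relies on H.264 Annex B framing; a minimal sketch of that test as a standalone helper (the helper itself is not in the source, and it assumes every buffer begins with a 4-byte 00 00 00 01 start code):

// Hypothetical helper, not from the original source: returns the H.264
// nal_unit_type, or -1 if the buffer does not start with an Annex B
// start code. Type 5 = IDR slice (keyframe), 7 = SPS, 8 = PPS.
private static int GetNalType(byte[] buf)
{
    if (buf.Length < 5 || buf[0] != 0 || buf[1] != 0 || buf[2] != 0 || buf[3] != 1)
    {
        return -1;
    }
    return buf[4] & 0x1F;
}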
/// <summary>
/// Sets up the next sample for the request.
/// </summary>
private int SetSample(MediaStreamSourceSampleRequest request)
{
    try
    {
        Debug.WriteLine($"Length: {_fileStream.Length}, Offset: {_byteOffset}");
        _fileStream.Seek(_byteOffset, SeekOrigin.Begin);
        int count = _fileStream.Read(_buffer, _bufferOffset, BUFFER_SIZE);
        IBuffer buffer = _buffer.AsBuffer(_bufferOffset, count);
        _bufferOffset += count;
        if (_bufferOffset + BUFFER_SIZE > _bufferSize)
        {
            _bufferOffset = 0;
        }

        var sample = MediaStreamSample.CreateFromBuffer(buffer, _timeOffset);
        sample.Duration = _sampleDuration;
        sample.KeyFrame = true;

        _byteOffset += count;
        _timeOffset = _timeOffset.Add(_sampleDuration);
        request.Sample = sample;
        return count;
    }
    catch (Exception)
    {
        return 0;
    }
}
public void PlaySound(int samplingRate, byte[] pcmData)
{
    AudioEncodingProperties audioProps = AudioEncodingProperties.CreatePcm((uint)samplingRate, 1, 16);
    AudioStreamDescriptor audioDesc = new AudioStreamDescriptor(audioProps);
    MediaStreamSource mss = new MediaStreamSource(audioDesc);

    bool samplePlayed = false;
    mss.SampleRequested += (sender, args) =>
    {
        if (samplePlayed)
        {
            return;
        }
        IBuffer ibuffer = pcmData.AsBuffer();
        MediaStreamSample sample = MediaStreamSample.CreateFromBuffer(ibuffer, TimeSpan.Zero);
        // 16-bit mono PCM: pcmData.Length / 2 samples at samplingRate samples per second.
        sample.Duration = TimeSpan.FromSeconds(pcmData.Length / 2.0 / samplingRate);
        args.Request.Sample = sample;
        samplePlayed = true;
    };
    mediaElement.SetMediaStreamSource(mss);
}
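A hypothetical caller for the snippet above (names and figures are illustrative, not from the source): generate one second of a 440 Hz tone as little-endian 16-bit mono PCM, the layout PlaySound() expects.

// Hypothetical usage, not from the original source: 1 s of a 440 Hz sine at
// 44.1 kHz, packed little-endian as 16-bit mono PCM.
int rate = 44100;
byte[] pcm = new byte[2 * rate];
for (int i = 0; i < rate; i++)
{
    short v = (short)(short.MaxValue * 0.5 * Math.Sin(2 * Math.PI * 440 * i / rate));
    pcm[2 * i] = (byte)(v & 0xFF);
    pcm[2 * i + 1] = (byte)((v >> 8) & 0xFF);
}
PlaySound(rate, pcm);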
public MediaStreamSample GenerateSample()
{
    // Generate 1 s of data.
    var buffer = new byte[2 * m_sampleRate];
    var time = Time.TotalSeconds;
    for (int i = 0; i < m_sampleRate; i++)
    {
        // Chirp sine wave: the phase grows with t^2, so the instantaneous
        // frequency rises linearly over time.
        Int16 value = (Int16)(Int16.MaxValue * Math.Sin(2 * Math.PI * m_sineFrequency * time * time));
        buffer[2 * i] = (byte)(value & 0xFF);
        buffer[2 * i + 1] = (byte)((value >> 8) & 0xFF);
        time += 1 / (double)m_sampleRate;
    }

    var sample = MediaStreamSample.CreateFromBuffer(buffer.AsBuffer(), Time);
    sample.Discontinuous = (Time == TimeSpan.Zero);
    sample.Duration = TimeSpan.FromSeconds(1);
    Time += TimeSpan.FromSeconds(1);
    return sample;
}
/// <summary>
/// Get a sample from the pool which has a buffer with a given capacity
/// and with the associated timestamp.
/// </summary>
/// <param name="byteSize">The exact size in bytes that the sample buffer needs to accommodate.</param>
/// <param name="timestamp">The sample presentation timestamp.</param>
/// <returns>The newly created sample.</returns>
/// <remarks>
/// The returned sample's buffer has a <see cref="Windows.Storage.Streams.Buffer.Length"/> property
/// set to the input <paramref name="byteSize"/>. This is required to be set before creating the sample,
/// and should not be modified once the sample has been created.
/// </remarks>
public MediaStreamSample Pop(uint byteSize, System.TimeSpan timestamp)
{
    Buffer buffer;
    lock (this)
    {
        if (_freeBuffers.Count > 0)
        {
            buffer = _freeBuffers.Pop();
            if (buffer.Capacity < byteSize)
            {
                buffer = new Buffer(byteSize);
            }
        }
        else
        {
            buffer = new Buffer(byteSize);
        }
        _usedBuffers.Enqueue(buffer);

        // This must be set before calling CreateFromBuffer() below, otherwise
        // the Media Foundation pipeline throws an exception.
        buffer.Length = byteSize;
    }

    // Because the managed wrapper does not allow modifying the timestamp, we
    // need to recreate the sample each time with the correct timestamp.
    var sample = MediaStreamSample.CreateFromBuffer(buffer, timestamp);
    sample.Processed += OnSampleProcessed;
    return sample;
}
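Pop() subscribes OnSampleProcessed, which is not shown in the source; a sketch of what the matching recycler presumably does, assuming samples complete in the order they were popped and reusing the _freeBuffers/_usedBuffers fields above:

// Hypothetical counterpart to Pop(), not from the original source: once the
// pipeline has consumed the sample, recycle its buffer back into the pool.
private void OnSampleProcessed(MediaStreamSample sample, object args)
{
    lock (this)
    {
        // Assumes samples are processed in pop order, so the front of the
        // used queue is the buffer backing this sample.
        Buffer buffer = _usedBuffers.Dequeue();
        _freeBuffers.Push(buffer);
    }
    sample.Processed -= OnSampleProcessed;
}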
/// <summary>
/// Dequeues a NAL and gives it to the decoder.
/// </summary>
private void HandleSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    //Log.Verbose("HandleSampleRequested");
    Nal nal;
    lock (usedNals)
    {
        //Log.Verbose("usedNals.Dequeue: {0}", usedNals.Count);
        nal = (usedNals.Count > 0) ? usedNals.Dequeue() : null;
    }

    if (nal != null)
    {
        args.Request.Sample = MediaStreamSample.CreateFromBuffer(nal.Buffer, new TimeSpan(0));
        lock (availableNals)
        {
            //Log.Verbose("availableNals.Enqueue");
            availableNals.Enqueue(nal);
        }
    }
    else
    {
        // No NAL is ready yet: park the request in a deferral. ProcessNal()
        // (further down) completes it as soon as the next NAL arrives.
        //Log.Verbose("Deferred");
        request = args.Request;
        deferral = args.Request.GetDeferral();
    }
}
private async Task<Tuple<MediaStreamSample, uint>> ParseAACSampleAsync(bool partial = false, byte[] partialBytes = null)
{
    IBuffer buffer = null;
    MediaStreamSample sample = null;
    uint sampleLength = 0;

    if (partial)
    {
        buffer = partialBytes.AsBuffer();
        sampleLength = aac_adts_sampleSize - (uint)partialBytes.Length;
        byteOffset += sampleLength;
    }
    else
    {
        await socketReader.LoadAsync(aac_adts_sampleSize);
        buffer = socketReader.ReadBuffer(aac_adts_sampleSize);
        byteOffset += aac_adts_sampleSize;
        sampleLength = aac_adts_sampleSize;
    }

    sample = MediaStreamSample.CreateFromBuffer(buffer, timeOffSet);
    sample.Duration = aac_adts_sampleDuration;
    sample.KeyFrame = true;
    timeOffSet = timeOffSet.Add(aac_adts_sampleDuration);

    return new Tuple<MediaStreamSample, uint>(sample, sampleLength);
}
private void OnMediaSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs e)
{
    var instantBuffer = GetBuffer();
    var buffer = _mediaDecoder.ReadSample(instantBuffer, instantBuffer.Capacity);

    MediaStreamSample sample = null;
    if (buffer.Length > 0)
    {
        sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromSeconds(_currentTime));
        sample.Processed += OnSampleProcessed;
        var duration = _mediaDecoder.GetDurationFromBufferSize(buffer.Length);
        sample.Duration = TimeSpan.FromSeconds(duration);
        _currentTime += duration;
    }
    else
    {
        _currentTime = 0.0;
        _mediaDecoder.Seek(0);
    }
    e.Request.Sample = sample;
}
private MediaStreamSample CreateAudioSample(byte[] buf)
{
    if (audioStart.Ticks == 0)
    {
        audioStart = DateTime.Now;
    }

    MediaStreamSample sample = MediaStreamSample.CreateFromBuffer(buf.AsBuffer(), DateTime.Now - audioStart);
    sample.Duration = TimeSpan.FromMilliseconds(5);
    return sample;
}
private async Task<Tuple<MediaStreamSample, uint>> ParseMP3SampleAsync(bool partial = false, byte[] partialBytes = null)
{
    //http://www.mpgedit.org/mpgedit/mpeg_format/MP3Format.html
    IBuffer buffer = null;
    MediaStreamSample sample = null;
    uint sampleLength = 0;

    if (partial)
    {
        buffer = partialBytes.AsBuffer();
        sampleLength = mp3_sampleSize - (uint)partialBytes.Length;
        byteOffset += sampleLength;
    }
    else
    {
        var read = await socketReader.LoadAsync(mp3_sampleSize);
        if (read == 0)
        {
            Disconnect();
            MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.ConnectionToServerLost);
            return new Tuple<MediaStreamSample, uint>(null, 0);
        }
        else if (read < mp3_sampleSize)
        {
            buffer = socketReader.ReadBuffer(read);
            byteOffset += mp3_sampleSize;
        }
        else
        {
            buffer = socketReader.ReadBuffer(mp3_sampleSize);
            byteOffset += mp3_sampleSize;
        }
        sampleLength = mp3_sampleSize;
    }

    sample = MediaStreamSample.CreateFromBuffer(buffer, timeOffSet);
    sample.Duration = mp3_sampleDuration;
    sample.KeyFrame = true;
    timeOffSet = timeOffSet.Add(mp3_sampleDuration);

    return new Tuple<MediaStreamSample, uint>(sample, sampleLength);
}
private void MediaStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
#if EMULATOR_ON
#else
    var sample = ViewModel.VideoViewModel.GetSample();
    //Debug.WriteLine($"{nameof(MediaStreamSource_SampleRequested)} - video ready? {sample != null}");
    if (sample != null)
    {
        //Debug.WriteLine($"{nameof(MediaStreamSource_SampleRequested)} - got sample time index {sample.TimeIndex}, length {sample.Buffer.Length}b, duration {sample.Duration}");
        args.Request.Sample = MediaStreamSample.CreateFromBuffer(sample.Buffer.AsBuffer(), sample.TimeIndex);
        args.Request.Sample.Duration = sample.Duration;
    }
#endif
}
void mkvStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSample sample = null;
    MediaStreamSourceSampleRequest request = args.Request;
    MediaStreamSourceSampleRequestDeferral deferral = request.GetDeferral();
    try
    {
        var mkvFs = mediaStreamFileSource as CCPlayer.HWCodecs.Matroska.MKV.MKVFileSource;
        FrameBufferData fd = mkvFs.GetFrameData(request.StreamDescriptor);
        if (fd.Data != null)
        {
            sample = MediaStreamSample.CreateFromBuffer(fd.Data, fd.TimeCode);
            sample.Duration = fd.Duration;
            sample.KeyFrame = fd.KeyFrame;

            // Look up subtitles and forward them.
            MessengerInstance.Send<Message>(new Message("SubtitleFrameInMKV", mkvFs.SubtitleFrames), TransportControlViewModel.NAME);
        }
        else if (System.Diagnostics.Debugger.IsAttached)
        {
            // Reporting a null sample ends playback gracefully, i.e. the MediaEnded event fires.
            System.Diagnostics.Debug.WriteLine("Null sample reported; end-of-stream code received => MediaElement's MediaEnded event will fire.");
        }
        request.Sample = sample;
    }
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("Sample error or forced shutdown => MediaStreamSource's Closed event will fire: " + e.Message);
        // Raises the Closed event.
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        if (deferral != null)
        {
            deferral.Complete();
        }
    }
}
public async Task<Tuple<MediaStreamSample, uint>> ParseSampleAsync(ShoutcastStreamProcessor processor, DataReader socketReader, bool partial = false, byte[] partialBytes = null)
{
    //http://www.mpgedit.org/mpgedit/mpeg_format/MP3Format.html
    IBuffer buffer = null;
    MediaStreamSample sample = null;
    uint sampleLength = 0;

    if (partial)
    {
        buffer = partialBytes.AsBuffer();
        sampleLength = MP3Parser.mp3_sampleSize - (uint)partialBytes.Length;
        //processor.byteOffset += sampleLength;
    }
    else
    {
        var read = await socketReader.LoadAsync(MP3Parser.mp3_sampleSize);
        if (read == 0 || read < MP3Parser.mp3_sampleSize)
        {
            // Disconnected.
            throw new ShoutcastDisconnectionException();
        }
        buffer = socketReader.ReadBuffer(MP3Parser.mp3_sampleSize);
        //processor.byteOffset += MP3Parser.mp3_sampleSize;
        sampleLength = MP3Parser.mp3_sampleSize;
    }

    sample = MediaStreamSample.CreateFromBuffer(buffer, processor.timeOffSet);
    sample.Duration = MP3Parser.mp3_sampleDuration;
    sample.KeyFrame = true;
    processor.timeOffSet = processor.timeOffSet.Add(MP3Parser.mp3_sampleDuration);

    return new Tuple<MediaStreamSample, uint>(sample, sampleLength);
}
public async Task<Tuple<MediaStreamSample, uint>> ParseSampleAsync(ShoutcastStreamProcessor processor, SocketWrapper socket, bool partial = false, byte[] partialBytes = null)
{
    IBuffer buffer = null;
    MediaStreamSample sample = null;
    uint sampleLength = 0;

    if (partial)
    {
        buffer = partialBytes.AsBuffer();
        sampleLength = AAC_ADTSParser.aac_adts_sampleSize - (uint)partialBytes.Length;
        //processor.byteOffset += sampleLength;
    }
    else
    {
        var read = await socket.LoadAsync(AAC_ADTSParser.aac_adts_sampleSize);
        if (read == 0 || read < AAC_ADTSParser.aac_adts_sampleSize)
        {
            // Disconnected.
            throw new ShoutcastDisconnectionException();
        }
        buffer = await socket.ReadBufferAsync(AAC_ADTSParser.aac_adts_sampleSize);
        //processor.byteOffset += AAC_ADTSParser.aac_adts_sampleSize;
        sampleLength = AAC_ADTSParser.aac_adts_sampleSize;
    }

    sample = MediaStreamSample.CreateFromBuffer(buffer, processor.timeOffSet);
    sample.Duration = AAC_ADTSParser.aac_adts_sampleDuration;
    sample.KeyFrame = true;
    processor.timeOffSet = processor.timeOffSet.Add(AAC_ADTSParser.aac_adts_sampleDuration);

    return new Tuple<MediaStreamSample, uint>(sample, sampleLength);
}
private MediaStreamSample CreateVideoSample(byte[] buf)
{
    if (videoStart.Ticks == 0)
    {
        videoStart = DateTime.Now;
    }

    MediaStreamSample sample = MediaStreamSample.CreateFromBuffer(buf.AsBuffer(), DateTime.Now - videoStart);
    sample.DecodeTimestamp = sample.Timestamp;
    sample.Duration = TimeSpan.Zero;

    // buf[4] is the full NAL header byte (after the 4-byte Annex B start code),
    // so these values include the nal_ref_idc bits as well as the type.
    switch (buf[4])
    {
        case 0x65: // IDR slice
            sample.KeyFrame = true;
            //Debug.WriteLine("I-frame");
            break;
        case 0x67:
            //Debug.WriteLine("SPS");
            break;
        case 0x68:
            //Debug.WriteLine("PPS");
            break;
        case 0x61: // non-IDR slice
            break;
        default:
            Debug.WriteLine("Unrecognized data: " + buf[4].ToString());
            break;
    }
    return sample;
}
private void OnMediaSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    // Retrieve the deferral for the request to ensure the consumer doesn't time out.
    var request = args.Request;
    var deferral = request.GetDeferral();

    var encoding = this.sourceStream.Format.Encoding;
    var bytesForBuffer = new byte[(uint)(this.sampleDuration.TotalSeconds * encoding.Bitrate / 8)];
    var bytesRetrieved = this.sourceStream.Read(bytesForBuffer, 0, bytesForBuffer.Length);
    if (bytesRetrieved == bytesForBuffer.Length)
    {
        var mediaSampleBuffer = CryptographicBuffer.CreateFromByteArray(bytesForBuffer);
        var mediaSample = MediaStreamSample.CreateFromBuffer(mediaSampleBuffer, this.totalPlaybackDuration);
        mediaSample.KeyFrame = true;
        mediaSample.Duration = this.sampleDuration;
        request.Sample = mediaSample;
        this.totalPlaybackDuration += this.sampleDuration;
    }
    deferral.Complete();
}
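The buffer size above follows from duration in seconds × bitrate in bits/s ÷ 8. With illustrative figures (not from the source), 100 ms samples from a 128 kbit/s stream work out to 0.1 × 128000 ÷ 8 = 1600 bytes per request.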
/// <summary>
/// Processes a NAL.
/// </summary>
/// <param name="nal">The NAL to be processed.</param>
/// <returns>Type of the NAL.</returns>
private int ProcessNal(Nal nal)
{
    // Get the NAL type: the low five bits of the byte following the
    // 4-byte Annex B start code.
    int nalType = -1;
    if (nal.Buffer.Length > 4)
    {
        byte[] header = new byte[5];
        nal.Buffer.CopyTo(0, header, 0, 5);
        nalType = (header[0] == 0 && header[1] == 0 && header[2] == 0 && header[3] == 1) ? (header[4] & 0x1F) : -1;
    }
    //Log.Verbose("NAL: type = {0}, len = {1}", nalType, nal.Buffer.Length);

    // Process the first SPS record we encounter.
    if (nalType == 7 && !isDecoding)
    {
        byte[] sps = new byte[nal.Buffer.Length];
        nal.Buffer.CopyTo(sps);
        SpsParser parser = new SpsParser(sps, (int)nal.Buffer.Length);
        //Log.Verbose("SPS: {0}x{1} @ {2}", parser.width, parser.height, parser.fps);

        VideoEncodingProperties properties = VideoEncodingProperties.CreateH264();
        properties.ProfileId = H264ProfileIds.High;
        properties.Width = (uint)parser.width;
        properties.Height = (uint)parser.height;

        streamSource = new MediaStreamSource(new VideoStreamDescriptor(properties));
        streamSource.BufferTime = TimeSpan.Zero;
        streamSource.CanSeek = false;
        streamSource.Duration = TimeSpan.Zero;
        streamSource.SampleRequested += HandleSampleRequested;

        var action = Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.High, () =>
        {
            statusTextBlock.Visibility = Visibility.Collapsed;
            media.SetMediaStreamSource(streamSource);
            media.Play();
            storyboard.Begin();
        });
        isDecoding = true;
    }

    // Queue the frame.
    if (nalType > 0 && isDecoding)
    {
        if (deferral != null)
        {
            // A sample request is already parked in a deferral: satisfy it now.
            request.Sample = MediaStreamSample.CreateFromBuffer(nal.Buffer, new TimeSpan(0));
            lock (availableNals)
            {
                //Log.Verbose("availableNals.Enqueue");
                availableNals.Enqueue(nal);
            }
            deferral.Complete();
            deferral = null;
            request = null;
            //Log.Verbose("Deferral Complete");
        }
        else
        {
            //Log.Verbose("usedNals.Enqueue");
            lock (usedNals)
            {
                usedNals.Enqueue(nal);
            }
        }
    }

    // Return the NAL type.
    return isDecoding ? nalType : -1;
}
private void Mss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (!(args.Request.StreamDescriptor is VideoStreamDescriptor))
    {
        return;
    }

    Debug.WriteLine("requesting sample");
    MediaStreamSourceSampleRequest request = args.Request;
    MpegTS.VideoSample rawSample = null;
    MediaStreamSourceSampleRequestDeferral deferral = request.GetDeferral();

    try
    {
        // Block here for a signal from the MpegTS parser that a sample is ready.
        //if (extractor.SampleCount == 0)
        //    threadSync.WaitOne();

        // Dequeue the raw sample here.
        if (!foundKeyFrame)
        {
            do
            {
                threadSync.WaitOne();
                rawSample = extractor.DequeueNextSample(false);
            } while (rawSample == null || extractor.SampleCount == 0);
        }
        else
        {
            if (extractor.SampleCount > 0)
            {
                rawSample = extractor.DequeueNextSample(false);
            }

            if (rawSample == null)
            {
                request.Sample = emptySample;
                deferral.Complete();
                return;
            }
        }

        //if (!gotT0)
        //{
        //    gotT0 = true;
        //    //T0.TotalMilliseconds = 33.3667;
        //}

        // Check the max size of the current buffer; grow it if needed.
        if (buff.Capacity < rawSample.Length)
        {
            buff = new Windows.Storage.Streams.Buffer((uint)rawSample.Length);
            bStream.Dispose();
            //bStream = sample.Buffer.AsStream();
            bStream = buff.AsStream();
        }

        // Create our sample here; may need to keep the initial timestamp for relative time?
        sample = MediaStreamSample.CreateFromBuffer(buff, new TimeSpan(T0.Ticks * frameCount));
        bStream.Position = 0;

        // Write the raw sample to the request sample stream.
        rawSample.WriteToStream(bStream);
        sample.Buffer.Length = (uint)rawSample.Length;
        Debug.WriteLine("sample length: {0}", rawSample.Length);

        //sample.DecodeTimestamp = new TimeSpan(T0.Ticks * frameCount);
        sample.Duration = T0;
        sample.KeyFrame = ScanForKeyframe(bStream); //rawSample.Length > 3000;

        // Not sure if this is correct... this just tells us whether the MpegTS
        // continuity counter (0-15) was in order for all Mpeg packets in the sample.
        sample.Discontinuous = !lastFrame;
        lastFrame = rawSample.IsComplete;

        //if (!foundKeyFrame)
        //    sample = emptySample;
        //else
        ++frameCount;

        // You could also create the MediaStreamSample using CreateFromStreamAsync(...):
        //MediaStreamSample sample = await MediaStreamSample.CreateFromStreamAsync(inputStream, sampleSize, timeOffset);
        //sample.Duration = sampleDuration;
        //sample.KeyFrame = true;
        // ... and increment the time and byte offsets:
        //byteOffset += sampleSize;
        //timeOffset = timeOffset.Add(sampleDuration);

        request.Sample = sample;
    }
    catch (Exception ex)
    {
        var exStr = ex.ToString();
    }
    finally
    {
        deferral.Complete();
    }

    Debug.WriteLine("exit request sample");
}
private void flvStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;
    MediaStreamSourceSampleRequestDeferral deferral = request.GetDeferral();
    FlvFile flvFile = mediaStreamFileSource as FlvFile;
    FlvTag flvTag = null;
    MemoryStream stream = null;
    MediaStreamSample sample = null;

    try
    {
        if (flvFile != null)
        {
            if (request.StreamDescriptor is VideoStreamDescriptor)
            {
                flvTag = flvFile.FlvFileBody.CurrentVideoTag;
                if (flvTag.VideoData.CodecID == CodecID.AVC)
                {
                    byte[] by = flvTag.VideoData.AVCVideoPacket.NALUs;
                    if (by != null && by.Length > 0)
                    {
                        MemoryStream srcStream = new MemoryStream(by);
                        stream = new MemoryStream();
                        if (flvTag.VideoData.FrameType == FrameType.Keyframe)
                        {
                            if (NALUnitHeader != null)
                            {
                                stream.Write(NALUnitHeader, 0, NALUnitHeader.Length);
                            }
                        }

                        // Rewrite length-prefixed NALUs into Annex B form: replace each
                        // big-endian 4-byte length with the H.264 start code.
                        using (BinaryReader reader = new BinaryReader(srcStream))
                        {
                            var sampleSize = srcStream.Length;
                            while (sampleSize > 4L)
                            {
                                var ui32 = reader.ReadUInt32();
                                var count = OldSkool.swaplong(ui32);
                                stream.Write(h264StartCode, 0, h264StartCode.Length);
                                stream.Write(reader.ReadBytes((int)count), 0, (int)count);
                                sampleSize -= 4 + (uint)count;
                            }
                        }

                        if (stream != null && stream.Length > 0)
                        {
                            IBuffer buffer = stream.ToArray().AsBuffer();
                            stream.Position = 0;
                            sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                            sample.KeyFrame = flvTag.VideoData.FrameType == FrameType.Keyframe;
                        }
                    }
                }
                else
                {
                    IBuffer buffer = flvTag.VideoData.RawData.AsBuffer();
                    sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                    sample.KeyFrame = flvTag.VideoData.FrameType == FrameType.Keyframe;
                }
            }
            else
            {
                byte[] by = null;
                flvTag = flvFile.FlvFileBody.CurrentAudioTag;
                switch (flvTag.AudioData.SoundFormat)
                {
                    case SoundFormat.AAC:
                        by = (flvTag.AudioData.SoundData as AACAudioData).RawAACFrameData;
                        break;
                    case SoundFormat.MP3:
                        by = flvTag.AudioData.SoundData.RawData;
                        break;
                    case SoundFormat.ADPCM:
                        by = flvTag.AudioData.SoundData.RawData;
                        break;
                }

                if (by != null && by.Length > 0)
                {
                    stream = new MemoryStream(by);
                    IBuffer buffer = by.AsBuffer();
                    sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                    sample.KeyFrame = true;
                }
            }
        }

        // Report the sample.
        request.Sample = sample;
    }
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("Sample error: " + e.Message);
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        if (deferral != null)
        {
            deferral.Complete();
        }
    }
}
private async void Mss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (!_sampleWatch.IsRunning)
    {
        _sampleWatch.Start();
    }
    //Debug.Write("+");

    // Test flush:
    //var frames = _frameServer.ReadAllFrames();
    //if (frames != null)
    //{
    //    Debug.Write($"F:{frames.Count}");
    //    args.Request.Sample = MediaStreamSample.CreateFromBuffer(frames.Content.AsBuffer(), frames.TimeIndex);
    //    args.Request.Sample.Duration = frames.Duration;
    //}
    //else
    //{
    //    Debug.Write($"NULL");
    //}

    // Test multiple frames:
    //var timeout = TimeSpan.FromMilliseconds(_frameTimeout.TotalMilliseconds * 5);
    //var stopwatch = Stopwatch.StartNew();
    //var frames = _frameServer.ReadFrames(args.Request, timeout, 5);
    //args.Request.Sample = MediaStreamSample.CreateFromBuffer(frames.Content.AsBuffer(), frames.TimeIndex);
    //args.Request.Sample.Duration = frames.Duration;
    //if (stopwatch.Elapsed > timeout)
    //{
    //    Debug.Write($" TO: {stopwatch.ElapsedMilliseconds.ToString("#,#")}ms ");
    //}

    // Test single frames:
    //var stopwatch = Stopwatch.StartNew();
    var sample = _frameServer.GetSample(_frameTimeout);
    if (sample != null && sample.Count > 0)
    {
        //Debug.Write("T");
        args.Request.Sample = MediaStreamSample.CreateFromBuffer(sample.Content.AsBuffer(), sample.TimeIndex);
        args.Request.Sample.Duration = sample.Duration;

        if (_sampleRequestCount % 32 == 0)
        {
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                _mediaPosition = _mediaElement.Position;
                _mediaElement.Position = sample.TimeIndex;
            });
            Debug.WriteLine($"\nRQT: {_sampleWatch.Elapsed} - STI: {sample.TimeIndex} - MEP: {_mediaPosition}, RQCNT: {_sampleRequestCount}, SCNT: {sample.Count}, SDUR: {sample.Duration}, {(uint)(_sampleRequestCount / _sampleWatch.Elapsed.TotalSeconds)}RQ/s");
        }
    }
    ++_sampleRequestCount;

    //if (_frameServer.TryReadFrame(out var frame, _frameTimeout))
    //{
    //    //Debug.Write("T");
    //    args.Request.Sample = MediaStreamSample.CreateFromBuffer(frame.Content.AsBuffer(), frame.TimeIndex);
    //    args.Request.Sample.Duration = frame.Duration;
    //    if (frame.Index % (32 * 5) == 0)
    //    {
    //        Debug.WriteLine($"\nSR {_sampleWatch.Elapsed} - {frame.TimeIndex}: F#{frame.Index}, R#{_sampleRequestCount}, {(uint)(_sampleRequestCount / _sampleWatch.Elapsed.TotalSeconds)}R/s");
    //    }
    //}
    //++_sampleRequestCount;
    //else
    //{
    //    Debug.Write("F");
    //    if (stopwatch.Elapsed > _frameTimeout)
    //    {
    //        Debug.Write($" TO: {stopwatch.ElapsedMilliseconds.ToString("#,#")}ms ");
    //    }
    //}
    //Debug.Write("-");
}