/// <summary>
/// Serves one audio or video sample per pipeline request, holding a deferral
/// across the asynchronous buffering call.
/// </summary>
/// <param name="sender">The media stream source issuing the request.</param>
/// <param name="args">Carries the sample request to fulfil.</param>
private async void OnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    var request = args.Request;
    var deferral = request.GetDeferral();
    try
    {
        // Await the buffering helper directly instead of chaining ContinueWith:
        // the original continuation read prevTask.Result (which rethrows any
        // fault) and could leave the deferral incomplete on failure, stalling
        // the pipeline.
        if (request.StreamDescriptor is AudioStreamDescriptor)
        {
            request.Sample = await _BufferingHelper.GetAudioAsync();
        }
        else
        {
            request.Sample = await _BufferingHelper.GetVideoAsync();
        }
    }
    finally
    {
        // Always release the request, even if the helper throws.
        deferral.Complete();
    }
}
/// <summary>
/// Reads the next fixed-size chunk of the backing stream and hands it to the
/// pipeline as a key-frame sample; leaving the request unanswered past the end
/// of the stream signals end of playback.
/// </summary>
async void MSS_SampleRequested(Windows.Media.Core.MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;

    // Past the end of the stream: produce no sample, ending playback.
    if (byteOffset + sampleSize > mssStream.Size)
    {
        return;
    }

    MediaStreamSourceSampleRequestDeferral deferral = request.GetDeferral();

    IInputStream inputStream = mssStream.GetInputStreamAt(byteOffset);
    // Build the sample straight from the stream (CreateFromBuffer would also work).
    MediaStreamSample sample = await MediaStreamSample.CreateFromStreamAsync(inputStream, sampleSize, timeOffset);
    sample.Duration = sampleDuration;
    sample.KeyFrame = true;

    // Advance the running byte and time cursors for the next request.
    byteOffset += sampleSize;
    timeOffset += sampleDuration;

    request.Sample = sample;
    deferral.Complete();
}
/// <summary>
/// Serves video sample requests by blocking until the producer publishes the
/// next frame, then wraps its Direct3D11 surface in a MediaStreamSample.
/// A null published frame ends the stream.
/// </summary>
private void OnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (args.Request.StreamDescriptor is VideoStreamDescriptor)
    {
        var deferral = args.Request.GetDeferral();
        // Block this pipeline thread until the producer signals a frame
        // (or signals end-of-stream by leaving nextSample null).
        nextSampleReady.WaitOne();
        if (nextSample != null)
        {
            // Timestamps are made relative to the start of the encode session.
            args.Request.Sample = MediaStreamSample.CreateFromDirect3D11Surface(nextSample.Surface, nextSample.Timestamp - encodingStart);
            // Manually forcing the surface to be disposed helps with memory consumption
            // As this is the only consumer, this is safe
            nextSample.Surface.Dispose();
        }
        else
        {
            // Null sample tells the pipeline the stream has ended.
            args.Request.Sample = null;
        }
        // Hand the slot back to the producer, then release the request.
        sampleProcessed.Set();
        deferral.Complete();
    }
}
/// <summary>
/// Forwards video sample requests to the sample maker; requests for any other
/// stream are ignored.
/// </summary>
void mss_SampleRequested(Windows.Media.Core.MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (!(args.Request.StreamDescriptor is VideoStreamDescriptor))
    {
        return; // only the video stream is serviced here
    }
    _sampleMaker.GenerateSample(args.Request);
}
/// <summary>
/// Dequeues a NAL and gives it to the decoder. If no NAL is buffered yet, the
/// request and its deferral are parked in fields for later completion by the
/// producer.
/// </summary>
private void HandleSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    Nal pending = null;
    lock (usedNals)
    {
        // Take the oldest filled NAL, if any is ready.
        if (usedNals.Count > 0)
        {
            pending = usedNals.Dequeue();
        }
    }

    if (pending == null)
    {
        // Nothing buffered: defer until a NAL arrives.
        request = args.Request;
        deferral = args.Request.GetDeferral();
        return;
    }

    args.Request.Sample = MediaStreamSample.CreateFromBuffer(pending.Buffer, new TimeSpan(0));
    lock (availableNals)
    {
        // Recycle the NAL so the producer can refill it.
        availableNals.Enqueue(pending);
    }
}
/// <summary>
/// Decodes the next chunk into a pooled buffer and delivers it as a sample.
/// When the decoder is drained it rewinds to the start, returning a null
/// sample for this request.
/// </summary>
private void OnMediaSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs e)
{
    var scratch = GetBuffer();
    var decoded = _mediaDecoder.ReadSample(scratch, scratch.Capacity);

    MediaStreamSample sample = null;
    if (decoded.Length == 0)
    {
        // Nothing left: rewind so the source loops from the beginning.
        _currentTime = 0.0;
        _mediaDecoder.Seek(0);
    }
    else
    {
        sample = MediaStreamSample.CreateFromBuffer(decoded, TimeSpan.FromSeconds(_currentTime));
        sample.Processed += OnSampleProcessed;
        var seconds = _mediaDecoder.GetDurationFromBufferSize(decoded.Length);
        sample.Duration = TimeSpan.FromSeconds(seconds);
        _currentTime += seconds;
    }

    e.Request.Sample = sample;
}
/// <summary>
/// Serves FLV audio/video sample requests. Repeatedly reads tags for the
/// requested stream until one yields a sample, holding a deferral the whole
/// time.
/// NOTE(review): if the source never yields another tag this loop never exits —
/// confirm upstream guarantees eventual data or end-of-stream.
/// </summary>
public async void SampleRequested(MediaStreamSource s, MediaStreamSourceSampleRequestedEventArgs e)
{
    var req = e.Request;
    var deferal = req.GetDeferral();
    while (req.Sample == null)
    {
        if (req.StreamDescriptor is AudioStreamDescriptor)
        {
            var flvTag = await this.ReadAudioTag();
            if (flvTag != null)
            {
                req.Sample = await flvTag.CreateAudioSample();
            }
        }
        if (req.StreamDescriptor is VideoStreamDescriptor)
        {
            var flvTag = await this.ReadVideoTag();
            if (flvTag != null)
            {
                req.Sample = await flvTag.CreateVideoSample(FirstVideo, newFrame);// vi <= 2);
                // Only the first sample after a new frame carries the flag.
                newFrame = false;
            }
        }
        if (req.Sample == null)
        {
            Debug.WriteLine("SampleRequested:NULL");
        }
    }
    deferal.Complete();
}
// Parks an audio sample request; a producer thread is expected to fill
// pendingAudioRequest.Sample and complete pendingAudioDeferral later.
// NOTE(review): an already-pending request is overwritten here and its deferral
// is never completed — confirm the pipeline issues one request at a time.
public void AudioSampleRequested(MediaStreamSourceSampleRequestedEventArgs args)
{
    lock (audioQueueLock)
    {
        pendingAudioRequest = args.Request;
        pendingAudioDeferral = args.Request.GetDeferral();
    }
}
// Parks a video sample request; a producer thread is expected to fill
// pendingVideoRequest.Sample and complete pendingVideoDeferral later.
// NOTE(review): an already-pending request is overwritten here and its deferral
// is never completed — confirm the pipeline issues one request at a time.
public void VideoSampleRequested(MediaStreamSourceSampleRequestedEventArgs args)
{
    lock (videoQueueLock)
    {
        pendingVideoRequest = args.Request;
        pendingVideoDeferral = args.Request.GetDeferral();
    }
}
/// <summary>
/// Try to serve a video frame to the given Media Foundation sample request by
/// dequeuing a video frame from the internal queue. If no frame is available,
/// store the request and its deferral for later serving.
/// </summary>
/// <param name="args">The Media Foundation sample request to attempt to serve</param>
public void TryServeVideoFrame(MediaStreamSourceSampleRequestedEventArgs args)
{
    // Check if the local video stream is enabled
    if (_frameQueue == null) //< TODO - not the correct check (though also useful)
    {
        // End of stream
        args.Request.Sample = null;
        return;
    }

    // Try to read the next available frame packet
    I420VideoFrameStorage frameStorage;
    lock (_deferralLock)
    {
        if (!_frameQueue.TryDequeue(out frameStorage))
        {
            // Not available yet, wait for it
            //_lateFrameStat.Track();
            if (_deferral != null)
            {
                // Already a frame pending, and now another one.
                // The earlier one will be skipped (we don't keep track of it for simplicity).
                //_frameQueue.FrameSkip.Track();
                // NOTE(review): the earlier deferral is overwritten below without
                // being completed — confirm the pipeline tolerates that.
            }
            args.Request.ReportSampleProgress(0);
            _request = args.Request;
            _deferral = _request.GetDeferral();
            return;
        }
    }

    // Calculate frame timestamp
    // NOTE(review): timestamps assume a fixed 30 fps cadence regardless of the
    // actual capture rate — confirm this matches the source.
    TimeSpan timestamp = TimeSpan.FromSeconds(_frameCount / 30.0);
    ++_frameCount;

    // Get a sample
    uint pixelSize = frameStorage.Width * frameStorage.Height;
    uint byteSize = (pixelSize / 2 * 3); // I420 = 12 bits per pixel
    //Debug.Assert(byteSize == frame.Size);
    var sample = _streamSamplePool.Pop(byteSize, timestamp);
    sample.Duration = TimeSpan.FromSeconds(1.0 / 30.0);

    // Copy the frame data into the sample's buffer
    uint copySize = Math.Min((uint)frameStorage.Capacity, byteSize);
    frameStorage.Buffer.CopyTo(0, sample.Buffer, 0, (int)copySize);
    sample.Buffer.Length = copySize; // Somewhat surprisingly, this is not automatic

    // Recycle the frame storage itself
    _frameQueue.RecycleStorage(frameStorage);

    // Return the requested sample
    args.Request.Sample = sample;
}
/// <summary>
/// Routes the sample request to the WebRTC manager whose stream key ends with
/// this user's id; does nothing when no matching stream exists.
/// </summary>
void mss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    var discordService = (DiscordService)App.Current.Services.GetRequiredService<IDiscordService>();
    string userSuffix = UserId.ToString();
    foreach ((string key, WebrtcManager manager) in discordService.Streams)
    {
        if (!key.EndsWith(userSuffix))
        {
            continue;
        }
        manager.GenerateSample(args.Request);
        return;
    }
}
/// <summary>
/// Invoked when the pipeline requests a new sample. Fills the request while a
/// full buffer remains in the file; past end-of-file it completes an empty
/// deferral, which signals end of stream.
/// </summary>
private void MediaSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    var privateRequest = args.Request;
    long newOffset = _byteOffset + BUFFER_SIZE;
    if (newOffset <= _fileStream.Length)
    {
        // Another full buffer is available — fill the request with it.
        // (Previously the return value was captured into an unused local.)
        SetSample(privateRequest);
    }
    else
    {
        // No more data: complete a deferral with no sample to end playback.
        privateRequest.GetDeferral().Complete();
    }
}
/// <summary>
/// Media stream source sample requested callback: dispatches the request to
/// the video or audio handler based on the stream descriptor.
/// </summary>
private void _mss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    bool isVideo = args.Request.StreamDescriptor == _videoDesc;
    if (isVideo)
    {
        _streamSource.VideoSampleRequested(args);
    }
    else
    {
        // Anything that is not the video descriptor is treated as audio.
        _streamSource.AudioSampleRequested(args);
    }
}
/// <summary>
/// Blocks until the producer enqueues the next raw frame, then converts it to
/// a video sample. A null entry in the queue is the termination sentinel.
/// </summary>
public void VideoSampleRequested(MediaStreamSourceSampleRequestedEventArgs args)
{
    // Take() parks this thread until a sample (or the null sentinel) arrives.
    var payload = pendingSamples.Take();
    if (payload == null)
    {
        return; // woken up only to shut down
    }
    args.Request.Sample = CreateVideoSample(payload);
}
/// <summary>
/// Forwards video sample requests to the MP4 sampler. Exceptions are logged
/// rather than propagated so the media pipeline is not torn down.
/// </summary>
void mp4_SampleRequested(Windows.Media.Core.MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    try
    {
        if (args.Request.StreamDescriptor is VideoStreamDescriptor)
        {
            _mp4Sampler.GetSample(args.Request);
        }
    }
    catch (Exception excp)
    {
        // Fixed typo in the log message ("SampleRequeste" -> "SampleRequested").
        Debug.WriteLine("Exception mp4_SampleRequested. " + excp.Message);
    }
}
// Intentionally empty sample-requested handler: samples are supplied elsewhere.
// The 'async' modifier was removed because the body contains no await
// (compiler warning CS1998); the event-handler signature is unaffected.
private void Source_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    // Remnant of a throttling experiment, kept for reference:
    //if (args.Request.Sample == null) return;
    //var length = args.Request.Sample.Buffer.Length;
    //Debug.WriteLine(length);
    //if (length < 20240)
    //{
    //    var deferral = args.Request.GetDeferral();
    //    //Debug.WriteLine("R");
    //    await Task.Delay(400);
    //    deferral.Complete();
    //}
    //Debug.WriteLine("Sample requested");
}
/// <summary>
/// Pulls the next available sample from the video view-model and hands it to
/// the pipeline. Compiled out entirely when EMULATOR_ON is defined.
/// </summary>
private void MediaStreamSource_SampleRequested(
    MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
#if EMULATOR_ON
#else
    var pending = ViewModel.VideoViewModel.GetSample();
    if (pending == null)
    {
        return; // nothing ready yet; leave the request unanswered
    }
    args.Request.Sample = MediaStreamSample.CreateFromBuffer(pending.Buffer.AsBuffer(), pending.TimeIndex);
    args.Request.Sample.Duration = pending.Duration;
#endif
}
/// <summary>
/// Dispatches sample requests to the advanced media source, routing by stream
/// descriptor type; ignored until a media source has been set.
/// </summary>
void MSS_SampleRequested(Windows.Media.Core.MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (!m_hasSetMediaSource || advanced_media_source == null)
    {
        return; // no source attached yet
    }

    switch (args.Request.StreamDescriptor)
    {
        case VideoStreamDescriptor _:
            advanced_media_source.GenerateVideoSample(args.Request);
            break;
        case AudioStreamDescriptor _:
            advanced_media_source.GenerateAudioSample(args.Request);
            break;
    }
}
/// <summary>
/// Serves the queued video sample if one exists; otherwise returns a zero-byte
/// sample so the decoder knows the source is still alive (a deferral here was
/// observed to cause serious lag).
/// </summary>
public void VideoSampleRequested(MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSample queued = null;
    lock (videoQueueLock)
    {
        if (pendingVideoSample != null)
        {
            queued = pendingVideoSample;
            pendingVideoSample = null;
            queueEmpty.Set();
        }
    }

    if (queued != null)
    {
        args.Request.Sample = queued;
        return;
    }

    // If we don't have any sample right now, we just return an empty sample. This tells the decoder
    // that we're still alive here. Doing a sample deferral seems to cause serious lag issues.
    args.Request.Sample = MediaStreamSample.CreateFromBuffer(new byte[0].AsBuffer(), TimeSpan.Zero);
}
/// <summary>
/// Callback from the Media Foundation pipeline when a new video frame is needed.
/// Picks the bridge that matches the requesting source (local or remote) and
/// asks it to serve the frame; unknown senders are ignored.
/// </summary>
/// <param name="sender">The stream source requesting a new sample.</param>
/// <param name="args">The sample request to fullfil.</param>
private void OnMediaStreamSourceRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    VideoBridge bridge;
    if (sender == localVideoSource)
    {
        bridge = localVideoBridge;
    }
    else if (sender == remoteVideoSource)
    {
        bridge = remoteVideoBridge;
    }
    else
    {
        return; // request from an unknown source
    }
    bridge.TryServeVideoFrame(args);
}
/// <summary>
/// Serves one captured frame per sample request while recording. A null frame
/// from the generator, any exception, or a stopped/closed recorder ends the
/// stream (null sample) and tears the recorder down.
/// </summary>
private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (_isRecording && !_closed)
    {
        try
        {
            // Blocks until the capture source produces the next frame.
            using (var frame = _frameGenerator.WaitForNewFrame())
            {
                if (frame == null)
                {
                    // Generator is done: end the stream and clean up.
                    args.Request.Sample = null;
                    DisposeInternal();
                    return;
                }
                if (_isPreviewing)
                {
                    // Mirror the frame to the preview under its own lock.
                    lock (_previewLock)
                    {
                        _preview.PresentSurface(frame.Surface);
                    }
                }
                var timeStamp = frame.SystemRelativeTime;
                var sample = MediaStreamSample.CreateFromDirect3D11Surface(frame.Surface, timeStamp);
                args.Request.Sample = sample;
            }
        }
        catch (Exception e)
        {
            Debug.WriteLine(e.Message);
            Debug.WriteLine(e.StackTrace);
            Debug.WriteLine(e);
            // On any failure, end the stream and clean up.
            args.Request.Sample = null;
            DisposeInternal();
        }
    }
    else
    {
        // Not recording (or already closed): end the stream.
        args.Request.Sample = null;
        DisposeInternal();
    }
}
/// <summary>
/// Serves a sample request from the MKV file source. Reporting a null sample
/// ends playback; a decode failure raises the source's Closed event via
/// NotifyError. The deferral is always completed in the finally block.
/// </summary>
void mkvStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSample sample = null;
    MediaStreamSourceSampleRequest request = args.Request;
    MediaStreamSourceSampleRequestDeferral deferal = request.GetDeferral();
    try
    {
        var mkvFs = mediaStreamFileSource as CCPlayer.HWCodecs.Matroska.MKV.MKVFileSource;
        FrameBufferData fd = mkvFs.GetFrameData(request.StreamDescriptor);
        if (fd.Data != null)
        {
            sample = MediaStreamSample.CreateFromBuffer(fd.Data, fd.TimeCode);
            sample.Duration = fd.Duration;
            sample.KeyFrame = fd.KeyFrame;
            // Look up subtitle frames and forward them to the transport control.
            MessengerInstance.Send<Message>(new Message("SubtitleFrameInMKV", mkvFs.SubtitleFrames), TransportControlViewModel.NAME);
        }
        else if (System.Diagnostics.Debugger.IsAttached)
        {
            // Reporting null lets playback end naturally: the MediaElement will
            // raise its MediaEnded event.
            System.Diagnostics.Debug.WriteLine("***************************** null이 보고 되었음. 종료 코드가 들어옴 => MediaElement의 MediaEndedEvent 발생될 것임.");
        }
        request.Sample = sample;
    }
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("********************************** 샘플오류 또는 강제 종료 => MediaStreamSource의 Closed 이벤트가 발생될 것임 : " + e.Message);
        // Raises the source's Closed event.
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        if (deferal != null)
        {
            deferal.Complete();
        }
    }
}
/// <summary>
/// While recording, copies the captured frame's surface into a freshly created
/// GPU texture (under the multithread lock) and submits that copy as the
/// sample; otherwise ends the stream with a null sample.
/// </summary>
private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (_isRecording)
    {
        using (var frame = GetNextFrame())
        using (var lockSession = _multiThread.Lock())
        using (var sourceTexture = Direct3D11Texture2D.CreateFromDirect3DSurface(frame.Surface))
        // Create a texture matching the source, bindable as a shader resource.
        using (var surface = _device.CreateTexture2D(new Direct3D11Texture2DDescription()
        {
            Base = sourceTexture.Description2D.Base,
            MipLevels = sourceTexture.Description2D.MipLevels,
            ArraySize = sourceTexture.Description2D.ArraySize,
            Usage = Direct3DUsage.Default,
            BindFlags = Direct3DBindings.ShaderResource,
            CpuAccessFlags = 0,
            MiscFlags = 0
        }))
        {
            var timeStamp = frame.SystemRelativeTime;
            // GPU copy from the captured frame into our own texture, so the
            // sample does not reference the capture frame's surface directly.
            _deviceContext.CopyResource(surface, frame.Surface);
            try
            {
                var sample = MediaStreamSample.CreateFromDirect3D11Surface(surface, timeStamp);
                args.Request.Sample = sample;
            }
            catch (Exception e)
            {
                // On failure, a null sample ends the stream.
                System.Diagnostics.Debug.WriteLine(e);
                args.Request.Sample = null;
            }
        }
    }
    else
    {
        // Not recording: signal end of stream.
        args.Request.Sample = null;
    }
}
/// <summary>
/// Reads one sample-duration worth of PCM/encoded bytes from the source stream
/// and delivers it as a key-frame sample; when a full buffer can no longer be
/// produced, no sample is set and the stream ends.
/// </summary>
private void OnMediaSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    // Retrieve the deferral for the request to ensure the consumer doesn't time out
    var request = args.Request;
    var deferral = request.GetDeferral();

    var encoding = this.sourceStream.Format.Encoding;
    // Buffer sized for sampleDuration seconds of data at the stream's bitrate.
    var bytesForBuffer = new byte[(uint)(this.sampleDuration.TotalSeconds * encoding.Bitrate / 8)];

    // BUG FIX: Stream.Read may return fewer bytes than requested even before
    // end-of-stream, so loop until the buffer is full or the stream is drained.
    // The previous single Read could drop data mid-stream on a short read.
    int bytesRetrieved = 0;
    while (bytesRetrieved < bytesForBuffer.Length)
    {
        int read = this.sourceStream.Read(bytesForBuffer, bytesRetrieved, bytesForBuffer.Length - bytesRetrieved);
        if (read == 0)
        {
            break; // end of stream
        }
        bytesRetrieved += read;
    }

    if (bytesRetrieved == bytesForBuffer.Length)
    {
        var mediaSampleBuffer = CryptographicBuffer.CreateFromByteArray(bytesForBuffer);
        var mediaSample = MediaStreamSample.CreateFromBuffer(mediaSampleBuffer, this.totalPlaybackDuration);
        mediaSample.KeyFrame = true;
        mediaSample.Duration = this.sampleDuration;
        request.Sample = mediaSample;
        // Advance the playback clock for the next sample's timestamp.
        this.totalPlaybackDuration += this.sampleDuration;
    }

    deferral.Complete();
}
/// <summary>
/// Serves one captured frame per sample request while recording. A null frame,
/// any exception, or a stopped/closed recorder ends the stream with a null
/// sample and disposes the recorder.
/// </summary>
private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (_isRecording && !_closed)
    {
        try
        {
            // Blocks until the next frame is captured.
            using (var frame = WaitForNewFrame())
            {
                if (frame == null)
                {
                    // No more frames: end the stream and clean up.
                    args.Request.Sample = null;
                    Dispose();
                    return;
                }
                var timeStamp = frame.SystemRelativeTime;
                var sample = MediaStreamSample.CreateFromDirect3D11Surface(frame.Surface, timeStamp);
                args.Request.Sample = sample;
            }
        }
        catch (Exception e)
        {
            // Break into the debugger when attached; then log and shut down.
            System.Diagnostics.Debugger.Break();
            Debug.WriteLine(e.Message);
            Debug.WriteLine(e.StackTrace);
            Debug.WriteLine(e);
            args.Request.Sample = null;
            Dispose();
        }
    }
    else
    {
        // Not recording (or already closed): end the stream.
        args.Request.Sample = null;
        Dispose();
    }
}
/// <summary>Delegates every sample request straight to the generator.</summary>
private void OnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args) =>
    m_generator.Generate(args.Request);
/// <summary>
/// Callback from the Media Foundation pipeline when a new video frame is needed;
/// delegates the request to the video bridge.
/// </summary>
/// <param name="sender">The stream source requesting a new sample.</param>
/// <param name="args">The sample request to fullfil.</param>
private void OnMediaStreamSourceRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args) =>
    _videoBridge.TryServeVideoFrame(args);
/// <summary>
/// Video stream source sample requested callback; forwards the request to the
/// underlying stream source.
/// </summary>
private void _videoMss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args) =>
    _streamSource.VideoSampleRequested(args);
/// <summary>
/// Encoding a Win2D surface (CanvasRenderTarget) as a video frame: decodes the
/// current unpacked item's compressed buffer into a CanvasBitmap, draws it onto
/// a render target, and submits that surface as the next sample. Setting a null
/// sample (bad state, decode failure at end, or list exhausted) stops encoding.
/// </summary>
private async void OnMSSSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (parent != null)
    {
        parent.StartWritingOutput("OnSampleRequested " + frameCounter.ToString(), 0);
    }
    if (unpackList == null)
    {
        if (parent != null)
        {
            parent.StartWritingOutput("Unpack List Null Error!", 1);
        }
        //this will stop the encoding
        args.Request.Sample = null;
        return;
    }
    int len = unpackList.Count;
    if (frameCounter >= len)
    {
        if (parent != null)
        {
            parent.StartWritingOutput("Encoding Completed.", 1);
        }
        //this will stop the encoding
        args.Request.Sample = null;
        return;
    }
    if ((frameCounter < 0) || (0 == len))
    {
        if (parent != null)
        {
            parent.StartWritingOutput("Invalid Frame", 1);
        }
        //this will stop the encoding
        args.Request.Sample = null;
        return;
    }

    //need deferral because CanvasBitmap.LoadAsync takes some time to complete ?
    var deferral = args.Request.GetDeferral();

    UnpackItem unpackItem = unpackList[frameCounter];
    Windows.Storage.Streams.Buffer buffer = unpackItem.compressedBuffer;
    InMemoryRandomAccessStream inMemoryRandomAccessStream = null;
    using (inMemoryRandomAccessStream = new InMemoryRandomAccessStream())
    {
        // Write the compressed bytes to an in-memory stream and rewind so
        // CanvasBitmap.LoadAsync can decode from position 0.
        await inMemoryRandomAccessStream.WriteAsync(buffer);
        await inMemoryRandomAccessStream.FlushAsync();
        inMemoryRandomAccessStream.Seek(0);
        CanvasBitmap tempBitmap = null;
        try
        {
            tempBitmap = await CanvasBitmap.LoadAsync(CanvasDevice.GetSharedDevice(), inMemoryRandomAccessStream);
        }
        catch (Exception e)
        {
            if (parent != null)
            {
                parent.StartWritingOutput("CBM Error : " + e.Message, 1);
            }
        }
        if (tempBitmap != null)
        {
            // Draw the decoded bitmap onto a render target sized to match it.
            CanvasRenderTarget canvasRenderTarget = new CanvasRenderTarget(CanvasDevice.GetSharedDevice(), tempBitmap.SizeInPixels.Width, tempBitmap.SizeInPixels.Height, tempBitmap.Dpi);
            using (CanvasDrawingSession session = canvasRenderTarget.CreateDrawingSession())
            {
                session.Clear(Colors.Black);
                //session.DrawEllipse(new System.Numerics.Vector2(120 + frameCounter * 2, 100), 30, 20, Colors.White);
                session.DrawImage(tempBitmap);
            }
            // Advance the running timestamp by this frame's duration.
            TimeSpan timeLapsed = unpackItem.frameTime;
            Timestamp += timeLapsed;
            //set sample after defferal ? nope ....stop at 1st frame...
            MediaStreamSample sample = MediaStreamSample.CreateFromDirect3D11Surface(canvasRenderTarget, Timestamp);
            args.Request.Sample = sample;
            deferral.Complete();
        }
        else
        {
            // Decode failed: null sample stops the encoding; still complete the deferral.
            args.Request.Sample = null;
            deferral.Complete();
        }
        frameCounter++;
    }
}
/// <summary>
/// Occurs when the MediaStreamSource request a MediaStreamSample for a specified stream.
/// Serves a fixed-size chunk of the downloading audio stream as a key-frame
/// sample, tracking playback position proportionally to bytes consumed. Past
/// the downloaded size, no sample is produced and the stream ends.
/// </summary>
/// <param name="sender">Represents a media source that delivers media samples directly to the media pipeline.</param>
/// <param name="args">Provides the data for the SampleRequested event.</param>
private async void OnStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;
    // check if the sample requested byte offset is within the file size
    if (this.m_byteOffset + sampleSize <= (ulong)this.m_audioStreamDownloader.TotalBytesToReceive)
    {
        //Calculate the current position within the track
        double ratio = (double)this.m_byteOffset / (double)this.m_audioStreamDownloader.TotalBytesToReceive;
        this.m_playerPosition = new TimeSpan((long)(this.CurrentTrack.Duration.Ticks * ratio));

        MediaStreamSourceSampleRequestDeferral deferal = request.GetDeferral();
        var inputStream = m_mediaStream.GetInputStreamAt(this.m_byteOffset);
        if (inputStream != null)
        {
            // create the MediaStreamSample and assign to the request object.
            // You could also create the MediaStreamSample using createFromBuffer(...)
            MediaStreamSample sample = await MediaStreamSample.CreateFromStreamAsync(inputStream, sampleSize, m_timeOffset);
            sample.Duration = sampleDuration;
            sample.KeyFrame = true;

            // increment the time and byte offset
            this.m_byteOffset += sampleSize;
            this.m_timeOffset = this.m_timeOffset.Add(sampleDuration);
            request.Sample = sample;
        }
        // Deferral is completed even when no input stream was available,
        // releasing the request without a sample.
        deferal.Complete();
    }
}
/// <summary>
/// Sample-requested callback implemented by derived sources; invoked by the
/// MediaStreamSource pipeline whenever a new sample is needed.
/// </summary>
protected abstract void MssOnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args);
// Intentionally empty: this handler sets no sample and takes no deferral.
// NOTE(review): presumably samples are delivered through another path — confirm.
private void StreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args) { }
/// <summary>
/// Reads one sample from the Shoutcast stream, transparently reconnecting once
/// after a disconnection (ShoutcastDisconnectionException or COMException).
/// Cancellation aborts silently; any other failure is reported via NotifyError.
/// The deferral is always completed in the finally block.
/// </summary>
private async void MediaStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    var request = args.Request;
    var deferral = request.GetDeferral();
    bool connected = true;
    try
    {
        cancelTokenSource.Token.ThrowIfCancellationRequested();
        try
        {
            await ReadSampleAsync(request).ConfigureAwait(false);
            cancelTokenSource.Token.ThrowIfCancellationRequested();
        }
        catch (ShoutcastDisconnectionException)
        {
            //Reset and reconnect.
            DisconnectSockets();
            connected = false;
        }
        catch (COMException) // exception variable removed: it was unused (CS0168)
        {
            //Usually this is thrown when we get disconnected because of inactivity.
            //Reset and reconnect.
            DisconnectSockets();
            connected = false;
        }
        if (!cancelTokenSource.IsCancellationRequested)
        {
            cancelTokenSource.Token.ThrowIfCancellationRequested();
            if (!connected)
            {
                try
                {
                    // Single reconnection attempt, then retry the read once.
                    await ReconnectSocketsAsync().ConfigureAwait(false);
                    Reconnected?.Invoke(this, EventArgs.Empty);
                    await ReadSampleAsync(request).ConfigureAwait(false);
                }
                catch (Exception)
                {
                    MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.ConnectionToServerLost);
                }
            }
        }
    }
    catch (OperationCanceledException)
    {
        // Shutdown requested: exit quietly.
    }
    catch (Exception)
    {
        MediaStreamSource.NotifyError(MediaStreamSourceErrorStatus.Other);
    }
    finally
    {
        // Never leave the pipeline waiting on this request.
        deferral.Complete();
    }
}
/// <summary>
/// Serves a sample from the current FLV tag. AVC video is rewritten from
/// length-prefixed NALUs to Annex-B start-code format (prepending the NAL unit
/// header on keyframes); other video and AAC/MP3/ADPCM audio are passed through
/// raw. A null sample ends the stream; decode failures raise NotifyError. The
/// deferral is always completed in the finally block.
/// </summary>
private void flvStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;
    MediaStreamSourceSampleRequestDeferral deferal = request.GetDeferral();
    FlvFile flvFile = mediaStreamFileSource as FlvFile;
    FlvTag flvTag = null;
    MemoryStream stream = null;
    MediaStreamSample sample = null;
    try
    {
        if (flvFile != null)
        {
            if (request.StreamDescriptor is VideoStreamDescriptor)
            {
                flvTag = flvFile.FlvFileBody.CurrentVideoTag;
                if (flvTag.VideoData.CodecID == CodecID.AVC)
                {
                    byte[] by = flvTag.VideoData.AVCVideoPacket.NALUs;
                    if (by != null && by.Length > 0)
                    {
                        MemoryStream srcStream = new MemoryStream(by);
                        stream = new MemoryStream();
                        if (flvTag.VideoData.FrameType == FrameType.Keyframe)
                        {
                            // Keyframes get the SPS/PPS header block prepended.
                            if (NALUnitHeader != null)
                            {
                                stream.Write(NALUnitHeader, 0, NALUnitHeader.Length);
                            }
                        }
                        using (BinaryReader reader = new BinaryReader(srcStream))
                        {
                            var sampleSize = srcStream.Length;
                            // Convert each 4-byte big-endian length prefix into
                            // an H.264 start code followed by the NALU payload.
                            while (sampleSize > 4L)
                            {
                                var ui32 = reader.ReadUInt32();
                                var count = OldSkool.swaplong(ui32);
                                stream.Write(h264StartCode, 0, h264StartCode.Length);
                                stream.Write(reader.ReadBytes((int)count), 0, (int)count);
                                sampleSize -= 4 + (uint)count;
                            }
                        }
                        if (stream != null && stream.Length > 0)
                        {
                            IBuffer buffer = stream.ToArray().AsBuffer();
                            stream.Position = 0;
                            sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                            sample.KeyFrame = flvTag.VideoData.FrameType == FrameType.Keyframe;
                        }
                    }
                }
                else
                {
                    // Non-AVC video passes through untouched.
                    IBuffer buffer = flvTag.VideoData.RawData.AsBuffer();
                    sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                    sample.KeyFrame = flvTag.VideoData.FrameType == FrameType.Keyframe;
                }
            }
            else
            {
                byte[] by = null;
                flvTag = flvFile.FlvFileBody.CurrentAudioTag;
                switch (flvTag.AudioData.SoundFormat)
                {
                    case SoundFormat.AAC:
                        by = (flvTag.AudioData.SoundData as AACAudioData).RawAACFrameData;
                        break;
                    case SoundFormat.MP3:
                        by = flvTag.AudioData.SoundData.RawData;
                        break;
                    case SoundFormat.ADPCM:
                        by = flvTag.AudioData.SoundData.RawData;
                        break;
                }
                if (by != null && by.Length > 0)
                {
                    stream = new MemoryStream(by);
                    IBuffer buffer = by.AsBuffer();
                    sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                    sample.KeyFrame = true;
                    request.Sample = sample;
                }
            }
        }
        // Report the sample (null signals end of stream).
        request.Sample = sample;
    }
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("샘플오류 " + e.Message);
        // Raises the source's Closed event.
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        if (deferal != null)
        {
            deferal.Complete();
        }
    }
}
/// <summary>Forwards every sample request to the sample generator.</summary>
void _mss_SampleRequested(Windows.Media.Core.MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args) =>
    _sampleGenerator.GenerateSample(args.Request);
/// <summary>Null source: only traces that a sample was requested.</summary>
static void OnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args) =>
    Debug.WriteLine("NullMediaSource.OnSampleRequested()");
/// <summary>
/// Reads the next fixed-size chunk of the backing stream into the shared input
/// stream field and delivers it as a key-frame sample; past the end of the
/// stream the request is left unanswered, ending playback.
/// </summary>
async void MSS_SampleRequested(Windows.Media.Core.MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;

    // No full chunk remains: produce no sample, which ends the stream.
    if (byteOffset + sampleSize > mssStream.Size)
    {
        return;
    }

    MediaStreamSourceSampleRequestDeferral deferral = request.GetDeferral();

    // Note: the input stream is kept in a field (not a local) here.
    inputStream = mssStream.GetInputStreamAt(byteOffset);
    // Build the sample directly from the stream (CreateFromBuffer would also work).
    MediaStreamSample sample = await MediaStreamSample.CreateFromStreamAsync(inputStream, sampleSize, timeOffset);
    sample.Duration = sampleDuration;
    sample.KeyFrame = true;

    // Advance the byte and time cursors for the next request.
    byteOffset += sampleSize;
    timeOffset += sampleDuration;

    request.Sample = sample;
    deferral.Complete();
}