/// <summary>
/// Handles MediaStreamSource.SampleRequested by reading the next fixed-size
/// chunk of the backing stream and reporting it as a sample. When the
/// requested range runs past the end of the stream, no sample is set, which
/// signals end-of-stream to the media pipeline.
/// </summary>
async void MSS_SampleRequested(Windows.Media.Core.MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;

    // Guard: anything past the end of the file means playback is finished;
    // leaving request.Sample unset tells the pipeline the stream has ended.
    if (byteOffset + sampleSize > mssStream.Size)
    {
        return;
    }

    MediaStreamSourceSampleRequestDeferral deferral = request.GetDeferral();

    IInputStream inputStream = mssStream.GetInputStreamAt(byteOffset);

    // Build the sample directly from the stream; CreateFromBuffer(...) would
    // also work here.
    MediaStreamSample sample = await MediaStreamSample.CreateFromStreamAsync(inputStream, sampleSize, timeOffset);
    sample.Duration = sampleDuration;
    sample.KeyFrame = true;

    // Advance the byte/time cursor for the next request.
    byteOffset += sampleSize;
    timeOffset = timeOffset.Add(sampleDuration);

    request.Sample = sample;
    deferral.Complete();
}
/// <summary>
/// Fulfill a pending Media Foundation video sample request with an incoming
/// video frame, short-circuiting the internal frame queue.
/// </summary>
/// <param name="frame">The incoming I420A video frame to consume.</param>
/// <remarks>
/// This must be called with the <see cref="_deferralLock"/> acquired.
/// Note: the XML doc previously named the parameter "framePacket", which did
/// not match the actual parameter and produced CS1572/CS1573 warnings.
/// </remarks>
private void MakeSampleForPendingRequest(I420AVideoFrame frame)
{
    // Synthesize the frame timestamp from the frame count, assuming a fixed
    // 30 FPS cadence — TODO confirm the source actually delivers 30 FPS.
    TimeSpan timestamp = TimeSpan.FromSeconds(_frameCount / 30.0);
    ++_frameCount;

    // Get a pooled sample sized for a packed I420 frame.
    // FIXME - There are some wrong assumptions around strides here, see MemCpyStride
    uint pixelSize = frame.width * frame.height;
    uint byteSize = (pixelSize / 2 * 3); // I420 = 12 bits per pixel
    //Debug.Assert(byteSize == frame.Size);
    var sample = _streamSamplePool.Pop(byteSize, timestamp);
    sample.Duration = TimeSpan.FromSeconds(1.0 / 30.0);

    // Copy the frame data into the sample's buffer.
    // Unfortunately the C# interface to Windows.Storage.Streams.Buffer seems to
    // only offer a copy from a byte[] buffer, so need to copy first into a temporary
    // one (packed YUV) before copying into the sample's Buffer object.
    byte[] buffer = new byte[byteSize];
    frame.CopyTo(buffer);
    buffer.CopyTo(0, sample.Buffer, 0, (int)byteSize);

    // Hand the sample to Media Foundation and complete the pending deferral,
    // then clear both so the request cannot be completed twice.
    _request.Sample = sample;
    _request.ReportSampleProgress(100);
    _deferral.Complete();
    _request = null;
    _deferral = null;
}
/// <summary>
/// Clear the bridge of any pending frames and reset for reuse.
/// </summary>
public void Clear()
{
    lock (_deferralLock)
    {
        _request = null;

        // Complete any outstanding deferral (with no sample attached) before
        // dropping the reference, so the pipeline is not left waiting.
        if (_deferral != null)
        {
            _deferral.Complete();
        }
        _deferral = null;
    }

    _frameQueue.Clear();
}
/// <summary>
/// SampleRequested handler for MKV playback: pulls the next frame from the
/// MKV file source and reports it as a MediaStreamSample. A null frame is the
/// natural end-of-stream signal (reporting a null sample raises MediaEnded).
/// </summary>
void mkvStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;
    MediaStreamSourceSampleRequestDeferral deferral = request.GetDeferral();
    MediaStreamSample sample = null;

    try
    {
        var mkvSource = mediaStreamFileSource as CCPlayer.HWCodecs.Matroska.MKV.MKVFileSource;
        FrameBufferData frameData = mkvSource.GetFrameData(request.StreamDescriptor);

        if (frameData.Data != null)
        {
            sample = MediaStreamSample.CreateFromBuffer(frameData.Data, frameData.TimeCode);
            sample.Duration = frameData.Duration;
            sample.KeyFrame = frameData.KeyFrame;

            // Look up subtitle frames and forward them to the transport control.
            // (original comment: 자막을 검색하여 추가)
            MessengerInstance.Send<Message>(new Message("SubtitleFrameInMKV", mkvSource.SubtitleFrames), TransportControlViewModel.NAME);
        }
        else if (System.Diagnostics.Debugger.IsAttached)
        {
            // Null frame reported: playback winds down naturally and the
            // MediaElement will raise its MediaEnded event.
            System.Diagnostics.Debug.WriteLine("***************************** null이 보고 되었음. 종료 코드가 들어옴 => MediaElement의 MediaEndedEvent 발생될 것임.");
        }

        request.Sample = sample;
    }
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("********************************** 샘플오류 또는 강제 종료 => MediaStreamSource의 Closed 이벤트가 발생될 것임 : " + e.Message);
        // Raising a decode error makes the MediaStreamSource fire its Closed event.
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        deferral?.Complete();
    }
}
/// <summary>
/// Blocks the caller until the renderer has a pending audio sample request,
/// then fulfills that request with a sample built from <paramref name="buf"/>.
/// </summary>
/// <param name="buf">Raw audio payload for the next sample.</param>
/// <remarks>
/// This loop puts back-pressure on the DU queue upstream: the producer is
/// held here until the pipeline asks for another sample, so our queue cannot
/// grow without bound.
/// </remarks>
public void EnqueueAudioSample(byte[] buf)
{
    MediaStreamSample sample = CreateAudioSample(buf);

    for (; ; )
    {
        lock (audioQueueLock)
        {
            if (pendingAudioRequest != null)
            {
                pendingAudioRequest.Sample = sample;
                pendingAudioDeferral.Complete();
                // BUG FIX: clear BOTH halves of the pending request. The
                // original nulled only pendingAudioRequest, leaving a stale
                // deferral that could be completed a second time.
                pendingAudioRequest = null;
                pendingAudioDeferral = null;
                break;
            }
        }

        // BUG FIX: the original `continue`d inside the lock with no yield,
        // re-acquiring the monitor in a tight loop and pegging a core while
        // waiting. Yield briefly OUTSIDE the lock so the sample-requested
        // handler can take it.
        System.Threading.Tasks.Task.Delay(1).Wait();
    }
}
/// <summary>
/// SampleRequested handler for FLV playback. Demuxes the current FLV tag into
/// a MediaStreamSample: AVC video NALUs are rewritten from 4-byte
/// length-prefixed form to Annex-B start-code form (with SPS/PPS prepended on
/// keyframes); other video codecs and AAC/MP3/ADPCM audio pass through as-is.
/// </summary>
private void flvStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    MediaStreamSourceSampleRequest request = args.Request;
    MediaStreamSourceSampleRequestDeferral deferal = request.GetDeferral();
    FlvFile flvFile = mediaStreamFileSource as FlvFile;
    FlvTag flvTag = null;
    MemoryStream stream = null;
    MediaStreamSample sample = null;

    try
    {
        if (flvFile != null)
        {
            if (request.StreamDescriptor is VideoStreamDescriptor)
            {
                flvTag = flvFile.FlvFileBody.CurrentVideoTag;
                if (flvTag.VideoData.CodecID == CodecID.AVC)
                {
                    byte[] by = flvTag.VideoData.AVCVideoPacket.NALUs;
                    if (by != null && by.Length > 0)
                    {
                        MemoryStream srcStream = new MemoryStream(by);
                        stream = new MemoryStream();

                        // Prepend SPS/PPS before every keyframe so the decoder
                        // can (re)initialize mid-stream.
                        if (flvTag.VideoData.FrameType == FrameType.Keyframe)
                        {
                            if (NALUnitHeader != null)
                            {
                                stream.Write(NALUnitHeader, 0, NALUnitHeader.Length);
                            }
                        }

                        // Convert each 4-byte big-endian length prefix into an
                        // Annex-B start code followed by the NAL payload.
                        using (BinaryReader reader = new BinaryReader(srcStream))
                        {
                            var sampleSize = srcStream.Length;
                            while (sampleSize > 4L)
                            {
                                var ui32 = reader.ReadUInt32();
                                var count = OldSkool.swaplong(ui32);
                                stream.Write(h264StartCode, 0, h264StartCode.Length);
                                stream.Write(reader.ReadBytes((int)count), 0, (int)count);
                                sampleSize -= 4 + (uint)count;
                            }
                        }

                        if (stream != null && stream.Length > 0)
                        {
                            IBuffer buffer = stream.ToArray().AsBuffer();
                            stream.Position = 0;
                            sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                            sample.KeyFrame = flvTag.VideoData.FrameType == FrameType.Keyframe;
                        }
                    }
                }
                else
                {
                    // Non-AVC codecs: the raw tag data is decodable as-is.
                    IBuffer buffer = flvTag.VideoData.RawData.AsBuffer();
                    sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                    sample.KeyFrame = flvTag.VideoData.FrameType == FrameType.Keyframe;
                }
            }
            else
            {
                byte[] by = null;
                flvTag = flvFile.FlvFileBody.CurrentAudioTag;
                switch (flvTag.AudioData.SoundFormat)
                {
                    case SoundFormat.AAC:
                        by = (flvTag.AudioData.SoundData as AACAudioData).RawAACFrameData;
                        break;
                    case SoundFormat.MP3:
                        by = flvTag.AudioData.SoundData.RawData;
                        break;
                    case SoundFormat.ADPCM:
                        by = flvTag.AudioData.SoundData.RawData;
                        break;
                }

                if (by != null && by.Length > 0)
                {
                    // BUG FIX: the original also allocated an unused
                    // MemoryStream over `by` here and assigned request.Sample
                    // twice (once here, once below); both were redundant and
                    // have been removed.
                    IBuffer buffer = by.AsBuffer();
                    sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromTicks(flvTag.Timestamp));
                    sample.KeyFrame = true; // audio frames are always sync points
                }
            }
        }

        // Report the sample; null signals end of stream.
        // (original comment: 샘플보고 — "report sample")
        request.Sample = sample;
    }
    catch (Exception e)
    {
        // "샘플오류" = sample error. NotifyError triggers the
        // MediaStreamSource Closed event.
        System.Diagnostics.Debug.WriteLine("샘플오류 " + e.Message);
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        if (deferal != null)
        {
            deferal.Complete();
        }
    }
}
/// <summary>
/// Processes a NAL. On the first SPS (type 7) it parses the stream geometry,
/// builds the H.264 MediaStreamSource, and starts playback; every subsequent
/// NAL is either handed straight to a pending sample request or queued.
/// </summary>
/// <param name="nal">The NAL to be processed. Expected to start with an
/// Annex-B 00 00 00 01 start code.</param>
/// <returns>Type of NAL (low 5 bits of the header byte), or -1 when not yet
/// decoding or when the buffer does not start with a valid start code.</returns>
private int ProcessNal(Nal nal)
{
    // get the NAL type: verify the 4-byte Annex-B start code, then mask the
    // nal_unit_type out of the fifth byte.
    int nalType = -1;
    if (nal.Buffer.Length > 4)
    {
        byte[] header = new byte[5];
        nal.Buffer.CopyTo(0, header, 0, 5);
        nalType = (header[0] == 0 && header[1] == 0 && header[2] == 0 && header[3] == 1) ? (header[4] & 0x1F) : -1;
    }
    //Log.Verbose("NAL: type = {0}, len = {1}", nalType, nal.Buffer.Length);

    // process the first SPS record we encounter; later SPS records are treated
    // as ordinary frames once isDecoding is set.
    if (nalType == 7 && !isDecoding)
    {
        byte[] sps = new byte[nal.Buffer.Length];
        nal.Buffer.CopyTo(sps);
        SpsParser parser = new SpsParser(sps, (int)nal.Buffer.Length);
        //Log.Verbose("SPS: {0}x{1} @ {2}", parser.width, parser.height, parser.fps);

        VideoEncodingProperties properties = VideoEncodingProperties.CreateH264();
        properties.ProfileId = H264ProfileIds.High;
        properties.Width = (uint)parser.width;
        properties.Height = (uint)parser.height;

        // Live source: no buffering, no seeking, zero duration.
        streamSource = new MediaStreamSource(new VideoStreamDescriptor(properties));
        streamSource.BufferTime = TimeSpan.Zero;
        streamSource.CanSeek = false;
        streamSource.Duration = TimeSpan.Zero;
        streamSource.SampleRequested += HandleSampleRequested;

        // Hop to the UI thread to attach the source and start playback.
        // NOTE(review): the returned IAsyncAction is intentionally not awaited.
        var action = Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.High, () =>
        {
            statusTextBlock.Visibility = Visibility.Collapsed;
            media.SetMediaStreamSource(streamSource);
            media.Play();
            storyboard.Begin();
        });

        isDecoding = true;
    }

    // queue the frame: if a sample request is pending, satisfy it immediately;
    // otherwise park the NAL for the next HandleSampleRequested call.
    if (nalType > 0 && isDecoding)
    {
        if (deferral != null)
        {
            // All samples are timestamped 0 — presumably the decoder renders
            // in arrival order for this live stream; verify against the sink.
            request.Sample = MediaStreamSample.CreateFromBuffer(nal.Buffer, new TimeSpan(0));
            // NOTE(review): the NAL is recycled into availableNals while the
            // sample above still references its buffer; this assumes the
            // sample is consumed before the buffer is overwritten — confirm.
            lock (availableNals)
            {
                //Log.Verbose("availableNals.Enqueue");
                availableNals.Enqueue(nal);
            }
            // Complete and clear the deferral/request pair so it cannot be
            // completed twice; order matters here.
            deferral.Complete();
            deferral = null;
            request = null;
            //Log.Verbose("Deferral Complete");
        }
        else
        {
            //Log.Verbose("usedNals.Enqueue");
            lock (usedNals)
            {
                usedNals.Enqueue(nal);
            }
        }
    }

    // return the NAL type (-1 until decoding has started)
    return(isDecoding ? nalType : -1);
}
/// <summary>
/// SampleRequested handler fed by the MPEG-TS extractor. Blocks until the
/// first keyframe-bearing sample is available, then serves samples from the
/// extractor queue into a reusable buffer, falling back to an empty sample
/// when the queue is dry.
/// </summary>
private void Mss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    // Only the video stream is served by this handler.
    if (!(args.Request.StreamDescriptor is VideoStreamDescriptor))
    {
        return;
    }

    Debug.WriteLine("requesting sample");
    MediaStreamSourceSampleRequest request = args.Request;
    MpegTS.VideoSample rawSample = null;
    MediaStreamSourceSampleRequestDeferral deferal = request.GetDeferral();

    try
    {
        if (!foundKeyFrame)
        {
            // Block on the parser's signal until a usable sample is queued.
            do
            {
                threadSync.WaitOne();
                rawSample = extractor.DequeueNextSample(false);
            }
            while (rawSample == null || extractor.SampleCount == 0);
        }
        else
        {
            if (extractor.SampleCount > 0)
            {
                rawSample = extractor.DequeueNextSample(false);
            }

            if (rawSample == null)
            {
                // Queue is dry: hand back the reusable empty sample.
                // BUG FIX: the original called deferal.Complete() here before
                // returning, and then the finally block completed the SAME
                // deferral a second time. Let finally do the single Complete().
                request.Sample = emptySample;
                return;
            }
        }

        // Grow the reusable buffer when the incoming sample doesn't fit.
        if (buff.Capacity < rawSample.Length)
        {
            buff = new Windows.Storage.Streams.Buffer((uint)rawSample.Length);
            bStream.Dispose();
            bStream = buff.AsStream();
        }

        // Timestamp is synthesized from a fixed frame duration T0 times the
        // running frame count.
        sample = MediaStreamSample.CreateFromBuffer(buff, new TimeSpan(T0.Ticks * frameCount));

        // Write the raw sample into the request sample's backing stream.
        bStream.Position = 0;
        rawSample.WriteToStream(bStream);
        sample.Buffer.Length = (uint)rawSample.Length;

        Debug.WriteLine("sample length: {0}", rawSample.Length);

        sample.Duration = T0;
        sample.KeyFrame = ScanForKeyframe(bStream);

        // Discontinuous reflects whether the previous sample's MpegTS
        // continuity counters (0-15) were all in order.
        sample.Discontinuous = !lastFrame;
        lastFrame = rawSample.IsComplete;

        ++frameCount;
        request.Sample = sample;
    }
    catch (Exception ex)
    {
        // BUG FIX: the original stored ex.ToString() in an unused local and
        // swallowed the failure silently. Log it and surface a decode error so
        // the MediaStreamSource can close cleanly (matches the other handlers).
        Debug.WriteLine("sample request failed: " + ex);
        sender.NotifyError(MediaStreamSourceErrorStatus.DecodeError);
    }
    finally
    {
        deferal.Complete();
    }

    Debug.WriteLine("exit request sample");
}