/// <summary>
/// Per-frame update for the media-engine backed video path: pulls the latest decoded
/// frame (if any), handles end-of-media / looping, and transfers the frame into the
/// video texture of the target component.
/// </summary>
/// <param name="elapsed">Time elapsed since the previous update (unused on this path;
/// the media engine drives its own presentation clock).</param>
partial void UpdateImpl(ref TimeSpan elapsed)
{
    // Nothing to do without an output surface or when playback is stopped.
    if (videoOutputSurface == null || PlayState == PlayState.Stopped)
        return;

    // Only proceed when the media engine reports a new frame ready for transfer.
    if (!mediaEngine.OnVideoStreamTick(out var presentationTicks))
        return;

    CurrentTime = TimeSpan.FromTicks(presentationTicks);

    // End of media is reached when the engine hit EOF, or when the current time ran
    // past the play range end (or past the loop range end while looping).
    // The ||-chain preserves the original short-circuit evaluation order.
    var endReached = reachedEOF
        || (PlayRange.IsValid() && CurrentTime > PlayRange.End)
        || (IsLooping && LoopRange.IsValid() && CurrentTime > LoopRange.End);

    if (endReached)
    {
        if (!IsLooping)
        {
            // Stop the video
            Stop();
            return;
        }

        // Restart the video at the beginning of the loop range, then fall through
        // so the current frame is still transferred this update.
        Seek(LoopRange.Start);
    }

    if (videoComponent.Target == null || videoOutputSurface == null || videoOutputTexture == null)
        return;

    videoTexture.SetTargetContentToVideoStream(videoComponent.Target);

    // Now update the video texture with data of the new video frame.
    var graphicsContext = services.GetSafeServiceAs<GraphicsContext>();
    mediaEngine.TransferVideoFrame(
        videoOutputSurface,
        null,
        new SharpDX.Mathematics.Interop.RawRectangle(0, 0, videoWidth, videoHeight),
        null);
    videoTexture.CopyDecoderOutputToTopLevelMipmap(graphicsContext, videoOutputTexture);
    videoTexture.GenerateMipMaps(graphicsContext);
}
/// <summary>
/// Per-frame update for the frame-extraction (software decode) video path: advances an
/// elapsed-time accumulator scaled by the speed factor, extracts the frames that became
/// due, handles end-of-media / looping, and uploads the newest frame to the video texture.
/// </summary>
/// <param name="elapsed">Time elapsed since the previous update; scaled by SpeedFactor
/// (0 while paused, so the accumulator does not advance).</param>
partial void UpdateImpl(ref TimeSpan elapsed)
{
    if (stream == null)
    {
        return;
    }

    if (PlayState == PlayState.Stopped)
    {
        return;
    }

    // Paused playback advances no time: force the speed factor to zero.
    var speedFactor = SpeedFactor;
    if (PlayState == PlayState.Paused)
    {
        speedFactor = 0;
    }

    // Compare elapsed time with video framerate
    var frameDurationTicks = stream.FrameDuration.Ticks;
    adjustedTicksSinceLastFrame += (long)(elapsed.Ticks * speedFactor);
    if (adjustedTicksSinceLastFrame < frameDurationTicks)
    {
        return;
    }

    // Number of whole frames that became due since the last extraction.
    var frameCount = (int)(adjustedTicksSinceLastFrame / frameDurationTicks);
    if (frameCount == 0)
    {
        // Note: in case of slow speed factor, we might not need to update at each draw
        return;
    }

    if (frameCount > 4)
    {
        // Reading more than a few frames can be expensive, better seek.
        // FIXME: we might need a heuristic here to auto-adapt. It is probably dependent
        // on the video being played (e.g. resolution, codec, file size, etc.)
        Seek(CurrentTime + TimeSpan.FromTicks(frameDurationTicks * frameCount));
        frameCount = 1;
    }

    // Extract the frames
    var extractedFrameCount = media.ExtractFrames(stream, frameCount);
    if (extractedFrameCount > 0)
    {
        // Keep only the sub-frame remainder so timing error does not accumulate.
        adjustedTicksSinceLastFrame = adjustedTicksSinceLastFrame % stream.FrameDuration.Ticks;
    }

    // Get the last one
    var streamInfo = media.GetStreamInfo(stream);
    if (streamInfo?.Image == null)
    {
        return;
    }

    // Check end of media
    bool endOfMedia = streamInfo.ReachedEnd;
    if (!endOfMedia)
    {
        if (extractedFrameCount > 0)
        {
            CurrentTime = stream.TimestampToTime(streamInfo.Image.Timestamp);
        }

        // check the video loop and play range
        if (PlayRange.IsValid() && CurrentTime > PlayRange.End)
        {
            endOfMedia = true;
        }
        else if (IsLooping && LoopRange.IsValid() && CurrentTime > LoopRange.End)
        {
            endOfMedia = true;
        }
    }

    if (endOfMedia)
    {
        if (IsLooping)
        {
            // Restart the video at LoopRange.Start
            // (ToCheck: is there a better way to do this (directly updating CurrentTime
            // does not seem good, but if not doing, it will not work))
            CurrentTime = LoopRange.Start;
            Seek(LoopRange.Start);
            return;
        }
        else
        {
            // stop the video
            Stop();
            return;
        }
    }

    // Return if the frame extraction failed and we didn't reach the end of the video.
    if (extractedFrameCount == 0)
    {
        return;
    }

    if (videoComponent.Target != null)
    {
        videoTexture.SetTargetContentToVideoStream(videoComponent.Target);

        // Now update the video texture with data of the new video frame:
        var graphicsContext = services.GetSafeServiceAs<GraphicsContext>();

        // NOTE(review): this branch looks unreachable — the early return above already
        // guarantees streamInfo.Image != null at this point, so the hardware-accelerated
        // decoder-output path can never be taken. Verify whether the earlier guard or
        // this condition is the intended one before changing either.
        if (streamInfo.Codec.IsHardwareAccelerated && streamInfo.Image == null)
        {
            videoTexture.CopyDecoderOutputToTopLevelMipmap(graphicsContext, streamInfo.Codec.DecoderOutputTexture);
        }
        else
        {
            videoTexture.UpdateTopLevelMipmapFromData(graphicsContext, streamInfo.Image);
        }

        videoTexture.GenerateMipMaps(graphicsContext);
    }
}