/// <summary>
/// Stops playback: pauses the player if needed, rewinds the current item to
/// the beginning, and flags the player as stopped.
/// </summary>
public async Task Stop()
{
    await Task.Run(() =>
    {
        if (CurrentItem == null)
        {
            return;
        }

        // NOTE(review): pauses whenever Rate != 1.0, which also fires when the
        // player is already paused (Rate == 0). Preserved verbatim — confirm
        // whether `Rate != 0` was intended.
        if (this.PeriodicTimeObserverObject != null && Player.Rate != 1.0)
        {
            Player.Pause();
        }

        // Rewind to the start and report the stopped state.
        CurrentItem.Seek(CMTime.FromSeconds(0d, 1));
        Status = MediaPlayerStatus.Stopped;
    });
}
/// <summary>
/// Trims the video at <paramref name="sourcePath"/> to the [startTime, endTime]
/// window (in seconds) and writes the result to <paramref name="destinationPath"/>
/// as MP4 using a passthrough (no re-encode) export.
/// </summary>
/// <param name="sourcePath">Path to an existing source video file.</param>
/// <param name="destinationPath">Path the trimmed MP4 is written to.</param>
/// <param name="startTime">Trim window start, in seconds.</param>
/// <param name="endTime">Trim window end, in seconds.</param>
/// <returns>Success, failure (with message), or cancellation.</returns>
public async Task<OperationResult> TrimVideo(string sourcePath, string destinationPath, double startTime, double endTime)
{
    if (string.IsNullOrEmpty(sourcePath) || !File.Exists(sourcePath))
    {
        return OperationResult.AsFailure("Invalid video file path specified");
    }
    // ROBUSTNESS: the destination path was previously not validated at all.
    if (string.IsNullOrEmpty(destinationPath))
    {
        return OperationResult.AsFailure("Invalid destination file path specified");
    }
    // BUG FIX: AVAssetExportSession fails if the output file already exists,
    // so remove any stale file before exporting.
    if (File.Exists(destinationPath))
    {
        File.Delete(destinationPath);
    }

    var url = NSUrl.CreateFileUrl(sourcePath, false, null);
    var asset = AVAsset.FromUrl(url);

    var session = new AVAssetExportSession(asset, AVAssetExportSession.PresetPassthrough)
    {
        OutputUrl = NSUrl.FromFilename(destinationPath),
        OutputFileType = AVFileType.Mpeg4
    };

    // Express the trim window in the asset's own timescale to avoid rounding.
    var timescale = asset.Duration.TimeScale;
    session.TimeRange = new CMTimeRange
    {
        Start = CMTime.FromSeconds(startTime, timescale),
        Duration = CMTime.FromSeconds(endTime - startTime, timescale)
    };

    await session.ExportTaskAsync();

    switch (session.Status)
    {
        case AVAssetExportSessionStatus.Cancelled:
            return OperationResult.AsCancel();
        case AVAssetExportSessionStatus.Failed:
            // Error can be null in rare cases; fall back to a generic message.
            return OperationResult.AsFailure(session.Error?.LocalizedDescription ?? "Video export failed");
        default:
            return OperationResult.AsSuccess();
    }
}
//Utilities

// Registers a periodic time observer (on the main queue) that keeps the
// scrubber and time label in sync while the item plays. No-op if an observer
// already exists or the player/item is not ready.
void addTimeObserverToPlayer()
{
    // Bail out unless the player is fully initialised and ready to play.
    if (timeObserver != null || Player == null || Player.CurrentItem == null)
    {
        return;
    }
    if (Player.CurrentItem.Status != AVPlayerItemStatus.ReadyToPlay)
    {
        return;
    }

    double duration = playerItemDuration;
    if (Double.IsInfinity(duration))
    {
        return;
    }

    // Fire roughly twice per pixel of scrubber travel, capped at once per second.
    float width = (float)scrubber.Bounds.Width;
    double interval = Math.Min(0.5 * duration / width, 1.0);
    timeObserver = Player.AddPeriodicTimeObserver(
        CMTime.FromSeconds(interval, NSEC_PER_SEC),
        DispatchQueue.MainQueue,
        delegate
        {
            updateScrubber();
            updateTimeLabel();
        });
}
// Registers a one-second periodic time observer that publishes the current
// playback position through the PositionChanged event. Each guard logs why it
// bailed out, which aids debugging initialisation order.
private void AddTimeObserverToPlayer()
{
    if (timeObserver != null)
    {
        Console.WriteLine("timeObserver != null");
        return;
    }
    if (player == null)
    {
        Console.WriteLine("Player == null");
        return;
    }
    if (player.CurrentItem == null)
    {
        Console.WriteLine("Player.CurrentItem == null");
        return;
    }
    if (player.CurrentItem.Status != AVPlayerItemStatus.ReadyToPlay)
    {
        Console.WriteLine("Player.CurrentItem.Status != AVPlayerItemStatus.ReadyToPlay");
        return;
    }

    timeObserver = player.AddPeriodicTimeObserver(CMTime.FromSeconds(1, 1), DispatchQueue.MainQueue, delegate
    {
        // Snapshot the handler to avoid a race between the null check and invoke.
        var handler = PositionChanged;
        if (handler == null)
        {
            return;
        }

        // Payload keys are part of the event contract — do not rename them.
        var positionInfo = new Dictionary<string, object>
        {
            { "CurrentDuration", player.CurrentTime.Seconds },
            { "CurrentText", (string)PlayerCurrettime() }
        };
        handler(positionInfo, EventArgs.Empty);
    });
}
// Reacts to bindable-property changes on the Xamarin.Forms element and pushes
// them down to the native AVPlayer.
protected override void OnElementPropertyChanged(object sender, PropertyChangedEventArgs args)
{
    base.OnElementPropertyChanged(sender, args);

    var propertyName = args.PropertyName;
    if (propertyName == VideoPlayer.AreTransportControlsEnabledProperty.PropertyName)
    {
        SetAreTransportControlsEnabled();
    }
    else if (propertyName == VideoPlayer.SourceProperty.PropertyName)
    {
        SetSource();
    }
    else if (propertyName == VideoPlayer.PositionProperty.PropertyName)
    {
        // Only seek when the bound Position has drifted more than one second
        // from the native player, avoiding a feedback loop with the observer
        // that updates Position from playback.
        TimeSpan controlPosition = ConvertTime(player.CurrentTime);
        bool drifted = Math.Abs((controlPosition - Element.Position).TotalSeconds) > 1;
        if (drifted)
        {
            player.Seek(CMTime.FromSeconds(Element.Position.TotalSeconds, 1));
        }
    }
}
// Seeks to the position given by a 0..1 slider value, with roughly one pixel
// of seek tolerance so repeated seeks stay cheap while the user drags.
private void ScrubToSliderValue(float sliderValue)
{
    var totalSeconds = this.GetPlayerItemDuration().Seconds;
    if (double.IsInfinity(totalSeconds))
    {
        return;
    }

    var scrubberWidth = this.scrubber.Bounds.Width;
    double targetSeconds = totalSeconds * sliderValue;
    double toleranceSeconds = 1d * totalSeconds / scrubberWidth;

    this.isScrubInFlight = true;
    this.player.Seek(
        CMTime.FromSeconds(targetSeconds, NSEC_PER_SEC),
        CMTime.FromSeconds(toleranceSeconds, NSEC_PER_SEC),
        CMTime.FromSeconds(toleranceSeconds, NSEC_PER_SEC),
        finished =>
        {
            // Seek completed (or was superseded); allow UI updates again.
            this.isScrubInFlight = false;
            this.UpdateTimeLabel();
        });
}
// Builds the podcast detail screen: background, subviews, slider-driven
// seeking, and the transport controls.
public PodcastDetailController()
{
    View.BackgroundColor = UIColor.White;
    AddUI();

    // iOS 7+ would otherwise lay content out under the navigation bar.
    if (Application.IsiOS7)
    {
        EdgesForExtendedLayout = UIRectEdge.None;
    }

    // Seek when the user moves the slider; resume if we were playing.
    podcastSlider.ValueChanged += async (sender, e) =>
    {
        double targetSeconds = (double)(podcastSlider.Value * player.CurrentItem.Duration.Seconds);
        await player.SeekAsync(CMTime.FromSeconds(targetSeconds, 1));
        if (playing)
        {
            player.Play();
        }
    };

    AddTransportControls();
}
// Asynchronously loads the requested keys on the asset and, if every key loads
// and the asset is composable, records it (with a fixed 0-5s clip range) in
// the clips/clipTimeRanges lists. Balances the dispatch group in all paths.
private void LoadAsset(AVAsset asset, string[] assetKeysToLoad, DispatchGroup dispatchGroup)
{
    dispatchGroup.Enter();
    asset.LoadValuesAsynchronously(assetKeysToLoad, () =>
    {
        // First test whether the values of each of the keys we need have been
        // successfully loaded.
        bool usable = true;
        foreach (var key in assetKeysToLoad)
        {
            if (asset.StatusOfValue(key, out NSError error) == AVKeyValueStatus.Failed)
            {
                Console.WriteLine($"Key value loading failed for key:{key} with error: {error?.LocalizedDescription ?? ""}");
                usable = false;
                break;
            }
        }

        if (!asset.Composable)
        {
            Console.WriteLine("Asset is not composable");
            usable = false;
        }

        if (usable)
        {
            this.clips.Add(asset);
            // This code assumes that both assets are at least 5 seconds long.
            this.clipTimeRanges.Add(new CMTimeRange
            {
                Start = CMTime.FromSeconds(0, 1),
                Duration = CMTime.FromSeconds(5, 1)
            });
        }

        // Always leave the group, even when the asset was rejected.
        dispatchGroup.Leave();
    });
}
/// <summary>
/// Sets up the exposure UI: binds slider ranges to the capture device's
/// capabilities, starts a timer that mirrors device state into the sliders,
/// and wires slider/segment changes back into the device's exposure settings.
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();

    // Hide no camera label
    NoCamera.Hidden = ThisApp.CameraAvailable;

    // Attach to camera view
    ThisApp.Recorder.DisplayView = CameraView;

    // Set min and max values from the device's supported exposure ranges.
    Offset.MinValue = ThisApp.CaptureDevice.MinExposureTargetBias;
    Offset.MaxValue = ThisApp.CaptureDevice.MaxExposureTargetBias;
    Duration.MinValue = 0.0f;
    Duration.MaxValue = 1.0f;
    ISO.MinValue = ThisApp.CaptureDevice.ActiveFormat.MinISO;
    ISO.MaxValue = ThisApp.CaptureDevice.ActiveFormat.MaxISO;
    Bias.MinValue = ThisApp.CaptureDevice.MinExposureTargetBias;
    Bias.MaxValue = ThisApp.CaptureDevice.MaxExposureTargetBias;

    // Create a timer to monitor and update the UI
    SampleTimer = new Timer(5000);
    SampleTimer.Elapsed += (sender, e) =>
    {
        // Update position slider
        Offset.BeginInvokeOnMainThread(() =>
        {
            Offset.Value = ThisApp.Input.Device.ExposureTargetOffset;
        });

        Duration.BeginInvokeOnMainThread(() =>
        {
            // Map the current exposure duration back onto the 0..1 slider using
            // the inverse of the power curve applied in Duration.ValueChanged.
            var newDurationSeconds = CMTimeGetSeconds(ThisApp.Input.Device.ExposureDuration);
            var minDurationSeconds = Math.Max(CMTimeGetSeconds(ThisApp.CaptureDevice.ActiveFormat.MinExposureDuration), ExposureMinimumDuration);
            var maxDurationSeconds = CMTimeGetSeconds(ThisApp.CaptureDevice.ActiveFormat.MaxExposureDuration);
            var p = (newDurationSeconds - minDurationSeconds) / (maxDurationSeconds - minDurationSeconds);
            Duration.Value = (float)Math.Pow(p, 1.0f / ExposureDurationPower);
        });

        ISO.BeginInvokeOnMainThread(() =>
        {
            ISO.Value = ThisApp.Input.Device.ISO;
        });

        Bias.BeginInvokeOnMainThread(() =>
        {
            Bias.Value = ThisApp.Input.Device.ExposureTargetBias;
        });
    };

    // Watch for value changes
    Segments.ValueChanged += (object sender, EventArgs e) =>
    {
        // Lock device for change
        if (ThisApp.CaptureDevice.LockForConfiguration(out Error))
        {
            // Take action based on the segment selected
            switch (Segments.SelectedSegment)
            {
                case 0:
                    // Activate auto exposure and start monitoring position
                    Duration.Enabled = false;
                    ISO.Enabled = false;
                    ThisApp.CaptureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
                    SampleTimer.Start();
                    Automatic = true;
                    break;
                case 1:
                    // Lock exposure and allow the user to control the camera
                    SampleTimer.Stop();
                    ThisApp.CaptureDevice.ExposureMode = AVCaptureExposureMode.Locked;
                    Automatic = false;
                    Duration.Enabled = false;
                    ISO.Enabled = false;
                    break;
                case 2:
                    // Custom exposure and allow the user to control the camera
                    SampleTimer.Stop();
                    ThisApp.CaptureDevice.ExposureMode = AVCaptureExposureMode.Custom;
                    Automatic = false;
                    Duration.Enabled = true;
                    ISO.Enabled = true;
                    break;
            }

            // Unlock device
            ThisApp.CaptureDevice.UnlockForConfiguration();
        }
    };

    // Monitor position changes
    Duration.ValueChanged += (object sender, EventArgs e) =>
    {
        // If we are in the automatic mode, ignore changes
        if (Automatic)
        {
            return;
        }

        // Map the 0..1 slider through a power curve so small movements give
        // fine-grained control near the short-duration end.
        var p = Math.Pow(Duration.Value, ExposureDurationPower);
        var minDurationSeconds = Math.Max(CMTimeGetSeconds(ThisApp.CaptureDevice.ActiveFormat.MinExposureDuration), ExposureMinimumDuration);
        var maxDurationSeconds = CMTimeGetSeconds(ThisApp.CaptureDevice.ActiveFormat.MaxExposureDuration);
        var newDurationSeconds = p * (maxDurationSeconds - minDurationSeconds) + minDurationSeconds;

        // Update exposure duration
        if (ThisApp.CaptureDevice.LockForConfiguration(out Error))
        {
            // BUG FIX: the original passed the raw power-curve value `p` (which
            // only spans 0..1 s) to LockExposure and never used the computed
            // newDurationSeconds. Lock to the mapped duration instead.
            ThisApp.CaptureDevice.LockExposure(CMTime.FromSeconds(newDurationSeconds, 1000 * 1000 * 1000), ThisApp.CaptureDevice.ISO, null);
            ThisApp.CaptureDevice.UnlockForConfiguration();
        }
    };

    ISO.ValueChanged += (object sender, EventArgs e) =>
    {
        // If we are in the automatic mode, ignore changes
        if (Automatic)
        {
            return;
        }

        // Update ISO while keeping the current exposure duration.
        if (ThisApp.CaptureDevice.LockForConfiguration(out Error))
        {
            ThisApp.CaptureDevice.LockExposure(ThisApp.CaptureDevice.ExposureDuration, ISO.Value, null);
            ThisApp.CaptureDevice.UnlockForConfiguration();
        }
    };

    Bias.ValueChanged += (object sender, EventArgs e) =>
    {
        // NOTE: bias is intentionally adjustable even in automatic mode —
        // the original `if (Automatic) return;` guard is commented out.

        // Update exposure target bias
        if (ThisApp.CaptureDevice.LockForConfiguration(out Error))
        {
            ThisApp.CaptureDevice.SetExposureTargetBias(Bias.Value, null);
            ThisApp.CaptureDevice.UnlockForConfiguration();
        }
    };
}
// Draws one row of the composition diagram: a banner labelled "AVComposition"
// followed by one box per video-composition stage, each box subdivided per
// layer, with the layer's opacity ramp drawn on top. Advances runningTop past
// the banner, the row, and the inter-row gap.
private void DrawVideoCompositionTracks(CGRect bannerRect, CGRect rowRect, ref float runningTop)
{
    bannerRect.Y = runningTop;
    var context = UIGraphics.GetCurrentContext();

    // Banner label in white.
    context.SetFillColor(1.00f, 1.00f, 1.00f, 1.00f);
    var compositionTitle = new NSString("AVComposition");
    compositionTitle.DrawString(bannerRect, UIFont.PreferredCaption1);
    runningTop += (float)bannerRect.Height;

    rowRect.Y = runningTop;
    CGRect stageRect = rowRect;
    foreach (APLVideoCompositionStageInfo stage in videoCompositionStages)
    {
        // Each stage's width is proportional to its duration.
        stageRect.Width = (float)stage.TimeRange.Duration.Seconds * scaledDurationToWidth;
        int layerCount = stage.LayerNames.Count;
        CGRect layerRect = stageRect;
        if (layerCount > 0)
        {
            // Split the stage box vertically, one strip per layer.
            layerRect.Height /= layerCount;
        }
        foreach (string layerName in stage.LayerNames)
        {
            CGRect bufferRect = layerRect;
            // Layer names are numeric strings; odd layers draw red, even blue.
            int intValueOfName;
            Int32.TryParse(layerName, out intValueOfName);
            if (intValueOfName % 2 == 1)
            {
                context.SetFillColor(0.55f, 0.02f, 0.02f, 1.00f); // darker red
                context.SetStrokeColor(0.87f, 0.10f, 0.10f, 1.00f); // brighter red
            }
            else
            {
                context.SetFillColor(0.00f, 0.40f, 0.76f, 1.00f); // darker blue
                context.SetStrokeColor(0.00f, 0.67f, 1.00f, 1.00f); // brighter blue
            }
            context.SetLineWidth(2f);
            bufferRect = bufferRect.Inset(2f, 3f);
            context.AddRect(bufferRect);
            context.DrawPath(CGPathDrawingMode.FillStroke);
            context.SetFillColor(0.00f, 0.00f, 0.00f, 1.00f); // black (label color; original comment said "white")
            DrawVerticallyCenteredInRect(layerName, bufferRect);

            // Draw the opacity ramps for each layer as per the layerInstructions
            List<CGPoint> rampArray = new List<CGPoint>();
            if (stage.OpacityRamps != null)
            {
                rampArray = stage.OpacityRamps[layerName];
            }
            if (rampArray.Count > 0)
            {
                CGRect rampRect = bufferRect;
                // NOTE(review): ramp width uses the whole composition's
                // `duration` field, not this stage's duration — confirm intended.
                rampRect.Width = (float)duration.Seconds * scaledDurationToWidth;
                rampRect = rampRect.Inset(3f, 3f);
                context.BeginPath();
                context.SetStrokeColor(0.95f, 0.68f, 0.09f, 1.00f); // yellow
                context.SetLineWidth(2f);
                bool firstPoint = true;
                foreach (CGPoint point in rampArray)
                {
                    // point.X is a time in seconds, point.Y an opacity in 0..1.
                    CGPoint timeVolumePoint = point;
                    CGPoint pointInRow = new CGPoint();
                    pointInRow.X = (float)HorizontalPositionForTime(CMTime.FromSeconds(timeVolumePoint.X, 1)) - 9.0f;
                    pointInRow.Y = rampRect.Y + (0.9f - 0.8f * timeVolumePoint.Y) * rampRect.Height;
                    // Clamp horizontally into the ramp rect.
                    pointInRow.X = (float)Math.Max(pointInRow.X, rampRect.GetMinX());
                    pointInRow.X = (float)Math.Min(pointInRow.X, rampRect.GetMaxX());
                    if (firstPoint)
                    {
                        context.MoveTo(pointInRow.X, pointInRow.Y);
                        firstPoint = false;
                    }
                    else
                    {
                        context.AddLineToPoint(pointInRow.X, pointInRow.Y);
                    }
                }
                context.StrokePath();
            }
            layerRect.Y += layerRect.Height;
        }
        stageRect.X += stageRect.Width;
    }
    runningTop += (float)rowRect.Height;
    runningTop += GapAfterRows;
}
/// <summary>
/// Updates the time elapsed interval of the video player: removes any existing
/// periodic observer, then re-registers one at the element's configured
/// interval (only when the interval is positive).
/// </summary>
private void UpdateTimeElapsedInterval()
{
    // Tear down the previous observer before (re)registering.
    if (_periodicTimeOberserver != null)
    {
        _playerControl?.Player?.RemoveTimeObserver(_periodicTimeOberserver);
        _periodicTimeOberserver = null;
    }

    var element = Element;
    if (element == null || !(Element?.TimeElapsedInterval > 0))
    {
        return;
    }

    _periodicTimeOberserver = _playerControl?.Player?.AddPeriodicTimeObserver(
        CMTime.FromSeconds(element.TimeElapsedInterval, 1),
        null,
        time => element.OnTimeElapsed(CreateVideoPlayerEventArgs()));
}
/// <summary>
/// Seeks to the given fraction of the current item's duration.
/// </summary>
/// <param name="progress">Relative position within the item, 0..1.</param>
public void Seek(float progress)
{
    // BUG FIX: the original null-guarded `player` but dereferenced
    // `player.CurrentItem.Duration` unconditionally, throwing a
    // NullReferenceException when no item was loaded. Guard the item too.
    var item = player?.CurrentItem;
    if (item == null)
    {
        return;
    }

    player.Seek(CMTime.FromSeconds(item.Duration.Seconds * progress, CMTimeScale.MaxValue.Value));
}
/// <summary>
/// Configures the capture session: preview layer, camera selection by
/// position, movie + photo outputs, frame-rate configuration, and optional
/// slow-motion (>= 240 fps) format selection. Starts the session running.
/// </summary>
protected void Initialize()
{
    // configure the capture session for medium resolution, change this if your code
    // can cope with more data or volume
    CaptureSession = new AVCaptureSession
    {
        SessionPreset = AVCaptureSession.PresetMedium
    };
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    // Pick the camera matching the requested position (front/back).
    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);

    movieFileOutput = new AVCaptureMovieFileOutput
    {
        //set max record time to 10 minutes
        MaxRecordedDuration = CMTime.FromSeconds(600, 1)
    };
    photoFileOutput = new AVCapturePhotoOutput();
    photoFileOutput.IsHighResolutionCaptureEnabled = true;

    if (CaptureSession.CanAddOutput(movieFileOutput))
    {
        CaptureSession.BeginConfiguration();
        CaptureSession.AddOutput(movieFileOutput);
        CaptureSession.AddOutput(photoFileOutput);

        // Pin min/max frame durations to the active format's supported range.
        var ranges = device.ActiveFormat.VideoSupportedFrameRateRanges;
        if (device.LockForConfiguration(out error))
        {
            device.ActiveVideoMinFrameDuration = new CMTime(1, (int)ranges.First().MinFrameRate);
            device.ActiveVideoMaxFrameDuration = new CMTime(1, (int)ranges.First().MaxFrameRate);
            // BUG FIX: the original never released this configuration lock,
            // leaving the device locked for the rest of its lifetime.
            device.UnlockForConfiguration();
        }

        var connection = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
        if (connection != null && connection.SupportsVideoStabilization)
        {
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }
        CaptureSession.CommitConfiguration();
    }

    CaptureSession.AddInput(input);
    Layer.AddSublayer(previewLayer);
    CaptureSession.StartRunning();

    // set frame rate if Slow-mo is requested: pick the first format that can
    // reach 240 fps and adopt its full frame-duration range.
    if (speedOptions == SpeedOptions.SlowMo)
    {
        foreach (var vFormat in device.Formats)
        {
            var _ranges = vFormat.VideoSupportedFrameRateRanges as AVFrameRateRange[];
            var frameRates = _ranges[0];
            if (frameRates.MaxFrameRate >= 240.0)
            {
                device.LockForConfiguration(out NSError _error);
                if (_error is null)
                {
                    device.ActiveFormat = vFormat as AVCaptureDeviceFormat;
                    device.ActiveVideoMinFrameDuration = frameRates.MinFrameDuration;
                    device.ActiveVideoMaxFrameDuration = frameRates.MaxFrameDuration;
                    device.UnlockForConfiguration();
                    break;
                }
            }
        }
    }

    IsPreviewing = true;
}
// Moves the given video's playhead to `position` seconds
// (millisecond-precision preferred timescale).
public static void SetPosition(VideoHandle handle, double position)
{
    var target = CMTime.FromSeconds(position, 1000);
    handle.PlayerItem.Seek(target);
}
/// <summary>
/// Merges the audio track from <paramref name="audioFilePath"/> into the video
/// at <paramref name="videoFilePath"/>, writing an MP4 to
/// <paramref name="outputFilePath"/>.
/// </summary>
/// <param name="volume">Initial audio volume (0..1).</param>
/// <param name="fadeOutDuration">Seconds of fade-out at the end; 0 disables it.</param>
/// <returns>Task resolving to success, failure (with message), or cancellation.</returns>
public Task<OperationResult> AddAudioToVideoTrack(string videoFilePath, string audioFilePath, string outputFilePath, float volume = 1, float fadeOutDuration = 0)
{
    var tcs = new TaskCompletionSource<OperationResult>();

    var composition = AVMutableComposition.Create();
    var videoCompositionTrack = composition.AddMutableTrack(AVMediaType.Video, 0);
    var audioCompositionTrack = composition.AddMutableTrack(AVMediaType.Audio, 0);

    var videoAsset = AVAsset.FromUrl(NSUrl.FromFilename(videoFilePath));
    var videoAssetTrack = videoAsset.TracksWithMediaType(AVMediaType.Video).FirstOrDefault();
    var audioAsset = AVAsset.FromUrl(NSUrl.FromFilename(audioFilePath));
    var audioAssetTrack = audioAsset.TracksWithMediaType(AVMediaType.Audio).FirstOrDefault();
    // ROBUSTNESS: the original used First() and threw when either file had no
    // matching track; surface that as a failure result instead.
    if (videoAssetTrack == null || audioAssetTrack == null)
    {
        tcs.SetResult(OperationResult.AsFailure("Input file is missing a video or audio track"));
        return tcs.Task;
    }

    // Copy both tracks over the full length of the source video.
    var range = new CMTimeRange
    {
        Start = CMTime.Zero,
        Duration = videoAssetTrack.TimeRange.Duration
    };
    NSError error;
    videoCompositionTrack.InsertTimeRange(range, videoAssetTrack, CMTime.Zero, out error);
    if (error != null)
    {
        Console.WriteLine("Error adding video composition track: " + error.LocalizedDescription);
    }
    audioCompositionTrack.InsertTimeRange(range, audioAssetTrack, CMTime.Zero, out error);
    if (error != null)
    {
        Console.WriteLine("Error adding audio composition track: " + error.LocalizedDescription);
    }

    // Apply the base volume and, optionally, a linear fade-out at the end.
    var audioMix = AVMutableAudioMix.Create();
    var audioInputParams = AVMutableAudioMixInputParameters.FromTrack(audioCompositionTrack);
    audioInputParams.SetVolume(volume, CMTime.Zero);
    if (fadeOutDuration > 0)
    {
        var fadeDuration = CMTime.FromSeconds(fadeOutDuration, audioAssetTrack.NaturalTimeScale);
        var fadeOutRange = new CMTimeRange
        {
            Start = CMTime.Subtract(videoAssetTrack.TimeRange.Duration, fadeDuration),
            Duration = fadeDuration
        };
        audioInputParams.SetVolumeRamp(volume, 0.0f, fadeOutRange);
    }
    audioMix.InputParameters = new[] { audioInputParams };

    var session = new AVAssetExportSession(composition, AVAssetExportSession.PresetHighestQuality)
    {
        OutputUrl = NSUrl.FromFilename(outputFilePath),
        OutputFileType = AVFileType.Mpeg4,
        AudioMix = audioMix
    };
    session.ExportAsynchronously(() =>
    {
        // BUG FIX: cancelled exports were previously reported as success;
        // mirror TrimVideo and surface them as a cancellation. Also guard
        // against a null Error on failure.
        if (session.Status == AVAssetExportSessionStatus.Cancelled)
        {
            tcs.TrySetResult(OperationResult.AsCancel());
        }
        else if (session.Status == AVAssetExportSessionStatus.Failed)
        {
            tcs.TrySetResult(OperationResult.AsFailure(session.Error?.LocalizedDescription ?? "Audio/video export failed"));
        }
        else
        {
            tcs.TrySetResult(OperationResult.AsSuccess());
        }
    });
    return tcs.Task;
}
/// <summary>
/// Creates the playback service: configures the shared AVAudioSession for
/// background playback, creates the AVPlayer with a one-second progress
/// observer, and wires up the MPRemoteCommandCenter transport commands.
/// </summary>
public PlaybackService(IMusicProviderService musicProviderService, ITelemetryService telemetryService, IDialogService dialogService)
{
    // Setup background audio
    AVAudioSession session = AVAudioSession.SharedInstance();
    session.SetCategory(AVAudioSessionCategory.Playback, AVAudioSessionCategoryOptions.AllowAirPlay | AVAudioSessionCategoryOptions.MixWithOthers | AVAudioSessionCategoryOptions.AllowBluetooth | AVAudioSessionCategoryOptions.AllowBluetoothA2DP);
    session.SetActive(true);

    // Setup services
    _musicProviderService = musicProviderService;
    _telemetryService = telemetryService;
    _dialogService = dialogService;

    // Setup player
    _player = new AVPlayer();

    // This should handle times a bit better: once per second, refresh the
    // system Now Playing info on the main thread.
    _player.AddPeriodicTimeObserver(CMTime.FromSeconds(1, 1), null, time =>
    {
        MainThread.BeginInvokeOnMainThread(() =>
        {
            UpdateInfoCenter();
        });
    });

    // Create the queue
    _queue = new List<Track>();

    // Setup the random class
    _random = new Random();

    // Setup Command Center (lock screen / control center transport controls)
    var commandCenter = MPRemoteCommandCenter.Shared;
    commandCenter.PreviousTrackCommand.Enabled = true;
    commandCenter.PreviousTrackCommand.AddTarget(PrevCommand);
    commandCenter.NextTrackCommand.Enabled = true;
    commandCenter.NextTrackCommand.AddTarget(NextCommand);
    commandCenter.TogglePlayPauseCommand.Enabled = true;
    commandCenter.TogglePlayPauseCommand.AddTarget(ToggleCommand);
    commandCenter.PlayCommand.Enabled = true;
    commandCenter.PlayCommand.AddTarget(PlayCommand);
    commandCenter.PauseCommand.Enabled = true;
    commandCenter.PauseCommand.AddTarget(PauseCommand);
    commandCenter.ChangeRepeatModeCommand.Enabled = true;
    commandCenter.ChangeRepeatModeCommand.AddTarget(ChangeRepeatModeCommand);
    commandCenter.ChangePlaybackPositionCommand.Enabled = true;
    commandCenter.ChangePlaybackPositionCommand.AddTarget((c) =>
    {
        // Scrub to the position requested from the system transport UI.
        var e = (MPChangePlaybackPositionCommandEvent)c;
        var time = e.PositionTime;
        _player.SeekAsync(CMTime.FromSeconds(time, 1));
        return (MPRemoteCommandHandlerStatus.Success);
    });

    // Setup the pinger to run for app lifetime.
    // This is used to make sure session information is correctly
    // kept as long playing tracks may cause telemetry services to think
    // the app is closed.
    var pingTimer = new Timer
    {
        Interval = 180000
    };
    pingTimer.Elapsed += (sender, args) => _telemetryService.TrackEvent("Ping");
    pingTimer.Start();
}
/// <summary>
/// Seeks the player to the given playback position.
/// </summary>
/// <param name="position">Target playback position.</param>
public async Task Seek(TimeSpan position)
{
    // BUG FIX: a preferred timescale of 1 rounds the target to whole seconds;
    // 600 (the conventional video timescale) preserves sub-second seeks.
    await Player.SeekAsync(CMTime.FromSeconds(position.TotalSeconds, 600));
}
// Seeks the audio player to an exact whole-second position (zero tolerance
// before and after the target).
public void SeekTo(int seconds)
{
    var target = CMTime.FromSeconds(seconds, TimeScale);
    // NOTE(review): the SeekAsync task is intentionally not awaited — the seek
    // is fire-and-forget; confirm callers never need its completion.
    audioPlayer.SeekAsync(target, CMTime.Zero, CMTime.Zero);
}
/// <summary>
/// Seeks the current item (if any) to the given playback position.
/// </summary>
/// <param name="position">Target playback position.</param>
public async Task Seek(TimeSpan position)
{
    await Task.Run(() =>
    {
        // BUG FIX: a preferred timescale of 1 rounds the target to whole
        // seconds; 600 (the conventional video timescale) preserves
        // sub-second seeks.
        Player.CurrentItem?.Seek(CMTime.FromSeconds(position.TotalSeconds, 600));
    });
}
/// <summary>
/// Updates the video source property on the native player.
/// </summary>
/// <param name="oldElement">The old element.</param>
private async Task UpdateSource(VideoPlayer oldElement = null)
{
    try
    {
        var newSource = Element?.Source;
        if (oldElement != null)
        {
            var oldSource = oldElement.Source;
            // BUG FIX: the original returned when the sources DIFFERED
            // (`!oldSource.Equals(newSource)`), which skipped every real
            // source change and only proceeded when nothing had changed.
            // Skip the reload when the source is unchanged instead.
            if (oldSource.Equals(newSource))
            {
                return;
            }
        }

        Element.SetValue(VideoPlayer.IsLoadingPropertyKey, true);

        var videoSourceHandler = VideoSourceHandler.Create(newSource);
        var path = await videoSourceHandler.LoadVideoAsync(newSource, new CancellationToken());
        Log.Info($"Video Source: {path}");

        if (!string.IsNullOrEmpty(path))
        {
            // Detach the observers bound to the previous item before swapping.
            if (_currentTimeObserver != null)
            {
                _playerControl.Player.RemoveTimeObserver(_currentTimeObserver);
            }
            if (_didPlayToEndTimeNotificationObserver != null)
            {
                NSNotificationCenter.DefaultCenter.RemoveObserver(_didPlayToEndTimeNotificationObserver);
            }

            // Update video source.
            Element.SetValue(VideoPlayer.CurrentTimePropertyKey, TimeSpan.Zero);
            var pathUrl = newSource is UriVideoSource ? NSUrl.FromString(path) : NSUrl.FromFilename(path);
            _playerControl.Player.CurrentItem?.RemoveObserver(FromObject(this), "status");
            _playerControl.Player.ReplaceCurrentItemWithPlayerItem(AVPlayerItem.FromUrl(pathUrl));
            _playerControl.Player.CurrentItem.AddObserver(this, (NSString)"status", 0, Handle);
            Element.OnPlayerStateChanged(CreateVideoPlayerStateChangedEventArgs(PlayerState.Initialized));

            _didPlayToEndTimeNotificationObserver = NSNotificationCenter.DefaultCenter.AddObserver(
                AVPlayerItem.DidPlayToEndTimeNotification,
                DidPlayToEndTimeNotification,
                _playerControl.Player.CurrentItem);

            // Mirror playback progress (once per second) into CurrentTime.
            _currentTimeObserver = _playerControl.Player.AddPeriodicTimeObserver(CMTime.FromSeconds(1, 1), null, time =>
                Element?.SetValue(VideoPlayer.CurrentTimePropertyKey,
                    double.IsNaN(time.Seconds) ? TimeSpan.Zero : TimeSpan.FromSeconds(time.Seconds)));
        }
    }
    catch (Exception ex)
    {
        Log.Error(ex);
        Element.SetValue(VideoPlayer.IsLoadingPropertyKey, false);
    }
}
// Seeks to the scrubber position given as a 0..1 fraction of the item's
// duration, with roughly one pixel of seek tolerance, then refreshes the
// time label once the seek completes.
async Task scrubTo(float sliderValue)
{
    var totalSeconds = playerItemDuration;
    if (Double.IsInfinity(totalSeconds))
    {
        return;
    }

    var pixelWidth = scrubber.Bounds.Width;
    var targetSeconds = totalSeconds * sliderValue;
    var toleranceSeconds = 1f * totalSeconds / pixelWidth;

    scrubInFlight = true;
    await Player.SeekAsync(
        CMTime.FromSeconds(targetSeconds, NSEC_PER_SEC),
        CMTime.FromSeconds(toleranceSeconds, NSEC_PER_SEC),
        CMTime.FromSeconds(toleranceSeconds, NSEC_PER_SEC));
    scrubInFlight = false;
    updateTimeLabel();
}
// Seeks playback to the given whole-second position.
public void Seek(int second)
{
#warning Untested
    var target = CMTime.FromSeconds(second, NSEC_PER_SEC);
    player.Seek(target);
}