Example #1
		public CVPixelBuffer CopyPixelBuffer (CMTime itemTime, ref CMTime outItemTimeForDisplay)
		{
			var ptr = WeakCopyPixelBuffer (itemTime, ref outItemTimeForDisplay);
			if (ptr == IntPtr.Zero)
				return null;

			return new CVPixelBuffer (ptr, true);
		}
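For context, a minimal usage sketch of this wrapper (assumptions: `videoOutput` is an AVPlayerItemVideoOutput and `itemTime` is the current item time); the caller owns the returned CVPixelBuffer, so dispose it when done:

		// Hypothetical usage; videoOutput and itemTime are assumed to exist.
		CMTime displayTime = CMTime.Zero;
		using (var pixelBuffer = videoOutput.CopyPixelBuffer (itemTime, ref displayTime)) {
			if (pixelBuffer != null) {
				// hand the frame to the renderer here
			}
		}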
		public void DidFinishProcessingLivePhotoMovie (AVCapturePhotoOutput captureOutput, NSUrl outputFileUrl, CMTime duration, CMTime photoDisplayTime, AVCaptureResolvedPhotoSettings resolvedSettings, NSError error)
		{
			if (error != null) {
				Console.WriteLine ($"Error processing live photo companion movie: {error.LocalizedDescription})");
				return;
			}

			livePhotoCompanionMovieUrl = outputFileUrl;
		}
		private void ProcessComposition (AVMutableComposition composition)
		{
			var tracks = new List<List<APLCompositionTrackSegmentInfo>> ();
			foreach (AVCompositionTrack track in composition.Tracks) {
				var segmentInfos = new List<APLCompositionTrackSegmentInfo> ();
				foreach (AVCompositionTrackSegment segment in track.Segments) {
					var segmentInfo = new APLCompositionTrackSegmentInfo (segment, track.MediaType);
					segmentInfos.Add (segmentInfo);
				}

				tracks.Add (segmentInfos);
			}

			compositionTracks = tracks;
			duration = CMTime.GetMaximum (duration, composition.Duration);
		}
		public void SynchronizeToComposition (AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
		{
			compositionTracks = null;
			audioMixTracks = null;
			videoCompositionStages = null;

			duration = new CMTime (1, 1);

			if (composition != null)
				ProcessComposition (composition);

			if (videoComposition != null)
				ProcessVideoComposition (videoComposition);

			if (audioMix != null)
				ProcessAudioMix (audioMix);
		}
Example #5
 public virtual bool InsertTimeRanges(NSValue cmTimeRanges, AVAssetTrack[] tracks, CMTime startTime, out NSError error)
 {
     return InsertTimeRanges(new NSValue [] { cmTimeRanges }, tracks, startTime, out error);
 
 }
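A sketch of a call site for this overload (assumptions: `track` exposes InsertTimeRanges and `assetTracks` holds the source tracks); the single CMTimeRange is boxed into an NSValue with NSValue.FromCMTimeRange:

 // Hypothetical call site; track and assetTracks are assumed.
 var range = new CMTimeRange { Start = CMTime.Zero, Duration = CMTime.FromSeconds (5, 600) };
 NSError error;
 track.InsertTimeRanges (NSValue.FromCMTimeRange (range), assetTracks, CMTime.Zero, out error);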
Example #6
        void DidDecompress(IntPtr sourceFrame, VTStatus status, VTDecodeInfoFlags flags, CVImageBuffer buffer, CMTime presentationTimeStamp, CMTime presentationDuration)
        {
            if (status != VTStatus.Ok)
            {
                Console.WriteLine("Error decompresssing frame at time: {0:#.###} error: {1} infoFlags: {2}",
                                  (float)presentationTimeStamp.Value / presentationTimeStamp.TimeScale, (int)status, flags);
                return;
            }

            if (buffer == null)
            {
                return;
            }

            // Find the correct position for this frame in the output frames array
            if (presentationTimeStamp.IsInvalid)
            {
                Console.WriteLine("Not a valid time for image buffer");
                return;
            }

            var framePTS = presentationTimeStamp.Seconds;

            lock (thisLock) {
                // since we want to keep the managed `pixelBuffer` alive outside the execution
                // of the callback we need to create our own (managed) instance from the handle
                var pixelBuffer = Runtime.GetINativeObject <CVPixelBuffer> (buffer.Handle, false);

                int insertionIndex = presentationTimes.Count - 1;
                while (insertionIndex >= 0)
                {
                    var aNumber = presentationTimes [insertionIndex];
                    if (aNumber <= framePTS)
                    {
                        break;
                    }
                    insertionIndex--;
                }

                if (insertionIndex + 1 == presentationTimes.Count)
                {
                    presentationTimes.Add(framePTS);
                    outputFrames.Add(pixelBuffer);
                }
                else
                {
                    presentationTimes.Insert(insertionIndex + 1, framePTS);
                    outputFrames.Insert(insertionIndex + 1, pixelBuffer);
                }
            }
        }
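The linear scan above is O(n) per frame; since `presentationTimes` stays sorted, the same insertion can be expressed with List&lt;double&gt;.BinarySearch. A sketch, assuming `presentationTimes` is a List&lt;double&gt; (note: when a timestamp already exists, this inserts before the equal element rather than after it, which rarely matters for frame ordering):

            // Equivalent insertion via binary search; ~index is the insertion point when not found.
            int index = presentationTimes.BinarySearch (framePTS);
            if (index < 0)
                index = ~index;
            presentationTimes.Insert (index, framePTS);
            outputFrames.Insert (index, pixelBuffer);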
Example #7
        private void BuildTransitionComposition(AVMutableComposition mutableComposition, AVMutableVideoComposition mutableVideoComposition)
        {
            var nextClipStartTime = CMTime.Zero;
            var clipsCount        = this.Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            var transitionDuration = this.TransitionDuration;

            foreach (var clipTimeRange in this.ClipTimeRanges)
            {
                var halfClipDuration = clipTimeRange.Duration;
                halfClipDuration.TimeScale *= 2; // You can halve a rational by doubling its denominator.
                transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
            }

            // Add two video tracks and two audio tracks.
            var compositionVideoTracks = new AVMutableCompositionTrack[2];
            var compositionAudioTracks = new AVMutableCompositionTrack[2];

            compositionVideoTracks[0] = mutableComposition.AddMutableTrack(AVMediaType.Video, 0);
            compositionVideoTracks[1] = mutableComposition.AddMutableTrack(AVMediaType.Video, 0);
            compositionAudioTracks[0] = mutableComposition.AddMutableTrack(AVMediaType.Audio, 0);
            compositionAudioTracks[1] = mutableComposition.AddMutableTrack(AVMediaType.Audio, 0);

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
                var asset            = this.Clips[i];
                var timeRangeInAsset = this.ClipTimeRanges[i];

                var clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video)[0];
                compositionVideoTracks[alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out _);

                var clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio)[0];
                compositionAudioTracks[alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out _);

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                passThroughTimeRanges[i] = new CMTimeRange {
                    Start = nextClipStartTime, Duration = timeRangeInAsset.Duration
                };

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item.
                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges[i] = new CMTimeRange {
                        Start = nextClipStartTime, Duration = transitionDuration
                    };
                }
            }

            // Set up the video composition to perform cross dissolve or diagonal wipe transitions between clips.
            var instructions = new List <AVVideoCompositionInstruction>();

            // Cycle between "pass through A", "transition from A to B", "pass through B"
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2; // alternating targets

                if (mutableVideoComposition.CustomVideoCompositorClass != null)
                {
                    var videoInstruction = new CustomVideoCompositionInstruction(compositionVideoTracks[alternatingIndex].TrackID, passThroughTimeRanges[i]);
                    instructions.Add(videoInstruction);
                }
                else
                {
                    // Pass through clip i.
                    var passThroughInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    passThroughInstruction.TimeRange = passThroughTimeRanges[i];

                    var passThroughLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks[alternatingIndex]);
                    passThroughInstruction.LayerInstructions = new[] { passThroughLayer };

                    instructions.Add(passThroughInstruction);
                }

                if (i + 1 < clipsCount)
                {
                    // Add transition from clip i to clip i+1.
                    if (mutableVideoComposition.CustomVideoCompositorClass != null)
                    {
                        var videoInstruction = new CustomVideoCompositionInstruction(new NSNumber[]
                        {
                            compositionVideoTracks[0].TrackID,
                            compositionVideoTracks[1].TrackID
                        }, transitionTimeRanges[i]);

                        if (alternatingIndex == 0)
                        {
                            // First track -> Foreground track while compositing
                            videoInstruction.ForegroundTrackId = compositionVideoTracks[alternatingIndex].TrackID;
                            // Second track -> Background track while compositing
                            videoInstruction.BackgroundTrackId = compositionVideoTracks[1 - alternatingIndex].TrackID;
                        }

                        instructions.Add(videoInstruction);
                    }
                    else
                    {
                        var transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                        transitionInstruction.TimeRange = transitionTimeRanges[i];

                        var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks[alternatingIndex]);
                        var toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks[1 - alternatingIndex]);

                        transitionInstruction.LayerInstructions = new[] { toLayer, fromLayer };
                        instructions.Add(transitionInstruction);
                    }
                }
            }

            mutableVideoComposition.Instructions = instructions.ToArray();
        }
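The "halve a rational by doubling its denominator" trick used when clamping transitionDuration is plain CMTime arithmetic, exact with no rounding; a small check:

            var clipDuration = new CMTime (10, 1);       // 10/1 = 10 seconds
            clipDuration.TimeScale *= 2;                 // 10/2 = 5 seconds, exactly
            Console.WriteLine (clipDuration.Seconds);    // prints 5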
		public double HorizontalPositionForTime (CMTime time)
		{
			double seconds = 0.0;
			if (CMTime.Compare (time, CMTime.Zero) == 1)
				seconds = time.Seconds;

			return seconds * scaledDurationToWidth + LeftInsetToMatchTimeSlider + LeftMarginInset;
		}
Example #9
using AVFoundation;
using ClubManagement.IBLL;
using Foundation;
using Radar.iOS;
using System;
using System.Collections.Generic;

using Xamarin.Forms;
using ClubManagement.Model;
using AudioToolbox;
using System.Threading.Tasks;
using CoreMedia;
using UIKit;

[assembly: Dependency(typeof(AudioiOS))]

namespace Radar.iOS
{
    public class AudioiOS : IAudio
    {
        float _volume = 15;
        AudioCanalEnum _canal = AudioCanalEnum.Nenhum;
        IList<string> _audioAtual = new List<string>();
        int _audioIndex = 0;
        AVAudioPlayer _player;

        public AudioCanalEnum Canal
        {
            get {
                return _canal;
            }

            set
            {
                _canal = value;
            }
        }

        public float Volume {
            get {
                return _volume;
            }
            set {
                _volume = value;
            }
        }

		AVAudioPlayer criarAudio(string arquivo)
		{
			//UIApplicationState sharedApplication = new UIApplicationState();
			NSUrl songURL = new NSUrl(arquivo);
			NSError err;

			var state = UIApplication.SharedApplication.ApplicationState;
			if (state != UIApplicationState.Background)
			{
				AVAudioPlayer player = new AVAudioPlayer(songURL, "wav", out err);
				player.Volume = Volume;
				player.NumberOfLoops = 0;
				return player;
			}
			else {

				NSUrl url = NSUrl.FromFilename(arquivo);
				//SystemSound notificationSound = SystemSound.FromFile(NotificationSoundPath);
				SystemSound mySound = new SystemSound(url);

				mySound.AddSystemSoundCompletion(SystemSound.Vibrate.PlaySystemSound);
				var asset = AVAsset.FromUrl(NSUrl.FromFilename(arquivo));

				CMTime audioDuration = asset.Duration;
				double tempo = audioDuration.Seconds; // avoid integer division between Value and TimeScale

				mySound.PlaySystemSound();
				esperaFinalizarSom(tempo);
			}

			return null;
		}

		public void esperaFinalizarSom(double tempo)
		{
			// play time in milliseconds plus a one-second safety margin
			int tempoInt = Convert.ToInt32(tempo) * 1000 + 1000;

			Task.Delay(tempoInt).Wait();
		}
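Blocking with Task.Delay(...).Wait() ties up the calling thread for the whole sound; a non-blocking sketch of the same wait (hypothetical async variant, same padding):

		public async Task EsperaFinalizarSomAsync (double tempo)
		{
			int delayMs = Convert.ToInt32 (tempo) * 1000 + 1000;
			await Task.Delay (delayMs);
		}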

		void playProximo()
		{
			if (_audioAtual != null && _audioIndex < _audioAtual.Count)
			{
				string arquivo = _audioAtual[_audioIndex];
				_audioIndex++;
				_player = criarAudio(arquivo);
				var state = UIApplication.SharedApplication.ApplicationState;
				if (state != UIApplicationState.Background)
				{
					_player.FinishedPlaying += (sender, e) =>
					{
						playProximo();

					};
					_player.Play();
				}
				else {
					playProximo();
				}


			}
			else {
				//_player.Dispose();
				_player = null;
			}
		}

		public void play(string[] arquivos)
        {
            _audioIndex = 0;
            _audioAtual = null;
           if (_player != null)
            {
                if (_player.Playing)
                    _player.Stop();
                _player.Dispose();
               _player = null;
            }
            if (arquivos != null)
                _audioAtual = arquivos;
            playProximo();
        }

        public void play(string arquivo)
        {
			var player = criarAudio(arquivo);
			//criarAudio(arquivo);
			var state = UIApplication.SharedApplication.ApplicationState;
			if (state != UIApplicationState.Background)
			{
				player.Play();
			}
        }
    }
}


Example #10
		public unsafe void RenderCrosshairs (float[] crosshairCoordinates, nuint numberOfCrosshairs, CMTime frameTime)
		{
			fixed (float* c = &crosshairCoordinates [0]) {
				RenderCrosshairs ((IntPtr)c, numberOfCrosshairs, frameTime);
			}
		}
Example #11
 public void Seek(CMTime time, AVCompletionHandler completion)
 {
     Seek (time, (x) => { completion (); });
 }
		// Utility methods

		static double FactorForTimeInRange( CMTime time, CMTimeRange range)
		{
			CMTime elapsed = CMTime.Subtract (time, range.Start);
			return elapsed.Seconds / range.Duration.Seconds;
		}
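If range.Duration can be zero, the division above yields NaN or Infinity; a defensive sketch (hypothetical guarded variant):

		static double SafeFactorForTimeInRange (CMTime time, CMTimeRange range)
		{
			var rangeSeconds = range.Duration.Seconds;
			if (rangeSeconds <= 0)
				return 0; // empty range: report no progress instead of NaN/Infinity
			return CMTime.Subtract (time, range.Start).Seconds / rangeSeconds;
		}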
Example #13
 public void Seek(CMTime time, AVCompletionHandler completion)
 {
     Seek(time, (x) => { completion(); });
 }
Example #14
 public static void SetPosition(VideoHandle handle, double position)
 {
     handle.PlayerItem.Seek(CMTime.FromSeconds(position, 1000));
 }
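The preferredTimeScale argument controls precision: CMTime.FromSeconds stores seconds as a value/timescale rational, so a timescale of 1000 keeps roughly millisecond precision while a timescale of 1 collapses to whole seconds. A quick illustration:

 var precise = CMTime.FromSeconds (1.2345, 1000);  // ≈ 1.234 s (1234/1000)
 var coarse  = CMTime.FromSeconds (1.2345, 1);     // ≈ 1 s (1/1)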
Example #15
        static PixelBuffer CopyPixelBuffer(AVPlayerItemVideoOutput output, CMTime time, ref CMTime outItemTimeForDisplay)
        {
            var args   = new object[] { time, outItemTimeForDisplay };
            var result = _copyPixelBufferMethod.Invoke(output, args);

            // Invoke boxes its arguments, so copy the updated display time back to the caller.
            outItemTimeForDisplay = (CMTime)args[1];

            return new PixelBuffer(_gl, (IntPtr)result);
        }
Example #16
		private void configureOutput()
		{
			output = new AVCaptureMovieFileOutput ();

			long totalSeconds = 10000;
			Int32 preferredTimeScale = 30;
			CMTime maxDuration = new CMTime (totalSeconds, preferredTimeScale);
			output.MinFreeDiskSpaceLimit = 1024 * 1024;
			output.MaxRecordedDuration = maxDuration;

			if (session.CanAddOutput (output)) {
				session.AddOutput (output);
			}

			session.SessionPreset = AVCaptureSession.PresetMedium;

			//configure output location
			var documents = Environment.GetFolderPath (Environment.SpecialFolder.MyDocuments);
			var library = System.IO.Path.Combine (documents, "..", "Library");
			videoPath = System.IO.Path.Combine (library, "sweetMovieFilm.mov");

			videoLocation = new NSUrl (videoPath, false);

			session.StartRunning ();
			this.btnRecord.TouchUpInside += startStopPushed;
		}
		static string BuildTimeString (CMTime time, string sign = null)
		{
			sign = sign ?? string.Empty;
			var seconds = Math.Round (time.Seconds);

			int hh = (int)Math.Floor (seconds / 3600);
			int mm = (int)Math.Floor ((seconds - hh * 3600) / 60);
			int ss = (int)seconds % 60;

			return hh > 0
				? string.Format ("{0}{1:00}:{2:00}:{3:00}", sign, hh, mm, ss)
				: string.Format ("{0}{1:00}:{2:00}", sign, mm, ss);
		}
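Sample outputs of BuildTimeString, for reference:

		// BuildTimeString (CMTime.FromSeconds (3725, 1))     -> "01:02:05"
		// BuildTimeString (CMTime.FromSeconds (65, 1), "-")  -> "-01:05"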
		void UpdateTimeIndicators (CMTime time, CMTime duration)
		{
			ElapsedTimeLabel.Text = BuildTimeString (time);
			CurrentTimeSlider.Value = (float)(time.Seconds / duration.Seconds);
			RemainingTimeLabel.Text = BuildTimeString (duration - time, "-");
		}
Example #19
        /// <summary>
        /// Updates the time elapsed interval of the video player.
        /// </summary>
        private void UpdateTimeElapsedInterval()
        {
            if (_periodicTimeObserver != null)
            {
                _playerControl?.Player?.RemoveTimeObserver(_periodicTimeObserver);
                _periodicTimeObserver = null;
            }

            var element = Element;

            if (element != null && Element?.TimeElapsedInterval > 0)
            {
                _periodicTimeObserver = _playerControl?.Player?.AddPeriodicTimeObserver(CMTime.FromSeconds(element.TimeElapsedInterval, 1), null,
                                                                                          time => element.OnTimeElapsed(CreateVideoPlayerEventArgs()));
            }
        }
Example #20
 public void Seek(CMTime time, CMTime toleranceBefore, CMTime toleranceAfter, AVCompletionHandler completion)
 {
     Seek(time, toleranceBefore, toleranceAfter, (x) => { completion(); });
 }
 void handler(CMTime requestedTime, IntPtr imageRef, CMTime actualTime, AVAssetImageGeneratorResult result, NSError error)
 {
     handled = true;
     mre.Set();
 }
Example #22
 /// <summary>
 /// Converts a CMTime value to seconds.
 /// </summary>
 /// <returns>The time, in seconds.</returns>
 /// <param name="time">The time to convert.</param>
 float CMTimeGetSeconds(CMTime time)
 {
     return (float)time.Value / (float)time.TimeScale;
 
 }
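Note that Xamarin's CMTime already exposes a Seconds property (used throughout these examples), so the helper can simply delegate:

 float CMTimeGetSeconds (CMTime time) => (float)time.Seconds;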
 public SyncScrubberEventArgs(CMTime time)
 {
     this.Time = time;
 }
Example #24
        /// <summary>
        /// Sets up the exposure controls after the view loads.
        /// </summary>
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            // Hide no camera label
            NoCamera.Hidden = ThisApp.CameraAvailable;

            // Attach to camera view
            ThisApp.Recorder.DisplayView = CameraView;

            // Set min and max values
            Offset.MinValue = ThisApp.CaptureDevice.MinExposureTargetBias;
            Offset.MaxValue = ThisApp.CaptureDevice.MaxExposureTargetBias;

            Duration.MinValue = 0.001f;
            Duration.MaxValue = 1f;

            ISO.MinValue = ThisApp.CaptureDevice.ActiveFormat.MinISO;
            ISO.MaxValue = ThisApp.CaptureDevice.ActiveFormat.MaxISO;

            Bias.MinValue = ThisApp.CaptureDevice.MinExposureTargetBias;
            Bias.MaxValue = ThisApp.CaptureDevice.MaxExposureTargetBias;

            // Create a timer to monitor and update the UI
            SampleTimer          = new Timer(5000);
            SampleTimer.Elapsed += (sender, e) => {
                // Update position slider
                Offset.BeginInvokeOnMainThread(() => {
                    Offset.Value = ThisApp.Input.Device.ExposureTargetOffset;
                });

                Duration.BeginInvokeOnMainThread(() => {
                    var newDurationSeconds = CMTimeGetSeconds(ThisApp.Input.Device.ExposureDuration);
                    var minDurationSeconds = Math.Max(CMTimeGetSeconds(ThisApp.CaptureDevice.ActiveFormat.MinExposureDuration), ExposureMinimumDuration);
                    var maxDurationSeconds = CMTimeGetSeconds(ThisApp.CaptureDevice.ActiveFormat.MaxExposureDuration);
                    var p          = (newDurationSeconds - minDurationSeconds) / (maxDurationSeconds - minDurationSeconds);
                    Duration.Value = (float)Math.Pow(p, 1.0f / ExposureDurationPower);
                });

                ISO.BeginInvokeOnMainThread(() => {
                    ISO.Value = ThisApp.Input.Device.ISO;
                });

                Bias.BeginInvokeOnMainThread(() => {
                    Bias.Value = ThisApp.Input.Device.ExposureTargetBias;
                });
            };

            // Watch for value changes
            Segments.ValueChanged += (sender, e) => {
                NSError err;
                // Lock device for change
                if (ThisApp.CaptureDevice.LockForConfiguration(out err))
                {
                    // Take action based on the segment selected
                    switch (Segments.SelectedSegment)
                    {
                    case 0:
                        // Activate auto exposure and start monitoring position
                        Duration.Enabled = false;
                        ISO.Enabled      = false;
                        ThisApp.CaptureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
                        SampleTimer.Start();
                        Automatic = true;
                        break;

                    case 1:
                        // Lock exposure and allow the user to control the camera
                        SampleTimer.Stop();
                        ThisApp.CaptureDevice.ExposureMode = AVCaptureExposureMode.Locked;
                        Automatic        = false;
                        Duration.Enabled = false;
                        ISO.Enabled      = false;
                        break;

                    case 2:
                        // Custom exposure and allow the user to control the camera
                        SampleTimer.Stop();
                        ThisApp.CaptureDevice.ExposureMode = AVCaptureExposureMode.Custom;
                        Automatic        = false;
                        Duration.Enabled = true;
                        ISO.Enabled      = true;
                        break;
                    }

                    // Unlock device
                    ThisApp.CaptureDevice.UnlockForConfiguration();
                }
            };

            // Monitor position changes
            Duration.ValueChanged += (sender, e) => {
                // If we are in the automatic mode, ignore changes
                if (Automatic)
                {
                    return;
                }

                // Calculate value
                var p = Math.Pow(Duration.Value, ExposureDurationPower);
                var minDurationSeconds = Math.Max(ThisApp.CaptureDevice.ActiveFormat.MinExposureDuration.Seconds, ExposureMinimumDuration);
                var maxDurationSeconds = ThisApp.CaptureDevice.ActiveFormat.MaxExposureDuration.Seconds;
                var newDurationSeconds = p * (maxDurationSeconds - minDurationSeconds) + minDurationSeconds;

                NSError err;
                // Update Focus position
                if (ThisApp.CaptureDevice.LockForConfiguration(out err))
                {
                    ThisApp.CaptureDevice.LockExposure(CMTime.FromSeconds(newDurationSeconds, 1000 * 1000 * 1000), AVCaptureDevice.ISOCurrent, null);
                    ThisApp.CaptureDevice.UnlockForConfiguration();
                }
            };

            ISO.ValueChanged += (sender, e) => {
                // If we are in the automatic mode, ignore changes
                if (Automatic)
                {
                    return;
                }

                NSError err;
                // Update Focus position
                if (ThisApp.CaptureDevice.LockForConfiguration(out err))
                {
                    ThisApp.CaptureDevice.LockExposure(ThisApp.CaptureDevice.ExposureDuration, ISO.Value, null);
                    ThisApp.CaptureDevice.UnlockForConfiguration();
                }
            };

            Bias.ValueChanged += (sender, e) => {
                NSError err;
                // Update Focus position
                if (ThisApp.CaptureDevice.LockForConfiguration(out err))
                {
                    ThisApp.CaptureDevice.SetExposureTargetBias(Bias.Value, null);
                    ThisApp.CaptureDevice.UnlockForConfiguration();
                }
            };
        }
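The Duration slider uses a power-curve mapping so short exposure durations get most of the slider's travel. A self-contained sketch of the forward and inverse mapping used above; the constant values mirror the sample's ExposureDurationPower and ExposureMinimumDuration fields and are assumptions here:

        // Hypothetical constants; the sample defines these as fields elsewhere.
        const double ExposureDurationPower   = 5.0;
        const double ExposureMinimumDuration = 1.0 / 1000;

        // slider [0..1] -> seconds (used in Duration.ValueChanged)
        static double SliderToSeconds (double sliderValue, double min, double max) =>
            Math.Pow (sliderValue, ExposureDurationPower) * (max - min) + min;

        // seconds -> slider [0..1] (used in the sample timer)
        static double SecondsToSlider (double seconds, double min, double max) =>
            Math.Pow ((seconds - min) / (max - min), 1.0 / ExposureDurationPower);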
Example #25
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            try
            {
                lastSampleTime = sampleBuffer.PresentationTimeStamp;

                var image = ImageFromSampleBuffer(sampleBuffer);

                if (frame == 0)
                {
                    writer.StartWriting();
                    writer.StartSessionAtSourceTime(lastSampleTime);
                    frame = 1;
                }
                String infoString = "";
                if (inputWriter.ReadyForMoreMediaData)
                {
                    if (!inputWriter.AppendSampleBuffer(sampleBuffer))
                    {
                        infoString = "Failed to append sample buffer";
                    }
                    else
                    {
                        infoString = String.Format("{0} frames captured", frame++);
                    }
                }
                else
                {
                    infoString = "Writer not ready";
                }

                ImageView.BeginInvokeOnMainThread(() => ImageView.Image = image);
                InfoLabel.BeginInvokeOnMainThread(() => InfoLabel.Text = infoString);
            }
            catch (Exception e)
            {
                Failure.Alert(e.Message);
            }
            finally
            {
                sampleBuffer.Dispose();
            }
        }
Example #26
        static unsafe void VTDecompressionOutputHandlerTrampoline(BlockLiteral *block,
                                                                  VTStatus status, VTDecodeInfoFlags infoFlags, IntPtr imageBuffer,
                                                                  CMTime presentationTimeStamp, CMTime presentationDuration)
        {
            var del = (VTDecompressionOutputHandler)(block->Target);

            if (del != null)
            {
                del(status, infoFlags, new CVImageBuffer(imageBuffer), presentationTimeStamp, presentationDuration);
            }
        }
		void ResetUpdateTimeIndicators (CMTime duration)
		{
			ElapsedTimeLabel.Text = BuildTimeString (CMTime.Zero);
			CurrentTimeSlider.Value = 0;
			RemainingTimeLabel.Text = BuildTimeString (duration, "-");
		}
Example #28
        static void DecompressionCallback(IntPtr outputCallbackClosure, IntPtr sourceFrame, VTStatus status,
                                          VTDecodeInfoFlags infoFlags, IntPtr imageBufferPtr, CMTime presentationTimeStamp, CMTime presentationDuration)
        {
            var gch  = GCHandle.FromIntPtr(outputCallbackClosure);
            var func = (VTDecompressionOutputCallback)gch.Target;

            // Apple headers states that the callback should get a CVImageBuffer but it turned out that not all of them are a
            // CVImageBuffer, some can be instances of CVImageBuffer and others can be instances of CVPixelBuffer. So we go one
            // step further in the inheritance hierarchy and supply the callback a CVPixelBuffer and the callback supplies
            // to the developer a CVImageBuffer, so the developer can choose when to use one or the other and we mimic
            // what Apple provides on its headers.
            using (var sampleBuffer = new CVPixelBuffer(imageBufferPtr)) {
                func(sourceFrame, status, infoFlags, sampleBuffer, presentationTimeStamp, presentationDuration);
            }
        }
        // Utility methods

        static double FactorForTimeInRange(CMTime time, CMTimeRange range)
        {
            CMTime elapsed = CMTime.Subtract(time, range.Start);

            return elapsed.Seconds / range.Duration.Seconds;
        }
 public virtual void DidFinishProcessingLivePhotoMovie(AVCapturePhotoOutput captureOutput, NSUrl outputFileUrl, CMTime duration, CMTime photoDisplayTime, AVCaptureResolvedPhotoSettings resolvedSettings, NSError error)
 {
     if (error != null)
     {
         Console.WriteLine($"Error processing live photo companion movie: {error}", error);
         return;
     }
 }
 TimeSpan ConvertTime(CMTime cmTime)
 {
     return TimeSpan.FromSeconds(Double.IsNaN(cmTime.Seconds) ? 0 : cmTime.Seconds);
 
 }
        public void TestCallbackBackground(bool stronglyTyped)
        {
            var      width  = 640;
            var      height = 480;
            var      encoder_specification = new VTVideoEncoderSpecification();
            var      source_attributes     = new CVPixelBufferAttributes(CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange, width, height);
            var      duration = new CMTime(40, 1);
            VTStatus status;

            using var frameProperties = new NSDictionary();

            int callbackCounter = 0;
            var failures        = new List <string> ();
            var callback        = new VTCompressionSession.VTCompressionOutputCallback((IntPtr sourceFrame, VTStatus encodeStatus, VTEncodeInfoFlags flags, CMSampleBuffer buffer) =>
            {
                Interlocked.Increment(ref callbackCounter);
                if (encodeStatus != VTStatus.Ok)
                {
                    failures.Add($"Callback #{callbackCounter} failed. Expected status = Ok, got status = {encodeStatus}");
                }
                }
#if !NET
                // Work around a crash that occurs if the buffer isn't retained
                if (stronglyTyped)
                {
                    CFRetain(buffer.Handle);
                }
#endif
            });

            using var session = stronglyTyped
                                ? VTCompressionSession.Create(
                      width, height,
                      CMVideoCodecType.H264,
                      callback,
                      encoder_specification,
                      source_attributes
                      )
                                : VTCompressionSession.Create(
                      width, height,
                      CMVideoCodecType.H264,
                      callback,
                      encoder_specification,
                      source_attributes.Dictionary
                      );

            var frameCount = 20;

            for (var i = 0; i < frameCount; i++)
            {
                using var imageBuffer = new CVPixelBuffer(width, height, CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange);
                var pts = new CMTime(40 * i, 1);
                status = session.EncodeFrame(imageBuffer, pts, duration, null, imageBuffer, out var infoFlags);
                Assert.AreEqual(status, VTStatus.Ok, $"status #{i}");
                // This looks weird, but it seems the video encoder can become overwhelmed otherwise, and it
                // will start failing (and taking a long time to do so, eventually timing out the test).
                Thread.Sleep(10);
            }
            status = session.CompleteFrames(new CMTime(40 * frameCount, 1));
            Assert.AreEqual(status, VTStatus.Ok, "status finished");
            Assert.AreEqual(callbackCounter, frameCount, "frame count");
            Assert.That(failures, Is.Empty, "no callback failures");
        }
Example #33
        private async Task scrubTo(float sliderValue)
        {
            var duration = playerItemDuration;

            if (Double.IsInfinity(duration))
            {
                return;
            }

            var width = slider.Bounds.Width;

            var time      = duration * sliderValue;
            var tolerance = 1f * duration / width;

            scrubInFlight = true;

            await _player.SeekAsync(CMTime.FromSeconds(time, NSEC_PER_SEC), CMTime.FromSeconds(tolerance, NSEC_PER_SEC), CMTime.FromSeconds(tolerance, NSEC_PER_SEC));

            scrubInFlight = false;
            updateTimeLabel();
        }
        public virtual void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription();

            if (connection == videoConnection)
            {
                // Get framerate
                CMTime timestamp = sampleBuffer.PresentationTimeStamp;
                CalculateFramerateAtTimestamp(timestamp);

                // Get frame dimensions (for onscreen display)
                if (VideoDimensions.IsEmpty)
                {
                    VideoDimensions = formatDescription.GetVideoPresentationDimensions(true, false);
                }

                // Get the buffer type
                if (VideoType == 0)
                {
                    VideoType = formatDescription.MediaSubType;
                }

                // Synchronously process the pixel buffer to de-green it.
                using (var pixelBuffer = sampleBuffer.GetImageBuffer())
                    ProcessPixelBuffer(pixelBuffer);

                previewBufferQueue.Enqueue(sampleBuffer);

                //var writeBuffer = sampleBuffer.Duplicate ();
                InvokeOnMainThread(() => {
                    var j = previewBufferQueue.Dequeue();

                    var sbuf = j as CMSampleBuffer;
                    if (sbuf == null)
                    {
#if DEBUG
                        // Record the current sampleBuffer.ClassHandle
                        // Then run another iteration and on the next one, print the ClassHandle
                        Console.WriteLine("The type is {0}", new NSString(CFCopyDescription(j.Handle)));
#endif
                        return;
                    }

                    using (CVImageBuffer pixBuf = sbuf.GetImageBuffer()) {
                        if (PixelBufferReadyForDisplay != null)
                        {
                            PixelBufferReadyForDisplay(pixBuf);
                        }
                    }
                });
            }
            // keep a reference to 'sampleBuffer', movieWritingQueue will remove it
            CompleteBufferUse(sampleBuffer);

            movieWritingQueue.DispatchAsync(() => {
                if (assetWriter != null)
                {
                    bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

                    if (connection == videoConnection)
                    {
                        // Initialize the video input if this is not done yet
                        if (!readyToRecordVideo)
                        {
                            readyToRecordVideo = SetupAssetWriterVideoInput(formatDescription);
                        }

                        // Write the video data to file
                        if (readyToRecordVideo && readyToRecordAudio)
                        {
                            WriteSampleBuffer(sampleBuffer, AVMediaType.Video);
                        }
                    }
                    else if (connection == audioConnection)
                    {
                        if (!readyToRecordAudio)
                        {
                            readyToRecordAudio = SetupAssetWriterAudioInput(formatDescription);
                        }

                        if (readyToRecordAudio && readyToRecordVideo)
                        {
                            WriteSampleBuffer(sampleBuffer, AVMediaType.Audio);
                        }
                    }
                    bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

                    if (!wasReadyToRecord && isReadyToRecord)
                    {
                        recordingWillBeStarted = false;
                        IsRecording            = true;

                        if (RecordingDidStart != null)
                        {
                            RecordingDidStart();
                        }
                    }
                }
                CompleteBufferUse(sampleBuffer);
            });
        }
Example #35
        /// <summary>
        /// Updates the video source property on the native player.
        /// </summary>
        /// <param name="oldElement">The old element.</param>
        private async Task UpdateSource(VideoPlayer oldElement = null)
        {
            try
            {
                var newSource = Element?.Source;

                if (oldElement != null)
                {
                    var oldSource = oldElement.Source;

                    // Nothing to do when the source hasn't changed.
                    if (oldSource.Equals(newSource))
                    {
                        return;
                    }
                }

                Element.SetValue(VideoPlayer.IsLoadingProperty, true);
                var videoSourceHandler = VideoSourceHandler.Create(newSource);
                var path = await videoSourceHandler.LoadVideoAsync(newSource, new CancellationToken());

                Log.Info($"Video Source: {path}");

                if (!string.IsNullOrEmpty(path))
                {
                    if (_currentTimeObserver != null)
                    {
                        _playerControl.Player.RemoveTimeObserver(_currentTimeObserver);
                    }
                    if (_didPlayToEndTimeNotificationObserver != null)
                    {
                        NSNotificationCenter.DefaultCenter.RemoveObserver(_didPlayToEndTimeNotificationObserver);
                    }

                    // Update video source.
                    Element.SetValue(VideoPlayer.CurrentTimePropertyKey, TimeSpan.Zero);

                    var pathUrl = newSource is UriVideoSource ? NSUrl.FromString(path) : NSUrl.FromFilename(path);

                    _playerControl.Player.CurrentItem?.RemoveObserver(FromObject(this), "status");

                    _playerControl.Player.ReplaceCurrentItemWithPlayerItem(AVPlayerItem.FromUrl(pathUrl));

                    _playerControl.Player.CurrentItem.AddObserver(this, (NSString)"status", 0, Handle);

                    Element.OnPlayerStateChanged(CreateVideoPlayerStateChangedEventArgs(elitePlayerState.Initialized));

                    _didPlayToEndTimeNotificationObserver = NSNotificationCenter.DefaultCenter.AddObserver(
                        AVPlayerItem.DidPlayToEndTimeNotification, DidPlayToEndTimeNotification, _playerControl.Player.CurrentItem);

                    _currentTimeObserver = _playerControl.Player.AddPeriodicTimeObserver(CMTime.FromSeconds(1, 1), null,
                                                                                         time => {
                        Element?.SetValue(VideoPlayer.CurrentTimePropertyKey,
                                          double.IsNaN(time.Seconds) ? TimeSpan.Zero : TimeSpan.FromSeconds(time.Seconds));

                        if (_playerControl.Player.CurrentItem.PlaybackBufferEmpty)
                        {
                            Element.OnBuffering(new EventArgsVideoPlayer());
                        }
                        else if (_playerControl.Player.CurrentItem.PlaybackBufferFull || _playerControl.Player.CurrentItem.PlaybackLikelyToKeepUp)
                        {
                            Element.OnFinishedBuffering(new EventArgsVideoPlayer());
                        }
                    });
                }
            }
            catch (Exception ex)
            {
                Log.Error(ex);
                Element.SetValue(VideoPlayer.IsLoadingProperty, false);
            }
        }
Example #36
        public override void DidFinishProcessingLivePhotoMovie(AVCapturePhotoOutput captureOutput, NSUrl outputFileUrl, CMTime duration, CMTime photoDisplayTime, AVCaptureResolvedPhotoSettings resolvedSettings, NSError error)
        {
            if (error != null)
            {
                Console.WriteLine($"Error processing live photo companion movie: {error.LocalizedDescription})");
                return;
            }

            livePhotoCompanionMovieUrl = outputFileUrl;
        }
 public async Task Seek(TimeSpan position)
 {
     await Task.Run(() => { Player.CurrentItem?.Seek(CMTime.FromSeconds(position.TotalSeconds, 1)); });
 }
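A preferredTimescale of 1 truncates the seek target to whole seconds; a hedged variant that keeps sub-second precision and requests a frame-accurate seek by passing zero tolerances (assuming the same Player field):

 public Task SeekAccurate (TimeSpan position)
 {
     // timescale 600 is a common choice (divisible by 24/25/30 fps frame durations)
     var time = CMTime.FromSeconds (position.TotalSeconds, 600);
     return Task.Run (() => Player.CurrentItem?.Seek (time, CMTime.Zero, CMTime.Zero));
 }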
Example #38
        void DidDecompress(IntPtr sourceFrame, VTStatus status, VTDecodeInfoFlags flags, CVImageBuffer buffer, CMTime presentationTimeStamp, CMTime presentationDuration)
        {
            if (status != VTStatus.Ok) {
                Console.WriteLine ("Error decompresssing frame at time: {0:#.###} error: {1} infoFlags: {2}",
                    (float)presentationTimeStamp.Value / presentationTimeStamp.TimeScale, (int)status, flags);
                return;
            }

            if (buffer == null)
                return;

            // Find the correct position for this frame in the output frames array
            if (presentationTimeStamp.IsInvalid) {
                Console.WriteLine ("Not a valid time for image buffer");
                return;
            }

            var framePTS = presentationTimeStamp.Seconds;

            lock (thisLock) {
                // since we want to keep the managed `pixelBuffer` alive outside the execution
                // of the callback we need to create our own (managed) instance from the handle
                var pixelBuffer = Runtime.GetINativeObject<CVPixelBuffer> (buffer.Handle, false);

                int insertionIndex = presentationTimes.Count - 1;
                while (insertionIndex >= 0) {
                    var aNumber = presentationTimes [insertionIndex];
                    if (aNumber <= framePTS)
                        break;
                    insertionIndex--;
                }

                if (insertionIndex + 1 == presentationTimes.Count) {
                    presentationTimes.Add (framePTS);
                    outputFrames.Add (pixelBuffer);
                } else {
                    presentationTimes.Insert (insertionIndex + 1, framePTS);
                    outputFrames.Insert (insertionIndex + 1, pixelBuffer);
                }
            }
        }
Example #39
		public unsafe void RenderLines (float[] lineSlopeAndIntercepts, nuint numberOfLines, CMTime frameTime)
		{
			fixed (float* l = &lineSlopeAndIntercepts [0]) {
				RenderLines ((IntPtr)l, numberOfLines, frameTime);
			}
		}
		/// <summary>
		/// Converts a CMTime value to seconds.
		/// </summary>
		/// <returns>The time, in seconds.</returns>
		/// <param name="time">The time to convert.</param>
		private float CMTimeGetSeconds(CMTime time) {
			return (float)time.Value / (float)time.TimeScale;
		}
Example #41
        public override void ViewDidLoad()
        {
            base.ViewDidLoad ();
            weAreRecording = false;
            lblError.Hidden = true;

            btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

            //Set up session
            session = new AVCaptureSession ();

            //Set up inputs and add them to the session
            //this will only work if using a physical device!

            Console.WriteLine ("getting device inputs");
            try{
                //add video capture device
                device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
                input = AVCaptureDeviceInput.FromDevice (device);
                session.AddInput (input);

                //add audio capture device
                audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                audioInput = AVCaptureDeviceInput.FromDevice(audioDevice);
                session.AddInput(audioInput);

            }
            catch(Exception ex){
                //show the error label. This will always appear when running in the simulator instead of on a physical device.
                lblError.Hidden = false;
                return;
            }

            //Set up preview layer (shows what the input device sees)
            Console.WriteLine ("setting up preview layer");
            previewlayer = new AVCaptureVideoPreviewLayer (session);
            previewlayer.Frame = this.View.Bounds;

            //this code makes UI controls sit on top of the preview layer!  Allows you to just place the controls in interface builder
            UIView cameraView = new UIView ();
            cameraView.Layer.AddSublayer (previewlayer);
            this.View.AddSubview (cameraView);
            this.View.SendSubviewToBack (cameraView);

            Console.WriteLine ("Configuring output");
            output = new AVCaptureMovieFileOutput ();

            long totalSeconds = 10000;
            Int32 preferredTimeScale = 30;
            CMTime maxDuration = new CMTime (totalSeconds, preferredTimeScale);
            output.MinFreeDiskSpaceLimit = 1024 * 1024;
            output.MaxRecordedDuration = maxDuration;

            if (session.CanAddOutput (output)) {
                session.AddOutput (output);
            }

            session.SessionPreset = AVCaptureSession.PresetMedium;

            Console.WriteLine ("About to start running session");

            session.StartRunning ();

            //toggle recording button was pushed.
            btnStartRecording.TouchUpInside += startStopPushed;

            //Console.ReadLine ();
        }
Example #42
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            weAreRecording  = false;
            lblError.Hidden = true;

            btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

            //Set up session
            session = new AVCaptureSession();


            //Set up inputs and add them to the session
            //this will only work if using a physical device!

            Console.WriteLine("getting device inputs");
            try{
                //add video capture device
                device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
                input  = AVCaptureDeviceInput.FromDevice(device);
                session.AddInput(input);

                //add audio capture device
                audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                audioInput  = AVCaptureDeviceInput.FromDevice(audioDevice);
                session.AddInput(audioInput);
            }
            catch (Exception ex) {
                //show the error label. This will always appear when running in the simulator instead of on a physical device.
                lblError.Hidden = false;
                return;
            }



            //Set up preview layer (shows what the input device sees)
            Console.WriteLine("setting up preview layer");
            previewlayer       = new AVCaptureVideoPreviewLayer(session);
            previewlayer.Frame = this.View.Bounds;

            //this code makes UI controls sit on top of the preview layer!  Allows you to just place the controls in interface builder
            UIView cameraView = new UIView();
            cameraView.Layer.AddSublayer(previewlayer);
            this.View.AddSubview(cameraView);
            this.View.SendSubviewToBack(cameraView);

            Console.WriteLine("Configuring output");
            output = new AVCaptureMovieFileOutput();

            long   totalSeconds       = 10000;
            Int32  preferredTimeScale = 30;
            CMTime maxDuration        = new CMTime(totalSeconds, preferredTimeScale);

            output.MinFreeDiskSpaceLimit = 1024 * 1024;
            output.MaxRecordedDuration   = maxDuration;

            if (session.CanAddOutput(output))
            {
                session.AddOutput(output);
            }

            session.SessionPreset = AVCaptureSession.PresetMedium;

            Console.WriteLine("About to start running session");

            session.StartRunning();

            //toggle recording button was pushed.
            btnStartRecording.TouchUpInside += startStopPushed;


            //Console.ReadLine ();
        }
        public Task <OperationResult> AddAudioToVideoTrack(string videoFilePath, string audioFilePath, string outputFilePath,
                                                           float volume = 1, float fadeOutDuration = 0)
        {
            var tcs = new TaskCompletionSource <OperationResult>();

            var composition           = AVMutableComposition.Create();
            var videoCompositionTrack = composition.AddMutableTrack(AVMediaType.Video, 0);
            var audioCompositionTrack = composition.AddMutableTrack(AVMediaType.Audio, 0);

            var videoUrl        = NSUrl.FromFilename(videoFilePath);
            var videoAsset      = AVAsset.FromUrl(videoUrl);
            var videoAssetTrack = videoAsset.TracksWithMediaType(AVMediaType.Video).First();

            var audioUrl        = NSUrl.FromFilename(audioFilePath);
            var audioAsset      = AVAsset.FromUrl(audioUrl);
            var audioAssetTrack = audioAsset.TracksWithMediaType(AVMediaType.Audio).First();

            CGSize size = videoAssetTrack.NaturalSize;
            CMTime time = CMTime.Zero;

            var range = new CMTimeRange
            {
                Start    = CMTime.Zero,
                Duration = videoAssetTrack.TimeRange.Duration
            };

            NSError error = null;

            videoCompositionTrack.InsertTimeRange(range, videoAssetTrack, time, out error);
            if (error != null)
            {
                Console.WriteLine("Error adding video composition track: " + error.LocalizedDescription);
            }

            error = null;
            audioCompositionTrack.InsertTimeRange(range, audioAssetTrack, time, out error);
            if (error != null)
            {
                Console.WriteLine("Error adding audio composition track: " + error.LocalizedDescription);
            }


            var audioMix         = AVMutableAudioMix.Create();
            var audioInputParams = AVMutableAudioMixInputParameters.FromTrack(audioCompositionTrack);

            audioInputParams.SetVolume(volume, CMTime.Zero);

            if (fadeOutDuration > 0)
            {
                var fadeOutStartTime = CMTime.Subtract(videoAssetTrack.TimeRange.Duration, CMTime.FromSeconds(fadeOutDuration, audioAssetTrack.NaturalTimeScale));
                var fadeOutRange     = new CMTimeRange
                {
                    Start    = fadeOutStartTime,
                    Duration = CMTime.FromSeconds(fadeOutDuration, audioAssetTrack.NaturalTimeScale)
                };

                audioInputParams.SetVolumeRamp(volume, 0.0f, fadeOutRange);
            }

            audioMix.InputParameters = new[] { audioInputParams };

            var session = new AVAssetExportSession(composition, AVAssetExportSession.PresetHighestQuality);

            session.OutputUrl      = NSUrl.FromFilename(outputFilePath);
            session.OutputFileType = AVFileType.Mpeg4;
            session.AudioMix       = audioMix;

            session.ExportAsynchronously(() =>
            {
                if (session.Status == AVAssetExportSessionStatus.Failed)
                {
                    tcs.SetResult(OperationResult.AsFailure(session.Error.LocalizedDescription));
                }
                else
                {
                    tcs.SetResult(OperationResult.AsSuccess());
                }
            });

            return tcs.Task;
        }
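A hypothetical call site for the method above ('service' is a placeholder for whatever class hosts AddAudioToVideoTrack, and the paths are examples):

            var result = await service.AddAudioToVideoTrack ("input.mov", "music.m4a", "output.mp4",
                                                             volume: 0.8f, fadeOutDuration: 2f);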
 void ResetUpdateTimeIndicators(CMTime duration)
 {
     ElapsedTimeLabel.Text   = BuildTimeString(CMTime.Zero);
     CurrentTimeSlider.Value = 0;
     RemainingTimeLabel.Text = BuildTimeString(duration, "-");
 }
Example #45
 public void Seek(CMTime time, CMTime toleranceBefore, CMTime toleranceAfter, AVCompletionHandler completion)
 {
     Seek (time, toleranceBefore, toleranceAfter, (x) => { completion (); });
 }
		void SetupPlayerPeriodicTimeObserver()
		{
			// Only add the time observer if one hasn't been created yet.
			if (timeObserverToken != null)
				return;

			var time = new CMTime (1, 1);
			timeObserverToken = Player.AddPeriodicTimeObserver (time, DispatchQueue.MainQueue, t => TimeSlider.Value = (float)t.Seconds);
		}
        UIImage ImageFor(AVAsset avAsset, double time)
        {
            AVAssetImageGenerator imageGenerator = AVAssetImageGenerator.FromAsset (avAsset);
            imageGenerator.AppliesPreferredTrackTransform = true;

            CMTime actualTime;
            NSError error = null;
            var requestedTime = CMTime.FromSeconds (time, 100); // 'time' is in seconds; timescale 100 keeps 1/100 s precision
            using (CGImage posterImage = imageGenerator.CopyCGImageAtTime (requestedTime, out actualTime, out error))
                return UIImage.FromImage (posterImage);
        }
Example #48
 extern static VTStatus VTCompressionSessionCompleteFrames(IntPtr session, CMTime completeUntilPresentationTimeStamp);
Example #49
        public VTStatus EncodeFrame(CVImageBuffer imageBuffer, CMTime presentationTimestamp, CMTime duration,
                                    NSDictionary frameProperties, IntPtr sourceFrame, out VTEncodeInfoFlags infoFlags)
        {
            if (Handle == IntPtr.Zero)
            {
                throw new ObjectDisposedException("CompressionSession");
            }
            if (imageBuffer == null)
            {
                throw new ArgumentNullException("imageBuffer");
            }

            return VTCompressionSessionEncodeFrame(Handle, imageBuffer.Handle, presentationTimestamp, duration,
                                                   frameProperties == null ? IntPtr.Zero : frameProperties.Handle,
                                                   sourceFrame, out infoFlags);
        }
 void UpdateTimeIndicators(CMTime time, CMTime duration)
 {
     ElapsedTimeLabel.Text   = BuildTimeString(time);
     CurrentTimeSlider.Value = (float)(time.Seconds / duration.Seconds);
     RemainingTimeLabel.Text = BuildTimeString(duration - time, "-");
 }
		public void CalculateFramerateAtTimestamp (CMTime timeStamp)
		{
			previousSecondTimestamps.Add (timeStamp);
			
			var oneSecond = CMTime.FromSeconds (1, 1);
			var oneSecondAgo = CMTime.Subtract (timeStamp, oneSecond);
			
			while (previousSecondTimestamps.Count > 0 && CMTime.Compare (previousSecondTimestamps[0], oneSecondAgo) < 0)
				previousSecondTimestamps.RemoveAt (0);
			
			double newRate = Convert.ToDouble (previousSecondTimestamps.Count);
			
			VideoFrameRate = (VideoFrameRate + newRate) / 2;
		}
Example #52
        public VTStatus EncodeFrame(CVImageBuffer imageBuffer, CMTime presentationTimestamp, CMTime duration,
                                    NSDictionary frameProperties, IntPtr sourceFrame, out VTEncodeInfoFlags infoFlags,
                                    VTCompressionOutputHandler outputHandler)
        {
            if (Handle == IntPtr.Zero)
            {
                throw new ObjectDisposedException("CompressionSession");
            }
            if (imageBuffer == null)
            {
                throw new ArgumentNullException("imageBuffer");
            }
            if (outputHandler == null)
            {
                throw new ArgumentNullException("outputHandler");
            }

            unsafe {
                var block    = new BlockLiteral();
                var blockPtr = &block;
                block.SetupBlock(compressionOutputHandlerTrampoline, outputHandler);

                try {
                    return VTCompressionSessionEncodeFrameWithOutputHandler(Handle,
                                                                            imageBuffer.Handle, presentationTimestamp, duration,
                                                                            frameProperties == null ? IntPtr.Zero : frameProperties.Handle,
                                                                            out infoFlags, blockPtr);
                } finally {
                    blockPtr->CleanupBlock();
                }
            }
        }