Example #1
0
        /// <summary>
        /// Exercises the CMTimeMapping factory methods (Create, CreateEmpty,
        /// CreateFromDictionary) plus AsDictionary/Description. iOS 9.0+ only.
        /// </summary>
        public void CMTimeMappingFactoryMethods()
        {
            if (!UIDevice.CurrentDevice.CheckSystemVersion(9, 0))
            {
                Assert.Inconclusive("Requires 9.0+");
            }

            // Two ranges with different durations so Source/Target swaps are detectable.
            var sourceRange = new CMTimeRange()
            {
                Start = new CMTime(1, 1),
                Duration = new CMTime(12, 1)
            };
            var targetRange = new CMTimeRange()
            {
                Start = new CMTime(1, 1),
                Duration = new CMTime(4, 1)
            };

            var mapping = CMTimeMapping.Create(sourceRange, targetRange);
            CompareCMTimeRange(mapping.Source, sourceRange, "CMTimeMapping.Create");
            CompareCMTimeRange(mapping.Target, targetRange, "CMTimeMapping.Create");

            // An "empty" mapping reports an invalid source and the supplied target.
            mapping = CMTimeMapping.CreateEmpty(sourceRange);
            CompareCMTimeRange(mapping.Source, CMTimeRange.InvalidRange, "CMTimeMapping.CreateEmpty");
            CompareCMTimeRange(mapping.Target, sourceRange, "CMTimeMapping.CreateEmpty");

            // An empty dictionary produces an all-invalid mapping.
            mapping = CMTimeMapping.CreateFromDictionary(new NSDictionary());
            CompareCMTimeRange(mapping.Source, CMTimeRange.InvalidRange, "CMTimeMapping.CreateFromDictionary");
            CompareCMTimeRange(mapping.Target, CMTimeRange.InvalidRange, "CMTimeMapping.CreateFromDictionary");

            Assert.IsNotNull(mapping.AsDictionary(), "CMTimeMapping AsDictionary");

            Assert.IsNotNull(mapping.Description, "CMTimeMapping Description");
        }
        // Copies source audio from the tap into bufferList via the native
        // MTAudioProcessingTapGetSourceAudio call.
        // NOTE(review): the framesProvided parameter is never used, and the native
        // frames-provided out value is discarded into a local — callers cannot learn
        // how many frames were actually delivered. Looks like framesProvided was
        // meant to be an out parameter; confirm against the native API contract.
        public MTAudioProcessingTapError GetSourceAudio(long frames, ref AudioBufferList bufferList, out MTAudioProcessingTapFlags flags,
                                                        out CMTimeRange timeRange, long framesProvided)
        {
            int outFp; // receives (and drops) the native frames-provided count

            return(MTAudioProcessingTapGetSourceAudio(handle, (int)frames, ref bufferList, out flags, out timeRange, out outFp));
        }
Example #3
0
        /// <summary>
        /// Asks the compression session which time ranges the encoder wants re-read
        /// during the next multi-pass run. On success <paramref name="timeRanges"/>
        /// receives a managed copy of the native array; on failure it is null and
        /// the native status is returned.
        /// </summary>
        /// <exception cref="ObjectDisposedException">Thrown when the session handle is gone.</exception>
        public VTStatus GetTimeRangesForNextPass(out CMTimeRange [] timeRanges)
        {
            if (Handle == IntPtr.Zero)
            {
                throw new ObjectDisposedException("CompressionSession");
            }

            int rangeCount;
            IntPtr rangesPtr;
            VTStatus status = VTCompressionSessionGetTimeRangesForNextPass(Handle, out rangeCount, out rangesPtr);

            if (status != VTStatus.Ok)
            {
                timeRanges = null;
                return status;
            }

            // Copy the native CMTimeRange array into managed memory element by element.
            var managedRanges = new CMTimeRange [rangeCount];
            unsafe {
                var source = (CMTimeRange *)rangesPtr;
                for (int idx = 0; idx < rangeCount; idx++)
                {
                    managedRanges [idx] = source [idx];
                }
            }
            timeRanges = managedRanges;
            return VTStatus.Ok;
        }
        /// <summary>
        /// Asynchronously loads the given keys on the asset; if every key loads and
        /// the asset is composable, registers it (plus a fixed 0–5 second clip range)
        /// in Clips / ClipTimeRanges. Balances the Enter() with exactly one Leave().
        /// </summary>
        private void LoadAsset(AVUrlAsset asset, string[] assetKeysToLoadandTest, DispatchGroup dispatchGroup)
        {
            dispatchGroup.Enter();
            asset.LoadValuesAsynchronously(assetKeysToLoadandTest, () => {
                // BUGFIX: the old code called dispatchGroup.Leave() on each failure and
                // then kept running, so the group could be over-released (one Leave per
                // failed key, plus the final Leave) and a failed asset was still added
                // to Clips. Track usability and leave the group exactly once.
                bool usable = true;

                foreach (string key in assetKeysToLoadandTest)
                {
                    NSError error;
                    if (asset.StatusOfValue(key, out error) == AVKeyValueStatus.Failed)
                    {
                        Console.Error.WriteLine("Key value loading failed for key" + key + " with error: " + error.ToString());
                        usable = false;
                        break;
                    }
                }

                if (usable && !asset.Composable)
                {
                    Console.Error.WriteLine("Asset is not composable");
                    usable = false;
                }

                if (usable)
                {
                    Clips.Add(asset);
                    // Assumes the asset is at least 5 seconds long.
                    CMTimeRange timeRange = new CMTimeRange()
                    {
                        Start    = CMTime.FromSeconds(0, 1),
                        Duration = CMTime.FromSeconds(5, 1)
                    };

                    ClipTimeRanges.Add(NSValue.FromCMTimeRange(timeRange));
                }

                dispatchGroup.Leave();
            });
        }
 /// <summary>Transition instruction: blends the listed source tracks over timeRange.</summary>
 public CustomVideoCompositionInstruction(NSNumber [] sourceTracksIDS, CMTimeRange timeRange)
 {
     this.timeRange              = timeRange;
     this.requiredSourceTrackIDs = sourceTracksIDS;
     this.passthroughTrackID     = 0;     // 0 means "no pass-through track"
     this.containsTweening       = true;  // the blend animates between frames
     this.enablePostProcessing   = false;
 }
		/// <summary>Creates a transition instruction covering the given source tracks and range.</summary>
		public CustomVideoCompositionInstruction(NSNumber [] sourceTracksIDS, CMTimeRange timeRange) : base()
		{
			this.timeRange = timeRange;
			this.requiredSourceTrackIDs = sourceTracksIDS;
			this.passthroughTrackID = 0;          // no single pass-through track
			this.containsTweening = true;         // transition animates over time
			this.enablePostProcessing = false;
		}
		/// <summary>Creates a pass-through instruction for a single track over the given range.</summary>
		public CustomVideoCompositionInstruction (int passthroughTrackID, CMTimeRange timeRange) : base()
		{
			this.passthroughTrackID = passthroughTrackID;
			this.timeRange = timeRange;
			this.requiredSourceTrackIDs = null;   // pass-through blends no source tracks
			this.containsTweening = false;        // frames are used unmodified
			this.enablePostProcessing = false;
		}
Example #8
0
 /// <summary>Pass-through instruction: one track's frames are used unmodified.</summary>
 public CustomVideoCompositionInstruction(int passthroughTrackId, CMTimeRange timeRange) : base()
 {
     this.passthroughTrackId = passthroughTrackId;
     this.timeRange          = timeRange;
     requiredSourceTrackIds  = null;   // nothing to blend during pass-through
     containsTweening        = false;  // static frames, no animation
     enablePostProcessing    = false;
 }
Example #9
0
 /// <summary>Transition instruction: blends the listed source tracks across timeRange.</summary>
 public CustomVideoCompositionInstruction(NSNumber[] sourceTracksIds, CMTimeRange timeRange) : base()
 {
     this.timeRange         = timeRange;
     requiredSourceTrackIds = sourceTracksIds;
     passthroughTrackId     = 0;      // 0 marks "no pass-through track"
     containsTweening       = true;   // the blend animates over time
     enablePostProcessing   = false;
 }
 /// <summary>Builds a pass-through instruction for the given track and time range.</summary>
 public CustomVideoCompositionInstruction(int passthroughTrackID, CMTimeRange timeRange)
 {
     this.timeRange          = timeRange;
     this.passthroughTrackID = passthroughTrackID;
     requiredSourceTrackIDs  = null;   // pass-through blends nothing
     containsTweening        = false;
     enablePostProcessing    = false;
 }
        /// <summary>
        /// Kicks off an asynchronous export of _asset to localOutputURL: loads the
        /// "tracks" and "duration" keys, deletes any pre-existing output file, then
        /// starts the reader/writer pipeline. The progress/completion callbacks are
        /// stored for later use; cancellation flows through _cancellationTokenSrc.
        /// </summary>
        public void WriteToUrl(NSUrl localOutputURL, Action <float> progress, Action <NSError> completion)
        {
            _outputURL = localOutputURL;

            AVAsset localAsset = _asset;

            _completionProc = completion;
            _progressProc   = progress;

            // Dispatch the setup work with _cancellationTokenSrc, to ensure this work can be cancelled
            localAsset.LoadValuesTaskAsync(new string[] { "tracks", "duration" }).ContinueWith(_ => {
                // Since we are doing these things asynchronously, the user may have already cancelled on the main thread.
                // In that case, simply return from this block
                _cancellationTokenSrc.Token.ThrowIfCancellationRequested();

                bool success       = true;
                NSError localError = null;

                // Both keys must have loaded; localError holds the last failure, if any.
                success = localAsset.StatusOfValue("tracks", out localError) == AVKeyValueStatus.Loaded &&
                          localAsset.StatusOfValue("duration", out localError) == AVKeyValueStatus.Loaded;

                if (!success)
                {
                    throw new NSErrorException(localError);
                }

                // Export the full asset, from time zero through its whole duration.
                _timeRange = new CMTimeRange {
                    Start    = CMTime.Zero,
                    Duration = localAsset.Duration
                };

                // AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
                if (File.Exists(localOutputURL.Path))
                {
                    File.Delete(localOutputURL.Path);
                }

                // Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
                SetupReaderAndWriter();
                StartReadingAndWriting();

                return(localError);
            }, _cancellationTokenSrc.Token).ContinueWith(prevTask => {
                switch (prevTask.Status)
                {
                case TaskStatus.Canceled:
                    ReadingAndWritingDidFinish(false, null);
                    break;

                case TaskStatus.Faulted:
                    // NOTE(review): assumes the faulting exception is always an
                    // NSErrorException; any other exception type would make this
                    // cast throw InvalidCastException — confirm.
                    ReadingAndWritingDidFinish(false, ((NSErrorException)prevTask.Exception.InnerException).Error);
                    break;

                default:
                    // RanToCompletion falls through here — the success-path completion
                    // is presumably driven by the reader/writer pipeline; confirm.
                    break;
                }
            });
        }
		/// <summary>
		/// Starts an asynchronous export of _asset to localOutputURL: loads the
		/// "tracks" and "duration" keys, removes any existing output file, then
		/// starts the reader/writer pipeline. Cancellation is observed through
		/// _cancellationTokenSrc at each stage.
		/// </summary>
		public void WriteToUrl(NSUrl localOutputURL, Action<float> progress, Action<NSError> completion)
		{
			_outputURL = localOutputURL;

			AVAsset localAsset = _asset;

			_completionProc = completion;
			_progressProc = progress;

			// Dispatch the setup work with _cancellationTokenSrc, to ensure this work can be cancelled
			localAsset.LoadValuesTaskAsync (new string[] { "tracks", "duration" }).ContinueWith(_ => {
				// Since we are doing these things asynchronously, the user may have already cancelled on the main thread.
				// In that case, simply return from this block
				_cancellationTokenSrc.Token.ThrowIfCancellationRequested();

				bool success = true;
				NSError localError = null;

				// Both keys must have loaded; localError keeps the last failure, if any.
				success = localAsset.StatusOfValue("tracks", out localError) == AVKeyValueStatus.Loaded &&
				          localAsset.StatusOfValue("duration", out localError) == AVKeyValueStatus.Loaded;

				if(!success)
					throw new NSErrorException(localError);

				// Export the whole asset, from time zero through its full duration.
				_timeRange = new CMTimeRange {
					Start = CMTime.Zero,
					Duration = localAsset.Duration
				};

				// AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
				if (File.Exists(localOutputURL.Path))
					File.Delete(localOutputURL.Path);

				// Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
				SetupReaderAndWriter();
				StartReadingAndWriting();

				return localError;
			}, _cancellationTokenSrc.Token).ContinueWith(prevTask => {
				switch(prevTask.Status) {
					case TaskStatus.Canceled:
						ReadingAndWritingDidFinish(false, null);
						break;

					case TaskStatus.Faulted:
						// NOTE(review): assumes the fault is always an NSErrorException;
						// another exception type would make this cast throw — confirm.
						ReadingAndWritingDidFinish(false, ((NSErrorException)prevTask.Exception.InnerException).Error);
						break;

					default:
						// RanToCompletion: the success-path completion is presumably
						// invoked by the reader/writer pipeline itself; confirm.
						break;
				}
			});
		}
        /// <summary>
        /// Copies source audio from the tap into <paramref name="bufferList"/> via
        /// the native MTAudioProcessingTapGetSourceAudio call.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when bufferList is null.</exception>
        public MTAudioProcessingTapError GetSourceAudio(long frames, AudioBuffers bufferList, out MTAudioProcessingTapFlags flags,
                                                        out CMTimeRange timeRange, long framesProvided)
        {
            if (bufferList == null)
            {
                // nameof keeps the parameter name refactor-safe (same runtime string).
                throw new ArgumentNullException(nameof(bufferList));
            }

            // NOTE(review): framesProvided is accepted but never used, and the native
            // frames-provided count is discarded into a local — confirm the intended
            // signature against the native API.
            int result;
            var r = MTAudioProcessingTapGetSourceAudio(handle, (int)frames, (IntPtr)bufferList, out flags, out timeRange, out result);

            return r;
        }
        /// <summary>
        /// Converts every instruction of the video composition into an
        /// APLVideoCompositionStageInfo: its time range, the track IDs of its layer
        /// instructions, and — when more than one layer is present — per-layer
        /// opacity ramps sampled as (seconds, opacity) points.
        /// </summary>
        private void ProcessVideoComposition(AVMutableVideoComposition videoComposition)
        {
            var stages = new List <APLVideoCompositionStageInfo> ();

            foreach (AVVideoCompositionInstruction instruction in videoComposition.Instructions)
            {
                var stage = new APLVideoCompositionStageInfo();
                stage.TimeRange = instruction.TimeRange;

                var rampsDictionary = new Dictionary <string, List <CGPoint> > ();
                var layerNames      = new List <string> ();
                foreach (AVVideoCompositionLayerInstruction layerInstruction in instruction.LayerInstructions)
                {
                    var ramp = new List <CGPoint> ();

                    CMTime      startTime    = CMTime.Zero;
                    float       startOpacity = 1f;
                    float       endOpacity   = 1f;
                    CMTimeRange timeRange    = new CMTimeRange();

                    // Walk the opacity ramps in order; startTime advances past each
                    // ramp returned so the next call fetches the following one.
                    while (layerInstruction.GetOpacityRamp(startTime, ref startOpacity, ref endOpacity, ref timeRange))
                    {
                        // When the very first ramp begins after time zero, record its
                        // starting opacity at the ramp's start point.
                        if (CMTime.Compare(startTime, CMTime.Zero) == 0 &&
                            CMTime.Compare(timeRange.Start, CMTime.Zero) == 1)
                        {
                            ramp.Add(new CGPoint((float)timeRange.Start.Seconds, startOpacity));
                        }

                        CMTime endTime = CMTime.Add(timeRange.Start, timeRange.Duration);
                        ramp.Add(new CGPoint((float)endTime.Seconds, endOpacity));
                        startTime = CMTime.Add(timeRange.Start, timeRange.Duration);
                    }

                    // Layers are keyed by their track ID in the ramps dictionary.
                    NSString name = new NSString(layerInstruction.TrackID.ToString());
                    layerNames.Add(name);
                    rampsDictionary [name] = ramp;
                }

                // Opacity ramps are only surfaced when layers overlap (a transition).
                if (layerNames.Count > 1)
                {
                    stage.OpacityRamps = rampsDictionary;
                }

                stage.LayerNames = layerNames;
                stages.Add(stage);
            }

            videoCompositionStages = stages;
        }
        /// <summary>
        /// Samples each audio-mix input's volume ramps into a list of
        /// (seconds, volume) points, one list per mix track; extends the last
        /// volume to the full duration when the ramps end early.
        /// </summary>
        private void ProcessAudioMix(AVMutableAudioMix audioMix)
        {
            var mixTracks = new List <List <CGPoint> > ();

            foreach (AVAudioMixInputParameters input in audioMix.InputParameters)
            {
                List <CGPoint> ramp = new List <CGPoint> ();

                CMTime      startTime   = CMTime.Zero;
                float       startVolume = 1f;
                float       endVolume   = 1f;
                CMTimeRange timeRange   = new CMTimeRange();

                // Walk the volume ramps in order; startTime advances past each one.
                while (input.GetVolumeRamp(startTime, ref startVolume, ref endVolume, ref timeRange))
                {
                    // If the first ramp starts after zero, pin full volume at t=0
                    // and record the ramp's starting volume at its start time.
                    if (CMTime.Compare(startTime, CMTime.Zero) == 0 &&
                        CMTime.Compare(timeRange.Start, CMTime.Zero) == 1)
                    {
                        ramp.Add(new CGPoint(0f, 1f));
                        ramp.Add(new CGPoint((float)timeRange.Start.Seconds, startVolume));
                    }

                    // NOTE(review): when the branch above ran, this adds the same
                    // (start, startVolume) point a second time — confirm intended.
                    ramp.Add(new CGPoint((float)timeRange.Start.Seconds, startVolume));

                    CMTime endTime = CMTime.Add(timeRange.Start, timeRange.Duration);
                    ramp.Add(new CGPoint((float)endTime.Seconds, endVolume));
                    startTime = CMTime.Add(timeRange.Start, timeRange.Duration);
                }

                // Ramps stopped before the end of the mix: hold the final volume.
                if (CMTime.Compare(startTime, duration) == -1)
                {
                    ramp.Add(new CGPoint((float)duration.Seconds, endVolume));
                }

                mixTracks.Add(ramp);
            }

            audioMixTracks = mixTracks;
        }
        /// <summary>
        /// Exports the [startTime, endTime] slice (in seconds) of the source video to
        /// destinationPath without re-encoding (pass-through preset).
        /// </summary>
        /// <returns>Success, cancel, or failure (with the export error message).</returns>
        public async Task <OperationResult> TrimVideo(string sourcePath, string destinationPath, double startTime, double endTime)
        {
            if (string.IsNullOrEmpty(sourcePath) || !File.Exists(sourcePath))
            {
                return OperationResult.AsFailure("Invalid video file path specified");
            }

            var url   = NSUrl.CreateFileUrl(sourcePath, false, null);
            var asset = AVAsset.FromUrl(url);

            // BUGFIX: the session was never disposed; wrap it in a using so the
            // native AVAssetExportSession is released deterministically.
            using (var session = new AVAssetExportSession(asset, AVAssetExportSession.PresetPassthrough))
            {
                session.OutputUrl      = NSUrl.FromFilename(destinationPath);
                session.OutputFileType = AVFileType.Mpeg4;

                // Express the trim window in the asset's own timescale to limit rounding.
                var cmStartTime = CMTime.FromSeconds(startTime, asset.Duration.TimeScale);
                var duration    = CMTime.FromSeconds(endTime - startTime, asset.Duration.TimeScale);

                session.TimeRange = new CMTimeRange {
                    Start    = cmStartTime,
                    Duration = duration
                };

                await session.ExportTaskAsync();

                if (session.Status == AVAssetExportSessionStatus.Cancelled)
                {
                    return OperationResult.AsCancel();
                }
                if (session.Status == AVAssetExportSessionStatus.Failed)
                {
                    return OperationResult.AsFailure(session.Error.LocalizedDescription);
                }
                return OperationResult.AsSuccess();
            }
        }
Example #17
0
        /// <summary>
        /// Starts the reader/writer pair, opens a writing session at the start of
        /// timeRange, pumps the audio and video sample channels in parallel, and
        /// finishes the writer once both channels complete (unless cancelled).
        /// Throws NSErrorException when the reader or writer fails to start.
        /// </summary>
        void StartReadingAndWriting(CMTimeRange timeRange)
        {
            // Instruct the asset reader and asset writer to get ready to do work
            if (!assetReader.StartReading())
            {
                throw new NSErrorException(assetReader.Error);
            }

            if (!assetWriter.StartWriting())
            {
                throw new NSErrorException(assetWriter.Error);
            }

            // Start a sample-writing session
            assetWriter.StartSessionAtSourceTime(timeRange.Start);

            Task audioTask = Start(audioSampleBufferChannel);
            Task videoTask = Start(videoSampleBufferChannel);

            // Set up a callback for when the sample writing is finished
            Task.WhenAll(audioTask, videoTask).ContinueWith(_ => {
                if (cancellationTokenSrc.Token.IsCancellationRequested)
                {
                    assetReader.CancelReading();
                    assetWriter.CancelWriting();
                    // Marks this continuation task as canceled (token matches below).
                    throw new OperationCanceledException();
                }

                if (assetReader.Status != AVAssetReaderStatus.Failed)
                {
                    assetWriter.FinishWriting(() => {
                        bool success = assetWriter.Status == AVAssetWriterStatus.Completed;
                        ReadingAndWritingDidFinish(success, assetWriter.Error);
                    });
                }
                // NOTE(review): when the reader DID fail nothing is invoked here —
                // presumably the failure is surfaced elsewhere; confirm callers see it.
            }, cancellationTokenSrc.Token);
        }
Example #18
0
        /// <summary>
        /// Asynchronously loads the requested keys on the asset and, when every key
        /// loads and the asset is composable, registers it (with a fixed 0–5 second
        /// clip range) in clips / clipTimeRanges. Leaves the dispatch group exactly once.
        /// </summary>
        private void LoadAsset(AVAsset asset, string[] assetKeysToLoad, DispatchGroup dispatchGroup)
        {
            dispatchGroup.Enter();
            asset.LoadValuesAsynchronously(assetKeysToLoad, () =>
            {
                var usable = true;

                // Verify that every requested key actually finished loading.
                foreach (var key in assetKeysToLoad)
                {
                    if (asset.StatusOfValue(key, out NSError error) == AVKeyValueStatus.Failed)
                    {
                        Console.WriteLine($"Key value loading failed for key:{key} with error: {error?.LocalizedDescription ?? ""}");
                        usable = false;
                        break;
                    }
                }

                // Even with all keys loaded, the asset must be composable to be used.
                if (!asset.Composable)
                {
                    Console.WriteLine("Asset is not composable");
                    usable = false;
                }

                if (usable)
                {
                    this.clips.Add(asset);
                    // This code assumes that both assets are atleast 5 seconds long.
                    this.clipTimeRanges.Add(new CMTimeRange {
                        Start = CMTime.FromSeconds(0, 1),
                        Duration = CMTime.FromSeconds(5, 1)
                    });
                }

                dispatchGroup.Leave();
            });
        }
		/// <summary>
		/// Places the clips on two alternating video/audio track pairs, overlapping
		/// consecutive clips by transitionDuration, then builds the matching
		/// pass-through and transition instructions for the video composition.
		/// Uses CustomVideoCompositionInstruction when a custom compositor class is
		/// set, and the built-in AVMutableVideoCompositionInstruction otherwise.
		/// </summary>
		void buildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition)
		{
			CMTime nextClipStartTime = CMTime.Zero;
			int clipsCount = Clips.Count;

			// Make transitionDuration no greater than half the shortest clip duration.
			CMTime transitionDuration = TransitionDuration;

			foreach (var clipTimeRange in ClipTimeRanges) {
				if (clipTimeRange == null)
					continue;

				// Doubling the timescale halves the represented duration.
				CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
				halfClipDuration.TimeScale *= 2;
				transitionDuration = CMTime.GetMinimum (transitionDuration, halfClipDuration);
			}

			// Add two video tracks and two audio tracks.
			var compositionVideoTracks = new AVMutableCompositionTrack [2];
			var compositionAudioTracks = new AVMutableCompositionTrack [2];

			compositionVideoTracks [0] = composition.AddMutableTrack (AVMediaType.Video, 0);
			compositionVideoTracks [1] = composition.AddMutableTrack (AVMediaType.Video, 0);
			compositionAudioTracks [0] = composition.AddMutableTrack (AVMediaType.Audio, 0);
			compositionAudioTracks [1] = composition.AddMutableTrack (AVMediaType.Audio, 0);

			var passThroughTimeRanges = new CMTimeRange [clipsCount];
			var transitionTimeRanges = new CMTimeRange [clipsCount];

			// Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
			for (int i = 0; i < clipsCount; i++) {
				int alternatingIndex = i % 2;
				AVAsset asset = Clips [i];
				NSValue clipTimeRange = ClipTimeRanges [i];
				CMTimeRange timeRangeInAsset;
				if (clipTimeRange != null) {
					timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
				} else {
					// No explicit range: use the whole asset.
					timeRangeInAsset = new CMTimeRange {
						Start = CMTime.Zero,
						Duration = asset.Duration
					};
				}

				NSError error;
				AVAssetTrack clipVideoTrack = asset.TracksWithMediaType (AVMediaType.Video) [0];
				compositionVideoTracks [alternatingIndex].InsertTimeRange (timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);

				AVAssetTrack clipAudioTrack = asset.TracksWithMediaType (AVMediaType.Audio) [0];
				compositionAudioTracks [alternatingIndex].InsertTimeRange (timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);

				// Remember the time range in which this clip should pass through.
				// First clip ends with a transition; second clip begins with one.
				// Exclude those transitions from the pass-through time ranges.
				passThroughTimeRanges [i] = new CMTimeRange {
					Start = nextClipStartTime,
					Duration = timeRangeInAsset.Duration
				};

				if (i > 0) {
					passThroughTimeRanges [i].Start = CMTime.Add (passThroughTimeRanges [i].Start, transitionDuration);
					passThroughTimeRanges [i].Duration = CMTime.Subtract (passThroughTimeRanges [i].Duration, transitionDuration);
				}
				if (i + 1 < clipsCount)
					passThroughTimeRanges [i].Duration = CMTime.Subtract (passThroughTimeRanges [i].Duration, transitionDuration);

				// The end of this clip will overlap the start of the next by transitionDuration.
				// (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
				nextClipStartTime = CMTime.Add (nextClipStartTime, timeRangeInAsset.Duration);
				nextClipStartTime = CMTime.Subtract (nextClipStartTime, transitionDuration);

				// Remember the time range for the transition to the next item.
				if (i + 1 < clipsCount) {
					transitionTimeRanges [i] = new CMTimeRange {
						Start = nextClipStartTime,
						Duration = transitionDuration
					};
				}
			}

			// Set up the video composition to perform cross dissolve or diagonal wipe transitions between clips.
			var instructions = new List<AVVideoCompositionInstruction> ();

			// Cycle between "pass through A", "transition from A to B", "pass through B".
			for (int i = 0; i < clipsCount; i++) {
				int alternatingIndex = i % 2;

				// BUGFIX: the old code compared CustomVideoCompositorClass.Name against
				// the literal "nil", which throws NullReferenceException when no custom
				// compositor class is set. Test the property itself instead.
				if (videoComposition.CustomVideoCompositorClass != null) {
					var videoInstruction = new CustomVideoCompositionInstruction (compositionVideoTracks [alternatingIndex].TrackID, passThroughTimeRanges [i]);
					instructions.Add (videoInstruction);
				} else {
					// Pass through clip i.
					var passThroughInstruction = AVMutableVideoCompositionInstruction.Create () as AVMutableVideoCompositionInstruction;
					passThroughInstruction.TimeRange = passThroughTimeRanges [i];
					var passThroughLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack (compositionVideoTracks [alternatingIndex]);
					passThroughInstruction.LayerInstructions = new [] { passThroughLayer };
					instructions.Add (passThroughInstruction);
				}

				if (i + 1 < clipsCount) {
					// Add transition from clip i to clip i+1.
					if (videoComposition.CustomVideoCompositorClass != null) {
						NSNumber[] sources = {
							new NSNumber (compositionVideoTracks [0].TrackID),
							new NSNumber (compositionVideoTracks [1].TrackID)
						};
						var videoInstructions = new CustomVideoCompositionInstruction (sources, transitionTimeRanges [i]);
						if (alternatingIndex == 0) {
							videoInstructions.ForegroundTrackID = compositionVideoTracks [alternatingIndex].TrackID;
							videoInstructions.BackgroundTrackID = compositionVideoTracks [1 - alternatingIndex].TrackID;
						}

						instructions.Add (videoInstructions);
						Console.WriteLine ("Add transition from clip i to clip i+1");
					} else {
						var transitionInstruction = AVMutableVideoCompositionInstruction.Create () as AVMutableVideoCompositionInstruction;
						transitionInstruction.TimeRange = transitionTimeRanges [i];
						AVMutableVideoCompositionLayerInstruction fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack (compositionVideoTracks [alternatingIndex]);
						AVMutableVideoCompositionLayerInstruction toLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack (compositionVideoTracks [1 - alternatingIndex]);
						transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[] {
							fromLayer,
							toLayer,
						};
						instructions.Add (transitionInstruction);
					}
				}
			}

			videoComposition.Instructions = instructions.ToArray ();
		}
		/// <summary>
		/// Converts every instruction of the video composition into an
		/// APLVideoCompositionStageInfo: its time range, the track IDs of its layer
		/// instructions, and — when more than one layer is present — per-layer
		/// opacity ramps sampled as (seconds, opacity) points.
		/// </summary>
		private void ProcessVideoComposition (AVMutableVideoComposition videoComposition)
		{
			var stages = new List<APLVideoCompositionStageInfo> ();
			foreach (AVVideoCompositionInstruction instruction in videoComposition.Instructions) {
				var stage = new APLVideoCompositionStageInfo ();
				stage.TimeRange = instruction.TimeRange;

				var rampsDictionary = new Dictionary<string, List<CGPoint>> ();
				var layerNames = new List<string> ();
				foreach (AVVideoCompositionLayerInstruction layerInstruction in instruction.LayerInstructions) {
					var ramp = new List<CGPoint> ();

					CMTime startTime = CMTime.Zero;
					float startOpacity = 1f;
					float endOpacity = 1f;
					CMTimeRange timeRange = new CMTimeRange ();

					// Walk the opacity ramps in order; startTime advances past each
					// ramp returned so the next call fetches the following one.
					while (layerInstruction.GetOpacityRamp (startTime, ref startOpacity, ref endOpacity, ref timeRange)) {
						// If the first ramp starts after time zero, record its
						// starting opacity at the ramp's start point.
						if (CMTime.Compare (startTime, CMTime.Zero) == 0 &&
						    CMTime.Compare (timeRange.Start, CMTime.Zero) == 1) {
							ramp.Add (new CGPoint ((float)timeRange.Start.Seconds, startOpacity));
						}

						CMTime endTime = CMTime.Add (timeRange.Start, timeRange.Duration);
						ramp.Add (new CGPoint ((float)endTime.Seconds, endOpacity));
						startTime = CMTime.Add (timeRange.Start, timeRange.Duration);
					}

					// Layers are keyed by their track ID in the ramps dictionary.
					NSString name = new NSString (layerInstruction.TrackID.ToString ());
					layerNames.Add (name);
					rampsDictionary [name] = ramp;
				}

				// Opacity ramps are only surfaced when layers overlap (a transition).
				if (layerNames.Count > 1) {
					stage.OpacityRamps = rampsDictionary;
				}

				stage.LayerNames = layerNames;
				stages.Add (stage);
			}

			videoCompositionStages = stages;
		}
		/// <summary>
		/// Samples each audio-mix input's volume ramps into a list of
		/// (seconds, volume) points, one list per mix track; extends the last
		/// volume out to the full duration when the ramps end early.
		/// </summary>
		private void ProcessAudioMix (AVMutableAudioMix audioMix)
		{
			var mixTracks = new List<List<CGPoint>> ();
			foreach (AVAudioMixInputParameters input in audioMix.InputParameters) {
				List<CGPoint> ramp = new List<CGPoint> ();

				CMTime startTime = CMTime.Zero;
				float startVolume = 1f;
				float endVolume = 1f;
				CMTimeRange timeRange = new CMTimeRange ();

				// Walk the volume ramps in order; startTime advances past each one.
				while (input.GetVolumeRamp (startTime, ref startVolume, ref endVolume, ref timeRange)) {
					// If the first ramp starts after zero, pin full volume at t=0
					// and record the ramp's starting volume at its start time.
					if (CMTime.Compare (startTime, CMTime.Zero) == 0 &&
					    CMTime.Compare (timeRange.Start, CMTime.Zero) == 1) {
						ramp.Add (new CGPoint (0f, 1f));
						ramp.Add (new CGPoint ((float)timeRange.Start.Seconds, startVolume));
					}

					// NOTE(review): when the branch above ran, this adds the same
					// (start, startVolume) point a second time — confirm intended.
					ramp.Add (new CGPoint ((float)timeRange.Start.Seconds, startVolume));

					CMTime endTime = CMTime.Add (timeRange.Start, timeRange.Duration);
					ramp.Add (new CGPoint ((float)endTime.Seconds, endVolume));
					startTime = CMTime.Add (timeRange.Start, timeRange.Duration);
				}

				// Ramps stopped before the end of the mix: hold the final volume.
				if (CMTime.Compare (startTime, duration) == -1) {
					ramp.Add (new CGPoint ((float)duration.Seconds, endVolume));
				}

				mixTracks.Add (ramp);
			}

			audioMixTracks = mixTracks;
		}
Example #22
0
        /// <summary>
        /// Normalized position of <paramref name="time"/> within <paramref name="range"/>:
        /// 0 at range.Start, 1 at the range's end. Divides by the range duration, so a
        /// zero-length range yields infinity/NaN.
        /// </summary>
        private static double FactorForTimeInRange(CMTime time, CMTimeRange range)
        {
            CMTime offsetFromStart = CMTime.Subtract(time, range.Start);
            double totalSeconds = range.Duration.Seconds;
            return offsetFromStart.Seconds / totalSeconds;
        }
Example #23
0
 /// <summary>
 /// Invoked as an asset download task loads media time ranges. Not implemented
 /// by this base delegate; subclasses must override to handle the callback.
 /// </summary>
 public virtual void DidLoadTimeRange(NSUrlSession session, AVAssetDownloadTask assetDownloadTask, CMTimeRange timeRange, NSValue[] loadedTimeRanges, CMTimeRange timeRangeExpectedToLoad)
     => throw new NotImplementedException();
Example #24
0
 unsafe extern static /* OSStatus */ VTStatus VTFrameSiloCallFunctionForEachSampleBuffer(
     /* VTFrameSiloRef */ IntPtr silo,
     /* CMTimeRange */ CMTimeRange timeRange,             // CMTimeRange.Invalid retrieves all sample buffers
     /* void* */ IntPtr callbackInfo,
Example #25
0
        /// <summary>
        /// Lays the clips out on two alternating video/audio track pairs, overlapping
        /// consecutive clips by the transition duration, then builds cross-fade video
        /// instructions and matching audio volume ramps for every transition.
        /// </summary>
        /// <param name="composition">Mutable composition that receives the tracks.</param>
        /// <param name="videoComposition">Receives the pass-through/transition instructions.</param>
        /// <param name="audioMix">Receives the per-track volume ramp parameters.</param>
        private void BuildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
        {
            CMTime nextClipStartTime = CMTime.Zero;
            int    clipsCount        = Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            CMTime transitionDuration = TransitionDuration;

            Console.WriteLine("Clips Count:" + clipsCount);
            Console.WriteLine("Clips Range Count:" + ClipTimeRanges.Count);

            for (int i = 0; i < clipsCount; i++)
            {
                NSValue clipTimeRange = ClipTimeRanges [i];
                if (clipTimeRange != null)
                {
                    // Doubling the TimeScale halves the represented duration.
                    CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
                    halfClipDuration.TimeScale *= 2;
                    transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
                }
            }

            // Add two video tracks and two audio tracks.
            var compositionVideoTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Video, 0),
                composition.AddMutableTrack(AVMediaType.Video, 0)
            };
            var compositionAudioTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Audio, 0),
                composition.AddMutableTrack(AVMediaType.Audio, 0)
            };

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int         alternatingIndex = i % 2;
                AVAsset     asset            = Clips [i];
                NSValue     clipTimeRange    = ClipTimeRanges [i];
                CMTimeRange timeRangeInAsset;
                if (clipTimeRange != null)
                {
                    timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
                }
                else
                {
                    timeRangeInAsset          = new CMTimeRange();
                    timeRangeInAsset.Start    = CMTime.Zero;
                    timeRangeInAsset.Duration = asset.Duration;
                }
                NSError      error;
                AVAssetTrack clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video) [0];
                compositionVideoTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);

                AVAssetTrack clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio) [0];
                compositionAudioTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                CMTimeRange timeRange = new CMTimeRange();
                timeRange.Start           = nextClipStartTime;
                timeRange.Duration        = timeRangeInAsset.Duration;
                passThroughTimeRanges [i] = timeRange;

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item
                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges [i] = new CMTimeRange()
                    {
                        Start    = nextClipStartTime,
                        Duration = transitionDuration
                    };
                }
            }

            List <AVVideoCompositionInstruction>    instructions  = new List <AVVideoCompositionInstruction> ();
            List <AVMutableAudioMixInputParameters> trackMixArray = new List <AVMutableAudioMixInputParameters> ();

            // Set up the video composition if we are to perform crossfade transitions between clips.
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2;
                AVMutableVideoCompositionInstruction passThroughInstructions = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                passThroughInstructions.TimeRange = passThroughTimeRanges [i];

                AVMutableVideoCompositionLayerInstruction passThroughLayerInstructions = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);

                passThroughInstructions.LayerInstructions = new AVVideoCompositionLayerInstruction[] { passThroughLayerInstructions };
                instructions.Add(passThroughInstructions);

                if (i + 1 < clipsCount)
                {
                    var transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    transitionInstruction.TimeRange = transitionTimeRanges [i];
                    var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                    var toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [1 - alternatingIndex]);

                    // Fade in the toLayer by setting a ramp from 0.0 to 1.0.
                    toLayer.SetOpacityRamp(0.0f, 1.0f, transitionTimeRanges [i]);
                    transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[]
                    {
                        toLayer,
                        fromLayer,
                    };
                    instructions.Add(transitionInstruction);

                    // Add AudioMix volume ramps so the outgoing clip fades out while the
                    // incoming clip fades in.
                    // BUG FIX: the original code used the constant indices
                    // transitionTimeRanges[0] / passThroughTimeRanges[1] and the fixed
                    // tracks 0/1 here, which only matched the very first transition; use
                    // the loop-relative indices and the alternating track assignment.
                    var fadeOutMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[alternatingIndex]);
                    fadeOutMix.SetVolumeRamp(1f, 0f, transitionTimeRanges[i]);
                    trackMixArray.Add(fadeOutMix);

                    var fadeInMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[1 - alternatingIndex]);
                    fadeInMix.SetVolumeRamp(0f, 1f, transitionTimeRanges[i]);
                    // Hold full volume through the incoming clip's pass-through range.
                    fadeInMix.SetVolumeRamp(1f, 1f, passThroughTimeRanges[i + 1]);
                    trackMixArray.Add(fadeInMix);
                }
            }

            videoComposition.Instructions = instructions.ToArray();
            audioMix.InputParameters      = trackMixArray.ToArray();
        }
		/// <summary>
		/// Asynchronously loads the given keys on <paramref name="asset"/> and, on
		/// success, registers the asset together with a fixed 0–5 second clip range.
		/// The dispatch group is entered before the load starts and left exactly once,
		/// whether loading succeeds or fails.
		/// </summary>
		private void LoadAsset (AVUrlAsset asset, string[] assetKeysToLoadandTest, DispatchGroup dispatchGroup)
		{
			dispatchGroup.Enter ();
			asset.LoadValuesAsynchronously (assetKeysToLoadandTest, () => {
				foreach (string key in assetKeysToLoadandTest) {
					NSError error;
					if (asset.StatusOfValue (key, out error) == AVKeyValueStatus.Failed) {
						Console.Error.WriteLine ("Key value loading failed for key" + key + " with error: " + error.ToString ());
						dispatchGroup.Leave ();
						// BUG FIX: bail out here. The original fell through, so the
						// failed asset was still added to Clips and Leave() fired a
						// second time, unbalancing the dispatch group.
						return;
					}
				}

				if (!asset.Composable) {
					Console.Error.WriteLine ("Asset is not composable");
					dispatchGroup.Leave ();
					// Same fix: do not register a non-composable asset or Leave() twice.
					return;
				}

				Clips.Add (asset);
				// Use only the first 5 seconds of every clip.
				CMTimeRange timeRange = new CMTimeRange () {
					Start = CMTime.FromSeconds (0, 1),
					Duration = CMTime.FromSeconds (5, 1)
				};

				ClipTimeRanges.Add (NSValue.FromCMTimeRange (timeRange));
				dispatchGroup.Leave ();
			});
		}
Example #27
0
 /// <summary>
 /// Native binding for <c>MTAudioProcessingTapGetSourceAudio</c>: fetches source audio
 /// from the tap into the caller-supplied buffer list, reporting the flags, the time
 /// range covered, and the number of frames actually provided.
 /// </summary>
 extern static /* OSStatus */ MTAudioProcessingTapError MTAudioProcessingTapGetSourceAudio(
     /* MTAudioProcessingTapRef */ IntPtr tap, IntPtr numberFrames,
     /* AudioBufferList* */ IntPtr bufferListInOut,
     out MTAudioProcessingTapFlags flagsOut, out CMTimeRange timeRangeOut, out IntPtr numberFramesOut);
		/// <summary>
		/// Starts the asset reader and writer, opens a writing session at the start of
		/// <paramref name="timeRange"/>, kicks off the audio and video channels, and
		/// finishes (or cancels) the writer once both channels complete.
		/// </summary>
		/// <exception cref="NSErrorException">When the reader or writer fails to start.</exception>
		void StartReadingAndWriting (CMTimeRange timeRange)
		{
			// Instruct the asset reader and asset writer to get ready to do work
			if (!assetReader.StartReading ())
				throw new NSErrorException (assetReader.Error);

			if (!assetWriter.StartWriting ())
				throw new NSErrorException (assetWriter.Error);

			// Start a sample-writing session
			assetWriter.StartSessionAtSourceTime (timeRange.Start);

			Task audioTask = Start (audioSampleBufferChannel);
			Task videoTask = Start (videoSampleBufferChannel);

			// Set up a callback for when the sample writing is finished.
			// BUG FIX: do NOT pass the cancellation token to ContinueWith. Doing so
			// cancels the continuation itself once the token fires, so the
			// CancelReading/CancelWriting cleanup below could never run on cancellation.
			Task.WhenAll (audioTask, videoTask).ContinueWith (_ => {
				if (cancellationTokenSrc.Token.IsCancellationRequested) {
					assetReader.CancelReading ();
					assetWriter.CancelWriting ();
					throw new OperationCanceledException ();
				}

				if (assetReader.Status != AVAssetReaderStatus.Failed) {
					assetWriter.FinishWriting (() => {
						bool success = assetWriter.Status == AVAssetWriterStatus.Completed;
						ReadingAndWritingDidFinish (success, assetWriter.Error);
					});
				}
			});
		}
Example #29
0
 /// <summary>
 /// Asserts that two CMTimeRange values match on both Duration and Start,
 /// tagging each failure message with <paramref name="description"/>.
 /// </summary>
 void CompareCMTimeRange(CMTimeRange first, CMTimeRange second, string description)
 {
     const string prefix = "CompareCMTimeRange - ";
     Assert.AreEqual(first.Duration, second.Duration, prefix + "duration - " + description);
     Assert.AreEqual(first.Start, second.Start, prefix + "start - " + description);
 }
        /// <summary>
        /// Download-progress callback: sums the loaded time ranges as a fraction of the
        /// expected range and raises <c>AssetDownloadProgressChanged</c> for the asset
        /// tracked for this task.
        /// </summary>
        public override void DidLoadTimeRange(NSUrlSession session, AVAssetDownloadTask assetDownloadTask, CMTimeRange timeRange, NSValue [] loadedTimeRanges, CMTimeRange timeRangeExpectedToLoad)
        {
            //Log.Debug ($"DidLoadTimeRange: {assetDownloadTask?.TaskDescription}");

            // BUG FIX: use TryGetValue — the plain indexer throws KeyNotFoundException
            // for a task that is not (or no longer) tracked, crashing the delegate.
            // Assumes activeDownloadsMap is a generic dictionary — TODO confirm.
            if (!activeDownloadsMap.TryGetValue(assetDownloadTask, out var asset) || asset == null)
            {
                return;
            }

            var percentComplete = 0.0;

            // Iterate through the loaded time ranges
            foreach (var val in loadedTimeRanges)
            {
                // Unwrap the CMTimeRange from the NSValue
                var loadedTimeRange = val.CMTimeRangeValue;

                // Calculate the percentage of the total expected asset duration
                percentComplete += loadedTimeRange.Duration.Seconds / timeRangeExpectedToLoad.Duration.Seconds;
            }

            AssetDownloadProgressChanged?.Invoke(this, new MusicAssetDownloadProgressChangeArgs(asset.Music, percentComplete));
        }
		//Utilities methods

		/// <summary>
		/// Returns how far <paramref name="time"/> lies within <paramref name="range"/>,
		/// as a fraction of the range's duration (0.0 at the start, 1.0 at the end).
		/// </summary>
		static double FactorForTimeInRange( CMTime time, CMTimeRange range) =>
			CMTime.Subtract (time, range.Start).Seconds / range.Duration.Seconds;
Example #32
0
 /// <summary>
 /// Native binding for <c>VTFrameSiloCreate</c>: creates a VTFrameSilo, optionally
 /// backed by a file URL and restricted to a time range, returning the new silo
 /// handle through <paramref name="siloOut"/>.
 /// </summary>
 extern static /* OSStatus */ VTStatus VTFrameSiloCreate(
     /* CFAllocatorRef */ IntPtr allocator,     /* can be null */
     /* CFURLRef */ IntPtr fileUrl,             /* can be null */
     /* CMTimeRange */ CMTimeRange timeRange,   /* can be kCMTimeRangeInvalid */
     /* CFDictionaryRef */ IntPtr options,      /* Reserved, always null */
     /* VTFrameSiloRef */ out IntPtr siloOut);
        /// <summary>
        /// Pulls <paramref name="frames"/> frames of source audio from the processing tap
        /// into <paramref name="bufferList"/>.
        /// </summary>
        /// <exception cref="ArgumentNullException">When <paramref name="bufferList"/> is null.</exception>
        public MTAudioProcessingTapError GetSourceAudio(int frames, AudioBuffers bufferList, out MTAudioProcessingTapFlags flags, out CMTimeRange timeRange, out int framesProvided)
        {
            if (bufferList == null)
            {
                // nameof keeps the exception message in sync if the parameter is renamed.
                throw new ArgumentNullException(nameof(bufferList));
            }

            return MTAudioProcessingTapGetSourceAudio(handle, frames, (IntPtr)bufferList, out flags, out timeRange, out framesProvided);
        }
Example #34
0
 /// <summary>
 /// Default extension-method implementation of the delegate's time-range callback;
 /// intentionally unimplemented and always throws.
 /// </summary>
 /// <exception cref="NotImplementedException">Always thrown.</exception>
 public static void DidLoadTimeRange(this IAVAssetDownloadDelegate This, NSUrlSession session, AVAssetDownloadTask assetDownloadTask, CMTimeRange timeRange, NSValue[] loadedTimeRanges, CMTimeRange timeRangeExpectedToLoad) =>
     throw new NotImplementedException();
		/// <summary>
		/// Lays the clips out on two alternating video/audio track pairs, overlapping
		/// consecutive clips by the transition duration, then builds cross-fade video
		/// instructions and matching audio volume ramps for every transition.
		/// </summary>
		/// <param name="composition">Mutable composition that receives the tracks.</param>
		/// <param name="videoComposition">Receives the pass-through/transition instructions.</param>
		/// <param name="audioMix">Receives the per-track volume ramp parameters.</param>
		private void BuildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
		{
			CMTime nextClipStartTime = CMTime.Zero;
			int clipsCount = Clips.Count;

			// Make transitionDuration no greater than half the shortest clip duration.
			CMTime transitionDuration = TransitionDuration;
			Console.WriteLine ("Clips Count:" + clipsCount);
			Console.WriteLine ("Clips Range Count:" + ClipTimeRanges.Count);

			for (int i = 0; i < clipsCount; i++) {
				NSValue clipTimeRange = ClipTimeRanges [i];
				if(clipTimeRange != null) {
					// Doubling the TimeScale halves the represented duration.
					CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
					halfClipDuration.TimeScale *= 2;
					transitionDuration = CMTime.GetMinimum(transitionDuration,halfClipDuration);
				}
			}

			// Add two video tracks and two audio tracks.
			var compositionVideoTracks = new AVMutableCompositionTrack [] {
				composition.AddMutableTrack (AVMediaType.Video, 0),
				composition.AddMutableTrack (AVMediaType.Video, 0)
			};
			var compositionAudioTracks = new AVMutableCompositionTrack [] {
				composition.AddMutableTrack (AVMediaType.Audio, 0),
				composition.AddMutableTrack (AVMediaType.Audio, 0)
			};

			var passThroughTimeRanges = new CMTimeRange[clipsCount];
			var transitionTimeRanges = new CMTimeRange[clipsCount];

			// Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
			for(int i = 0; i < clipsCount; i++) {
				int alternatingIndex = i % 2;
				AVAsset asset = Clips [i];
				NSValue clipTimeRange = ClipTimeRanges [i];
				CMTimeRange timeRangeInAsset;
				if (clipTimeRange != null)
					timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
				else {
					timeRangeInAsset = new CMTimeRange ();
					timeRangeInAsset.Start = CMTime.Zero;
					timeRangeInAsset.Duration = asset.Duration;
				}
				NSError error;
				AVAssetTrack clipVideoTrack = asset.TracksWithMediaType (AVMediaType.Video) [0];
				compositionVideoTracks [alternatingIndex].InsertTimeRange (timeRangeInAsset, clipVideoTrack, nextClipStartTime,out error);

				AVAssetTrack clipAudioTrack = asset.TracksWithMediaType (AVMediaType.Audio) [0];
				compositionAudioTracks [alternatingIndex].InsertTimeRange (timeRangeInAsset, clipAudioTrack, nextClipStartTime,out error);

				// Remember the time range in which this clip should pass through.
				// First clip ends with a transition.
				// Second clip begins with a transition.
				// Exclude that transition from the pass through time ranges
				CMTimeRange timeRange = new CMTimeRange();
				timeRange.Start = nextClipStartTime;
				timeRange.Duration = timeRangeInAsset.Duration;
				passThroughTimeRanges [i] = timeRange;

				if (i > 0)
				{
					passThroughTimeRanges[i].Start = CMTime.Add(passThroughTimeRanges[i].Start,transitionDuration);
					passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration,transitionDuration);
				}

				if(i + 1 < clipsCount)
				{
					passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration,transitionDuration);
				}

				// The end of this clip will overlap the start of the next by transitionDuration.
				// (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
				nextClipStartTime = CMTime.Add (nextClipStartTime, timeRangeInAsset.Duration);
				nextClipStartTime = CMTime.Subtract (nextClipStartTime, transitionDuration);

				// Remember the time range for the transition to the next item
				if(i + 1 < clipsCount)
				{
					transitionTimeRanges [i] = new CMTimeRange ()
					{
						Start  = nextClipStartTime,
						Duration = transitionDuration
					};
				}
			}

			List<AVVideoCompositionInstruction> instructions = new List<AVVideoCompositionInstruction> ();
			List<AVMutableAudioMixInputParameters> trackMixArray = new List<AVMutableAudioMixInputParameters> ();

			// Set up the video composition if we are to perform crossfade transitions between clips.
			for (int i = 0; i < clipsCount; i++)
			{
				int alternatingIndex = i % 2;
				AVMutableVideoCompositionInstruction passThroughInstructions = AVMutableVideoCompositionInstruction.Create () as AVMutableVideoCompositionInstruction;
				passThroughInstructions.TimeRange = passThroughTimeRanges [i];

				AVMutableVideoCompositionLayerInstruction passThroughLayerInstructions = AVMutableVideoCompositionLayerInstruction.FromAssetTrack (compositionVideoTracks [alternatingIndex]);

				passThroughInstructions.LayerInstructions = new AVVideoCompositionLayerInstruction[] { passThroughLayerInstructions };
				instructions.Add (passThroughInstructions);

				if (i + 1 < clipsCount)
				{
					var transitionInstruction = AVMutableVideoCompositionInstruction.Create () as AVMutableVideoCompositionInstruction;
					transitionInstruction.TimeRange = transitionTimeRanges [i];
					var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack (compositionVideoTracks [alternatingIndex]);
					var toLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack (compositionVideoTracks [1 - alternatingIndex]);

					// Fade in the toLayer by setting a ramp from 0.0 to 1.0.
					toLayer.SetOpacityRamp (0.0f, 1.0f, transitionTimeRanges [i]);
					transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[]
					{
						toLayer,
						fromLayer,
					};
					instructions.Add(transitionInstruction);

					// Add AudioMix volume ramps so the outgoing clip fades out while the
					// incoming clip fades in.
					// BUG FIX: the original code used the constant indices
					// transitionTimeRanges[0] / passThroughTimeRanges[1] and the fixed
					// tracks 0/1 here, which only matched the very first transition; use
					// the loop-relative indices and the alternating track assignment.
					var fadeOutMix = AVMutableAudioMixInputParameters.FromTrack (compositionAudioTracks[alternatingIndex]);
					fadeOutMix.SetVolumeRamp (1f, 0f, transitionTimeRanges[i]);
					trackMixArray.Add (fadeOutMix);

					var fadeInMix = AVMutableAudioMixInputParameters.FromTrack (compositionAudioTracks[1 - alternatingIndex]);
					fadeInMix.SetVolumeRamp (0f, 1f, transitionTimeRanges[i]);
					// Hold full volume through the incoming clip's pass-through range.
					fadeInMix.SetVolumeRamp (1f, 1f, passThroughTimeRanges[i + 1]);
					trackMixArray.Add (fadeInMix);
				}
			}

			videoComposition.Instructions = instructions.ToArray ();
			audioMix.InputParameters = trackMixArray.ToArray();
		}
Example #36
0
        /// <summary>
        /// Builds the transition composition: clips are placed on two alternating
        /// video/audio track pairs, overlapped by the transition duration, and either a
        /// custom-compositor instruction or a standard cross-dissolve instruction is
        /// emitted for each pass-through and transition segment.
        /// </summary>
        /// <param name="composition">Mutable composition that receives the tracks.</param>
        /// <param name="videoComposition">Receives the generated instructions.</param>
        void buildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition)
        {
            CMTime nextClipStartTime = CMTime.Zero;
            int    clipsCount        = Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            CMTime transitionDuration = TransitionDuration;

            foreach (var clipTimeRange in ClipTimeRanges)
            {
                if (clipTimeRange == null)
                {
                    continue;
                }

                // Doubling the TimeScale halves the represented duration.
                CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
                halfClipDuration.TimeScale *= 2;
                transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
            }

            // Add two video tracks and two audio tracks.
            var compositionVideoTracks = new AVMutableCompositionTrack [2];
            var compositionAudioTracks = new AVMutableCompositionTrack [2];

            compositionVideoTracks [0] = composition.AddMutableTrack(AVMediaType.Video, 0);
            compositionVideoTracks [1] = composition.AddMutableTrack(AVMediaType.Video, 0);
            compositionAudioTracks [0] = composition.AddMutableTrack(AVMediaType.Audio, 0);
            compositionAudioTracks [1] = composition.AddMutableTrack(AVMediaType.Audio, 0);

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int         alternatingIndex = i % 2;
                AVAsset     asset            = Clips [i];
                NSValue     clipTimeRange    = ClipTimeRanges [i];
                CMTimeRange timeRangeInAsset;
                if (clipTimeRange != null)
                {
                    timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
                }
                else
                {
                    timeRangeInAsset = new CMTimeRange {
                        Start    = CMTime.Zero,
                        Duration = asset.Duration
                    };
                }
                // No pre-allocation needed: InsertTimeRange assigns the out parameter,
                // so the original `new NSError()` was discarded immediately.
                NSError      error;
                AVAssetTrack clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video) [0];
                compositionVideoTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);

                AVAssetTrack clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio) [0];
                compositionAudioTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                passThroughTimeRanges [i] = new CMTimeRange {
                    Start    = nextClipStartTime,
                    Duration = timeRangeInAsset.Duration
                };

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }
                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item.
                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges [i] = new CMTimeRange()
                    {
                        Start    = nextClipStartTime,
                        Duration = transitionDuration
                    };
                }
            }

            // Set up the video composition to perform cross dissolve or diagonal wipe transitions between clips.
            var instructions = new List <AVVideoCompositionInstruction> ();

            // Cycle between "pass through A", "transition from A to B", "pass through B".
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2;

                // NOTE(review): comparing the compositor class *name* against the literal
                // "nil" appears to mirror how the binding surfaces an unset compositor
                // class; confirm before replacing with a plain null check.
                if (videoComposition.CustomVideoCompositorClass.Name != "nil")
                {
                    var videoInstruction = new CustomVideoCompositionInstruction(compositionVideoTracks [alternatingIndex].TrackID, passThroughTimeRanges [i]);
                    instructions.Add(videoInstruction);
                }
                else
                {
                    // Pass through clip i.
                    var passThroughInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    passThroughInstruction.TimeRange = passThroughTimeRanges [i];
                    var passThroughLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                    passThroughInstruction.LayerInstructions = new [] { passThroughLayer };
                    instructions.Add(passThroughInstruction);
                }

                if (i + 1 < clipsCount)
                {
                    // Add transition from clip i to clip i+1.
                    if (videoComposition.CustomVideoCompositorClass.Name != "nil")
                    {
                        NSNumber[] sources = new NSNumber[] {
                            new NSNumber(compositionVideoTracks [0].TrackID),
                            new NSNumber(compositionVideoTracks [1].TrackID)
                        };
                        var videoInstructions = new CustomVideoCompositionInstruction(sources, transitionTimeRanges [i]);
                        if (alternatingIndex == 0)
                        {
                            videoInstructions.ForegroundTrackID = compositionVideoTracks [alternatingIndex].TrackID;
                            videoInstructions.BackgroundTrackID = compositionVideoTracks [1 - alternatingIndex].TrackID;
                        }

                        instructions.Add(videoInstructions);
                        Console.WriteLine("Add transition from clip i to clip i+1");
                    }
                    else
                    {
                        AVMutableVideoCompositionInstruction transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                        transitionInstruction.TimeRange = transitionTimeRanges [i];
                        AVMutableVideoCompositionLayerInstruction fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                        AVMutableVideoCompositionLayerInstruction toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [1 - alternatingIndex]);
                        transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[] {
                            fromLayer,
                            toLayer,
                        };
                        instructions.Add(transitionInstruction);
                    }
                }
            }

            videoComposition.Instructions = instructions.ToArray();
        }
Example #37
0
 /// <summary>
 /// Native objc_msgSend stub that invokes <paramref name="selector"/> on
 /// <paramref name="receiver"/> and returns the resulting CMTimeRange struct
 /// through <paramref name="retval"/>.
 /// </summary>
 extern static void CMTimeRange_objc_msgSend(out CMTimeRange retval, IntPtr receiver, IntPtr selector);
Example #38
0
 /// <summary>
 /// Encodes a CMTimeRange under the given key. This implementation is an
 /// intentional no-op (empty body).
 /// </summary>
 public virtual void EncodeCMTimeRange(CMTimeRange timeRange, [Unwrapped] string forKey)
 {
 }
        //Utilities methods

        /// <summary>
        /// Fraction of <paramref name="range"/> elapsed at <paramref name="time"/>
        /// (0.0 at range.Start, 1.0 at range.Start + range.Duration).
        /// </summary>
        double factorForTimeInRange(CMTime time, CMTimeRange range)
        {
            var elapsedSinceStart = CMTime.Subtract(time, range.Start);
            return elapsedSinceStart.Seconds / range.Duration.Seconds;
        }
 /// <summary>
 /// Native binding for <c>VTMultiPassStorageCreate</c>: creates multi-pass storage,
 /// optionally file-backed and restricted to a time range, returning the handle
 /// through <paramref name="multiPassStorageOut"/>.
 /// </summary>
 extern static /* OSStatus */ VTStatus VTMultiPassStorageCreate(
     /* CFAllocatorRef */ IntPtr allocator,     /* can be null */
     /* CFURLRef */ IntPtr fileUrl,             /* can be null */
     /* CMTimeRange */ CMTimeRange timeRange,   /* can be kCMTimeRangeInvalid */
     /* CFDictionaryRef */ IntPtr options,      /* can be null */
     /* VTMultiPassStorageRef */ out IntPtr multiPassStorageOut);
		/// <summary>
		/// Estimates how far through <paramref name="timeRange"/> the given sample buffer
		/// lies, as a fraction of the range's duration. The sample's own duration is
		/// included when it is numeric, so a fully consumed final sample reports ~1.0.
		/// </summary>
		private static double ProgressOfSampleBufferInTimeRange(CMSampleBuffer sampleBuffer, CMTimeRange timeRange)
		{
			CMTime position = sampleBuffer.PresentationTimeStamp - timeRange.Start;
			CMTime sampleLength = sampleBuffer.Duration;
			if (sampleLength.IsNumeric)
				position = position + sampleLength;
			return position.Seconds / timeRange.Duration.Seconds;
		}