Example #1
        public void BuildCompositionObjects(Boolean playBack)
        {
            if (Clips == null || Clips.Count == 0)
            {
                Composition      = null;
                VideoComposition = null;
                AudioMix         = null;
                return;
            }

            SizeF videoSize         = Clips [0].NaturalSize;
            var   composition1      = AVMutableComposition.Create();
            var   videoComposition1 = AVMutableVideoComposition.Create();
            var   audioMix          = AVMutableAudioMix.Create();

            composition1.NaturalSize = videoSize;

            BuildTransitionComposition(composition1, videoComposition1, audioMix);
            if (videoComposition1 != null)
            {
                videoComposition1.FrameDuration = new CMTime(1, 30);
                videoComposition1.RenderSize    = videoSize;
            }

            Composition      = composition1;
            VideoComposition = videoComposition1;
            AudioMix         = audioMix;
        }
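BuildCompositionObjects only rebuilds the three objects; nothing in the method attaches them to a player. A minimal playback sketch, assuming the class exposes Composition, VideoComposition and AudioMix as properties (as the assignments above suggest):

        // Hypothetical wiring, not part of the sample: preview the edit with an AVPlayer.
        var playerItem = AVPlayerItem.FromAsset (Composition);
        playerItem.VideoComposition = VideoComposition;  // cross-fade instructions
        playerItem.AudioMix = AudioMix;                  // volume ramps
        var player = new AVPlayer (playerItem);
        player.Play ();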
		public void SynchronizeToCompositoin (AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
		{
			compositionTracks = null;
			audioMixTracks = null;
			videoCompositionStages = null;

			duration = new CMTime (1, 1);

			if (composition != null)
				ProcessComposition (composition);

			if (videoComposition != null)
				ProcessVideoComposition (videoComposition);

			if (audioMix != null)
				ProcessAudioMix (audioMix);
		}
        unsafe AVAudioMix CreateAudioMix()
        {
            // Build a mix with a single set of input parameters for the audio track.
            AVMutableAudioMix audioMix = AVMutableAudioMix.Create();
            AVMutableAudioMixInputParameters audioMixInputParameters = AVMutableAudioMixInputParameters.FromTrack(audioAssetTrack);

            // Wire up the audio-processing-tap callbacks (defined elsewhere in this class).
            var callbacks = new MTAudioProcessingTapCallbacks(TapProcess)
            {
                Initialize = TapInitialization,
                Finalize   = Finalaze,
                Prepare    = TapPrepare,
                Unprepare  = Unprepare,
            };

            // The tap is applied before any other effects and sees this track's samples during playback.
            audioProcessingTap = new MTAudioProcessingTap(callbacks, MTAudioProcessingTapCreationFlags.PreEffects);
            audioMixInputParameters.AudioTapProcessor = audioProcessingTap;

            audioMix.InputParameters = new AVAudioMixInputParameters[] { audioMixInputParameters };

            return audioMix;
        }
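The tap created here only runs once the mix is attached to the item being played. A one-line wiring sketch (the playerItem instance is hypothetical, assumed to be built from the same asset as audioAssetTrack):

        playerItem.AudioMix = CreateAudioMix(); // hypothetical: without this assignment the tap callbacks never fire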
        private void ProcessAudioMix(AVMutableAudioMix audioMix)
        {
            var mixTracks = new List <List <CGPoint> > ();

            foreach (AVAudioMixInputParameters input in audioMix.InputParameters)
            {
                List <CGPoint> ramp = new List <CGPoint> ();

                CMTime      startTime   = CMTime.Zero;
                float       startVolume = 1f;
                float       endVolume   = 1f;
                CMTimeRange timeRange   = new CMTimeRange();

                while (input.GetVolumeRamp(startTime, ref startVolume, ref endVolume, ref timeRange))
                {
                    if (CMTime.Compare(startTime, CMTime.Zero) == 0 &&
                        CMTime.Compare(timeRange.Start, CMTime.Zero) == 1)
                    {
                        ramp.Add(new CGPoint(0f, 1f));
                        ramp.Add(new CGPoint((float)timeRange.Start.Seconds, startVolume));
                    }

                    ramp.Add(new CGPoint((float)timeRange.Start.Seconds, startVolume));

                    CMTime endTime = CMTime.Add(timeRange.Start, timeRange.Duration);
                    ramp.Add(new CGPoint((float)endTime.Seconds, endVolume));
                    startTime = CMTime.Add(timeRange.Start, timeRange.Duration);
                }

                if (CMTime.Compare(startTime, duration) == -1)
                {
                    ramp.Add(new CGPoint((float)duration.Seconds, endVolume));
                }

                mixTracks.Add(ramp);
            }

            audioMixTracks = mixTracks;
        }
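audioMixTracks ends up as one list of (seconds, volume) points per input. A quick way to sanity-check the result (illustrative only; assumes the field is a List<List<CGPoint>> as built above and that System.Linq is imported):

        // Illustrative dump of the collected ramp points, one line per audio track.
        for (int t = 0; t < audioMixTracks.Count; t++)
            Console.WriteLine ("track {0}: {1}", t,
                string.Join (" ", audioMixTracks [t].Select (p => string.Format ("({0:0.#}s, {1:0.##})", p.X, p.Y))));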
Example #6
        private void BuildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
        {
            CMTime nextClipStartTime = CMTime.Zero;
            int    clipsCount        = Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            CMTime transitionDuration = TransitionDuration;

            Console.WriteLine("Clips Count:" + clipsCount);
            Console.WriteLine("Clips Range Count:" + ClipTimeRanges.Count);

            for (int i = 0; i < clipsCount; i++)
            {
                NSValue clipTimeRange = ClipTimeRanges [i];
                if (clipTimeRange != null)
                {
                    CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
                    halfClipDuration.TimeScale *= 2;
                    transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
                }
            }

            // Add two video tracks and two audio tracks.
            var compositionVideoTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Video, 0),
                composition.AddMutableTrack(AVMediaType.Video, 0)
            };
            var compositionAudioTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Audio, 0),
                composition.AddMutableTrack(AVMediaType.Audio, 0)
            };

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int         alternatingIndex = i % 2;
                AVAsset     asset            = Clips [i];
                NSValue     clipTimeRange    = ClipTimeRanges [i];
                CMTimeRange timeRangeInAsset;
                if (clipTimeRange != null)
                {
                    timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
                }
                else
                {
                    timeRangeInAsset          = new CMTimeRange();
                    timeRangeInAsset.Start    = CMTime.Zero;
                    timeRangeInAsset.Duration = asset.Duration;
                }
                NSError      error;
                AVAssetTrack clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video) [0];
                compositionVideoTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);

                AVAssetTrack clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio) [0];
                compositionAudioTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                CMTimeRange timeRange = new CMTimeRange();
                timeRange.Start           = nextClipStartTime;
                timeRange.Duration        = timeRangeInAsset.Duration;
                passThroughTimeRanges [i] = timeRange;

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item
                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges [i] = new CMTimeRange()
                    {
                        Start    = nextClipStartTime,
                        Duration = transitionDuration
                    };
                }
            }

            List <AVVideoCompositionInstruction>    instructions  = new List <AVVideoCompositionInstruction> ();
            List <AVMutableAudioMixInputParameters> trackMixArray = new List <AVMutableAudioMixInputParameters> ();

            // Set up the video composition if we are to perform crossfade transitions between clips.
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2;
                AVMutableVideoCompositionInstruction passThroughInstructions = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                passThroughInstructions.TimeRange = passThroughTimeRanges [i];

                AVMutableVideoCompositionLayerInstruction passThroughLayerInstructions = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);

                passThroughInstructions.LayerInstructions = new AVVideoCompositionLayerInstruction[] { passThroughLayerInstructions };
                instructions.Add(passThroughInstructions);

                if (i + 1 < clipsCount)
                {
                    var transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    transitionInstruction.TimeRange = transitionTimeRanges [i];
                    var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                    var toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [1 - alternatingIndex]);


                    // Fade in the toLayer by setting a ramp from 0.0 to 1.0.
                    toLayer.SetOpacityRamp(0.0f, 1.0f, transitionTimeRanges [i]);
                    transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[]
                    {
                        toLayer,
                        fromLayer,
                    };
                    instructions.Add(transitionInstruction);

                    // Add volume ramps to the audio mix so the outgoing track fades out and the incoming track fades in.
                    var trackMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[0]);
                    trackMix.SetVolumeRamp(1f, 0f, transitionTimeRanges[0]);
                    trackMixArray.Add(trackMix);

                    trackMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[1]);
                    trackMix.SetVolumeRamp(0f, 1f, transitionTimeRanges[0]);
                    trackMix.SetVolumeRamp(1f, 1f, passThroughTimeRanges[1]);
                    trackMixArray.Add(trackMix);
                }
            }

            videoComposition.Instructions = instructions.ToArray();
            audioMix.InputParameters      = trackMixArray.ToArray();
        }
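The comments above describe the overlap arithmetic in the abstract; a tiny, self-contained illustration (plain seconds rather than CMTime, with arbitrarily chosen values) makes the resulting timeline concrete:

        // Illustrative arithmetic only: three 10 s clips with a 1 s cross-fade,
        // mirroring the pass-through / transition logic above.
        double clipSeconds = 10, transition = 1, clipStart = 0;
        for (int i = 0; i < 3; i++)
        {
            double passStart = clipStart + (i > 0 ? transition : 0);               // trim the incoming fade
            double passEnd   = clipStart + clipSeconds - (i < 2 ? transition : 0); // trim the outgoing fade
            clipStart += clipSeconds - transition;                                 // next clip overlaps by the transition
            Console.WriteLine("clip {0}: pass-through {1}-{2} s{3}", i, passStart, passEnd,
                i < 2 ? string.Format(", transition {0}-{1} s", clipStart, clipStart + transition) : "");
        }
        // Prints: clip 0: pass-through 0-9 s, transition 9-10 s
        //         clip 1: pass-through 10-18 s, transition 18-19 s
        //         clip 2: pass-through 19-28 s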
        public Task <OperationResult> AddAudioToVideoTrack(string videoFilePath, string audioFilePath, string outputFilePath,
                                                           float volume = 1, float fadeOutDuration = 0)
        {
            var tcs = new TaskCompletionSource <OperationResult>();

            var composition           = AVMutableComposition.Create();
            var videoCompositionTrack = composition.AddMutableTrack(AVMediaType.Video, 0);
            var audioCompositionTrack = composition.AddMutableTrack(AVMediaType.Audio, 0);

            var videoUrl        = NSUrl.FromFilename(videoFilePath);
            var videoAsset      = AVAsset.FromUrl(videoUrl);
            var videoAssetTrack = videoAsset.TracksWithMediaType(AVMediaType.Video).First();

            var audioUrl        = NSUrl.FromFilename(audioFilePath);
            var audioAsset      = AVAsset.FromUrl(audioUrl);
            var audioAssetTrack = audioAsset.TracksWithMediaType(AVMediaType.Audio).First();

            CGSize size = videoAssetTrack.NaturalSize;
            CMTime time = CMTime.Zero;

            var range = new CMTimeRange
            {
                Start    = CMTime.Zero,
                Duration = videoAssetTrack.TimeRange.Duration
            };

            NSError error = null;

            videoCompositionTrack.InsertTimeRange(range, videoAssetTrack, time, out error);
            if (error != null)
            {
                Console.WriteLine("Error adding video composition track: " + error.LocalizedDescription);
            }

            error = null;
            audioCompositionTrack.InsertTimeRange(range, audioAssetTrack, time, out error);
            if (error != null)
            {
                Console.WriteLine("Error adding audio composition track: " + error.LocalizedDescription);
            }


            var audioMix         = AVMutableAudioMix.Create();
            var audioInputParams = AVMutableAudioMixInputParameters.FromTrack(audioCompositionTrack);

            audioInputParams.SetVolume(volume, CMTime.Zero);

            if (fadeOutDuration > 0)
            {
                var fadeOutStartTime = CMTime.Subtract(videoAssetTrack.TimeRange.Duration, CMTime.FromSeconds(fadeOutDuration, audioAssetTrack.NaturalTimeScale));
                var fadeOutRange     = new CMTimeRange
                {
                    Start    = fadeOutStartTime,
                    Duration = CMTime.FromSeconds(fadeOutDuration, audioAssetTrack.NaturalTimeScale)
                };

                audioInputParams.SetVolumeRamp(volume, 0.0f, fadeOutRange);
            }

            audioMix.InputParameters = new[] { audioInputParams };

            var session = new AVAssetExportSession(composition, AVAssetExportSession.PresetHighestQuality);

            session.OutputUrl      = NSUrl.FromFilename(outputFilePath);
            session.OutputFileType = AVFileType.Mpeg4;
            session.AudioMix       = audioMix;

            session.ExportAsynchronously(() =>
            {
                if (session.Status == AVAssetExportSessionStatus.Failed)
                {
                    tcs.SetResult(OperationResult.AsFailure(session.Error.LocalizedDescription));
                }
                else
                {
                    tcs.SetResult(OperationResult.AsSuccess());
                }
            });

            return tcs.Task;
        }
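A hedged call-site sketch (the editor instance and file paths are placeholders; only AsSuccess/AsFailure of OperationResult appear in the code above, so the result is not inspected further here):

        // Hypothetical usage: mux a music track under a recorded clip at 80 % volume
        // with a two-second fade-out at the end.
        var result = await editor.AddAudioToVideoTrack (
            videoFilePath: "/path/to/video.mp4",
            audioFilePath: "/path/to/music.m4a",
            outputFilePath: "/path/to/output.mp4",
            volume: 0.8f,
            fadeOutDuration: 2f);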