Code example #1
        unsafe AVAudioMix CreateAudioMix()
        {
            // Build an audio mix whose single input-parameters entry routes the
            // asset's audio track through an MTAudioProcessingTap inserted at the
            // pre-effects stage.
            var mix         = AVMutableAudioMix.Create();
            var inputParams = AVMutableAudioMixInputParameters.FromTrack(audioAssetTrack);

            // Tap lifecycle callbacks are members defined elsewhere in this class.
            // NOTE(review): "Finalaze" looks like a typo for a finalize handler;
            // renaming it requires changing that member's declaration too.
            var tapCallbacks = new MTAudioProcessingTapCallbacks(TapProcess)
            {
                Initialize = TapInitialization,
                Finalize   = Finalaze,
                Prepare    = TapPrepare,
                Unprepare  = Unprepare,
            };

            // Keep a reference on the field so the tap outlives this method.
            audioProcessingTap            = new MTAudioProcessingTap(tapCallbacks, MTAudioProcessingTapCreationFlags.PreEffects);
            inputParams.AudioTapProcessor = audioProcessingTap;
            mix.InputParameters           = new AVAudioMixInputParameters[] { inputParams };

            return mix;
        }
Code example #2
        /// <summary>
        /// Lays the clips out in two alternating video/audio track pairs so that
        /// consecutive clips overlap by the transition duration, then builds the
        /// cross-fade video composition instructions and the matching audio volume
        /// ramps for each transition.
        /// </summary>
        /// <param name="composition">Composition that receives the four tracks.</param>
        /// <param name="videoComposition">Receives the pass-through and cross-fade instructions.</param>
        /// <param name="audioMix">Receives the per-transition volume ramps.</param>
        private void BuildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
        {
            CMTime nextClipStartTime = CMTime.Zero;
            int    clipsCount        = Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            CMTime transitionDuration = TransitionDuration;

            Console.WriteLine("Clips Count:" + clipsCount);
            Console.WriteLine("Clips Range Count:" + ClipTimeRanges.Count);

            for (int i = 0; i < clipsCount; i++)
            {
                NSValue clipTimeRange = ClipTimeRanges [i];
                if (clipTimeRange != null)
                {
                    // Doubling the timescale halves the effective duration, i.e. this
                    // yields duration/2 without converting through seconds.
                    CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
                    halfClipDuration.TimeScale *= 2;
                    transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
                }
            }

            // Add two video tracks and two audio tracks.
            var compositionVideoTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Video, 0),
                composition.AddMutableTrack(AVMediaType.Video, 0)
            };
            var compositionAudioTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Audio, 0),
                composition.AddMutableTrack(AVMediaType.Audio, 0)
            };

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int         alternatingIndex = i % 2;
                AVAsset     asset            = Clips [i];
                NSValue     clipTimeRange    = ClipTimeRanges [i];
                CMTimeRange timeRangeInAsset;
                if (clipTimeRange != null)
                {
                    timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
                }
                else
                {
                    // No explicit range supplied: use the whole asset.
                    timeRangeInAsset          = new CMTimeRange();
                    timeRangeInAsset.Start    = CMTime.Zero;
                    timeRangeInAsset.Duration = asset.Duration;
                }

                NSError      error;
                AVAssetTrack clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video) [0];
                compositionVideoTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);
                if (error != null)
                {
                    Console.WriteLine("Error inserting video time range: " + error.LocalizedDescription);
                }

                AVAssetTrack clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio) [0];
                compositionAudioTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);
                if (error != null)
                {
                    Console.WriteLine("Error inserting audio time range: " + error.LocalizedDescription);
                }

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                CMTimeRange timeRange = new CMTimeRange();
                timeRange.Start           = nextClipStartTime;
                timeRange.Duration        = timeRangeInAsset.Duration;
                passThroughTimeRanges [i] = timeRange;

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item
                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges [i] = new CMTimeRange()
                    {
                        Start    = nextClipStartTime,
                        Duration = transitionDuration
                    };
                }
            }

            List <AVVideoCompositionInstruction>    instructions  = new List <AVVideoCompositionInstruction> ();
            List <AVMutableAudioMixInputParameters> trackMixArray = new List <AVMutableAudioMixInputParameters> ();

            // Set up the video composition if we are to perform crossfade transitions between clips.
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2;

                // Pass-through: show this clip's track alone for its exclusive range.
                AVMutableVideoCompositionInstruction passThroughInstructions = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                passThroughInstructions.TimeRange = passThroughTimeRanges [i];

                AVMutableVideoCompositionLayerInstruction passThroughLayerInstructions = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);

                passThroughInstructions.LayerInstructions = new AVVideoCompositionLayerInstruction[] { passThroughLayerInstructions };
                instructions.Add(passThroughInstructions);

                if (i + 1 < clipsCount)
                {
                    var transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    transitionInstruction.TimeRange = transitionTimeRanges [i];
                    var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                    var toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [1 - alternatingIndex]);

                    // Fade in the toLayer by setting a ramp from 0.0 to 1.0; fromLayer
                    // stays opaque underneath, producing the cross-fade.
                    toLayer.SetOpacityRamp(0.0f, 1.0f, transitionTimeRanges [i]);
                    transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[]
                    {
                        toLayer,
                        fromLayer,
                    };
                    instructions.Add(transitionInstruction);

                    // Add AudioMix volume ramps that mirror the video cross-fade:
                    // fade the outgoing clip's audio out and the incoming clip's audio
                    // in across THIS transition, then hold the incoming clip at full
                    // volume through its own pass-through range.
                    // (Fix: the original hard-coded transitionTimeRanges[0],
                    // passThroughTimeRanges[1] and tracks [0]/[1], which stacked
                    // duplicate ramps on the first transition whenever there were
                    // more than two clips and left later transitions without fades.)
                    var fromTrackMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[alternatingIndex]);
                    fromTrackMix.SetVolumeRamp(1f, 0f, transitionTimeRanges[i]);
                    trackMixArray.Add(fromTrackMix);

                    var toTrackMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[1 - alternatingIndex]);
                    toTrackMix.SetVolumeRamp(0f, 1f, transitionTimeRanges[i]);
                    toTrackMix.SetVolumeRamp(1f, 1f, passThroughTimeRanges[i + 1]);
                    trackMixArray.Add(toTrackMix);
                }
            }

            videoComposition.Instructions = instructions.ToArray();
            audioMix.InputParameters      = trackMixArray.ToArray();
        }
Code example #3
        /// <summary>
        /// Muxes the audio file onto the video file's timeline (trimmed to the video's
        /// duration), optionally fading the audio out over the last
        /// <paramref name="fadeOutDuration"/> seconds, and exports the result as MP4.
        /// </summary>
        /// <param name="videoFilePath">Path of the source video file.</param>
        /// <param name="audioFilePath">Path of the audio file to lay over the video.</param>
        /// <param name="outputFilePath">Destination path for the exported MP4.</param>
        /// <param name="volume">Constant audio volume applied from time zero.</param>
        /// <param name="fadeOutDuration">Fade-out length in seconds; 0 disables the fade.</param>
        /// <returns>A task that completes with the export's success or failure.</returns>
        public Task <OperationResult> AddAudioToVideoTrack(string videoFilePath, string audioFilePath, string outputFilePath,
                                                           float volume = 1, float fadeOutDuration = 0)
        {
            // RunContinuationsAsynchronously keeps caller continuations off the
            // export session's callback thread.
            var tcs = new TaskCompletionSource <OperationResult>(TaskCreationOptions.RunContinuationsAsynchronously);

            var composition           = AVMutableComposition.Create();
            var videoCompositionTrack = composition.AddMutableTrack(AVMediaType.Video, 0);
            var audioCompositionTrack = composition.AddMutableTrack(AVMediaType.Audio, 0);

            var videoUrl        = NSUrl.FromFilename(videoFilePath);
            var videoAsset      = AVAsset.FromUrl(videoUrl);
            var videoAssetTrack = videoAsset.TracksWithMediaType(AVMediaType.Video).First();

            var audioUrl        = NSUrl.FromFilename(audioFilePath);
            var audioAsset      = AVAsset.FromUrl(audioUrl);
            var audioAssetTrack = audioAsset.TracksWithMediaType(AVMediaType.Audio).First();

            CMTime time = CMTime.Zero;

            // Both tracks are inserted over the video's full duration so the audio is
            // trimmed (or falls short) to match the video.
            var range = new CMTimeRange
            {
                Start    = CMTime.Zero,
                Duration = videoAssetTrack.TimeRange.Duration
            };

            NSError error = null;

            videoCompositionTrack.InsertTimeRange(range, videoAssetTrack, time, out error);
            if (error != null)
            {
                Console.WriteLine("Error adding video composition track: " + error.LocalizedDescription);
            }

            error = null;
            audioCompositionTrack.InsertTimeRange(range, audioAssetTrack, time, out error);
            if (error != null)
            {
                Console.WriteLine("Error adding audio composition track: " + error.LocalizedDescription);
            }

            var audioMix         = AVMutableAudioMix.Create();
            var audioInputParams = AVMutableAudioMixInputParameters.FromTrack(audioCompositionTrack);

            audioInputParams.SetVolume(volume, CMTime.Zero);

            if (fadeOutDuration > 0)
            {
                // NOTE(review): if fadeOutDuration exceeds the video length the ramp
                // start goes negative — verify callers never pass such values.
                var fadeOutStartTime = CMTime.Subtract(videoAssetTrack.TimeRange.Duration, CMTime.FromSeconds(fadeOutDuration, audioAssetTrack.NaturalTimeScale));
                var fadeOutRange     = new CMTimeRange
                {
                    Start    = fadeOutStartTime,
                    Duration = CMTime.FromSeconds(fadeOutDuration, audioAssetTrack.NaturalTimeScale)
                };

                audioInputParams.SetVolumeRamp(volume, 0.0f, fadeOutRange);
            }

            audioMix.InputParameters = new[] { audioInputParams };

            var session = new AVAssetExportSession(composition, AVAssetExportSession.PresetHighestQuality);

            session.OutputUrl      = NSUrl.FromFilename(outputFilePath);
            session.OutputFileType = AVFileType.Mpeg4;
            session.AudioMix       = audioMix;

            session.ExportAsynchronously(() =>
            {
                // Completed is the only success state; Failed AND Cancelled are both
                // reported as failures (the original treated a cancelled export as
                // success). Dispose the session now that the export is finished.
                if (session.Status == AVAssetExportSessionStatus.Completed)
                {
                    tcs.SetResult(OperationResult.AsSuccess());
                }
                else
                {
                    var message = session.Error != null ? session.Error.LocalizedDescription : session.Status.ToString();
                    tcs.SetResult(OperationResult.AsFailure(message));
                }
                session.Dispose();
            });

            return tcs.Task;
        }