Code example #1
        void SetupReaderAndWriter()
        {
            NSError error = null;

            // Create asset reader and asset writer
            assetReader = AVAssetReader.FromAsset(asset, out error);
            if (assetReader == null)
            {
                throw new NSErrorException(error);
            }

            assetWriter = AVAssetWriter.FromUrl(outputURL, AVFileType.QuickTimeMovie, out error);
            if (assetWriter == null)
            {
                throw new NSErrorException(error);
            }

            // Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
            // Grab first audio track and first video track, if the asset has them
            AVAssetTrack audioTrack = asset.TracksWithMediaType(AVMediaType.Audio).FirstOrDefault();
            AVAssetTrack videoTrack = asset.TracksWithMediaType(AVMediaType.Video).FirstOrDefault();

            SetupAssetReaderWriterForAudio(audioTrack);
            SetupAssetReaserWriterForVideo(videoTrack);
        }
Code example #2
        private void SetupReaderAndWriter()
        {
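            // Work from local copies so this setup pass keeps using the same asset and output URL
            // even if the backing fields are reassigned while it runs.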
            AVAsset localAsset     = _asset;
            NSUrl   localOutputURL = _outputURL;
            NSError error          = null;

            // Create asset reader and asset writer
            _assetReader = new AVAssetReader(localAsset, out error);
            if (_assetReader == null)
            {
                throw new NSErrorException(error);
            }

            _assetWriter = new AVAssetWriter(localOutputURL, AVFileType.QuickTimeMovie, out error);
            if (_assetWriter == null)
            {
                throw new NSErrorException(error);
            }

            // Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
            // Grab first audio track and first video track, if the asset has them
            AVAssetTrack audioTrack = localAsset.TracksWithMediaType(AVMediaType.Audio).FirstOrDefault();
            AVAssetTrack videoTrack = localAsset.TracksWithMediaType(AVMediaType.Video).FirstOrDefault();

            SetupAssetReaderWriterForAudio(audioTrack);
            SetupAssetReaserWriterForVideo(videoTrack);
        }
Code example #3
        void ReadSampleBuffers(AVAsset asset)
        {
            NSError error;

            assetReader = AVAssetReader.FromAsset(asset, out error);

            if (error != null)
            {
                Console.WriteLine("Error creating Asset Reader: {0}", error.Description);
                return;
            }

            AVAssetTrack[] videoTracks = asset.TracksWithMediaType(AVMediaType.Video);
            AVAssetTrack   videoTrack  = videoTracks [0];

            CreateDecompressionSession(videoTrack);
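            // Passing null output settings makes the track output vend samples in their stored
            // (still compressed) format, which is what the decompression session expects below.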
            var videoTrackOutput = AVAssetReaderTrackOutput.Create(videoTrack, (AVVideoSettingsUncompressed)null);

            if (assetReader.CanAddOutput(videoTrackOutput))
            {
                assetReader.AddOutput(videoTrackOutput);
            }

            if (!assetReader.StartReading())
            {
                return;
            }

            while (assetReader.Status == AVAssetReaderStatus.Reading)
            {
                CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer();
                if (sampleBuffer != null)
                {
                    VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
                    VTDecodeInfoFlags  flagOut;
                    decompressionSession.DecodeFrame(sampleBuffer, flags, IntPtr.Zero, out flagOut);

                    sampleBuffer.Dispose();
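                    // Throttle decoding: once several frames are pending (tracked in presentationTimes),
                    // wait on the semaphore, which is expected to be signaled as frames are consumed.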
                    if (presentationTimes.Count >= 5)
                    {
                        bufferSemaphore.Wait();
                    }
                }
                else if (assetReader.Status == AVAssetReaderStatus.Failed)
                {
                    Console.WriteLine("Asset Reader failed with error: {0}", assetReader.Error.Description);
                }
                else if (assetReader.Status == AVAssetReaderStatus.Completed)
                {
                    Console.WriteLine("Reached the end of the video.");
                    ChangeStatus();
                    ReadSampleBuffers(asset);
                }
            }
        }
Code example #4
        VTDecompressionSession CreateSession(AVAsset asset)
        {
            var videoTracks      = asset.TracksWithMediaType(AVMediaType.Video);
            var track            = videoTracks[0];
            var formatDescriptor = track.FormatDescriptions[0] as CMVideoFormatDescription;

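            // The output callback runs once per decoded frame; a full implementation would check
            // the status and hand the decoded CVImageBuffer on for display. It is left empty here.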
            var session = VTDecompressionSession.Create(
                (sourceFrame, status, flags, buffer, presentationTimeStamp, presentationDuration) => {},
                formatDescriptor);

            return session;
        }
Code example #5
        private void BuildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
        {
            CMTime nextClipStartTime = CMTime.Zero;
            int    clipsCount        = Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            CMTime transitionDuration = TransitionDuration;

            Console.WriteLine("Clips Count:" + clipsCount);
            Console.WriteLine("Clips Range Count:" + ClipTimeRanges.Count);

            for (int i = 0; i < clipsCount; i++)
            {
                NSValue clipTimeRange = ClipTimeRanges [i];
                if (clipTimeRange != null)
                {
                    CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
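                    // Doubling the timescale halves the time this CMTime represents, i.e. half the clip's duration.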
                    halfClipDuration.TimeScale *= 2;
                    transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
                }
            }

            // Add two video tracks and two audio tracks.
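            // Alternating clips between two tracks of each type lets consecutive clips overlap during the transition.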
            var compositionVideoTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Video, 0),
                composition.AddMutableTrack(AVMediaType.Video, 0)
            };
            var compositionAudioTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Audio, 0),
                composition.AddMutableTrack(AVMediaType.Audio, 0)
            };

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int         alternatingIndex = i % 2;
                AVAsset     asset            = Clips [i];
                NSValue     clipTimeRange    = ClipTimeRanges [i];
                CMTimeRange timeRangeInAsset;
                if (clipTimeRange != null)
                {
                    timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
                }
                else
                {
                    timeRangeInAsset          = new CMTimeRange();
                    timeRangeInAsset.Start    = CMTime.Zero;
                    timeRangeInAsset.Duration = asset.Duration;
                }
                NSError      error;
                AVAssetTrack clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video) [0];
                compositionVideoTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);

                AVAssetTrack clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio) [0];
                compositionAudioTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                CMTimeRange timeRange = new CMTimeRange();
                timeRange.Start           = nextClipStartTime;
                timeRange.Duration        = timeRangeInAsset.Duration;
                passThroughTimeRanges [i] = timeRange;

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item
                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges [i] = new CMTimeRange()
                    {
                        Start    = nextClipStartTime,
                        Duration = transitionDuration
                    };
                }
            }

            List <AVVideoCompositionInstruction>    instructions  = new List <AVVideoCompositionInstruction> ();
            List <AVMutableAudioMixInputParameters> trackMixArray = new List <AVMutableAudioMixInputParameters> ();

            // Set up the video composition if we are to perform crossfade transitions between clips.
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2;
                AVMutableVideoCompositionInstruction passThroughInstructions = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                passThroughInstructions.TimeRange = passThroughTimeRanges [i];

                AVMutableVideoCompositionLayerInstruction passThroughLayerInstructions = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);

                passThroughInstructions.LayerInstructions = new AVVideoCompositionLayerInstruction[] { passThroughLayerInstructions };
                instructions.Add(passThroughInstructions);

                if (i + 1 < clipsCount)
                {
                    var transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    transitionInstruction.TimeRange = transitionTimeRanges [i];
                    var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                    var toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [1 - alternatingIndex]);


                    // Fade in the toLayer by setting a ramp from 0.0 to 1.0.
                    toLayer.SetOpacityRamp(0.0f, 1.0f, transitionTimeRanges [i]);
                    transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[]
                    {
                        toLayer,
                        fromLayer,
                    };
                    instructions.Add(transitionInstruction);

                    // Add audio mix parameters that ramp the volumes across the transition
                    var trackMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[0]);
                    trackMix.SetVolumeRamp(1f, 0f, transitionTimeRanges[0]);
                    trackMixArray.Add(trackMix);

                    trackMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[1]);
                    trackMix.SetVolumeRamp(0f, 1f, transitionTimeRanges[0]);
                    trackMix.SetVolumeRamp(1f, 1f, passThroughTimeRanges[1]);
                    trackMixArray.Add(trackMix);
                }
            }

            videoComposition.Instructions = instructions.ToArray();
            audioMix.InputParameters      = trackMixArray.ToArray();
        }
Code example #6
        void buildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition)
        {
            CMTime nextClipStartTime = CMTime.Zero;
            int    clipsCount        = Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            CMTime transitionDuration = TransitionDuration;

            foreach (var clipTimeRange in ClipTimeRanges)
            {
                if (clipTimeRange == null)
                {
                    continue;
                }

                CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
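                // Doubling the timescale halves the time this CMTime represents, i.e. half the clip's duration.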
                halfClipDuration.TimeScale *= 2;
                transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
            }

            // Add two video tracks and two audio tracks.
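            // Alternating clips between two tracks of each type lets consecutive clips overlap during the transition.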
            var compositionVideoTracks = new AVMutableCompositionTrack [2];
            var compositionAudioTracks = new AVMutableCompositionTrack [2];

            compositionVideoTracks [0] = composition.AddMutableTrack(AVMediaType.Video, 0);
            compositionVideoTracks [1] = composition.AddMutableTrack(AVMediaType.Video, 0);
            compositionAudioTracks [0] = composition.AddMutableTrack(AVMediaType.Audio, 0);
            compositionAudioTracks [1] = composition.AddMutableTrack(AVMediaType.Audio, 0);

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int         alternatingIndex = i % 2;
                AVAsset     asset            = Clips [i];
                NSValue     clipTimeRange    = ClipTimeRanges [i];
                CMTimeRange timeRangeInAsset;
                if (clipTimeRange != null)
                {
                    timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
                }
                else
                {
                    timeRangeInAsset = new CMTimeRange {
                        Start    = CMTime.Zero,
                        Duration = asset.Duration
                    };
                }
                NSError      error;
                AVAssetTrack clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video) [0];
                compositionVideoTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);

                AVAssetTrack clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio) [0];
                compositionAudioTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                passThroughTimeRanges [i] = new CMTimeRange {
                    Start    = nextClipStartTime,
                    Duration = timeRangeInAsset.Duration
                };

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }
                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item.

                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges [i] = new CMTimeRange()
                    {
                        Start    = nextClipStartTime,
                        Duration = transitionDuration
                    };
                }
            }

            // Set up the video composition to perform cross dissolve or diagonal wipe transitions between clips.
            var instructions = new List <AVVideoCompositionInstruction> ();

            // Cycle between "pass through A", "transition from A to B", "pass through B"
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2;

                // Use the custom compositor's own instruction type when one is set;
                // otherwise fall back to a standard pass-through instruction.
                if (videoComposition.CustomVideoCompositorClass != null)
                {
                    var videoInstruction = new CustomVideoCompositionInstruction(compositionVideoTracks [alternatingIndex].TrackID, passThroughTimeRanges [i]);
                    instructions.Add(videoInstruction);
                }
                else
                {
                    // Pass through clip i.
                    var passThroughInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    passThroughInstruction.TimeRange = passThroughTimeRanges [i];
                    var passThroughLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                    passThroughInstruction.LayerInstructions = new [] { passThroughLayer };
                    instructions.Add(passThroughInstruction);
                }

                if (i + 1 < clipsCount)
                {
                    // Add transition from clip i to clip i+1.
                    if (videoComposition.CustomVideoCompositorClass != null)
                    {
                        var sources = new NSNumber[] {
                            new NSNumber(compositionVideoTracks [0].TrackID),
                            new NSNumber(compositionVideoTracks [1].TrackID)
                        };
                        var videoInstruction = new CustomVideoCompositionInstruction(sources, transitionTimeRanges [i]);
                        if (alternatingIndex == 0)
                        {
                            videoInstruction.ForegroundTrackID = compositionVideoTracks [alternatingIndex].TrackID;
                            videoInstruction.BackgroundTrackID = compositionVideoTracks [1 - alternatingIndex].TrackID;
                        }

                        instructions.Add(videoInstruction);
                    }
                    else
                    {
                        var transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                        transitionInstruction.TimeRange = transitionTimeRanges [i];
                        var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                        var toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [1 - alternatingIndex]);
                        transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[] {
                            fromLayer,
                            toLayer,
                        };
                        instructions.Add(transitionInstruction);
                    }
                }
            }

            videoComposition.Instructions = instructions.ToArray();
        }