static AVAssetTrack FetchFirstAudioTrack(AVPlayer player)
        {
            AVAssetTrack[] tracks          = player.CurrentItem.Asset.Tracks;
            AVAssetTrack   firstAudioTrack = tracks.FirstOrDefault(t => t.MediaType == AVMediaType.Audio);

            return firstAudioTrack;
        }
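A minimal usage sketch (not part of the sample; the Player property mirrors the CreateTapProcessor example further down):

        void LogFirstAudioTrack()
        {
            // Hypothetical caller: fetch the first audio track of the current item, if any.
            AVAssetTrack track = FetchFirstAudioTrack(Player);
            if (track != null)
            {
                Console.WriteLine("First audio track ID: {0}", track.TrackID);
            }
        }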
Example #2
        void SetupReaderAndWriter()
        {
            NSError error = null;

            // Create asset reader and asset writer
            assetReader = AVAssetReader.FromAsset(asset, out error);
            if (assetReader == null)
            {
                throw new NSErrorException(error);
            }

            assetWriter = AVAssetWriter.FromUrl(outputURL, AVFileType.QuickTimeMovie, out error);
            if (assetWriter == null)
            {
                throw new NSErrorException(error);
            }

            // Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
            // Grab first audio track and first video track, if the asset has them
            AVAssetTrack audioTrack = asset.TracksWithMediaType(AVMediaType.Audio).FirstOrDefault();
            AVAssetTrack videoTrack = asset.TracksWithMediaType(AVMediaType.Video).FirstOrDefault();

            SetupAssetReaderWriterForAudio(audioTrack);
            SetupAssetReaderWriterForVideo(videoTrack);
        }
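Nothing in this excerpt starts the transfer; a hedged sketch of the follow-up step using the standard StartReading/StartWriting calls (the method itself is illustrative, not part of the sample):

        void StartReadingAndWriting()
        {
            if (!assetReader.StartReading())
            {
                throw new NSErrorException(assetReader.Error);
            }

            if (!assetWriter.StartWriting())
            {
                throw new NSErrorException(assetWriter.Error);
            }

            // Samples will be stitched into the output starting at time zero.
            assetWriter.StartSessionAtSourceTime(CMTime.Zero);
        }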
Example #3
        private void SetupReaderAndWriter()
        {
            AVAsset localAsset     = _asset;
            NSUrl   localOutputURL = _outputURL;
            NSError error          = null;

            // Create asset reader and asset writer
            _assetReader = new AVAssetReader(localAsset, out error);
            if (_assetReader == null)
            {
                throw new NSErrorException(error);
            }

            _assetWriter = new AVAssetWriter(localOutputURL, AVFileType.QuickTimeMovie, out error);
            if (_assetWriter == null)
            {
                throw new NSErrorException(error);
            }

            // Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
            // Grab first audio track and first video track, if the asset has them
            AVAssetTrack audioTrack = localAsset.TracksWithMediaType(AVMediaType.Audio).FirstOrDefault();
            AVAssetTrack videoTrack = localAsset.TracksWithMediaType(AVMediaType.Video).FirstOrDefault();

            SetupAssetReaderWriterForAudio(audioTrack);
            SetupAssetReaderWriterForVideo(videoTrack);
        }
Example #4
        void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
        {
            if (audioTrack == null)
            {
                return;
            }

            // Decompress to Linear PCM with the asset reader
            // To read the media data from a specific asset track in the format in which it was stored, pass null to the settings parameter.
            AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);

            if (assetReader.CanAddOutput(output))
            {
                assetReader.AddOutput(output);
            }

            AVAssetWriterInput input = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);

            if (assetWriter.CanAddInput(input))
            {
                assetWriter.AddInput(input);
            }

            // Create and save an instance of ReadWriteSampleBufferChannel,
            // which will coordinate the work of reading and writing sample buffers
            audioSampleBufferChannel = new AudioChannel(output, input);
        }
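The AudioChannel class itself is not shown in this example; a hedged sketch of the read/write pump such a channel might run, assuming the standard RequestMediaData pattern (the method name, queue label, and loop shape are illustrative):

        void StartPump(AVAssetReaderOutput output, AVAssetWriterInput input)
        {
            var queue = new DispatchQueue("audio.samplebuffer.channel");
            input.RequestMediaData(queue, () =>
            {
                // Drain the reader while the writer can accept more data.
                while (input.ReadyForMoreMediaData)
                {
                    using (CMSampleBuffer buffer = output.CopyNextSampleBuffer())
                    {
                        if (buffer == null)
                        {
                            // Reader is exhausted; tell the writer this input is done.
                            input.MarkAsFinished();
                            return;
                        }
                        input.AppendSampleBuffer(buffer);
                    }
                }
            });
        }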
Example #5
        void CreateDecompressionSession(AVAssetTrack videoTrack)
        {
            CMFormatDescription[] formatDescriptions = videoTrack.FormatDescriptions;
            var formatDescription = (CMVideoFormatDescription)formatDescriptions [0];

            videoPreferredTransform = videoTrack.PreferredTransform;
            decompressionSession    = VTDecompressionSession.Create(DidDecompress, formatDescription);
        }
Example #6
        void ReadSampleBuffers(AVAsset asset)
        {
            NSError error;

            assetReader = AVAssetReader.FromAsset(asset, out error);

            if (assetReader == null)
            {
                Console.WriteLine("Error creating Asset Reader: {0}", error.Description);
                return;
            }

            AVAssetTrack[] videoTracks = asset.TracksWithMediaType(AVMediaType.Video);
            AVAssetTrack   videoTrack  = videoTracks [0];

            CreateDecompressionSession(videoTrack);
            var videoTrackOutput = AVAssetReaderTrackOutput.Create(videoTrack, (AVVideoSettingsUncompressed)null);

            if (assetReader.CanAddOutput(videoTrackOutput))
            {
                assetReader.AddOutput(videoTrackOutput);
            }

            if (!assetReader.StartReading())
            {
                return;
            }

            while (assetReader.Status == AVAssetReaderStatus.Reading)
            {
                CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer();
                if (sampleBuffer != null)
                {
                    VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
                    VTDecodeInfoFlags  flagOut;
                    decompressionSession.DecodeFrame(sampleBuffer, flags, IntPtr.Zero, out flagOut);

                    sampleBuffer.Dispose();
                    if (presentationTimes.Count >= 5)
                    {
                        bufferSemaphore.Wait();
                    }
                }
                else if (assetReader.Status == AVAssetReaderStatus.Failed)
                {
                    Console.WriteLine("Asset Reader failed with error: {0}", assetReader.Error.Description);
                }
                else if (assetReader.Status == AVAssetReaderStatus.Completed)
                {
                    Console.WriteLine("Reached the end of the video.");
                    ChangeStatus();
                    ReadSampleBuffers(asset);
                }
            }
        }
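The DidDecompress callback passed to VTDecompressionSession.Create, together with the presentationTimes list and bufferSemaphore used for back-pressure, live elsewhere in this sample. A hedged sketch of the callback, assuming presentationTimes is a List<CMTime> drained by the display side:

        void DidDecompress(IntPtr sourceFrame, VTStatus status, VTDecodeInfoFlags infoFlags,
                           CVImageBuffer imageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration)
        {
            if (status != VTStatus.Ok || imageBuffer == null)
            {
                Console.WriteLine("Error decompressing frame: {0}", status);
                return;
            }

            // Assumed bookkeeping: queue the frame's timestamp; the consumer that
            // displays frames releases bufferSemaphore as it drains the queue,
            // unblocking the reading loop once fewer than 5 frames are pending.
            presentationTimes.Add(presentationTimeStamp);
        }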
		public AudioTapProcessor (AVAssetTrack audioTrack)
		{
			if (audioTrack == null)
				throw new ArgumentNullException ("audioTrack");

			if (audioTrack.MediaType != AVMediaType.Audio)
				throw new ArgumentException ("MediaType is not AVMediaType.Audio", "audioTrack");

			audioAssetTrack = audioTrack;
			centerFrequency = 4980f / 23980f; // equals 5000 Hz (assuming sample rate is 48k)
			bandwidth = 500f / 11900f; // equals 600 Cents
		}
        AudioTapProcessor CreateTapProcessor()
        {
            AudioTapProcessor processor = null;

            AVAssetTrack firstAudioAssetTrack = FetchFirstAudioTrack(Player);

            if (firstAudioAssetTrack != null)
            {
                processor            = new AudioTapProcessor(firstAudioAssetTrack);
                processor.Controller = this;
            }

            return processor;
        }
Example #9
        private void SetupAssetReaderWriterForVideo(AVAssetTrack videoTrack)
        {
            throw new NotImplementedException("64 bit");

            /*
             * if (videoTrack == null)
             *      return;
             *
             * // Decompress to ARGB with the asset reader
             * // TODO: https://trello.com/c/Kvzxo86W. Set kCVPixelBufferIOSurfacePropertiesKey to empty dictionary
             * //														(id)kCVPixelBufferIOSurfacePropertiesKey : @{}
             * var decompSettings = new AVVideoSettingsUncompressed {
             *      PixelFormatType = CVPixelFormatType.CV32BGRA,
             *      AllocateWithIOSurface = true
             * };
             * AVAssetReaderOutput output = new AVAssetReaderTrackOutput(videoTrack, decompSettings);
             * _assetReader.AddOutput (output);
             *
             * // Get the format description of the track, to fill in attributes of the video stream that we don't want to change
             * CMFormatDescription formatDescription = videoTrack.FormatDescriptions.FirstOrDefault ();
             *
             * // Grab track dimensions from format description
             * SizeF trackDimensions = formatDescription != null
             *      ? formatDescription.GetVideoPresentationDimensions (false, false)
             *      : videoTrack.NaturalSize;
             *
             * // Grab clean aperture, pixel aspect ratio from format description
             * AVVideoCodecSettings compressionSettings = null;
             * if (formatDescription != null) {
             *      var cleanApertureDescr = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.CleanApertureKey);
             *      var pixelAspectRatioDescr = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.PixelAspectRatioKey);
             *      compressionSettings = CreateCodecSettingsFor (cleanApertureDescr, pixelAspectRatioDescr);
             * }
             *
             * // Compress to H.264 with the asset writer
             * var videoSettings = new AVVideoSettingsCompressed {
             *      Codec = AVVideoCodec.H264,
             *      Width = (int)trackDimensions.Width,
             *      Height = (int)trackDimensions.Height,
             *      CodecSettings = compressionSettings
             * };
             * AVAssetWriterInput input = AVAssetWriterInput.FromType (videoTrack.MediaType, videoSettings.Dictionary);
             * input.Transform = videoTrack.PreferredTransform;
             * _assetWriter.AddInput (input);
             *
             * // Create and save an instance of AAPLRWSampleBufferChannel,
             * // which will coordinate the work of reading and writing sample buffers
             * _videoSampleBufferChannel = new ReadWriteSampleBufferChannel (output, input, true);
             */
        }
Example #10
            public AudioTapProcessor(AVAssetTrack audioTrack)
            {
                if (audioTrack == null)
                {
                    throw new ArgumentNullException("audioTrack");
                }

                if (audioTrack.MediaType != AVMediaType.Audio)
                {
                    throw new ArgumentException("MediaType is not AVMediaType.Audio", "audioTrack");
                }

                audioAssetTrack = audioTrack;
            }
Example #11
        void SetupAssetReaderWriterForVideo(AVAssetTrack videoTrack)
        {
            if (videoTrack == null)
            {
                return;
            }

            // Decompress to ARGB with the asset reader
            var decompSettings = new AVVideoSettingsUncompressed {
                PixelFormatType       = CVPixelFormatType.CV32BGRA,
                AllocateWithIOSurface = null
            };
            AVAssetReaderOutput output = new AVAssetReaderTrackOutput(videoTrack, decompSettings);

            assetReader.AddOutput(output);

            // Get the format description of the track, to fill in attributes of the video stream that we don't want to change
            var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault();
            // Grab track dimensions from format description
            CGSize trackDimensions = formatDescription != null
                                ? formatDescription.GetPresentationDimensions(false, false)
                                : videoTrack.NaturalSize;

            // Grab clean aperture, pixel aspect ratio from format description
            AVVideoCodecSettings compressionSettings = null;

            if (formatDescription != null)
            {
                var cleanApertureDescr    = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.CleanApertureKey);
                var pixelAspectRatioDescr = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.PixelAspectRatioKey);
                compressionSettings = CreateCodecSettingsFor(cleanApertureDescr, pixelAspectRatioDescr);
            }

            // Compress to H.264 with the asset writer
            var videoSettings = new AVVideoSettingsCompressed {
                Codec         = AVVideoCodec.H264,
                Width         = (int)trackDimensions.Width,
                Height        = (int)trackDimensions.Height,
                CodecSettings = compressionSettings
            };
            AVAssetWriterInput input = new AVAssetWriterInput(videoTrack.MediaType, videoSettings);

            input.Transform = videoTrack.PreferredTransform;
            assetWriter.AddInput(input);

            // Create and save an instance of ReadWriteSampleBufferChannel,
            // which will coordinate the work of reading and writing sample buffers
            videoSampleBufferChannel = new VideoChannel(output, input, transformer);
        }
        public AudioTapProcessor(AVAssetTrack audioTrack)
        {
            if (audioTrack == null)
            {
                throw new ArgumentNullException("audioTrack");
            }

            if (audioTrack.MediaType != AVMediaType.Audio)
            {
                throw new ArgumentException("MediaType is not AVMediaType.Audio", "audioTrack");
            }

            audioAssetTrack = audioTrack;
            centerFrequency = 4980f / 23980f;      // equals 5000 Hz (assuming sample rate is 48k)
            bandwidth       = 500f / 11900f;       // equals 600 Cents
        }
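The two normalized constants decode cleanly if the tap maps 0..1 onto a 20 Hz-to-Nyquist range, which is an assumption consistent with the comments above, not something this excerpt states:

        // Hedged sanity check: at a 48 kHz sample rate, Nyquist is 24000 Hz, so
        // 20 + (4980f / 23980f) * (24000 - 20) = 5000 Hz, matching the comment.
        static float NormalizedToHz(float normalized, float sampleRate)
        {
            return 20f + normalized * (sampleRate / 2f - 20f);
        }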
Example #13
        private void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
        {
            if (audioTrack == null)
            {
                return;
            }

            // Decompress to Linear PCM with the asset reader
            AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);

            _assetReader.AddOutput(output);

            AVAssetWriterInput input = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);

            _assetWriter.AddInput(input);

            // Create and save an instance of AAPLRWSampleBufferChannel,
            // which will coordinate the work of reading and writing sample buffers
            _audioSampleBufferChannel = new ReadWriteSampleBufferChannel(output, input, false);
        }
Example #14
		void SetupAssetReaderWriterForAudio (AVAssetTrack audioTrack)
		{
			if (audioTrack == null)
				return;

			// Decompress to Linear PCM with the asset reader
			// To read the media data from a specific asset track in the format in which it was stored, pass null to the settings parameter.
			AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create (audioTrack, (AudioSettings)null);
			if (assetReader.CanAddOutput (output))
				assetReader.AddOutput (output);

			AVAssetWriterInput input = AVAssetWriterInput.Create (audioTrack.MediaType, (AudioSettings)null);
			if (assetWriter.CanAddInput (input))
				assetWriter.AddInput (input);

			// Create and save an instance of ReadWriteSampleBufferChannel,
			// which will coordinate the work of reading and writing sample buffers
			audioSampleBufferChannel = new AudioChannel(output, input);
		}
Example #15
 void CreateDecompressionSession(AVAssetTrack videoTrack)
 {
     CMFormatDescription[] formatDescriptions = videoTrack.FormatDescriptions;
     var formatDescription = (CMVideoFormatDescription)formatDescriptions [0];
     videoPreferredTransform = videoTrack.PreferredTransform;
     decompressionSession = VTDecompressionSession.Create (DidDecompress, formatDescription);
 }
Example #16
        private void BuildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
        {
            CMTime nextClipStartTime = CMTime.Zero;
            int    clipsCount        = Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            CMTime transitionDuration = TransitionDuration;

            Console.WriteLine("Clips Count:" + clipsCount);
            Console.WriteLine("Clips Range Count:" + ClipTimeRanges.Count);

            for (int i = 0; i < clipsCount; i++)
            {
                NSValue clipTimeRange = ClipTimeRanges [i];
                if (clipTimeRange != null)
                {
                    CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
                    halfClipDuration.TimeScale *= 2;
                    transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
                }
            }

            // Add two video tracks and two audio tracks.
            var compositionVideoTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Video, 0),
                composition.AddMutableTrack(AVMediaType.Video, 0)
            };
            var compositionAudioTracks = new AVMutableCompositionTrack [] {
                composition.AddMutableTrack(AVMediaType.Audio, 0),
                composition.AddMutableTrack(AVMediaType.Audio, 0)
            };

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int         alternatingIndex = i % 2;
                AVAsset     asset            = Clips [i];
                NSValue     clipTimeRange    = ClipTimeRanges [i];
                CMTimeRange timeRangeInAsset;
                if (clipTimeRange != null)
                {
                    timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
                }
                else
                {
                    timeRangeInAsset          = new CMTimeRange();
                    timeRangeInAsset.Start    = CMTime.Zero;
                    timeRangeInAsset.Duration = asset.Duration;
                }
                NSError      error;
                AVAssetTrack clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video) [0];
                compositionVideoTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);

                AVAssetTrack clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio) [0];
                compositionAudioTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                CMTimeRange timeRange = new CMTimeRange();
                timeRange.Start           = nextClipStartTime;
                timeRange.Duration        = timeRangeInAsset.Duration;
                passThroughTimeRanges [i] = timeRange;

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item
                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges [i] = new CMTimeRange()
                    {
                        Start    = nextClipStartTime,
                        Duration = transitionDuration
                    };
                }
            }

            List <AVVideoCompositionInstruction>    instructions  = new List <AVVideoCompositionInstruction> ();
            List <AVMutableAudioMixInputParameters> trackMixArray = new List <AVMutableAudioMixInputParameters> ();

            // Set up the video composition if we are to perform crossfade transitions between clips.
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2;
                AVMutableVideoCompositionInstruction passThroughInstructions = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                passThroughInstructions.TimeRange = passThroughTimeRanges [i];

                AVMutableVideoCompositionLayerInstruction passThroughLayerInstructions = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);

                passThroughInstructions.LayerInstructions = new AVVideoCompositionLayerInstruction[] { passThroughLayerInstructions };
                instructions.Add(passThroughInstructions);

                if (i + 1 < clipsCount)
                {
                    var transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    transitionInstruction.TimeRange = transitionTimeRanges [i];
                    var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                    var toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [1 - alternatingIndex]);


                    // Fade in the toLayer by setting a ramp from 0.0 to 1.0.
                    toLayer.SetOpacityRamp(0.0f, 1.0f, transitionTimeRanges [i]);
                    transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[]
                    {
                        toLayer,
                        fromLayer,
                    };
                    instructions.Add(transitionInstruction);

                    // Add AudioMix to fade in the volume ramps
                    var trackMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[0]);
                    trackMix.SetVolumeRamp(1f, 0f, transitionTimeRanges[0]);
                    trackMixArray.Add(trackMix);

                    trackMix = AVMutableAudioMixInputParameters.FromTrack(compositionAudioTracks[1]);
                    trackMix.SetVolumeRamp(0f, 1f, transitionTimeRanges[0]);
                    trackMix.SetVolumeRamp(1f, 1f, passThroughTimeRanges[1]);
                    trackMixArray.Add(trackMix);
                }
            }

            videoComposition.Instructions = instructions.ToArray();
            audioMix.InputParameters      = trackMixArray.ToArray();
        }
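Outside this excerpt, the composition, video composition, and audio mix are typically handed to an AVPlayerItem; a short sketch using standard AVFoundation calls (the method itself is an illustration, not part of the sample):

        AVPlayerItem CreatePlayerItem(AVMutableComposition composition, AVMutableVideoComposition videoComposition, AVMutableAudioMix audioMix)
        {
            var playerItem = AVPlayerItem.FromAsset(composition);
            playerItem.VideoComposition = videoComposition;
            playerItem.AudioMix         = audioMix;
            return playerItem;
        }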
Example #17
        void buildTransitionComposition(AVMutableComposition composition, AVMutableVideoComposition videoComposition)
        {
            CMTime nextClipStartTime = CMTime.Zero;
            int    clipsCount        = Clips.Count;

            // Make transitionDuration no greater than half the shortest clip duration.
            CMTime transitionDuration = TransitionDuration;

            foreach (var clipTimeRange in ClipTimeRanges)
            {
                if (clipTimeRange == null)
                {
                    continue;
                }

                CMTime halfClipDuration = clipTimeRange.CMTimeRangeValue.Duration;
                halfClipDuration.TimeScale *= 2;
                transitionDuration          = CMTime.GetMinimum(transitionDuration, halfClipDuration);
            }

            // Add two video tracks and two audio tracks.
            var compositionVideoTracks = new AVMutableCompositionTrack [2];
            var compositionAudioTracks = new AVMutableCompositionTrack [2];

            compositionVideoTracks [0] = composition.AddMutableTrack(AVMediaType.Video, 0);
            compositionVideoTracks [1] = composition.AddMutableTrack(AVMediaType.Video, 0);
            compositionAudioTracks [0] = composition.AddMutableTrack(AVMediaType.Audio, 0);
            compositionAudioTracks [1] = composition.AddMutableTrack(AVMediaType.Audio, 0);

            var passThroughTimeRanges = new CMTimeRange[clipsCount];
            var transitionTimeRanges  = new CMTimeRange[clipsCount];

            // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
            for (int i = 0; i < clipsCount; i++)
            {
                int         alternatingIndex = i % 2;
                AVAsset     asset            = Clips [i];
                NSValue     clipTimeRange    = ClipTimeRanges [i];
                CMTimeRange timeRangeInAsset;
                if (clipTimeRange != null)
                {
                    timeRangeInAsset = clipTimeRange.CMTimeRangeValue;
                }
                else
                {
                    timeRangeInAsset = new CMTimeRange {
                        Start    = CMTime.Zero,
                        Duration = asset.Duration
                    };
                }
            NSError      error;
                AVAssetTrack clipVideoTrack = asset.TracksWithMediaType(AVMediaType.Video) [0];
                compositionVideoTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipVideoTrack, nextClipStartTime, out error);

                AVAssetTrack clipAudioTrack = asset.TracksWithMediaType(AVMediaType.Audio) [0];
                compositionAudioTracks [alternatingIndex].InsertTimeRange(timeRangeInAsset, clipAudioTrack, nextClipStartTime, out error);

                // Remember the time range in which this clip should pass through.
                // First clip ends with a transition.
                // Second clip begins with a transition.
                // Exclude that transition from the pass through time ranges
                passThroughTimeRanges [i] = new CMTimeRange {
                    Start    = nextClipStartTime,
                    Duration = timeRangeInAsset.Duration
                };

                if (i > 0)
                {
                    passThroughTimeRanges[i].Start    = CMTime.Add(passThroughTimeRanges[i].Start, transitionDuration);
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }
                if (i + 1 < clipsCount)
                {
                    passThroughTimeRanges[i].Duration = CMTime.Subtract(passThroughTimeRanges[i].Duration, transitionDuration);
                }

                // The end of this clip will overlap the start of the next by transitionDuration.
                // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
                nextClipStartTime = CMTime.Add(nextClipStartTime, timeRangeInAsset.Duration);
                nextClipStartTime = CMTime.Subtract(nextClipStartTime, transitionDuration);

                // Remember the time range for the transition to the next item.

                if (i + 1 < clipsCount)
                {
                    transitionTimeRanges [i] = new CMTimeRange()
                    {
                        Start    = nextClipStartTime,
                        Duration = transitionDuration
                    };
                }
            }

            // Set up the video composition to perform cross dissolve or diagonal wipe transitions between clips.
            var instructions = new List <AVVideoCompositionInstruction> ();

            // Cycle between "pass through A", "transition from A to B", "pass through B"
            for (int i = 0; i < clipsCount; i++)
            {
                int alternatingIndex = i % 2;

                // Use the custom compositor when one is registered on the composition;
                // otherwise fall back to the built-in layer instructions.
                if (videoComposition.CustomVideoCompositorClass != null)
                {
                    var videoInstruction = new CustomVideoCompositionInstruction(compositionVideoTracks [alternatingIndex].TrackID, passThroughTimeRanges [i]);
                    instructions.Add(videoInstruction);
                }
                else
                {
                    // Pass through clip i.
                    var passThroughInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                    passThroughInstruction.TimeRange = passThroughTimeRanges [i];
                    var passThroughLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                    passThroughInstruction.LayerInstructions = new [] { passThroughLayer };
                    instructions.Add(passThroughInstruction);
                }

                if (i + 1 < clipsCount)
                {
                    // Add transition from clip i to clip i+1.
                    if (videoComposition.CustomVideoCompositorClass != null)
                    {
                        var sources = new NSNumber[] {
                            new NSNumber(compositionVideoTracks [0].TrackID),
                            new NSNumber(compositionVideoTracks [1].TrackID)
                        };
                        var videoInstruction = new CustomVideoCompositionInstruction(sources, transitionTimeRanges [i]);
                        if (alternatingIndex == 0)
                        {
                            videoInstruction.ForegroundTrackID = compositionVideoTracks [alternatingIndex].TrackID;
                            videoInstruction.BackgroundTrackID = compositionVideoTracks [1 - alternatingIndex].TrackID;
                        }

                        instructions.Add(videoInstruction);
                    }
                    else
                    {
                        var transitionInstruction = AVMutableVideoCompositionInstruction.Create() as AVMutableVideoCompositionInstruction;
                        transitionInstruction.TimeRange = transitionTimeRanges [i];
                        var fromLayer = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [alternatingIndex]);
                        var toLayer   = AVMutableVideoCompositionLayerInstruction.FromAssetTrack(compositionVideoTracks [1 - alternatingIndex]);
                        transitionInstruction.LayerInstructions = new AVVideoCompositionLayerInstruction[] {
                            fromLayer,
                            toLayer,
                        };
                        instructions.Add(transitionInstruction);
                    }
                }
            }

            videoComposition.Instructions = instructions.ToArray();
        }
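The CustomVideoCompositorClass branches above only fire when a custom compositor has been registered on the video composition; a hedged sketch of that registration, with CustomVideoCompositor standing in for the project's IAVVideoCompositing implementation:

            videoComposition.CustomVideoCompositorClass = new Class(typeof(CustomVideoCompositor));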
		public AVAssetReaderVideoCompositionOutput FromTracks (AVAssetTrack [] videoTracks, AVVideoSettings videoSettings)
		{
			return WeakFromTracks (videoTracks, videoSettings == null ? null : videoSettings.ToDictionary ());
		}
		public AVAssetReaderVideoCompositionOutput (AVAssetTrack [] videoTracks, AVVideoSettings videoSettings)
		: this (videoTracks, videoSettings == null ? null : videoSettings.ToDictionary ())
		{
		}
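These two wrappers come from the binding layer; a hedged usage sketch (assetReader, composition, and videoComposition are assumed to exist as in the earlier examples):

		var videoTracks = composition.TracksWithMediaType (AVMediaType.Video);
		var compositionOutput = new AVAssetReaderVideoCompositionOutput (videoTracks, (AVVideoSettings)null);
		compositionOutput.VideoComposition = videoComposition;
		if (assetReader.CanAddOutput (compositionOutput))
			assetReader.AddOutput (compositionOutput);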
Example #20
		private void SetupAssetReaderWriterForVideo (AVAssetTrack videoTrack)
		{
			throw new NotImplementedException("64 bit");
			/*
			if (videoTrack == null)
				return;

			// Decompress to ARGB with the asset reader
			// TODO: https://trello.com/c/Kvzxo86W. Set kCVPixelBufferIOSurfacePropertiesKey to empty dictionary
			//														(id)kCVPixelBufferIOSurfacePropertiesKey : @{}
			var decompSettings = new AVVideoSettingsUncompressed {
				PixelFormatType = CVPixelFormatType.CV32BGRA,
				AllocateWithIOSurface = true
			};
			AVAssetReaderOutput output = new AVAssetReaderTrackOutput(videoTrack, decompSettings);
			_assetReader.AddOutput (output);

			// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
			CMFormatDescription formatDescription = videoTrack.FormatDescriptions.FirstOrDefault ();

			// Grab track dimensions from format description
			SizeF trackDimensions = formatDescription != null
				? formatDescription.GetVideoPresentationDimensions (false, false)
				: videoTrack.NaturalSize;

			// Grab clean aperture, pixel aspect ratio from format description
			AVVideoCodecSettings compressionSettings = null;
			if (formatDescription != null) {
				var cleanApertureDescr = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.CleanApertureKey);
				var pixelAspectRatioDescr = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.PixelAspectRatioKey);
				compressionSettings = CreateCodecSettingsFor (cleanApertureDescr, pixelAspectRatioDescr);
			}

			// Compress to H.264 with the asset writer
			var videoSettings = new AVVideoSettingsCompressed {
				Codec = AVVideoCodec.H264,
				Width = (int)trackDimensions.Width,
				Height = (int)trackDimensions.Height,
				CodecSettings = compressionSettings
			};
			AVAssetWriterInput input = AVAssetWriterInput.FromType (videoTrack.MediaType, videoSettings.Dictionary);
			input.Transform = videoTrack.PreferredTransform;
			_assetWriter.AddInput (input);

			// Create and save an instance of AAPLRWSampleBufferChannel,
			// which will coordinate the work of reading and writing sample buffers
			_videoSampleBufferChannel = new ReadWriteSampleBufferChannel (output, input, true);
			*/
		}
Example #21
		void SetupAssetReaderWriterForVideo (AVAssetTrack videoTrack)
		{
			if (videoTrack == null)
				return;

			// Decompress to ARGB with the asset reader
			var decompSettings = new AVVideoSettingsUncompressed {
				PixelFormatType = CVPixelFormatType.CV32BGRA,
				AllocateWithIOSurface = null
			};
			AVAssetReaderOutput output = new AVAssetReaderTrackOutput (videoTrack, decompSettings);
			assetReader.AddOutput (output);

			// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
			var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault ();
			// Grab track dimensions from format description
			CGSize trackDimensions = formatDescription != null
				? formatDescription.GetPresentationDimensions (false, false)
				: videoTrack.NaturalSize;

			// Grab clean aperture, pixel aspect ratio from format description
			AVVideoCodecSettings compressionSettings = null;
			if (formatDescription != null) {
				var cleanApertureDescr = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.CleanApertureKey);
				var pixelAspectRatioDescr = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.PixelAspectRatioKey);
				compressionSettings = CreateCodecSettingsFor (cleanApertureDescr, pixelAspectRatioDescr);
			}

			// Compress to H.264 with the asset writer
			var videoSettings = new AVVideoSettingsCompressed {
				Codec = AVVideoCodec.H264,
				Width = (int)trackDimensions.Width,
				Height = (int)trackDimensions.Height,
				CodecSettings = compressionSettings
			};
			AVAssetWriterInput input = new AVAssetWriterInput (videoTrack.MediaType, videoSettings);
			input.Transform = videoTrack.PreferredTransform;
			assetWriter.AddInput (input);

			// Create and save an instance of ReadWriteSampleBufferChannel,
			// which will coordinate the work of reading and writing sample buffers
			videoSampleBufferChannel = new VideoChannel (output, input, transformer);
		}
Example #22
		private void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
		{
			if (audioTrack == null)
				return;

			// Decompress to Linear PCM with the asset reader
			AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create (audioTrack, (AudioSettings)null);
			_assetReader.AddOutput (output);

			AVAssetWriterInput input = AVAssetWriterInput.Create (audioTrack.MediaType, (AudioSettings)null);
			_assetWriter.AddInput (input);

			// Create and save an instance of AAPLRWSampleBufferChannel,
			// which will coordinate the work of reading and writing sample buffers
			_audioSampleBufferChannel = new ReadWriteSampleBufferChannel (output, input, false);
		}
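Neither audio example shows the teardown; a hedged sketch of finishing the transfer once both sample-buffer channels report completion (FinishWriting with a completion handler is the standard AVFoundation call, though this method is not part of the sample):

		private void FinishReadingAndWriting()
		{
			if (_assetReader.Status == AVAssetReaderStatus.Reading)
				_assetReader.CancelReading();

			_assetWriter.FinishWriting(() =>
			{
				// Inspect _assetWriter.Status and _assetWriter.Error to report the outcome.
			});
		}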