public void StartContentEditing(PHContentEditingInput contentEditingInput, UIImage placeholderImage)
 {
     // Present content for editing and keep the contentEditingInput for use when closing the edit session.
     // If you returned true from CanHandleAdjustmentData(), contentEditingInput has the original image and adjustment data.
     // If you returned false, the contentEditingInput has past edits "baked in".
     input = contentEditingInput;
 }
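
The comments above mention CanHandleAdjustmentData(), which none of the snippets on this page show. A minimal sketch of what it might look like, assuming the same usings as the surrounding snippets (Photos, Foundation) and an app that archives its filter name under a made-up format identifier ("com.example.cifilter" and the "1.0" version string below are placeholders, not values taken from these examples):

public bool CanHandleAdjustmentData (PHAdjustmentData adjustmentData)
{
    // Sketch only: accept adjustment data this app wrote itself, so Photos hands back
    // the original image plus adjustment data instead of a "baked in" render.
    return adjustmentData.FormatIdentifier == "com.example.cifilter" &&
           adjustmentData.FormatVersion == "1.0";
}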
Example #2
        void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Load AVAsset to process from input.
            var avAsset = input.AudiovisualAsset;

            if (avAsset == null)
            {
                throw new InvalidProgramException("can't get AV asset to edit");
            }

            // Set up a video composition to apply the filter.
            var composition = AVVideoComposition.CreateVideoComposition(avAsset, request => {
                filter.Image = request.SourceImage;
                var filtered = filter.OutputImage;
                request.Finish(filtered, null);
            });

            // Export the video composition to the output URL.
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44443
            var export = new AVAssetExportSession(avAsset, AVAssetExportSession.PresetHighestQuality)
            {
                OutputFileType   = AVFileType.QuickTimeMovie,
                OutputUrl        = output.RenderedContentUrl,
                VideoComposition = composition
            };

            export.ExportAsynchronously(completion);
        }
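
ApplyVideoFilter above (and the ApplyPhotoFilter / ApplyLivePhotoFilter helpers further down) are invoked when the edit session is committed. That entry point, FinishContentEditing, is not included in these snippets; the following is a hedged sketch of how it could tie them together, assuming the controller keeps the editing input in contentEditingInput and the chosen filter name in selectedFilterName, and using a placeholder format identifier for the adjustment data:

public void FinishContentEditing (Action<PHContentEditingOutput> completionHandler)
{
    // Sketch only: render the edit into a PHContentEditingOutput and hand it back to Photos.
    var output = new PHContentEditingOutput (contentEditingInput);

    // Record which filter was applied so a later session can resume from it
    // (counterpart of FetchAdjustmentFilterName below). Identifier/version are placeholders.
    var archived = NSKeyedArchiver.ArchivedDataWithRootObject ((NSString) selectedFilterName);
    output.AdjustmentData = new PHAdjustmentData ("com.example.cifilter", "1.0", archived);

    var filter = CIFilter.FromName (selectedFilterName);
    Action done = () => completionHandler (output);

    if (contentEditingInput.LivePhoto != null)
        ApplyLivePhotoFilter (filter, contentEditingInput, output, done);
    else if (contentEditingInput.MediaType == PHAssetMediaType.Video)
        ApplyVideoFilter (filter, contentEditingInput, output, done);
    else
        ApplyPhotoFilter (filter, contentEditingInput, output, done);
}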
 public void Linker()
 {
     using (var cei = new PHContentEditingInput())
         using (var lpec = new PHLivePhotoEditingContext(cei)) {
             // not much but it means the linker cannot remove it
             Assert.Null(lpec.FrameProcessor2, "FrameProcessor2");
         }
 }
        void TryDisposeContentInput()
        {
            if (contentEditingInput == null)
            {
                return;
            }

            contentEditingInput.Dispose();
            contentEditingInput = null;
        }
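
TryDisposeContentInput has no caller in these snippets; one natural place to call it is when the session ends without saving. A small illustrative sketch, assuming it sits on the same editing controller:

public void CancelContentEditing ()
{
    // Sketch only: drop the kept editing input when the user abandons the edit.
    TryDisposeContentInput ();
}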
        static string FetchAdjustmentFilterName(PHContentEditingInput contentEditingInput)
        {
            string filterName = null;

            try {
                PHAdjustmentData adjustmentData = contentEditingInput.AdjustmentData;
                if (adjustmentData != null)
                {
                    filterName = (NSString)NSKeyedUnarchiver.UnarchiveObject(adjustmentData.Data);
                }
            } catch (Exception exception) {
                Console.WriteLine("Exception decoding adjustment data: {0}", exception);
            }

            return filterName;
        }
        public void StartContentEditing(PHContentEditingInput input, UIImage placeholderImage)
        {
            // Present content for editing and keep the contentEditingInput for use when closing the edit session.
            // If you returned true from CanHandleAdjustmentData(), contentEditingInput has the original image and adjustment data.
            // If you returned false, the contentEditingInput has past edits "baked in".
            contentEditingInput = input;

            // Load input image
            switch (contentEditingInput.MediaType)
            {
            case PHAssetMediaType.Image:
                inputImage = contentEditingInput.DisplaySizeImage;
                break;

            case PHAssetMediaType.Video:
                inputImage = ImageFor(contentEditingInput.AvAsset, 0);
                break;

            default:
                break;
            }

            // Load adjustment data, if any
            try {
                PHAdjustmentData adjustmentData = contentEditingInput.AdjustmentData;
                if (adjustmentData != null)
                {
                    selectedFilterName = (string)(NSString)NSKeyedUnarchiver.UnarchiveObject(adjustmentData.Data);
                }
            } catch (Exception exception) {
                Console.WriteLine("Exception decoding adjustment data: {0}", exception);
            }

            if (string.IsNullOrWhiteSpace(selectedFilterName))
            {
                selectedFilterName = "CISepiaTone";
            }

            initialFilterName = selectedFilterName;

            // Update filter and background image
            UpdateFilter();
            UpdateFilterPreview();
            BackgroundImageView.Image = placeholderImage;
        }
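
ImageFor(contentEditingInput.AvAsset, 0) above is not defined anywhere on this page. A plausible implementation pulls a still frame with AVAssetImageGenerator; the name and signature below simply mirror the call site (and assume AVFoundation/CoreMedia usings) and are otherwise an assumption:

static UIImage ImageFor (AVAsset avAsset, double seconds)
{
    // Sketch only: grab a preview frame at the requested time for use as the input image.
    using (var generator = new AVAssetImageGenerator (avAsset)) {
        generator.AppliesPreferredTrackTransform = true;

        CMTime actualTime;
        NSError error;
        using (var cgImage = generator.CopyCGImageAtTime (CMTime.FromSeconds (seconds, 600), out actualTime, out error)) {
            if (cgImage == null) {
                Console.WriteLine ("can't generate preview frame: {0}", error);
                return null;
            }
            return UIImage.FromImage (cgImage);
        }
    }
}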
Example #8
        void ApplyPhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Load the full size image.
            var inputImage = new CIImage(input.FullSizeImageUrl);

            // Apply the filter.
            filter.Image = inputImage.CreateWithOrientation(input.FullSizeImageOrientation);
            var outputImage = filter.OutputImage;

            // Write the edited image as a JPEG.
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
            NSError error;

            if (!ciContext.WriteJpegRepresentation(outputImage, output.RenderedContentUrl, inputImage.ColorSpace(), new NSDictionary(), out error))
            {
                throw new InvalidProgramException($"can't apply filter to image: {error.LocalizedDescription}");
            }

            completion();
        }
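
The CIFilter passed into ApplyPhotoFilter above is built elsewhere; the StartContentEditing snippets call an UpdateFilter helper for that. A minimal sketch, under the assumption that the controller stores the filter in a `filter` field keyed by selectedFilterName (the inputIntensity value is an illustrative choice for CISepiaTone):

void UpdateFilter ()
{
    // Sketch only: rebuild the Core Image filter whenever the selected name changes.
    filter = CIFilter.FromName (selectedFilterName);

    // CISepiaTone takes an inputIntensity parameter; other filters may need different keys.
    if (selectedFilterName == "CISepiaTone")
        filter.SetValueForKey (NSNumber.FromFloat (1.0f), (NSString) "inputIntensity");
}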
Example #9
        public unsafe void FrameProcessingBlock2()
        {
            using (var cei = new PHContentEditingInput())
                using (var lpec = new PHLivePhotoEditingContext(cei)) {
                    // not much but it means the linker cannot remove it
                    Assert.Null(lpec.FrameProcessor2, "FrameProcessor2");
                }

            // Nested types use '+' in reflection type names.
            var t = typeof(NSObject).Assembly.GetType("ObjCRuntime.Trampolines+SDPHLivePhotoFrameProcessingBlock2");

            Assert.NotNull(t, "SDPHLivePhotoFrameProcessingBlock2");

            var m = t.GetMethod("Invoke", BindingFlags.Static | BindingFlags.NonPublic);

            Assert.NotNull(m, "Invoke");
            var d = m.CreateDelegate(typeof(DPHLivePhotoFrameProcessingBlock2));

            Action userDelegate = new Action(() => Console.WriteLine("Hello world!"));

            BlockLiteral bl = new BlockLiteral();

            bl.SetupBlock(d, userDelegate);
            try {
                var block = &bl;
                var b     = (IntPtr)block;

                // simulate a call that does not produce an error
                var args = new object [] { b, IntPtr.Zero, IntPtr.Zero };
                error_faker = null;
                Assert.That(m.Invoke(null, args), Is.EqualTo(IntPtr.Zero), "1");

                // simulate a call that does produce an error
                error_faker = new NSError((NSString)"domain", 42);
                Assert.That(m.Invoke(null, args), Is.EqualTo(IntPtr.Zero), "2");
                Assert.That(args [2], Is.EqualTo(error_faker.Handle), "error");
            }
            finally {
                bl.CleanupBlock();
            }
        }
Example #10
        void ApplyLivePhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // This app filters assets only for output. In an app that previews
            // filters while editing, create a livePhotoContext early and reuse it
            // to render both for previewing and for final output.
            var livePhotoContext = new PHLivePhotoEditingContext(input);

            livePhotoContext.FrameProcessor = (frame, _) => {
                filter.Image = frame.Image;
                return filter.OutputImage;
            };
            livePhotoContext.SaveLivePhoto(output, null, (success, error) => {
                if (success)
                {
                    completion();
                }
                else
                {
                    Console.WriteLine("can't output live photo");
                }
            });
        }
        public void StartContentEditing(PHContentEditingInput input, UIImage placeholderImage)
        {
            // Present content for editing and keep the contentEditingInput for use when closing the edit session.
            // If you returned true from CanHandleAdjustmentData(), contentEditingInput has the original image and adjustment data.
            // If you returned false, the contentEditingInput has past edits "baked in".
            contentEditingInput = input;

            // Load input image
            switch (contentEditingInput.MediaType)
            {
            case PHAssetMediaType.Image:
                inputImage = contentEditingInput.DisplaySizeImage;
                break;

            case PHAssetMediaType.Video:
                inputImage = ImageFor(contentEditingInput.AvAsset, 0);
                break;

            default:
                break;
            }

            // Load adjustment data, if any
            selectedFilterName = FetchAdjustmentFilterName(contentEditingInput);
            if (string.IsNullOrWhiteSpace(selectedFilterName))
            {
                selectedFilterName = "CISepiaTone";
            }

            initialFilterName = selectedFilterName;

            // Update filter and background image
            UpdateFilter();
            UpdateFilterPreview();
            BackgroundImageView.Image = placeholderImage;
        }
 public void StartContentEditing(PHContentEditingInput contentEditingInput, UIImage placeholderImage)
 {
     input = contentEditingInput;
 }
        void TryDisposeContentInput()
        {
            if (contentEditingInput == null)
                return;

            contentEditingInput.Dispose ();
            contentEditingInput = null;
        }
        static string FetchAdjustmentFilterName(PHContentEditingInput contentEditingInput)
        {
            string filterName = null;

            try {
                PHAdjustmentData adjustmentData = contentEditingInput.AdjustmentData;
                if (adjustmentData != null)
                    filterName = (NSString)NSKeyedUnarchiver.UnarchiveObject (adjustmentData.Data);
            } catch (Exception exception) {
                Console.WriteLine ("Exception decoding adjustment data: {0}", exception);
            }

            return filterName;
        }
        public void StartContentEditing(PHContentEditingInput input, UIImage placeholderImage)
        {
            // Present content for editing and keep the contentEditingInput for use when closing the edit session.
            // If you returned true from CanHandleAdjustmentData(), contentEditingInput has the original image and adjustment data.
            // If you returned false, the contentEditingInput has past edits "baked in".
            contentEditingInput = input;

            // Load input image
            switch (contentEditingInput.MediaType) {
                case PHAssetMediaType.Image:
                    inputImage = contentEditingInput.DisplaySizeImage;
                    break;

                case PHAssetMediaType.Video:
                    inputImage = ImageFor (contentEditingInput.AvAsset, 0);
                    break;

                default:
                    break;
            }

            // Load adjustment data, if any
            selectedFilterName = FetchAdjustmentFilterName (contentEditingInput);
            if (string.IsNullOrWhiteSpace (selectedFilterName))
                selectedFilterName = "CISepiaTone";

            initialFilterName = selectedFilterName;

            // Update filter and background image
            UpdateFilter ();
            UpdateFilterPreview ();
            BackgroundImageView.Image = placeholderImage;
        }
		public void StartContentEditing (PHContentEditingInput input, UIImage placeholderImage)
		{
			// Present content for editing and keep the contentEditingInput for use when closing the edit session.
			// If you returned true from CanHandleAdjustmentData(), contentEditingInput has the original image and adjustment data.
			// If you returned false, the contentEditingInput has past edits "baked in".
			contentEditingInput = input;

			// Load input image
			switch (contentEditingInput.MediaType) {
			case PHAssetMediaType.Image:
				inputImage = contentEditingInput.DisplaySizeImage;
				break;

			case PHAssetMediaType.Video:
				inputImage = ImageFor (contentEditingInput.AvAsset, 0);
				break;

			default:
				break;
			}

			// Load adjustment data, if any
			try {
				PHAdjustmentData adjustmentData = contentEditingInput.AdjustmentData;
				if (adjustmentData != null)
					selectedFilterName = (string)(NSString)NSKeyedUnarchiver.UnarchiveObject (adjustmentData.Data);
			} catch (Exception exception) {
				Console.WriteLine ("Exception decoding adjustment data: {0}", exception);
			}

			if (string.IsNullOrWhiteSpace (selectedFilterName))
				selectedFilterName = "CISepiaTone";

			initialFilterName = selectedFilterName;

			// Update filter and background image
			UpdateFilter ();
			UpdateFilterPreview ();
			BackgroundImageView.Image = placeholderImage;
		}
		void ApplyPhotoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
		{
			// Load the full size image.
			var inputImage = new CIImage (input.FullSizeImageUrl);

			// Apply the filter.
			filter.Image = inputImage.CreateWithOrientation (input.FullSizeImageOrientation);
			var outputImage = filter.OutputImage;

			// Write the edited image as a JPEG.
			// TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
			NSError error;
			if (!ciContext.WriteJpegRepresentation (outputImage, output.RenderedContentUrl, inputImage.ColorSpace (), new NSDictionary(), out error))
				throw new InvalidProgramException ($"can't apply filter to image: {error.LocalizedDescription}");

			completion ();
		}
		void ApplyLivePhotoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
		{
			// This app filters assets only for output. In an app that previews
			// filters while editing, create a livePhotoContext early and reuse it
			// to render both for previewing and for final output.
			var livePhotoContext = new PHLivePhotoEditingContext (input);

			livePhotoContext.FrameProcessor = (frame, _) => {
				filter.Image = frame.Image;
				return filter.OutputImage;
			};
			livePhotoContext.SaveLivePhoto (output, null, (success, error) => {
				if (success)
					completion ();
				else
					Console.WriteLine ("can't output live photo");
			});
		}
		void ApplyVideoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
		{
			// Load AVAsset to process from input.
			var avAsset = input.AudiovisualAsset;
			if (avAsset == null)
				throw new InvalidProgramException ("can't get AV asset to edit");

			// Set up a video composition to apply the filter.
			var composition = AVVideoComposition.CreateVideoComposition (avAsset, request => {
				filter.Image = request.SourceImage;
				var filtered = filter.OutputImage;
				request.Finish (filtered, null);
			});

			// Export the video composition to the output URL.
			// TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44443
			var export = new AVAssetExportSession (avAsset, AVAssetExportSession.PresetHighestQuality) {
				OutputFileType = AVFileType.QuickTimeMovie,
				OutputUrl = output.RenderedContentUrl,
				VideoComposition = composition
			};
			export.ExportAsynchronously (completion);
		}