/// <summary>
/// Compresses the video at <paramref name="sourceFilePath"/> into an MP4 written to the
/// user's Documents folder and returns the compressed bytes as a stream.
/// </summary>
/// <param name="sourceFilePath">Path of the video to compress.</param>
/// <param name="destinationFilePath">Currently unused; output always goes to the Documents folder. TODO confirm intent.</param>
/// <param name="deleteSourceFile">When true, the source file is deleted after a successful compression.</param>
/// <returns>A rewound <see cref="MemoryStream"/> with the compressed video, or null on failure.</returns>
public MemoryStream CompressVideo( string sourceFilePath, string destinationFilePath, bool deleteSourceFile )
{
    try
    {
        string downloadPath = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
        string fileName = Path.GetFileNameWithoutExtension( sourceFilePath ) + ".mp4";
        string downloadFilePath = Path.Combine(downloadPath, fileName );

        var asset = AVAsset.FromUrl( NSUrl.FromFilename( sourceFilePath ) );
        using (var export = new AVAssetExportSession(asset, AVAssetExportSession.PresetLowQuality))
        {
            export.OutputUrl = NSUrl.FromFilename( downloadFilePath );
            export.OutputFileType = AVFileType.Mpeg4;
            export.ShouldOptimizeForNetworkUse = true;

            // NOTE(review): blocking here keeps the synchronous signature, but this can
            // deadlock if called on a context-bound thread — prefer an async variant.
            export.ExportTaskAsync().Wait();
        }

        MemoryStream ms = new MemoryStream();
        // using guarantees the file handle is released even if CopyTo throws
        // (the original leaked the FileStream on failure).
        using (var file = new FileStream( downloadFilePath, FileMode.Open, FileAccess.Read))
        {
            file.CopyTo( ms );
        }

        // Rewind so callers can read from the start — CopyTo leaves the
        // position at the end of the stream.
        ms.Position = 0;

        // Honor the deleteSourceFile flag, which the original accepted but ignored.
        if (deleteSourceFile && File.Exists(sourceFilePath))
        {
            File.Delete(sourceFilePath);
        }

        return ms;
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine ( ex.Message );
        return null;
    }
}
/// <summary>
/// Compresses the video at <paramref name="inputPath"/> into <paramref name="outputPath"/>,
/// choosing an export preset based on how far the source bitrate exceeds the target.
/// </summary>
/// <param name="inputPath">Arquivo de Origem (source file).</param>
/// <param name="outputPath">Arquivo de Destino (destination file); deleted first when it already exists.</param>
/// <param name="bitrateMode">10 selects the bitrateMode10 target, any other value selects bitrateMode2.</param>
public async Task CompressVideo(string inputPath, string outputPath, int bitrateMode = 10)
{
    AVAssetExportSessionPreset quality = AVAssetExportSessionPreset.Preset1280x720;
    float bitrate = 0;

    // Delete any previously compressed output so the export cannot fail on an existing file.
    if (File.Exists(outputPath))
    {
        File.Delete(outputPath);
    }

    // Probe the source video's estimated data rate (converted to KBit/s); best effort only.
    try
    {
        NSUrl source = NSUrl.FromFilename(inputPath);
        var videoAsset = new AVUrlAsset(source);
        var videoTrack = videoAsset.Tracks.First(x => x.MediaType == AVMediaType.Video);
        bitrate = videoTrack.EstimatedDataRate;
        bitrate /= 1024;
    }
    catch (Exception ex)
    {
        // Keep going with the default preset, but don't swallow the reason silently
        // (the original had a bare empty catch).
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }

    // Map the requested mode onto the configured target bitrate.
    bitrateMode = bitrateMode == 10 ? bitrateMode10 : bitrateMode2;

    // Drop the preset quality the further the source bitrate exceeds the target.
    if (bitrate > 0 && bitrate > bitrateMode)
    {
        float reduce = (float)bitrate / (float)bitrateMode;
        if (reduce > 6)
        {
            quality = AVAssetExportSessionPreset.LowQuality;
        }
        else if (reduce > 1.1)
        {
            quality = AVAssetExportSessionPreset.MediumQuality;
        }
    }

    // Run the export itself; RunExportAsync reports success/failure via events.
    try
    {
        var asset = AVAsset.FromUrl(NSUrl.FromFilename(inputPath));
        AVAssetExportSession export = new AVAssetExportSession(asset, quality);
        export.OutputUrl = NSUrl.FromFilename(outputPath);
        export.OutputFileType = AVFileType.Mpeg4;
        export.ShouldOptimizeForNetworkUse = true;
        await RunExportAsync(export);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Erro ao comprimir video");
        // Also log the underlying cause instead of discarding it.
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
}
/// <summary>
/// Mirrors the export session's progress into the progress view while it is exporting.
/// </summary>
private void UpdateProgress(AVAssetExportSession session)
{
    // Progress is only meaningful while the session is actively exporting.
    if (session == null || session.Status != AVAssetExportSessionStatus.Exporting)
    {
        return;
    }

    this.exportProgressView.Progress = session.Progress;
}
/// <summary>
/// Exports the video at <paramref name="inputPath"/> as a medium-quality MP4 at
/// <paramref name="outputPath"/>.
/// </summary>
/// <param name="inputPath">Source video file path.</param>
/// <param name="outputPath">Destination file path for the compressed MP4.</param>
/// <returns>A task resolving to true only when both the export and the save succeed.</returns>
public Task <bool> CompressVideo(string inputPath, string outputPath)
{
    var task = new TaskCompletionSource <bool>();

    NSString urlString = new NSString(inputPath);
    NSUrl myFileUrl = new NSUrl(urlString);

    var export = new AVAssetExportSession(AVAsset.FromUrl(myFileUrl), AVAssetExportSession.PresetMediumQuality);
    string videoFilename = outputPath;
    export.OutputUrl = NSUrl.FromFilename(videoFilename);
    export.OutputFileType = AVFileType.Mpeg4;
    export.ShouldOptimizeForNetworkUse = true;

    export.ExportAsynchronously(() =>
    {
        if (export.Status == AVAssetExportSessionStatus.Completed)
        {
            // Use the session's output URL directly instead of round-tripping it through a string.
            var videoData = NSData.FromUrl(export.OutputUrl);
            NSError err = null;
            // BUG FIX: the original reported true in BOTH branches of Save,
            // masking save failures; report the actual outcome.
            task.SetResult(videoData.Save(videoFilename, false, out err));
        }
        else
        {
            task.SetResult(false);
        }
    });

    return(task.Task);
}
/// <summary>
/// Completion callback for the static exporter: clears the in-flight session,
/// moves the temp file into place on success and raises the matching event.
/// </summary>
private static void _OnExportDone()
{
    // Capture and clear the active exporter first so handlers can start a new export.
    var finishedExporter = _exporter;
    _exporter = null;
    _avAsset = null;

    if (finishedExporter.status != AVAssetExportSessionStatus.Completed)
    {
        if (_exportFailedHandlers != null)
        {
            _exportFailedHandlers(null, new U3DXTErrorEventArgs(finishedExporter.error));
        }
        return;
    }

    // Promote the temp file to the final output path unless they already coincide.
    if (_tmpPath != _outputPath)
    {
        if (File.Exists(_outputPath))
        {
            File.Delete(_outputPath);
        }
        File.Move(_tmpPath, _outputPath);
    }

    if (_exportCompletedHandlers != null)
    {
        _exportCompletedHandlers(null, new MediaExportedEventArgs(_outputPath));
    }
}
/// <summary>
/// Creates an export session over the built composition with its video
/// composition already attached.
/// </summary>
/// <param name="presetName">AVFoundation export preset name to use.</param>
public AVAssetExportSession AssetExportSession(string presetName)
{
    return new AVAssetExportSession(Composition, presetName)
    {
        VideoComposition = VideoComposition
    };
}
/// <summary>
/// Applies a Core Image filter to every frame of the input's AV asset and
/// exports the filtered movie to the editing output's render URL.
/// </summary>
void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
{
    // The editing input must carry an AV asset for us to filter.
    var sourceAsset = input.AudiovisualAsset;
    if (sourceAsset == null)
    {
        throw new InvalidProgramException("can't get AV asset to edit");
    }

    // Route each source frame through the filter.
    var filteredComposition = AVVideoComposition.CreateVideoComposition(sourceAsset, request =>
    {
        filter.Image = request.SourceImage;
        request.Finish(filter.OutputImage, null);
    });

    // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44443
    var exportSession = new AVAssetExportSession(sourceAsset, AVAssetExportSession.PresetHighestQuality);
    exportSession.OutputFileType = AVFileType.QuickTimeMovie;
    exportSession.OutputUrl = output.RenderedContentUrl;
    exportSession.VideoComposition = filteredComposition;
    exportSession.ExportAsynchronously(completion);
}
/// <summary>
/// Completion callback for the export session: tears down the progress UI,
/// reports errors, saves the exported movie to the photo album and re-enables controls.
/// </summary>
void exportCompleted(AVAssetExportSession session)
{
    exportProgressView.Hidden = true;
    currentTimeLabel.Hidden = false;

    var exportedUrl = session.OutputUrl;

    progressTimer.Invalidate();
    progressTimer = null;

    if (session.Status != AVAssetExportSessionStatus.Completed)
    {
        Console.WriteLine("exportSession error:{0}", session.Error.LocalizedDescription);
        reportError(session.Error);
        return;
    }

    exportProgressView.Progress = 1f;

    // Copy the finished movie into the saved-photos album.
    var photoLibrary = new ALAssetsLibrary();
    photoLibrary.WriteVideoToSavedPhotosAlbum(exportedUrl, (assetURL, error) =>
    {
        if (error == null)
        {
            return;
        }
        Console.WriteLine("writeVideoToAssetsLibrary failed: {0}", error.LocalizedDescription);
        reportError(error);
    });

    Player.Play();

    playPauseButton.Enabled = true;
    transitionButton.Enabled = true;
    scrubber.Enabled = true;
    exportButton.Enabled = true;
}
/// <summary>
/// Converts a picked video to a 720p MP4, generates a JPEG thumbnail from its
/// first frame and raises <c>VideoPicked</c> with both file paths.
/// </summary>
/// <param name="file">The picked media file; ignored when null.</param>
void HandleVideo(MediaFile file)
{
    if (file == null)
    {
        return;
    }

    var mediaUrl = file.Path;
    var documentsDirectory = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
    var outputFile = Path.Combine(documentsDirectory, Guid.NewGuid().ToString() + ".mp4");

    AVUrlAsset asset = new AVUrlAsset(NSUrl.CreateFileUrl(new string[] { mediaUrl }));
    AVAssetExportSession exportSession = new AVAssetExportSession(asset, AVAssetExportSession.Preset1280x720);
    exportSession.OutputUrl = NSUrl.CreateFileUrl(new string[] { outputFile });
    exportSession.OutputFileType = AVFileType.Mpeg4;

    LoadingScreen.Show();
    LoadingScreen.SetText("Converting");

    exportSession.ExportAsynchronously(() =>
    {
        InvokeOnMainThread(() =>
        {
            // Log conversion failures instead of the original's dead "int i = 3" breakpoint
            // stub; thumbnail generation still proceeds to preserve the original flow.
            if (exportSession.Error != null)
            {
                System.Diagnostics.Debug.WriteLine(exportSession.Error.LocalizedDescription);
            }

            // Generate a thumbnail from the first frame of the source video.
            AVUrlAsset thumbAsset = new AVUrlAsset(NSUrl.CreateFileUrl(new string[] { mediaUrl }));
            AVAssetImageGenerator generator = new AVAssetImageGenerator(thumbAsset);
            generator.AppliesPreferredTrackTransform = true;

            var thumbTime = new CMTime(0, 30);
            NSValue[] vals = new NSValue[] { NSValue.FromCMTime(thumbTime) };

            generator.GenerateCGImagesAsynchronously(vals, (requestedTime, imageRef, actualTime, result, error) =>
            {
                var previewImage = System.IO.Path.Combine(documentsDirectory, Guid.NewGuid() + ".jpg");
                NSError err;
                UIImage.FromImage(new CGImage(imageRef)).AsJPEG(.75f).Save(previewImage, false, out err);

                InvokeOnMainThread(() =>
                {
                    LoadingScreen.Hide();
                    VideoPicked?.Invoke(outputFile, previewImage);
                });
            });
        });
    });
}
/// <summary>
/// Exports the audio of a media item to storage.
/// </summary>
/// <returns><c>true</c> if audio started exporting, <c>false</c> if it is exporting another audio or the media item has DRM.</returns>
/// <param name="mediaItem">Media item.</param>
/// <param name="outputFolder">Absolute output folder or specify null to use Documents folder.</param>
/// <param name="outputFile">Output file name or specify null to use <c>[artist] - [title].[extension]</c>.</param>
/// <param name="overwrite">Whether to overwrite the output file.</param>
public static bool ExportAudio(MPMediaItem mediaItem, string outputFolder = null, string outputFile = null, bool overwrite = false)
{
    // Only one export may run at a time.
    if (_exporter != null)
    {
        return(false);
    }
    if (mediaItem == null)
    {
        return(false);
    }
    if (outputFolder == null)
    {
        outputFolder = Application.persistentDataPath;
    }

    // A missing asset URL indicates a DRM-protected item that cannot be exported.
    NSURL assetURL = mediaItem.Value(MPMediaItem.PropertyAssetURL) as NSURL;
    if (assetURL == null)
    {
        return(false);
    }

    if (outputFile == null)
    {
        string artist = mediaItem.Value(MPMediaItem.PropertyArtist) as string;
        string title = mediaItem.Value(MPMediaItem.PropertyTitle) as string;
        // Keep the source asset's container extension (with any query string stripped).
        string extension = Path.GetExtension(assetURL.AbsoluteString().Split('?')[0]);
        outputFile = artist + " - " + title + extension;
    }

    _outputPath = outputFolder + "/" + outputFile;
    if (!overwrite && File.Exists(_outputPath))
    {
        return(false);
    }

    // Passthrough preset copies the audio stream without re-encoding.
    _avAsset = AVURLAsset.Asset(assetURL);
    _exporter = new AVAssetExportSession(_avAsset, AVAssetExportSession.AVAssetExportPresetPassthrough);
    _exporter.outputFileType = "com.apple.quicktime-movie";
    _exporter.shouldOptimizeForNetworkUse = true;

    // Export to a uniquely named temp file first; _OnExportDone moves it to _outputPath.
    string tmpExt = UTType.CopyPreferredTag(_exporter.outputFileType, UTType.kUTTagClassFilenameExtension);
    _tmpPath = Application.persistentDataPath + "/" + UUID.Generate() + "." + tmpExt;
    _exporter.outputURL = new NSURL(_tmpPath, false);
    _exporter.ExportAsynchronously(_OnExportDone);

    return(true);
}
/// <summary>
/// Converts the video at <paramref name="videoLocation"/> to a low-quality MP4 in
/// the Documents folder and returns a readable stream over the result.
/// </summary>
/// <param name="videoLocation">Path of the source video.</param>
/// <returns>An open read stream over the converted file; the caller owns its disposal.</returns>
/// <exception cref="InvalidOperationException">Thrown when the export does not complete.</exception>
public async Task <Stream> ConvertToMP4(string videoLocation)
{
    string finishedPath = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
    string finishedFilePath = Path.Combine(finishedPath, $"{Guid.NewGuid()}.mp4");

    var asset = AVAsset.FromUrl(NSUrl.FromFilename(videoLocation));
    AVAssetExportSession exportSession = new AVAssetExportSession(asset, AVAssetExportSession.PresetLowQuality);
    exportSession.OutputUrl = NSUrl.FromFilename(finishedFilePath);
    exportSession.OutputFileType = AVFileType.Mpeg4;
    exportSession.ShouldOptimizeForNetworkUse = true;

    await exportSession.ExportTaskAsync();

    // Fail loudly with the export error instead of letting File.Open surface an
    // unrelated FileNotFoundException when the export did not finish.
    if (exportSession.Status != AVAssetExportSessionStatus.Completed)
    {
        throw new InvalidOperationException($"Video export failed: {exportSession.Error?.LocalizedDescription}");
    }

    return(File.Open(finishedFilePath, FileMode.Open));
}
/// <summary>
/// Awaits the export session and raises Success or Fail based on its final status.
/// </summary>
private async Task RunExportAsync(AVAssetExportSession exp)
{
    await exp.ExportTaskAsync();

    switch (exp.Status)
    {
        case AVAssetExportSessionStatus.Completed:
            Success(this, true);
            break;
        case AVAssetExportSessionStatus.Failed:
            Fail(this, exp.Error.Description);
            break;
        default:
            Fail(this, "Fail to compress video. Error unknown");
            break;
    }
}
/// <summary>
/// Runs when the export session finishes: restores the UI, reports failures and,
/// on success, saves the exported movie to the user's photo library.
/// </summary>
private void OnExportCompleted(AVAssetExportSession session)
{
    this.exportProgressView.Hidden = true;
    this.currentTimeLabel.Hidden = false;

    var exportedUrl = session.OutputUrl;

    this.progressTimer.Invalidate();
    this.progressTimer.Dispose();
    this.progressTimer = null;

    if (session.Status != AVAssetExportSessionStatus.Completed)
    {
        Console.WriteLine($"exportSession error:{session.Error}");
        this.ReportError(session.Error);
        return;
    }

    this.exportProgressView.Progress = 1f;

    // Save the exported movie to the camera roll (no-op when not authorized).
    PHPhotoLibrary.RequestAuthorization((status) =>
    {
        if (status != PHAuthorizationStatus.Authorized)
        {
            return;
        }

        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(
            () => PHAssetChangeRequest.FromVideo(exportedUrl),
            (successfully, error) =>
            {
                if (error != null)
                {
                    Console.WriteLine($"writeVideoToAssestsLibrary failed: {error}");
                    this.ReportError(error);
                }

                base.InvokeOnMainThread(() =>
                {
                    this.playPauseButton.Enabled = true;
                    this.transitionButton.Enabled = true;
                    this.scrubber.Enabled = true;
                    this.exportButton.Enabled = true;
                });
            });
    });
}
/// <summary>
/// Exports the [startTime, startTime + durationTime] section of the given video
/// to Documents/output.mp4 at the highest-quality preset.
/// </summary>
/// <returns>The URL of the trimmed file, or null when preparation or export fails.</returns>
public static async Task <NSUrl> CropVideoAsync(NSUrl url, nfloat startTime, nfloat durationTime)
{
    // Resolve (and if necessary create) the output directory.
    var outputFolder = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
    if (!Directory.Exists(outputFolder))
    {
        Directory.CreateDirectory(outputFolder);
    }

    var outputPath = Path.Combine(outputFolder, "output.mp4");
    try
    {
        File.Delete(outputPath);
    }
    catch
    {
        Debug.WriteLine("Export failed to remove destination file");
        return(null);
    }

    // Configure the export over the requested time range (600 ticks/second timescale).
    var sourceAsset = new AVUrlAsset(url, (AVUrlAssetOptions)null);
    var exportSession = new AVAssetExportSession(sourceAsset, AVAssetExportSession.PresetHighestQuality)
    {
        OutputUrl = new NSUrl(outputPath, false),
        ShouldOptimizeForNetworkUse = true,
        OutputFileType = AVFileType.Mpeg4,
        TimeRange = new CMTimeRange
        {
            Start = CMTime.FromSeconds(startTime, 600),
            Duration = CMTime.FromSeconds(durationTime, 600)
        }
    };

    await exportSession.ExportTaskAsync();

    if (exportSession.Status != AVAssetExportSessionStatus.Completed)
    {
        Debug.WriteLine("Export failed: " + exportSession.Status);
        return(null);
    }

    return(exportSession.OutputUrl);
}
/// <summary>
/// Starts an asynchronous low-quality audio export of the picked asset to
/// Documents/output.mp3, logging the final session status.
/// </summary>
/// <param name="url">URL of the source asset picked by the user.</param>
void GetAssetFromUrl(Foundation.NSUrl url)
{
    try
    {
        var inputPath = url;
        var outputPath = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "/output.mp3";
        var outputURL = new NSUrl(outputPath);

        // NOTE(review): the output extension is .mp3 but CoreAudioFormat writes a
        // Core Audio (.caf) container — a likely cause of the "Failed" status below;
        // confirm the intended output format.
        var asset = new AVUrlAsset(inputPath, (AVUrlAssetOptions)null);
        // Use the named preset constant rather than the raw preset string.
        var exportSession = AVAssetExportSession.FromAsset(asset, AVAssetExportSession.PresetLowQuality);
        exportSession.OutputUrl = outputURL;
        exportSession.OutputFileType = AVFileType.CoreAudioFormat;
        exportSession.ExportAsynchronously(() =>
        {
            Console.WriteLine(exportSession.Status); //prints status "Failed"....
            exportSession.Dispose();
        });
    }
    catch (Exception ex)
    {
        // Log instead of silently swallowing every failure (original had an empty catch).
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
}
/// <summary>
/// Trims the video at <paramref name="sourcePath"/> to the [startTime, endTime]
/// window (seconds) and writes the result to <paramref name="destinationPath"/>
/// without re-encoding (passthrough preset).
/// </summary>
public async Task <OperationResult> TrimVideo(string sourcePath, string destinationPath, double startTime, double endTime)
{
    if (string.IsNullOrEmpty(sourcePath) || !File.Exists(sourcePath))
    {
        return(OperationResult.AsFailure("Invalid video file path specified"));
    }

    var sourceUrl = NSUrl.CreateFileUrl(sourcePath, false, null);
    var sourceAsset = AVAsset.FromUrl(sourceUrl);

    var session = new AVAssetExportSession(sourceAsset, AVAssetExportSession.PresetPassthrough)
    {
        OutputUrl = NSUrl.FromFilename(destinationPath),
        OutputFileType = AVFileType.Mpeg4
    };

    // Express the trim window in the asset's own timescale for frame accuracy.
    var timeScale = sourceAsset.Duration.TimeScale;
    session.TimeRange = new CMTimeRange
    {
        Start = CMTime.FromSeconds(startTime, timeScale),
        Duration = CMTime.FromSeconds(endTime - startTime, timeScale)
    };

    await session.ExportTaskAsync();

    switch (session.Status)
    {
        case AVAssetExportSessionStatus.Cancelled:
            return(OperationResult.AsCancel());
        case AVAssetExportSessionStatus.Failed:
            return(OperationResult.AsFailure(session.Error.LocalizedDescription));
        default:
            return(OperationResult.AsSuccess());
    }
}
/// <summary>
/// Compresses <paramref name="inputURL"/> to a medium-quality QuickTime movie at
/// <paramref name="outputURL"/>, then hands the resulting bytes to the camera
/// element on the main thread.
/// </summary>
private void compressVideo(NSUrl inputURL, NSUrl outputURL)
{
    // (The original also declared an unused "url" local here; removed.)
    var urlAsset = new AVUrlAsset(inputURL);
    AVAssetExportSession exportSession = new AVAssetExportSession(urlAsset, AVAssetExportSessionPreset.MediumQuality);
    exportSession.OutputUrl = outputURL;
    exportSession.OutputFileType = AVFileType.QuickTimeMovie;
    exportSession.ShouldOptimizeForNetworkUse = true;

    exportSession.ExportAsynchronously(() =>
    {
        // Read the finished file back into a managed byte array.
        NSData data = NSData.FromUrl(outputURL);
        byte[] dataBytes = new byte[data.Length];
        System.Runtime.InteropServices.Marshal.Copy(data.Bytes, dataBytes, 0, Convert.ToInt32(data.Length));

        UIApplication.SharedApplication.InvokeOnMainThread(delegate
        {
            (Element as CustomVideoCamera).SetPhotoResult(outputURL.ToString(), dataBytes, 0, 0);
            activityIndicator.StopAnimating();
        });
    });
}
/// <summary>
/// Export-session completion handler: stops the progress UI, surfaces any export
/// error, writes the movie to the saved-photos album and re-enables playback controls.
/// </summary>
void exportCompleted (AVAssetExportSession session)
{
    exportProgressView.Hidden = true;
    currentTimeLabel.Hidden = false;

    var movieUrl = session.OutputUrl;

    progressTimer.Invalidate ();
    progressTimer = null;

    bool succeeded = session.Status == AVAssetExportSessionStatus.Completed;
    if (!succeeded) {
        Console.WriteLine ("exportSession error:{0}", session.Error.LocalizedDescription);
        reportError (session.Error);
        return;
    }

    exportProgressView.Progress = 1f;

    // Persist the exported movie into the saved-photos album.
    new ALAssetsLibrary ().WriteVideoToSavedPhotosAlbum (movieUrl, (assetURL, error) => {
        if (error != null) {
            Console.WriteLine ("writeVideoToAssetsLibrary failed: {0}", error.LocalizedDescription);
            reportError (error);
        }
    });

    Player.Play ();

    playPauseButton.Enabled = true;
    transitionButton.Enabled = true;
    scrubber.Enabled = true;
    exportButton.Enabled = true;
}
/// <summary>
/// Handles multi-asset picker completion: saves each picked photo/video into the
/// temporal directory (generating a JPEG thumbnail for videos), raises
/// OnMediaPicked per item, and completes mediaPickTcs with the full list.
/// </summary>
async void FinishedPickingAssets(object sender, MultiAssetEventArgs args)
{
    IList <MediaFile> results = new List <MediaFile>();
    TaskCompletionSource <IList <MediaFile> > tcs = new TaskCompletionSource <IList <MediaFile> >();

    // Allow iCloud downloads; fast resize but high-quality delivery.
    var options = new Photos.PHImageRequestOptions() { NetworkAccessAllowed = true };
    options.Synchronous = false;
    options.ResizeMode = PHImageRequestOptionsResizeMode.Fast;
    options.DeliveryMode = PHImageRequestOptionsDeliveryMode.HighQualityFormat;

    // Guards against completing tcs more than once from concurrent callbacks.
    bool completed = false;

    for (var i = 0; i < args.Assets.Length; i++)
    {
        var asset = args.Assets[i];

        // The original file name is only available through PHAssetResource on iOS 9+.
        string fileName = string.Empty;
        if (UIDevice.CurrentDevice.CheckSystemVersion(9, 0))
        {
            fileName = PHAssetResource.GetAssetResources(asset).FirstOrDefault().OriginalFilename;
        }

        switch (asset.MediaType)
        {
            case PHAssetMediaType.Video:
                // First fetch a 150x150 preview image to use as the video's thumbnail.
                PHImageManager.DefaultManager.RequestImageForAsset(asset, new SizeF(150.0f, 150.0f), PHImageContentMode.AspectFill, options, async (img, info) =>
                {
                    // Thumbnail path is derived from the base file name: "<name>-THUMBNAIL.JPG".
                    var startIndex = fileName.IndexOf(".", StringComparison.CurrentCulture);
                    string path = "";
                    if (startIndex != -1)
                    {
                        path = FileHelper.GetOutputPath(MediaFileType.Image, TemporalDirectoryName, $"{fileName.Substring(0, startIndex)}-THUMBNAIL.JPG");
                    }
                    else
                    {
                        path = FileHelper.GetOutputPath(MediaFileType.Image, TemporalDirectoryName, string.Empty);
                    }
                    if (!File.Exists(path))
                    {
                        img.AsJPEG().Save(path, true);
                    }

                    // Then export the video itself to the temporal directory.
                    TaskCompletionSource <string> tvcs = new TaskCompletionSource <string>();
                    var vOptions = new PHVideoRequestOptions();
                    vOptions.NetworkAccessAllowed = true;
                    vOptions.Version = PHVideoRequestOptionsVersion.Original;
                    vOptions.DeliveryMode = PHVideoRequestOptionsDeliveryMode.FastFormat;
                    PHImageManager.DefaultManager.RequestAvAsset(asset, vOptions, (avAsset, audioMix, vInfo) =>
                    {
                        var vPath = FileHelper.GetOutputPath(MediaFileType.Video, TemporalDirectoryName, fileName);
                        // NOTE(review): when the file already exists, tvcs is never completed
                        // and the await below hangs forever — confirm whether that case can occur.
                        if (!File.Exists(vPath))
                        {
                            AVAssetExportSession exportSession = new AVAssetExportSession(avAsset, AVAssetExportSession.PresetHighestQuality);
                            exportSession.OutputUrl = NSUrl.FromFilename(vPath);
                            exportSession.OutputFileType = AVFileType.QuickTimeMovie;
                            exportSession.ExportAsynchronously(() =>
                            {
                                Console.WriteLine(exportSession.Status);
                                tvcs.TrySetResult(vPath);
                                //exportSession.Dispose();
                            });
                        }
                    });

                    var videoUrl = await tvcs.Task;

                    var meFile = new MediaFile()
                    {
                        Type = MediaFileType.Video,
                        Path = videoUrl,
                        PreviewPath = path
                    };
                    results.Add(meFile);
                    OnMediaPicked?.Invoke(this, meFile);

                    // Complete the overall task once every asset has been processed.
                    if (args.Assets.Length == results.Count && !completed)
                    {
                        completed = true;
                        tcs.TrySetResult(results);
                    }
                });
                break;

            default:
                // Photos (and anything else): save the raw image data to the temporal directory.
                Photos.PHImageManager.DefaultManager.RequestImageData(asset, options, (data, dataUti, orientation, info) =>
                {
                    string path = FileHelper.GetOutputPath(MediaFileType.Image, TemporalDirectoryName, fileName);
                    if (!File.Exists(path))
                    {
                        Debug.WriteLine(dataUti);
                        var imageData = data;
                        //var image = UIImage.LoadFromData(imageData);
                        //if (imageScale < 1.0f)
                        //{
                        //    //begin resizing image
                        //    image = image.ResizeImageWithAspectRatio((float)imageScale);
                        //}
                        //if (imageQuality < 100)
                        //{
                        //    imageData = image.AsJPEG(Math.Min(imageQuality,100));
                        //}
                        imageData?.Save(path, true);
                    }

                    var meFile = new MediaFile()
                    {
                        Type = MediaFileType.Image,
                        Path = path,
                        PreviewPath = path
                    };
                    results.Add(meFile);
                    OnMediaPicked?.Invoke(this, meFile);

                    // Complete the overall task once every asset has been processed.
                    if (args.Assets.Length == results.Count && !completed)
                    {
                        completed = true;
                        tcs.TrySetResult(results);
                    }
                });
                break;
        }
    }

    mediaPickTcs?.TrySetResult(await tcs.Task);
}
/// <summary>
/// Handles the media-picker selection: for each locally available (non-cloud) song,
/// starts an M4A export and, on completion, broadcasts the audio bytes via
/// MessagingCenter; finally completes _audioPickedTask with the picked items.
/// </summary>
private void OnAudioPicked(object sender, ItemsPickedEventArgs e)
{
    var media = new List <AttachmentMediaFile>();
    var picker = sender as MPMediaPickerController;
    picker.ItemsPicked -= OnAudioPicked;
    picker.DismissViewController(true, null);

    if (e.MediaItemCollection.Items != null)
    {
        foreach (var item in e.MediaItemCollection.Items)
        {
            // Cloud-only items have no local asset to export.
            if (!item.IsCloudItem)
            {
                try
                {
                    MPMediaItem song;
                    song = item;
                    NSUrl assetURL = song.AssetURL;
                    var songAsset = new AVUrlAsset(assetURL);
                    AVAssetExportSession exporter = new AVAssetExportSession(songAsset, AVAssetExportSessionPreset.AppleM4A);
                    exporter.OutputFileType = @"com.apple.m4a-audio";

                    // NOTE(review): every item is exported to the same fixed path, so
                    // concurrent exports of multiple picks overwrite each other — verify.
                    var documents = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
                    var exportFile = documents + "/exported.m4a";
                    if (File.Exists(exportFile))
                    {
                        File.Delete(exportFile);
                    }
                    var exportURL = NSUrl.CreateFileUrl(new string[] { exportFile });
                    exporter.OutputUrl = exportURL;

                    exporter.ExportAsynchronously(() =>
                    {
                        Console.WriteLine("#####################" + exporter.Status); //prints status "Failed"....
                        var status = exporter.Status;
                        switch (status)
                        {
                            case AVAssetExportSessionStatus.Failed:
                                var err = exporter.Error;
                                Console.WriteLine(err);
                                break;
                            case AVAssetExportSessionStatus.Completed:
                                Debug.WriteLine("##########***************");
                                // Broadcast the exported audio bytes keyed by the song title.
                                var data = NSData.FromUrl(exportURL);
                                var array = ToByte(data);
                                MessagingCenter.Send <string, byte[]>(item.Title, "NotificationRecieved", array);
                                break;
                        }
                        exporter.Dispose();
                    });

                    media.Add(new AttachmentMediaFile(item.AssetURL.AbsoluteString, AttachmentMediaFileType.Audio, null, item.Title));
                }
                catch (Exception ex)
                {
                    // NOTE(review): per-item failures are deliberately swallowed so one bad
                    // item doesn't abort the rest — consider at least logging ex.
                    // throw ;
                }
            }
        }
    }

    _audioPickedTask.TrySetResult(media);
}
/// <summary>
/// Finishes the static export: detaches the session state, promotes the temp
/// file to the output path on success and notifies the registered handlers.
/// </summary>
private static void _OnExportDone()
{
    // Detach session state before invoking callbacks so a handler can start a new export.
    var doneExporter = _exporter;
    _exporter = null;
    _avAsset = null;

    bool completedOk = doneExporter.status == AVAssetExportSessionStatus.Completed;
    if (completedOk)
    {
        // Move the temp file into its final location when the paths differ.
        if (_tmpPath != _outputPath)
        {
            if (File.Exists(_outputPath))
                File.Delete(_outputPath);
            File.Move(_tmpPath, _outputPath);
        }

        if (_exportCompletedHandlers != null)
            _exportCompletedHandlers(null, new MediaExportedEventArgs(_outputPath));
    }
    else
    {
        if (_exportFailedHandlers != null)
            _exportFailedHandlers(null, new U3DXTErrorEventArgs(doneExporter.error));
    }
}
/// <summary>
/// Exports the audio of a media item to storage.
/// </summary>
/// <returns><c>true</c> if audio started exporting, <c>false</c> if it is exporting another audio or the media item has DRM.</returns>
/// <param name="mediaItem">Media item.</param>
/// <param name="outputFolder">Absolute output folder or specify null to use Documents folder.</param>
/// <param name="outputFile">Output file name or specify null to use <c>[artist] - [title].[extension]</c>.</param>
/// <param name="overwrite">Whether to overwrite the output file.</param>
public static bool ExportAudio(MPMediaItem mediaItem, string outputFolder = null, string outputFile = null, bool overwrite = false)
{
    // Refuse to start while another export is in flight, or with nothing to export.
    if (_exporter != null || mediaItem == null)
        return false;

    if (outputFolder == null)
        outputFolder = Application.persistentDataPath;

    // A missing asset URL indicates a DRM-protected item that cannot be exported.
    NSURL assetURL = mediaItem.Value(MPMediaItem.PropertyAssetURL) as NSURL;
    if (assetURL == null)
        return false;

    // Default file name: "[artist] - [title].[source extension]".
    if (outputFile == null)
    {
        string artist = mediaItem.Value(MPMediaItem.PropertyArtist) as string;
        string title = mediaItem.Value(MPMediaItem.PropertyTitle) as string;
        string extension = Path.GetExtension(assetURL.AbsoluteString().Split('?')[0]);
        outputFile = artist + " - " + title + extension;
    }

    _outputPath = outputFolder + "/" + outputFile;
    if (!overwrite && File.Exists(_outputPath))
        return false;

    // Passthrough preset copies the audio stream without re-encoding.
    _avAsset = AVURLAsset.Asset(assetURL);
    _exporter = new AVAssetExportSession(_avAsset, AVAssetExportSession.AVAssetExportPresetPassthrough);
    _exporter.outputFileType = "com.apple.quicktime-movie";
    _exporter.shouldOptimizeForNetworkUse = true;

    // Write to a uniquely-named temp file first; _OnExportDone moves it into place.
    string tmpExt = UTType.CopyPreferredTag(_exporter.outputFileType, UTType.kUTTagClassFilenameExtension);
    _tmpPath = Application.persistentDataPath + "/" + UUID.Generate() + "." + tmpExt;
    _exporter.outputURL = new NSURL(_tmpPath, false);
    _exporter.ExportAsynchronously(_OnExportDone);

    return true;
}
/// <summary>
/// Filters every frame of the input's AV asset with the given Core Image filter
/// and exports the result to the editing output's render URL.
/// </summary>
void ApplyVideoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
{
	// The editing input must carry an AV asset for us to process.
	var asset = input.AudiovisualAsset;
	if (asset == null) {
		throw new InvalidProgramException ("can't get AV asset to edit");
	}

	// Route each source frame through the filter via a video composition.
	var videoComposition = AVVideoComposition.CreateVideoComposition (asset, request => {
		filter.Image = request.SourceImage;
		var result = filter.OutputImage;
		request.Finish (result, null);
	});

	// Export the filtered movie to the output URL.
	// TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44443
	var session = new AVAssetExportSession (asset, AVAssetExportSession.PresetHighestQuality);
	session.OutputFileType = AVFileType.QuickTimeMovie;
	session.OutputUrl = output.RenderedContentUrl;
	session.VideoComposition = videoComposition;
	session.ExportAsynchronously (completion);
}
// Configures toolbar/navigation buttons based on what edit operations the
// current asset supports and its media type.
void UpdateToolbars()
{
    // Enable editing buttons if the asset can be edited.
    EditButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Content);
    FavoriteButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Properties);
    FavoriteButton.Title = Asset.Favorite ? "♥︎" : "♡";

    // Enable the trash button if the asset can be deleted
    // (removal from the collection when one is set, outright deletion otherwise).
    if (AssetCollection != null)
    {
        TrashButton.Enabled = AssetCollection.CanPerformEditOperation(PHCollectionEditOperation.RemoveContent);
    }
    else
    {
        TrashButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Delete);
    }

    // Set the appropriate toolbarItems based on the mediaType of the asset.
    if (Asset.MediaType == PHAssetMediaType.Video)
    {
#if __TVOS__
        NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { PlayButton, FavoriteButton, TrashButton };
#elif __IOS__
        ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, PlayButton, Space, TrashButton };
        if (NavigationController != null)
        {
            NavigationController.ToolbarHidden = false;
        }
#endif
    }
    else
    {
#if __TVOS__
        // In tvOS, PHLivePhotoView doesn't do playback gestures,
        // so add a play button for Live Photos.
        // NOTE(review): the brace/semicolon placement below (`} ; else`) looks
        // mangled and would not compile if __TVOS__ were defined — verify.
        if (Asset.PlaybackStyle == PHAssetPlaybackStyle.LivePhoto)
        {
            NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { LivePhotoPlayButton, FavoriteButton, TrashButton }
        }
        ; else
        {
            NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { FavoriteButton, TrashButton }
        };
#elif __IOS__
        // In iOS, present both stills and Live Photos the same way, because
        // PHLivePhotoView provides the same gesture-based UI as in Photos app.
        ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, TrashButton };
        if (NavigationController != null)
        {
            NavigationController.ToolbarHidden = false;
        }
#endif
    }
}

// Fetches a still (or video preview) image for the asset and shows it in
// ImageView, driving ProgressView while the request may download from iCloud.
void UpdateStillImage()
{
    // Prepare the options to pass when fetching the (photo, or video preview) image.
    var options = new PHImageRequestOptions
    {
        DeliveryMode = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
        NetworkAccessAllowed = true,
        ProgressHandler = (double progress, NSError error, out bool stop, NSDictionary info) =>
        {
            stop = false;
            // Handler might not be called on the main queue, so re-dispatch for UI work.
            DispatchQueue.MainQueue.DispatchSync(() =>
            {
                ProgressView.Progress = (float)progress;
            });
        }
    };

    ProgressView.Hidden = false;
    PHImageManager.DefaultManager.RequestImageForAsset(Asset, GetTargetSize(), PHImageContentMode.AspectFit, options, (image, info) =>
    {
        // Hide the progress view now the request has completed.
        ProgressView.Hidden = true;

        // If successful, show the image view and display the image.
        if (image == null)
        {
            return;
        }

        // Now that we have the image, show it.
        ImageView.Hidden = false;
        ImageView.Image = image;
    });
}

// Fetches the Live Photo for the asset, shows it and plays a short
// playback hint similar to the Photos share sheet.
void UpdateLivePhoto()
{
    // Prepare the options to pass when fetching the live photo.
    var options = new PHLivePhotoRequestOptions
    {
        DeliveryMode = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
        NetworkAccessAllowed = true,
        ProgressHandler = (double progress, NSError error, out bool stop, NSDictionary dictionary) =>
        {
            stop = false;
            // Handler might not be called on the main queue, so re-dispatch for UI work.
            DispatchQueue.MainQueue.DispatchSync(() => ProgressView.Progress = (float)progress);
        }
    };

    ProgressView.Hidden = false;
    // Request the live photo for the asset from the default PHImageManager.
    PHImageManager.DefaultManager.RequestLivePhoto(Asset, GetTargetSize(), PHImageContentMode.AspectFit, options, (livePhoto, info) =>
    {
        // Hide the progress view now the request has completed.
        ProgressView.Hidden = true;

        // If successful, show the live photo view and display the live photo.
        if (livePhoto == null)
        {
            return;
        }

        // Now that we have the Live Photo, show it.
        ImageView.Hidden = true;
        AnimatedImageView.Hidden = true;
        LivePhotoView.Hidden = false;
        LivePhotoView.LivePhoto = livePhoto;

        // Playback a short section of the live photo; similar to the Photos share sheet.
        if (!isPlayingHint)
        {
            isPlayingHint = true;
            LivePhotoView.StartPlayback(PHLivePhotoViewPlaybackStyle.Hint);
        }
    });
}

// Fetches the asset's original image data (e.g. an animated GIF) and plays it
// in AnimatedImageView.
void UpdateAnimatedImage()
{
    // Prepare the options to pass when fetching the (photo, or video preview) image.
    var options = new PHImageRequestOptions
    {
        DeliveryMode = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
        Version = PHImageRequestOptionsVersion.Original,
        NetworkAccessAllowed = true,
        ProgressHandler = (double progress, NSError error, out bool stop, NSDictionary info) =>
        {
            stop = false;
            // Handler might not be called on the main queue, so re-dispatch for UI work.
            DispatchQueue.MainQueue.DispatchSync(() =>
            {
                ProgressView.Progress = (float)progress;
            });
        }
    };

    ProgressView.Hidden = false;
    PHImageManager.DefaultManager.RequestImageData(Asset, options, (data, dataUti, orientation, info) =>
    {
        // Hide the progress view now the request has completed.
        ProgressView.Hidden = true;

        // If successful, show the image view and display the image.
        if (data == null)
        {
            return;
        }

        var animatedImage = new AnimatedImage(data);

        LivePhotoView.Hidden = true;
        ImageView.Hidden = true;
        AnimatedImageView.Hidden = false;
        AnimatedImageView.AnimatedImage = animatedImage;
        AnimatedImageView.IsPlaying = true;
    });
}
#endregion

#region Asset editing
// Discards all edits, restoring the asset's original content in the library.
void RevertAsset(UIAlertAction action)
{
    PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
    {
        var request = PHAssetChangeRequest.ChangeRequest(Asset);
        request.RevertAssetContentToOriginal();
    }, (success, error) =>
    {
        if (!success)
        {
            Console.WriteLine($"can't revert asset: {error.LocalizedDescription}");
        }
    });
}

// Applies the given Core Image filter to the asset (photo, Live Photo or video)
// and commits the edited output back to the Photos library.
void ApplyFilter(CIFilter filter)
{
    // Set up a handler to make sure we can handle prior edits.
    var options = new PHContentEditingInputRequestOptions();
    options.CanHandleAdjustmentData = (adjustmentData =>
    {
        return(adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion);
    });

    // Prepare for editing.
    Asset.RequestContentEditingInput(options, (input, requestStatusInfo) =>
    {
        if (input == null)
        {
            throw new InvalidProgramException($"can't get content editing input: {requestStatusInfo}");
        }

        // This handler gets called on the main thread; dispatch to a background queue for processing.
        DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() =>
        {
            // Create a PHAdjustmentData object that describes the filter that was applied.
            var adjustmentData = new PHAdjustmentData(
                formatIdentifier,
                formatVersion,
                NSData.FromString(filter.Name, NSStringEncoding.UTF8));

            // NOTE:
            // This app's filter UI is fire-and-forget. That is, the user picks a filter,
            // and the app applies it and outputs the saved asset immediately. There's
            // no UI state for having chosen but not yet committed an edit. This means
            // there's no role for reading adjustment data -- you do that to resume
            // in-progress edits, and this sample app has no notion of "in-progress".
            //
            // However, it's still good to write adjustment data so that potential future
            // versions of the app (or other apps that understand our adjustement data
            // format) could make use of it.

            // Create content editing output, write the adjustment data.
            var output = new PHContentEditingOutput(input)
            {
                AdjustmentData = adjustmentData
            };

            // Select a filtering function for the asset's media type.
            Action <CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> applyFunc;
            if (Asset.MediaSubtypes.HasFlag(PHAssetMediaSubtype.PhotoLive))
            {
                applyFunc = ApplyLivePhotoFilter;
            }
            else if (Asset.MediaType == PHAssetMediaType.Image)
            {
                applyFunc = ApplyPhotoFilter;
            }
            else
            {
                applyFunc = ApplyVideoFilter;
            }

            // Apply the filter.
            applyFunc(filter, input, output, () =>
            {
                // When rendering is done, commit the edit to the Photos library.
                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
                {
                    var request = PHAssetChangeRequest.ChangeRequest(Asset);
                    request.ContentEditingOutput = output;
                }, (success, error) =>
                {
                    if (!success)
                    {
                        Console.WriteLine($"can't edit asset: {error.LocalizedDescription}");
                    }
                });
            });
        });
    });
}

// Applies the filter to a still photo and writes the result as a JPEG.
void ApplyPhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
{
    // Load the full size image.
    var inputImage = new CIImage(input.FullSizeImageUrl);
    // NOTE(review): a constructor cannot return null in C#, so this guard can
    // never fire; a failed load would surface later instead — verify.
    if (inputImage == null)
    {
        throw new InvalidProgramException("can't load input image to edit");
    }

    // Apply the filter.
    filter.Image = inputImage.CreateWithOrientation(input.FullSizeImageOrientation);
    var outputImage = filter.OutputImage;

    // Write the edited image as a JPEG.
    // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
    NSError error;
    if (!ciContext.WriteJpegRepresentation(outputImage, output.RenderedContentUrl, inputImage.ColorSpace, new NSDictionary(), out error))
    {
        throw new InvalidProgramException($"can't apply filter to image: {error.LocalizedDescription}");
    }
    completion();
}

void ApplyLivePhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
{
    // This app filters assets only for output. In an app that previews
    // filters while editing, create a livePhotoContext early and reuse it
    // to render both for previewing and for final output.
var livePhotoContext = new PHLivePhotoEditingContext(input); livePhotoContext.FrameProcessor2 = (IPHLivePhotoFrame frame, ref NSError _) => { filter.Image = frame.Image; return(filter.OutputImage); }; livePhotoContext.SaveLivePhoto(output, (PHLivePhotoEditingOption)null, (success, error) => { if (success) { completion(); } else { Console.WriteLine("can't output live photo"); } }); // Applying edits to a Live Photo currently crashes // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=58227 } void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion) { // Load AVAsset to process from input. var avAsset = input.AudiovisualAsset; if (avAsset == null) { throw new InvalidProgramException("can't get AV asset to edit"); } // Set up a video composition to apply the filter. var composition = AVVideoComposition.CreateVideoComposition(avAsset, request => { filter.Image = request.SourceImage; var filtered = filter.OutputImage; request.Finish(filtered, null); }); // Export the video composition to the output URL. var export = new AVAssetExportSession(avAsset, AVAssetExportSessionPreset.HighestQuality) { OutputFileType = AVFileType.QuickTimeMovie, OutputUrl = output.RenderedContentUrl, VideoComposition = composition }; export.ExportAsynchronously(completion); }
/// <summary>
/// Mixes the audio track of <paramref name="audioFilePath"/> into the video at
/// <paramref name="videoFilePath"/> and exports the result as an MP4 to
/// <paramref name="outputFilePath"/>.
/// </summary>
/// <param name="videoFilePath">Path of the source video file.</param>
/// <param name="audioFilePath">Path of the audio file to mix in.</param>
/// <param name="outputFilePath">Destination path for the exported MP4.</param>
/// <param name="volume">Initial volume applied to the added audio track.</param>
/// <param name="fadeOutDuration">Seconds over which the audio ramps to silence at the end of the video; 0 disables the fade.</param>
/// <returns>A task completing with success, or a failure carrying the error description.</returns>
public Task <OperationResult> AddAudioToVideoTrack(string videoFilePath, string audioFilePath, string outputFilePath, float volume = 1, float fadeOutDuration = 0)
{
    // RunContinuationsAsynchronously keeps awaiting continuations off the
    // AVFoundation export callback thread.
    var tcs = new TaskCompletionSource <OperationResult>(TaskCreationOptions.RunContinuationsAsynchronously);

    var composition = AVMutableComposition.Create();
    // Track id 0 lets AVFoundation pick a free persistent track id.
    var videoCompositionTrack = composition.AddMutableTrack(AVMediaType.Video, 0);
    var audioCompositionTrack = composition.AddMutableTrack(AVMediaType.Audio, 0);

    var videoAsset = AVAsset.FromUrl(NSUrl.FromFilename(videoFilePath));
    var videoAssetTrack = videoAsset.TracksWithMediaType(AVMediaType.Video).First();

    var audioAsset = AVAsset.FromUrl(NSUrl.FromFilename(audioFilePath));
    var audioAssetTrack = audioAsset.TracksWithMediaType(AVMediaType.Audio).First();

    // Both tracks cover the full duration of the video.
    var range = new CMTimeRange { Start = CMTime.Zero, Duration = videoAssetTrack.TimeRange.Duration };

    NSError error = null;
    videoCompositionTrack.InsertTimeRange(range, videoAssetTrack, CMTime.Zero, out error);
    if (error != null)
    {
        // BUGFIX: previously only logged, then exported anyway and reported success.
        Console.WriteLine("Error adding video composition track: " + error.LocalizedDescription);
        return Task.FromResult(OperationResult.AsFailure(error.LocalizedDescription));
    }

    error = null;
    audioCompositionTrack.InsertTimeRange(range, audioAssetTrack, CMTime.Zero, out error);
    if (error != null)
    {
        Console.WriteLine("Error adding audio composition track: " + error.LocalizedDescription);
        return Task.FromResult(OperationResult.AsFailure(error.LocalizedDescription));
    }

    var audioMix = AVMutableAudioMix.Create();
    var audioInputParams = AVMutableAudioMixInputParameters.FromTrack(audioCompositionTrack);
    audioInputParams.SetVolume(volume, CMTime.Zero);

    if (fadeOutDuration > 0)
    {
        var fadeLength = CMTime.FromSeconds(fadeOutDuration, audioAssetTrack.NaturalTimeScale);
        var fadeOutStartTime = CMTime.Subtract(videoAssetTrack.TimeRange.Duration, fadeLength);
        // BUGFIX: a fade longer than the video would start at a negative time; clamp to zero.
        if (fadeOutStartTime.Seconds < 0)
        {
            fadeOutStartTime = CMTime.Zero;
        }
        var fadeOutRange = new CMTimeRange { Start = fadeOutStartTime, Duration = fadeLength };
        audioInputParams.SetVolumeRamp(volume, 0.0f, fadeOutRange);
    }

    audioMix.InputParameters = new[] { audioInputParams };

    var session = new AVAssetExportSession(composition, AVAssetExportSession.PresetHighestQuality)
    {
        OutputUrl = NSUrl.FromFilename(outputFilePath),
        OutputFileType = AVFileType.Mpeg4,
        AudioMix = audioMix
    };

    session.ExportAsynchronously(() =>
    {
        // BUGFIX: only Completed counts as success; a cancelled export was
        // previously reported as success because only Failed was checked.
        if (session.Status == AVAssetExportSessionStatus.Completed)
        {
            tcs.SetResult(OperationResult.AsSuccess());
        }
        else
        {
            tcs.SetResult(OperationResult.AsFailure(session.Error?.LocalizedDescription ?? session.Status.ToString()));
        }
    });

    return (tcs.Task);
}
public AVAssetExportSession AssetExportSession (string presetName) { var session = new AVAssetExportSession (Composition, presetName); session.VideoComposition = VideoComposition; session.AudioMix = AudioMix; return session; }