/// <summary>
/// Trims the composition's single clip from the end by the slider value,
/// refreshes the preview stream, and updates the status UI.
/// </summary>
private void TrimClip_Click(object sender, RoutedEventArgs e)
{
    // Get the first clip in the MediaComposition. We created the composition from
    // a single video file, so this should be the only clip it contains.
    MediaClip clip = composition.Clips.FirstOrDefault();
    if (clip == null)
    {
        // Defensive guard: FirstOrDefault returns null for an empty collection,
        // and the original dereferenced it unconditionally.
        return;
    }

    // Trim the end of the clip (use TrimTimeFromStart to trim from the beginning).
    clip.TrimTimeFromEnd = TimeSpan.FromMilliseconds((long)EndTrimSlider.Value);

    // Rewind so playback restarts at the beginning of the trimmed clip.
    EditorMediaElement.Position = TimeSpan.Zero;

    // Regenerate the preview stream so the MediaElement reflects the trimmed clip.
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)EditorMediaElement.ActualWidth,
        (int)EditorMediaElement.ActualHeight);
    EditorMediaElement.SetMediaStreamSource(mediaStreamSource);

    // Update the UI.
    EndTrimSlider.Value = 0;
    StatusTextBlock.Text = "Clip trimmed! Trim again or click Save.";
    StatusTextBlock.Foreground = new SolidColorBrush(Colors.LawnGreen);
    SaveButton.IsEnabled = true;
}
/// <summary>
/// Prompts the user for a background audio file and, if one is picked, rebuilds
/// the single-clip composition from the previously picked video, attaches the
/// audio as a background track, and previews the result in the MediaElement.
/// </summary>
private async void AddAudio_Click(object sender, RoutedEventArgs e)
{
    // Pick the background audio FIRST: the original rebuilt the composition before
    // showing the picker, so a cancelled pick still clobbered the current composition.
    var picker = new Windows.Storage.Pickers.FileOpenPicker();
    picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.MusicLibrary;
    picker.FileTypeFilter.Add(".mp3");
    picker.FileTypeFilter.Add(".wav");
    picker.FileTypeFilter.Add(".flac");
    var audioFile = await picker.PickSingleFileAsync();
    if (audioFile == null)
    {
        rootPage.NotifyUser("File picking cancelled", NotifyType.ErrorMessage);
        return;
    }

    // Recreate the original single-clip composition from the picked video file.
    var clip = await MediaClip.CreateFromFileAsync(pickedFile);
    composition = new MediaComposition();
    composition.Clips.Add(clip);

    // Add background audio.
    var backgroundTrack = await BackgroundAudioTrack.CreateFromFileAsync(audioFile);
    composition.BackgroundAudioTracks.Add(backgroundTrack);

    // Rewind and render the preview to the MediaElement.
    mediaElement.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
    mediaElement.SetMediaStreamSource(mediaStreamSource);

    rootPage.NotifyUser("Background audio added", NotifyType.StatusMessage);
}
/// <summary>
/// Captures an MP4 video with the system camera UI, appends it to the
/// composition, and previews the result in the MediaElement.
/// </summary>
public async void CaptureVideo()
{
    //<SnippetCaptureVideo>
    CameraCaptureUI captureUI = new CameraCaptureUI();
    captureUI.VideoSettings.Format = CameraCaptureUIVideoFormat.Mp4;

    StorageFile videoFile = await captureUI.CaptureFileAsync(CameraCaptureUIMode.Video);
    if (videoFile == null)
    {
        // User cancelled video capture
        return;
    }
    //</SnippetCaptureVideo>

    //<SnippetAddToComposition>
    MediaClip mediaClip = await MediaClip.CreateFromFileAsync(videoFile);
    mediaComposition.Clips.Add(mediaClip);

    // Preview stream sized to the media element's current on-screen size.
    mediaStreamSource = mediaComposition.GeneratePreviewMediaStreamSource(
        (int)mediaElement.ActualWidth,
        (int)mediaElement.ActualHeight);
    //</SnippetAddToComposition>

    //<SnippetSetMediaElementSource>
    mediaElement.SetMediaStreamSource(mediaStreamSource);
    //</SnippetSetMediaElementSource>
}
/// <summary>
/// Lets the user pick an .mp4 from the Videos library, persists access to it via
/// the FutureAccessList, and previews it in mediaElement1 as a single-clip composition.
/// </summary>
private async void Button_Click(object sender, RoutedEventArgs e)
{
    var picker = new Windows.Storage.Pickers.FileOpenPicker();
    picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.VideosLibrary;
    picker.FileTypeFilter.Add(".mp4");

    pickedFile = await picker.PickSingleFileAsync();

    // Bail out BEFORE using the file: the original added pickedFile to the access
    // list and created a clip from it before the null check, which throws when the
    // user cancels the picker.
    if (pickedFile == null)
    {
        return;
    }

    // The file may come from a location we won't have access to later, so persist
    // permission to it.
    var storageItemAccessList = Windows.Storage.AccessCache.StorageApplicationPermissions.FutureAccessList;
    storageItemAccessList.Add(pickedFile);

    var clip = await MediaClip.CreateFromFileAsync(pickedFile);
    composition = new MediaComposition();
    composition.Clips.Add(clip);

    // Rewind and preview at a fixed 500x200 size.
    mediaElement1.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(500, 200);
    mediaElement1.SetMediaStreamSource(mediaStreamSource);

    import_btn.IsEnabled = false;
}
/// <summary>
/// Initializes the edit dialog: builds a single-clip composition from VideoFile,
/// previews it in MediaPlay, and sizes the cut-range slider to the clip's full
/// duration. On any failure the dialog is hidden and an error dialog is queued.
/// </summary>
private async void VideoEditDialog_Loaded(object sender, RoutedEventArgs e)
{
    try
    {
        Composition = new MediaComposition();
        VideoClip = await MediaClip.CreateFromFileAsync(VideoFile);
        Composition.Clips.Add(VideoClip);

        // Fixed 640x360 preview size for the in-dialog player.
        PreviewSource = MediaSource.CreateFromMediaStreamSource(
            Composition.GeneratePreviewMediaStreamSource(640, 360));
        MediaPlay.Source = PreviewSource;

        // The cut slider spans the whole clip; the selected range starts maxed out.
        CutRange.Maximum = VideoClip.OriginalDuration.TotalMilliseconds;
        CutRange.RangeMax = CutRange.Maximum;
    }
    catch
    {
        // Broad catch: any failure opening/previewing the clip aborts the dialog
        // with a localized error message instead of crashing the Loaded handler.
        Hide();

        QueueContentDialog dialog = new QueueContentDialog
        {
            Title = Globalization.GetString("Common_Dialog_ErrorTitle"),
            Content = Globalization.GetString("QueueDialog_EditErrorWhenOpen_Content"),
            CloseButtonText = Globalization.GetString("Common_Dialog_CloseButton")
        };

        _ = await dialog.ShowAsync().ConfigureAwait(false);
    }
}
/// <summary>
/// Receives the video file passed as the navigation parameter, builds a
/// single-clip composition from it, and previews it in the editor.
/// </summary>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    // Only proceed when a StorageFile was actually passed.
    if (!(e.Parameter is StorageFile videoFile))
    {
        return;
    }

    StatusTextBlock.Text = videoFile.DisplayName;

    // Wrap the file in a MediaClip.
    var clip = await MediaClip.CreateFromFileAsync(videoFile);

    // Let the user trim from the end anywhere up to the full clip length
    // (the same could be done for trimming from the start).
    EndTrimSlider.Maximum = clip.OriginalDuration.TotalMilliseconds;

    // Build the composition holding just this clip.
    composition = new MediaComposition();
    composition.Clips.Add(clip);

    // Rewind, generate the preview stream, and hand it to the MediaElement.
    EditorMediaElement.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)EditorMediaElement.ActualWidth,
        (int)EditorMediaElement.ActualHeight);
    EditorMediaElement.SetMediaStreamSource(mediaStreamSource);

    TrimClipButton.IsEnabled = true;
}
/// <summary>
/// Builds a composition from the base video and overlays a second video in the
/// upper-left corner (one third of the player height, native aspect ratio,
/// 75% opacity), then previews the result.
/// </summary>
private async void CreateOverlays()
{
    // Base layer: a single clip from the main video file.
    var baseClip = await MediaClip.CreateFromFileAsync(baseVideoFile);
    composition = new MediaComposition();
    composition.Clips.Add(baseClip);

    var pipClip = await MediaClip.CreateFromFileAsync(overlayVideoFile);

    // Size the overlay to one third of the player height, preserving the
    // overlay video's native aspect ratio, anchored at the upper-left corner.
    var props = pipClip.GetVideoEncodingProperties();
    Rect pipRect;
    pipRect.X = 0;
    pipRect.Y = 0;
    pipRect.Height = mediaElement.ActualHeight / 3;
    pipRect.Width = (double)props.Width / (double)props.Height * pipRect.Height;

    var pipOverlay = new MediaOverlay(pipClip)
    {
        Position = pipRect,
        Opacity = 0.75
    };

    var layer = new MediaOverlayLayer();
    layer.Overlays.Add(pipOverlay);
    composition.OverlayLayers.Add(layer);

    // Rewind and render the composed preview into the MediaElement.
    mediaElement.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
    mediaElement.SetMediaStreamSource(mediaStreamSource);

    rootPage.NotifyUser("Overlays created", NotifyType.StatusMessage);
}
/// <summary>
/// Regenerates the composition's preview stream at the player element's current
/// size and assigns it as the player's source.
/// </summary>
public void UpdateSource()
{
    var previewStream = mediaComposition.GeneratePreviewMediaStreamSource(
        (int)mediaPlayerElement.ActualWidth,
        (int)mediaPlayerElement.ActualHeight);
    mediaPlayerElement.Source = MediaSource.CreateFromMediaStreamSource(previewStream);
}
// </SnippetDeclareMediaStreamSource>
// <SnippetUpdateMediaElementSource>
/// <summary>
/// Rebuilds the preview stream at the player element's current size and wires
/// it up as the player's media source.
/// </summary>
public void UpdateMediaElementSource()
{
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)mediaPlayerElement.ActualWidth,
        (int)mediaPlayerElement.ActualHeight);

    var source = MediaSource.CreateFromMediaStreamSource(mediaStreamSource);
    mediaPlayerElement.Source = source;
}
/// <summary>
/// Generates a preview stream for the composition at the player element's
/// current size and sets it as the player's source.
/// </summary>
private void preview()
{
    var streamSource = mComposition.GeneratePreviewMediaStreamSource(
        (int)mPlayerElement.ActualWidth, (int)mPlayerElement.ActualHeight);
    mPlayerElement.Source = MediaSource.CreateFromMediaStreamSource(streamSource);
}
/// <summary>
/// Attaches the current audio file to the composition as a background track and
/// re-renders the preview so the new audio is audible.
/// </summary>
private async void AddAudioTrack_Click(object sender, RoutedEventArgs e)
{
    var track = await BackgroundAudioTrack.CreateFromFileAsync(audioFile);
    composition.BackgroundAudioTracks.Add(track);

    // Rewind, rebuild the preview stream, and hand it to the MediaElement.
    mediaElement.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
    mediaElement.SetMediaStreamSource(mediaStreamSource);
}
/**
 * Starts preview mode.
 *
 * Trimming mode:
 *   The MediaPlayer source is the original source (created from the StorageFile).
 *   Trimming operations are always performed in this mode; no playback occurs.
 * Preview mode:
 *   The MediaPlayer source is a stream generated from the MediaComposition, used
 *   to test-play the trimmed video. Performing a trimming operation automatically
 *   cancels this mode and returns to the full-view mode.
 */
private async Task startPreview(bool play)
{
    // Already previewing: just (re)start playback if it is paused.
    if (mPreviewing)
    {
        if (!IsPlaying)
        {
            mPlayer.Play();
        }
        return;
    }

    // Require a meaningful trimmed range (>= 100 ms) and exactly one clip.
    if (mTrimmingSlider.TrimmedRange < 100 || mComposition.Clips.Count != 1)
    {
        return;
    }

    // Apply the slider's trim positions to the composition's single clip.
    mComposition.Clips[0].TrimTimeFromStart = TimeSpan.FromMilliseconds(mTrimmingSlider.TrimStart);
    mComposition.Clips[0].TrimTimeFromEnd = TimeSpan.FromMilliseconds(mTrimmingSlider.TrimEnd);

    try
    {
        mPreviewing = true;
        MediaStreamSource mediaStreamSource = mComposition.GeneratePreviewMediaStreamSource(
            (int)mPlayerElement.ActualWidth, (int)mPlayerElement.ActualHeight);
        var loader = await WvvMediaLoader.LoadAsync(mPlayer, MediaSource.CreateFromMediaStreamSource(mediaStreamSource), this);
        if (null != loader)
        {
            // mPreviewing may have been cleared while loading; only seek/play if
            // we are still in preview mode.
            if (mPreviewing)
            {
                mPlayer.PlaybackSession.Position = TimeSpan.FromMilliseconds(mTrimmingSlider.CurrentPosition);
                if (play)
                {
                    mPlayer.Play();
                }
            }
        }
        else
        {
            // Load failed: leave preview mode.
            mPreviewing = false;
        }
    }
    catch (Exception e)
    {
        // Loading/seeking failed: log, stop playback, leave preview mode, and
        // surface the error to the UI.
        CmLog.error(e, "WvvTrimmingView.startPreview: Error");
        mPlayer.Pause();
        mPreviewing = false;
        Error.SetError(e);
    }
}
/// <summary>
/// Captures an HD MP4 video via the system camera UI, previews it, reads the
/// file's bytes, writes them back to the same file, uploads them, then
/// navigates to BlankPage3.
/// </summary>
private async void Button_Click(object sender, RoutedEventArgs e)
{
    CameraCaptureUI video = new CameraCaptureUI();
    video.VideoSettings.Format = CameraCaptureUIVideoFormat.Mp4;
    video.VideoSettings.MaxResolution = CameraCaptureUIMaxVideoResolution.HighDefinition;

    storeFile = await video.CaptureFileAsync(CameraCaptureUIMode.Video);
    if (storeFile != null)
    {
        // NOTE(review): this stream field is read below but never disposed in
        // this handler — confirm it is closed elsewhere.
        stream = await storeFile.OpenAsync(FileAccessMode.Read);

        // Preview the captured clip via a single-clip composition.
        MediaClip media = await MediaClip.CreateFromFileAsync(storeFile);
        MediaComposition mComposition = new MediaComposition();
        mComposition.Clips.Add(media);
        MediaStreamSource source = mComposition.GeneratePreviewMediaStreamSource((int)demoVideo.ActualWidth, (int)demoVideo.ActualHeight);
        demoVideo.SetMediaStreamSource(source);

        // Timestamped name; currently only referenced by the commented-out save picker.
        DateTime dt = DateTime.Now;
        string dtstr = dt.ToString("ddMyyyy");
        string filename = "videorecordedat" + dtstr;

        //FileSavePicker save = new FileSavePicker();
        //save.FileTypeChoices.Add("Video", new List<string>() { ".mp4", ".wmv" });
        //save.DefaultFileExtension = ".mp4";
        //save.SuggestedFileName = "video"+filename;
        //save.SuggestedStartLocation = PickerLocationId.VideosLibrary;
        //save.SuggestedSaveFile = storeFile;
        //var s = await save.PickSaveFileAsync();

        // Read the whole file into a buffer, write it back to the SAME file
        // (NOTE(review): the write-back looks redundant — verify it is
        // intentional), then upload the bytes.
        using (var reader = new DataReader(stream.GetInputStreamAt(0)))
        {
            await reader.LoadAsync((uint)stream.Size);
            byte[] buffer = new byte[(int)stream.Size];
            reader.ReadBytes(buffer);
            await FileIO.WriteBytesAsync(storeFile, buffer);
            await Upload(storeFile, buffer);
        }

        this.Frame.Navigate(typeof(BlankPage3));
    }
}
/// <summary>
/// Records an HD MP4 video via the system camera UI and previews the captured
/// file in the mediaElement as a single-clip composition.
/// </summary>
private async void button1_Click(object sender, RoutedEventArgs e)
{
    // Launch the system camera UI configured for HD MP4 video capture.
    var captureUI = new CameraCaptureUI();
    captureUI.VideoSettings.Format = CameraCaptureUIVideoFormat.Mp4;
    captureUI.VideoSettings.MaxResolution = CameraCaptureUIMaxVideoResolution.HighDefinition;

    StorageFile capturedFile = await captureUI.CaptureFileAsync(CameraCaptureUIMode.Video);
    if (capturedFile == null)
    {
        // Capture cancelled — nothing to preview.
        return;
    }

    // Wrap the captured file in a composition and preview it.
    var clip = await MediaClip.CreateFromFileAsync(capturedFile);
    var previewComposition = new MediaComposition();
    previewComposition.Clips.Add(clip);

    var previewStream = previewComposition.GeneratePreviewMediaStreamSource(
        (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
    mediaElement.SetMediaStreamSource(previewStream);
}
/// <summary>
/// Merges the originally picked video and the second video into one composition
/// and previews the result at a fixed 500x200 size.
/// </summary>
private async void merge_btn_Click(object sender, RoutedEventArgs e)
{
    // Load both clips before touching the composition field.
    var headClip = await MediaClip.CreateFromFileAsync(pickedFile);
    var tailClip = await MediaClip.CreateFromFileAsync(secondVideoFile);

    // Two-clip composition: picked video followed by the second video.
    composition = new MediaComposition();
    composition.Clips.Add(headClip);
    composition.Clips.Add(tailClip);

    // Rewind and preview the merged result.
    mediaElement1.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(500, 200);
    mediaElement1.SetMediaStreamSource(mediaStreamSource);
}
/// <summary>
/// Previews the current composition in the encodeImagesResult MediaElement at a
/// fixed 400x400 size.
/// </summary>
private void showVideo_Click(object sender, RoutedEventArgs e)
{
    // Removed: GC.Collect() (forcing a collection here is an anti-pattern and is
    // not needed to generate a preview) and the dead, commented-out
    // save-picker/RenderToFileAsync code.
    encodeImagesResult.Position = TimeSpan.Zero;
    IMediaSource mediaStreamSource = composition.GeneratePreviewMediaStreamSource(400, 400);
    encodeImagesResult.SetMediaStreamSource(mediaStreamSource);
}
/// <summary>
/// Concatenates the two selected video files into a fresh composition and
/// previews the combined result in the MediaElement.
/// </summary>
private async void AppendVideo_Click(object sender, RoutedEventArgs e)
{
    // Load both clips, then rebuild the composition with them in order.
    var clipA = await MediaClip.CreateFromFileAsync(firstVideoFile);
    var clipB = await MediaClip.CreateFromFileAsync(secondVideoFile);
    composition = new MediaComposition();
    composition.Clips.Add(clipA);
    composition.Clips.Add(clipB);

    // Rewind and render the preview at the element's current size.
    mediaElement.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
    mediaElement.SetMediaStreamSource(mediaStreamSource);
}
/// <summary>
/// Cuts 25% off both the front and the back of the picked clip, then previews
/// the trimmed clip via a fresh single-clip composition.
/// </summary>
private async void TrimClip_Click(object sender, RoutedEventArgs e)
{
    var clip = await MediaClip.CreateFromFileAsync(pickedFile);

    // One quarter of the original duration, trimmed from each end.
    clip.TrimTimeFromStart = new TimeSpan((long)(clip.OriginalDuration.Ticks * 0.25));
    clip.TrimTimeFromEnd = new TimeSpan((long)(clip.OriginalDuration.Ticks * 0.25));

    // Rebuild the composition around the trimmed clip and preview it.
    composition = new MediaComposition();
    composition.Clips.Add(clip);
    mediaElement.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
    mediaElement.SetMediaStreamSource(mediaStreamSource);

    rootPage.NotifyUser("Clip trimmed", NotifyType.StatusMessage);
    save.IsEnabled = true;
}
/// <summary>
/// Loads the saved composition (.cmp) for the current media group from local
/// storage and previews it in the MediaElement. Failures are non-fatal: the
/// current preview is simply left unchanged.
/// </summary>
public async Task UpdateMediaElementSource()
{
    try
    {
        cmpFile = await ApplicationData.Current.LocalFolder.GetFileAsync(mediaGroup.CompostionFileName + ".cmp");
        mediaComposition = await MediaComposition.LoadAsync(cmpFile);
        mediaStreamSource = mediaComposition.GeneratePreviewMediaStreamSource(
            (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
        mediaElement.SetMediaStreamSource(mediaStreamSource);
    }
    catch (Exception ex)
    {
        // Keep the best-effort behavior (a missing/corrupt .cmp leaves the
        // preview unchanged) but stop swallowing the error completely silently.
        System.Diagnostics.Debug.WriteLine(ex);
    }
}
/// <summary>
/// Trims the composition's single clip from the end by the slider value,
/// refreshes the preview, and updates the status UI. A busy overlay is shown
/// for the duration of the operation.
/// </summary>
private void TrimClip_Click(object sender, RoutedEventArgs e)
{
    try
    {
        // Show the overlay that contains the ProgressBar
        BusyOverlay.Visibility = Visibility.Visible;
        EncodingProgressTextBlock.Text = "trimming...";

        // Get the first clip in the MediaComposition
        // We know this beforehand because it's the only clip in the composition
        // that we created from the passed video file
        MediaClip clip = composition.Clips.FirstOrDefault();

        // Trim the end of the clip (you can use TrimTimeFromStart to trim from the beginning)
        clip.TrimTimeFromEnd = TimeSpan.FromMilliseconds((long)EndTrimSlider.Value);

        // Rewind the MediaElement
        EditorMediaElement.Position = TimeSpan.Zero;

        // Update the video source with the trimmed clip
        mediaStreamSource = composition.GeneratePreviewMediaStreamSource((int)EditorMediaElement.ActualWidth, (int)EditorMediaElement.ActualHeight);

        // Set the MediaElement's source
        EditorMediaElement.SetMediaStreamSource(mediaStreamSource);

        // Update the UI
        EndTrimSlider.Value = 0;
        StatusTextBlock.Text = "Trim Successful! Trim again or click the SAVE button to keep.";
        StatusTextBlock.Foreground = new SolidColorBrush(Colors.Green);
        SaveButton.IsEnabled = true;
    }
    catch (Exception exception)
    {
        // Log and rethrow; bare 'throw;' preserves the original stack trace.
        Debug.WriteLine(exception);
        throw;
    }
    finally
    {
        // Always hide the busy overlay, even when trimming throws.
        BusyOverlay.Visibility = Visibility.Collapsed;
        EncodingProgressTextBlock.Text = "";
    }
}
/// <summary>
/// Opens the system camera UI to record an HD MP4 video, then previews the
/// captured file in the 'video' MediaElement via a single-clip composition.
/// </summary>
private async void Capture_button_Click(object sender, RoutedEventArgs e)
{
    // Declaring CameraCaptureUI to call the default Video capturing tool in the system
    CameraCaptureUI cc = new CameraCaptureUI();
    cc.VideoSettings.Format = CameraCaptureUIVideoFormat.Mp4;
    cc.VideoSettings.MaxResolution = CameraCaptureUIMaxVideoResolution.HighDefinition;

    /* saving the video temporarily in the storage file object and setting the
       streaming source for our media element to show the information */
    sf = await cc.CaptureFileAsync(CameraCaptureUIMode.Video);
    if (sf != null)
    {
        // NOTE(review): 'rs' is opened here but not used in this handler and is
        // not disposed — presumably consumed elsewhere; confirm ownership.
        rs = await sf.OpenAsync(FileAccessMode.Read);

        MediaClip mc = await MediaClip.CreateFromFileAsync(sf);
        MediaComposition mcomp = new MediaComposition();
        mcomp.Clips.Add(mc);

        // Preview at the media element's current on-screen size.
        MediaStreamSource mss = mcomp.GeneratePreviewMediaStreamSource((int) video.ActualWidth, (int)video.ActualHeight);
        video.SetMediaStreamSource(mss);
    }
}
/// <summary>
/// Renders the composition to a temporary MP4 while reporting progress to the
/// UI, simultaneously previews the composition in MediaElement1, and records the
/// finished file in the audioVideo list. Cancellation is checked between stages.
/// </summary>
private async Task SetVideo(MediaComposition mediaComposition, CancellationToken cancellationToken)
{
    StorageFile temporaryFile = await CreateFile("Video", "temp (1)", "mp4", cancellationToken);

    // Kick off the precise render; 'progress' completes when rendering finishes.
    IAsyncOperationWithProgress <TranscodeFailureReason, double> progress = mediaComposition.RenderToFileAsync(temporaryFile, MediaTrimmingPreference.Precise);
    CancelTask(cancellationToken);

    progress.Progress = new AsyncOperationProgressHandler <TranscodeFailureReason, double>(async(reason, progressInfo) =>
    {
        // Propagate cancellation into the render operation and clean up temp data.
        if (cancellationToken.IsCancellationRequested)
        {
            reason.Cancel();
            await ClearTempData();
        }

        // Marshal UI updates back onto the dispatcher thread.
        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, new DispatchedHandler(async() =>
        {
            ReportProgress(Stage6, Stage5ProgressBar, TextProgress5, progressInfo);
            if (progressInfo == 100)
            {
                // Render complete: surface the output file's properties and enable saving.
                Open.Content = "Browse File";
                ItemSource = await GetProperties(temporaryFile, cancellationToken);
                Save.IsEnabled = true;
            }
        }));
    });
    CancelTask(cancellationToken);

    // Preview while rendering. NOTE(review): width/height of 0 are passed to
    // GeneratePreviewMediaStreamSource — confirm this is intentional.
    MediaStreamSource mediaStreamSource = mediaComposition.GeneratePreviewMediaStreamSource(0, 0);
    MediaElement1.SetMediaStreamSource(mediaStreamSource);
    MediaElement1.Play();

    // Wait for the render to finish, then record the output file.
    await progress;
    CancelTask(cancellationToken);
    audioVideo.Add(temporaryFile);
}
/// <summary>
/// Lets the user pick an .mp4, adds it to a new composition with the example
/// custom video effect applied, and previews the result in the mediaElement.
/// </summary>
private async void AddEffectToMediaClip()
{
    var picker = new Windows.Storage.Pickers.FileOpenPicker();
    picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.VideosLibrary;
    picker.FileTypeFilter.Add(".mp4");

    Windows.Storage.StorageFile pickedFile = await picker.PickSingleFileAsync();
    if (pickedFile == null)
    {
        //ShowErrorMessage("File picking cancelled");
        return;
    }

    // These files could be picked from a location that we won't have access to later
    var storageItemAccessList = Windows.Storage.AccessCache.StorageApplicationPermissions.FutureAccessList;
    storageItemAccessList.Add(pickedFile);

    //<SnippetAddEffectToComposition>
    MediaComposition composition = new MediaComposition();
    var clip = await MediaClip.CreateFromFileAsync(pickedFile);
    composition.Clips.Add(clip);

    // Attach the custom effect by its activatable class id with its configuration.
    var videoEffectDefinition = new VideoEffectDefinition("VideoEffectComponent.ExampleVideoEffect", new PropertySet() { { "FadeValue", .5 } });
    clip.VideoEffectDefinitions.Add(videoEffectDefinition);
    //</SnippetAddEffectToComposition>

    // NOTE(review): the preview size comes from PreviewControl while the stream
    // is set on mediaElement — confirm this mismatch is intended.
    var mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)PreviewControl.ActualWidth, (int)PreviewControl.ActualHeight);
    mediaElement.SetMediaStreamSource(mediaStreamSource);
}
/// <summary>
/// Renders the composition's preview at the display control's current size and
/// assigns it as the control's media stream source.
/// </summary>
private void SetupMediaStreamSource()
{
    var previewStream = _composition.GeneratePreviewMediaStreamSource(
        (int)VideoDisplay.ActualWidth,
        (int)VideoDisplay.ActualHeight);
    VideoDisplay.SetMediaStreamSource(previewStream);
}
/// <summary>
/// Applies the currently selected transition effect across the current video and
/// the next two videos in the list (wrapping around), chains the three clips into
/// one composition, and previews the result at 320x240.
/// </summary>
private async void Apply_Effect(object sender, RoutedEventArgs e)
{
    if (CurrentVideo == null || CurrentEffect == null)
    {
        return;
    }

    // Pick the next two videos after the current one, wrapping to the list start.
    SecondVideo = VideoList.IndexOf(CurrentVideo) == VideoList.Count - 1 ? VideoList[0] : VideoList[VideoList.IndexOf(CurrentVideo) + 1];
    ThirdVideo = VideoList.IndexOf(SecondVideo) == VideoList.Count - 1 ? VideoList[0] : VideoList[VideoList.IndexOf(SecondVideo) + 1];

    MediaComposition comp = new MediaComposition();

    // Resolve the three URIs to files and wrap each in a MediaClip.
    StorageFile VideoFile1 = await StorageFile.GetFileFromApplicationUriAsync(new Uri(CurrentVideo));
    StorageFile VideoFile2 = await StorageFile.GetFileFromApplicationUriAsync(new Uri(SecondVideo));
    StorageFile VideoFile3 = await StorageFile.GetFileFromApplicationUriAsync(new Uri(ThirdVideo));
    MediaClip mediaClip1 = await MediaClip.CreateFromFileAsync(VideoFile1);
    MediaClip mediaClip2 = await MediaClip.CreateFromFileAsync(VideoFile2);
    MediaClip mediaClip3 = await MediaClip.CreateFromFileAsync(VideoFile3);

    // Per-clip effect parameters: clip 1 starts with no effect and ends with the
    // selected effect, clip 2 has the effect at both ends, clip 3 starts with the
    // effect and ends with none. Each is told its clip's duration in seconds.
    TransitionEffectParameter transitionEffectParameter1 = new TransitionEffectParameter();
    TransitionEffectParameter transitionEffectParameter2 = new TransitionEffectParameter();
    TransitionEffectParameter transitionEffectParameter3 = new TransitionEffectParameter();
    transitionEffectParameter1.SetStartEffect(TransitionEffectType.TRANSITION_NONE, 0);
    transitionEffectParameter1.SetEndEffect(CurrentEffect.EffectType, 1);
    transitionEffectParameter1.SetVideoDuration((float)mediaClip1.OriginalDuration.TotalSeconds);
    transitionEffectParameter2.SetStartEffect(CurrentEffect.EffectType, 1);
    transitionEffectParameter2.SetEndEffect(CurrentEffect.EffectType, 1);
    transitionEffectParameter2.SetVideoDuration((float)mediaClip2.OriginalDuration.TotalSeconds);
    transitionEffectParameter3.SetStartEffect(CurrentEffect.EffectType, 1);
    transitionEffectParameter3.SetEndEffect(TransitionEffectType.TRANSITION_NONE, 0);
    transitionEffectParameter3.SetVideoDuration((float)mediaClip3.OriginalDuration.TotalSeconds);

    // Hand each parameter object to the custom transition effect via a PropertySet.
    PropertySet configuration1 = new PropertySet();
    configuration1.Add("TransitionEffectParameter", transitionEffectParameter1);
    mediaClip1.VideoEffectDefinitions.Add(new VideoEffectDefinition("TransitionEffectTransform.TransitionEffect", configuration1));
    PropertySet configuration2 = new PropertySet();
    configuration2.Add("TransitionEffectParameter", transitionEffectParameter2);
    mediaClip2.VideoEffectDefinitions.Add(new VideoEffectDefinition("TransitionEffectTransform.TransitionEffect", configuration2));
    PropertySet configuration3 = new PropertySet();
    configuration3.Add("TransitionEffectParameter", transitionEffectParameter3);
    mediaClip3.VideoEffectDefinitions.Add(new VideoEffectDefinition("TransitionEffectTransform.TransitionEffect", configuration3));

    // Chain the clips in order and preview the composed sequence.
    comp.Clips.Add(mediaClip1);
    comp.Clips.Add(mediaClip2);
    comp.Clips.Add(mediaClip3);
    Video.SetMediaStreamSource(comp.GeneratePreviewMediaStreamSource(320, 240));
}
/// <summary>
/// Combines the two selected video files into a single composition and previews
/// the appended result in the MediaElement.
/// </summary>
private async void AppendVideos_Click(object sender, RoutedEventArgs e)
{
    // Load both clips, then rebuild the composition with them in sequence.
    var leadingClip = await MediaClip.CreateFromFileAsync(firstVideoFile);
    var trailingClip = await MediaClip.CreateFromFileAsync(secondVideoFile);
    composition = new MediaComposition();
    composition.Clips.Add(leadingClip);
    composition.Clips.Add(trailingClip);

    // Rewind and hand the regenerated preview stream to the MediaElement.
    mediaElement.Position = TimeSpan.Zero;
    mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
        (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
    mediaElement.SetMediaStreamSource(mediaStreamSource);

    rootPage.NotifyUser("Clips appended", NotifyType.StatusMessage);
}
/// <summary>
/// Builds a composition consisting of a 1 ms solid-color clip with a heatmap
/// overlay (and optionally a horizon image overlay) and returns a preview
/// stream for it, sized to <paramref name="overlayPosition"/>.
/// </summary>
public async Task <MediaStreamSource> GenerateHeatmap(bool scaleFovFlag, int scaleInPercentage, bool forceFov, int forcedFov, bool horizonFlag, SessionCollection sessions, Rect overlayPosition, Windows.UI.Color colorPickerColor, double heatmapOpacity, double startTime, double stopTime, MediaClip video)
{
    CheckHistoryErrors(sessions);

    // Trim the session history to the requested time window on a worker thread.
    List <Heatmap.Coord> inputList = await Task.Factory.StartNew(() =>
        TrimStaticHeatmap(sessions, startTime, stopTime, video)
    );

    MediaOverlayLayer mediaOverlayLayer = new MediaOverlayLayer();

    // Render the heatmap bitmap, then convert WriteableBitmap -> SoftwareBitmap
    // (Bgra8, alpha ignored) -> CanvasBitmap so it can become a surface clip.
    WriteableBitmap wb = await GenerateHeatmap(inputList, forceFov, forcedFov, scaleFovFlag, scaleInPercentage);
    CanvasDevice device = CanvasDevice.GetSharedDevice();
    SoftwareBitmap swb = SoftwareBitmap.CreateCopyFromBuffer(wb.PixelBuffer, BitmapPixelFormat.Bgra8, wb.PixelWidth, wb.PixelHeight);
    swb = SoftwareBitmap.Convert(swb, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore);
    CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(device, swb);

    // 1 ms still clip carrying the heatmap image, at the caller's opacity.
    var clip = MediaClip.CreateFromSurface(canvasBitmap, new TimeSpan(0, 0, 0, 0, 1));
    MediaOverlay mediaOverlay = new MediaOverlay(clip)
    {
        Position = overlayPosition,
        Opacity = heatmapOpacity
    };
    mediaOverlayLayer.Overlays.Add(mediaOverlay);

    if (horizonFlag)
    {
        // Optional fully-opaque horizon reference image layered over the heatmap.
        CanvasBitmap cb = await CanvasBitmap.LoadAsync(CanvasDevice.GetSharedDevice(), new Uri("ms-appx:///Assets/horizon3840x2160.png"));
        MediaOverlay horizonOverlay = new MediaOverlay(MediaClip.CreateFromSurface(cb, new TimeSpan(0, 0, 0, 0, 1)))
        {
            Position = overlayPosition,
            Opacity = 1
        };
        mediaOverlayLayer.Overlays.Add(horizonOverlay);
    }

    // Base layer: a 1 ms solid-color clip; the overlay layer is composited above it.
    MediaComposition mediaComposition = new MediaComposition();
    mediaComposition.Clips.Add(MediaClip.CreateFromColor(colorPickerColor, new TimeSpan(0, 0, 0, 0, 1)));
    mediaComposition.OverlayLayers.Add(mediaOverlayLayer);

    return (mediaComposition.GeneratePreviewMediaStreamSource
    (
        (int)overlayPosition.Width,
        (int)overlayPosition.Height
    ));
}
/// <summary>
/// Invoked when this page is about to be displayed in a Frame. Crops the bundled
/// Car.mp4 to its largest centered square, encodes the result twice (once via
/// MediaComposition, once via MediaTranscoder), then previews both paths.
/// </summary>
/// <param name="e">Event data that describes how this page was reached.
/// This parameter is typically used to configure the page.</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    StorageFile source = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Input/Car.mp4"));

    // Select the largest centered square area in the input video
    var inputProfile = await MediaEncodingProfile.CreateFromFileAsync(source);
    uint inputWidth = inputProfile.Video.Width;
    uint inputHeight = inputProfile.Video.Height;
    uint outputLength = Math.Min(inputWidth, inputHeight);
    Rect cropArea = new Rect(
        (float)((inputWidth - outputLength) / 2),
        (float)((inputHeight - outputLength) / 2),
        (float)outputLength,
        (float)outputLength
    );

    // Create the output encoding profile: square MP4 that preserves the input's
    // bitrate and frame rate.
    var outputProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD720p);
    outputProfile.Video.Bitrate = inputProfile.Video.Bitrate;
    outputProfile.Video.FrameRate.Numerator = inputProfile.Video.FrameRate.Numerator;
    outputProfile.Video.FrameRate.Denominator = inputProfile.Video.FrameRate.Denominator;
    outputProfile.Video.Width = outputLength;
    outputProfile.Video.Height = outputLength;

    // Lumia effect: a single CropFilter producing the square output.
    var definition = new LumiaEffectDefinition(new FilterChainFactory(() =>
    {
        var filters = new List<IFilter>();
        filters.Add(new CropFilter(cropArea));
        return filters;
    }));
    definition.InputWidth = inputWidth;
    definition.InputHeight = inputHeight;
    definition.OutputWidth = outputLength;
    definition.OutputHeight = outputLength;

    var clip = await MediaClip.CreateFromFileAsync(source);
    clip.VideoEffectDefinitions.Add(definition);
    var composition = new MediaComposition();
    composition.Clips.Add(clip);

    // Path 1: render the composition (with the effect applied) to a file.
    TextLog.Text = "Encoding using MediaComposition";
    StorageFile destination1 = await KnownFolders.VideosLibrary.CreateFileAsync("Square_MC.mp4", CreationCollisionOption.ReplaceExisting);
    await composition.RenderToFileAsync(destination1, MediaTrimmingPreference.Fast, outputProfile);

    // Path 2: transcode the original file, injecting the same effect by its
    // activatable class id.
    TextLog.Text = "Encoding using MediaTranscoder";
    StorageFile destination2 = await KnownFolders.VideosLibrary.CreateFileAsync("Square_MT.mp4", CreationCollisionOption.ReplaceExisting);
    var transcoder = new MediaTranscoder();
    transcoder.AddVideoEffect(definition.ActivatableClassId, true, definition.Properties);
    var transcode = await transcoder.PrepareFileTranscodeAsync(source, destination2, outputProfile);
    await transcode.TranscodeAsync();

    // Preview the composition path at the square output size...
    TextLog.Text = "Starting MediaComposition preview";
    PreviewMC.SetMediaStreamSource(
        composition.GeneratePreviewMediaStreamSource((int)outputLength, (int)outputLength)
    );

    // ...and the MediaElement path with the effect applied at playback time.
    TextLog.Text = "Starting MediaElement preview";
    PreviewME.AddVideoEffect(definition.ActivatableClassId, false, definition.Properties);
    PreviewME.Source = new Uri("ms-appx:///Input/Car.mp4");
    PreviewME.Play();

    TextLog.Text = "Done";
}
/// <summary>
/// Invoked when this page is about to be displayed in a Frame. Crops the bundled
/// Car.mp4 to its largest centered square, encodes the result twice (once via
/// MediaComposition, once via MediaTranscoder), then previews both paths.
/// </summary>
/// <param name="e">Event data that describes how this page was reached.
/// This parameter is typically used to configure the page.</param>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    StorageFile source = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Input/Car.mp4"));

    // Select the largest centered square area in the input video
    var inputProfile = await MediaEncodingProfile.CreateFromFileAsync(source);
    uint inputWidth = inputProfile.Video.Width;
    uint inputHeight = inputProfile.Video.Height;
    uint outputLength = Math.Min(inputWidth, inputHeight);
    Rect cropArea = new Rect(
        (float)((inputWidth - outputLength) / 2),
        (float)((inputHeight - outputLength) / 2),
        (float)outputLength,
        (float)outputLength
    );

    // Create the output encoding profile: square MP4 that preserves the input's
    // bitrate and frame rate.
    var outputProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD720p);
    outputProfile.Video.Bitrate = inputProfile.Video.Bitrate;
    outputProfile.Video.FrameRate.Numerator = inputProfile.Video.FrameRate.Numerator;
    outputProfile.Video.FrameRate.Denominator = inputProfile.Video.FrameRate.Denominator;
    outputProfile.Video.Width = outputLength;
    outputProfile.Video.Height = outputLength;

    // Lumia effect: a single CropFilter producing the square output.
    var definition = new LumiaEffectDefinition(new FilterChainFactory(() =>
    {
        var filters = new List <IFilter>();
        filters.Add(new CropFilter(cropArea));
        return (filters);
    }));
    definition.InputWidth = inputWidth;
    definition.InputHeight = inputHeight;
    definition.OutputWidth = outputLength;
    definition.OutputHeight = outputLength;

    var clip = await MediaClip.CreateFromFileAsync(source);
    clip.VideoEffectDefinitions.Add(definition);
    var composition = new MediaComposition();
    composition.Clips.Add(clip);

    // Path 1: render the composition (with the effect applied) to a file.
    TextLog.Text = "Encoding using MediaComposition";
    StorageFile destination1 = await KnownFolders.VideosLibrary.CreateFileAsync("Square_MC.mp4", CreationCollisionOption.ReplaceExisting);
    await composition.RenderToFileAsync(destination1, MediaTrimmingPreference.Fast, outputProfile);

    // Path 2: transcode the original file, injecting the same effect by its
    // activatable class id.
    TextLog.Text = "Encoding using MediaTranscoder";
    StorageFile destination2 = await KnownFolders.VideosLibrary.CreateFileAsync("Square_MT.mp4", CreationCollisionOption.ReplaceExisting);
    var transcoder = new MediaTranscoder();
    transcoder.AddVideoEffect(definition.ActivatableClassId, true, definition.Properties);
    var transcode = await transcoder.PrepareFileTranscodeAsync(source, destination2, outputProfile);
    await transcode.TranscodeAsync();

    // Preview the composition path at the square output size...
    TextLog.Text = "Starting MediaComposition preview";
    PreviewMC.SetMediaStreamSource(
        composition.GeneratePreviewMediaStreamSource((int)outputLength, (int)outputLength)
    );

    // ...and the MediaElement path with the effect applied at playback time.
    TextLog.Text = "Starting MediaElement preview";
    PreviewME.AddVideoEffect(definition.ActivatableClassId, false, definition.Properties);
    PreviewME.Source = new Uri("ms-appx:///Input/Car.mp4");
    PreviewME.Play();

    TextLog.Text = "Done";
}