public void FadeBetweenImages()
{
    // Builds a small slide-show with an audio track and cross-fades between images.
    string outputFile = "FadeBetweenImages.wmv";

    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, 160, 100);
        ITrack videoTrack = group.AddTrack();

        // Four stills, two seconds each, appended back to back.
        IClip clip1 = videoTrack.AddImage("image1.jpg", 0, 2);
        IClip clip2 = videoTrack.AddImage("image2.jpg", 0, 2);
        IClip clip3 = videoTrack.AddImage("image3.jpg", 0, 2);
        IClip clip4 = videoTrack.AddImage("image4.jpg", 0, 2);

        double halfDuration = 0.5;

        // At each clip boundary: fade out just before the next clip starts,
        // then fade back in as it begins.
        foreach (IClip boundaryClip in new[] { clip2, clip3, clip4 })
        {
            group.AddTransition(boundaryClip.Offset - halfDuration, halfDuration, StandardTransitions.CreateFade(), true);
            group.AddTransition(boundaryClip.Offset, halfDuration, StandardTransitions.CreateFade(), false);
        }

        // Soundtrack spanning the full length of the video track.
        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audio = audioTrack.AddAudio("testinput.wav", 0, videoTrack.Duration);

        // Audio envelope: fade in from 0% to 100% over the first second,
        // hold full volume, then fade back to 0% over the final second.
        audioTrack.AddEffect(0, audio.Duration,
                             StandardEffects.CreateAudioEnvelope(1.0, 1.0, 1.0, audio.Duration));

        // Render the slide-show out to a Windows Media file.
        using (IRenderer renderer =
            new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}
/// <summary>
/// Saves the content of this <see cref="CdgFile"/> as video.
/// </summary>
/// <param name="path">
/// A <see cref="string"/> indicating the file to which the video should be saved.
/// </param>
public void SaveAsVideo(string path)
{
    Console.WriteLine("Saving...");
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, 300, 300);
        ITrack videoTrack = group.AddTrack();

        // Pull frames one at a time; each frame is displayed for 0.2 seconds.
        // NOTE(review): rendering is capped at 100 frames — confirm whether this
        // limit is intentional or leftover debugging.
        Bitmap frame = this.Next();
        int frames = 0;
        while (frame != null && frames < 100)
        {
            Console.WriteLine("<!--Getting frame-->");
            videoTrack.AddImage(frame, 0, 0.2D);
            frame = this.Next();
            frames++;
        }

        try
        {
            Console.WriteLine(videoTrack.Duration);
            // NOTE(review): Replace("cdg", "mp3") substitutes every occurrence of
            // "cdg" in the path, not just the extension — a directory named "cdg"
            // would be mangled. Prefer Path.ChangeExtension if this is ever used
            // for more than logging.
            Console.WriteLine(this.stream.Name.Replace("cdg", "mp3"));
            ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
            ////IClip audio = audioTrack.AddAudio(this.stream.Name.Replace("cdg", "mp3"), 0, videoTrack.Duration);

            // NOTE(review): renderer is not disposed here, unlike the other render
            // paths in this codebase — consider wrapping it in a using block.
            IRenderer renderer = new WindowsMediaRenderer(timeline, path, WindowsMediaProfiles.HighQualityVideo);
            renderer.Render();
        }
        catch (Exception exception)
        {
            // Best-effort: render failures are logged to the console, not propagated.
            Console.WriteLine(exception);
        }
    }

    Console.WriteLine("Saving finished.");
}
public void AddInMemoryImageClipsToTrack()
{
    // Collects the temp file generated for each in-memory image so we can assert
    // the names are unique while rendering and gone once the timeline is disposed.
    var tempFiles = new StringCollection();

    Action<IClip> addClip = delegate(IClip clip)
    {
        if (tempFiles.Contains(clip.File.FileName))
        {
            Assert.Fail("TempFile: {0} duplicated", clip.File.FileName);
        }

        // BUGFIX: the file name was never recorded, so the duplicate check above and
        // the post-dispose File.Exists loop below were both vacuous no-ops.
        tempFiles.Add(clip.File.FileName);
    };

    string outputFile = "AddInMemoryImageClipsToTrack.wmv";
    Image image = Image.FromFile("..\\..\\image1.jpg");

    using (ITimeline timeline = new DefaultTimeline())
    {
        timeline.AddAudioGroup().AddTrack().AddAudio("..\\..\\testinput.wav", 0, 7.5);
        ITrack videoTrack = timeline.AddVideoGroup(24, 320, 200).AddTrack();

        // Exercise each AddImage overload; expected clip spans noted per call.
        addClip(videoTrack.AddImage(image));                                        // 0->1
        addClip(videoTrack.AddImage(image, 1));                                     // 2->3
        addClip(videoTrack.AddImage(image, 1, 0.5));                                // 4->4.5
        addClip(videoTrack.AddImage(image, InsertPosition.Absolute, 5, 0, 1));      // 5->6

        IClip clip = videoTrack.AddImage("named", image, InsertPosition.Absolute, 7, 0.5, 1); // 7->7.5
        addClip(clip);
        Assert.AreEqual("named", clip.Name);
        Assert.AreEqual(7.5, videoTrack.Duration);

        using (
            var renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo)
            )
        {
            renderer.Render();
        }
    }

    // Disposing the timeline must remove every temp file it generated.
    foreach (string file in tempFiles)
    {
        Assert.IsFalse(File.Exists(file));
    }
}
/// <summary>
/// Render the video from the music file/background image.
///
/// NOTE(review): despite the original "asynchronously" wording, this method calls
/// renderer.Render() synchronously and blocks until rendering completes — the
/// BeginRender call is commented out at the bottom.
///
/// Attach event handlers to audioProgress[0] and videoProgress[0] to monitor
/// progress, AFTER this function has been called!
/// </summary>
public void Render(ProgressBar pbarAudio, ProgressBar pbarVideo)
{
    Console.WriteLine("\nBegin render...");
    using (ITimeline timeline = new DefaultTimeline())
    {
        Console.Write("Create group...");

        // create our primary group: named "background", 15 fps, 32bpp, 720x480
        IGroup group = timeline.AddVideoGroup("background", 15.0, 32, 720, 480);
        Console.WriteLine("done.");

        Console.Write("Create tracks...");

        // add a video and audio track to the group
        ITrack videoTrack = group.AddTrack();
        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        Console.WriteLine("done.");

        // add the audio/bg image
        Console.Write("Create audio track...");
        IClip audio = audioTrack.AddAudio(this.mp3path);
        Console.WriteLine("done.");

        Console.Write("Create video track... length: " + audio.Duration.ToString() + "...");

        // the single background image is shown for the entire audio duration
        IClip clip1 = videoTrack.AddImage(this.backgroundImage, 0, audio.Duration);
        //videoTrack.AddImage(this.backgroundImage, 0, audio.Duration);
        Console.WriteLine("done.");

        // set up progress indicators (one participant each for audio and video)
        audioProgress[0] = new PercentageProgressParticipant(timeline);
        videoProgress[0] = new PercentageProgressParticipant(timeline);

        Console.WriteLine("Timeline duration: " + timeline.Duration);
        Console.WriteLine("Audio duration: " + audio.Duration);
        Console.WriteLine("IMG duration: " + videoTrack.Duration);

        // render our video out
        Console.Write("Render Start...");
        using (renderer = new Splicer.Renderer.WindowsMediaRenderer(
            timeline, videopath, WindowsMediaProfiles.HighQualityVideo, videoProgress, audioProgress))
        {
            // stash the progress bars in fields so the event handlers can update them
            this.pbarA = pbarAudio;
            this.pbarV = pbarVideo;

            // wire the renderer's progress events through to the UI handlers
            audioProgress[0].ProgressChanged += new EventHandler
                <Splicer.Renderer.ProgressChangedEventArgs>(listMusicItem_ProgressChangedAudio);
            videoProgress[0].ProgressChanged += new EventHandler
                <Splicer.Renderer.ProgressChangedEventArgs>(listMusicItem_ProgressChangedVideo);

            renderer.Render();
        }
        Console.WriteLine("Render Completed.");

        //AsyncCallback cb = new AsyncCallback(CallBack);
        //IAsyncResult ar = renderer.BeginRender(cb, renderer.State);
    }
}
public static IClip add_Image(this ITrack videoTrack, Image imageToAdd)
{
    // Appends an in-memory image to the track; logs and returns null on failure.
    try
    {
        return videoTrack.AddImage(imageToAdd);
    }
    catch (Exception ex)
    {
        ex.log("in API_Cropper videoTrack add_Image");
        return null;
    }
}
public static IClip add_Image(this ITrack videoTrack, string imageToAdd)
{
    // Appends an image file to the track as a one-second clip starting at offset 0;
    // logs and returns null on failure.
    try
    {
        "adding image: {0}".debug(imageToAdd);
        return videoTrack.AddImage(imageToAdd, 0, 1);
    }
    catch (Exception ex)
    {
        ex.log("in API_Cropper videoTrack add_Image");
        return null;
    }
}
public async Task MakeVideo(double BPM, double BPM_Multiplier, double offset, string videoTitle, string audioFilePath, double videoDuration = 30)
{
    // Destination paths for the intermediate render and the final (cut) video.
    string tempOutputPath = tempFilesPath + videoMakerIOUtilities.ReturnNameWithExtension(videoTitle, true);
    string finalOutputPath = finalFilesPath + videoMakerIOUtilities.ReturnNameWithExtension(videoTitle, false);

    // Display time of a single frame, derived from the beat timing.
    double frameTime = videoMakerTimingUtilities.ReturnFrameTimeBasedOnBPM(BPM, BPM_Multiplier);

    await Task.Run(() =>
    {
        // Bail out silently when the expected source images are not all present.
        if (!videoMakerIOUtilities.CheckIfImageFilesExist(imageFilesPath: $@"jpg2\", 19))
        {
            return;
        }

        using (ITimeline timeline = new DefaultTimeline())
        {
            IGroup group = timeline.AddVideoGroup(32, 738, 650);
            ITrack videoTrack = group.AddTrack();

            // Lead-in: hold the first image for the requested offset.
            videoTrack.AddImage($@"jpg2\1.jpg", 0, offset);

            int frameCount = videoMakerTimingUtilities.ReturnNumberOfFrames(videoDuration, offset, frameTime);

            // Cycle through images 1..18, one beat-timed frame each.
            for (int frameIndex = 1; frameIndex <= frameCount; frameIndex++)
            {
                int picNumber = frameIndex % 18;
                videoTrack.AddImage($@"jpg2\{picNumber + 1}.jpg", 0, frameTime);
            }

            // Audio is laid down 2.75 s longer than the video track —
            // presumably headroom for the later cut; confirm before changing.
            ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
            audioTrack.AddAudio(audioFilePath, 0, videoTrack.Duration + 2.75);

            using (WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, tempOutputPath, WindowsMediaProfiles.HighQualityVideo))
            {
                renderer.Render();
            }
        }
    });

    // Trim to the requested duration and tell the user where the file landed.
    if (CutVideo(tempOutputPath, finalOutputPath, videoDuration))
    {
        Messaging.ShowMessage($"Your video was generated successfully. Look for your {finalOutputPath} file in app folder.", "Success");
    }
}
public void WatermarkVideoClip()
{
    // this demonstrates one way of watermarking a video clip...
    string outputFile = "WatermarkVideoClip.wmv";

    using (ITimeline timeline = new DefaultTimeline(15))
    {
        // create our default audio track
        timeline.AddAudioGroup().AddTrack();

        // add a video group, 32bpp, 320x240 (32bpp required to allow for an alpha channel)
        IGroup videoGroup = timeline.AddVideoGroup(32, 320, 240);

        // add our default video track
        ITrack videoTrack = videoGroup.AddTrack();

        // add another video track, this will be used to contain our watermark image
        ITrack watermarkTrack = videoGroup.AddTrack();

        // add the video in "transitions.wmv" to the first video track, and the audio in "transitions.wmv"
        // to the first audio track.
        timeline.AddVideoWithAudio("transitions.wmv");

        // add the watermark image in, and apply it for the duration of the videoContent
        // this image will be stretched to fit the video clip, and in this case is a transparent gif.
        IClip watermarkClip = watermarkTrack.AddImage("testlogo.gif", 0, videoTrack.Duration);

        // add an alpha setter effect to the image: this scales the image's alpha to 0.8
        // of its previous value, making the watermark partially transparent.
        // (An earlier comment claimed 0.5 / "50% transparent", which did not match the code.)
        watermarkClip.AddEffect(0, watermarkClip.Duration, StandardEffects.CreateAlphaSetterRamp(0.8));

        // add a transition to the watermark track, this allows the video clip to "shine through" the watermark,
        // based on the values present in the alpha channel of the watermark track.
        watermarkTrack.AddTransition(0, videoTrack.Duration,
                                     StandardTransitions.CreateKey(KeyTransitionType.Alpha, null, null, null, null, null),
                                     false);

        using (
            // render it to windows media
            var renderer = new WindowsMediaRenderer(timeline, outputFile, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }
}
private static void PopulateTimeline(ApplicationOptions options, ITimeline timeline)
{
    // One video group sized from the options, plus a single audio track holding
    // the soundtrack file.
    IGroup group = timeline.AddVideoGroup(32, options.Width, options.Height);
    ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
    IClip audioClip = audioTrack.AddAudio(options.SoundtrackFile);

    ITrack videoTrack = group.AddTrack();

    // Divide the soundtrack's duration evenly across the source images.
    double perImageDuration = audioClip.Duration / options.SourceImage.Length;
    foreach (var sourceImage in options.SourceImage)
    {
        videoTrack.AddImage(sourceImage, 0, perImageDuration);
    }
}
static void Main(string[] args)
{
    string picsFolder = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Pics");

    using (ITimeline timeline = new DefaultTimeline(30))
    {
        double fadeDuration = 1;

        // 30 fps, 32bpp, full-HD video group.
        IGroup group = timeline.AddVideoGroup("video", 30, 32, 1920, 1080);
        ITrack videoTrack = group.AddTrack();

        // Each .jpg becomes a 10-second slide; every slide after the first gets a
        // fade-out/fade-in pair straddling its start offset.
        List<string> imagePaths = Directory.EnumerateFiles(picsFolder, "*.jpg").ToList();
        int slideIndex = 0;
        foreach (string imagePath in imagePaths)
        {
            IClip clip = videoTrack.AddImage(imagePath, 0, 10);
            if (slideIndex > 0)
            {
                group.AddTransition(clip.Offset - fadeDuration, fadeDuration, StandardTransitions.CreateFade(), true);
                group.AddTransition(clip.Offset, fadeDuration, StandardTransitions.CreateFade(), false);
            }
            slideIndex++;
        }

        // Soundtrack: the first .wav found in the Audio folder.
        // NOTE(review): the x != null predicate is redundant (EnumerateFiles never
        // yields null), and audioPath itself may be null if the folder is empty —
        // AddAudio would then fail; kept as-is to preserve behavior.
        string audioFolder = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Audio");
        string audioPath = Directory.EnumerateFiles(audioFolder, "*.wav").FirstOrDefault(x => x != null);

        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audio = audioTrack.AddAudio(audioPath, 0, videoTrack.Duration);

        // Fade audio in over the first second and out over the last.
        audioTrack.AddEffect(0, audio.Duration, StandardEffects.CreateAudioEnvelope(1.0, 1.0, 1.0, audio.Duration));

        using (var renderer = new WindowsMediaRenderer(timeline, "output.wmv", WindowsMediaProfiles.FullHD))
        {
            renderer.Render();
        }
    }

    Console.WriteLine("Hello World!");
}
public void AddImageOverloads()
{
    // test all the overloads for AddVideo
    // Clips are appended sequentially, so each assert below depends on the
    // cumulative offsets produced by the preceding calls — do not reorder.
    using (ITimeline timeline = new DefaultTimeline())
    {
        ITrack track = timeline.AddVideoGroup(24, 320, 240).AddTrack();

        // (file) — appended at 0, default 1 s duration
        IClip clip1 = track.AddImage("image1.jpg");
        Assert.AreEqual(0, clip1.Offset);
        Assert.AreEqual(1, clip1.Duration);

        // (file, offset) — 1 s gap after clip1, so starts at 2
        IClip clip2 = track.AddImage("image1.jpg", 1);
        Assert.AreEqual(2, clip2.Offset);
        Assert.AreEqual(1, clip2.Duration);

        // (file, offset, clipEnd) — half-second clip appended at 3
        IClip clip3 = track.AddImage("image1.jpg", 0, 0.5);
        Assert.AreEqual(3, clip3.Offset);
        Assert.AreEqual(0.5, clip3.Duration);

        // (file, clipStart, clipEnd, mediaStart)
        IClip clip4 = track.AddImage("image1.jpg", 0, 0.5, 1.0);
        Assert.AreEqual(3.5, clip4.Offset);
        Assert.AreEqual(0.5, clip4.Duration);
        Assert.AreEqual(0.5, clip4.MediaStart);

        // NOTE: "Absoloute" is the actual (misspelled) member name of the
        // InsertPosition enum in this version of the Splicer library — do not "fix" it.
        // clipEnd of -1 means "use the default duration" (1 s).
        IClip clip5 = track.AddImage("image1.jpg", InsertPosition.Absoloute, 6, 0, -1);
        Assert.AreEqual(6, clip5.Offset);
        Assert.AreEqual(1, clip5.Duration);

        // (name, file, position, offset, clipStart, clipEnd) — named clip at absolute 8
        IClip clip6 = track.AddImage("myclip", "image1.jpg", InsertPosition.Absoloute, 8, 0, 0.5);
        Assert.AreEqual(8, clip6.Offset);
        Assert.AreEqual(0, clip6.MediaStart);
        Assert.AreEqual(0.5, clip6.Duration);
        Assert.AreEqual("myclip", clip6.Name);
    }
}
// Renders a 1920x1080 video from a single merged image plus its audio track,
// then removes the first queue entry and uploads the result.
// NOTE(review): the foreach below iterates the whole list but only the first
// element (simpleCount == 0) populates the metadata — a plain First() would do.
private void makeMovie(int imageName)
{
    int simpleCount = 0;
    string simpleAudio = "";
    string videoPath = "";
    string title = "";
    string description = "";
    string tags = "";
    foreach (VideoListModel videoItem in vList.getData())
    {
        if (simpleCount == 0)
        {
            title = videoItem.VideoName;
            description = videoItem.VideoDescription;
            tags = videoItem.VideoTags;
            simpleAudio = videoItem.AudioLocation;
            videoPath = Application.StartupPath + "\\VideoData\\video\\" + videoItem.VideoName + ".wmv";
        }
        simpleCount++;
    }

    label9.Text = title;
    Console.WriteLine(Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg");

    using (ITimeline timeline = new DefaultTimeline(1))
    {
        IGroup group = timeline.AddVideoGroup(32, 1920, 1080);
        ITrack videoTrack = group.AddTrack();

        // NOTE(review): clip1 is added with duration 0 and the same image is added
        // again below as clip2 for the full audio duration — confirm whether the
        // zero-length clip is intentional or leftover.
        IClip clip1 = videoTrack.AddImage(Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg", 0, 0);

        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();
        IClip audio = audioTrack.AddAudio(simpleAudio);

        // The image is shown for the entire duration of the audio clip.
        IClip clip2 = videoTrack.AddImage(Application.StartupPath + "\\VideoData\\images\\merged\\" + imageName.ToString() + ".jpg", 0, audio.Duration);

        // Progress events update the UI via participant_ProgressChanged.
        var participant = new PercentageProgressParticipant(timeline);
        participant.ProgressChanged += new EventHandler
            <Splicer.Renderer.ProgressChangedEventArgs>(participant_ProgressChanged);

        using (
            WindowsMediaRenderer renderer = new WindowsMediaRenderer(timeline, videoPath, WindowsMediaProfiles.HighQualityVideo))
        {
            renderer.Render();
        }
    }

    // Reset the progress UI and pop the rendered item off the queue.
    progressBar1.Value = 0;
    vList.getData().RemoveAt(0);
    vList.saveData();
    refreshList();

    try
    {
        //2. Get credentials and upload the file
        Run(title, description, videoPath, tags);
    }
    catch (AggregateException ex)
    {
        // Upload failures are logged per inner exception, not rethrown.
        foreach (var exception in ex.InnerExceptions)
        {
            Console.WriteLine(exception.Message);
        }
    }
}
// Renders every image in Output\ to an AVI file; when append is true, the new
// footage is concatenated onto the existing outputFile and swapped into place.
public static void createVideo(string outputFile, string tempFileName, int width, int height, bool append)
{
    // When not appending, render straight to the final output file.
    if (!append)
    {
        tempFileName = outputFile;
    }

    // Pass 1: render each frame image for 1/48 s to tempFileName.
    using (ITimeline timeline = new DefaultTimeline())
    {
        IGroup group = timeline.AddVideoGroup(32, width, height);
        ITrack videoTrack = group.AddTrack();

        foreach (var filename in Directory.GetFiles(@"Output\"))
        {
            videoTrack.AddImage(filename, 0, (1.0 / 48.0));
        }

        // NOTE(review): an empty audio track is added — presumably required by the
        // renderer setup; confirm before removing.
        ITrack audioTrack = timeline.AddAudioGroup().AddTrack();

        // FIX: removed the original try { ... } catch (Exception ex) { throw; }
        // wrapper — a pure rethrow with an unused variable is a no-op.
        using (AviFileRenderer renderer = new AviFileRenderer(timeline, tempFileName))
        {
            renderer.Render();
        }
    }

    if (append)
    {
        // Pass 2: concatenate the existing output with the fresh clip, then
        // replace the original file with the combined result.
        using (ITimeline timeline = new DefaultTimeline())
        {
            IGroup group = timeline.AddVideoGroup(32, width, height);
            ITrack videoTrack = group.AddTrack();

            var firstVideoClip = group.AddTrack().AddVideo(outputFile);
            var secondVideoClip = group.AddTrack().AddVideo(tempFileName, firstVideoClip.Duration);

            ITrack audioTrack = timeline.AddAudioGroup().AddTrack();

            // FIX: same no-op try/catch removed here as in pass 1.
            using (AviFileRenderer renderer = new AviFileRenderer(timeline, "_" + outputFile))
            {
                renderer.Render();
            }

            // NOTE(review): DoEvents + Sleep looks like a workaround for the renderer
            // releasing its file handles late — confirm before removing.
            Application.DoEvents();
            System.Threading.Thread.Sleep(1000);

            File.Delete(outputFile);
            File.Delete(tempFileName);
            File.Move("_" + outputFile, outputFile);
        }
    }
}