// Exports the song selected in the WAV/MP3 property page to a .wav or .mp3 file.
// Prompts for a destination only when no previous export filename is remembered;
// otherwise re-exports to the last location.
private void ExportWavMp3()
{
    var page   = dialog.GetPropertyPage((int)ExportFormat.WavMp3);
    var format = page.GetPropertyValue<string>(1);
    var isMp3  = format == "MP3";

    string path;
    if (lastExportFilename != null)
    {
        path = lastExportFilename;
    }
    else if (isMp3)
    {
        path = PlatformUtils.ShowSaveFileDialog("Export MP3 File", "MP3 Audio File (*.mp3)|*.mp3", ref Settings.LastExportFolder);
    }
    else
    {
        path = PlatformUtils.ShowSaveFileDialog("Export Wave File", "Wave Audio File (*.wav)|*.wav", ref Settings.LastExportFolder);
    }

    // User cancelled the save dialog.
    if (path == null)
        return;

    var songName   = page.GetPropertyValue<string>(0);
    var sampleRate = Convert.ToInt32(page.GetPropertyValue<string>(2), CultureInfo.InvariantCulture);
    var bitRate    = Convert.ToInt32(page.GetPropertyValue<string>(3), CultureInfo.InvariantCulture);

    // Mode 4 selects between loop-count-based and duration-based export;
    // the unused one is passed as -1.
    var mode      = page.GetPropertyValue<string>(4);
    var loopCount = mode != "Duration" ? page.GetPropertyValue<int>(5) : -1;
    var duration  = mode == "Duration" ? page.GetPropertyValue<int>(6) : -1;

    var separateFiles = page.GetPropertyValue<bool>(7);
    var separateIntro = page.GetPropertyValue<bool>(8);
    var channelFlags  = page.GetPropertyValue<bool[]>(9);

    var song = project.GetSong(songName);

    // Fold the per-channel checkboxes into a bitmask (bit i = channel i enabled).
    var channelMask = 0;
    for (var bit = 0; bit < channelFlags.Length; bit++)
    {
        if (channelFlags[bit])
            channelMask |= 1 << bit;
    }

    WavMp3ExportUtils.Save(song, path, sampleRate, loopCount, duration, channelMask, separateFiles, separateIntro,
        (samples, fn) =>
        {
            if (isMp3)
                Mp3File.Save(samples, fn, sampleRate, bitRate);
            else
                WaveFile.Save(samples, fn, sampleRate);
        });

    lastExportFilename = path;
}
// Command-line WAV/MP3 export. Reads export options prefixed with the target
// extension (e.g. "wav-export-rate" / "mp3-export-rate") and writes the file.
private void WavMp3Export(string filename, bool mp3)
{
    var extension = mp3 ? "mp3" : "wav";

    if (!ValidateExtension(filename, "." + extension))
        return;

    var songIndex  = ParseOption("export-song", 0);
    var sampleRate = ParseOption($"{extension}-export-rate", 44100);
    var loopCount  = ParseOption($"{extension}-export-loop", 1);
    var duration   = ParseOption($"{extension}-export-duration", 0);
    var mask       = ParseOption($"{extension}-export-channels", 0xff, true);
    var separate   = HasOption($"{extension}-export-separate-channels");
    var intro      = HasOption($"{extension}-export-separate-intro");
    var bitrate    = ParseOption($"{extension}-export-bitrate", 192);

    var song = GetProjectSong(songIndex);

    // A positive duration takes precedence over looping; otherwise loop at least once.
    loopCount = duration > 0 ? -1 : Math.Max(1, loopCount);

    if (song == null)
        return;

    WavMp3ExportUtils.Save(song, filename, sampleRate, loopCount, duration, mask, separate, intro,
        (samples, fn) =>
        {
            if (mp3)
                Mp3File.Save(samples, fn, sampleRate, bitrate);
            else
                WaveFile.Save(samples, fn, sampleRate);
        });
}
/// <summary>
/// Renders a song to a video file by piping raw ARGB frames into FFmpeg.
/// Each selected channel is drawn as a piano-roll strip (rendered into its own
/// off-screen surface, then composited rotated — see DrawRotatedFlippedBitmap)
/// with a channel name, icon and oscilloscope overlay on top.
/// </summary>
/// <param name="originalProject">Project to render; a deep clone is modified, the original is untouched.</param>
/// <param name="songId">Id of the song to render.</param>
/// <param name="loopCount">Number of times to loop the song; must be >= 1.</param>
/// <param name="ffmpegExecutable">Path to the FFmpeg executable.</param>
/// <param name="filename">Output video filename passed to FFmpeg.</param>
/// <param name="resX">Output video width in pixels.</param>
/// <param name="resY">Output video height in pixels.</param>
/// <param name="halfFrameRate">If true, halves the frame rate and emits every other frame.</param>
/// <param name="channelMask">Bitmask of channels to include; bit i = channel i. Must be non-zero.</param>
/// <param name="audioBitRate">AAC audio bitrate in kbps.</param>
/// <param name="videoBitRate">H264 video bitrate in kbps.</param>
/// <param name="pianoRollZoom">Zoom level for the piano-roll rendering.</param>
/// <returns>True on success, false on invalid arguments or initialization failure.</returns>
public unsafe bool Save(Project originalProject, int songId, int loopCount, string ffmpegExecutable, string filename, int resX, int resY, bool halfFrameRate, int channelMask, int audioBitRate, int videoBitRate, int pianoRollZoom)
{
    if (channelMask == 0 || loopCount < 1)
        return false;

    Log.LogMessage(LogSeverity.Info, "Detecting FFmpeg...");

    if (!DetectFFmpeg(ffmpegExecutable))
        return false;

    videoResX = resX;
    videoResY = resY;

    // Clone so that ExtendSongForLooping can mutate freely.
    var project = originalProject.DeepClone();
    var song = project.GetSong(songId);

    ExtendSongForLooping(song, loopCount);

    Log.LogMessage(LogSeverity.Info, "Initializing channels...");

    // NES/Famicom frame rates expressed as an exact fraction (numerator/100000).
    var frameRateNumerator = song.Project.PalMode ? 5000773 : 6009883;
    if (halfFrameRate)
        frameRateNumerator /= 2;
    var frameRate = frameRateNumerator.ToString() + "/100000";

    var numChannels = Utils.NumberOfSetBits(channelMask);
    var channelResXFloat = videoResX / (float)numChannels;

    // Channel strips are rendered sideways then composited rotated, hence the
    // swapped-looking X/Y here.
    var channelResX = videoResY;
    var channelResY = (int)channelResXFloat;

    var longestChannelName = 0.0f;
    var videoGraphics = RenderGraphics.Create(videoResX, videoResY, true);

    if (videoGraphics == null)
    {
        Log.LogMessage(LogSeverity.Error, "Error initializing off-screen graphics, aborting.");
        return false;
    }

    var theme = RenderTheme.CreateResourcesForGraphics(videoGraphics);
    var bmpWatermark = videoGraphics.CreateBitmapFromResource("VideoWatermark");

    // Generate WAV data for each individual channel for the oscilloscope.
    var channelStates = new List<VideoChannelState>();

    var maxAbsSample = 0;
    for (int i = 0, channelIndex = 0; i < song.Channels.Length; i++)
    {
        if ((channelMask & (1 << i)) == 0)
            continue;

        // NOTE(review): 'pattern' is unused; kept in case the indexer access is relied upon.
        var pattern = song.Channels[i].PatternInstances[0];

        var state = new VideoChannelState();

        state.videoChannelIndex = channelIndex;
        state.songChannelIndex = i;
        state.channel = song.Channels[i];
        state.patternIndex = 0;
        state.channelText = state.channel.Name + (state.channel.IsExpansionChannel ? $" ({song.Project.ExpansionAudioShortName})" : "");
        state.wav = new WavPlayer(SampleRate, 1, 1 << i).GetSongSamples(song, song.Project.PalMode, -1);
        state.graphics = RenderGraphics.Create(channelResX, channelResY, false);
        state.bitmap = videoGraphics.CreateBitmapFromOffscreenGraphics(state.graphics);

        channelStates.Add(state);
        channelIndex++;

        // Find maximum absolute value to rescale the waveform.
        foreach (short s in state.wav)
            maxAbsSample = Math.Max(maxAbsSample, Math.Abs(s));

        // Measure the longest text.
        longestChannelName = Math.Max(longestChannelName, state.graphics.MeasureString(state.channelText, ThemeBase.FontBigUnscaled));
    }

    // Tweak some cosmetic stuff that depends on resolution.
    var smallChannelText = longestChannelName + 32 + ChannelIconTextSpacing > channelResY * 0.8f;
    var bmpSuffix = smallChannelText ? "" : "@2x";
    var font = smallChannelText ? ThemeBase.FontMediumUnscaled : ThemeBase.FontBigUnscaled;
    var textOffsetY = smallChannelText ? 1 : 4;
    var pianoRollScaleX = Utils.Clamp(resY / 1080.0f, 0.6f, 0.9f);
    var pianoRollScaleY = channelResY < VeryThinNoteThreshold ? 0.5f : (channelResY < ThinNoteThreshold ? 0.667f : 1.0f);
    var channelLineWidth = channelResY < ThinNoteThreshold ? 3 : 5;
    var gradientSizeY = 256 * (videoResY / 1080.0f);
    var gradientBrush = videoGraphics.CreateVerticalGradientBrush(0, gradientSizeY, Color.Black, Color.FromArgb(0, Color.Black));

    foreach (var s in channelStates)
        s.bmpIcon = videoGraphics.CreateBitmapFromResource(ChannelType.Icons[s.channel.Type] + bmpSuffix);

    // Generate the metadata for the video so we know what's happening at every frame.
    var metadata = new VideoMetadataPlayer(SampleRate, 1).GetVideoMetadata(song, song.Project.PalMode, -1);

    var oscScale = maxAbsSample != 0 ? short.MaxValue / (float)maxAbsSample : 1.0f;
    var oscLookback = (metadata[1].wavOffset - metadata[0].wavOffset) / 2;

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
    var dummyControl = new DummyGLControl();
    dummyControl.Move(0, 0, videoResX, videoResY);
#endif

    // Setup piano roll and images.
    var pianoRoll = new PianoRoll();
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
    pianoRoll.Move(0, 0, channelResX, channelResY);
#else
    pianoRoll.Width = channelResX;
    pianoRoll.Height = channelResY;
#endif

    pianoRoll.StartVideoRecording(channelStates[0].graphics, song, pianoRollZoom, pianoRollScaleX, pianoRollScaleY, out var noteSizeY);

    // Build the scrolling data.
    var numVisibleNotes = (int)Math.Floor(channelResY / (float)noteSizeY);
    ComputeChannelsScroll(metadata, channelMask, numVisibleNotes);

    if (song.UsesFamiTrackerTempo)
        SmoothFamitrackerScrolling(metadata);
    else
        SmoothFamiStudioScrolling(metadata, song);

    var videoImage = new byte[videoResY * videoResX * 4];
    var oscilloscope = new float[channelResY, 2];

    // Start ffmpeg with pipe input.
    var tempFolder = Utils.GetTemporaryDiretory();
    var tempAudioFile = Path.Combine(tempFolder, "temp.wav");

#if !DEBUG
    try
#endif
    {
        Log.LogMessage(LogSeverity.Info, "Exporting audio...");

        // Save audio to temporary file.
        WavMp3ExportUtils.Save(song, tempAudioFile, SampleRate, 1, -1, channelMask, false, false,
            (samples, fn) => { WaveFile.Save(samples, fn, SampleRate); });

        // BUGFIX: the output path was hard-coded as "(unknown)" and the 'filename'
        // parameter was never used; pass the caller-provided filename to FFmpeg.
        var process = LaunchFFmpeg(ffmpegExecutable, $"-y -f rawvideo -pix_fmt argb -s {videoResX}x{videoResY} -r {frameRate} -i - -i \"{tempAudioFile}\" -c:v h264 -pix_fmt yuv420p -b:v {videoBitRate}K -c:a aac -b:a {audioBitRate}k \"{filename}\"", true, false);

        // Generate each of the video frames and write them to FFmpeg's stdin.
        using (var stream = new BinaryWriter(process.StandardInput.BaseStream))
        {
            for (int f = 0; f < metadata.Length; f++)
            {
                if (Log.ShouldAbortOperation)
                    break;

                if ((f % 100) == 0)
                    Log.LogMessage(LogSeverity.Info, $"Rendering frame {f} / {metadata.Length}");

                Log.ReportProgress(f / (float)(metadata.Length - 1));

                // At half frame rate, metadata is still computed per frame but
                // only even frames are emitted.
                if (halfFrameRate && (f & 1) != 0)
                    continue;

                var frame = metadata[f];

                // Render the piano rolls for each channels.
                foreach (var s in channelStates)
                {
                    s.volume = frame.channelVolumes[s.songChannelIndex];
                    s.note = frame.channelNotes[s.songChannelIndex];

                    // Note color: DPCM uses the mapped sample's color; other
                    // channels use the instrument color with volume-based alpha.
                    var color = Color.Transparent;
                    if (s.note.IsMusical)
                    {
                        if (s.channel.Type == ChannelType.Dpcm)
                        {
                            var mapping = project.GetDPCMMapping(s.note.Value);
                            if (mapping != null && mapping.Sample != null)
                                color = mapping.Sample.Color;
                        }
                        else
                        {
                            color = Color.FromArgb(128 + s.volume * 127 / 15, s.note.Instrument != null ? s.note.Instrument.Color : ThemeBase.DarkGreyFillColor2);
                        }
                    }

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                    s.graphics.BeginDraw(pianoRoll, channelResY);
#else
                    s.graphics.BeginDraw();
#endif
                    pianoRoll.RenderVideoFrame(s.graphics, Channel.ChannelTypeToIndex(s.channel.Type), frame.playPattern, frame.playNote, frame.scroll[s.songChannelIndex], s.note.Value, color);
                    s.graphics.EndDraw();
                }

                // Render the full screen overlay.
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                videoGraphics.BeginDraw(dummyControl, videoResY);
#else
                videoGraphics.BeginDraw();
#endif
                videoGraphics.Clear(Color.Black);

                // Composite the channel renders.
                foreach (var s in channelStates)
                {
                    int channelPosX1 = (int)Math.Round((s.videoChannelIndex + 1) * channelResXFloat);
                    videoGraphics.DrawRotatedFlippedBitmap(s.bitmap, channelPosX1, videoResY, s.bitmap.Size.Width, s.bitmap.Size.Height);
                }

                // Gradient
                videoGraphics.FillRectangle(0, 0, videoResX, gradientSizeY, gradientBrush);

                // Channel names + oscilloscope
                foreach (var s in channelStates)
                {
                    int channelPosX0 = (int)Math.Round((s.videoChannelIndex + 0) * channelResXFloat);
                    int channelPosX1 = (int)Math.Round((s.videoChannelIndex + 1) * channelResXFloat);

                    var channelNameSizeX = videoGraphics.MeasureString(s.channelText, font);
                    var channelIconPosX = channelPosX0 + channelResY / 2 - (channelNameSizeX + s.bmpIcon.Size.Width + ChannelIconTextSpacing) / 2;

                    videoGraphics.FillRectangle(channelIconPosX, ChannelIconPosY, channelIconPosX + s.bmpIcon.Size.Width, ChannelIconPosY + s.bmpIcon.Size.Height, theme.DarkGreyLineBrush2);
                    videoGraphics.DrawBitmap(s.bmpIcon, channelIconPosX, ChannelIconPosY);
                    videoGraphics.DrawText(s.channelText, font, channelIconPosX + s.bmpIcon.Size.Width + ChannelIconTextSpacing, ChannelIconPosY + textOffsetY, theme.LightGreyFillBrush1);

                    // Separator line between channels (not before the first one).
                    if (s.videoChannelIndex > 0)
                        videoGraphics.DrawLine(channelPosX0, 0, channelPosX0, videoResY, theme.BlackBrush, channelLineWidth);

                    var oscMinY = (int)(ChannelIconPosY + s.bmpIcon.Size.Height + 10);
                    var oscMaxY = (int)(oscMinY + 100.0f * (resY / 1080.0f));

                    GenerateOscilloscope(s.wav, frame.wavOffset, (int)Math.Round(SampleRate * OscilloscopeWindowSize), oscLookback, oscScale, channelPosX0 + 10, oscMinY, channelPosX1 - 10, oscMaxY, oscilloscope);

                    videoGraphics.AntiAliasing = true;
                    videoGraphics.DrawLine(oscilloscope, theme.LightGreyFillBrush1);
                    videoGraphics.AntiAliasing = false;
                }

                // Watermark.
                videoGraphics.DrawBitmap(bmpWatermark, videoResX - bmpWatermark.Size.Width, videoResY - bmpWatermark.Size.Height);
                videoGraphics.EndDraw();

                // Readback + send to ffmpeg.
                videoGraphics.GetBitmap(videoImage);
                stream.Write(videoImage);

                // Dump debug images.
                // DumpDebugImage(videoImage, videoResX, videoResY, f);
            }
        }

        process.WaitForExit();
        process.Dispose();
        process = null;

        File.Delete(tempAudioFile);
    }
#if !DEBUG
    catch (Exception e)
    {
        Log.LogMessage(LogSeverity.Error, "Error exporting video.");
        Log.LogMessage(LogSeverity.Error, e.Message);
    }
    finally
#endif
    {
        // Always release graphics resources, even on failure (release builds).
        pianoRoll.EndVideoRecording();
        foreach (var c in channelStates)
        {
            c.bmpIcon.Dispose();
            c.bitmap.Dispose();
            c.graphics.Dispose();
        }
        theme.Terminate();
        bmpWatermark.Dispose();
        gradientBrush.Dispose();
        videoGraphics.Dispose();
    }

    return true;
}