// Some OpenGL implementation applies sRGB to the alpha channel which is super
// wrong. We can detect that assuming the gradient we draw should be at 50%
// opacity in the middle.
bool DetectBadOpenGLAlpha(GLControl ctrl, RenderGraphics g, byte[] videoImage)
{
    // Draw a 256-pixel-tall vertical gradient going from opaque black down to
    // fully transparent, over a transparent clear.
    var gradient = g.CreateVerticalGradientBrush(0, 256, Color.FromArgb(255, 0, 0, 0), Color.FromArgb(0, 0, 0, 0));

    g.BeginDraw(ctrl, videoResY);
    g.Clear(Color.FromArgb(0, 0, 0, 0));
    g.FillRectangle(0, 0, videoResX, 256, gradient);
    g.EndDraw();
    g.GetBitmap(videoImage);

    gradient.Dispose();

    // Sample the alpha byte (offset +3, 4 bytes per pixel) of the first pixel of
    // row 128. A correct implementation yields ~50% opacity there; a deviation
    // beyond 5% means the driver applied an sRGB curve to alpha.
    var alphaAtMidpoint = videoImage[128 * videoResX * 4 + 3] / 255.0f;

    return Math.Abs(alphaAtMidpoint - 0.5f) > 0.05f;
}
// Exports a song to a video file. Each frame is rendered with RenderGraphics
// (full-screen overlay with channel names/icons/oscilloscopes, plus one rotated
// piano-roll image per channel), composited on the CPU, and piped to ffmpeg as
// raw ARGB video. A second ffmpeg pass then muxes in the exported audio and
// writes the final file at 'filename'.
// Returns false on invalid arguments, when ffmpeg cannot be detected, or when
// any error occurs during the export; true on success.
public unsafe bool Save(Project originalProject, int songId, int loopCount, string ffmpegExecutable, string filename, int channelMask, int audioBitRate, int videoBitRate, int pianoRollZoom, bool thinNotes)
{
    if (channelMask == 0 || loopCount < 1)
        return false;

    Log.LogMessage(LogSeverity.Info, "Detecting FFmpeg...");

    if (!DetectFFmpeg(ffmpegExecutable))
        return false;

    // Work on a deep clone so extending the song for looping never mutates the
    // caller's project.
    var project = originalProject.DeepClone();
    var song = project.GetSong(songId);

    ExtendSongForLooping(song, loopCount);

    Log.LogMessage(LogSeverity.Info, "Initializing channels...");

    // Exact NTSC/PAL NES frame rates expressed as rationals for ffmpeg's -r flag.
    var frameRate = song.Project.PalMode ? "5000773/100000" : "6009883/100000";
    var numChannels = Utils.NumberOfSetBits(channelMask);
    var channelResXFloat = videoResX / (float)numChannels;

    // Channel piano rolls are rendered rotated 90 degrees (see the x/y swap in
    // the composite loop below), so their X resolution is the video height and
    // their Y resolution is the width of one channel column.
    var channelResX = videoResY;
    var channelResY = (int)channelResXFloat;

    var channelGraphics = new RenderGraphics(channelResX, channelResY);
    var videoGraphics = new RenderGraphics(videoResX, videoResY);

    var theme = RenderTheme.CreateResourcesForGraphics(videoGraphics);
    var bmpWatermark = videoGraphics.CreateBitmapFromResource("VideoWatermark");

    // Generate WAV data for each individual channel for the oscilloscope.
    var channelStates = new List<VideoChannelState>();

    var maxAbsSample = 0;
    for (int i = 0, channelIndex = 0; i < song.Channels.Length; i++)
    {
        if ((channelMask & (1 << i)) == 0)
            continue;

        var state = new VideoChannelState();

        state.videoChannelIndex = channelIndex;
        state.songChannelIndex = i;
        state.channel = song.Channels[i];
        state.patternIndex = 0;
        state.channelText = state.channel.Name + (state.channel.IsExpansionChannel ? $" ({song.Project.ExpansionAudioShortName})" : "");
        state.bmp = videoGraphics.CreateBitmapFromResource(Channel.ChannelIcons[song.Channels[i].Type] + "@2x"); // HACK: Grab the 200% scaled version directly.
        state.wav = new WavPlayer(sampleRate, 1, 1 << i).GetSongSamples(song, song.Project.PalMode, -1);

        channelStates.Add(state);
        channelIndex++;

        // Find maximum absolute value to rescale the waveform.
        foreach (short s in state.wav)
            maxAbsSample = Math.Max(maxAbsSample, Math.Abs(s));
    }

    // Generate the metadata for the video so we know what's happening at every frame.
    var metadata = new VideoMetadataPlayer(sampleRate, 1).GetVideoMetadata(song, song.Project.PalMode, -1);

    var oscScale = maxAbsSample != 0 ? short.MaxValue / (float)maxAbsSample : 1.0f;
    var oscLookback = (metadata[1].wavOffset - metadata[0].wavOffset) / 2;

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
    var dummyControl = new DummyGLControl();
    dummyControl.Move(0, 0, videoResX, videoResY);
#endif

    // Setup piano roll and images.
    var pianoRoll = new PianoRoll();
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
    pianoRoll.Move(0, 0, channelResX, channelResY);
#else
    pianoRoll.Width = channelResX;
    pianoRoll.Height = channelResY;
#endif

    pianoRoll.StartVideoRecording(channelGraphics, song, pianoRollZoom, thinNotes, out var noteSizeY);

    // Build the scrolling data.
    var numVisibleNotes = (int)Math.Floor(channelResY / (float)noteSizeY);
    ComputeChannelsScroll(metadata, channelMask, numVisibleNotes);

    if (song.UsesFamiTrackerTempo)
        SmoothFamiTrackerTempo(metadata);

    var videoImage = new byte[videoResY * videoResX * 4];
    var channelImage = new byte[channelResY * channelResX * 4];
    var oscilloscope = new float[channelResY, 2];

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
    var badAlpha = DetectBadOpenGLAlpha(dummyControl, videoGraphics, videoImage);
#endif

    var success = true;

    // Start ffmpeg with pipe input.
    var tempFolder = Utils.GetTemporaryDiretory();
    var tempVideoFile = Path.Combine(tempFolder, "temp.h264");
    var tempAudioFile = Path.Combine(tempFolder, "temp.wav");

    try
    {
        var process = LaunchFFmpeg(ffmpegExecutable, $"-y -f rawvideo -pix_fmt argb -s {videoResX}x{videoResY} -r {frameRate} -i - -c:v libx264 -pix_fmt yuv420p -b:v {videoBitRate}M -an \"{tempVideoFile}\"", true, false);

        // Generate each of the video frames.
        using (var stream = new BinaryWriter(process.StandardInput.BaseStream))
        {
            for (int f = 0; f < metadata.Length; f++)
            {
                if (Log.ShouldAbortOperation)
                    break;

                if ((f % 100) == 0)
                    Log.LogMessage(LogSeverity.Info, $"Rendering frame {f} / {metadata.Length}");

                Log.ReportProgress(f / (float)(metadata.Length - 1));

                var frame = metadata[f];

                // Render the full screen overlay (channel names, icons, dividers, oscilloscopes).
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                videoGraphics.BeginDraw(dummyControl, videoResY);
#else
                videoGraphics.BeginDraw();
#endif
                videoGraphics.Clear(Color.FromArgb(0, 0, 0, 0));

                foreach (var s in channelStates)
                {
                    int channelPosX0 = (int)Math.Round((s.videoChannelIndex + 0) * channelResXFloat);
                    int channelPosX1 = (int)Math.Round((s.videoChannelIndex + 1) * channelResXFloat);

                    var channelNameSizeX = videoGraphics.MeasureString(s.channelText, ThemeBase.FontBigUnscaled);
                    var channelIconPosX = channelPosX0 + channelResY / 2 - (channelNameSizeX + s.bmp.Size.Width + channelIconTextSpacing) / 2;

                    videoGraphics.FillRectangle(channelIconPosX, channelIconPosY, channelIconPosX + s.bmp.Size.Width, channelIconPosY + s.bmp.Size.Height, theme.LightGreyFillBrush1);
                    videoGraphics.DrawBitmap(s.bmp, channelIconPosX, channelIconPosY);
                    videoGraphics.DrawText(s.channelText, ThemeBase.FontBigUnscaled, channelIconPosX + s.bmp.Size.Width + channelIconTextSpacing, channelTextPosY, theme.LightGreyFillBrush1);

                    if (s.videoChannelIndex > 0)
                        videoGraphics.DrawLine(channelPosX0, 0, channelPosX0, videoResY, theme.BlackBrush, 5);

                    GenerateOscilloscope(s.wav, frame.wavOffset, (int)Math.Round(sampleRate * oscilloscopeWindowSize), oscLookback, oscScale, channelPosX0 + 10, 60, channelPosX1 - 10, 160, oscilloscope);

                    videoGraphics.AntiAliasing = true;
                    videoGraphics.DrawLine(oscilloscope, theme.LightGreyFillBrush1);
                    videoGraphics.AntiAliasing = false;
                }

                videoGraphics.DrawBitmap(bmpWatermark, videoResX - bmpWatermark.Size.Width, videoResY - bmpWatermark.Size.Height);
                videoGraphics.EndDraw();
                videoGraphics.GetBitmap(videoImage);

                // Render the piano rolls for each channels.
                foreach (var s in channelStates)
                {
                    s.volume = frame.channelVolumes[s.songChannelIndex];
                    s.note = frame.channelNotes[s.songChannelIndex];

                    var color = Color.Transparent;

                    if (s.note.IsMusical)
                    {
                        if (s.channel.Type == Channel.Dpcm)
                            color = Color.FromArgb(210, ThemeBase.MediumGreyFillColor1);
                        else
                            color = Color.FromArgb(128 + s.volume * 127 / 15, s.note.Instrument != null ? s.note.Instrument.Color : ThemeBase.DarkGreyFillColor2);
                    }

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                    channelGraphics.BeginDraw(pianoRoll, channelResY);
#else
                    channelGraphics.BeginDraw();
#endif
                    pianoRoll.RenderVideoFrame(channelGraphics, Channel.ChannelTypeToIndex(s.channel.Type), frame.playPattern, frame.playNote, frame.scroll[s.songChannelIndex], s.note.Value, color);
                    channelGraphics.EndDraw();
                    channelGraphics.GetBitmap(channelImage);

                    // Composite the channel image with the full screen video overlay on the CPU.
                    // The channel image is rotated 90 degrees, hence the x/y swap in the indexing.
                    int channelPosX = (int)Math.Round(s.videoChannelIndex * channelResXFloat);
                    int channelPosY = 0;

                    for (int y = 0; y < channelResY; y++)
                    {
                        for (int x = 0; x < channelResX; x++)
                        {
                            int videoIdx = (channelPosY + x) * videoResX * 4 + (channelPosX + y) * 4;
                            int channelIdx = (channelResY - y - 1) * channelResX * 4 + (channelResX - x - 1) * 4;

                            byte videoA = videoImage[videoIdx + 3];
                            byte gradientA = (byte)(x < 255 ? 255 - x : 0); // Doing the gradient on CPU to look same on GL/D2D.

                            byte channelR = channelImage[channelIdx + 0];
                            byte channelG = channelImage[channelIdx + 1];
                            byte channelB = channelImage[channelIdx + 2];

                            if (videoA != 0 || gradientA != 0)
                            {
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                                // Fix bad sRGB alpha.
                                if (badAlpha)
                                    videoA = SRGBToLinear[videoA];
#endif
                                videoA = Math.Max(videoA, gradientA);

                                int videoR = videoImage[videoIdx + 0];
                                int videoG = videoImage[videoIdx + 1];
                                int videoB = videoImage[videoIdx + 2];

                                // Integer alpha blend.
                                // Note that alpha is pre-multiplied, so we if we multiply again, image will look aliased.
                                channelR = (byte)((channelR * (255 - videoA) + videoR * 255 /*videoA*/) >> 8);
                                channelG = (byte)((channelG * (255 - videoA) + videoG * 255 /*videoA*/) >> 8);
                                channelB = (byte)((channelB * (255 - videoA) + videoB * 255 /*videoA*/) >> 8);
                            }

                            // We byteswap here to match what ffmpeg expect.
                            videoImage[videoIdx + 3] = channelR;
                            videoImage[videoIdx + 2] = channelG;
                            videoImage[videoIdx + 1] = channelB;
                            videoImage[videoIdx + 0] = 255;

                            // To export images to debug.
                            //videoImage[videoIdx + 0] = channelR;
                            //videoImage[videoIdx + 1] = channelG;
                            //videoImage[videoIdx + 2] = channelB;
                            //videoImage[videoIdx + 3] = 255;
                        }
                    }

                    var prevChannelEndPosX = (int)Math.Round((s.videoChannelIndex - 1) * channelResXFloat) + channelResY;

                    // HACK: Since we round the channels positions, we can end up with columns of pixels that arent byteswapped.
                    if (s.videoChannelIndex > 0 && channelPosX != prevChannelEndPosX)
                    {
                        for (int y = 0; y < videoResY; y++)
                        {
                            int videoIdx = y * videoResX * 4 + (channelPosX - 1) * 4;

                            byte videoR = videoImage[videoIdx + 0];
                            byte videoG = videoImage[videoIdx + 1];
                            byte videoB = videoImage[videoIdx + 2];

                            videoImage[videoIdx + 3] = videoR;
                            videoImage[videoIdx + 2] = videoG;
                            videoImage[videoIdx + 1] = videoB;
                            videoImage[videoIdx + 0] = 255;
                        }
                    }
                }

                stream.Write(videoImage);

                // Dump debug images.
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                //var pb = new Gdk.Pixbuf(channelImage, true, 8, channelResX, channelResY, channelResX * 4);
                //pb.Save($"/home/mat/Downloads/channel.png", "png");
                //var pb = new Gdk.Pixbuf(videoImage, true, 8, videoResX, videoResY, videoResX * 4);
                //pb.Save($"/home/mat/Downloads/frame_{f:D4}.png", "png");
#else
                //fixed (byte* vp = &videoImage[0])
                //{
                //    var b = new System.Drawing.Bitmap(videoResX, videoResY, videoResX * 4, System.Drawing.Imaging.PixelFormat.Format32bppArgb, new IntPtr(vp));
                //    b.Save($"d:\\dump\\pr\\frame_{f:D4}.png");
                //}
#endif
            }
        }

        process.WaitForExit();
        process.Dispose();
        process = null;

        Log.LogMessage(LogSeverity.Info, "Exporting audio...");

        // Save audio to temporary file.
        WaveFile.Save(song, tempAudioFile, sampleRate, 1, -1, channelMask);

        Log.LogMessage(LogSeverity.Info, "Mixing audio and video...");

        // Run ffmpeg again to combine audio + video.
        // BUGFIX: write to the caller-provided 'filename'; previously the output
        // path was a hard-coded placeholder and the parameter was never used.
        process = LaunchFFmpeg(ffmpegExecutable, $"-y -i \"{tempVideoFile}\" -i \"{tempAudioFile}\" -c:v copy -c:a aac -b:a {audioBitRate}k \"{filename}\"", false, false);
        process.WaitForExit();
        process.Dispose();
        process = null;

        File.Delete(tempAudioFile);
        File.Delete(tempVideoFile);
    }
    catch (Exception e)
    {
        Log.LogMessage(LogSeverity.Error, "Error exporting video.");
        Log.LogMessage(LogSeverity.Error, e.Message);
        // BUGFIX: report failure to the caller; previously we still returned true here.
        success = false;
    }
    finally
    {
        pianoRoll.EndVideoRecording();
        foreach (var c in channelStates)
            c.bmp.Dispose();
        theme.Terminate();
        bmpWatermark.Dispose();
        channelGraphics.Dispose();
        videoGraphics.Dispose();
    }

    return success;
}