public void AddPianoRollItem(SongEvent e, float futureTick)
    {
        Vector3[] keyWidth = PianoDescriptor.getPianoDescriptor().getKeyWidth(e.KeyID, false);
        float     girth    = .5f * (keyWidth[0] - keyWidth[1]).magnitude;
        float     duration = Note.GetDuration(e.NoteType);
        float     length   = PIXELPERBEAT * duration;

        float center  = e.EndPoint - Timing.CurrentMeasure - (duration / 2f);
        float z_depth = center * PIXELPERBEAT;

        Vector3 midpoint = ((keyWidth[0] + keyWidth[1]) / 2f);
        float   x_center = midpoint.x;
        float   y_center = midpoint.y;

        GameObject cube = GameObject.CreatePrimitive(PrimitiveType.Cube);

        cube.GetComponent <MeshRenderer>().sharedMaterial = PianoRollMatRef;
        cube.name = "rolling" + e.KeyID.ToString().PadLeft(3, '0'); // zero-pad the key ID; a 3-digit width is an assumption
        cube.tag  = "PianoRoll";

        PianoRoll r = cube.AddComponent <PianoRoll>();

        r.e          = e;
        r.endPoint   = e.EndPoint;
        r.duration   = duration;
        r.startPoint = e.StartPoint;

        cube.transform.parent        = pianoRollContainer.transform;
        cube.transform.localPosition = new Vector3(x_center, y_center, z_depth);
        cube.transform.localScale    = new Vector3(girth, 0.01f, length);
        cube.transform.rotation      = GameObject.Find("MainApp").transform.rotation;
    }
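A minimal call-site sketch for AddPianoRollItem, assuming it lives on the same component as a scheduler that walks the song's events; ScheduleUpcomingEvents, lookaheadBeats, and the loop itself are illustrative assumptions, not taken from the source (requires System.Collections.Generic).
public void ScheduleUpcomingEvents(IEnumerable<SongEvent> events, float lookaheadBeats)
    {
        // Hypothetical scheduler: spawn a piano-roll cube for every event whose
        // start point falls within the next lookaheadBeats of the timeline.
        foreach (SongEvent e in events)
        {
            float ticksAhead = e.StartPoint - Timing.CurrentMeasure;
            if (ticksAhead >= 0f && ticksAhead <= lookaheadBeats)
            {
                AddPianoRollItem(e, ticksAhead);
            }
        }
    }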
Example #2
        public STT()
        {
            SDL.SDL_Init(SDL.SDL_INIT_EVERYTHING);
            window           = new Window();
            running          = true;
            desired.freq     = 44100;
            desired.samples  = 4096;
            desired.channels = 1;
            desired.format   = SDL.AUDIO_F32;
            desired.callback = Callback;
            device           = SDL.SDL_OpenAudioDevice(null, 0, ref desired, out var obtained, (int)SDL.SDL_AUDIO_ALLOW_ANY_CHANGE);
            SDL.SDL_PauseAudioDevice(device, 0);
            activeGen = 0;
            gens      = new List <Generator>();
            gens.Add(new SawGenerator());
            gens.Add(new SquareGenerator());
            gens.Add(new Organ());
            gens.Add(new SineGenerator());
            PianoRoll = new PianoRoll();

            PacketQueue = new ConcurrentQueue <byte[]>();

            Client = new UdpClient(25570);
            Console.WriteLine("Enter the other IP address:");
            string ip = Console.ReadLine();

            Client.Connect(ip, 25570);

            Client.BeginReceive(new AsyncCallback(onRecv), Client);
        }
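The constructor above hands a Callback method to desired.callback; the following is a minimal sketch of what such an SDL_AudioCallback could look like, assuming each Generator exposes a per-sample Next() method (the method name and body are assumptions, not shown in the source).
        private void Callback(IntPtr userdata, IntPtr stream, int len)
        {
            // Hypothetical AUDIO_F32 mono callback: fill the unmanaged SDL buffer
            // with samples produced by the currently active generator.
            int sampleCount = len / sizeof(float);
            var samples = new float[sampleCount];
            for (int i = 0; i < sampleCount; i++)
            {
                samples[i] = gens[activeGen].Next(); // assumed per-sample API
            }
            System.Runtime.InteropServices.Marshal.Copy(samples, 0, stream, sampleCount);
        }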
Example #3
 private void draw(double delta)
 {
     Renderer.Instance.SetDrawColor(23, 23, 23, 255);
     Renderer.Instance.Clear();
     PianoRoll.Draw();
     Renderer.Instance.Present();
 }
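A sketch of a loop that could drive a draw(delta) method like the one above; the Run method, the update call, and the stopwatch-based delta are assumptions, not from the source.
 private void Run()
 {
     // Illustrative main loop: compute the frame delta with a stopwatch and
     // hand it to update/draw each iteration while the app is running.
     var sw = System.Diagnostics.Stopwatch.StartNew();
     double last = 0.0;
     while (running)
     {
         double now = sw.Elapsed.TotalSeconds;
         double delta = now - last;
         last = now;
         update(delta); // hypothetical companion to draw()
         draw(delta);
     }
 }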
Example #4
        public FamiStudioControls(FamiStudioForm parent)
        {
            toolbar         = new Toolbar();
            sequencer       = new Sequencer();
            pianoRoll       = new PianoRoll();
            projectExplorer = new ProjectExplorer();

            controls[ControlToolbar]         = toolbar;
            controls[ControlSequencer]       = sequencer;
            controls[ControlPianoRoll]       = pianoRoll;
            controls[ControlProjectExplorer] = projectExplorer;

            foreach (var ctrl in controls)
            {
                ctrl.ParentForm = parent;
            }
        }
Example #5
        public FamiStudioControls(FamiStudioForm parent)
        {
            toolbar         = new Toolbar();
            sequencer       = new Sequencer();
            pianoRoll       = new PianoRoll();
            projectExplorer = new ProjectExplorer();
            quickAccessBar  = new QuickAccessBar();
            mobilePiano     = new MobilePiano();

            controls[0] = toolbar;
            controls[1] = sequencer;
            controls[2] = pianoRoll;
            controls[3] = projectExplorer;

            foreach (var ctrl in controls)
            {
                ctrl.ParentForm = parent;
            }
        }
Example #6
        public unsafe bool Save(Project originalProject, int songId, int loopCount, string filename, int resX, int resY, bool halfFrameRate, int channelMask, int audioBitRate, int videoBitRate, float pianoRollZoom, bool stereo, float[] pan)
        {
            if (!Initialize(channelMask, loopCount))
            {
                return(false);
            }

            videoResX = resX;
            videoResY = resY;

            var project = originalProject.DeepClone();
            var song    = project.GetSong(songId);

            ExtendSongForLooping(song, loopCount);

            // Save audio to temporary file.
            Log.LogMessage(LogSeverity.Info, "Exporting audio...");

            var tempFolder    = Utils.GetTemporaryDiretory();
            var tempAudioFile = Path.Combine(tempFolder, "temp.wav");

            AudioExportUtils.Save(song, tempAudioFile, SampleRate, 1, -1, channelMask, false, false, stereo, pan, (samples, samplesChannels, fn) => { WaveFile.Save(samples, fn, SampleRate, samplesChannels); });

            // Start encoder, must be done before any GL calls on Android.
            GetFrameRateInfo(song.Project, halfFrameRate, out var frameRateNumer, out var frameRateDenom);

            if (!videoEncoder.BeginEncoding(videoResX, videoResY, frameRateNumer, frameRateDenom, videoBitRate, audioBitRate, tempAudioFile, filename))
            {
                Log.LogMessage(LogSeverity.Error, "Error starting video encoder, aborting.");
                return(false);
            }

            Log.LogMessage(LogSeverity.Info, "Initializing channels...");

            var numChannels        = Utils.NumberOfSetBits(channelMask);
            var channelResXFloat   = videoResX / (float)numChannels;
            var channelResX        = videoResY;
            var channelResY        = (int)channelResXFloat;
            var longestChannelName = 0.0f;

            var videoGraphics = RenderGraphics.Create(videoResX, videoResY, true);

            if (videoGraphics == null)
            {
                Log.LogMessage(LogSeverity.Error, "Error initializing off-screen graphics, aborting.");
                return(false);
            }

            var themeResources = new ThemeRenderResources(videoGraphics);
            var bmpWatermark   = videoGraphics.CreateBitmapFromResource("VideoWatermark");

            // Generate WAV data for each individual channel for the oscilloscope.
            var channelStates = new List <VideoChannelState>();
            var maxAbsSample  = 0;

            for (int i = 0, channelIndex = 0; i < song.Channels.Length; i++)
            {
                if ((channelMask & (1 << i)) == 0)
                {
                    continue;
                }

                var pattern = song.Channels[i].PatternInstances[0];
                var state   = new VideoChannelState();

                state.videoChannelIndex = channelIndex;
                state.songChannelIndex  = i;
                state.channel           = song.Channels[i];
                state.channelText       = state.channel.NameWithExpansion;
                state.wav      = new WavPlayer(SampleRate, 1, 1 << i).GetSongSamples(song, song.Project.PalMode, -1);
                state.graphics = RenderGraphics.Create(channelResX, channelResY, false);
                state.bitmap   = videoGraphics.CreateBitmapFromOffscreenGraphics(state.graphics);

                channelStates.Add(state);
                channelIndex++;

                // Find maximum absolute value to rescale the waveform.
                foreach (int s in state.wav)
                {
                    maxAbsSample = Math.Max(maxAbsSample, Math.Abs(s));
                }

                // Measure the longest text.
                longestChannelName = Math.Max(longestChannelName, state.graphics.MeasureString(state.channelText, themeResources.FontVeryLarge));

                Log.ReportProgress(0.0f);
            }

            // Tweak some cosmetic stuff that depends on resolution.
            var smallChannelText = longestChannelName + 32 + ChannelIconTextSpacing > channelResY * 0.8f;
            var bmpSuffix        = smallChannelText ? "" : "@2x";
            var font             = smallChannelText ? themeResources.FontMedium : themeResources.FontVeryLarge;
            var textOffsetY      = smallChannelText ? 1 : 4;
            var pianoRollScaleX  = Utils.Clamp(resY / 1080.0f, 0.6f, 0.9f);
            var pianoRollScaleY  = channelResY < VeryThinNoteThreshold ? 0.5f : (channelResY < ThinNoteThreshold ? 0.667f : 1.0f);
            var channelLineWidth = channelResY < ThinNoteThreshold ? 3 : 5;
            var gradientSizeY    = 256 * (videoResY / 1080.0f);
            var gradientBrush    = videoGraphics.CreateVerticalGradientBrush(0, gradientSizeY, Color.Black, Color.FromArgb(0, Color.Black));

            foreach (var s in channelStates)
            {
                s.bmpIcon = videoGraphics.CreateBitmapFromResource(ChannelType.Icons[s.channel.Type] + bmpSuffix);
            }

            // Generate the metadata for the video so we know what's happening at every frame
            var metadata = new VideoMetadataPlayer(SampleRate, 1).GetVideoMetadata(song, song.Project.PalMode, -1);

            var oscScale    = maxAbsSample != 0 ? short.MaxValue / (float)maxAbsSample : 1.0f;
            var oscLookback = (metadata[1].wavOffset - metadata[0].wavOffset) / 2;

            // Setup piano roll and images.
            var pianoRoll = new PianoRoll();

            pianoRoll.Move(0, 0, channelResX, channelResY);
            pianoRoll.SetThemeRenderResource(themeResources);
            pianoRoll.StartVideoRecording(channelStates[0].graphics, song, pianoRollZoom, pianoRollScaleX, pianoRollScaleY, out var noteSizeY);

            // Build the scrolling data.
            var numVisibleNotes = (int)Math.Floor(channelResY / (float)noteSizeY);

            ComputeChannelsScroll(metadata, channelMask, numVisibleNotes);

            if (song.UsesFamiTrackerTempo)
            {
                SmoothFamitrackerScrolling(metadata);
            }
            else
            {
                SmoothFamiStudioScrolling(metadata, song);
            }

            var oscWindowSize = (int)Math.Round(SampleRate * OscilloscopeWindowSize);

            var videoImage   = new byte[videoResY * videoResX * 4];
            var oscilloscope = new float[oscWindowSize, 2];
            var success      = true;

#if !DEBUG
            try
#endif
            {
                // Generate each of the video frames.
                for (int f = 0; f < metadata.Length; f++)
                {
                    if (Log.ShouldAbortOperation)
                    {
                        success = false;
                        break;
                    }

                    if ((f % 100) == 0)
                    {
                        Log.LogMessage(LogSeverity.Info, $"Rendering frame {f} / {metadata.Length}");
                    }

                    Log.ReportProgress(f / (float)(metadata.Length - 1));

                    if (halfFrameRate && (f & 1) != 0)
                    {
                        continue;
                    }

                    var frame = metadata[f];

                    // Render the piano roll for each channel.
                    foreach (var s in channelStates)
                    {
                        var volume = frame.channelVolumes[s.songChannelIndex];
                        var note   = frame.channelNotes[s.songChannelIndex];

                        var color = Color.Transparent;

                        if (note.IsMusical)
                        {
                            if (s.channel.Type == ChannelType.Dpcm)
                            {
                                var mapping = project.GetDPCMMapping(note.Value);
                                if (mapping != null)
                                {
                                    color = mapping.Sample.Color;
                                }
                            }
                            else
                            {
                                color = Color.FromArgb(128 + volume * 127 / 15, note.Instrument != null ? note.Instrument.Color : Theme.LightGreyFillColor1);
                            }
                        }

                        s.graphics.BeginDrawFrame();
                        s.graphics.BeginDrawControl(new Rectangle(0, 0, channelResX, channelResY), channelResY);
                        pianoRoll.RenderVideoFrame(s.graphics, s.channel.Index, frame.playPattern, frame.playNote, frame.scroll[s.songChannelIndex], note.Value, color);
                        s.graphics.EndDrawControl();
                        s.graphics.EndDrawFrame();
                    }

                    // Render the full screen overlay.
                    videoGraphics.BeginDrawFrame();
                    videoGraphics.BeginDrawControl(new Rectangle(0, 0, videoResX, videoResY), videoResY);
                    videoGraphics.Clear(Color.Black);

                    var bg = videoGraphics.CreateCommandList();
                    var fg = videoGraphics.CreateCommandList();

                    // Composite the channel renders.
                    foreach (var s in channelStates)
                    {
                        int channelPosX0 = (int)Math.Round(s.videoChannelIndex * channelResXFloat);
                        bg.DrawBitmap(s.bitmap, channelPosX0, 0, s.bitmap.Size.Height, s.bitmap.Size.Width, 1.0f, 0, 0, 1, 1, true);
                    }

                    // Gradient
                    fg.FillRectangle(0, 0, videoResX, gradientSizeY, gradientBrush);

                    // Channel names + oscilloscope
                    foreach (var s in channelStates)
                    {
                        int channelPosX0 = (int)Math.Round((s.videoChannelIndex + 0) * channelResXFloat);
                        int channelPosX1 = (int)Math.Round((s.videoChannelIndex + 1) * channelResXFloat);

                        var channelNameSizeX = (int)videoGraphics.MeasureString(s.channelText, font);
                        var channelIconPosX  = channelPosX0 + channelResY / 2 - (channelNameSizeX + s.bmpIcon.Size.Width + ChannelIconTextSpacing) / 2;

                        fg.FillAndDrawRectangle(channelIconPosX, ChannelIconPosY, channelIconPosX + s.bmpIcon.Size.Width - 1, ChannelIconPosY + s.bmpIcon.Size.Height - 1, themeResources.DarkGreyLineBrush2, themeResources.LightGreyFillBrush1);
                        fg.DrawBitmap(s.bmpIcon, channelIconPosX, ChannelIconPosY, 1, Theme.LightGreyFillColor1);
                        fg.DrawText(s.channelText, font, channelIconPosX + s.bmpIcon.Size.Width + ChannelIconTextSpacing, ChannelIconPosY + textOffsetY, themeResources.LightGreyFillBrush1);

                        if (s.videoChannelIndex > 0)
                        {
                            fg.DrawLine(channelPosX0, 0, channelPosX0, videoResY, themeResources.BlackBrush, channelLineWidth);
                        }

                        var oscMinY = (int)(ChannelIconPosY + s.bmpIcon.Size.Height + 10);
                        var oscMaxY = (int)(oscMinY + 100.0f * (resY / 1080.0f));

                        // Intentionally flipping min/max Y since D3D is upside down compared to how we typically display waveforms.
                        GenerateOscilloscope(s.wav, frame.wavOffset, oscWindowSize, oscLookback, oscScale, channelPosX0 + 10, oscMaxY, channelPosX1 - 10, oscMinY, oscilloscope);

                        fg.DrawGeometry(oscilloscope, themeResources.LightGreyFillBrush1, 1, true);
                    }

                    // Watermark.
                    fg.DrawBitmap(bmpWatermark, videoResX - bmpWatermark.Size.Width, videoResY - bmpWatermark.Size.Height);

                    videoGraphics.DrawCommandList(bg);
                    videoGraphics.DrawCommandList(fg);
                    videoGraphics.EndDrawControl();
                    videoGraphics.EndDrawFrame();

                    // Readback
                    videoGraphics.GetBitmap(videoImage);

                    // Send to encoder.
                    videoEncoder.AddFrame(videoImage);

                    // Dump debug images.
                    // DumpDebugImage(videoImage, videoResX, videoResY, f);
                }

                videoEncoder.EndEncoding(!success);

                File.Delete(tempAudioFile);
            }
#if !DEBUG
            catch (Exception e)
            {
                Log.LogMessage(LogSeverity.Error, "Error exporting video.");
                Log.LogMessage(LogSeverity.Error, e.Message);
            }
            finally
#endif
            {
                pianoRoll.EndVideoRecording();
                foreach (var c in channelStates)
                {
                    c.bmpIcon.Dispose();
                    c.bitmap.Dispose();
                    c.graphics.Dispose();
                }
                themeResources.Dispose();
                bmpWatermark.Dispose();
                gradientBrush.Dispose();
                videoGraphics.Dispose();
            }

            return(true);
        }
Example #7
        public unsafe bool Save(Project originalProject, int songId, int loopCount, string ffmpegExecutable, string filename, int resX, int resY, bool halfFrameRate, int channelMask, int audioBitRate, int videoBitRate, int pianoRollZoom)
        {
            if (channelMask == 0 || loopCount < 1)
            {
                return(false);
            }

            Log.LogMessage(LogSeverity.Info, "Detecting FFmpeg...");

            if (!DetectFFmpeg(ffmpegExecutable))
            {
                return(false);
            }

            videoResX = resX;
            videoResY = resY;

            var project = originalProject.DeepClone();
            var song    = project.GetSong(songId);

            ExtendSongForLooping(song, loopCount);

            Log.LogMessage(LogSeverity.Info, "Initializing channels...");

            var frameRateNumerator = song.Project.PalMode ? 5000773 : 6009883;

            if (halfFrameRate)
            {
                frameRateNumerator /= 2;
            }
            var frameRate = frameRateNumerator.ToString() + "/100000";

            var numChannels        = Utils.NumberOfSetBits(channelMask);
            var channelResXFloat   = videoResX / (float)numChannels;
            var channelResX        = videoResY;
            var channelResY        = (int)channelResXFloat;
            var longestChannelName = 0.0f;

            var videoGraphics = RenderGraphics.Create(videoResX, videoResY, true);

            if (videoGraphics == null)
            {
                Log.LogMessage(LogSeverity.Error, "Error initializing off-screen graphics, aborting.");
                return(false);
            }

            var theme        = RenderTheme.CreateResourcesForGraphics(videoGraphics);
            var bmpWatermark = videoGraphics.CreateBitmapFromResource("VideoWatermark");

            // Generate WAV data for each individual channel for the oscilloscope.
            var channelStates = new List <VideoChannelState>();

            List <short[]> channelsWavData = new List <short[]>();
            var            maxAbsSample    = 0;

            for (int i = 0, channelIndex = 0; i < song.Channels.Length; i++)
            {
                if ((channelMask & (1 << i)) == 0)
                {
                    continue;
                }

                var pattern = song.Channels[i].PatternInstances[0];
                var state   = new VideoChannelState();

                state.videoChannelIndex = channelIndex;
                state.songChannelIndex  = i;
                state.channel           = song.Channels[i];
                state.patternIndex      = 0;
                state.channelText       = state.channel.Name + (state.channel.IsExpansionChannel ? $" ({song.Project.ExpansionAudioShortName})" : "");
                state.wav      = new WavPlayer(SampleRate, 1, 1 << i).GetSongSamples(song, song.Project.PalMode, -1);
                state.graphics = RenderGraphics.Create(channelResX, channelResY, false);
                state.bitmap   = videoGraphics.CreateBitmapFromOffscreenGraphics(state.graphics);

                channelStates.Add(state);
                channelIndex++;

                // Find maximum absolute value to rescale the waveform.
                foreach (short s in state.wav)
                {
                    maxAbsSample = Math.Max(maxAbsSample, Math.Abs(s));
                }

                // Measure the longest text.
                longestChannelName = Math.Max(longestChannelName, state.graphics.MeasureString(state.channelText, ThemeBase.FontBigUnscaled));
            }

            // Tweak some cosmetic stuff that depends on resolution.
            var smallChannelText = longestChannelName + 32 + ChannelIconTextSpacing > channelResY * 0.8f;
            var bmpSuffix        = smallChannelText ? "" : "@2x";
            var font             = smallChannelText ? ThemeBase.FontMediumUnscaled : ThemeBase.FontBigUnscaled;
            var textOffsetY      = smallChannelText ? 1 : 4;
            var pianoRollScaleX  = Utils.Clamp(resY / 1080.0f, 0.6f, 0.9f);
            var pianoRollScaleY  = channelResY < VeryThinNoteThreshold ? 0.5f : (channelResY < ThinNoteThreshold ? 0.667f : 1.0f);
            var channelLineWidth = channelResY < ThinNoteThreshold ? 3 : 5;
            var gradientSizeY    = 256 * (videoResY / 1080.0f);
            var gradientBrush    = videoGraphics.CreateVerticalGradientBrush(0, gradientSizeY, Color.Black, Color.FromArgb(0, Color.Black));

            foreach (var s in channelStates)
            {
                s.bmpIcon = videoGraphics.CreateBitmapFromResource(ChannelType.Icons[s.channel.Type] + bmpSuffix);
            }

            // Generate the metadata for the video so we know what's happening at every frame
            var metadata = new VideoMetadataPlayer(SampleRate, 1).GetVideoMetadata(song, song.Project.PalMode, -1);

            var oscScale    = maxAbsSample != 0 ? short.MaxValue / (float)maxAbsSample : 1.0f;
            var oscLookback = (metadata[1].wavOffset - metadata[0].wavOffset) / 2;

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
            var dummyControl = new DummyGLControl();
            dummyControl.Move(0, 0, videoResX, videoResY);
#endif

            // Setup piano roll and images.
            var pianoRoll = new PianoRoll();
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
            pianoRoll.Move(0, 0, channelResX, channelResY);
#else
            pianoRoll.Width  = channelResX;
            pianoRoll.Height = channelResY;
#endif

            pianoRoll.StartVideoRecording(channelStates[0].graphics, song, pianoRollZoom, pianoRollScaleX, pianoRollScaleY, out var noteSizeY);

            // Build the scrolling data.
            var numVisibleNotes = (int)Math.Floor(channelResY / (float)noteSizeY);
            ComputeChannelsScroll(metadata, channelMask, numVisibleNotes);

            if (song.UsesFamiTrackerTempo)
            {
                SmoothFamitrackerScrolling(metadata);
            }
            else
            {
                SmoothFamiStudioScrolling(metadata, song);
            }

            var videoImage   = new byte[videoResY * videoResX * 4];
            var oscilloscope = new float[channelResY, 2];

            // Start ffmpeg with pipe input.
            var tempFolder    = Utils.GetTemporaryDiretory();
            var tempAudioFile = Path.Combine(tempFolder, "temp.wav");

#if !DEBUG
            try
#endif
            {
                Log.LogMessage(LogSeverity.Info, "Exporting audio...");

                // Save audio to temporary file.
                WavMp3ExportUtils.Save(song, tempAudioFile, SampleRate, 1, -1, channelMask, false, false, (samples, fn) => { WaveFile.Save(samples, fn, SampleRate); });

                var process = LaunchFFmpeg(ffmpegExecutable, $"-y -f rawvideo -pix_fmt argb -s {videoResX}x{videoResY} -r {frameRate} -i - -i \"{tempAudioFile}\" -c:v h264 -pix_fmt yuv420p -b:v {videoBitRate}K -c:a aac -b:a {audioBitRate}k \"{filename}\"", true, false);

                // Generate each of the video frames.
                using (var stream = new BinaryWriter(process.StandardInput.BaseStream))
                {
                    for (int f = 0; f < metadata.Length; f++)
                    {
                        if (Log.ShouldAbortOperation)
                        {
                            break;
                        }

                        if ((f % 100) == 0)
                        {
                            Log.LogMessage(LogSeverity.Info, $"Rendering frame {f} / {metadata.Length}");
                        }

                        Log.ReportProgress(f / (float)(metadata.Length - 1));

                        if (halfFrameRate && (f & 1) != 0)
                        {
                            continue;
                        }

                        var frame = metadata[f];

                        // Render the piano roll for each channel.
                        foreach (var s in channelStates)
                        {
                            s.volume = frame.channelVolumes[s.songChannelIndex];
                            s.note   = frame.channelNotes[s.songChannelIndex];

                            var color = Color.Transparent;

                            if (s.note.IsMusical)
                            {
                                if (s.channel.Type == ChannelType.Dpcm)
                                {
                                    var mapping = project.GetDPCMMapping(s.note.Value);
                                    if (mapping != null && mapping.Sample != null)
                                    {
                                        color = mapping.Sample.Color;
                                    }
                                }
                                else
                                {
                                    color = Color.FromArgb(128 + s.volume * 127 / 15, s.note.Instrument != null ? s.note.Instrument.Color : ThemeBase.DarkGreyFillColor2);
                                }
                            }

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                            s.graphics.BeginDraw(pianoRoll, channelResY);
#else
                            s.graphics.BeginDraw();
#endif
                            pianoRoll.RenderVideoFrame(s.graphics, Channel.ChannelTypeToIndex(s.channel.Type), frame.playPattern, frame.playNote, frame.scroll[s.songChannelIndex], s.note.Value, color);
                            s.graphics.EndDraw();
                        }

                        // Render the full screen overlay.
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                        videoGraphics.BeginDraw(dummyControl, videoResY);
#else
                        videoGraphics.BeginDraw();
#endif
                        videoGraphics.Clear(Color.Black);

                        // Composite the channel renders.
                        foreach (var s in channelStates)
                        {
                            int channelPosX1 = (int)Math.Round((s.videoChannelIndex + 1) * channelResXFloat);
                            videoGraphics.DrawRotatedFlippedBitmap(s.bitmap, channelPosX1, videoResY, s.bitmap.Size.Width, s.bitmap.Size.Height);
                        }

                        // Gradient
                        videoGraphics.FillRectangle(0, 0, videoResX, gradientSizeY, gradientBrush);

                        // Channel names + oscilloscope
                        foreach (var s in channelStates)
                        {
                            int channelPosX0 = (int)Math.Round((s.videoChannelIndex + 0) * channelResXFloat);
                            int channelPosX1 = (int)Math.Round((s.videoChannelIndex + 1) * channelResXFloat);

                            var channelNameSizeX = videoGraphics.MeasureString(s.channelText, font);
                            var channelIconPosX  = channelPosX0 + channelResY / 2 - (channelNameSizeX + s.bmpIcon.Size.Width + ChannelIconTextSpacing) / 2;

                            videoGraphics.FillRectangle(channelIconPosX, ChannelIconPosY, channelIconPosX + s.bmpIcon.Size.Width, ChannelIconPosY + s.bmpIcon.Size.Height, theme.DarkGreyLineBrush2);
                            videoGraphics.DrawBitmap(s.bmpIcon, channelIconPosX, ChannelIconPosY);
                            videoGraphics.DrawText(s.channelText, font, channelIconPosX + s.bmpIcon.Size.Width + ChannelIconTextSpacing, ChannelIconPosY + textOffsetY, theme.LightGreyFillBrush1);

                            if (s.videoChannelIndex > 0)
                            {
                                videoGraphics.DrawLine(channelPosX0, 0, channelPosX0, videoResY, theme.BlackBrush, channelLineWidth);
                            }

                            var oscMinY = (int)(ChannelIconPosY + s.bmpIcon.Size.Height + 10);
                            var oscMaxY = (int)(oscMinY + 100.0f * (resY / 1080.0f));

                            GenerateOscilloscope(s.wav, frame.wavOffset, (int)Math.Round(SampleRate * OscilloscopeWindowSize), oscLookback, oscScale, channelPosX0 + 10, oscMinY, channelPosX1 - 10, oscMaxY, oscilloscope);

                            videoGraphics.AntiAliasing = true;
                            videoGraphics.DrawLine(oscilloscope, theme.LightGreyFillBrush1);
                            videoGraphics.AntiAliasing = false;
                        }

                        // Watermark.
                        videoGraphics.DrawBitmap(bmpWatermark, videoResX - bmpWatermark.Size.Width, videoResY - bmpWatermark.Size.Height);
                        videoGraphics.EndDraw();

                        // Readback + send to ffmpeg.
                        videoGraphics.GetBitmap(videoImage);
                        stream.Write(videoImage);

                        // Dump debug images.
                        // DumpDebugImage(videoImage, videoResX, videoResY, f);
                    }
                }

                process.WaitForExit();
                process.Dispose();
                process = null;

                File.Delete(tempAudioFile);
            }
#if !DEBUG
            catch (Exception e)
            {
                Log.LogMessage(LogSeverity.Error, "Error exporting video.");
                Log.LogMessage(LogSeverity.Error, e.Message);
            }
            finally
#endif
            {
                pianoRoll.EndVideoRecording();
                foreach (var c in channelStates)
                {
                    c.bmpIcon.Dispose();
                    c.bitmap.Dispose();
                    c.graphics.Dispose();
                }
                theme.Terminate();
                bmpWatermark.Dispose();
                gradientBrush.Dispose();
                videoGraphics.Dispose();
            }

            return(true);
        }
Example #8
        public unsafe bool Save(Project originalProject, int songId, int loopCount, string ffmpegExecutable, string filename, int channelMask, int audioBitRate, int videoBitRate, int pianoRollZoom, bool thinNotes)
        {
            if (channelMask == 0 || loopCount < 1)
            {
                return(false);
            }

            Log.LogMessage(LogSeverity.Info, "Detecting FFmpeg...");

            if (!DetectFFmpeg(ffmpegExecutable))
            {
                return(false);
            }

            var project = originalProject.DeepClone();
            var song    = project.GetSong(songId);

            ExtendSongForLooping(song, loopCount);

            Log.LogMessage(LogSeverity.Info, "Initializing channels...");

            var frameRate        = song.Project.PalMode ? "5000773/100000" : "6009883/100000";
            var numChannels      = Utils.NumberOfSetBits(channelMask);
            var channelResXFloat = videoResX / (float)numChannels;
            var channelResX      = videoResY;
            var channelResY      = (int)channelResXFloat;

            var channelGraphics = new RenderGraphics(channelResX, channelResY);
            var videoGraphics   = new RenderGraphics(videoResX, videoResY);

            var theme        = RenderTheme.CreateResourcesForGraphics(videoGraphics);
            var bmpWatermark = videoGraphics.CreateBitmapFromResource("VideoWatermark");

            // Generate WAV data for each individual channel for the oscilloscope.
            var channelStates = new List <VideoChannelState>();

            List <short[]> channelsWavData = new List <short[]>();
            var            maxAbsSample    = 0;

            for (int i = 0, channelIndex = 0; i < song.Channels.Length; i++)
            {
                if ((channelMask & (1 << i)) == 0)
                {
                    continue;
                }

                var pattern = song.Channels[i].PatternInstances[0];
                var state   = new VideoChannelState();

                state.videoChannelIndex = channelIndex;
                state.songChannelIndex  = i;
                state.channel           = song.Channels[i];
                state.patternIndex      = 0;
                state.channelText       = state.channel.Name + (state.channel.IsExpansionChannel ? $" ({song.Project.ExpansionAudioShortName})" : "");
                state.bmp = videoGraphics.CreateBitmapFromResource(Channel.ChannelIcons[song.Channels[i].Type] + "@2x"); // HACK: Grab the 200% scaled version directly.
                state.wav = new WavPlayer(sampleRate, 1, 1 << i).GetSongSamples(song, song.Project.PalMode, -1);

                channelStates.Add(state);
                channelIndex++;

                // Find maximum absolute value to rescale the waveform.
                foreach (short s in state.wav)
                {
                    maxAbsSample = Math.Max(maxAbsSample, Math.Abs(s));
                }
            }

            // Generate the metadata for the video so we know what's happening at every frame
            var metadata = new VideoMetadataPlayer(sampleRate, 1).GetVideoMetadata(song, song.Project.PalMode, -1);

            var oscScale    = maxAbsSample != 0 ? short.MaxValue / (float)maxAbsSample : 1.0f;
            var oscLookback = (metadata[1].wavOffset - metadata[0].wavOffset) / 2;

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
            var dummyControl = new DummyGLControl();
            dummyControl.Move(0, 0, videoResX, videoResY);
#endif

            // Setup piano roll and images.
            var pianoRoll = new PianoRoll();
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
            pianoRoll.Move(0, 0, channelResX, channelResY);
#else
            pianoRoll.Width  = channelResX;
            pianoRoll.Height = channelResY;
#endif
            pianoRoll.StartVideoRecording(channelGraphics, song, pianoRollZoom, thinNotes, out var noteSizeY);

            // Build the scrolling data.
            var numVisibleNotes = (int)Math.Floor(channelResY / (float)noteSizeY);
            ComputeChannelsScroll(metadata, channelMask, numVisibleNotes);

            if (song.UsesFamiTrackerTempo)
            {
                SmoothFamiTrackerTempo(metadata);
            }

            var videoImage   = new byte[videoResY * videoResX * 4];
            var channelImage = new byte[channelResY * channelResX * 4];
            var oscilloscope = new float[channelResY, 2];

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
            var badAlpha = DetectBadOpenGLAlpha(dummyControl, videoGraphics, videoImage);
#endif

            // Start ffmpeg with pipe input.
            var tempFolder    = Utils.GetTemporaryDiretory();
            var tempVideoFile = Path.Combine(tempFolder, "temp.h264");
            var tempAudioFile = Path.Combine(tempFolder, "temp.wav");

            try
            {
                var process = LaunchFFmpeg(ffmpegExecutable, $"-y -f rawvideo -pix_fmt argb -s {videoResX}x{videoResY} -r {frameRate} -i - -c:v libx264 -pix_fmt yuv420p -b:v {videoBitRate}M -an \"{tempVideoFile}\"", true, false);

                // Generate each of the video frames.
                using (var stream = new BinaryWriter(process.StandardInput.BaseStream))
                {
                    for (int f = 0; f < metadata.Length; f++)
                    {
                        if (Log.ShouldAbortOperation)
                        {
                            break;
                        }

                        if ((f % 100) == 0)
                        {
                            Log.LogMessage(LogSeverity.Info, $"Rendering frame {f} / {metadata.Length}");
                        }

                        Log.ReportProgress(f / (float)(metadata.Length - 1));

                        var frame = metadata[f];

                        // Render the full screen overlay.
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                        videoGraphics.BeginDraw(dummyControl, videoResY);
#else
                        videoGraphics.BeginDraw();
#endif
                        videoGraphics.Clear(Color.FromArgb(0, 0, 0, 0));

                        foreach (var s in channelStates)
                        {
                            int channelPosX0 = (int)Math.Round((s.videoChannelIndex + 0) * channelResXFloat);
                            int channelPosX1 = (int)Math.Round((s.videoChannelIndex + 1) * channelResXFloat);

                            var channelNameSizeX = videoGraphics.MeasureString(s.channelText, ThemeBase.FontBigUnscaled);
                            var channelIconPosX  = channelPosX0 + channelResY / 2 - (channelNameSizeX + s.bmp.Size.Width + channelIconTextSpacing) / 2;

                            videoGraphics.FillRectangle(channelIconPosX, channelIconPosY, channelIconPosX + s.bmp.Size.Width, channelIconPosY + s.bmp.Size.Height, theme.LightGreyFillBrush1);
                            videoGraphics.DrawBitmap(s.bmp, channelIconPosX, channelIconPosY);
                            videoGraphics.DrawText(s.channelText, ThemeBase.FontBigUnscaled, channelIconPosX + s.bmp.Size.Width + channelIconTextSpacing, channelTextPosY, theme.LightGreyFillBrush1);

                            if (s.videoChannelIndex > 0)
                            {
                                videoGraphics.DrawLine(channelPosX0, 0, channelPosX0, videoResY, theme.BlackBrush, 5);
                            }

                            GenerateOscilloscope(s.wav, frame.wavOffset, (int)Math.Round(sampleRate * oscilloscopeWindowSize), oscLookback, oscScale, channelPosX0 + 10, 60, channelPosX1 - 10, 160, oscilloscope);

                            videoGraphics.AntiAliasing = true;
                            videoGraphics.DrawLine(oscilloscope, theme.LightGreyFillBrush1);
                            videoGraphics.AntiAliasing = false;
                        }

                        videoGraphics.DrawBitmap(bmpWatermark, videoResX - bmpWatermark.Size.Width, videoResY - bmpWatermark.Size.Height);
                        videoGraphics.EndDraw();
                        videoGraphics.GetBitmap(videoImage);

                        // Render the piano roll for each channel.
                        foreach (var s in channelStates)
                        {
                            s.volume = frame.channelVolumes[s.songChannelIndex];
                            s.note   = frame.channelNotes[s.songChannelIndex];

                            var color = Color.Transparent;

                            if (s.note.IsMusical)
                            {
                                if (s.channel.Type == Channel.Dpcm)
                                {
                                    color = Color.FromArgb(210, ThemeBase.MediumGreyFillColor1);
                                }
                                else
                                {
                                    color = Color.FromArgb(128 + s.volume * 127 / 15, s.note.Instrument != null ? s.note.Instrument.Color : ThemeBase.DarkGreyFillColor2);
                                }
                            }

#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                            channelGraphics.BeginDraw(pianoRoll, channelResY);
#else
                            channelGraphics.BeginDraw();
#endif
                            pianoRoll.RenderVideoFrame(channelGraphics, Channel.ChannelTypeToIndex(s.channel.Type), frame.playPattern, frame.playNote, frame.scroll[s.songChannelIndex], s.note.Value, color);
                            channelGraphics.EndDraw();

                            channelGraphics.GetBitmap(channelImage);

                            // Composite the channel image with the full screen video overlay on the CPU.
                            int channelPosX = (int)Math.Round(s.videoChannelIndex * channelResXFloat);
                            int channelPosY = 0;

                            for (int y = 0; y < channelResY; y++)
                            {
                                for (int x = 0; x < channelResX; x++)
                                {
                                    int videoIdx   = (channelPosY + x) * videoResX * 4 + (channelPosX + y) * 4;
                                    int channelIdx = (channelResY - y - 1) * channelResX * 4 + (channelResX - x - 1) * 4;

                                    byte videoA    = videoImage[videoIdx + 3];
                                    byte gradientA = (byte)(x < 255 ? 255 - x : 0); // Doing the gradient on the CPU so it looks the same on GL/D2D.

                                    byte channelR = channelImage[channelIdx + 0];
                                    byte channelG = channelImage[channelIdx + 1];
                                    byte channelB = channelImage[channelIdx + 2];

                                    if (videoA != 0 || gradientA != 0)
                                    {
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                                        // Fix bad sRGB alpha.
                                        if (badAlpha)
                                        {
                                            videoA = SRGBToLinear[videoA];
                                        }
#endif
                                        videoA = Math.Max(videoA, gradientA);

                                        int videoR = videoImage[videoIdx + 0];
                                        int videoG = videoImage[videoIdx + 1];
                                        int videoB = videoImage[videoIdx + 2];

                                        // Integer alpha blend.
                                        // Note that alpha is pre-multiplied, so if we multiply again the image will look aliased.
                                        channelR = (byte)((channelR * (255 - videoA) + videoR * 255 /*videoA*/) >> 8);
                                        channelG = (byte)((channelG * (255 - videoA) + videoG * 255 /*videoA*/) >> 8);
                                        channelB = (byte)((channelB * (255 - videoA) + videoB * 255 /*videoA*/) >> 8);
                                    }

                                    // We byteswap here to match what ffmpeg expects.
                                    videoImage[videoIdx + 3] = channelR;
                                    videoImage[videoIdx + 2] = channelG;
                                    videoImage[videoIdx + 1] = channelB;
                                    videoImage[videoIdx + 0] = 255;

                                    // To export images to debug.
                                    //videoImage[videoIdx + 0] = channelR;
                                    //videoImage[videoIdx + 1] = channelG;
                                    //videoImage[videoIdx + 2] = channelB;
                                    //videoImage[videoIdx + 3] = 255;
                                }
                            }

                            var prevChannelEndPosX = (int)Math.Round((s.videoChannelIndex - 1) * channelResXFloat) + channelResY;

                            // HACK: Since we round the channel positions, we can end up with columns of pixels that aren't byteswapped.
                            if (s.videoChannelIndex > 0 && channelPosX != prevChannelEndPosX)
                            {
                                for (int y = 0; y < videoResY; y++)
                                {
                                    int videoIdx = y * videoResX * 4 + (channelPosX - 1) * 4;

                                    byte videoR = videoImage[videoIdx + 0];
                                    byte videoG = videoImage[videoIdx + 1];
                                    byte videoB = videoImage[videoIdx + 2];

                                    videoImage[videoIdx + 3] = videoR;
                                    videoImage[videoIdx + 2] = videoG;
                                    videoImage[videoIdx + 1] = videoB;
                                    videoImage[videoIdx + 0] = 255;
                                }
                            }
                        }

                        stream.Write(videoImage);

                        // Dump debug images.
#if FAMISTUDIO_LINUX || FAMISTUDIO_MACOS
                        //var pb = new Gdk.Pixbuf(channelImage, true, 8, channelResX, channelResY, channelResX * 4);
                        //pb.Save($"/home/mat/Downloads/channel.png", "png");
                        //var pb = new Gdk.Pixbuf(videoImage, true, 8, videoResX, videoResY, videoResX * 4);
                        //pb.Save($"/home/mat/Downloads/frame_{f:D4}.png", "png");
#else
                        //fixed (byte* vp = &videoImage[0])
                        //{
                        //    var b = new System.Drawing.Bitmap(videoResX, videoResY, videoResX * 4, System.Drawing.Imaging.PixelFormat.Format32bppArgb, new IntPtr(vp));
                        //    b.Save($"d:\\dump\\pr\\frame_{f:D4}.png");
                        //}
#endif
                    }
                }

                process.WaitForExit();
                process.Dispose();
                process = null;

                Log.LogMessage(LogSeverity.Info, "Exporting audio...");

                // Save audio to temporary file.
                WaveFile.Save(song, tempAudioFile, sampleRate, 1, -1, channelMask);

                Log.LogMessage(LogSeverity.Info, "Mixing audio and video...");

                // Run ffmpeg again to combine audio + video.
                process = LaunchFFmpeg(ffmpegExecutable, $"-y -i \"{tempVideoFile}\" -i \"{tempAudioFile}\" -c:v copy -c:a aac -b:a {audioBitRate}k \"{filename}\"", false, false);
                process.WaitForExit();
                process.Dispose();
                process = null;

                File.Delete(tempAudioFile);
                File.Delete(tempVideoFile);
            }
            catch (Exception e)
            {
                Log.LogMessage(LogSeverity.Error, "Error exporting video.");
                Log.LogMessage(LogSeverity.Error, e.Message);
            }
            finally
            {
                pianoRoll.EndVideoRecording();
                foreach (var c in channelStates)
                {
                    c.bmp.Dispose();
                }
                theme.Terminate();
                bmpWatermark.Dispose();
                channelGraphics.Dispose();
                videoGraphics.Dispose();
            }

            return(true);
        }