private WaveformRenderer CreateWaveformRenderer()
{
    // This can be on a worker thread so we need to lock...
    lock (_settings)
    {
        var renderer = new WaveformRenderer
        {
            BackgroundColor = _settings.BackgroundColor,
            BackgroundImage = File.Exists(_settings.BackgroundImageFilename)
                ? Image.FromFile(_settings.BackgroundImageFilename)
                : null,
            Width = _settings.Width,
            Height = _settings.Height,
            Columns = _settings.Columns,
            FramesPerSecond = _settings.FrameRate,
            RenderingBounds = _settings.GetBounds()
        };
        if (_settings.Channels.Count > 0)
        {
            // We don't support multiple sampling rates, but this lets us ignore "empty" tracks.
            renderer.SamplingRate = _settings.Channels.Max(c => c.SampleRate);
        }
        foreach (var channel in _settings.Channels)
        {
            renderer.AddChannel(channel);
        }
        return renderer;
    }
}
private static void Render(Settings settings, AudioLoader loader)
{
    if (settings.OutputFile != null)
    {
        // Emit normalized data to a WAV file for later mixing
        if (settings.MasterAudioFile == null && !settings.NoMasterMix)
        {
            settings.MasterAudioFile = settings.OutputFile + ".wav";
            loader.MixToFile(settings.MasterAudioFile, !settings.NoMasterMixReplayGain);
        }
    }

    Console.WriteLine("Generating background image...");

    var backgroundImage = new BackgroundRenderer(settings.Width, settings.Height, ParseColor(settings.BackgroundColor));
    if (settings.BackgroundImageFile != null)
    {
        using (var bm = Image.FromFile(settings.BackgroundImageFile))
        {
            backgroundImage.Add(new ImageInfo(bm, ContentAlignment.MiddleCenter, true, DockStyle.None, 0.5f));
        }
    }

    if (settings.LogoImageFile != null)
    {
        using (var bm = Image.FromFile(settings.LogoImageFile))
        {
            backgroundImage.Add(new ImageInfo(bm, ContentAlignment.BottomRight, false, DockStyle.None, 1));
        }
    }

    if (settings.VgmFile != null)
    {
        var gd3 = Gd3Tag.LoadFromVgm(settings.VgmFile);
        var gd3Text = gd3.ToString();
        if (gd3Text.Length > 0)
        {
            backgroundImage.Add(new TextInfo(gd3Text, settings.Gd3Font, settings.Gd3FontSize, ContentAlignment.BottomLeft, FontStyle.Regular, DockStyle.Bottom, ParseColor(settings.Gd3FontColor)));
        }
    }

    var renderer = new WaveformRenderer
    {
        BackgroundImage = backgroundImage.Image,
        Columns = settings.Columns,
        FramesPerSecond = settings.FramesPerSecond,
        Width = settings.Width,
        Height = settings.Height,
        SamplingRate = loader.SampleRate,
        RenderedLineWidthInSamples = settings.ViewWidthMs * loader.SampleRate / 1000,
        RenderingBounds = backgroundImage.WaveArea
    };

    if (settings.GridLineWidth > 0)
    {
        renderer.Grid = new WaveformRenderer.GridConfig
        {
            Color = ParseColor(settings.GridColor),
            Width = settings.GridLineWidth,
            DrawBorder = settings.GridBorder
        };
    }

    if (settings.ZeroLineWidth > 0)
    {
        renderer.ZeroLine = new WaveformRenderer.ZeroLineConfig
        {
            Color = ParseColor(settings.ZeroLineColor),
            Width = settings.ZeroLineWidth
        };
    }

    // Add the data to the renderer
    foreach (var channel in loader.Data)
    {
        renderer.AddChannel(new Channel(
            channel.Samples,
            ParseColor(settings.LineColor),
            settings.LineWidth,
            GuessChannelName(channel.Filename),
            CreateTriggerAlgorithm(settings.TriggerAlgorithm),
            settings.TriggerLookahead));
    }

    if (settings.ChannelLabelsFont != null)
    {
        renderer.ChannelLabels = new WaveformRenderer.LabelConfig
        {
            Color = ParseColor(settings.ChannelLabelsColor),
            FontName = settings.ChannelLabelsFont,
            Size = settings.ChannelLabelsSize
        };
    }

    var outputs = new List<IGraphicsOutput>();
    if (settings.FfMpegPath != null)
    {
        Console.WriteLine("Adding FFMPEG renderer...");
        outputs.Add(new FfmpegOutput(settings.FfMpegPath, settings.OutputFile, settings.Width, settings.Height, settings.FramesPerSecond, settings.FfMpegExtraOptions, settings.MasterAudioFile));
    }

    if (settings.PreviewFrameskip > 0)
    {
        Console.WriteLine("Adding preview renderer...");
        outputs.Add(new PreviewOutput(settings.PreviewFrameskip));
    }

    try
    {
        Console.WriteLine("Rendering...");
        var sw = Stopwatch.StartNew();
        renderer.Render(outputs);
        sw.Stop();
        int numFrames = (int)(loader.Length.TotalSeconds * settings.FramesPerSecond);
        Console.WriteLine($"Rendering complete in {sw.Elapsed:g}, average {numFrames / sw.Elapsed.TotalSeconds:N} fps");
    }
    catch (Exception ex)
    {
        // Should mean it was cancelled
        Console.WriteLine($"Rendering cancelled: {ex.Message}");
    }
    finally
    {
        foreach (var graphicsOutput in outputs)
        {
            graphicsOutput.Dispose();
        }
    }
}
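// A minimal standalone sketch of the view-width conversion used above
// (RenderedLineWidthInSamples = ViewWidthMs * SampleRate / 1000): the number of
// milliseconds shown per rendered line is converted to a sample count at the loaded
// sampling rate. The helper name below is hypothetical and not part of the original code.
private static int ViewWidthMsToSamples(int viewWidthMs, int sampleRate)
{
    // e.g. 35 ms at 44100 Hz => 35 * 44100 / 1000 = 1543 samples per rendered line
    return viewWidthMs * sampleRate / 1000;
}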
private static void Render(Settings settings, IReadOnlyCollection<Channel> channels)
{
    Console.WriteLine("Generating background image...");

    var backgroundImage = new BackgroundRenderer(settings.Width, settings.Height, ParseColor(settings.BackgroundColor));
    if (settings.BackgroundImageFile != null)
    {
        using (var bm = Image.FromFile(settings.BackgroundImageFile))
        {
            backgroundImage.Add(new ImageInfo(bm, ContentAlignment.MiddleCenter, true, DockStyle.None, 0.5f));
        }
    }

    if (!string.IsNullOrEmpty(settings.LogoImageFile))
    {
        using (var bm = Image.FromFile(settings.LogoImageFile))
        {
            backgroundImage.Add(new ImageInfo(bm, ContentAlignment.BottomRight, false, DockStyle.None, 1));
        }
    }

    if (settings.VgmFile != null)
    {
        var gd3 = Gd3Tag.LoadFromVgm(settings.VgmFile);
        var gd3Text = gd3.ToString();
        if (gd3Text.Length > 0)
        {
            backgroundImage.Add(new TextInfo(gd3Text, settings.Gd3Font, settings.Gd3FontSize, ContentAlignment.BottomLeft, FontStyle.Regular, DockStyle.Bottom, ParseColor(settings.Gd3FontColor)));
        }
    }

    if (settings.MaximumAspectRatio > 0.0)
    {
        // Pick the smallest column count whose per-channel cell is no wider, relative to
        // its height, than the requested maximum aspect ratio.
        Console.WriteLine($"Determining column count for maximum aspect ratio {settings.MaximumAspectRatio}:");
        for (var columns = 1; columns < 100; ++columns)
        {
            var width = backgroundImage.WaveArea.Width / columns;
            var rows = channels.Count / columns + (channels.Count % columns == 0 ? 0 : 1);
            var height = backgroundImage.WaveArea.Height / rows;
            var ratio = (double)width / height;
            Console.WriteLine($"- {columns} columns => {width} x {height} pixels => ratio {ratio}");
            if (ratio < settings.MaximumAspectRatio)
            {
                settings.Columns = columns;
                break;
            }
        }
    }

    var renderer = new WaveformRenderer
    {
        BackgroundImage = backgroundImage.Image,
        Columns = settings.Columns,
        FramesPerSecond = settings.FramesPerSecond,
        Width = settings.Width,
        Height = settings.Height,
        SamplingRate = channels.First().SampleRate,
        RenderingBounds = backgroundImage.WaveArea
    };

    if (settings.GridLineWidth > 0)
    {
        foreach (var channel in channels)
        {
            channel.BorderColor = ParseColor(settings.GridColor);
            channel.BorderWidth = settings.GridLineWidth;
            channel.BorderEdges = settings.GridBorder;
        }
    }

    // Add the data to the renderer
    foreach (var channel in channels)
    {
        renderer.AddChannel(channel);
    }

    var outputs = new List<IGraphicsOutput>();
    if (settings.FfMpegPath != null)
    {
        Console.WriteLine("Adding FFMPEG renderer...");
        outputs.Add(new FfmpegOutput(settings.FfMpegPath, settings.OutputFile, settings.Width, settings.Height, settings.FramesPerSecond, settings.FfMpegExtraOptions, settings.MasterAudioFile));
    }

    if (settings.PreviewFrameskip > 0)
    {
        Console.WriteLine("Adding preview renderer...");
        outputs.Add(new PreviewOutput(settings.PreviewFrameskip, true));
    }

    try
    {
        Console.WriteLine("Rendering...");
        var sw = Stopwatch.StartNew();
        renderer.Render(outputs);
        sw.Stop();
        int numFrames = (int)(channels.Max(x => x.Length).TotalSeconds * settings.FramesPerSecond);
        Console.WriteLine($"Rendering complete in {sw.Elapsed:g}, average {numFrames / sw.Elapsed.TotalSeconds:N} fps");
    }
    catch (Exception ex)
    {
        // Should mean it was cancelled
        Console.WriteLine($"Rendering cancelled: {ex.Message}");
    }
    finally
    {
        foreach (var graphicsOutput in outputs)
        {
            graphicsOutput.Dispose();
        }
    }
}
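// A minimal standalone sketch of the column-count heuristic above, assuming the same
// rule: pick the smallest column count whose per-channel cell aspect ratio (cell width
// over cell height) drops below the requested maximum. For example, a 1900x1000 wave
// area with 9 channels and a 2.0 limit gives ratios of ~17.1 (1 column), 4.75 (2 columns)
// and ~1.9 (3 columns), so 3 columns would be chosen. The helper name and parameters
// below are hypothetical and not part of the original code.
private static int GuessColumnCount(int waveWidth, int waveHeight, int channelCount, double maximumAspectRatio)
{
    for (var columns = 1; columns < 100; ++columns)
    {
        // Ceiling division: enough rows to hold every channel at this column count
        var rows = channelCount / columns + (channelCount % columns == 0 ? 0 : 1);
        var cellWidth = waveWidth / columns;
        var cellHeight = waveHeight / rows;
        if (cellHeight == 0)
        {
            break; // more rows than pixels; fall through to the default below
        }
        if ((double)cellWidth / cellHeight < maximumAspectRatio)
        {
            return columns;
        }
    }
    return 1; // nothing matched; keep a single column
}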
private static void Go(string filename, ICollection<string> filenames, int width, int height, int fps,
    string background, string logo, string vgmFile, int previewFrameskip, float highPassFrequency,
    float scale, Type triggerAlgorithm, int viewSamples, int numColumns, string ffMpegPath,
    string ffMpegExtraArgs, string masterAudioFilename, float autoScale, Color gridColor,
    float gridWidth, bool gridOuter, Color zeroLineColor, float zeroLineWidth, float lineWidth)
{
    filename = Path.GetFullPath(filename);

    var waitForm = new WaitForm();
    waitForm.Show();

    int sampleRate;
    using (var reader = new WaveFileReader(filenames.First()))
    {
        sampleRate = reader.WaveFormat.SampleRate;
    }

    // ReSharper disable once CompareOfFloatsByEqualityOperator
    int stepsPerFile = 1 + (highPassFrequency > 0 ? 1 : 0) + 2;
    int totalProgress = filenames.Count * stepsPerFile;
    int progress = 0;

    var loadTask = Task.Run(() =>
    {
        // Do a parallel read of all files
        var channels = filenames.AsParallel().Select((wavFilename, channelIndex) =>
        {
            var reader = new WaveFileReader(wavFilename);
            var buffer = new float[reader.SampleCount];

            // We read the file and convert to mono
            reader.ToSampleProvider().ToMono().Read(buffer, 0, (int)reader.SampleCount);
            Interlocked.Increment(ref progress);

            // We don't care about ones where the samples are all equal
            // ReSharper disable once CompareOfFloatsByEqualityOperator
            if (buffer.Length == 0 || buffer.All(s => s == buffer[0]))
            {
                // So we skip steps here
                reader.Dispose();
                Interlocked.Add(ref progress, stepsPerFile - 1);
                return null;
            }

            if (highPassFrequency > 0)
            {
                // Apply the high pass filter
                var filter = BiQuadFilter.HighPassFilter(reader.WaveFormat.SampleRate, highPassFrequency, 1);
                for (int i = 0; i < buffer.Length; ++i)
                {
                    buffer[i] = filter.Transform(buffer[i]);
                }
                Interlocked.Increment(ref progress);
            }

            float max = float.MinValue;
            foreach (var sample in buffer)
            {
                max = Math.Max(max, Math.Abs(sample));
            }

            return new { Data = buffer, WavReader = reader, Max = max };
        }).Where(ch => ch != null).ToList();

        if (autoScale > 0 || scale > 1)
        {
            // Calculate the multiplier
            float multiplier = 1.0f;
            if (autoScale > 0)
            {
                multiplier = autoScale / channels.Max(channel => channel.Max);
            }
            if (scale > 1)
            {
                multiplier *= scale;
            }

            // ...and we apply it
            channels.AsParallel().Select(channel => channel.Data).ForAll(samples =>
            {
                for (int i = 0; i < samples.Length; ++i)
                {
                    samples[i] *= multiplier;
                }
                Interlocked.Increment(ref progress);
            });
        }

        return channels.ToList();
    });

    while (!loadTask.IsCompleted)
    {
        Application.DoEvents();
        Thread.Sleep(1);
        waitForm.Progress("Reading data...", (double)progress / totalProgress);
    }

    var voiceData = loadTask.Result.Select(channel => channel.Data).ToList();

    waitForm.Close();

    // Emit normalised data to a WAV file for later mixing
    if (masterAudioFilename == null)
    {
        // Generate a temp filename
        masterAudioFilename = filename + ".wav";

        // Mix the audio. We should probably not be re-reading it here... should do this in one pass.
        foreach (var reader in loadTask.Result.Select(channel => channel.WavReader))
        {
            reader.Position = 0;
        }
        var mixer = new MixingSampleProvider(loadTask.Result.Select(channel => channel.WavReader.ToSampleProvider()));
        var length = (int)loadTask.Result.Max(channel => channel.WavReader.SampleCount);
        var mixedData = new float[length * mixer.WaveFormat.Channels];
        mixer.Read(mixedData, 0, mixedData.Length);

        // Then we want to deinterleave it
        var leftChannel = new float[length];
        var rightChannel = new float[length];
        for (int i = 0; i < length; ++i)
        {
            leftChannel[i] = mixedData[i * 2];
            rightChannel[i] = mixedData[i * 2 + 1];
        }

        // Then Replay Gain it
        // The +3 is to make it at "YouTube loudness", which is a lot louder than ReplayGain defaults to.
        var replayGain = new TrackGain(sampleRate);
        replayGain.AnalyzeSamples(leftChannel, rightChannel);
        float multiplier = (float)Math.Pow(10, (replayGain.GetGain() + 3) / 20);
        Debug.WriteLine($"ReplayGain multiplier is {multiplier}");

        // And apply it
        for (int i = 0; i < mixedData.Length; ++i)
        {
            mixedData[i] *= multiplier;
        }

        WaveFileWriter.CreateWaveFile(
            masterAudioFilename,
            new FloatArraySampleProvider(mixedData, sampleRate).ToWaveProvider());
    }

    var backgroundImage = new BackgroundRenderer(width, height, Color.Black);
    if (background != null)
    {
        using (var bm = Image.FromFile(background))
        {
            backgroundImage.Add(new ImageInfo(bm, ContentAlignment.MiddleCenter, true, DockStyle.None, 0.5f));
        }
    }
    if (logo != null)
    {
        using (var bm = Image.FromFile(logo))
        {
            backgroundImage.Add(new ImageInfo(bm, ContentAlignment.BottomRight, false, DockStyle.None, 1));
        }
    }
    if (vgmFile != null)
    {
        var gd3 = Gd3Tag.LoadFromVgm(vgmFile);
        var gd3Text = gd3.ToString();
        if (gd3Text.Length > 0)
        {
            backgroundImage.Add(new TextInfo(gd3Text, "Tahoma", 16, ContentAlignment.BottomLeft, FontStyle.Regular, DockStyle.Bottom, Color.White));
        }
    }

    var renderer = new WaveformRenderer
    {
        BackgroundImage = backgroundImage.Image,
        Columns = numColumns,
        FramesPerSecond = fps,
        Width = width,
        Height = height,
        SamplingRate = sampleRate,
        RenderedLineWidthInSamples = viewSamples,
        RenderingBounds = backgroundImage.WaveArea
    };

    if (gridColor != Color.Empty && gridWidth > 0)
    {
        renderer.Grid = new WaveformRenderer.GridConfig
        {
            Color = gridColor,
            Width = gridWidth,
            DrawBorder = gridOuter
        };
    }

    if (zeroLineColor != Color.Empty && zeroLineWidth > 0)
    {
        renderer.ZeroLine = new WaveformRenderer.ZeroLineConfig
        {
            Color = zeroLineColor,
            Width = zeroLineWidth
        };
    }

    foreach (var channel in voiceData)
    {
        renderer.AddChannel(new Channel(channel, Color.White, lineWidth, "Hello world", Activator.CreateInstance(triggerAlgorithm) as ITriggerAlgorithm, 0));
    }

    var outputs = new List<IGraphicsOutput>();
    if (ffMpegPath != null)
    {
        outputs.Add(new FfmpegOutput(ffMpegPath, filename, width, height, fps, ffMpegExtraArgs, masterAudioFilename));
    }
    if (previewFrameskip > 0)
    {
        outputs.Add(new PreviewOutput(previewFrameskip));
    }

    try
    {
        renderer.Render(outputs);
    }
    catch (Exception)
    {
        // Should mean it was cancelled
    }
    finally
    {
        foreach (var graphicsOutput in outputs)
        {
            graphicsOutput.Dispose();
        }
    }
}
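// A minimal sketch of the loudness adjustment used above, assuming the same formula:
// ReplayGain reports a gain in decibels, which is converted to a linear sample
// multiplier via 10^(dB/20); the extra +3 dB in the code above pushes the result to
// roughly "YouTube loudness". The helper name below is hypothetical and not part of
// the original code.
private static float GainDbToMultiplier(double gainDb)
{
    // e.g. +3 dB => 10^(3/20) ≈ 1.41, i.e. samples are scaled up by about 41%
    return (float)Math.Pow(10, gainDb / 20);
}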