/// <summary>
///     Generates a visualisation of an audio stream using the 'showfreqs' filter
/// </summary>
/// <param name="inputPath">Path to the input file containing the audio stream to visualise</param>
/// <param name="outputPath">Path to output the visualised audio stream to</param>
/// <param name="size">The Size of the outputted video stream</param>
/// <param name="pixelFormat">The output pixel format (default is yuv420p)</param>
/// <param name="mode">The visualisation mode (default is bar)</param>
/// <param name="amplitudeScale">The amplitude scale (default is lin)</param>
/// <param name="frequencyScale">The frequency scale (default is log)</param>
/// <returns>IConversion object</returns>
public async Task<IConversion> VisualiseAudio(string inputPath, string outputPath, VideoSize size, PixelFormat pixelFormat = PixelFormat.yuv420p, VisualisationMode mode = VisualisationMode.bar, AmplitudeScale amplitudeScale = AmplitudeScale.lin, FrequencyScale frequencyScale = FrequencyScale.log)
{
    // Conversion.VisualiseAudio is synchronous; wrap its result so this snippet
    // keeps the async signature shared by the other snippet builders.
    return await Task.FromResult(Conversion.VisualiseAudio(inputPath, outputPath, size, pixelFormat, mode, amplitudeScale, frequencyScale));
}
/// <summary>
///     Builds an IConversion that renders an audio stream as video using the ffmpeg 'showfreqs' filter.
/// </summary>
/// <param name="inputPath">Path to the input file containing the audio stream to visualise</param>
/// <param name="outputPath">Path to output the visualised audio stream to</param>
/// <param name="size">The Size of the outputted video stream</param>
/// <param name="pixelFormat">The output pixel format (default is yuv420p)</param>
/// <param name="mode">The visualisation mode (default is bar)</param>
/// <param name="amplitudeScale">The amplitude scale (default is lin)</param>
/// <param name="frequencyScale">The frequency scale (default is log)</param>
/// <returns>IConversion object ready to Start()</returns>
/// <exception cref="InvalidOperationException">Thrown when the input file contains no audio stream.</exception>
public static IConversion VisualiseAudio(string inputPath, string outputPath, VideoSize size, PixelFormat pixelFormat = PixelFormat.yuv420p, VisualisationMode mode = VisualisationMode.bar, AmplitudeScale amplitudeScale = AmplitudeScale.lin, FrequencyScale frequencyScale = FrequencyScale.log)
{
    // NOTE(review): sync-over-async blocking call; kept because this factory's
    // contract is synchronous, but callers should avoid it on a sync-context thread.
    IMediaInfo inputInfo = FFmpeg.GetMediaInfo(inputPath).GetAwaiter().GetResult();

    IAudioStream audioStream = inputInfo.AudioStreams.FirstOrDefault();
    if (audioStream == null)
    {
        // Fail fast with a clear message instead of letting AddStream(null)
        // surface as an obscure ffmpeg error when the conversion starts.
        throw new InvalidOperationException($"No audio stream found in '{inputPath}' to visualise.");
    }

    // The video stream is optional; it is only used to pick an output framerate.
    IVideoStream videoStream = inputInfo.VideoStreams.FirstOrDefault();

    // [0:a] = first input's audio; the filter graph labels its output [v] so it can be mapped below.
    string filter = $"\"[0:a]showfreqs=mode={mode}:fscale={frequencyScale}:ascale={amplitudeScale},format={pixelFormat},scale={size.ToFFmpegFormat()} [v]\"";

    return New()
        .AddStream(audioStream)
        .AddParameter($"-filter_complex {filter}")
        .AddParameter("-map [v]")
        .SetFrameRate(videoStream != null ? videoStream.Framerate : 30) // Pin framerate at the original rate or 30 fps to stop dropped or duplicated frames
        .SetOutput(outputPath);
}
/// <summary>
///     Verifies that VisualiseAudio produces a video whose duration tracks the source audio
///     and whose frame size matches the requested output size.
/// </summary>
public async Task VisualiseAudioTest(VideoSize size, PixelFormat pixelFormat, VisualisationMode mode, AmplitudeScale amplitudeScale, FrequencyScale frequencyScale)
{
    string output = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);

    // Bug fix: the expected duration was previously read from Resources.MkvWithAudio
    // while the conversion ran on Resources.Mp4WithAudio, so the assertions compared
    // streams from two different files. Use the same input for both.
    IMediaInfo info = await FFmpeg.GetMediaInfo(Resources.Mp4WithAudio);
    // First() throws if there is no audio stream, so the previous "?." was dead code.
    IAudioStream audioStream = info.AudioStreams.First().SetCodec(AudioCodec.aac);

    IConversionResult conversionResult = await (await FFmpeg.Conversions.FromSnippet.VisualiseAudio(Resources.Mp4WithAudio, output, size, pixelFormat, mode, amplitudeScale, frequencyScale))
        .Start();

    IMediaInfo resultFile = await FFmpeg.GetMediaInfo(output);

    // The resulting streams are 4 seconds longer than the original
    Assert.Equal(resultFile.VideoStreams.First().Duration, audioStream.Duration + TimeSpan.FromSeconds(4));
    Assert.Equal(resultFile.AudioStreams.First().Duration, audioStream.Duration + TimeSpan.FromSeconds(4));
    // NOTE(review): these hard-coded dimensions only hold if every theory case passes
    // a 1920x1080 VideoSize — confirm against the test's InlineData.
    Assert.Equal(1920, resultFile.VideoStreams.First().Width);
    Assert.Equal(1080, resultFile.VideoStreams.First().Height);
}