public static void Modify(
    string filename,
    AudioUtilityInfo sourceExpected,
    AudioUtilityRequest request,
    string outputMimeType,
    AudioUtilityInfo outputExpected,
    Action<AudioUtilityInfo, AudioUtilityInfo> additionalTests = null)
{
    var source = PathHelper.GetTestAudioFile(filename);
    var destExtension = MediaTypes.GetExtension(outputMimeType);
    var outputFilename = Path.GetFileNameWithoutExtension(filename) + "_modified." + destExtension;

    foreach (var util in new[] { TestHelper.GetAudioUtility() })
    {
        var dir = PathHelper.GetTempDir();
        var output = new FileInfo(Path.Combine(dir.FullName, outputFilename));

        util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, outputMimeType, request);

        var sourceInfo = util.Info(source);
        TestHelper.CheckAudioUtilityInfo(sourceExpected, sourceInfo);

        var outputInfo = util.Info(output);
        var outputInfoText = GetDurationInfo(outputInfo);

        // verify the converted output against the expected values (outputExpected was otherwise unused)
        TestHelper.CheckAudioUtilityInfo(outputExpected, outputInfo);

        additionalTests?.Invoke(sourceExpected, sourceInfo);

        PathHelper.DeleteTempDir(dir);
    }
}
public Task<FileSegment> PrepareFile<TSource>(
    DirectoryInfo outputDirectory,
    ISegment<TSource> source,
    string outputMediaType,
    int? targetSampleRateHz,
    DirectoryInfo temporaryFilesDirectory,
    int[] channelSelection,
    bool? mixDownToMono)
{
    int min = (int)source.StartOffsetSeconds.Seconds().TotalMinutes;

    if (typeof(TSource) != typeof(FileInfo))
    {
        throw new NotSupportedException("Dummy Source Preparer only works with FileInfos");
    }

    var basename = Path.GetFileNameWithoutExtension((source.Source as FileInfo).Name);

    return Task.Run(() =>
    {
        var path = outputDirectory.CombineFile(
            basename + $"_{min}min." + MediaTypes.GetExtension(outputMediaType));

        using (var file = path.CreateText())
        {
            file.WriteLine(
                $"{outputDirectory},{source},{outputMediaType},{source.StartOffsetSeconds},{source.EndOffsetSeconds},{targetSampleRateHz}"
                + $",{temporaryFilesDirectory},{channelSelection},{mixDownToMono}");
        }

        return new FileSegment(
            path,
            targetSampleRateHz.Value,
            (source.EndOffsetSeconds - source.StartOffsetSeconds).Seconds());
    });
}
public void AdvancedChannelSelectionFfmpegRawPcmFails(int[] channelMap)
{
    var audioUtilityRequest = new AudioUtilityRequest
    {
        Channels = channelMap,
        MixDownToMono = false,
        TargetSampleRate = 22050,
        BitDepth = 16,
    };

    var mediaType = MediaTypes.MediaTypePcmRaw;
    var otherMediaType = MediaTypes.MediaTypeWav1;
    var utility = TestHelper.GetAudioUtilityFfmpegRawPcm();
    var file = FourChannelFileRaw;

    Assert.ThrowsException<ChannelSelectionOperationNotImplemented>(
        () =>
        {
            utility.Modify(
                TestHelper.GetAudioFile(file),
                mediaType,
                TempFileHelper.NewTempFile(MediaTypes.GetExtension(otherMediaType)),
                otherMediaType,
                audioUtilityRequest);
        });

    audioUtilityRequest.MixDownToMono = true;

    Assert.ThrowsException<ChannelSelectionOperationNotImplemented>(
        () =>
        {
            utility.Modify(
                TestHelper.GetAudioFile(file),
                mediaType,
                TempFileHelper.NewTempFile(MediaTypes.GetExtension(otherMediaType)),
                otherMediaType,
                audioUtilityRequest);
        });

    audioUtilityRequest.Channels = null;

    Assert.ThrowsException<InvalidOperationException>(
        () =>
        {
            utility.Modify(
                TestHelper.GetAudioFile(file),
                mediaType,
                TempFileHelper.NewTempFile(MediaTypes.GetExtension(otherMediaType)),
                otherMediaType,
                audioUtilityRequest);
        });
}
private static void AssertAdvancedChannelConversionFails(
    string file,
    string mediaType,
    IAudioUtility utility,
    string otherMediaType = null,
    bool skipMonoCheck = false)
{
    // array of channels of frequencies (expected in each channel)
    var audioUtilityRequest = new AudioUtilityRequest
    {
        Channels = new[] { 1, 2, 3, 4 },
        MixDownToMono = false,
    };

    otherMediaType = otherMediaType ?? mediaType;

    Assert.ThrowsException<ChannelSelectionOperationNotImplemented>(
        () =>
        {
            utility.Modify(
                TestHelper.GetAudioFile(file),
                mediaType,
                TempFileHelper.NewTempFile(MediaTypes.GetExtension(otherMediaType)),
                otherMediaType,
                audioUtilityRequest);
        });

    audioUtilityRequest.MixDownToMono = true;

    Assert.ThrowsException<ChannelSelectionOperationNotImplemented>(
        () =>
        {
            utility.Modify(
                TestHelper.GetAudioFile(file),
                mediaType,
                TempFileHelper.NewTempFile(MediaTypes.GetExtension(otherMediaType)),
                otherMediaType,
                audioUtilityRequest);
        });

    if (skipMonoCheck)
    {
        return;
    }

    audioUtilityRequest.Channels = null;

    Assert.ThrowsException<ChannelSelectionOperationNotImplemented>(
        () =>
        {
            utility.Modify(
                TestHelper.GetAudioFile(file),
                mediaType,
                TempFileHelper.NewTempFile(MediaTypes.GetExtension(otherMediaType)),
                otherMediaType,
                audioUtilityRequest);
        });
}
private static void SegmentsCorrectly(
    string filename,
    string mimetype,
    TimeSpan start,
    TimeSpan end,
    TimeSpan maxVariance)
{
    foreach (var util in new[] { TestHelper.GetAudioUtility() })
    {
        var dir = PathHelper.GetTempDir();

        var destMimeType = mimetype;
        if (mimetype == MediaTypes.MediaTypeWavpack)
        {
            destMimeType = MediaTypes.MediaTypeWav;
        }

        var output = new FileInfo(
            Path.Combine(
                dir.FullName,
                Path.GetFileNameWithoutExtension(filename) + "_segmented." + MediaTypes.GetExtension(destMimeType)));

        var audioUtilRequest = new AudioUtilityRequest { OffsetStart = start, OffsetEnd = end };

        var input = PathHelper.GetTestAudioFile(filename);

        util.Modify(input, mimetype, output, destMimeType, audioUtilRequest);

        var utilInfoInput = util.Info(input);
        var utilInfoOutput = util.Info(output);

        var infoInput = GetDurationInfo(utilInfoInput);
        var infoOutput = GetDurationInfo(utilInfoOutput);

        var compareResult = "Expected duration " + (end - start)
                            + " actual duration " + utilInfoOutput.Duration.Value
                            + " expected max variation " + maxVariance
                            + " actual variation "
                            + (end - start).Subtract(utilInfoOutput.Duration.Value).Duration();

        using (var cr = new ConsoleRedirector())
        {
            LoggedConsole.WriteLine(compareResult);
        }

        Assert.IsTrue(
            TestHelper.CompareTimeSpans(utilInfoOutput.Duration.Value, end - start, maxVariance),
            compareResult + ". Info input: " + infoInput + "." + Environment.NewLine + "Info output: " + infoOutput);

        PathHelper.DeleteTempDir(dir);
    }
}
private FileInfo SegmentRawPcmToWav(FileInfo source, AudioUtilityRequest request)
{
    // use a temp file for the ffmpeg output.
    var extension = MediaTypes.GetExtension(MediaTypes.MediaTypeWav1);
    var rawFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, extension);

    if (this.Log.IsDebugEnabled)
    {
        this.Log.Debug(
            "Converting/segmenting raw file " + source.FullName + " to wav " + rawFile.FullName
            + " using ffmpeg. Settings: " + request);
    }

    // use ffmpeg to segment and convert to wav.
    this.ffmpegRawPcmUtility.Modify(source, MediaTypes.MediaTypePcmRaw, rawFile, MediaTypes.MediaTypeWav, request);

    return rawFile;
}
/// <summary>
/// Check if a file can be processed.
/// </summary>
/// <param name="file">
/// The file to check.
/// </param>
/// <param name="validMediaTypes">
/// The valid Mime Types.
/// </param>
/// <param name="invalidMediaTypes">
/// The invalid Mime Types.
/// </param>
/// <exception cref="NotSupportedException"><c>NotSupportedException</c>.</exception>
/// <exception cref="FileNotFoundException"><c>FileNotFoundException</c>.</exception>
protected void CanProcess(FileInfo file, IEnumerable<string> validMediaTypes, IEnumerable<string> invalidMediaTypes)
{
    const string ErrorFormatString = "File ({0}) cannot be processed. {1}";
    const string ValidFormatsAre = " Valid formats are: {0}.";
    const string InvalidFormatsAre = " Invalid formats are: {0}.";

    var sbFormats = new StringBuilder();

    if (validMediaTypes != null && validMediaTypes.Any())
    {
        var formats = string.Join(
            ", ",
            validMediaTypes.Select(m => MediaTypes.GetExtension(m) + " (" + m + ")").ToArray());

        sbFormats.AppendFormat(ValidFormatsAre, formats);
    }

    if (invalidMediaTypes != null && invalidMediaTypes.Any())
    {
        var formats = string.Join(
            ", ",
            invalidMediaTypes.Select(m => MediaTypes.GetExtension(m) + " (" + m + ")").ToArray());

        sbFormats.AppendFormat(InvalidFormatsAre, formats);
    }

    string fileExtension = this.GetExtension(file);

    if (validMediaTypes != null)
    {
        var validExts = validMediaTypes.Select(m => MediaTypes.GetExtension(m).ToUpperInvariant());
        if (!validExts.Contains(fileExtension))
        {
            throw new NotSupportedException(string.Format(ErrorFormatString, file.Name, sbFormats));
        }
    }

    if (invalidMediaTypes != null)
    {
        var invalidExts = invalidMediaTypes.Select(m => MediaTypes.GetExtension(m).ToUpperInvariant());
        if (invalidExts.Contains(fileExtension))
        {
            throw new NotSupportedException(string.Format(ErrorFormatString, file.Name, sbFormats));
        }
    }
}
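A minimal usage sketch of the CanProcess guard above (not from the source); it mirrors the call made in the sox spectrogram example further down, and the chosen media-type constants are illustrative only.

// Hypothetical guard in a derived audio utility: accept only wav and mp3 input,
// letting CanProcess throw NotSupportedException for anything else.
this.CanProcess(
    source,
    new[] { MediaTypes.MediaTypeWav, MediaTypes.MediaTypeMp3 },
    invalidMediaTypes: null);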
public void AdvancedChannelSelectionMp3SpltFails()
{
    // mp3 only supports two channels anyway...
    // array of channels of frequencies (expected in each channel)
    var audioUtilityRequest = new AudioUtilityRequest
    {
        Channels = new[] { 1, 2 },
        MixDownToMono = false,
    };

    Assert.ThrowsException<ChannelSelectionOperationNotImplemented>(
        () =>
        {
            TestHelper.GetAudioUtilityMp3Splt().Modify(
                TestHelper.GetAudioFile(TwoChannelFileMp3),
                MediaTypes.MediaTypeMp3,
                TempFileHelper.NewTempFile(MediaTypes.GetExtension(MediaTypes.MediaTypeMp3)),
                MediaTypes.MediaTypeMp3,
                audioUtilityRequest);
        });
}
public static string GetFileName(
    string outputFileName,
    string outputMediaType,
    TimeSpan? requestOffsetStart,
    TimeSpan? requestOffsetEnd,
    bool oldFormat = false)
{
    var start = oldFormat ? requestOffsetStart?.TotalMinutes : requestOffsetStart?.TotalSeconds;
    var end = oldFormat ? requestOffsetEnd?.TotalMinutes : requestOffsetEnd?.TotalSeconds;
    var format = oldFormat ? "0.######" : "0.###";

    outputFileName = string.Format(
        "{0}_{1}{2}{4}.{3}",
        Path.GetFileNameWithoutExtension(outputFileName),
        (start ?? 0).ToString(format, CultureInfo.InvariantCulture),
        end?.ToString("\\-" + format, CultureInfo.InvariantCulture) ?? string.Empty,
        MediaTypes.GetExtension(outputMediaType),
        oldFormat ? "min" : string.Empty);

    return outputFileName;
}
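A worked example of the naming logic above (not from the source), assuming MediaTypes.GetExtension(MediaTypes.MediaTypeWav) returns "wav":

// New format: offsets rendered as seconds ("0.###").
var newName = GetFileName(
    "recording.wav", MediaTypes.MediaTypeWav, TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(90));
// => "recording_30-90.wav"

// Old format: offsets rendered as minutes ("0.######") with a "min" suffix.
var oldName = GetFileName(
    "recording.wav", MediaTypes.MediaTypeWav, TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(90), oldFormat: true);
// => "recording_0.5-1.5min.wav"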
private static void ChannelTest(
    string sourceFile,
    int[] channels,
    bool? mixDownToMono,
    int[][] expectedFrequencies,
    AudioUtilityRequest customRequest = null)
{
    // adjust params for this test
    var sourceInfo = TestHelper.AudioDetails[sourceFile];

    var expected = sourceInfo.ShallowClone();
    expected.ChannelCount = expectedFrequencies.Length;

    var audioUtilityRequest = customRequest ?? new AudioUtilityRequest();
    audioUtilityRequest.MixDownToMono = mixDownToMono;
    audioUtilityRequest.Channels = channels;

    var outputMimeType = MediaTypes.MediaTypeWav;
    var source = PathHelper.GetTestAudioFile(sourceFile);

    var destExtension = MediaTypes.GetExtension(outputMimeType);
    var outputFilename = Path.GetFileNameWithoutExtension(FourChannelFile) + "_modified." + destExtension;

    var util = TestHelper.GetAudioUtility();

    var dir = PathHelper.GetTempDir();
    var output = new FileInfo(Path.Combine(dir.FullName, outputFilename));
    expected.SourceFile = output;

    util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, outputMimeType, audioUtilityRequest);

    DoFrequencyAnalysis(expected, expectedFrequencies);

    PathHelper.DeleteTempDir(dir);
}
/// <summary>
/// Create a spectrogram from a segment of the <paramref name="source"/> audio file.
/// <paramref name="output"/> image file will be created.
/// </summary>
/// <param name="source">
/// The source audio file.
/// </param>
/// <param name="sourceMimeType">
/// The source Mime Type.
/// </param>
/// <param name="output">
/// The output image file. Ensure the file does not exist.
/// </param>
/// <param name="outputMimeType">
/// The output Mime Type.
/// </param>
/// <param name="request">
/// The spectrogram request.
/// </param>
public void Create(FileInfo source, string sourceMimeType, FileInfo output, string outputMimeType, SpectrogramRequest request)
{
    this.ValidateMimeTypeExtension(source, sourceMimeType, output, outputMimeType);

    var tempFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.ExtWav);

    var audioUtilRequest = new AudioUtilityRequest
    {
        MixDownToMono = true,
        OffsetStart = request.Start,
        OffsetEnd = request.End,
        TargetSampleRate = 22050,
    };

    this.audioUtility.Modify(source, sourceMimeType, tempFile, MediaTypes.MediaTypeWav, audioUtilRequest);

    Image sourceImage;

    if (this.Log.IsDebugEnabled)
    {
        var stopwatch = new Stopwatch();
        stopwatch.Start();

        sourceImage = Spectrogram(File.ReadAllBytes(tempFile.FullName));

        stopwatch.Stop();

        this.Log.DebugFormat(
            "Generated spectrogram for {0}. Took {1} ({2}ms).",
            source.Name,
            stopwatch.Elapsed.Humanise(),
            stopwatch.Elapsed.TotalMilliseconds);

        this.Log.Debug("Source " + this.BuildFileDebuggingOutput(source));
    }
    else
    {
        sourceImage = Spectrogram(File.ReadAllBytes(tempFile.FullName));
    }

    // modify image to match request
    using (sourceImage)
    {
        // remove 1px from bottom (DC value)
        var sourceRectangle = new Rectangle(0, 0, sourceImage.Width, sourceImage.Height - 1);

        using var requestedImage = new Image<Rgb24>(
            request.IsCalculatedWidthAvailable ? request.CalculatedWidth : sourceRectangle.Width,
            request.Height ?? sourceRectangle.Height);

        var destRectangle = new Rectangle(0, 0, requestedImage.Width, requestedImage.Height);

        requestedImage.DrawImage(sourceImage, destRectangle, sourceRectangle);

        var format = MediaTypes.GetImageFormat(MediaTypes.GetExtension(outputMimeType));
        var encoder = requestedImage.GetConfiguration().ImageFormatsManager.FindEncoder(format);

        if (this.Log.IsDebugEnabled)
        {
            var stopwatch = new Stopwatch();
            stopwatch.Start();

            requestedImage.Save(output.FullName, encoder);

            stopwatch.Stop();

            this.Log.DebugFormat(
                "Saved spectrogram for {0} to {1}. Took {2} ({3}ms).",
                source.Name,
                output.Name,
                stopwatch.Elapsed.Humanise(),
                stopwatch.Elapsed.TotalMilliseconds);

            this.Log.Debug("Output " + this.BuildFileDebuggingOutput(output));
        }
        else
        {
            requestedImage.Save(output.FullName, encoder);
        }
    }

    tempFile.Delete();
}
private static void ConvertsCorrectly(
    string filename,
    string mimetype,
    string outputMimeType,
    TimeSpan expectedDuration,
    TimeSpan maxVariance,
    AudioUtilityRequest customRequest = null)
{
    foreach (var util in new[] { TestHelper.GetAudioUtility() })
    {
        var dir = PathHelper.GetTempDir();
        var output = dir.CombineFile(
            Path.GetFileNameWithoutExtension(filename) + "_converted." + MediaTypes.GetExtension(outputMimeType));

        var audioUtilRequest = customRequest ?? new AudioUtilityRequest { };

        var input = PathHelper.GetTestAudioFile(filename);

        util.Modify(input, mimetype, output, outputMimeType, audioUtilRequest);

        var utilInfoOutput = util.Info(output);
        var infoOutput = GetDurationInfo(util.Info(output));

        var compareResult = "Expected duration " + expectedDuration
                            + " actual duration " + utilInfoOutput.Duration
                            + " expected max variation " + maxVariance
                            + " actual variation "
                            + expectedDuration.Subtract(
                                    utilInfoOutput.Duration.HasValue ? utilInfoOutput.Duration.Value : TimeSpan.Zero)
                                .Duration();

        using (ConsoleRedirector cr = new ConsoleRedirector())
        {
            LoggedConsole.WriteLine(compareResult);
        }

        var message = $"{compareResult}.{Environment.NewLine}Info output: {infoOutput}";

        Assert.IsTrue(
            TestHelper.CompareTimeSpans(expectedDuration, utilInfoOutput.Duration.Value, maxVariance),
            message);

        var info = util.Info(output);

        PathHelper.DeleteTempDir(dir);

        /*
         * var sb = new StringBuilder();
         * foreach (var item in info)
         * {
         *     sb.AppendLine(item.Key + ": " + item.Value);
         * }
         */

        if (info?.RawData != null && info.RawData.ContainsKey("STREAM codec_long_name"))
        {
            var codec = info.RawData["STREAM codec_long_name"];

            if (outputMimeType == MediaTypes.MediaTypeWav)
            {
                Assert.IsTrue(codec == MediaTypes.CodecWavPcm16BitLe);
            }
            else if (outputMimeType == MediaTypes.MediaTypeOggAudio)
            {
                Assert.IsTrue(codec == MediaTypes.CodecVorbis);
            }
            else if (outputMimeType == MediaTypes.MediaTypeMp3)
            {
                Assert.IsTrue(codec == MediaTypes.CodecMp3);
            }
            else if (outputMimeType == MediaTypes.MediaTypeWebMAudio)
            {
                Assert.IsTrue(codec == MediaTypes.CodecVorbis);
            }
            else
            {
                Assert.IsTrue(codec == MediaTypes.ExtUnknown);
            }
        }
    }
}
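For context, a hypothetical test invocation of the helper above; the file name, expected duration, and tolerance are illustrative only and not taken from the test suite.

// Hypothetical: convert a WavPack test file to wav and require the output
// duration to be within 30 ms of two minutes.
ConvertsCorrectly(
    "example.wv",                       // illustrative file name
    MediaTypes.MediaTypeWavpack,
    MediaTypes.MediaTypeWav,
    TimeSpan.FromMinutes(2),
    TimeSpan.FromMilliseconds(30));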
private FileInfo ConvertAndSegmentUsingSox(FileInfo source, string sourceMimeType, AudioUtilityRequest request)
{
    // use a temp file to run sox.
    var soxtempfile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.GetExtension(MediaTypes.MediaTypeWav));

    if (this.Log.IsDebugEnabled)
    {
        this.Log.Debug(
            "Converting and segmenting " + sourceMimeType + " file " + source.FullName + " to wav "
            + soxtempfile.FullName + " using sox. Settings: " + request);
    }

    // run sox
    this.soxUtility.Modify(source, sourceMimeType, soxtempfile, MediaTypes.MediaTypeWav, request);

    return soxtempfile;
}
private FileInfo ConvertNonWavOrMp3(FileInfo source, string sourceMimeType, AudioUtilityRequest request)
{
    // use a temp file to segment.
    var ffmpegTempFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.GetExtension(MediaTypes.MediaTypeWav));

    if (this.Log.IsDebugEnabled)
    {
        this.Log.Debug(
            "Converting " + sourceMimeType + " file " + source.FullName + " to wav " + ffmpegTempFile.FullName
            + " using ffmpeg. Settings: " + request);
    }

    // use ffmpeg to segment.
    this.ffmpegUtility.Modify(source, sourceMimeType, ffmpegTempFile, MediaTypes.MediaTypeWav, request);

    return ffmpegTempFile;
}
private FileInfo SegmentMp3(FileInfo source, string sourceMimeType, AudioUtilityRequest request)
{
    if (this.mp3SpltUtility == null)
    {
        throw new NotSupportedException(
            $"MP3 conversion not supported because mp3splt utility has not been configured for this {nameof(MasterAudioUtility)}.");
    }

    // use a temp file to segment.
    var mp3SpltTempFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.GetExtension(MediaTypes.MediaTypeMp3));

    if (this.Log.IsDebugEnabled)
    {
        this.Log.Debug(
            "Segmenting mp3 file " + source.FullName + " to " + mp3SpltTempFile.FullName
            + " using mp3splt. Settings: " + request);
    }

    // use mp3splt to segment mp3.
    this.mp3SpltUtility.Modify(source, sourceMimeType, mp3SpltTempFile, MediaTypes.MediaTypeMp3, request);

    return mp3SpltTempFile;
}
/// <summary>
/// Check that mime type and extension match.
/// </summary>
/// <param name="file">
/// The audio file.
/// </param>
/// <param name="mimeType">
/// The mime Type.
/// </param>
/// <exception cref="ArgumentException">
/// <c>ArgumentException</c>.
/// </exception>
/// <returns>
/// True if mime type and extension match, otherwise false.
/// </returns>
protected bool CheckMimeTypeExtension(FileInfo file, string mimeType)
{
    string fileExtension = this.GetExtension(file);

    return MediaTypes.GetExtension(mimeType).ToUpperInvariant() == fileExtension;
}
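A hedged sketch (not from the source) of how a caller, such as the ValidateMimeTypeExtension referenced in the spectrogram examples, might use this check; the exception message is illustrative.

// Hypothetical validation: reject a source file whose extension does not
// match the declared mime type.
if (!this.CheckMimeTypeExtension(source, sourceMimeType))
{
    throw new ArgumentException(
        "Mime type '" + sourceMimeType + "' does not match the extension of " + source.Name,
        nameof(sourceMimeType));
}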
private FileInfo SegmentWavpackToWav(FileInfo source, AudioUtilityRequest request)
{
    // use a temp file for wvunpack.
    var wavunpackTempFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.GetExtension(MediaTypes.MediaTypeWav));

    if (this.Log.IsDebugEnabled)
    {
        this.Log.Debug(
            "Segmenting wavpack file " + source.FullName + " to wav " + wavunpackTempFile.FullName
            + " using wvunpack. Settings: " + request);
    }

    // use wvunpack to segment and convert to wav.
    this.wvunpackUtility.Modify(source, MediaTypes.MediaTypeWavpack, wavunpackTempFile, MediaTypes.MediaTypeWav, request);

    return wavunpackTempFile;
}
/// <summary>
/// Create a spectrogram from a segment of the <paramref name="source"/> audio file.
/// <paramref name="output"/> image file will be created.
/// </summary>
/// <param name="source">
/// The source audio file.
/// </param>
/// <param name="sourceMimeType">
/// The source Mime Type.
/// </param>
/// <param name="output">
/// The output image file. Ensure the file does not exist.
/// </param>
/// <param name="outputMimeType">
/// The output Mime Type.
/// </param>
/// <param name="request">
/// The spectrogram request.
/// </param>
public void Create(FileInfo source, string sourceMimeType, FileInfo output, string outputMimeType, SpectrogramRequest request)
{
    this.ValidateMimeTypeExtension(source, sourceMimeType, output, outputMimeType);

    this.CanProcess(output, new[] { MediaTypes.MediaTypePng, MediaTypes.MediaTypeJpeg }, null);

    // to get a proper image from sox, need to remove DC value, plus 1px from top and left.
    var wavFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.ExtWav);
    var originalSoxFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.ExtPng);

    var audioUtilRequest = new AudioUtilityRequest
    {
        OffsetStart = request.Start,
        OffsetEnd = request.End,
        MixDownToMono = true,
        TargetSampleRate = 22050,
    };

    this.audioUtility.Modify(source, sourceMimeType, wavFile, MediaTypes.MediaTypeWav, audioUtilRequest);

    // generate spectrogram using sox.
    if (this.Log.IsDebugEnabled)
    {
        var stopwatch = new Stopwatch();
        stopwatch.Start();

        this.Spectrogram(wavFile, originalSoxFile);

        stopwatch.Stop();

        this.Log.DebugFormat(
            "Generated and saved spectrogram for {0}. Took {1} ({2}ms).",
            source.Name,
            stopwatch.Elapsed.Humanise(),
            stopwatch.Elapsed.TotalMilliseconds);

        this.Log.Debug("Source " + this.BuildFileDebuggingOutput(source));
        this.Log.Debug("Output " + this.BuildFileDebuggingOutput(output));
    }
    else
    {
        this.Spectrogram(wavFile, originalSoxFile);
    }

    wavFile.Delete();

    // modify the original image to match the request
    using (var sourceImage = Image.Load(originalSoxFile.FullName))
    {
        // remove 1px from top, bottom (DC value) and left
        var sourceRectangle = new Rectangle(1, 1, sourceImage.Width - 1, sourceImage.Height - 2);

        var width = request.IsCalculatedWidthAvailable ? request.CalculatedWidth : sourceRectangle.Width;

        using var requestedImage = new Image<Rgb24>(
            width,
            request.Height ?? sourceRectangle.Height);

        var destRectangle = new Rectangle(0, 0, requestedImage.Width, requestedImage.Height);

        requestedImage.DrawImage(sourceImage, destRectangle, sourceRectangle);

        var format = MediaTypes.GetImageFormat(MediaTypes.GetExtension(outputMimeType));
        var encoder = requestedImage.GetConfiguration().ImageFormatsManager.FindEncoder(format);

        if (this.Log.IsDebugEnabled)
        {
            var stopwatch = new Stopwatch();
            stopwatch.Start();

            requestedImage.Save(output.FullName, encoder);

            stopwatch.Stop();

            this.Log.DebugFormat(
                "Saved spectrogram for {0} to {1}. Took {2} ({3}ms).",
                source.Name,
                output.Name,
                stopwatch.Elapsed.Humanise(),
                stopwatch.Elapsed.TotalMilliseconds);

            this.Log.Debug("Output " + this.BuildFileDebuggingOutput(output));
        }
        else
        {
            requestedImage.Save(output.FullName, encoder);
        }
    }

    originalSoxFile.Delete();
}
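Finally, a hypothetical call to Create (not from the source); it assumes SpectrogramRequest exposes settable Start, End, and Height properties matching the reads above, and that spectrogramGenerator is a configured instance of the class these methods belong to.

// Hypothetical usage: render a PNG spectrogram of the first minute of a wav file.
var request = new SpectrogramRequest
{
    Start = TimeSpan.Zero,            // assumed settable; read as request.Start above
    End = TimeSpan.FromSeconds(60),   // assumed settable; read as request.End above
    Height = 256,                     // assumed settable; read as request.Height above
};

spectrogramGenerator.Create(
    new FileInfo("source.wav"),
    MediaTypes.MediaTypeWav,
    new FileInfo("source_spectrogram.png"),
    MediaTypes.MediaTypePng,
    request);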