        public void SegmentsWavCorrectly5Master()
        {
            var expected = new AudioUtilityInfo
            {
                Duration      = TimeSpan.FromSeconds(90),
                SampleRate    = 17460,
                ChannelCount  = 1,
                MediaType     = MediaTypes.MediaTypeWav,
                BitsPerSecond = 279000,
            };

            var request = new AudioUtilityRequest
            {
                MixDownToMono    = true,
                OffsetStart      = TimeSpan.FromSeconds(27),
                OffsetEnd        = TimeSpan.FromSeconds(117),
                TargetSampleRate = 17460,
            };

            var util = TestHelper.GetAudioUtility();

            var source = TestHelper.GetAudioFile("FemaleKoala MaleKoala.wav");
            var output = PathHelper.GetTempFile(MediaTypes.ExtWav);

            util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, MediaTypes.GetMediaType(output.Extension), request);

            var actual = util.Info(output);

            File.Delete(output.FullName);

            TestHelper.CheckAudioUtilityInfo(expected, actual);
        }
Example No. 2
        public void SegmentsWavCorrectly6Shntool()
        {
            var expected = new AudioUtilityInfo
            {
                Duration      = TimeSpan.FromSeconds(93),
                SampleRate    = 17460,
                ChannelCount  = 1,
                MediaType     = MediaTypes.MediaTypeWav,
                BitsPerSecond = 279000,
            };

            var request = new AudioUtilityRequest
            {
                //MixDownToMono = false,
                //Channel = 2,

                OffsetStart = TimeSpan.FromSeconds(27),

                //TargetSampleRate = 17460,
            };

            var util = TestHelper.GetAudioUtilityShntool();

            var source = TestHelper.GetAudioFile("geckos.wav");
            var output = PathHelper.GetTempFile(MediaTypes.ExtWav);

            util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, MediaTypes.GetMediaType(output.Extension), request);

            var actual = util.Info(output);

            File.Delete(output.FullName);

            TestHelper.CheckAudioUtilityInfo(expected, actual);
        }
Example No. 3
        public void SegmentsWavpackCorrectly7Master()
        {
            var expected = new AudioUtilityInfo
            {
                Duration      = TimeSpan.FromMinutes(10) + TimeSpan.FromSeconds(0),
                SampleRate    = 17460,
                ChannelCount  = 1,
                MediaType     = MediaTypes.MediaTypeWav,
                BitsPerSecond = 279000,
            };

            var request = new AudioUtilityRequest
            {
                MixDownToMono    = true,
                TargetSampleRate = 17460,
            };

            var util = TestHelper.GetAudioUtility();

            var source = TestHelper.GetAudioFile("f969b39d-2705-42fc-992c-252a776f1af3_090705-0600.wv");
            var output = PathHelper.GetTempFile(MediaTypes.ExtWav);

            util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, MediaTypes.GetMediaType(output.Extension), request);

            var actual = util.Info(output);

            File.Delete(output.FullName);

            TestHelper.CheckAudioUtilityInfo(expected, actual);
        }
Example No. 4
        public void SegmentsWavpackCorrectly3Wavunpack()
        {
            var expected = new AudioUtilityInfo
            {
                Duration      = TimeSpan.FromSeconds(55),
                SampleRate    = 22050,
                ChannelCount  = 1,
                MediaType     = MediaTypes.MediaTypeWav,
                BitsPerSecond = 353000,
            };

            var request = new AudioUtilityRequest
            {
                MixDownToMono = false,
                OffsetStart   = TimeSpan.FromSeconds(0),
                OffsetEnd     = TimeSpan.FromSeconds(55),

                //SampleRate = 11025
            };

            var source = TestHelper.GetAudioFile("f969b39d-2705-42fc-992c-252a776f1af3_090705-0600.wv");
            var output = PathHelper.GetTempFile(MediaTypes.ExtWav);

            TestHelper.GetAudioUtilityWavunpack().Modify(source, MediaTypes.GetMediaType(source.Extension), output, MediaTypes.GetMediaType(output.Extension), request);

            var actual = TestHelper.GetAudioUtility().Info(output);

            File.Delete(output.FullName);

            TestHelper.CheckAudioUtilityInfo(expected, actual);
        }
        public void FailsWithInvalidBitDepth()
        {
            var request = new AudioUtilityRequest
            {
                BitDepth         = 64,
                TargetSampleRate = 22050,
                Channels         = new[] { 1, 2, 3, 4 },

                //BandPass
            };

            var tester = new FfmpegRawPcmAudioUtilityTester();

            TestHelper.ExceptionMatches <BitDepthOperationNotImplemented>(
                () =>
            {
                tester.InvokeCheckRequestValid(
                    this.source,
                    MediaTypes.GetMediaType(this.source.Extension),
                    this.output,
                    MediaTypes.GetMediaType(this.output.Extension),
                    request);
            },
                "Supplied bit depth of 64");
        }
        /// <summary>
        /// Prepare an audio file for analysis: convert <paramref name="sourceFile"/> to
        /// <paramref name="outputFile"/> as specified by <paramref name="request"/>.
        /// </summary>
        /// <param name="sourceFile">The source audio file.</param>
        /// <param name="outputFile">The output file to create.</param>
        /// <param name="request">The audio modification request.</param>
        /// <param name="temporaryFilesDirectory">The directory to use for temporary files.</param>
        /// <returns>Information about the source and output files.</returns>
        public static AudioUtilityModifiedInfo PrepareFile(FileInfo sourceFile, FileInfo outputFile, AudioUtilityRequest request, DirectoryInfo temporaryFilesDirectory)
        {
            var    audioUtility    = new MasterAudioUtility(temporaryFilesDirectory);
            var    sourceMimeType  = MediaTypes.GetMediaType(sourceFile.Extension);
            var    outputMimeType  = MediaTypes.GetMediaType(outputFile.Extension);
            string outputDirectory = Path.GetDirectoryName(outputFile.FullName);

            if (!Directory.Exists(outputDirectory))
            {
                Directory.CreateDirectory(outputDirectory);
            }

            audioUtility.Modify(
                sourceFile,
                sourceMimeType,
                outputFile,
                outputMimeType,
                request);

            var result = new AudioUtilityModifiedInfo
            {
                TargetInfo = audioUtility.Info(outputFile),
                SourceInfo = audioUtility.Info(sourceFile),
            };

            return(result);
        }
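        // Hedged usage sketch (not part of the original source): one way PrepareFile above might be
        // called. The file and directory paths are hypothetical; only the PrepareFile signature and
        // the AudioUtilityRequest/AudioUtilityModifiedInfo types are taken from the code above.
        public static void PrepareFileUsageSketch()
        {
            var request = new AudioUtilityRequest
            {
                MixDownToMono    = true,
                TargetSampleRate = 22050,
            };

            var modified = PrepareFile(
                new FileInfo(@"C:\data\recording.wv"),
                new FileInfo(@"C:\data\out\recording.wav"),
                request,
                new DirectoryInfo(@"C:\temp"));

            // TargetInfo describes the converted wav; SourceInfo describes the original recording.
            Console.WriteLine(modified.TargetInfo.Duration);
        }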
Example No. 7
        private static string GetNewName(FileInfo file, TimeSpan timezone)
        {
            var fileName     = file.Name;
            var fileLength   = file.Length;
            var lastModified = file.LastWriteTime;
            var mediaType    = MediaTypes.GetMediaType(file.Extension);

            var audioUtility = new MasterAudioUtility();
            var info         = audioUtility.Info(file);
            var duration     = info.Duration.HasValue ? info.Duration.Value : TimeSpan.Zero;

            var recordingStart = lastModified - duration;

            // some tweaking to get nice file names - round the minutes of last mod and duration
            // ticks are in 100-nanosecond intervals

            //var modifiedRecordingStart = lastModified.Round(TimeSpan.FromSeconds(15))
            //                             - duration.Round(TimeSpan.FromSeconds(15));

            //// DateTime rounded = new DateTime(((now.Ticks + 25000000) / 50000000) * 50000000);

            ////var roundedTotalSeconds = Math.Round(mediaFile.RecordingStart.TimeOfDay.TotalSeconds);
            ////var modifiedRecordingStart = mediaFile.RecordingStart.Date.AddSeconds(roundedTotalSeconds);

            var dateWithOffset = new DateTimeOffset(recordingStart, timezone);
            var dateTime       = dateWithOffset.ToUniversalTime().ToString(AppConfigHelper.StandardDateFormatUtc);
            var ext            = fileName.Substring(fileName.LastIndexOf('.') + 1).ToLowerInvariant();

            var prefix = fileName.Substring(0, fileName.LastIndexOf('.'));
            var result = string.Format("{0}_{1}.{2}", prefix, dateTime, ext);

            return(result);
        }
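        // Hedged illustration (not part of the original source) of the timestamp arithmetic in
        // GetNewName above: the recording start is estimated as the last-write time minus the media
        // duration, shifted by the supplied timezone, and rendered in UTC. All values below are
        // hypothetical, and the exact AppConfigHelper.StandardDateFormatUtc pattern is assumed.
        private static void GetNewNameArithmeticSketch()
        {
            var lastModified   = new DateTime(2009, 7, 5, 6, 10, 0);  // local last-write time
            var duration       = TimeSpan.FromMinutes(10);            // from AudioUtilityInfo
            var recordingStart = lastModified - duration;             // 2009-07-05 06:00 local

            // apply a +10:00 offset, then convert to UTC for the file name
            var utcStart = new DateTimeOffset(recordingStart, TimeSpan.FromHours(10)).ToUniversalTime();

            // e.g. "site1_20090704-200000Z.wav", assuming a "yyyyMMdd-HHmmss'Z'" style UTC format
            var newName = string.Format("{0}_{1}.{2}", "site1", utcStart.ToString("yyyyMMdd-HHmmss'Z'"), "wav");

            Console.WriteLine(newName);
        }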
        public void FailsWithMissingBitDepth()
        {
            var request = new AudioUtilityRequest
            {
                //BitDepth = 16,
                TargetSampleRate = 22050,
                Channels         = new[] { 1, 2, 3, 4 },

                //BandPass
            };

            var tester = new FfmpegRawPcmAudioUtilityTester();

            TestHelper.ExceptionMatches <InvalidOperationException>(
                () =>
            {
                tester.InvokeCheckRequestValid(
                    this.source,
                    MediaTypes.GetMediaType(this.source.Extension),
                    this.output,
                    MediaTypes.GetMediaType(this.output.Extension),
                    request);
            },
                "A BitDepth must be supplied");
        }
        /// <summary>
        /// Get a segment from an mp3 file.
        /// </summary>
        /// <param name="audioFile">
        /// The audio file.
        /// </param>
        /// <param name="start">
        /// The start.
        /// </param>
        /// <param name="end">
        /// The end.
        /// </param>
        /// <param name="requestMimeType">
        /// The request Mime Type.
        /// </param>
        /// <returns>
        /// Byte array of audio segment. Byte array will be null or 0 length if segmentation failed.
        /// </returns>
        public byte[] SegmentMp3(string audioFile, long? start, long? end, string requestMimeType)
        {
            try
            {
                var pathToMp3Split = AppConfigHelper.Mp3SpltExe;

                var mimeType = MediaTypes.GetMediaType(Path.GetExtension(audioFile));

                if (mimeType == MediaTypes.MediaTypeMp3 && requestMimeType == MediaTypes.MediaTypeMp3 &&
                    !string.IsNullOrEmpty(pathToMp3Split) && File.Exists(pathToMp3Split))
                {
                    var tempFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.ExtMp3);

                    var segmentedFile = this.SingleSegment(
                        tempFile.FullName, start ?? 0, end ?? long.MaxValue);

                    byte[] bytes = File.ReadAllBytes(segmentedFile);

                    tempFile.Delete();

                    return(bytes);
                }
            }
            catch
            {
                return(new byte[0]);
            }

            return(new byte[0]);
        }
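        // Hedged usage sketch (not part of the original source), assumed to live in the same class
        // as SegmentMp3 above. The file path and the treatment of start/end as millisecond offsets
        // are hypothetical; only the SegmentMp3 signature is taken from the code above.
        public void SegmentMp3UsageSketch()
        {
            var bytes = this.SegmentMp3(
                @"C:\data\recording.mp3",
                start: 0,
                end: 60000,
                requestMimeType: MediaTypes.MediaTypeMp3);

            if (bytes.Length == 0)
            {
                // segmentation failed, or the mp3splt/mp3 preconditions were not met
            }
        }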
Example No. 10
        public void SegmentsMp3Correctly2Sox()
        {
            var expected = new AudioUtilityInfo
            {
                Duration      = TimeSpan.FromMinutes(3) + TimeSpan.FromSeconds(20),
                SampleRate    = 22050,
                ChannelCount  = 1,
                MediaType     = MediaTypes.MediaTypeMp3,
                BitsPerSecond = 32000,
            };

            var request = new AudioUtilityRequest
            {
                MixDownToMono = true,
                OffsetStart   = TimeSpan.FromSeconds(15),
                OffsetEnd     = TimeSpan.FromMinutes(3) + TimeSpan.FromSeconds(35),
            };

            var util = TestHelper.GetAudioUtilitySox();

            var source = TestHelper.GetAudioFile("Currawongs_curlew_West_Knoll_Bees_20091102-183000.mp3");
            var output = PathHelper.GetTempFile(MediaTypes.ExtMp3);

            util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, MediaTypes.GetMediaType(output.Extension), request);

            var actual = util.Info(output);

            File.Delete(output.FullName);

            TestHelper.CheckAudioUtilityInfo(expected, actual);
        }
Example No. 11
        public void SegmentsMp3Correctly6Master()
        {
            var expected = new AudioUtilityInfo
            {
                Duration      = TimeSpan.FromSeconds(134.6),
                SampleRate    = 44100,
                ChannelCount  = 1,
                MediaType     = MediaTypes.MediaTypeMp3,
                BitsPerSecond = 64000,
            };

            var request = new AudioUtilityRequest
            {
                MixDownToMono    = false,
                Channels         = 2.AsArray(),
                OffsetStart      = TimeSpan.FromSeconds(27),
                TargetSampleRate = 44100,
            };

            var util = TestHelper.GetAudioUtility();

            var source = TestHelper.GetAudioFile("A French Fiddle Speaks.mp3");
            var output = PathHelper.GetTempFile(MediaTypes.ExtMp3);

            util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, MediaTypes.GetMediaType(output.Extension), request);

            var actual = util.Info(output);

            File.Delete(output.FullName);

            TestHelper.CheckAudioUtilityInfo(expected, actual);
        }
        /// <summary>
        /// Get metadata for the given file.
        /// </summary>
        /// <param name="source">File to get metadata from. This should be an audio file.</param>
        /// <returns>An <see cref="AudioUtilityInfo"/> containing metadata for the given file.</returns>
        public override AudioUtilityInfo Info(FileInfo source)
        {
            var mediaType = MediaTypes.GetMediaType(source.Extension);
            AudioUtilityInfo info;

            if (mediaType == MediaTypes.MediaTypeWavpack)
            {
                if (this.wvunpackUtility == null)
                {
                    throw new AudioFormatNotSupportedException(WavPackAudioUtility.MissingBinary);
                }

                info = this.Combine(this.wvunpackUtility.Info(source), this.ffmpegUtility.Info(source));
            }
            else if (mediaType == MediaTypes.MediaTypeMp3 && this.soxUtility.SupportsMp3)
            {
                info = this.Combine(this.soxUtility.Info(source), this.ffmpegUtility.Info(source));
            }
            else if (mediaType == MediaTypes.MediaTypeWav)
            {
                info = this.Combine(this.soxUtility.Info(source), this.ffmpegUtility.Info(source));
            }
            else if (mediaType == MediaTypes.MediaTypePcmRaw)
            {
                info = this.ffmpegRawPcmUtility.Info(source);
            }
            else
            {
                info = this.ffmpegUtility.Info(source);
            }

            return(info);
        }
Example No. 13
        public static void Modify(
            string filename,
            AudioUtilityInfo sourceExpected,
            AudioUtilityRequest request,
            string outputMimeType,
            AudioUtilityInfo outputExpected,
            Action <AudioUtilityInfo, AudioUtilityInfo> additionalTests = null)
        {
            var source = PathHelper.GetTestAudioFile(filename);

            var destExtension  = MediaTypes.GetExtension(outputMimeType);
            var outputFilename = Path.GetFileNameWithoutExtension(filename) + "_modified." + destExtension;

            foreach (var util in new[] { TestHelper.GetAudioUtility() })
            {
                var dir    = PathHelper.GetTempDir();
                var output = new FileInfo(Path.Combine(dir.FullName, outputFilename));

                util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, outputMimeType, request);

                var sourceInfo = util.Info(source);

                TestHelper.CheckAudioUtilityInfo(sourceExpected, sourceInfo);

                var outputInfo     = util.Info(output);
                var outputInfoText = GetDurationInfo(outputInfo);

                additionalTests?.Invoke(sourceExpected, sourceInfo);

                PathHelper.DeleteTempDir(dir);
            }
        }
Example No. 14
        public void SoxResamplingShouldBeDeterministic()
        {
            var expected = new AudioUtilityInfo
            {
                Duration      = TimeSpan.FromSeconds(60),
                SampleRate    = 22050,
                ChannelCount  = 1,
                MediaType     = MediaTypes.MediaTypeWav,
                BitsPerSecond = 352800,
            };

            var request = new AudioUtilityRequest
            {
                MixDownToMono    = true,
                TargetSampleRate = 22050,
            };

            var util = TestHelper.GetAudioUtility();

            var source = TestHelper.GetAudioFile("CaneToad_Gympie_44100.wav");

            var repeats = new double[5][];

            for (int r = 0; r < repeats.Length; r++)
            {
                var output = PathHelper.GetTempFile(MediaTypes.ExtWav);

                util.Modify(source, MediaTypes.GetMediaType(source.Extension), output,
                            MediaTypes.GetMediaType(output.Extension), request);

                var actual = util.Info(output);

                TestHelper.CheckAudioUtilityInfo(expected, actual);

                var reader = new WavReader(output);

                TestHelper.WavReaderAssertions(reader, actual);

                repeats[r] = reader.Samples;

                File.Delete(output.FullName);
            }

            for (int i = 1; i < repeats.Length; i++)
            {
                Assert.AreEqual(repeats[0].Length, repeats[i].Length);

                var totalDifference = 0.0;
                for (int j = 0; j < repeats[0].Length; j++)
                {
                    var delta = Math.Abs(repeats[i][j] - repeats[0][j]);
                    totalDifference += delta;
                }

                CollectionAssert.AreEqual(repeats[0], repeats[i], $"Repeat {i} was not identical to repeat 0. Total delta: {totalDifference}");
            }
        }
Example No. 15
        public static async Task <int> Execute(Arguments arguments)
        {
            if (arguments == null)
            {
                throw new NoDeveloperMethodException();
            }

            var sw = new Stopwatch();

            sw.Start();
            ISourcePreparer sourcePreparer = new LocalSourcePreparer(filterShortSegments: true, useOldNamingFormat: false);

            //create analysis settings using arguments
            AnalysisSettings settings = new AnalysisSettings()
            {
                AnalysisMaxSegmentDuration = TimeSpan.FromSeconds(arguments.SegmentDuration),
                SegmentMediaType           = MediaTypes.GetMediaType(arguments.SegmentFileExtension),
                AnalysisMinSegmentDuration = TimeSpan.FromSeconds(arguments.SegmentDurationMinimum),
                SegmentOverlapDuration     = TimeSpan.FromSeconds(arguments.SegmentOverlap),
                AnalysisTargetSampleRate   = arguments.SampleRate,
                AnalysisTempDirectory      = (arguments.TemporaryFilesDir ?? arguments.OutputDir).ToDirectoryInfo(),
            };

            // create segments from file
            var fileSegment = new FileSegment(arguments.InputFile.ToFileInfo(), TimeAlignment.None, dateBehavior: FileSegment.FileDateBehavior.None)
            {
                SegmentStartOffset = TimeSpan.FromSeconds(arguments.StartOffset),
            };

            if (arguments.EndOffset.HasValue)
            {
                fileSegment.SegmentEndOffset = TimeSpan.FromSeconds(arguments.EndOffset.Value);
            }

            var fileSegments = sourcePreparer.CalculateSegments(new[] { fileSegment }, settings).ToList();

            LoggedConsole.WriteLine(
                "Started segmenting at {0} {1}: {2}.",
                DateTime.Now,
                arguments.Parallel ? "in parallel" : "sequentially",
                arguments.InputFile);

            if (arguments.Parallel)
            {
                RunParallel(fileSegments, sourcePreparer, settings, arguments);
            }
            else
            {
                var runTime = await RunSequential(fileSegments, sourcePreparer, settings, arguments);
            }

            sw.Stop();
            LoggedConsole.WriteLine("Took {0}. Done.", sw.Elapsed);
            return(ExceptionLookup.Ok);
        }
 private void RunUtility(AudioUtilityRequest request)
 {
     TestHelper
     .GetAudioUtilityFfmpegRawPcm()
     .Modify(
         this.source,
         MediaTypes.GetMediaType(this.source.Extension),
         this.output,
         MediaTypes.GetMediaType(this.output.Extension),
         request);
 }
        public void FailsWithNullRequest()
        {
            var tester = new FfmpegRawPcmAudioUtilityTester();

            TestHelper.ExceptionMatches <ArgumentNullException>(
                () =>
            {
                tester.InvokeCheckRequestValid(
                    this.source,
                    MediaTypes.GetMediaType(this.source.Extension),
                    this.output,
                    MediaTypes.GetMediaType(this.output.Extension),
                    null);
            },
                "raw PCM data requires prior knowledge");
        }
Example No. 18
        public static FileStreamResult Get(string assemblyName, string resourceAddress, string folders)
        {
            var resourceName = string.Empty;

            try
            {
                resourceName = Common.GetCorrectedResourceName(resourceAddress);
                var pluginAssemblyAddress = GetCorrectedFullAssemblyAddress(assemblyName, folders);
                var physicalPath          = HttpContext.Current.Server.MapPath(pluginAssemblyAddress);
                var stream = ResourceHelper.GetEmbeddedResource(physicalPath, resourceName);
                return(new FileStreamResult(stream, MediaTypes.GetMediaType(resourceName)));
            }
            catch (Exception)
            {
                return(new FileStreamResult(new MemoryStream(), MediaTypes.GetMediaType(resourceName)));
            }
        }
        /// <summary>
        /// Get a segment from an mp3 file.
        /// </summary>
        /// <param name="audioFile">
        /// The audio file.
        /// </param>
        /// <param name="start">
        /// The start.
        /// </param>
        /// <param name="end">
        /// The end.
        /// </param>
        /// <param name="requestMimeType">
        /// The request Mime Type.
        /// </param>
        /// <returns>
        /// Byte array of audio segment. Byte array will be null or 0 length if segmentation failed.
        /// </returns>
        public byte[] SegmentMp3(string audioFile, long? start, long? end, string requestMimeType)
        {
            try
            {
                const string Mp3SpltPathKey = "PathToMp3Splt";

                var pathToMp3Split = ConfigurationManager.AppSettings.AllKeys.Contains(Mp3SpltPathKey)
                                         ? ConfigurationManager.AppSettings[Mp3SpltPathKey]
                                         : string.Empty;

                const string ConversionfolderKey = "ConversionFolder";

                var conversionPath = ConfigurationManager.AppSettings.AllKeys.Contains(ConversionfolderKey)
                                         ? ConfigurationManager.AppSettings[ConversionfolderKey]
                                         : string.Empty;

                var mimeType = MediaTypes.GetMediaType(Path.GetExtension(audioFile));

                if (mimeType == MediaTypes.MediaTypeMp3 && requestMimeType == MediaTypes.MediaTypeMp3 &&
                    !string.IsNullOrEmpty(pathToMp3Split) && File.Exists(pathToMp3Split) &&
                    !string.IsNullOrEmpty(conversionPath) && Directory.Exists(conversionPath))
                {
                    var tempFile = TempFileHelper.NewTempFile(this.TemporaryFilesDirectory, MediaTypes.ExtMp3);

                    var segmentedFile = this.SingleSegment(
                        tempFile.FullName, start.HasValue ? start.Value : 0, end.HasValue ? end.Value : long.MaxValue);

                    byte[] bytes = File.ReadAllBytes(segmentedFile);

                    tempFile.Delete();

                    return(bytes);
                }
            }
            catch
            {
                return(new byte[0]);
            }

            return(new byte[0]);
        }
        public void SegmentsRawPcmCorrectlyMaster(object startWrapped, object endWrapped, bool mixDown, int expectedChannels, int expectedBitRate)
        {
            double? start = (double?)startWrapped;
            double? end   = (double?)endWrapped;

            var duration = TimeSpan.FromSeconds((end ?? 60.0) - (start ?? 0.0));

            var expected = new AudioUtilityInfo
            {
                Duration      = duration,
                SampleRate    = 44100,
                ChannelCount  = expectedChannels,
                MediaType     = MediaTypes.MediaTypeWav,
                BitsPerSecond = expectedBitRate,
            };

            var request = new AudioUtilityRequest
            {
                MixDownToMono    = mixDown,
                OffsetStart      = start?.Seconds(),
                OffsetEnd        = end?.Seconds(),
                BitDepth         = 16,
                TargetSampleRate = 44100,
                Channels         = new[] { 1, 2, 3, 4 },
            };

            TestHelper
            .GetAudioUtility()
            .Modify(
                this.source,
                MediaTypes.GetMediaType(this.source.Extension),
                this.output,
                MediaTypes.GetMediaType(this.output.Extension),
                request);

            var actual = TestHelper.GetAudioUtility().Info(this.output);

            TestHelper.CheckAudioUtilityInfo(expected, actual);
        }
        private static void ChannelTest(
            string sourceFile,
            int[] channels,
            bool? mixDownToMono,
            int[][] expectedFrequencies,
            AudioUtilityRequest customRequest = null)
        {
            // adjust params for this test
            var sourceInfo = TestHelper.AudioDetails[sourceFile];

            var expected = sourceInfo.ShallowClone();

            expected.ChannelCount = expectedFrequencies.Length;

            var audioUtilityRequest = customRequest ?? new AudioUtilityRequest();

            audioUtilityRequest.MixDownToMono = mixDownToMono;
            audioUtilityRequest.Channels      = channels;

            var outputMimeType = MediaTypes.MediaTypeWav;
            var source         = PathHelper.GetTestAudioFile(sourceFile);

            var destExtension  = MediaTypes.GetExtension(outputMimeType);
            var outputFilename = Path.GetFileNameWithoutExtension(FourChannelFile) + "_modified." + destExtension;

            var util = TestHelper.GetAudioUtility();

            var dir    = PathHelper.GetTempDir();
            var output = new FileInfo(Path.Combine(dir.FullName, outputFilename));

            expected.SourceFile = output;

            util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, outputMimeType, audioUtilityRequest);

            DoFrequencyAnalysis(expected, expectedFrequencies);

            PathHelper.DeleteTempDir(dir);
        }
Example No. 22
        /// <summary>
        /// Convert an audio file to a specific wav format using the default audio utility settings.
        /// </summary>
        /// <param name="source">
        /// The source audio file.
        /// </param>
        /// <param name="output">
        /// The destination wav path.
        /// </param>
        /// <param name="request">
        /// The request.
        /// </param>
        public static void SegmentToWav(FileInfo source, FileInfo output, AudioUtilityRequest request)
        {
            var audioUtility = new MasterAudioUtility();

            // allows start and end offsets to be specified independently or not at all
            if (!request.OffsetStart.HasValue)
            {
                request.OffsetStart = TimeSpan.Zero;
            }

            if (!request.OffsetEnd.HasValue)
            {
                var info = audioUtility.Info(source);
                request.OffsetEnd = info.Duration;
            }

            audioUtility.Modify(
                source,
                MediaTypes.GetMediaType(source.Extension),
                output,
                MediaTypes.MediaTypeWav,
                request);
        }
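        // Hedged usage sketch (not part of the original source): segment the first minute of a
        // recording to mono 22.05 kHz wav using SegmentToWav above. The paths are hypothetical.
        public static void SegmentToWavUsageSketch()
        {
            var request = new AudioUtilityRequest
            {
                OffsetStart      = TimeSpan.Zero,
                OffsetEnd        = TimeSpan.FromMinutes(1),
                MixDownToMono    = true,
                TargetSampleRate = 22050,
            };

            SegmentToWav(
                new FileInfo(@"C:\data\recording.mp3"),
                new FileInfo(@"C:\data\out\recording_first_minute.wav"),
                request);
        }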
Example No. 23
        public void SegmentsMp3NotAvailableOnOsxWithSox()
        {
            var request = new AudioUtilityRequest
            {
                MixDownToMono    = true,
                OffsetStart      = TimeSpan.FromSeconds(20),
                OffsetEnd        = TimeSpan.FromSeconds(50),
                TargetSampleRate = 11025,
            };

            var util = TestHelper.GetAudioUtilitySox();

            var source = TestHelper.GetAudioFile("Currawongs_curlew_West_Knoll_Bees_20091102-183000.mp3");
            var output = PathHelper.GetTempFile(MediaTypes.ExtMp3);

            Assert.ThrowsException <NotSupportedException>(
                () => util.Info(source),
                "cannot be processed. Valid formats are: wav (audio/wav), flac (audio/flac).");

            Assert.ThrowsException <NotSupportedException>(
                () => util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, MediaTypes.GetMediaType(output.Extension), request),
                "cannot be processed. Valid formats are: wav (audio/wav), flac (audio/flac)");
        }
Example No. 24
        public void SegmentsMp3Correctly3Master()
        {
            /*
             *
             * mp3splt accuracy varies with the quality of the input file!
             *
             */
            var expected = new AudioUtilityInfo
            {
                Duration      = TimeSpan.FromSeconds(48),
                SampleRate    = 11025,
                ChannelCount  = 1,
                MediaType     = MediaTypes.MediaTypeMp3,
                BitsPerSecond = 16000,
            };

            var request = new AudioUtilityRequest
            {
                MixDownToMono    = true,
                OffsetStart      = TimeSpan.Zero,
                OffsetEnd        = TimeSpan.FromSeconds(48),
                TargetSampleRate = 11025,
            };

            var util = TestHelper.GetAudioUtility();

            var source = TestHelper.GetAudioFile("Currawongs_curlew_West_Knoll_Bees_20091102-183000.mp3");
            var output = PathHelper.GetTempFile(MediaTypes.ExtMp3);

            util.Modify(source, MediaTypes.GetMediaType(source.Extension), output, MediaTypes.GetMediaType(output.Extension), request);

            var actual = util.Info(output);

            File.Delete(output.FullName);

            TestHelper.CheckAudioUtilityInfo(expected, actual, 380);
        }
Example No. 25
        /// <summary>
        /// Get metadata for the given file.
        /// </summary>
        /// <param name="source">File to get metadata from. This should be an audio file.</param>
        /// <returns>An <see cref="AudioUtilityInfo"/> containing metadata for the given file.</returns>
        public override AudioUtilityInfo Info(FileInfo source)
        {
            var mediaType = MediaTypes.GetMediaType(source.Extension);
            AudioUtilityInfo info;

            if (mediaType == MediaTypes.MediaTypeWavpack)
            {
                info = this.Combine(this.wvunpackUtility.Info(source), this.ffmpegUtility.Info(source));
            }
            else if (mediaType == MediaTypes.MediaTypeMp3 || mediaType == MediaTypes.MediaTypeWav)
            {
                info = this.Combine(this.soxUtility.Info(source), this.ffmpegUtility.Info(source));
            }
            else if (mediaType == MediaTypes.MediaTypePcmRaw)
            {
                info = this.ffmpegRawPcmUtility.Info(source);
            }
            else
            {
                info = this.ffmpegUtility.Info(source);
            }

            return(info);
        }
Example No. 26
 public void MediaTypeIsCorrect(SerializationFormat format, string expected)
 {
     Assert.Equal(expected, MediaTypes.GetMediaType(format));
 }
Example No. 27
 public static StringContent SERIALIZE_CONTENT <T>(this T m, MediaTypes mediaType = MediaTypes.JSON)
 {
     return(new StringContent(m.SERIALIZE(), Encoding.UTF8, mediaType.GetMediaType()));
 }
Example No. 28
        /// <summary>
        /// 2. Analyses a long audio recording (mp3 or wav) as per the passed config file. Outputs an events.csv file AND an
        /// indices.csv file.
        /// Signed off: Michael Towsey 4th December 2012
        /// </summary>
        public static void Execute(Arguments arguments)
        {
            if (arguments == null)
            {
                throw new NoDeveloperMethodException();
            }

            LoggedConsole.WriteLine("# PROCESS LONG RECORDING");
            LoggedConsole.WriteLine("# DATE AND TIME: " + DateTime.Now);

            // 1. set up the necessary files
            var sourceAudio        = arguments.Source;
            var configFile         = arguments.Config.ToFileInfo();
            var outputDirectory    = arguments.Output;
            var tempFilesDirectory = arguments.TempDir;

            // if a temp dir is not given, use output dir as temp dir
            if (tempFilesDirectory == null)
            {
                Log.Warn("No temporary directory provided, using output directory");
                tempFilesDirectory = outputDirectory;
            }

            // try to automatically find the config file
            if (configFile == null)
            {
                throw new FileNotFoundException("No config file argument provided");
            }
            else if (!configFile.Exists)
            {
                Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");

                // we use .ToString() here to get the original input string - using FullName always produces an absolute path relative to the pwd... we don't want to prematurely make assumptions:
                // e.g. We require a missing absolute path to fail... that wouldn't work with .Name
                // e.g. We require a relative path to try and resolve, using .FullName would fail the first absolute check inside ResolveConfigFile
                configFile = ConfigFile.Resolve(configFile.ToString(), Directory.GetCurrentDirectory().ToDirectoryInfo());
            }

            if (arguments.StartOffset.HasValue ^ arguments.EndOffset.HasValue)
            {
                throw new InvalidStartOrEndException("If StartOffset or EndOffset is specified, then both must be specified");
            }

            if (arguments.StartOffset.HasValue && arguments.EndOffset.HasValue && arguments.EndOffset.Value <= arguments.StartOffset.Value)
            {
                throw new InvalidStartOrEndException("Start offset must be less than end offset.");
            }

            LoggedConsole.WriteLine("# Recording file:      " + sourceAudio.FullName);
            LoggedConsole.WriteLine("# Configuration file:  " + configFile);
            LoggedConsole.WriteLine("# Output folder:       " + outputDirectory);
            LoggedConsole.WriteLine("# Temp File Directory: " + tempFilesDirectory);

            // optionally copy logs / config to make results easier to understand
            // TODO: remove, see https://github.com/QutEcoacoustics/audio-analysis/issues/133
            if (arguments.WhenExitCopyConfig || arguments.WhenExitCopyLog)
            {
                AppDomain.CurrentDomain.ProcessExit += (sender, args) => { Cleanup(arguments, configFile); };
            }

            // 2. initialize the analyzer
            // we're changing the way resolving config files works. Ideally, we'd like to use statically typed config files
            // but we can't do that unless we know which type we have to load first! Currently the analyzer to load is in
            // the config file so we can't know which analyzer we can use. Thus we will change to using the file name,
            // or an argument to resolve the analyzer to load.
            // Get analysis name:
            IAnalyser2 analyzer = FindAndCheckAnalyzer <IAnalyser2>(arguments.AnalysisIdentifier, configFile.Name);

            // 2. get the analysis config
            AnalyzerConfig configuration = analyzer.ParseConfig(configFile);

            SaveBehavior saveIntermediateWavFiles  = configuration.SaveIntermediateWavFiles;
            bool         saveIntermediateDataFiles = configuration.SaveIntermediateCsvFiles;
            SaveBehavior saveSonogramsImages       = configuration.SaveSonogramImages;

            bool filenameDate = configuration.RequireDateInFilename;

            if (configuration[AnalysisKeys.AnalysisName].IsNotWhitespace())
            {
                Log.Warn("Your config file has `AnalysisName` set - this property is deprecated and ignored");
            }

            // AT 2018-02: changed logic so default index properties loaded if not provided
            FileInfo indicesPropertiesConfig = IndexProperties.Find(configuration, configFile);

            if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
            {
                Log.Warn("IndexProperties config can not be found! Loading a default");
                indicesPropertiesConfig = ConfigFile.Default <Dictionary <string, IndexProperties> >();
            }

            LoggedConsole.WriteLine("# IndexProperties Cfg: " + indicesPropertiesConfig.FullName);

            // min score for an acceptable event
            Log.Info("Minimum event threshold has been set to " + configuration.EventThreshold);

            FileSegment.FileDateBehavior defaultBehavior = FileSegment.FileDateBehavior.Try;
            if (filenameDate)
            {
                if (!FileDateHelpers.FileNameContainsDateTime(sourceAudio.Name))
                {
                    throw new InvalidFileDateException(
                              "When RequireDateInFilename option is set, the filename of the source audio file must contain "
                              + "a valid AND UNAMBIGUOUS date. Such a date was not able to be parsed.");
                }

                defaultBehavior = FileSegment.FileDateBehavior.Required;
            }

            // 3. initialize the AnalysisCoordinator class that will do the analysis
            var analysisCoordinator = new AnalysisCoordinator(
                new LocalSourcePreparer(),
                saveIntermediateWavFiles,
                false,
                arguments.Parallel);

            // 4. get the segment of audio to be analysed
            // if tiling output, specify that FileSegment needs to be able to read the date
            var fileSegment         = new FileSegment(sourceAudio, arguments.AlignToMinute, null, defaultBehavior);
            var bothOffsetsProvided = arguments.StartOffset.HasValue && arguments.EndOffset.HasValue;

            if (bothOffsetsProvided)
            {
                fileSegment.SegmentStartOffset = TimeSpan.FromSeconds(arguments.StartOffset.Value);
                fileSegment.SegmentEndOffset   = TimeSpan.FromSeconds(arguments.EndOffset.Value);
            }
            else
            {
                Log.Debug("Neither start nor end segment offsets provided. Therefore both were ignored.");
            }

            // 6. initialize the analysis settings object
            var analysisSettings = analyzer.DefaultSettings;

            analysisSettings.ConfigFile                = configFile;
            analysisSettings.Configuration             = configuration;
            analysisSettings.AnalysisOutputDirectory   = outputDirectory;
            analysisSettings.AnalysisTempDirectory     = tempFilesDirectory;
            analysisSettings.AnalysisDataSaveBehavior  = saveIntermediateDataFiles;
            analysisSettings.AnalysisImageSaveBehavior = saveSonogramsImages;
            analysisSettings.AnalysisChannelSelection  = arguments.Channels;
            analysisSettings.AnalysisMixDownToMono     = arguments.MixDownToMono;

            var segmentDuration = configuration.SegmentDuration?.Seconds();

            if (!segmentDuration.HasValue)
            {
                segmentDuration = analysisSettings.AnalysisMaxSegmentDuration ?? TimeSpan.FromMinutes(1);
                Log.Warn(
                    $"Can't read `{nameof(AnalyzerConfig.SegmentDuration)}` from config file. "
                    + $"Default value of {segmentDuration} used)");
            }

            analysisSettings.AnalysisMaxSegmentDuration = segmentDuration.Value;

            var segmentOverlap = configuration.SegmentOverlap?.Seconds();

            if (!segmentOverlap.HasValue)
            {
                segmentOverlap = analysisSettings.SegmentOverlapDuration;
                Log.Warn(
                    $"Can't read `{nameof(AnalyzerConfig.SegmentOverlap)}` from config file. "
                    + $"Default value of {segmentOverlap} used)");
            }

            analysisSettings.SegmentOverlapDuration = segmentOverlap.Value;

            // set target sample rate
            var resampleRate = configuration.ResampleRate;

            if (!resampleRate.HasValue)
            {
                resampleRate = analysisSettings.AnalysisTargetSampleRate ?? AppConfigHelper.DefaultTargetSampleRate;
                Log.Warn(
                    $"Can't read {nameof(configuration.ResampleRate)} from config file. "
                    + $"Default value of {resampleRate} used)");
            }

            analysisSettings.AnalysisTargetSampleRate = resampleRate;

            Log.Info(
                $"{nameof(configuration.SegmentDuration)}={segmentDuration}, "
                + $"{nameof(configuration.SegmentOverlap)}={segmentOverlap}, "
                + $"{nameof(configuration.ResampleRate)}={resampleRate}");

            // 7. ####################################### DO THE ANALYSIS ###################################
            LoggedConsole.WriteLine("START ANALYSIS ...");
            var analyserResults = analysisCoordinator.Run(fileSegment, analyzer, analysisSettings);

            // ##############################################################################################
            // 8. PROCESS THE RESULTS
            LoggedConsole.WriteLine(string.Empty);
            LoggedConsole.WriteLine("START PROCESSING RESULTS ...");
            if (analyserResults == null)
            {
                LoggedConsole.WriteErrorLine("###################################################\n");
                LoggedConsole.WriteErrorLine("The Analysis Run Coordinator has returned a null result.");
                LoggedConsole.WriteErrorLine("###################################################\n");
                throw new AnalysisOptionDevilException();
            }

            // Merge and correct main result types
            EventBase[]         mergedEventResults         = ResultsTools.MergeResults(analyserResults, ar => ar.Events, ResultsTools.CorrectEvent);
            SummaryIndexBase[]  mergedIndicesResults       = ResultsTools.MergeResults(analyserResults, ar => ar.SummaryIndices, ResultsTools.CorrectSummaryIndex);
            SpectralIndexBase[] mergedSpectralIndexResults = ResultsTools.MergeResults(analyserResults, ar => ar.SpectralIndices, ResultsTools.CorrectSpectrumIndex);

            // not an exceptional state, do not throw exception
            if (mergedEventResults != null && mergedEventResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no EVENTS (mergedResults had zero count)");
            }

            if (mergedIndicesResults != null && mergedIndicesResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no Summary INDICES (mergedResults had zero count)");
            }

            if (mergedSpectralIndexResults != null && mergedSpectralIndexResults.Length == 0)
            {
                LoggedConsole.WriteWarnLine("The analysis produced no Spectral INDICES (merged results had zero count)");
            }

            // 9. CREATE SUMMARY INDICES IF NECESSARY (FROM EVENTS)
#if DEBUG
            // get the duration of the original source audio file - need this to convert Events datatable to Indices Datatable
            var audioUtility = new MasterAudioUtility(tempFilesDirectory);
            var mimeType     = MediaTypes.GetMediaType(sourceAudio.Extension);
            var sourceInfo   = audioUtility.Info(sourceAudio);

            // updated by reference all the way down in LocalSourcePreparer
            Debug.Assert(fileSegment.TargetFileDuration == sourceInfo.Duration);
#endif
            var duration = fileSegment.TargetFileDuration.Value;

            ResultsTools.ConvertEventsToIndices(
                analyzer,
                mergedEventResults,
                ref mergedIndicesResults,
                duration,
                configuration.EventThreshold);
            int eventsCount           = mergedEventResults?.Length ?? 0;
            int numberOfRowsOfIndices = mergedIndicesResults?.Length ?? 0;

            // 10. Allow analysers to post-process

            // TODO: remove results directory if possible
            var instanceOutputDirectory =
                AnalysisCoordinator.GetNamedDirectory(analysisSettings.AnalysisOutputDirectory, analyzer);

            // 11. IMPORTANT - this is where IAnalyser2's post processor gets called.
            // Produces all spectrograms and images of SPECTRAL INDICES.
            // Long duration spectrograms are drawn IFF analysis type is Towsey.Acoustic
            analyzer.SummariseResults(analysisSettings, fileSegment, mergedEventResults, mergedIndicesResults, mergedSpectralIndexResults, analyserResults);

            // 12. SAVE THE RESULTS
            string fileNameBase = Path.GetFileNameWithoutExtension(sourceAudio.Name);

            var eventsFile  = ResultsTools.SaveEvents(analyzer, fileNameBase, instanceOutputDirectory, mergedEventResults);
            var indicesFile = ResultsTools.SaveSummaryIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedIndicesResults);
            var spectraFile = ResultsTools.SaveSpectralIndices(analyzer, fileNameBase, instanceOutputDirectory, mergedSpectralIndexResults);

            // 13. THIS IS WHERE SUMMARY INDICES ARE PROCESSED
            //     Convert summary indices to black and white tracks image
            if (mergedIndicesResults == null)
            {
                Log.Info("No summary indices produced");
            }
            else
            {
                if (indicesPropertiesConfig == null || !indicesPropertiesConfig.Exists)
                {
                    throw new InvalidOperationException("Cannot process indices without an index configuration file, the file could not be found!");
                }

                // 5000 is an arbitrary limit on the amount of data we will try to render.
                if (mergedIndicesResults.Length > 5000)
                {
                    Log.Warn("Summary Indices Image not able to be drawn - there are too many indices to render");
                }
                else
                {
                    var    basename   = Path.GetFileNameWithoutExtension(fileNameBase);
                    string imageTitle = $"SOURCE:{basename},   {Meta.OrganizationTag};  ";

                    // Draw Tracks-Image of Summary indices
                    // set time scale resolution for drawing of summary index tracks
                    TimeSpan timeScale   = TimeSpan.FromSeconds(0.1);
                    Bitmap   tracksImage =
                        IndexDisplay.DrawImageOfSummaryIndices(
                            IndexProperties.GetIndexProperties(indicesPropertiesConfig),
                            indicesFile,
                            imageTitle,
                            timeScale,
                            fileSegment.TargetFileStartDate);
                    var imagePath = FilenameHelpers.AnalysisResultPath(instanceOutputDirectory, basename, "SummaryIndices", ImageFileExt);
                    tracksImage.Save(imagePath);
                }
            }

            // 14. wrap up, write stats
            LoggedConsole.WriteLine("INDICES CSV file(s) = " + (indicesFile?.Name ?? "<<No indices result, no file!>>"));
            LoggedConsole.WriteLine("\tNumber of rows (i.e. minutes) in CSV file of indices = " + numberOfRowsOfIndices);
            LoggedConsole.WriteLine(string.Empty);

            if (eventsFile == null)
            {
                LoggedConsole.WriteLine("An Events CSV file was NOT returned.");
            }
            else
            {
                LoggedConsole.WriteLine("EVENTS CSV file(s) = " + eventsFile.Name);
                LoggedConsole.WriteLine("\tNumber of events = " + eventsCount);
            }

            Log.Success($"Analysis Complete.\nSource={sourceAudio.Name}\nOutput={instanceOutputDirectory.FullName}");
        }
Example No. 29
 public void MediaTypeAndVersionIsCorrect(SerializationFormat format, SpecificationVersion schemaVersion, string expected)
 {
     Assert.Equal(expected, MediaTypes.GetMediaType(format, schemaVersion));
 }
Example No. 30
        /// <summary>
        /// Segment a <paramref name="source"/> audio file.
        /// <paramref name="output"/> file will be created.
        /// Will not delete the output.
        /// </summary>
        /// <param name="source">
        /// The source audio file.
        /// </param>
        /// <param name="sourceMediaType">
        /// The source Mime Type.
        /// </param>
        /// <param name="output">
        /// The output audio file.
        /// </param>
        /// <param name="outputMediaType">
        /// The output Mime Type.
        /// </param>
        /// <param name="request">
        /// The request.
        /// </param>
        public override void Modify(FileInfo source, string sourceMediaType, FileInfo output, string outputMediaType, AudioUtilityRequest request)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }

            if (output == null)
            {
                throw new ArgumentNullException("output");
            }

            if (source.FullName == output.FullName)
            {
                throw new ArgumentException("Source and output cannot be the same path: " + source.FullName);
            }

            var segmentRequest = new AudioUtilityRequest
            {
                OffsetStart   = request.OffsetStart,
                OffsetEnd     = request.OffsetEnd,
                MixDownToMono = false,
            };

            FileInfo soxSourceFile;
            var      soxRequest = request;

            // do specialized convert and/or segment
            if (sourceMediaType == MediaTypes.MediaTypeWavpack)
            {
                // convert and segment wavpack file to wav
                soxSourceFile          = this.SegmentWavpackToWav(source, segmentRequest);
                soxRequest.OffsetStart = null;
                soxRequest.OffsetEnd   = null;
            }
            else if (sourceMediaType == MediaTypes.MediaTypeMp3)
            {
                // segment mp3 file
                soxSourceFile          = this.SegmentMp3(source, sourceMediaType, segmentRequest);
                soxRequest.OffsetStart = null;
                soxRequest.OffsetEnd   = null;
            }
            else if (sourceMediaType == MediaTypes.MediaTypePcmRaw)
            {
                // transform (and segment) raw PCM file
                // the raw file needs additional information to proceed
                segmentRequest.BitDepth         = request.BitDepth;
                segmentRequest.Channels         = request.Channels;
                segmentRequest.TargetSampleRate = request.TargetSampleRate;

                soxSourceFile = this.SegmentRawPcmToWav(source, segmentRequest);

                // should probably null Channels & TargetSampleRate but they should equivalently be noops
                // sox does not support bit depth - it must be nulled
                soxRequest.BitDepth = null;

                soxRequest.OffsetStart = null;
                soxRequest.OffsetEnd   = null;
            }
            else if (sourceMediaType != MediaTypes.MediaTypeWav && sourceMediaType != MediaTypes.MediaTypeMp3)
            {
                // convert to wav using ffmpeg
                soxSourceFile          = this.ConvertNonWavOrMp3(source, sourceMediaType, segmentRequest);
                soxRequest.OffsetStart = null;
                soxRequest.OffsetEnd   = null;
            }
            else
            {
                // TODO: this is dangerous
                soxSourceFile = source;
            }

            // audio file is now in either mp3 or wav
            FileInfo soxOutputFile;

            // apply modifications using sox
            soxOutputFile = this.ConvertAndSegmentUsingSox(
                soxSourceFile, MediaTypes.GetMediaType(soxSourceFile.Extension), soxRequest);

            // ensure result is in correct format
            if (MediaTypes.GetMediaType(soxOutputFile.Extension) != outputMediaType)
            {
                // if format is not correct, convert it
                this.ffmpegUtility.Modify(soxOutputFile, MediaTypes.MediaTypeWav, output, outputMediaType, new AudioUtilityRequest {
                    MixDownToMono = false
                });
            }
            else
            {
                // create output dir if it does not exist.
                if (!Directory.Exists(output.DirectoryName))
                {
                    Directory.CreateDirectory(output.DirectoryName);
                }

                // if output is correct, just copy it.
                // will not overwrite, will throw exception if the output file already exists.
                // do not overwrite!!!

                // AT: the following code by Towsey is extremely dangerous in parallel code. It effectively means
                //     previous runs can cache files - if files are faulty it corrupts analysis.
                // AT: This code is allowed in DEBUG for ease of use. It should not be subverted in RELEASE
                // AT, August 2017: Reverting this behavior again! With enhancements made to our code, there is no longer
                //     a guarantee that files will be produced from the same source, or even on nicely aligned minutes.
                //     The only sane alternative here is to crash because it means something is catastrophically wrong
                //     with our logic (or that a previous run failed and did not clean up its files!).
                //     Note to Michael (whom I've probably made grumpy with this change - sorry): I'd recommend instead
                //     of changing this code back, you instead add a cleanup command to your dev methods... something
                //     like Directory.Delete(outputDirectory, true) that executes before you start a new analysis and
                //     will clear away old files before the analysis runs.

                // However, output file may already exist if saved by user on previous run
                if (output.Exists)
                {
                    this.Log.Error($"MasterAudioUtility is trying to create file ({output.FullName}) that already exists.");
                }

                File.Copy(soxOutputFile.FullName, output.FullName);
            }

            // tidy up
            if (soxSourceFile.FullName != source.FullName && soxSourceFile.FullName != output.FullName)
            {
                soxSourceFile.Delete();
            }

            if (soxOutputFile.FullName != source.FullName && soxOutputFile.FullName != output.FullName)
            {
                soxOutputFile.Delete();
            }
        }