/// <summary>
/// Initializes a new instance of the <see cref="AnalysisCoordinator"/> class,
/// additionally exposing advanced channel mapping options.
/// </summary>
/// <param name="sourcePreparer">The source preparer to use. Must not be null.</param>
/// <param name="saveIntermediateWavFiles">Defines when intermediate WAVE files should be saved.</param>
/// <param name="uniqueDirectoryPerSegment">Whether or not to create unique directories per segment (in both temp and output directories).</param>
/// <param name="isParallel">Whether or not to run the analysis with multiple threads.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="sourcePreparer"/> is null.</exception>
public AnalysisCoordinator(
    ISourcePreparer sourcePreparer,
    SaveBehavior saveIntermediateWavFiles,
    bool uniqueDirectoryPerSegment = true,
    bool isParallel = false)
{
    // Guard the only reference-type dependency before touching any state.
    Contract.Requires<ArgumentNullException>(sourcePreparer != null, "sourcePreparer must not be null");

    this.SourcePreparer = sourcePreparer;
    this.UniqueDirectoryPerSegment = uniqueDirectoryPerSegment;
    this.IsParallel = isParallel;
    this.saveIntermediateWavFiles = saveIntermediateWavFiles;
}
/// <summary>
/// Cuts every file segment concurrently on the thread pool.
/// </summary>
/// <param name="fileSegments">The segments to cut.</param>
/// <param name="sourcePreparer">The source preparer that does the actual cutting.</param>
/// <param name="settings">The analysis settings (media type, sample rate, temp directory).</param>
/// <param name="arguments">The command-line arguments (output directory, mix-down flag).</param>
private static void RunParallel(
    List<ISegment<FileInfo>> fileSegments,
    ISourcePreparer sourcePreparer,
    AnalysisSettings settings,
    Arguments arguments)
{
    var totalItems = fileSegments.Count;
    Parallel.ForEach(
        fileSegments,
        (item, state, index) =>
        {
            // index is a long; segment counts come from a List so the cast is safe.
            int itemNumber = (int)index + 1;

            // Block until the segment is cut. GetAwaiter().GetResult() (rather than
            // Wait()) rethrows the original exception instead of wrapping it in an
            // AggregateException, so failures propagate with their real type.
            CreateSegment(sourcePreparer, item, settings, arguments, itemNumber, totalItems, arguments.MixDownToMono)
                .GetAwaiter()
                .GetResult();
        });
}
/// <summary>
/// Prepares (cuts/resamples) a single segment and reports progress to the console.
/// </summary>
/// <param name="sourcePreparer">The source preparer that does the actual cutting.</param>
/// <param name="fileSegment">The segment to prepare.</param>
/// <param name="settings">The analysis settings (media type, target sample rate, temp directory).</param>
/// <param name="arguments">The command-line arguments (output directory).</param>
/// <param name="itemNumber">1-based index of this segment, for progress reporting.</param>
/// <param name="itemCount">Total number of segments, for progress reporting.</param>
/// <param name="mixDownToMono">Whether to mix the audio down to a single channel.</param>
/// <returns>
/// Elapsed wall-clock seconds for the preparation, or <see cref="double.NaN"/> if an
/// <see cref="IOException"/> prevented the segment from being cut.
/// </returns>
private static async Task<double> CreateSegment(
    ISourcePreparer sourcePreparer,
    ISegment<FileInfo> fileSegment,
    AnalysisSettings settings,
    Arguments arguments,
    int itemNumber,
    int itemCount,
    bool mixDownToMono)
{
    var timer = Stopwatch.StartNew();
    FileSegment preparedFile;
    try
    {
        preparedFile = await sourcePreparer.PrepareFile(
            arguments.OutputDir.ToDirectoryInfo(),
            fileSegment,
            settings.SegmentMediaType,
            settings.AnalysisTargetSampleRate,
            settings.AnalysisTempDirectory,
            null,
            mixDownToMono);
    }
    catch (IOException ioex)
    {
        // Best-effort: an unreadable/uncuttable segment is reported and skipped rather
        // than aborting the whole run. NaN signals "no timing available" to callers.
        // (Fixed: the previous message had no space between the colon and the exception text.)
        LoggedConsole.WriteError($"Failed to cut segment {itemNumber} of {itemCount}: {ioex.Message}");
        return double.NaN;
    }

    LoggedConsole.WriteLine(
        "Created segment {0} of {1}: {2}",
        itemNumber,
        itemCount,
        preparedFile.SourceMetadata.Identifier);

    return timer.Elapsed.TotalSeconds;
}
/// <summary>
/// Validates the user-supplied segments, then splits them into analysis-sized chunks,
/// dropping chunks that are too short or duplicated.
/// </summary>
/// <typeparam name="TSource">The type of the audio source each segment refers to.</typeparam>
/// <param name="preparer">The source preparer used to calculate the split points.</param>
/// <param name="segments">The user-supplied segments; each must be non-empty, have a source, metadata, and be unique.</param>
/// <param name="settings">The analysis settings (minimum segment duration, etc.).</param>
/// <returns>The validated, split, de-duplicated list of segments ready for analysis.</returns>
/// <exception cref="InvalidSegmentException">
/// Thrown when a segment is empty/inverted, has a null source, or duplicates another supplied segment.
/// </exception>
private static List<ISegment<TSource>> PrepareAnalysisSegments<TSource>(
    ISourcePreparer preparer,
    ISegment<TSource>[] segments,
    AnalysisSettings settings)
{
    // ensure all segments are valid and have source metadata set
    double duration = 0;
    foreach (var segment in segments)
    {
        var segmentDuration = segment.EndOffsetSeconds - segment.StartOffsetSeconds;
        duration += segmentDuration;
        Contract.Requires<InvalidSegmentException>(
            segmentDuration > 0,
            $"Segment {segment} was invalid because end was less than start");
        Contract.Requires<InvalidSegmentException>(segment.Source != null, $"Segment {segment} source was null");
        // NOTE(review): plain Requires (no exception type) — a metadata failure surfaces
        // differently from the InvalidSegmentException cases above; presumably intentional.
        Contract.Requires(
            segment.SourceMetadata.NotNull(),
            $"Segment {segment} must have metadata supplied.");

        // it should equal itself (because it is in the list) but should not equal anything else
        // (O(n^2) over the user-supplied segments; these are expected to be few)
        var matchingSegments = segments.Where(x => x.Equals(segment)).ToArray();
        Contract.Requires<InvalidSegmentException>(
            matchingSegments.Length == 1,
            $"Supplied segment is a duplicate of another segment. Supplied:\n{segment}\nMatches\n: {string.Join("\n-----------\n", matchingSegments.Select(x => x.ToString()))}");
    }

    Log.Info($"Analysis Coordinator will analyze a total of {duration} seconds of audio");

    // split the provided segments up into processable chunks
    var analysisSegments = preparer.CalculateSegments(segments, settings).ToArray();

    // ensure after splitting there are no identical segments.
    // A presized List is used for membership checks (an earlier comment claimed a
    // dictionary; HashSet<T> did not support supplying an initial capacity at the time).
    var postCutSegments = new List<ISegment<TSource>>(analysisSegments.Length);
    foreach (var analysisSegment in analysisSegments)
    {
        // check if the segment is too short... and if so, remove it
        var tooShort = analysisSegment.EndOffsetSeconds - analysisSegment.StartOffsetSeconds < settings.AnalysisMinSegmentDuration.TotalSeconds;
        if (tooShort)
        {
            Log.Warn("Analysis segment removed because it was too short " + $"(less than {settings.AnalysisMinSegmentDuration}): {analysisSegment}");
            continue;
        }

        // ensure there are no identical segments (no use processing the same piece of audio twice
        // with the same analysis!)
        // warning: this is an O(n^2) operation (List.Any scan per candidate segment)
        if (postCutSegments.Any(x => x.Equals(analysisSegment)))
        {
            Log.Warn($"A duplicate analysis segment was removed: {analysisSegment}");
            continue;
        }

        postCutSegments.Add(analysisSegment);
    }

    return(postCutSegments);
}
/// <summary>
/// Cuts the file segments one after another, accumulating the time spent cutting.
/// </summary>
/// <param name="fileSegments">The segments to cut, in order.</param>
/// <param name="sourcePreparer">The source preparer that does the actual cutting.</param>
/// <param name="settings">The analysis settings (media type, sample rate, temp directory).</param>
/// <param name="arguments">The command-line arguments (output directory, mix-down flag).</param>
/// <returns>The summed per-segment preparation times, in seconds.</returns>
private static async Task<double> RunSequential(
    List<ISegment<FileInfo>> fileSegments,
    ISourcePreparer sourcePreparer,
    AnalysisSettings settings,
    Arguments arguments)
{
    var itemCount = fileSegments.Count;
    var cumulativeSeconds = 0.0;

    var itemNumber = 0;
    foreach (var segment in fileSegments)
    {
        itemNumber++;
        cumulativeSeconds += await CreateSegment(
            sourcePreparer, segment, settings, arguments, itemNumber, itemCount, arguments.MixDownToMono);
    }

    return cumulativeSeconds;
}