public void Initialize()
{
    // Fresh settings with dedicated output/temp directories under the test folder.
    this.analysisSettings = new AnalysisSettings
    {
        AnalysisOutputDirectory = this.TestOutputDirectory.Combine("output"),
        AnalysisTempDirectory = this.TestOutputDirectory.Combine("tmp"),
    };

    // NOTE(review): the "source" and "prepared" fakes below use the same file
    // name ("abc_min1.wav"), so both FileSegments point at one file — confirm
    // that is intentional for these tests.
    var sourceFile = this.TestOutputDirectory.CombineFile("abc_min1.wav");
    sourceFile.Touch();
    this.segment = new FileSegment(sourceFile, 123456, 60.0.Seconds())
    {
        SegmentStartOffset = TimeSpan.Zero,
        SegmentEndOffset = 60.0.Seconds(),
    };

    var preparedFile = this.TestOutputDirectory.CombineFile("abc_min1.wav");
    preparedFile.Touch();
    this.preparedSegment = new FileSegment(preparedFile, 123456, 30.Seconds())
    {
        SegmentStartOffset = 0.Seconds(),
        SegmentEndOffset = 59.999.Seconds(),
    };

    this.segmentSettings = new SegmentSettings<FileInfo>(
        this.analysisSettings,
        this.segment,
        (this.analysisSettings.AnalysisOutputDirectory, this.analysisSettings.AnalysisTempDirectory),
        this.preparedSegment);
}
public void ThrowsIfArgumentNull2()
{
    // Constructing SegmentSettings with null output/temp directories must throw.
    Assert.ThrowsException<ArgumentNullException>(() =>
    {
        this.segmentSettings = new SegmentSettings<FileInfo>(
            this.analysisSettings,
            this.segment,
            (null, null),
            this.preparedSegment);
    });
}
public void ThrowsIfArgumentNull3()
{
    // Constructing SegmentSettings with a null prepared segment must throw.
    Assert.ThrowsException<ArgumentNullException>(() =>
    {
        this.segmentSettings = new SegmentSettings<FileInfo>(
            this.analysisSettings,
            this.segment,
            (this.analysisSettings.AnalysisOutputDirectory, this.analysisSettings.AnalysisTempDirectory),
            null);
    });
}
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    // This analyzer produces no events; its purpose is to render spectrogram
    // images and, optionally, the decibel spectrogram matrix as CSV.
    var segmentAudio = segmentSettings.SegmentAudioFile;
    var audioRecording = new AudioRecording(segmentAudio.FullName);
    var recordingBaseName = audioRecording.BaseName;
    var segmentOutputDir = segmentSettings.SegmentOutputDirectory;
    bool shouldSaveCsv = analysisSettings.AnalysisDataSaveBehavior;

    var result = new AnalysisResult2(analysisSettings, segmentSettings, audioRecording.Duration);

    // TODO: GenerateSpectrogramImages changed signature in December 2019 — the
    // previous dictionary-based configuration path was removed; verify config handling.
    var analyzerConfig = ConfigFile.Deserialize<AnalyzerConfig>(analysisSettings.ConfigFile);
    var spectrograms = Audio2Sonogram.GenerateSpectrogramImages(segmentAudio, analyzerConfig, recordingBaseName);

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(result.Events.Length))
    {
        Debug.Assert(segmentSettings.SegmentImageFile.Exists);
    }

    if (shouldSaveCsv)
    {
        var stem = Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name);
        var csvTarget = segmentOutputDir.CombineFile(stem + ".Spectrogram.csv");
        Csv.WriteMatrixToCsv(csvTarget, spectrograms.DecibelSpectrogram.Data, TwoDimensionalArray.None);
    }

    return result;
}
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    // This analyzer produces no events — the point is to generate and save the
    // composite spectrogram image for the segment.
    var segmentAudio = segmentSettings.SegmentAudioFile;
    var audioRecording = new AudioRecording(segmentAudio.FullName);
    var generatorConfig = (SpectrogramGeneratorConfig)analysisSettings.Configuration;

    var result = new AnalysisResult2(analysisSettings, segmentSettings, audioRecording.Duration);

    var spectrograms = GenerateSpectrogramImages(segmentAudio, generatorConfig, audioRecording.BaseName);

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(result.Events.Length))
    {
        ImageExtensions.Save(spectrograms.CompositeImage, segmentSettings.SegmentImageFile.FullName);
    }

    return result;
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Wires this segment to its arc/segment state, registers the cursor-distance
// callback, and instantiates the renderer type supplied by the customization hook.
internal void Build(ArcState pArcState, SegmentState pSegState, float pArcAngle, ICustomSegment pCustom)
{
    vArcState = pArcState;
    vSegState = pSegState;
    ArcAngle = pArcAngle;
    vSegState.SetCursorDistanceFunction(CalcCursorDistance);

    // Four parents up from this segment is the HovercastSetup transform.
    vCursorBaseTx = gameObject.transform.parent.parent.parent.parent;

    const float pi = (float)Math.PI;
    const float slideBufferAngle = pi / 80f;

    // Usable slide range (in degrees) excludes a small buffer at each arc end.
    vSlideDegrees = (pArcAngle - slideBufferAngle * 2) / pi * 180;
    vSlideDir0 = MeshUtil.GetRingPoint(1, -pArcAngle / 2f + slideBufferAngle);

    ////

    Type rendererType = pCustom.GetSegmentRenderer(vSegState.NavItem);
    SegmentSettings rendererSettings = pCustom.GetSegmentSettings(vSegState.NavItem);

    var rendererObj = new GameObject("Renderer");
    rendererObj.transform.SetParent(gameObject.transform, false);

    vRenderer = (IUiSegmentRenderer)rendererObj.AddComponent(rendererType);
    vRenderer.Build(vArcState, vSegState, pArcAngle, rendererSettings);
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Builds the arc: a mesh background spanning [pAngle0, pAngle1] plus a text label.
public virtual void Build(ArcState pArcState, SegmentSettings pSettings, float pAngle0, float pAngle1)
{
    vArcState = pArcState;
    vSettings = pSettings;
    vAngle0 = pAngle0;
    vAngle1 = pAngle1;

    // Mesh resolution scales with arc size, but never fewer than 2 steps.
    vMeshSteps = (int)Math.Round(Math.Max(2, (vAngle1 - vAngle0) / Math.PI * 60));
    vInnerRadius = 0.17f;
    vDiameter = UiSelectRenderer.ArcCanvasThickness;

    ////

    vBackground = new GameObject("Background");
    vBackground.transform.SetParent(gameObject.transform, false);
    vBackground.AddComponent<MeshFilter>();
    vBackground.AddComponent<MeshRenderer>();

    // sharedMaterial is not cloned on assignment, so configuring a local
    // Material first is equivalent to mutating it after assignment.
    var backgroundMat = new Material(Shader.Find("Unlit/AlphaSelfIllum"));
    backgroundMat.renderQueue -= 100;
    backgroundMat.color = Color.clear;
    vBackground.renderer.sharedMaterial = backgroundMat;

    BuildMesh(vBackground.GetComponent<MeshFilter>().mesh);

    ////

    var labelGo = new GameObject("Label");
    labelGo.transform.SetParent(gameObject.transform, false);
    labelGo.transform.localPosition = new Vector3(0, 0, vInnerRadius);
    // z-scale is flipped when the menu is on the right hand.
    labelGo.transform.localScale = new Vector3(1, 1, (vArcState.IsLeft ? 1 : -1));

    vLabel = labelGo.AddComponent<UiLabel>();
    vLabel.IsLeft = vArcState.IsLeft;
}
/// <summary>
/// Builds the (AnalysisSettings, SegmentSettings) pair used to run a single analysis segment.
/// </summary>
/// <param name="defaults">Optional base settings, forwarded to the base implementation.</param>
/// <param name="outputIntermediate">
/// NOTE(review): this parameter is currently ignored — the base call below hard-codes
/// <c>true</c>. TODO confirm whether it should be passed through instead.
/// </param>
/// <param name="sourceSegment">Source file segment; defaults to a segment over <c>this.Source</c> with no time alignment.</param>
/// <param name="preparedSegment">Prepared segment; falls back to the source segment when null.</param>
public (AnalysisSettings, SegmentSettings<FileInfo>) ToAnalysisSettings(
    AnalysisSettings defaults = null,
    bool outputIntermediate = false,
    FileSegment sourceSegment = null,
    FileSegment preparedSegment = null)
{
    var analysisSettings = base.ToAnalysisSettings(defaults, true);
    var segment = sourceSegment ?? new FileSegment(this.Source, TimeAlignment.None);

    // Output and temp directories come straight from the analysis settings.
    var segmentSettings = new SegmentSettings<FileInfo>(
        analysisSettings,
        segment,
        (analysisSettings.AnalysisOutputDirectory, analysisSettings.AnalysisTempDirectory),
        preparedSegment ?? segment);

    return (analysisSettings, segmentSettings);
}
/// <summary>
/// Creates the dialog, stores the supplied segment settings, initializes the
/// designer components and defaults, and then locks the maximum size.
/// </summary>
/// <param name="settings">Segment settings edited by this dialog.</param>
public SegmentsDialog(SegmentSettings settings)
{
    this.settings = settings;
    InitializeComponent();
    InitializeDefaults();

    // Prevent the dialog from being resized beyond its designed size
    // (Size here is the size established by InitializeComponent).
    this.MaximumSize = Size;
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Re-applies the global background alpha to the settings' background and slider-fill colors.
internal static void UpdateSettingsWithBgAlpha(SegmentSettings pSegSett)
{
    Color background = pSegSett.BackgroundColor;
    background.a = BgAlpha;
    pSegSett.BackgroundColor = background;

    // Slider fill uses half the background alpha.
    Color sliderFill = pSegSett.SliderFillColor;
    sliderFill.a = 0.5f * BgAlpha;
    pSegSett.SliderFillColor = sliderFill;
}
List<NuGenPoint> FillPoints(SegmentSettings seg)
{
    // Gather the fill points produced by every tracked segment into one flat list.
    var collected = new List<NuGenPoint>();
    foreach (NuGenSegment trackedSegment in segments)
    {
        var segmentPoints = trackedSegment.FillPoints(seg);
        collected.AddRange(segmentPoints);
    }
    return collected;
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Caches the demo menu's root segment settings and hooks the hue slider's change event.
public void Awake()
{
    var provider = GameObject.Find("DemoEnvironment/MenuData")
        .GetComponent<HovercastCustomizationProvider>();
    vRootSettings = provider.GetSegmentSettings(null);
    vHueSettings = new SegmentSettings();

    var navItem = gameObject.GetComponent<HovercastNavItem>().GetItem();
    vHueSlider = (NavItemSlider)navItem;
    vHueSlider.OnValueChanged += HandleValueChanged;
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Applies the theme selected by this toggle (Dark/Light/Color) to the shared
// SegmentSettings, then re-applies the global background alpha.
private void HandleValueChanged(NavItem<bool> pNavItem)
{
    // Only react when the toggle switches on; ignore the "off" notification.
    if (!pNavItem.Value)
    {
        return;
    }

    SegmentSettings sett = SegSett;

    switch (Type)
    {
        case ThemeType.Dark:
            // Light text/icons over dark, semi-transparent surfaces.
            sett.TextColor = new Color(1, 1, 1);
            sett.ArrowIconColor = new Color(1, 1, 1);
            sett.ToggleIconColor = new Color(1, 1, 1);
            sett.BackgroundColor = new Color(0.1f, 0.1f, 0.1f, 0.5f);
            sett.EdgeColor = new Color(0.5f, 0.5f, 0.5f, 1);
            sett.HighlightColor = new Color(0.25f, 0.25f, 0.25f, 0.43f);
            sett.SelectionColor = new Color(0.5f, 0.5f, 0.5f, 1);
            sett.SliderTrackColor = new Color(0.1f, 0.1f, 0.1f, 0.25f);
            sett.SliderFillColor = new Color(0.5f, 0.5f, 0.5f, 0.25f);
            sett.SliderTickColor = new Color(1, 1, 1, 0.25f);
            break;

        case ThemeType.Light:
            // Dark text/icons over white, translucent surfaces.
            sett.TextColor = new Color(0, 0, 0);
            sett.ArrowIconColor = new Color(0, 0, 0);
            sett.ToggleIconColor = new Color(0, 0, 0);
            sett.BackgroundColor = new Color(1, 1, 1, 0.25f);
            sett.EdgeColor = new Color(1, 1, 1, 1);
            sett.HighlightColor = new Color(1, 1, 1, 0.25f);
            sett.SelectionColor = new Color(1, 1, 1, 1);
            sett.SliderTrackColor = new Color(1, 1, 1, 0.15f);
            sett.SliderFillColor = new Color(1, 1, 1, 0.5f);
            sett.SliderTickColor = new Color(0, 0, 0, 0.5f);
            break;

        case ThemeType.Color:
            // Warm text over blue background with green accents.
            sett.TextColor = new Color(1, 1, 0.7f);
            sett.ArrowIconColor = new Color(1, 1, 0.7f);
            sett.ToggleIconColor = new Color(1, 1, 0.7f);
            sett.BackgroundColor = new Color(0.05f, 0.25f, 0.45f, 0.5f);
            sett.EdgeColor = new Color(0.1f, 0.9f, 0.2f);
            sett.HighlightColor = new Color(0.1f, 0.5f, 0.9f);
            sett.SelectionColor = new Color(0.1f, 0.9f, 0.2f);
            sett.SliderTrackColor = new Color(0.1f, 0.5f, 0.9f, 0.5f);
            sett.SliderFillColor = new Color(0.1f, 0.9f, 0.2f, 0.5f);
            sett.SliderTickColor = new Color(1, 1, 1, 0.2f);
            break;
    }

    DemoCustomBgListener.UpdateSettingsWithBgAlpha(sett);
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Builds the base segment visuals, then adds a textured icon quad aligned with the label canvas.
public override void Build(ArcState pArcState, SegmentState pSegState, float pArcAngle, SegmentSettings pSettings)
{
    base.Build(pArcState, pSegState, pArcAngle, pSettings);

    vIcon = GameObject.CreatePrimitive(PrimitiveType.Quad);
    vIcon.name = "Icon";
    vIcon.transform.SetParent(gameObject.transform, false);

    // sharedMaterial is not cloned on assignment, so configuring the Material
    // locally first is equivalent to mutating it after assignment.
    var iconMat = new Material(Shader.Find("Unlit/AlphaSelfIllum"));
    iconMat.color = Color.clear;
    iconMat.mainTexture = GetIconTexture();
    vIcon.renderer.sharedMaterial = iconMat;

    vIcon.transform.localRotation = vLabel.CanvasLocalRotation;
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Recomputes the hue-driven colors whenever the hue slider moves.
private void HandleValueChanged(NavItem<float> pNavItem)
{
    // Map the slider position to a hue, and derive a translucent variant.
    Color hue = DemoEnvironment.HsvToColor(vHueSlider.RangeValue, 1, 0.666f);
    Color hueFaded = hue;
    hueFaded.a = 0.25f;

    // Start from a copy of the root settings, then override the hue-driven colors.
    SegmentSettings.Fill(vRootSettings, vHueSettings);
    vHueSettings.SelectionColor = hue;
    vHueSettings.SliderTrackColor = hueFaded;
    vHueSettings.SliderFillColor = hueFaded;
}
/*--------------------------------------------------------------------------------------------*/
// Swaps in a freshly-built palm renderer of the type chosen by the customization hook.
private void Rebuild()
{
    vPrevRendererObj = vRendererObj;

    const float halfAngle = UiLevel.AngleFull / 2f;
    NavItem parentItem = vArcState.GetLevelParentItem();
    Type rendererType = vCustom.GetPalmRenderer(parentItem);
    SegmentSettings rendererSettings = vCustom.GetPalmSettings(parentItem);

    // Activating the holder ensures Awake() is called in the renderers.
    vRendererHold.SetActive(true);

    vRendererObj = new GameObject("Renderer");
    vRendererObj.transform.SetParent(vRendererHold.transform, false);
    vRenderer = (IUiPalmRenderer)vRendererObj.AddComponent(rendererType);
    vRenderer.Build(vArcState, rendererSettings, -halfAngle, halfAngle);
}
/// <summary>
/// Generates four spectrogram images for the segment; this analyzer produces no
/// events — images (and optionally the decibel spectrogram CSV) are the output.
/// </summary>
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);
    var outputDirectory = segmentSettings.SegmentOutputDirectory;

    var analysisResult = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    Config configuration = ConfigFile.Deserialize(analysisSettings.ConfigFile);
    bool saveCsv = analysisSettings.AnalysisDataSaveBehavior;

    // SoX rendering is configured here but deliberately disabled in the call below.
    if (configuration.GetBool(AnalysisKeys.MakeSoxSonogram))
    {
        Log.Warn("SoX spectrogram generation config variable found (and set to true) but is ignored when running as an IAnalyzer");
    }

    // generate spectrogram
    var configurationDictionary = new Dictionary<string, string>(configuration.ToDictionary());
    configurationDictionary[ConfigKeys.Recording.Key_RecordingCallName] = audioFile.FullName;
    configurationDictionary[ConfigKeys.Recording.Key_RecordingFileName] = audioFile.Name;
    var soxImage = new FileInfo(Path.Combine(segmentSettings.SegmentOutputDirectory.FullName, audioFile.Name + ".SOX.png"));

    // NOTE(review): dataOnly is passed ShouldSave(...) directly — when images
    // SHOULD be saved this sets dataOnly=true, which looks inverted. Confirm the
    // intended polarity of GenerateFourSpectrogramImages' dataOnly parameter.
    var spectrogramResult = Audio2Sonogram.GenerateFourSpectrogramImages(
        audioFile,
        soxImage,
        configurationDictionary,
        dataOnly: analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResult.Events.Length),
        makeSoxSonogram: false);

    // this analysis produces no results!
    // but we still print images (that is the point)
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResult.Events.Length))
    {
        // NOTE(review): FileInfo.Exists is a snapshot taken when the FileInfo was
        // created — this assert may read a stale value; consider Refresh() first.
        Debug.Assert(segmentSettings.SegmentImageFile.Exists);
    }

    if (saveCsv)
    {
        var basename = Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name);
        var spectrogramCsvFile = outputDirectory.CombineFile(basename + ".Spectrogram.csv");
        Csv.WriteMatrixToCsv(spectrogramCsvFile, spectrogramResult.DecibelSpectrogram.Data, TwoDimensionalArray.None);
    }

    return (analysisResult);
}
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    // This analyzer emits no events; its only output is the composite spectrogram
    // image, which is written unconditionally below.
    var segmentAudio = segmentSettings.SegmentAudioFile;
    var audioRecording = new AudioRecording(segmentAudio.FullName);

    var result = new AnalysisResult2(analysisSettings, segmentSettings, audioRecording.Duration);

    var analyzerConfig = ConfigFile.Deserialize<AnalyzerConfig>(analysisSettings.ConfigFile);
    var spectrograms = Audio2Sonogram.GenerateSpectrogramImages(segmentAudio, analyzerConfig, audioRecording.BaseName);

    // Save the composite image as PNG (image-save behavior checks are disabled here).
    spectrograms.CompositeImage.Save(segmentSettings.SegmentImageFile.FullName, ImageFormat.Png);

    return result;
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Builds the base segment visuals: a background slice sized to the arc plus a text label.
public virtual void Build(ArcState pArcState, SegmentState pSegState, float pArcAngle, SegmentSettings pSettings)
{
    vArcState = pArcState;
    vSegState = pSegState;
    vSettings = pSettings;

    ////

    vSlice = new UiSlice(gameObject);
    vSlice.Resize(pArcAngle);

    ////

    var labelGo = new GameObject("Label");
    labelGo.transform.SetParent(gameObject.transform, false);
    labelGo.transform.localPosition = new Vector3(0, 0, 1);
    // z-scale is flipped when the menu is on the right hand.
    labelGo.transform.localScale = new Vector3(1, 1, (vArcState.IsLeft ? 1 : -1));

    vLabel = labelGo.AddComponent<UiLabel>();
    vLabel.IsLeft = vArcState.IsLeft;
}
private Lazy<IndexCalculateResult[]> GetLazyIndices<T>(
    AudioRecording recording,
    AnalysisSettings analysisSettings,
    SegmentSettings<T> segmentSettings,
    AcousticIndices.AcousticIndicesConfig acousticConfiguration)
{
    // Defer the (expensive) per-subsegment index calculation until first access;
    // ExecutionAndPublication makes the computation run at most once across threads.
    return new Lazy<IndexCalculateResult[]>(
        () => AcousticIndices.CalculateIndicesInSubsegments(
            recording,
            segmentSettings.SegmentStartOffset,
            segmentSettings.AnalysisIdealSegmentDuration,
            acousticConfiguration.IndexCalculationDuration.Seconds(),
            acousticConfiguration.IndexProperties,
            segmentSettings.Segment.SourceMetadata.SampleRate,
            acousticConfiguration),
        LazyThreadSafetyMode.ExecutionAndPublication);
}
// Finishes segments that ended in the previous column: any segment present in
// lastSegment but absent from currSegment is either discarded (too short to
// satisfy the minimum-points setting) or asked to fold its redundant lines.
void RemoveUnneededLines(NuGenSegment[] lastSegment, NuGenSegment[] currSegment, int height, SegmentSettings seg)
{
    NuGenSegment segLast = null;
    for (int yLast = 0; yLast < height; yLast++)
    {
        // Process each distinct segment in the previous column once
        // (consecutive rows may reference the same segment).
        if ((lastSegment[yLast] != null) && (lastSegment[yLast] != segLast))
        {
            segLast = lastSegment[yLast];

            // if the segment is found in the current column then it is still in work so postpone processing
            bool found = false;
            for (int yCur = 0; yCur < height; yCur++)
                if (segLast == currSegment[yCur])
                {
                    found = true;
                    break;
                }

            if (!found)
            {
                // Segment ended: discard it if shorter than the minimum span
                // implied by (minPoints - 1) * pointSeparation.
                if (segLast.Length < (seg.minPoints - 1) * seg.pointSeparation)
                {
                    segments.Remove(segLast); // autoDelete is on
                }
                else // keep segment, but try to fold lines
                    segLast.RemoveUnneededLines();
            }
        }
    }
}
// Closes out a vertical run of pixels [yStart, yStop] in column x: the run is
// attached to an adjacent segment from the previous column (or starts a new
// segment), unless it touches multiple runs on either side (a branch), in which
// case it is skipped entirely.
void FinishRun(bool[] lastBool, bool[] nextBool, NuGenSegment[] lastSegment, NuGenSegment[] currSegment, int x, int yStart, int yStop, int height, SegmentSettings set)
{
    // when looking at adjacent columns, include pixels that touch diagonally since
    // those may also diagonally touch nearby runs in the same column (which would indicate
    // a branch)

    // count runs that touch on the left
    if (AdjacentRuns(lastBool, yStart, yStop, height) > 1)
        return;

    // count runs that touch on the right
    if (AdjacentRuns(nextBool, yStart, yStop, height) > 1)
        return;

    NuGenSegment seg;
    if (AdjacentSegments(lastSegment, yStart, yStop, height) == 0)
    {
        // this is the start of a new segment, anchored at the run's rounded midpoint.
        // NOTE(review): AppendColumn is only called on the continuation branch below —
        // confirm a brand-new segment does not also need its first column appended.
        seg = new NuGenSegment((int)(0.5 + (yStart + yStop) / 2.0));
        segments.Add(seg);
    }
    else
    {
        // this is the continuation of an existing segment
        seg = AdjacentSegment(lastSegment, yStart, yStop, height);
        seg.AppendColumn(x, (int)(0.5 + (yStart + yStop) / 2.0), set);
    }

    // Mark every row of the run as belonging to this segment in the current column.
    for (int y = yStart; y <= yStop; y++)
        currSegment[y] = seg;
}
/// <summary>
/// This entrypoint should be used for testing short files (less than 2 minutes).
/// Resolves the config, prepares the audio, runs a single event recognizer over
/// the whole file, summarises the results, and deletes the temporary prepared copy.
/// </summary>
public static void Execute(Arguments arguments)
{
    MainEntry.WarnIfDevleoperEntryUsed("EventRecognizer entry does not do any audio maniuplation.");
    Log.Info("Running event recognizer");

    var sourceAudio = arguments.Source;
    var configFile = arguments.Config.ToFileInfo();
    var outputDirectory = arguments.Output;

    if (configFile == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!configFile.Exists)
    {
        // Fall back to searching the standard config locations by file name.
        Log.Warn($"Config file {configFile.FullName} not found... attempting to resolve config file");
        configFile = ConfigFile.Resolve(configFile.Name, Directory.GetCurrentDirectory().ToDirectoryInfo());
    }

    LoggedConsole.WriteLine("# Recording file: " + sourceAudio.FullName);
    LoggedConsole.WriteLine("# Configuration file: " + configFile);
    LoggedConsole.WriteLine("# Output folder: " + outputDirectory);

    // find an appropriate event IAnalyzer
    IAnalyser2 recognizer = AnalyseLongRecording.FindAndCheckAnalyser<IEventRecognizer>(
        arguments.AnalysisIdentifier, configFile.Name);

    Log.Info("Attempting to run recognizer: " + recognizer.Identifier);
    Log.Info("Reading configuration file");
    Config configuration = ConfigFile.Deserialize<RecognizerBase.RecognizerConfig>(configFile);

    // get default settings
    AnalysisSettings analysisSettings = recognizer.DefaultSettings;

    // convert arguments to analysis settings (intermediate output is always kept here)
    analysisSettings = arguments.ToAnalysisSettings(
        analysisSettings,
        outputIntermediate: true,
        resultSubDirectory: recognizer.Identifier,
        configuration: configuration);

    // Enable this if you want the Config file ResampleRate parameter to work.
    // Generally however the ResampleRate should remain at 22050Hz for all recognizers.
    //analysisSettings.AnalysisTargetSampleRate = (int) configuration[AnalysisKeys.ResampleRate];

    // get transform input audio file - if needed
    Log.Info("Querying source audio file");
    var audioUtilityRequest = new AudioUtilityRequest()
    {
        TargetSampleRate = analysisSettings.AnalysisTargetSampleRate,
    };
    var preparedFile = AudioFilePreparer.PrepareFile(
        outputDirectory,
        sourceAudio,
        MediaTypes.MediaTypeWav,
        audioUtilityRequest,
        outputDirectory);

    var source = preparedFile.SourceInfo.ToSegment();
    var prepared = preparedFile.TargetInfo.ToSegment(FileSegment.FileDateBehavior.None);
    var segmentSettings = new SegmentSettings<FileInfo>(
        analysisSettings,
        source,
        (analysisSettings.AnalysisOutputDirectory, analysisSettings.AnalysisTempDirectory),
        prepared);

    if (preparedFile.TargetInfo.SampleRate.Value != analysisSettings.AnalysisTargetSampleRate)
    {
        Log.Warn("Input audio sample rate does not match target sample rate");
    }

    // Execute a pre analyzer hook
    recognizer.BeforeAnalyze(analysisSettings);

    // execute actual analysis - output data will be written
    Log.Info("Running recognizer: " + recognizer.Identifier);
    AnalysisResult2 results = recognizer.Analyze(analysisSettings, segmentSettings);

    // run summarize code - output data can be written
    Log.Info("Running recognizer summary: " + recognizer.Identifier);
    recognizer.SummariseResults(
        analysisSettings,
        source,
        results.Events,
        results.SummaryIndices,
        results.SpectralIndices,
        new[] { results });

    //Log.Info("Recognizer run, saving extra results");
    // TODO: Michael, output anything else as you wish.

    Log.Debug("Clean up temporary files");
    if (source.Source.FullName != prepared.Source.FullName)
    {
        // The prepared copy is distinct from the original source; safe to delete.
        prepared.Source.Delete();
    }

    int eventCount = results?.Events?.Length ?? 0;
    Log.Info($"Number of detected events: {eventCount}");
    Log.Success(recognizer.Identifier + " recognizer has completed");
}
////////////////////////////////////////////////////////////////////////////////////////////////
/*--------------------------------------------------------------------------------------------*/
// Builds the slider segment: hidden hit-area slice, track/fill slices, optional
// tick marks, the draggable grab renderer, and the hover highlight slice.
public virtual void Build(ArcState pArcState, SegmentState pSegState, float pArcAngle, SegmentSettings pSettings)
{
    vArcState = pArcState;
    vSegState = pSegState;
    // Usable angular range is the arc minus an inset at each end.
    vAngle0 = -pArcAngle / 2f + UiSlice.AngleInset;
    vAngle1 = pArcAngle / 2f - UiSlice.AngleInset;
    vSettings = pSettings;
    vNavSlider = (NavItemSlider)vSegState.NavItem;

    const float pi = (float)Math.PI;
    const float radInner = 1.04f;
    const float radOuter = 1.46f;

    // Grab handle spans 2*vGrabArcHalf; the slide range (in degrees) excludes it.
    vGrabArcHalf = pi / 80f;
    vSlideDegree0 = (vAngle0 + vGrabArcHalf) / pi * 180;
    vSlideDegrees = (vAngle1 - vAngle0 - vGrabArcHalf * 2) / pi * 180;

    ////

    // Invisible slice covering the whole arc (hit area only).
    vHiddenSlice = new UiSlice(gameObject, true);
    vHiddenSlice.Resize(pArcAngle);
    vHiddenSlice.UpdateBackground(Color.clear);

    // Track and fill are each split into two slices (A before / B after the grab).
    vTrackA = new UiSlice(gameObject, true, "TrackA", radInner, radOuter);
    vTrackB = new UiSlice(gameObject, true, "TrackB", radInner, radOuter);
    vFillA = new UiSlice(gameObject, true, "FillA", radInner, radOuter);
    vFillB = new UiSlice(gameObject, true, "FillB", radInner, radOuter);

    ////

    vTickMat = new Material(Shader.Find("Unlit/AlphaSelfIllum"));
    vTickMat.renderQueue -= 400;
    vTickMat.color = Color.clear;

    if (vNavSlider.Ticks > 1)
    {
        // Evenly distribute tick quads across the slide range.
        Vector3 quadScale = new Vector3(UiSlice.AngleInset * 2, 0.36f, 0.1f);
        float percPerTick = 1 / (float)(vNavSlider.Ticks - 1);
        vTicks = new GameObject[vNavSlider.Ticks];

        for (int i = 0; i < vNavSlider.Ticks; ++i)
        {
            var tick = new GameObject("Tick" + i);
            tick.transform.SetParent(gameObject.transform, false);
            tick.transform.localRotation = Quaternion.AngleAxis(
                vSlideDegree0 + vSlideDegrees * i * percPerTick, Vector3.up);
            vTicks[i] = tick;

            var quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
            quad.renderer.sharedMaterial = vTickMat;
            quad.transform.SetParent(tick.transform, false);
            quad.transform.localPosition = new Vector3(0, 0, 1.25f);
            quad.transform.localRotation = Quaternion.FromToRotation(Vector3.back, Vector3.down);
            quad.transform.localScale = quadScale;
        }
    }

    ////

    vGrabHold = new GameObject("GrabHold");
    vGrabHold.transform.SetParent(gameObject.transform, false);

    var grabObj = new GameObject("Grab");
    grabObj.transform.SetParent(vGrabHold.transform, false);
    vGrab = grabObj.AddComponent<UiSliderGrabRenderer>();
    vGrab.Build(vArcState, vSegState, vGrabArcHalf * 2, pSettings);

    ////

    vHoverHold = new GameObject("HoverHold");
    vHoverHold.transform.SetParent(gameObject.transform, false);

    var hoverObj = new GameObject("Hover");
    hoverObj.transform.SetParent(vHoverHold.transform, false);
    vHover = new UiSlice(hoverObj, false, "Hover");
}
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo audioFile = segmentSettings.SegmentAudioFile;

    // Run the koala-male detection over this segment.
    Dictionary<string, string> configuration = analysisSettings.Configuration.ToDictionary();
    KoalaMaleResults results = Analysis(audioFile, configuration, segmentSettings.SegmentStartOffset);

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, results.RecordingtDuration)
    {
        AnalysisIdentifier = this.Identifier,
    };
    analysisResults.Events = results.Events.ToArray();

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // noop — no summary/spectral indices are produced by this recognizer
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        const double eventThreshold = 0.1;
        Image sonogramImage = DrawSonogram(results.Sonogram, results.Hits, results.Plot, results.Events, eventThreshold);
        sonogramImage.Save(segmentSettings.SegmentImageFile.FullName);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;
    }

    return analysisResults;
}
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo audioFile = segmentSettings.SegmentAudioFile;
    var aedConfig = GetAedParametersFromConfigFileOrDefaults(analysisSettings.Configuration);

    // Detect returns (events, recording info, sonogram) as a tuple.
    var results = Detect(audioFile, aedConfig, segmentSettings.SegmentStartOffset);

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, results.Item2.Duration)
    {
        AnalysisIdentifier = this.Identifier,
    };
    analysisResults.Events = results.Item1;
    BaseSonogram sonogram = results.Item3;

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // noop — no summary/spectral indices are produced by this analyzer
    }

    // save image of sonograms
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        Image sonogramImage = DrawSonogram(sonogram, results.Item1);
        sonogramImage.Save(segmentSettings.SegmentImageFile.FullName);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;
    }

    return analysisResults;
}
/// <summary>
/// Standardized feature extraction over one audio segment: spectral/summary indices
/// are computed per frequency band and per subsegment, and (when image saving is on)
/// a labelled amplitude spectrogram is produced for each band and combined into one image.
/// </summary>
/// <param name="analysisSettings">Global settings; Configuration must be a StandardizedFeatureExtractionConfig.</param>
/// <param name="segmentSettings">Per-segment paths, offsets and source metadata.</param>
/// <returns>Result carrying interleaved summary/spectral indices and optional image file.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var configuration = (StandardizedFeatureExtractionConfig)analysisSettings.Configuration;
    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);

    // Configurations non-specific for bands
    TimeSpan indexCalculationDuration = configuration.IndexCalculationDurationTimeSpan;
    TimeSpan bgNoiseNeighbourhood = configuration.BgNoiseBuffer;

    // Bands
    List<StandardizedFeatureExtractionConfig.BandsProperties> bandsList = configuration.Bands;

    // Check if there are identical bands
    CheckForIdenticalBands(bandsList);

    // Estimate total number of subsegments (one result slot per band per subsegment)
    double segmentDurationSeconds = segmentSettings.AnalysisIdealSegmentDuration.TotalSeconds;
    double subsegmentDuration = indexCalculationDuration.TotalSeconds;
    int subsegmentCount = (int)Math.Round(segmentDurationSeconds / subsegmentDuration);
    int totalSubsegmentCount = subsegmentCount * bandsList.Count;

    // Store results of all subsegments
    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    analysisResults.AnalysisIdentifier = this.Identifier;
    var trackScores = new List<Plot>(totalSubsegmentCount);
    var tracks = new List<SpectralTrack>(totalSubsegmentCount);
    analysisResults.SummaryIndices = new SummaryIndexBase[totalSubsegmentCount];
    analysisResults.SpectralIndices = new SpectralIndexBase[totalSubsegmentCount];

    // Create list to store images, one for each band. They are later combined into one image.
    var list = new List<Image<Rgb24>>();
    string imagePath = segmentSettings.SegmentImageFile.FullName;
    int maxImageWidth = 0;

    int bandCount = 0;
    foreach (var band in bandsList)
    {
        Log.DebugFormat("Starting band {0}/{1}", bandCount + 1, bandsList.Count);

        // Calculate spectral indices
        // get a fresh copy of the ICC config (cloned so per-band edits don't leak between bands)
        var config = (IndexCalculateConfig)((ICloneable)configuration).Clone();

        // Add values specific for band from custom configuration file to config
        config.MinBandWidth = band.Bandwidth.Min;
        config.MaxBandWidth = band.Bandwidth.Max;
        config.FrameLength = band.FftWindow;
        if (band.MelScale != 0)
        {
            // non-zero MelScale switches this band to a Mel frequency scale
            config.FrequencyScale = FreqScaleType.Mel;
            config.MelScale = band.MelScale;
        }
        else
        {
            config.FrequencyScale = FreqScaleType.Linear;
        }

        // Calculate indices for each subsegment and for each band
        IndexCalculateResult[] subsegmentResults = AcousticIndices.CalculateIndicesInSubsegments(
            recording,
            segmentSettings.SegmentStartOffset,
            segmentSettings.AnalysisIdealSegmentDuration,
            indexCalculationDuration,
            config.IndexProperties,
            segmentSettings.Segment.SourceMetadata.SampleRate,
            config);

        int columnsAmplitudeSpectrogram = subsegmentResults[0].AmplitudeSpectrogram.GetLength(1);

        // zero-row seed matrix; subsegment spectrograms are concatenated onto it below
        double[,] amplitudeSpectrogramSegment = new double[0, columnsAmplitudeSpectrogram];

        for (int i = 0; i < subsegmentResults.Length; i++)
        {
            var indexCalculateResult = subsegmentResults[i];
            indexCalculateResult.SummaryIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;
            indexCalculateResult.SpectralIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;

            // results are interleaved band-major: slot = band + (subsegment * number-of-bands)
            analysisResults.SummaryIndices[bandCount + (i * bandsList.Count)] = indexCalculateResult.SummaryIndexValues;
            analysisResults.SpectralIndices[bandCount + (i * bandsList.Count)] = indexCalculateResult.SpectralIndexValues;
            trackScores.AddRange(indexCalculateResult.TrackScores);
            if (indexCalculateResult.Tracks != null)
            {
                tracks.AddRange(indexCalculateResult.Tracks);
            }

            if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
            {
                // Add amplitude spectrograms of each subsegment together to get amplitude spectrogram of one segment
                double[,] amplitudeSpectrogramSubsegment = indexCalculateResult.AmplitudeSpectrogram;
                amplitudeSpectrogramSegment = MatrixTools.ConcatenateMatrixRows(
                    amplitudeSpectrogramSegment,
                    amplitudeSpectrogramSubsegment);
            }
        }

        if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
        {
            // Create image of amplitude spectrogram
            var image = ImageTools.DrawReversedMatrix(MatrixTools.MatrixRotate90Anticlockwise(amplitudeSpectrogramSegment));

            // Label information
            string minBandWidth = band.Bandwidth.Min.ToString();
            string maxBandWidth = band.Bandwidth.Max.ToString();
            string fftWindow = band.FftWindow.ToString();
            string mel;
            string melScale;
            if (band.MelScale != 0)
            {
                mel = "Mel";
                melScale = band.MelScale.ToString();
            }
            else
            {
                mel = "Standard";
                melScale = 0.ToString();
            }

            // Create label (underscore-joined band parameters)
            string segmentSeparator = "_";
            string[] segments = { minBandWidth, maxBandWidth, fftWindow, mel, melScale };
            string labelText = segments.Aggregate(string.Empty, (aggregate, item) => aggregate + segmentSeparator + item);

            var stringFont = Drawing.Arial14;
            int width = 250;
            int height = image.Height;
            var label = new Image<Rgb24>(width, height);
            label.Mutate(g1 =>
            {
                g1.Clear(Color.Gray);
                g1.DrawText(labelText, stringFont, Color.Black, new PointF(4, 30));
                g1.DrawLine(new Pen(Color.Black, 1), 0, 0, width, 0); //draw upper boundary
                g1.DrawLine(new Pen(Color.Black, 1), 0, 1, width, 1); //draw upper boundary
            });

            var labelledImage = ImageTools.CombineImagesInLine(label, image);

            // Add labeled image to list
            list.Add(labelledImage);

            // Update maximal width of image (used later when stacking the band images)
            if (image.Width > maxImageWidth)
            {
                maxImageWidth = image.Width;
            }
        }

        bandCount += 1;
        Log.InfoFormat("Completed band {0}/{1}", bandCount, bandsList.Count);
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
        analysisResults.SpectraIndicesFiles = this.WriteSpectrumIndicesFiles(
            segmentSettings.SegmentSpectrumIndicesDirectory,
            Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name),
            analysisResults.SpectralIndices);
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
    {
        // stack the per-band images into one output image
        var finalImage = ImageTools.CombineImagesVertically(list, maxImageWidth);
        finalImage.Save(imagePath);
        analysisResults.ImageFile = new FileInfo(imagePath);
        LoggedConsole.WriteLine("See {0} for spectrogram pictures", imagePath);
    }

    return (analysisResults);
}
// Scans one column of the image for vertical runs of colored pixels and hands each
// completed run to FinishRun, which attaches it to (or starts) a segment. Finally
// prunes segments that are no longer needed for the previous column.
void MatchRunsToSegments(int x, int height, bool[] lastBool, NuGenSegment[] lastSegment, bool[] currBool, NuGenSegment[] currSegment, bool[] nextBool, SegmentSettings seg)
{
    LoadSegment(currSegment, height);

    bool insideRun = false;
    int runStart = 0;

    for (int y = 0; y < height; y++)
    {
        // a run begins at the first colored pixel after a gap
        if (currBool[y] && !insideRun)
        {
            insideRun = true;
            runStart = y;
        }

        // a run ends when the next pixel is uncolored, or at the image edge
        bool runEndsHere = y + 1 >= height || !currBool[y + 1];
        if (runEndsHere)
        {
            if (insideRun)
            {
                FinishRun(lastBool, nextBool, lastSegment, currSegment, x, runStart, y, height, seg);
            }

            insideRun = false;
        }
    }

    RemoveUnneededLines(lastSegment, currSegment, height, seg);
}
/// <summary>
/// Runs the Limnodynastes convex analysis over one prepared audio segment and, per the
/// configured save behaviors, writes the events file, per-minute summary indices, and
/// a sonogram image.
/// </summary>
/// <param name="analysisSettings">Global analysis settings and save behaviors.</param>
/// <param name="segmentSettings">Per-segment paths and offsets.</param>
/// <returns>The populated <see cref="AnalysisResult2"/> for this segment.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo segmentAudio = segmentSettings.SegmentAudioFile;

    // execute actual analysis
    Dictionary<string, string> config = analysisSettings.Configuration;
    LimnodynastesConvexResults analysisOutput = Analysis(segmentAudio, config, analysisSettings, segmentSettings);

    var result = new AnalysisResult2(analysisSettings, segmentSettings, analysisOutput.RecordingDuration);
    BaseSonogram spectrogram = analysisOutput.Sonogram;
    double[,] hitMatrix = analysisOutput.Hits;
    Plot scorePlot = analysisOutput.Plot;
    List<AcousticEvent> detectedEvents = analysisOutput.Events;
    result.Events = detectedEvents.ToArray();

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, result.Events);
        result.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // roll events up into per-minute summary indices
        var unitTime = TimeSpan.FromMinutes(1.0);
        result.SummaryIndices = this.ConvertEventsToSummaryIndices(result.Events, unitTime, result.SegmentAudioDuration, 0);
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, result.SummaryIndices);
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(result.Events.Length))
    {
        const double EventThreshold = 0.1;
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        Image image = DrawSonogram(spectrogram, hitMatrix, scorePlot, detectedEvents, EventThreshold);
        image.Save(imagePath, ImageFormat.Png);
        result.ImageFile = segmentSettings.SegmentImageFile;
    }

    return result;
}
/// <summary>
/// Computes event statistics for every imported event attached to this remote segment.
/// Several events may share the same downloaded audio block, so each is analyzed
/// individually against the one recording.
/// </summary>
/// <param name="analysisSettings">Global settings; Configuration must be an EventStatisticsConfiguration.</param>
/// <param name="segmentSettings">Segment whose <c>Segment</c> must be a RemoteSegmentWithData.</param>
/// <returns>An AnalysisResult2 whose Events array holds one EventStatistics per imported event.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    Contract.Requires(segmentSettings.SegmentStartOffset == segmentSettings.Segment.StartOffsetSeconds.Seconds());

    var recording = new AudioRecording(segmentSettings.SegmentAudioFile);
    var segment = (RemoteSegmentWithData)segmentSettings.Segment;

    // sometimes events will share the same audio block so we have to analyze each event
    // within this segment of audio
    IReadOnlyCollection<object> importedEvents = segment.Data;

    Log.Debug($"Calculating event statistics for {importedEvents.Count} items in {segmentSettings.SegmentAudioFile}");

    EventStatistics[] results = new EventStatistics[importedEvents.Count];
    int index = 0;
    foreach (var importedEventObject in importedEvents)
    {
        var importedEvent = (ImportedEvent)importedEventObject;

        // the event's bounds in time and frequency
        var temporalRange = new Range<TimeSpan>(
            importedEvent.EventStartSeconds.Value.Seconds(),
            importedEvent.EventEndSeconds.Value.Seconds());
        var spectralRange = new Range<double>(
            importedEvent.LowFrequencyHertz.Value,
            importedEvent.HighFrequencyHertz.Value);

        Log.Debug(
            $"Calculating event statistics for {importedEvent.AudioEventId},{temporalRange}," + $"{spectralRange} in {segmentSettings.SegmentAudioFile}, Duration: {recording.Duration}");

        // Repeat sanity check here. Previous duration sanity check only checks the header of the audio file,
        // but that still allows for a fragmented audio file to have been downloaded, shorter than it should be
        var expectedDuration = segment.Offsets.Size().Seconds();
        var durationDelta = expectedDuration - recording.Duration;
        if (durationDelta > 1.0.Seconds())
        {
            // warn-only: more than one second shorter than expected
            Log.Warn(
                $"Media ({segmentSettings.SegmentAudioFile}) did not have expected duration." + $" Expected: {expectedDuration}, Actual: {recording.Duration}");
        }

        var configuration = (EventStatisticsConfiguration)analysisSettings.Configuration;

        var statistics = EventStatisticsCalculate.AnalyzeAudioEvent(
            recording,
            temporalRange,
            spectralRange,
            configuration,
            segmentSettings.SegmentStartOffset);

        if (statistics.Error)
        {
            // failed statistics are still recorded in the results array below
            Log.Warn($"Event statistics failed for {importedEvent.AudioEventId},{temporalRange}," + $"{spectralRange} in {segmentSettings.SegmentAudioFile}, Duration: {recording.Duration}");
        }

        // lastly add some metadata to make the results useful
        statistics.Order = importedEvent.Order;
        statistics.AudioRecordingId = segment.Source.Id;
        statistics.AudioRecordingRecordedDate = segment.SourceMetadata.RecordedDate;
        statistics.AudioEventId = importedEvent.AudioEventId;

        results[index] = statistics;
        index++;
    }

    var result = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    result.Events = results;

    return (result);
}
/// <summary>
/// Calls <c>this.Pause</c> with a tag derived from the segment's start offset (in minutes),
/// then returns a result reporting a fixed 60-second segment duration.
/// </summary>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    string pauseTag = "Analyze" + segmentSettings.SegmentStartOffset.TotalMinutes;
    this.Pause(pauseTag);

    var fixedDuration = TimeSpan.FromSeconds(60.0);
    return new AnalysisResult2(analysisSettings, segmentSettings, fixedDuration);
}
/// <summary>
/// Computes channel-integrity indices for a stereo segment: channel similarity,
/// decibel difference/bias measures, and per-channel zero-crossing fractions.
/// </summary>
/// <param name="analysisSettings">Global settings and save behaviors.</param>
/// <param name="segmentSettings">Per-segment paths, offsets, and source metadata.</param>
/// <returns>Result whose SummaryIndices holds one ChannelIntegrityIndices entry.</returns>
/// <exception cref="InvalidAudioChannelException">When the recording is not exactly two channels.</exception>
/// <exception cref="NotImplementedException">When image saving is requested — unsupported here.</exception>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    // boilerplate Analyzer
    var audioFile = segmentSettings.SegmentAudioFile;
    var sampleRate = segmentSettings.Segment.SourceMetadata.SampleRate;
    var recording = new AudioRecording(audioFile.FullName);
    var outputDirectory = segmentSettings.SegmentOutputDirectory;
    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    analysisResults.AnalysisIdentifier = this.Identifier;

    var result = new ChannelIntegrityIndices()
    {
        ResultStartSeconds = segmentSettings.SegmentStartOffset.TotalSeconds,
    };

    // sanity check: this analysis is only defined for two-channel audio
    if (recording.WavReader.Channels != 2)
    {
        // NOTE(fix): the closing parenthesis was missing from this message
        throw new InvalidAudioChannelException($"The channel integrity analyzer requires exactly two channels but {recording.WavReader.Channels} channels found in file ({audioFile.FullName})");
    }

    // actual analysis
    double[] channelLeft = recording.WavReader.GetChannel(0);
    double[] channelRight = recording.WavReader.GetChannel(1);
    double epsilon = recording.WavReader.Epsilon;

    ChannelIntegrity.SimilarityIndex(
        channelLeft,
        channelRight,
        epsilon,
        sampleRate,
        out var similarityIndex,
        out var decibelIndex,
        out var avDecibelBias,
        out var medianDecibelBias,
        out var lowDecibelBias,
        out var midDecibelBias,
        out var highDecibelBias);

    //double similarityIndex = ChannelIntegrity.SimilarityIndex(channelLeft, channelRight, epsilon, sampleRate.Value);
    result.ChannelSimilarity = similarityIndex;
    result.ChannelDiffDecibels = decibelIndex;
    result.AverageDecibelBias = avDecibelBias;
    result.MedianDecibelBias = medianDecibelBias;
    result.LowFreqDecibelBias = lowDecibelBias;
    result.MidFreqDecibelBias = midDecibelBias;
    result.HighFreqDecibelBias = highDecibelBias;

    ChannelIntegrity.ZeroCrossingIndex(channelLeft, channelRight, out var zeroCrossingFractionLeft, out var zeroCrossingFractionRight);
    result.ZeroCrossingFractionLeft = zeroCrossingFractionLeft;
    result.ZeroCrossingFractionRight = zeroCrossingFractionRight;

    // finish the analyzer
    analysisResults.Events = new EventBase[0];
    analysisResults.SummaryIndices = new SummaryIndexBase[] { result };
    analysisResults.SpectralIndices = new SpectralIndexBase[0];

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
    }

    // image output is not supported by this analyzer
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        throw new NotImplementedException();
    }

    // NOTE(fix): a dead branch `if (false && analysisSettings.AnalysisDataSaveBehavior)
    // { throw new NotImplementedException(); }` was removed here — it could never execute.

    return (analysisResults);
}
/// <summary>
/// Runs a species recognizer over one segment. High-resolution indices are provided
/// lazily: they are only computed (and only summarized/drawn) if the recognizer
/// actually forces the lazy value. Output files are written per the save behaviors.
/// </summary>
/// <param name="analysisSettings">Global settings; Configuration must be a RecognizerConfig.</param>
/// <param name="segmentSettings">Per-segment paths, offsets, and source metadata.</param>
/// <returns>The populated <see cref="AnalysisResult2"/> for this segment.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var recording = new AudioRecording(segmentSettings.SegmentAudioFile.FullName);

    // get indices configuration - extracted in BeforeAnalyze
    var acousticIndicesConfig = (RecognizerConfig)analysisSettings.Configuration;

    // get a lazily calculated indices function - if you never get the lazy value, the indices will never be calculated
    var lazyIndices = this.GetLazyIndices(
        recording,
        analysisSettings,
        segmentSettings,
        acousticIndicesConfig.HighResolutionIndices);

    // determine imageWidth for output images (one pixel per index-calculation interval)
    int imageWidth = (int)Math.Floor(
        recording.Duration.TotalSeconds / acousticIndicesConfig.HighResolutionIndices.IndexCalculationDuration);

    // execute actual analysis
    RecognizerResults results = this.Recognize(
        recording,
        analysisSettings.Configuration,
        segmentSettings.SegmentStartOffset,
        lazyIndices,
        segmentSettings.SegmentOutputDirectory,
        imageWidth);

    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);

    BaseSonogram sonogram = results.Sonogram;
    double[,] hits = results.Hits;
    var predictedEvents = results.Events;

    // double check all the events have the right offset in case it was missed
    foreach (var predictedEvent in predictedEvents)
    {
        predictedEvent.SegmentStartSeconds = segmentSettings.SegmentStartOffset.TotalSeconds;
    }

    analysisResults.Events = predictedEvents.ToArray();

    // compress high resolution indices - and save them.
    // IF they aren't used, empty values are returned.
    if (lazyIndices.IsValueCreated)
    {
        this.SummarizeHighResolutionIndices(
            analysisResults,
            lazyIndices.Value,
            acousticIndicesConfig.HighResolutionIndices);
    }

    // write intermediate output if necessary
    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, analysisResults.Events);
        analysisResults.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        analysisResults.SpectraIndicesFiles = this.WriteSpectrumIndicesFiles(
            segmentSettings.SegmentSpectrumIndicesDirectory,
            segmentSettings.Segment.SourceMetadata.Identifier,
            analysisResults.SpectralIndices);
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        const double EventThreshold = 0.1;
        var plots = results.Plots ?? new List<Plot>();
        Image image = this.DrawSonogram(sonogram, hits, plots, predictedEvents, EventThreshold);
        image.Save(imagePath);
        analysisResults.ImageFile = segmentSettings.SegmentImageFile;

        // draw a fancy high res index image
        // IF indices aren't used, no image is drawn.
        if (lazyIndices.IsValueCreated)
        {
            this.DrawLongDurationSpectrogram(
                segmentSettings.SegmentOutputDirectory,
                recording.BaseName,
                results.ScoreTrack,
                lazyIndices.Value,
                acousticIndicesConfig.HighResolutionIndices);
        }
    }

    return (analysisResults);
}
/// <summary>
/// Runs the canetoad recognizer over one prepared audio segment and, per the configured
/// save behaviors, writes the events file, per-minute summary indices, and a sonogram image.
/// </summary>
/// <param name="analysisSettings">Global analysis settings and save behaviors.</param>
/// <param name="segmentSettings">Per-segment paths and offsets.</param>
/// <returns>The populated <see cref="AnalysisResult2"/> for this segment.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    FileInfo segmentAudio = segmentSettings.SegmentAudioFile;

    // execute actual analysis
    dynamic config = analysisSettings.Configuration;
    var audioRecording = new AudioRecording(segmentAudio.FullName);
    Log.Debug("Canetoad sample rate:" + audioRecording.SampleRate);

    RecognizerResults recognizerOutput = Analysis(
        audioRecording,
        config,
        segmentSettings.SegmentStartOffset,
        segmentSettings.SegmentOutputDirectory);

    var result = new AnalysisResult2(analysisSettings, segmentSettings, audioRecording.Duration);
    BaseSonogram spectrogram = recognizerOutput.Sonogram;
    double[,] hitMatrix = recognizerOutput.Hits;
    Plot scorePlot = recognizerOutput.Plots.First();
    List<AcousticEvent> detectedEvents = recognizerOutput.Events;
    result.Events = detectedEvents.ToArray();

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteEventsFile(segmentSettings.SegmentEventsFile, result.Events);
        result.EventsFile = segmentSettings.SegmentEventsFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        // roll events up into per-minute summary indices
        var unitTime = TimeSpan.FromMinutes(1.0);
        result.SummaryIndices = this.ConvertEventsToSummaryIndices(result.Events, unitTime, result.SegmentAudioDuration, 0);
        result.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, result.SummaryIndices);
    }

    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(result.Events.Length))
    {
        const double EventThreshold = 0.1;
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        Image image = DrawSonogram(spectrogram, hitMatrix, scorePlot, detectedEvents, EventThreshold);
        image.Save(imagePath, ImageFormat.Png);
        result.ImageFile = segmentSettings.SegmentImageFile;
    }

    return result;
}
/// <summary>
/// Computes acoustic summary and spectral indices for one segment, one result per
/// index-calculation subsegment, then optionally writes index files and a standard
/// spectrogram image / CSV of the spectrogram matrix.
/// </summary>
/// <param name="analysisSettings">Global settings; AnalysisAnalyzerSpecificConfiguration must be an AcousticIndicesConfig.</param>
/// <param name="segmentSettings">Per-segment paths, offsets, and source metadata.</param>
/// <returns>Result whose Summary/Spectral index arrays hold one entry per subsegment.</returns>
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var acousticIndicesConfiguration = (AcousticIndicesConfig)analysisSettings.AnalysisAnalyzerSpecificConfiguration;
    var indexCalculationDuration = acousticIndicesConfiguration.IndexCalculationDuration.Seconds();
    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);
    var outputDirectory = segmentSettings.SegmentOutputDirectory;
    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    analysisResults.AnalysisIdentifier = this.Identifier;

    // calculate indices for each subsegment
    IndexCalculateResult[] subsegmentResults = CalculateIndicesInSubsegments(
        recording,
        segmentSettings.SegmentStartOffset,
        segmentSettings.AnalysisIdealSegmentDuration,
        indexCalculationDuration,
        acousticIndicesConfiguration.IndexProperties,
        segmentSettings.Segment.SourceMetadata.SampleRate,
        acousticIndicesConfiguration);

    var trackScores = new List<Plot>(subsegmentResults.Length);
    var tracks = new List<Track>(subsegmentResults.Length);
    analysisResults.SummaryIndices = new SummaryIndexBase[subsegmentResults.Length];
    analysisResults.SpectralIndices = new SpectralIndexBase[subsegmentResults.Length];

    // copy each subsegment's results into the flat result arrays, tagging with the source identifier
    for (int i = 0; i < subsegmentResults.Length; i++)
    {
        var indexCalculateResult = subsegmentResults[i];
        indexCalculateResult.SummaryIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;
        indexCalculateResult.SpectralIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;
        analysisResults.SummaryIndices[i] = indexCalculateResult.SummaryIndexValues;
        analysisResults.SpectralIndices[i] = indexCalculateResult.SpectralIndexValues;
        trackScores.AddRange(indexCalculateResult.TrackScores);
        if (indexCalculateResult.Tracks != null)
        {
            tracks.AddRange(indexCalculateResult.Tracks);
        }
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        analysisResults.SpectraIndicesFiles = WriteSpectrumIndicesFilesCustom(
            segmentSettings.SegmentSpectrumIndicesDirectory,
            Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name),
            analysisResults.SpectralIndices);
    }

    // write the segment spectrogram (typically of one minute duration) to CSV
    // this is required if you want to produced zoomed spectrograms at a resolution greater than 0.2 seconds/pixel
    bool saveSonogramData = analysisSettings.Configuration.GetBoolOrNull(AnalysisKeys.SaveSonogramData) ?? false;
    if (saveSonogramData || analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        var sonoConfig = new SonogramConfig(); // default values config
        sonoConfig.SourceFName = recording.FilePath;
        sonoConfig.WindowSize = acousticIndicesConfiguration.FrameLength;
        sonoConfig.WindowStep = analysisSettings.Configuration.GetIntOrNull(AnalysisKeys.FrameStep) ?? sonoConfig.WindowSize; // default = no overlap
        sonoConfig.WindowOverlap = (sonoConfig.WindowSize - sonoConfig.WindowStep) / (double)sonoConfig.WindowSize;

        // Linear or Octave frequency scale?
        bool octaveScale = analysisSettings.Configuration.GetBoolOrNull(AnalysisKeys.KeyOctaveFreqScale) ?? false;
        if (octaveScale)
        {
            // octave scale forces step == size, i.e. zero overlap
            sonoConfig.WindowStep = sonoConfig.WindowSize;
            sonoConfig.WindowOverlap = (sonoConfig.WindowSize - sonoConfig.WindowStep) / (double)sonoConfig.WindowSize;
        }

        ////sonoConfig.NoiseReductionType = NoiseReductionType.NONE; // the default
        ////sonoConfig.NoiseReductionType = NoiseReductionType.STANDARD;
        var sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);

        // remove the DC row of the spectrogram
        sonogram.Data = MatrixTools.Submatrix(sonogram.Data, 0, 1, sonogram.Data.GetLength(0) - 1, sonogram.Data.GetLength(1) - 1);

        if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
        {
            string imagePath = Path.Combine(outputDirectory.FullName, segmentSettings.SegmentImageFile.Name);

            // NOTE: hits (SPT in this case) is intentionally not supported
            var image = DrawSonogram(sonogram, null, trackScores, tracks);
            image.Save(imagePath);
            analysisResults.ImageFile = new FileInfo(imagePath);
        }

        if (saveSonogramData)
        {
            string csvPath = Path.Combine(outputDirectory.FullName, recording.BaseName + ".csv");
            Csv.WriteMatrixToCsv(csvPath.ToFileInfo(), sonogram.Data);
        }
    }

    return (analysisResults);
}
/// <summary>
/// Rebuilds the segment list by scanning the processed image column by column and
/// matching vertical runs of colored pixels into segments (see pseudocode below).
/// </summary>
/// <param name="imageProcessed">The thresholded/processed image to scan.</param>
/// <param name="seg">Settings controlling run/segment matching.</param>
public void MakeSegments(Image imageProcessed, SegmentSettings seg)
{
    segments.Clear();

    // for each new column of pixels, loop through the runs. a run is defined as
    // one or more colored pixels that are all touching, with one uncolored pixel or the
    // image boundary at each end of the set. for each set in the current column, count
    // the number of runs it touches in the adjacent (left and right) columns. here is
    // the pseudocode:
    //   if ((L > 1) || (R > 1))
    //     "this run is at a branch point so ignore the set"
    //   else
    //     if (L == 0)
    //       "this run is the start of a new segment"
    //     else
    //       "this run is appended to the segment on the left"
    int width = imageProcessed.Width;
    int height = imageProcessed.Height;

    bool[] lastBool = new bool[height];
    bool[] currBool = new bool[height];
    bool[] nextBool = new bool[height];
    NuGenSegment[] lastSegment = new NuGenSegment[height];
    NuGenSegment[] currSegment = new NuGenSegment[height];

    // NOTE(fix): the temporary Bitmap copy was never disposed, leaking a GDI+ handle on
    // every call; `using` guarantees disposal, and try/finally guarantees UnlockBits
    // even if a helper throws mid-scan.
    using (Bitmap b = new Bitmap(imageProcessed))
    {
        BitmapData bmData = b.LockBits(new Rectangle(0, 0, b.Width, b.Height), ImageLockMode.ReadOnly, b.PixelFormat);
        try
        {
            LoadBool(lastBool, bmData, -1);
            LoadBool(currBool, bmData, 0);
            LoadBool(nextBool, bmData, 1);
            LoadSegment(lastSegment, height);

            for (int x = 0; x < width; x++)
            {
                MatchRunsToSegments(x, height, lastBool, lastSegment, currBool, currSegment, nextBool, seg);

                // get ready for next column
                ScrollBool(lastBool, currBool, height);
                ScrollBool(currBool, nextBool, height);
                if (x + 1 < width)
                {
                    LoadBool(nextBool, bmData, x + 1);
                }

                ScrollSegment(lastSegment, currSegment, height);
            }
        }
        finally
        {
            b.UnlockBits(bmData);
        }
    }
}