/// <summary>
/// Loads spectral-frame CSV data covering the period [startTime, startTime + maxDuration).
/// One CSV file is read per minute of recording; every file is padded out to the expected
/// frame count before being appended so that minute boundaries stay aligned.
/// </summary>
/// <param name="dataDir">Directory containing the per-minute frame CSV files.</param>
/// <param name="fileStem">Common stem of the CSV file names.</param>
/// <param name="startTime">Offset of the first frame wanted.</param>
/// <param name="maxDuration">Maximum span of data to read.</param>
/// <param name="zoomingConfig">Supplies the spectral frame duration (seconds).</param>
/// <param name="indexGenerationSegmentDuration">Duration represented by one CSV file.</param>
/// <returns>Concatenated list of frames, one double[] per frame.</returns>
public static List<double[]> ReadFrameData(DirectoryInfo dataDir, string fileStem, TimeSpan startTime, TimeSpan maxDuration, SpectrogramZoomingConfig zoomingConfig, TimeSpan indexGenerationSegmentDuration)
{
    const bool skipHeader = true;
    const bool skipFirstColumn = true;

    TimeSpan endTime = startTime + maxDuration;
    int firstMinute = (int)Math.Floor(startTime.TotalMinutes);
    int lastMinuteExclusive = (int)Math.Ceiling(endTime.TotalMinutes);

    // Number of frames each one-minute file is expected to contain.
    var segmentSeconds = (int)indexGenerationSegmentDuration.TotalSeconds;
    var framesPerSegment = (int)Math.Round(segmentSeconds / zoomingConfig.SpectralFrameDuration);

    // Read the first minute's file and pad it to the full frame count.
    string firstCsvPath = Path.Combine(dataDir.FullName, $"{fileStem}_{firstMinute}min.csv");
    List<double[]> frameData = CsvTools.ReadCSVFileOfDoubles(firstCsvPath, skipHeader, skipFirstColumn);
    ZoomTiledSpectrograms.PadEndOfListOfFrames(frameData, framesPerSegment);

    // Append each subsequent minute, padding every file before concatenation.
    for (int minute = firstMinute + 1; minute < lastMinuteExclusive; minute++)
    {
        string csvPath = Path.Combine(dataDir.FullName, $"{fileStem}_{minute}min.csv");
        List<double[]> minuteFrames = CsvTools.ReadCSVFileOfDoubles(csvPath, skipHeader, skipFirstColumn);
        ZoomTiledSpectrograms.PadEndOfListOfFrames(minuteFrames, framesPerSegment);
        frameData.AddRange(minuteFrames);
    }

    return frameData;
}
/// <summary>
/// Wraps up the experiment: shows the farewell message, then writes the collected
/// trial data to a CSV file whose name is a freshly generated GUID.
/// </summary>
public void EndExperiment()
{
    // could add questionnaire about e.g. snake phobia here
    instructions.ShowGoodbyeMsg();

    List<string> csvLines = CsvTools.GenerateCsv(_data, _header);
    string savePath = Application.dataPath + "/Data/" + System.Guid.NewGuid();
    CsvTools.SaveFile(savePath, csvLines);
}
/// <summary>
/// Handler for the "Select File" button: shows an open-file dialog and, when the
/// user confirms a choice, displays the selected path in the label.
/// </summary>
/// <param name="sender">The button that raised the event.</param>
/// <param name="e">Standard event arguments (unused).</param>
private void button1_Click(object sender, EventArgs e)
{
    _ofd = CsvTools.OpenFileDiag();

    // Only update the label when the user actually picked a file.
    if (_ofd.ShowDialog() == DialogResult.OK)
    {
        lblfilepath.Text = _ofd.FileName;
    }
}
/// <summary>
/// Connectivity check: downloads a known-good Yahoo Finance quote CSV and treats
/// any failure as "website unreachable". The downloaded content itself is not inspected.
/// </summary>
/// <exception cref="Exception">Thrown when the download fails; wraps the original error.</exception>
public override void Check()
{
    const string testQuoteUrl = "http://ichart.finance.yahoo.com/table.csv?s=IBM&d=7&e=21&f=2013&g=d&a=0&b=2&c=1962&ignore=.csv";
    try
    {
        // Result deliberately discarded; success of the call is all that matters.
        _ = CsvTools.GetCsvFromUrl(testQuoteUrl);
    }
    catch (Exception ex)
    {
        // BUG FIX: message typo ("acces") corrected, and the original exception is
        // preserved as InnerException so the full stack trace is not lost.
        throw new Exception($"Cannot access Yahoo website: {ex.Message}", ex);
    }
}
//public const string ANDREWS_SELECTION_PATH = @"C:\SensorNetworks\WavFiles\Kiwi\Results_TUITCE_20091215_220004\TUITCE_20091215_220004_ANDREWS_SELECTIONS.csv";
//public const string ANDREWS_SELECTION_PATH = @"C:\SensorNetworks\WavFiles\Kiwi\Results_KAPITI2_20100219_202900\KAPITI2_20100219_202900_ANDREWS_SELECTIONS.csv";
//public const string ANDREWS_SELECTION_PATH = @"C:\SensorNetworks\WavFiles\Kiwi\Results_TUITCE_20091215_210000\TUITCE_20091215_210000_ANDREWS_SELECTIONS.csv";
//EVENTS .CSV FILES
//C:\SensorNetworks\Output\LSKiwi3\Tower\Towsey.LSKiwi3\TOWER_20100208_204500_Towsey.LSKiwi3.Events.csv
//COMMAND LINES
//kiwiROC "C:\SensorNetworks\Output\LSKiwi3\Tower\Towsey.LSKiwi3\TOWER_20100208_204500_Towsey.LSKiwi3.Events.csv"  "C:\SensorNetworks\Output\LSKiwi3\TOWER_20100208_204500_ANDREWS_SELECTIONS.csv"
//kiwiROC "C:\SensorNetworks\Output\LSKiwi3\Tower\Towsey.LSKiwi3\TOWER_20100208_204500_Towsey.LSKiwi3.Events.csv"  "C:\SensorNetworks\Output\LSKiwi3\TOWER_20100208_204500_ANDREWS_SELECTIONS.csv"

/// <summary>
/// Entry point: compares predicted kiwi-call events against Andrew's ground-truth
/// selections, writes an ROC report CSV next to the predictions file, and also
/// emits SEE5 data/class-name files for machine-learning follow-up.
/// </summary>
/// <param name="arguments">Parsed command line; Events = predictions csv, Selections = ground truth csv.</param>
public static void Main(Arguments arguments)
{
    if (arguments == null)
    {
        throw new NoDeveloperMethodException();
    }

    bool verbose = true;
    if (verbose)
    {
        string title = "# SOFTWARE TO CALCULATE SENSITIVITY, RECALL AND ROC INFO FOR DETECTION OF CALLS OF THE LITTLE SPOTTED KIWI (Apteryx owenii)";
        string date = "# DATE AND TIME: " + DateTime.Now;
        LoggedConsole.WriteLine(title);
        LoggedConsole.WriteLine(date);
    }

    var fiKiwiCallPredictions = arguments.Events.ToFileInfo();
    string outputDir = fiKiwiCallPredictions.DirectoryName;
    var fiGroundTruth = arguments.Selections.ToFileInfo();

    //InitOutputTableColumns();
    //############################################################################
    DataTable dt = CalculateRecallPrecision(fiKiwiCallPredictions, fiGroundTruth);
    //############################################################################

    // Report is written alongside the predictions file.
    string opFileStem = fiKiwiCallPredictions.BaseName();
    string fName = "LSKRoc_Report_" + opFileStem + ".csv";
    string reportROCPath = Path.Combine(outputDir, fName);
    CsvTools.DataTable2CSV(dt, reportROCPath);

    //write SEE5 data and class names files.
    var diOutputDir = new DirectoryInfo(outputDir);
    WriteSee5DataFiles(dt, diOutputDir, opFileStem);

    var fiReport = new FileInfo(reportROCPath);
    if (fiReport.Exists)
    {
        LoggedConsole.WriteLine("REPORT FILE WAS PRODUCED: <{0}>", fiReport.FullName);
    }
    else
    {
        LoggedConsole.WriteLine("REPORT FILE WAS NOT PRODUCED: <{0}>", fiReport.FullName);
    }

    // BUG FIX: message previously read "FINSIHED".
    LoggedConsole.WriteLine("FINISHED");
}
/// <summary>
/// Bulk-loads the DSRoadVehicleFlowMoney CSV into the database, stamping every row
/// with fresh System/Group GUIDs, then waits for a key press before returning.
/// </summary>
/// <param name="args">Command-line arguments (currently unused; the path is hard-coded).</param>
public static void LoadCsv(string[] args)
{
    const string path = @"D:\GreyHound\FILES\TMP\DSRoadVehicleFlowMoney.csv";

    CsvTools.LoadCsvData(
        "DSRoadVehicleFlowMoney",
        path,
        conn1,
        row =>
        {
            // Every imported row gets its own identifiers.
            row["SystemUID"] = Guid.NewGuid();
            row["GroupUID"] = Guid.NewGuid();
            return true; // accept the row
        });

    Console.ReadKey();
}
/// <summary>
/// Reads a csv file containing summary indices and converts them to a tracks image.
/// </summary>
/// <param name="csvFile">Summary-indices csv; must contain the HighAmplitudeIndex and ClippingIndex columns.</param>
/// <returns>An image of two clipping tracks, or null when the file does not exist.</returns>
public static Image<Rgb24> DrawHighAmplitudeClippingTrack(FileInfo csvFile)
{
    if (!csvFile.Exists)
    {
        return null;
    }

    // Column name -> column values, keyed by csv header.
    Dictionary<string, double[]> columns = CsvTools.ReadCSVFile2Dictionary(csvFile.FullName);

    return DrawHighAmplitudeClippingTrack(columns["HighAmplitudeIndex"], columns["ClippingIndex"]);
}
/// <summary>
/// Reads a single csv file in pivot-table form (columns: ?, ?, minute, freqBin, index1, index2, ...)
/// and returns a dictionary of spectral-index matrices, one [freqBin, minute] matrix per
/// index column, keyed by that column's header.
/// </summary>
/// <param name="csvFileName">Path of the pivot-table csv.</param>
/// <returns>Dictionary mapping index name to its [row = inverted freq bin, col = minute] matrix.</returns>
public static Dictionary<string, double[,]> ReadPivotTableToSpectralIndices(string csvFileName)
{
    // MICHAEL: the new Csv class can read this in, and optionally transpose as it reads
    Tuple<List<string>, List<double[]>> tuple = CsvTools.ReadCSVFile(csvFileName);
    List<string> headers = tuple.Item1;
    List<double[]> columns = tuple.Item2;

    // set up dictionary of matrices
    var dict = new Dictionary<string, double[,]>();

    // Column 2 holds the minute index, column 3 the frequency-bin index.
    double min, max;
    DataTools.MinMax(columns[2], out min, out max);
    int minMinute = (int)min;
    int maxMinute = (int)max;
    DataTools.MinMax(columns[3], out min, out max);
    int minFreqBin = (int)min;
    int maxFreqBin = (int)max;

    // Matrix dimensions are the inclusive spans of bin and minute values.
    int rowCount = maxFreqBin - minFreqBin + 1;
    int colCount = maxMinute - minMinute + 1;
    int pivotTableRowCount = columns[0].Length;

    // Columns 4+ each hold one spectral index; build one matrix per index.
    for (int i = 4; i < headers.Count; i++)
    {
        var matrix = new double[rowCount, colCount];
        for (int ptRow = 0; ptRow < pivotTableRowCount; ptRow++)
        {
            // NOTE(review): 'col' is not offset by minMinute (and 'row' not by minFreqBin's
            // lower bound); if the minute column does not start at 0 this will index outside
            // colCount — confirm inputs always start at minute 0 / freq bin range [0..maxFreqBin].
            int col = (int)columns[2][ptRow];
            // Rows are inverted so that high frequencies appear at the top of the matrix.
            int row = maxFreqBin - (int)columns[3][ptRow];
            matrix[row, col] = columns[i][ptRow];
        }

        string key = headers[i];
        dict[key] = matrix;
    }

    return(dict);
}
/// <summary>
/// Reads a csv file containing summary indices and converts them to a tracks image.
/// Thin wrapper: loads the csv into a header->values dictionary, then delegates to
/// the dictionary-based overload.
/// </summary>
/// <param name="listOfIndexProperties">Display properties for each index.</param>
/// <param name="csvFile">Summary-indices csv file.</param>
/// <param name="titleText">Title drawn on the image.</param>
/// <param name="indexCalculationDuration">Time span represented by one index value.</param>
/// <param name="recordingStartDate">Optional absolute start time of the recording.</param>
/// <returns>The tracks image, or null when the csv file does not exist.</returns>
public static Image<Rgb24> DrawImageOfSummaryIndices(
    Dictionary<string, IndexProperties> listOfIndexProperties,
    FileInfo csvFile,
    string titleText,
    TimeSpan indexCalculationDuration,
    DateTimeOffset? recordingStartDate)
{
    if (!csvFile.Exists)
    {
        return null;
    }

    var summaryIndices = CsvTools.ReadCSVFile2Dictionary(csvFile.FullName);

    return DrawImageOfSummaryIndices(
        listOfIndexProperties,
        summaryIndices,
        titleText,
        indexCalculationDuration,
        recordingStartDate);
}
/// <summary>
/// Prompts for a destination via a save-file dialog and exports a price comparison
/// (fixed vs. calculated) for every product that has components, evaluated at the
/// most recently selected date.
/// </summary>
public void WriteTableToCsv()
{
    DateTime targetDate = _lastSelectedDate.Date;

    var dialog = new SaveFileDialog()
    {
        FileName = GenerateCsvFilename(targetDate),
        DefaultExt = "csv",
        Filter = "Csv Text|*.csv",
        Title = "Save a Csv File",
    };

    // Abort when the user cancels or leaves the file name blank.
    var dialogResult = dialog.ShowDialog();
    if (dialogResult == false || dialog.FileName == "")
    {
        return;
    }

    var currentTimeStamp = DateTime.Now.ToString(Settings.Default.DefaultDateFormat + " " + Settings.Default.DefaultTimeFormat);
    var targetTimeStamp = targetDate.ToString(Settings.Default.DefaultDateFormat);

    // The anonymous-type member names below become the CSV column headers; do not rename them.
    var rows = Globals.Products
        .Where(product => product.HasComponents == true)
        .Select(product => new
        {
            CurrentTime = currentTimeStamp,
            TargetDate = targetTimeStamp,
            ProductId = product.Id,
            ComponentCount = product.ComponentCount,
            Price = product.FixedPrice,
            CalculatedPrice = decimal.Round(product.GetCalculatedPrice(targetDate), Settings.Default.ExtendedRoundingDecimals),
            CalculatedPriceActual = decimal.Round(product.GetCalculatedPriceActual(targetDate), Settings.Default.ExtendedRoundingDecimals),
            DeltaFixedVsActualPrice = decimal.Round(ToPercetile(product.FixedPrice, product.GetCalculatedPriceActual(targetDate)), Settings.Default.ExtendedRoundingDecimals),
            ContainsProductsLackingFixedPrice = String.Join("|", product.GetSubProductsLackingPrice().ToArray()),
        })
        .ToList();

    CsvTools.ExportListToFile(rows, dialog.FileName, Settings.Default.CsvSeparator);
}
/// <summary>
/// Runs the key Analysis() method over one audio segment, then packages the detected
/// acoustic events into an AnalysisResult2. Depending on the configured save behaviors
/// it writes an events csv, a summary-indices csv and a sonogram image.
/// </summary>
/// <param name="analysisSettings">Global analysis configuration (config dictionary, save behaviors).</param>
/// <param name="segmentSettings">Per-segment file locations and metadata.</param>
/// <returns>The populated result, or null when the segment could not be analysed.</returns>
public override AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var fiAudioF = segmentSettings.SegmentAudioFile;
    var diOutputDir = segmentSettings.SegmentOutputDirectory;

    //######################################################################
    var results = Analysis(fiAudioF, analysisSettings, segmentSettings.Segment.SourceMetadata.SampleRate, segmentSettings.SegmentStartOffset);
    //######################################################################

    if (results == null)
    {
        return null; //nothing to process (broken)
    }

    var sonogram = results.Item1;
    var hits = results.Item2;
    var scores = results.Item3;
    var predictedEvents = results.Item4;
    var recordingTimeSpan = results.Item5;

    var result = new AnalysisResult2(analysisSettings, segmentSettings, recordingTimeSpan);
    result.AnalysisIdentifier = this.Identifier;
    result.MiscellaneousResults["dataTable"] = null;

    DataTable dataTable = null;
    if (predictedEvents != null)
    {
        string analysisName = analysisSettings.ConfigDict[AnalysisKeys.AnalysisName];
        string fName = Path.GetFileNameWithoutExtension(fiAudioF.Name);
        foreach (AcousticEvent ev in predictedEvents)
        {
            ev.FileName = fName;
            //ev.Name = analysisName; //TEMPORARY DISABLE
            ev.SegmentDurationSeconds = recordingTimeSpan.TotalSeconds;
        }

        //write events to a data table to return, sorted by start time.
        dataTable = WriteEvents2DataTable(predictedEvents);
        string sortString = AnalysisKeys.EventStartAbs + " ASC";
        dataTable = DataTableTools.SortTable(dataTable, sortString);
    }

    // BUG FIX: dataTable is null when there were no predicted events; previously that
    // null was passed straight into DataTable2CSV. Guard before writing.
    if (analysisSettings.AnalysisDataSaveBehavior && dataTable != null)
    {
        CsvTools.DataTable2CSV(dataTable, segmentSettings.SegmentEventsFile.FullName);
    }
    else
    {
        result.EventsFile = null;
    }

    // Same null guard for the per-minute summary indices derived from the events table.
    if (analysisSettings.AnalysisDataSaveBehavior && dataTable != null)
    {
        double scoreThreshold = 0.01;
        if (analysisSettings.ConfigDict.ContainsKey(AnalysisKeys.IntensityThreshold))
        {
            scoreThreshold = ConfigDictionary.GetDouble(AnalysisKeys.IntensityThreshold, analysisSettings.ConfigDict);
        }

        TimeSpan unitTime = TimeSpan.FromSeconds(60); //index for each time span of 1 minute
        var indicesDT = this.ConvertEvents2Indices(dataTable, unitTime, recordingTimeSpan, scoreThreshold);
        CsvTools.DataTable2CSV(indicesDT, segmentSettings.SegmentSummaryIndicesFile.FullName);
    }
    else
    {
        result.SummaryIndices = null;
    }

    //save image of sonograms
    // BUG FIX: predictedEvents may be null here; treat that as zero events.
    int eventCount = predictedEvents?.Count ?? 0;
    if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave(eventCount))
    {
        string imagePath = segmentSettings.SegmentImageFile.FullName;
        Image image = DrawSonogram(sonogram, hits, scores, predictedEvents);
        image.Save(imagePath, ImageFormat.Png);
    }

    result.MiscellaneousResults["dataTable"] = dataTable;
    result.ImageFile = segmentSettings.SegmentImageFile;

    //result.DisplayItems = { { 0, "example" }, { 1, "example 2" }, }
    //result.OutputFiles = { { "exmaple file key", new FileInfo("Where's that file?") } }
    return result;
}
/// <summary>
/// Assume that we are processing data for one minute only.
/// From this one minute of data, we produce images at three scales.
/// A one minute recording framed at 20ms should yield 3000 frames.
/// But to achieve this where sr= 22050 and frameSize=512, we need an overlap of 71 samples.
/// Consequently only 2999 frames returned per minute.
/// Therefore have to pad end to get 3000 frames.
/// </summary>
public static TimeOffsetSingleLayerSuperTile[] DrawSuperTilesFromSingleFrameSpectrogram(DirectoryInfo dataDir, LdSpectrogramConfig analysisConfig, Dictionary<string, IndexProperties> indexProperties, SpectrogramZoomingConfig zoomingConfig, int minute, double[] imageScales, string basename, IndexGenerationData indexGeneration, ImageChrome chromeOption, TimeSpan alignmentPadding)
{
    string fileStem = basename;

    // string analysisType = analysisConfig.AnalysisType;
    // indexScale is currently unused in this method.
    TimeSpan indexScale = indexGeneration.IndexCalculationDuration;
    TimeSpan frameScale = TimeSpan.FromSeconds(zoomingConfig.SpectralFrameDuration);

    // NOTE(review): .Value throws InvalidOperationException if MaximumSegmentDuration
    // is null — confirm callers always populate it.
    var expectedDataDurationInSeconds = (int)indexGeneration.MaximumSegmentDuration.Value.TotalSeconds;
    var expectedFrameCount = (int)Math.Round(expectedDataDurationInSeconds / zoomingConfig.SpectralFrameDuration);

    string fileName = fileStem + "_" + minute + "min.csv";
    string csvPath = Path.Combine(dataDir.FullName, fileName);

    bool skipHeader = true;
    bool skipFirstColumn = true;

    // read spectrogram into a list of frames
    List<double[]> frameList = CsvTools.ReadCSVFileOfDoubles(csvPath, skipHeader, skipFirstColumn);
    if (frameList == null)
    {
        LoggedConsole.WriteErrorLine(
            "WARNING: METHOD DrawSuperTilesFromSingleFrameSpectrogram(): NO SPECTRAL DATA SUPPLIED");
        return(null);
    }

    // Normalize the frame count: pad a short list, trim a long one, to expectedFrameCount.
    PadEndOfListOfFrames(frameList, expectedFrameCount);
    TrimEndOfListOfFrames(frameList, expectedFrameCount);

    //// frame count will be one less than expected for the recording segment because of frame overlap
    //// Therefore pad the end of the list of frames with the last frame.
    // int frameDiscrepancy = expectedFrameCount - frameList.Count;
    // if (frameDiscrepancy > 0)
    // {
    // double[] frame = frameList[frameList.Count - 1];
    // for (int d = 0; d < frameDiscrepancy; d++)
    // {
    // frameList.Add(frame);
    // }
    // }
    var frameData = new TemporalMatrix("rows", MatrixTools.ConvertList2Matrix(frameList), frameScale);
    frameData.SwapTemporalDimension(); // so the two data matrices have the same temporal dimension

    TimeSpan startTime = indexGeneration.AnalysisStartOffset; // default = zero minute of day i.e. midnight
    TimeSpan startTimeOfData = startTime + TimeSpan.FromMinutes(minute);

    var str = new TimeOffsetSingleLayerSuperTile[imageScales.Length];

    // make the images: one super-tile per requested zoom scale.
    for (int scale = 0; scale < imageScales.Length; scale++)
    {
        TimeSpan imageScale = TimeSpan.FromSeconds(imageScales[scale]);
        // compressionFactor and columnDuration are computed but not used below.
        var compressionFactor = (int)Math.Round(imageScale.TotalMilliseconds / frameData.DataScale.TotalMilliseconds);

        double columnDuration = imageScale.TotalSeconds;

        // int expectedFrameCount = (int)Math.Round(expectedDataDurationInSeconds / columnDuration);

        // ############## RESEARCH CHOICE HERE >>>> compress spectrograms to correct scale using either max or average
        // Average appears to offer better contrast.
        // double[,] data = frameData.CompressMatrixInTemporalDirectionByTakingMax(imageScale);
        double[,] data = frameData.CompressMatrixInTemporalDirectionByTakingAverage(imageScale);

        var spectrogramImage = DrawFrameSpectrogramAtScale(
            analysisConfig,
            zoomingConfig,
            startTimeOfData,
            imageScale,
            data,
            indexGeneration,
            chromeOption);

        str[scale] = new TimeOffsetSingleLayerSuperTile(
            alignmentPadding,
            SpectrogramType.Frame,
            imageScale,
            spectrogramImage.CloneAs<Rgba32>(),
            startTimeOfData);
    }

    return(str);
}
/// <summary>
/// Draws one "EASY" false-colour image from consecutive days of pre-concatenated
/// 24-hour summary-index CSV files: one horizontal strip of dayPixelHeight pixels per
/// day, 1440 columns (one per minute), coloured by three chosen summary indices
/// (squared and scaled to 0-255), with month/week grid lines, a y-axis scale,
/// sunrise/sunset lines and time/title tracks. The result is saved as a PNG in the
/// output directory.
/// </summary>
/// <param name="arguments">Input/output directories, date range, file filter, site name, etc.</param>
public static void Execute(Arguments arguments)
{
    var inputDirs = arguments.InputDataDirectories.Select(FileInfoExtensions.ToDirectoryInfo);
    var output = arguments.OutputDirectory.ToDirectoryInfo();

    string date = "# DATE AND TIME: " + DateTime.Now;
    LoggedConsole.WriteLine("\n# DRAW an EASY IMAGE from consecutive days of SUMMARY INDICES in CSV files.");
    LoggedConsole.WriteLine("# IT IS ASSUMED THAT THE CSV files are already concatenated into 24 hour files.");
    LoggedConsole.WriteLine(date);
    LoggedConsole.WriteLine("# Summary Index.csv files are in directories:");
    foreach (DirectoryInfo dir in inputDirs)
    {
        LoggedConsole.WriteLine(" {0}", dir.FullName);
    }

    LoggedConsole.WriteLine("# Output directory: " + output);
    if (arguments.StartDate == null)
    {
        LoggedConsole.WriteLine("# Start date = NULL (No argument provided). Will revise start date ....");
    }
    else
    {
        LoggedConsole.WriteLine("# Start date = " + arguments.StartDate.ToString());
    }

    if (arguments.EndDate == null)
    {
        LoggedConsole.WriteLine("# End date = NULL (No argument provided). Will revise end date ....");
    }
    else
    {
        LoggedConsole.WriteLine("# End date = " + arguments.EndDate.ToString());
    }

    LoggedConsole.WriteLine("# FILE FILTER = " + arguments.FileFilter);
    LoggedConsole.WriteLine();

    // PATTERN SEARCH FOR SUMMARY INDEX FILES.
    //string pattern = "*__Towsey.Acoustic.Indices.csv";
    FileInfo[] csvFiles = IndexMatrices.GetFilesInDirectories(inputDirs.ToArray(), arguments.FileFilter);

    //LoggedConsole.WriteLine("# Subdirectories Count = " + subDirectories.Length);
    LoggedConsole.WriteLine("# SummaryIndexFiles.csv Count = " + csvFiles.Length);

    if (csvFiles.Length == 0)
    {
        LoggedConsole.WriteErrorLine("\n\nWARNING from method DrawEasyImage.Execute():");
        LoggedConsole.WriteErrorLine(" No SUMMARY index files were found.");
        LoggedConsole.WriteErrorLine(" RETURNING EMPTY HANDED!");
        return;
    }

    // Sort the files by date and return as a dictionary: sortedDictionaryOfDatesAndFiles<DateTimeOffset, FileInfo>
    //var sortedDictionaryOfDatesAndFiles = LDSpectrogramStitching.FilterFilesForDates(csvFiles, arguments.TimeSpanOffsetHint);

    // calculate new start date if passed value = null.
    // NOTE(review): despite the "Will revise" messages above, no revision is actually
    // performed here — a null StartDate or EndDate makes the cast below throw. Confirm
    // whether both dates are guaranteed non-null by argument validation upstream.
    DateTimeOffset? startDate = arguments.StartDate;
    DateTimeOffset? endDate = arguments.EndDate;
    TimeSpan totalTimespan = (DateTimeOffset)endDate - (DateTimeOffset)startDate;
    int dayCount = totalTimespan.Days + 1; // assume last day has full 24 hours of recording available.

    LoggedConsole.WriteLine("\n# Start date = " + startDate.ToString());
    LoggedConsole.WriteLine("# End date = " + endDate.ToString());
    LoggedConsole.WriteLine(string.Format("# Elapsed time = {0:f1} hours", dayCount * 24));
    LoggedConsole.WriteLine("# Day count = " + dayCount + " (inclusive of start and end days)");
    LoggedConsole.WriteLine("# Time Zone = " + arguments.TimeSpanOffsetHint.ToString());

    // create top level output directory if it does not exist.
    DirectoryInfo opDir = output;
    if (!opDir.Exists)
    {
        opDir.Create();
    }

    // SET UP DEFAULT SITE LOCATION INFO -- DISCUSS IWTH ANTHONY
    // The following location data is used only to draw the sunrise/sunset tracks on images.
    double? latitude = null;
    double? longitude = null;
    var siteDescription = new SiteDescription();
    siteDescription.SiteName = arguments.FileStemName;
    siteDescription.Latitude = latitude;
    siteDescription.Longitude = longitude;

    // the following required if drawing the index images
    FileInfo indexPropertiesConfig = null;

    // require IndexGenerationData and indexPropertiesConfig for drawing
    //indexGenerationData = IndexGenerationData.GetIndexGenerationData(csvFiles[0].Directory);
    indexPropertiesConfig = arguments.IndexPropertiesConfig.ToFileInfo();
    Dictionary<string, IndexProperties> listOfIndexProperties = IndexProperties.GetIndexProperties(indexPropertiesConfig);

    // Read the first csv only to obtain the column (index) names.
    Tuple<List<string>, List<double[]>> tuple = CsvTools.ReadCSVFile(csvFiles[0].FullName);
    var names = tuple.Item1;

    // default EASY indices
    int redID = 3; // backgroundNoise
    int grnID = 5; // avSNROfActiveframes
    int bluID = 7; // events per second
    string rep = @"bgn-avsnr-evn";

    // ACI Ht Hpeaks EASY indices
    // Dead branch kept as a manual switch for alternative colour mappings.
    if (false)
    {
        redID = 11; // ACI
        grnID = 12; // Ht
        //bluID = 13; // HavgSp
        //bluID = 14; // Hvariance
        //bluID = 15; // Hpeaks
        bluID = 16; // Hcov
        //bluID = 7; // SPT
        rep = @"aci-ht-hcov";
        //rep = @"aci-ht-spt";
    }

    // LF, MF, HF
    // This branch is currently active and overrides the defaults above.
    if (true)
    {
        redID = 10; // LF
        grnID = 9; // MF
        bluID = 8; // HF
        rep = @"lf-mf-hf";
    }

    IndexProperties redIndexProps = listOfIndexProperties[names[redID]];
    IndexProperties grnIndexProps = listOfIndexProperties[names[grnID]];
    IndexProperties bluIndexProps = listOfIndexProperties[names[bluID]];

    int dayPixelHeight = 4;
    int rowCount = (dayPixelHeight * dayCount) + 35; // +30 for grid lines
    int colCount = 1440; // one column per minute of the day
    var bitmap = new Image<Rgb24>(colCount, rowCount);
    var colour = Color.Yellow; // currently unused
    int currentRow = 0;
    var oneDay = TimeSpan.FromHours(24);
    int graphWidth = colCount;
    int trackHeight = 20;
    var stringFont = Drawing.Arial8;
    string[] monthNames = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" };

    // for drawing the y-axis scale
    int scaleWidth = trackHeight + 7;
    var yAxisScale = new Image<Rgb24>(scaleWidth, rowCount + (2 * trackHeight));
    yAxisScale.Mutate(g =>
    {
        g.Clear(Color.Black);

        // loop over days
        // NOTE(review): csvFiles[d] assumes the files are sorted chronologically and
        // that there is exactly one 24-hour file per day in the range — verify.
        for (int d = 0; d < dayCount; d++)
        {
            var thisday = ((DateTimeOffset)startDate).AddDays(d);

            // First of the month: draw grey grid lines and the month (and year) labels.
            if (thisday.Day == 1)
            {
                int nextRow = currentRow + 1;
                for (int c = 0; c < colCount; c++)
                {
                    bitmap[c, currentRow] = Color.Gray;
                    bitmap[c, nextRow] = Color.Gray;
                }

                for (int c = 0; c < scaleWidth; c++)
                {
                    yAxisScale[c, currentRow + trackHeight] = Color.Gray;
                    yAxisScale[c, nextRow + trackHeight] = Color.Gray;
                }

                string month = monthNames[thisday.Month - 1];
                if (thisday.Month == 1) // January
                {
                    g.DrawText(thisday.Year.ToString(), stringFont, Color.White, new PointF(0, nextRow + trackHeight + 1)); //draw time
                    g.DrawText(month, stringFont, Color.White, new PointF(1, nextRow + trackHeight + 11)); //draw time
                }
                else
                {
                    g.DrawText(month, stringFont, Color.White, new PointF(1, nextRow + trackHeight + 1)); //draw time
                }

                currentRow += 2;
            }

            // get the exact date and time
            LoggedConsole.WriteLine($"READING DAY {d + 1} of {dayCount}: {thisday.ToString()}");

            // CREATE DAY LEVEL OUTPUT DIRECTORY for this day
            string dateString = $"{thisday.Year}{thisday.Month:D2}{thisday.Day:D2}";

            tuple = CsvTools.ReadCSVFile(csvFiles[d].FullName);
            var arrays = tuple.Item2;

            var redArray = arrays[redID];
            var grnArray = arrays[grnID];
            var bluArray = arrays[bluID];

            // NormaliseMatrixValues the indices
            redArray = DataTools.NormaliseInZeroOne(redArray, redIndexProps.NormMin, redIndexProps.NormMax);
            grnArray = DataTools.NormaliseInZeroOne(grnArray, grnIndexProps.NormMin, grnIndexProps.NormMax);
            bluArray = DataTools.NormaliseInZeroOne(bluArray, bluIndexProps.NormMin, bluIndexProps.NormMax);

            // Paint this day's strip: each normalized value is squared (boosts contrast),
            // scaled to 0-255 and clamped.
            for (int c = 0; c < colCount; c++)
            {
                for (int r = 0; r < dayPixelHeight; r++)
                {
                    //transformedValue = Math.Sqrt(redArray[c]);
                    var transformedValue = redArray[c] * redArray[c];
                    int redVal = (int)Math.Round(transformedValue * 255);
                    if (redVal < 0)
                    {
                        redVal = 0;
                    }
                    else if (redVal > 255)
                    {
                        redVal = 255;
                    }

                    //transformedValue = Math.Sqrt(grnArray[c]);
                    transformedValue = grnArray[c] * grnArray[c]; // square the value
                    int grnVal = (int)Math.Round(transformedValue * 255);
                    if (grnVal < 0)
                    {
                        grnVal = 0;
                    }
                    else if (grnVal > 255)
                    {
                        grnVal = 255;
                    }

                    //transformedValue = Math.Sqrt(bluArray[c]);
                    transformedValue = bluArray[c] * bluArray[c]; // square the value
                    int bluVal = (int)Math.Round(transformedValue * 255);
                    if (bluVal < 0)
                    {
                        bluVal = 0;
                    }
                    else if (bluVal > 255)
                    {
                        bluVal = 255;
                    }

                    bitmap[c, currentRow + r] = Color.FromRgb((byte)redVal, (byte)grnVal, (byte)bluVal);
                }
            } // over all columns

            currentRow += dayPixelHeight;

            // Weekly grey separator line.
            if (thisday.Day % 7 == 0)
            {
                for (int c = 0; c < colCount; c++)
                {
                    bitmap[c, currentRow] = Color.Gray;
                }

                currentRow++;
            }
        } // over days
    });

    // draw on civil dawn and dusk lines
    int startdayOfYear = ((DateTimeOffset)startDate).DayOfYear;
    int endDayOfYear = ((DateTimeOffset)endDate).DayOfYear;
    SunAndMoon.AddSunRiseSetLinesToImage(bitmap, arguments.BrisbaneSunriseDatafile.ToFileInfo(), startdayOfYear, endDayOfYear, dayPixelHeight);

    // add the time scales
    Image<Rgb24> timeBmp1 = ImageTrack.DrawTimeRelativeTrack(oneDay, graphWidth, trackHeight);
    var imageList = new[] { timeBmp1, bitmap, timeBmp1 };
    Image<Rgb24> compositeBmp1 = (Image<Rgb24>)ImageTools.CombineImagesVertically(imageList);

    imageList = new[] { yAxisScale, compositeBmp1 };
    Image<Rgb24> compositeBmp2 = (Image<Rgb24>)ImageTools.CombineImagesInLine(imageList);

    // indices used for image
    string indicesDescription = $"{redIndexProps.Name}|{grnIndexProps.Name}|{bluIndexProps.Name}";
    string startString = $"{startDate.Value.Year}/{startDate.Value.Month}/{startDate.Value.Day}";
    string endString = $"{endDate.Value.Year}/{endDate.Value.Month}/{endDate.Value.Day}";
    string title = $"EASY: {arguments.FileStemName} From {startString} to {endString} Indices: {indicesDescription}";
    Image<Rgb24> titleBar = ImageTrack.DrawTitleTrack(compositeBmp2.Width, trackHeight, title);
    imageList = new[] { titleBar, compositeBmp2 };
    compositeBmp2 = (Image<Rgb24>)ImageTools.CombineImagesVertically(imageList);

    var outputFileName = Path.Combine(opDir.FullName, arguments.FileStemName + "." + rep + ".EASY.png");
    compositeBmp2.Save(outputFileName);
} // Execute()
//Analyze()

/// <summary>
/// ################ THE KEY ANALYSIS METHOD
/// Detects frog calls in one audio segment: builds a noise-reduced spectrogram,
/// extracts spectral tracks below 6 kHz, measures each track's periodicity, converts
/// tracks to acoustic events and classifies each event against the frog-parameters
/// reference table.
/// </summary>
/// <param name="fiSegmentOfSourceFile">The audio segment to analyse.</param>
/// <param name="analysisSettings">Supplies the config dictionary (thresholds, durations, frog data path).</param>
/// <param name="originalSampleRate">Sample rate of the original source recording.</param>
/// <param name="segmentStartOffset">Offset of this segment within the source recording.</param>
/// <returns>Tuple of (sonogram, hits matrix, score plots, frog events, recording duration), or null on failure.</returns>
public static Tuple<BaseSonogram, double[,], List<Plot>, List<AcousticEvent>, TimeSpan> Analysis(FileInfo fiSegmentOfSourceFile, AnalysisSettings analysisSettings, int originalSampleRate, TimeSpan segmentStartOffset)
{
    Dictionary<string, string> configDict = analysisSettings.ConfigDict;
    int originalAudioNyquist = originalSampleRate / 2; // original sample rate can be anything 11.0-44.1 kHz.

    //set default values - ignore those set by user
    int frameSize = 32;
    double windowOverlap = 0.3;

    int xCorrelationLength = 256; //for Xcorrelation - 256 frames @801 = 320ms, almost 1/3 second.
    //int xCorrelationLength = 128; //for Xcorrelation - 128 frames @801 = 160ms, almost 1/6 second.
    //int xCorrelationLength = 64; //for Xcorrelation - 64 frames @128 = 232ms, almost 1/4 second.
    //int xCorrelationLength = 16; //for Xcorrelation - 16 frames @128 = 232ms, almost 1/4 second.
    double dBThreshold = 12.0;

    // read frog data to datatable
    var dt = CsvTools.ReadCSVToTable(configDict[key_FROG_DATA], true); // read file contining parameters of frog calls to a table

    // Detection thresholds and event duration/periodicity bounds from user config.
    double intensityThreshold = double.Parse(configDict[AnalysisKeys.IntensityThreshold]); //in 0-1
    double minDuration = double.Parse(configDict[AnalysisKeys.MinDuration]); // seconds
    double maxDuration = double.Parse(configDict[AnalysisKeys.MaxDuration]); // seconds
    double minPeriod = double.Parse(configDict[AnalysisKeys.MinPeriodicity]); // seconds
    double maxPeriod = double.Parse(configDict[AnalysisKeys.MaxPeriodicity]); // seconds

    AudioRecording recording = new AudioRecording(fiSegmentOfSourceFile.FullName);
    // NOTE(review): this null check can never trigger — 'new' either returns an
    // instance or throws. Kept for fidelity; failure handling presumably belongs
    // around the constructor call.
    if (recording == null)
    {
        LoggedConsole.WriteLine("AudioRecording == null. Analysis not possible.");
        return(null);
    }

    //i: MAKE SONOGRAM
    SonogramConfig sonoConfig = new SonogramConfig(); //default values config
    sonoConfig.SourceFName = recording.BaseName;
    sonoConfig.WindowSize = frameSize;
    sonoConfig.WindowOverlap = windowOverlap;
    //sonoConfig.NoiseReductionType = SNR.Key2NoiseReductionType("NONE");
    sonoConfig.NoiseReductionType = SNR.KeyToNoiseReductionType("STANDARD"); //must do noise removal
    TimeSpan tsRecordingtDuration = recording.Duration;
    int sr = recording.SampleRate;
    double freqBinWidth = sr / (double)sonoConfig.WindowSize;
    double frameOffset = sonoConfig.GetFrameOffset(sr);
    double framesPerSecond = 1 / frameOffset;
    BaseSonogram sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);

    //iii: GET TRACKS
    int nhLimit = 3; //limit of neighbourhood around maximum
    // 'peaks' is computed but not used below.
    var peaks = DataTools.GetPeakValues(sonogram.DecibelsPerFrame);
    var tuple = SpectralTrack.GetSpectralMaxima(sonogram.DecibelsPerFrame, sonogram.Data, dBThreshold, nhLimit);
    var maxFreqArray = tuple.Item1; //array (one element per frame) indicating which freq bin has max amplitude.
    var hitsMatrix = tuple.Item2;
    int herzOffset = 0;
    int maxFreq = 6000; // upper frequency bound for track extraction (Hz)
    var tracks = SpectralTrack.GetSpectralTracks(maxFreqArray, framesPerSecond, freqBinWidth, herzOffset, SpectralTrack.MIN_TRACK_DURATION, SpectralTrack.MAX_INTRASYLLABLE_GAP, maxFreq);

    double severity = 0.5;
    double dynamicRange = 60; // deciBels above background noise. BG noise has already been removed from each bin.

    // convert sonogram to a list of frequency bin arrays
    var listOfFrequencyBins = SpectrogramTools.Sonogram2ListOfFreqBinArrays(sonogram, dynamicRange);
    int minFrameLength = SpectralTrack.FrameCountEquivalent(SpectralTrack.MIN_TRACK_DURATION, framesPerSecond);

    // Crop each track and discard those that end up too short.
    // Iterates backwards because items are removed during iteration.
    for (int i = tracks.Count - 1; i >= 0; i--)
    {
        tracks[i].CropTrack(listOfFrequencyBins, severity);
        if (tracks[i].Length < minFrameLength)
        {
            tracks.Remove(tracks[i]);
        }
    } // foreach track

    foreach (SpectralTrack track in tracks) // find any periodicity in the track and calculate its score.
    {
        SpectralTrack.DetectTrackPeriodicity(track, xCorrelationLength, listOfFrequencyBins, sonogram.FramesPerSecond);
    } // foreach track

    int rowCount = sonogram.Data.GetLength(0);
    int MAX_FREQ_BOUND = 6000;
    int topBin = (int)Math.Round(MAX_FREQ_BOUND / freqBinWidth);
    var plots = CreateScorePlots(tracks, rowCount, topBin);

    //iv: CONVERT TRACKS TO ACOUSTIC EVENTS
    List<AcousticEvent> frogEvents = SpectralTrack.ConvertTracks2Events(tracks, segmentStartOffset);

    // v: GET FROG IDs
    //var frogEvents = new List<AcousticEvent>();
    foreach (AcousticEvent ae in frogEvents)
    {
        // Oscillation rate is the reciprocal of the detected period.
        double oscRate = 1 / ae.Periodicity;

        // ae.DominantFreq
        // ae.Score
        // ae.Duration
        //ClassifyFrogEvent(ae);
        string[] names = ClassifyFrogEvent(ae.DominantFreq, oscRate, dt);
        ae.Name = names[0];
        ae.Name2 = names[1];
    }

    return(Tuple.Create(sonogram, hitsMatrix, plots, frogEvents, tsRecordingtDuration));
} //Analysis()
/// <summary>
/// Scores a table of predicted kiwi-call events against a ground-truth annotation table.
/// A prediction counts as a true positive when an unused ground-truth event of the same sex
/// starts within [start-10s, start+20s] of it; unmatched predictions are false positives and
/// unmatched ground-truth events become false-negative rows. Logs summary statistics and an
/// ROC curve, and returns the combined per-event result table.
/// </summary>
/// <param name="fiPredictions">CSV of predicted events; first row is a header.</param>
/// <param name="fiGroundTruth">CSV of ground-truth events in "Andrew's" (Raven-style) column layout.</param>
/// <returns>DataTable with ROC_HEADERS columns: one row per prediction plus one per missed ground-truth event.</returns>
public static DataTable CalculateRecallPrecision(FileInfo fiPredictions, FileInfo fiGroundTruth)
{
    // Column-name constants shared between the output table and the ground-truth table.
    string header_trueSex = "truSex";
    string header_predictedSex = "preSex";
    string header_Harmonics = "Harmonics";
    string header_Quality = "Quality";
    string[] ROC_HEADERS = { AnalysisKeys.EventStartAbs,    //typeof(double)
                             AnalysisKeys.EventStartMin,
                             AnalysisKeys.EventStartSec,
                             AnalysisKeys.EventIntensity,
                             LSKiwiHelper.key_GRID_SCORE,
                             LSKiwiHelper.key_DELTA_SCORE,
                             LSKiwiHelper.key_CHIRP_SCORE,
                             LSKiwiHelper.key_PEAKS_SNR_SCORE,
                             LSKiwiHelper.key_BANDWIDTH_SCORE,
                             AnalysisKeys.EventScore,
                             AnalysisKeys.EventNormscore,
                             header_predictedSex, header_Harmonics, header_trueSex, header_Quality, "TP", "FP","FN", };
    //string[] ROC_HEADERS = { "startSec", "min", "secOffset", "intensity", "gridScore", "deltaScore", "chirpScore", "PeaksSnrScore" "bwScore", "comboScore", "normScore", "preSex", "Harmonics", "truSex", "Quality", "TP", "FP", "FN"};

    // Column types for ROC_HEADERS, in the same order (11 doubles, then sex/harmonics/quality/counts).
    Type[] ROC_COL_TYPES = { typeof(double), typeof(double), typeof(double), typeof(double), typeof(double), typeof(double), typeof(double), typeof(double),
                             typeof(double), typeof(double), typeof(double), typeof(string), typeof(int), typeof(string), typeof(int), typeof(int), typeof(int), typeof(int) };

    //ANDREW'S HEADERS: Selection, View, Channel, Begin Time (s), End Time (s), Low Freq (Hz), High Freq (Hz), Begin File, Species, Sex, Harmonics, Quality
    Type[] ANDREWS_TYPES = { typeof(string), typeof(string), typeof(int), typeof(double), typeof(double), typeof(double), typeof(double), typeof(string), typeof(string), typeof(string), typeof(int), typeof(int) };

    bool isFirstRowHeader = true;
    var dtGroundTruth = CsvTools.ReadCSVToTable(fiGroundTruth.FullName, isFirstRowHeader, ANDREWS_TYPES);
    var dtPredictions = CsvTools.ReadCSVToTable(fiPredictions.FullName, isFirstRowHeader);
    // Adjacent predictions of the same call are merged so one call is not counted twice.
    dtPredictions = LSKiwiHelper.MergeAdjacentPredictions(dtPredictions);

    //var weights = LSKiwiHelper.GetFeatureWeights(); //to try different weightings.
    //string colName = "Species";
    //string value = "LSK";
    //DataTableTools.DeleteRows(dtADResults, colName, value); //delete rows where Species name is not "LSK"

    var dtOutput = DataTableTools.CreateTable(ROC_HEADERS, ROC_COL_TYPES);

    int TP = 0;
    int FP = 0;
    int FN = 0;

    // Pass 1: classify every prediction as TP or FP against the ground truth.
    foreach (DataRow myRow in dtPredictions.Rows)
    {
        double myStartSecAbs = (double)myRow[AnalysisKeys.EventStartAbs];
        double startMin = (double)myRow[AnalysisKeys.EventStartMin];
        double startSecOffset = (double)myRow[AnalysisKeys.EventStartSec];
        double intensityScore = (double)myRow[AnalysisKeys.EventIntensity];
        string name = (string)myRow[AnalysisKeys.EventName];
        //double snrScore = (double)myRow[LSKiwiHelper.key_PEAKS_SNR_SCORE];
        //double sdPeakScore = (double)myRow[LSKiwiHelper.key_PEAKS_STD_SCORE]; //standard deviation of peak snr's
        //double periodicityScore = (double)myRow[LSKiwiHelper.key_DELTA_SCORE];
        double gridScore = (double)myRow[LSKiwiHelper.key_GRID_SCORE];
        double deltScore = (double)myRow[LSKiwiHelper.key_DELTA_SCORE];
        double chrpScore = (double)myRow[LSKiwiHelper.key_CHIRP_SCORE];
        double peakSnrScore = (double)myRow[LSKiwiHelper.key_PEAKS_SNR_SCORE]; //average peak
        double bandWidthScore = (double)myRow[LSKiwiHelper.key_BANDWIDTH_SCORE];
        //double comboScore = (double)myRow[LSKiwiHelper.key_COMBO_SCORE];
        double eventScore = (double)myRow[AnalysisKeys.EventScore];
        double normScore = (double)myRow[AnalysisKeys.EventNormscore];

        // Predicted sex is encoded as a "(m)"/"(f)" suffix on the event name.
        string predictedSex;
        if (name.EndsWith("(m)"))
        {
            predictedSex = "M";
        }
        else if (name.EndsWith("(f)"))
        {
            predictedSex = "F";
        }
        else
        {
            predictedSex = "???";
        }

        //List<string[]> excludeRules = LSKiwiHelper.GetExcludeRules();
        //if (FilterEvent(myRow, excludeRules) == null) continue;

        DataRow opRow = dtOutput.NewRow();
        opRow[AnalysisKeys.EventStartAbs] = myStartSecAbs;
        opRow[AnalysisKeys.EventStartMin] = startMin;
        opRow[AnalysisKeys.EventStartSec] = startSecOffset;
        opRow[AnalysisKeys.EventIntensity] = intensityScore;
        opRow[LSKiwiHelper.key_GRID_SCORE] = gridScore;
        opRow[LSKiwiHelper.key_DELTA_SCORE] = deltScore;
        opRow[LSKiwiHelper.key_CHIRP_SCORE] = chrpScore;
        opRow[LSKiwiHelper.key_PEAKS_SNR_SCORE] = peakSnrScore;
        opRow[LSKiwiHelper.key_BANDWIDTH_SCORE] = bandWidthScore;
        //opRow[LSKiwiHelper.key_COMBO_SCORE] = comboScore;
        opRow[AnalysisKeys.EventScore] = eventScore;
        opRow[AnalysisKeys.EventNormscore] = normScore;
        opRow[header_Quality] = 0; //fill in with blanks
        opRow[header_predictedSex] = predictedSex;
        opRow[header_trueSex] = "???";
        opRow["TP"] = 0;
        opRow["FP"] = 0;
        opRow["FN"] = 0;

        // Search the ground truth for a matching, not-yet-claimed event.
        bool isTP = false;
        foreach (DataRow trueEvent in dtGroundTruth.Rows)
        {
            double trueStart = (double)trueEvent["Begin Time (s)"];
            string trueSex = (string)trueEvent["Sex"];
            if (trueStart >= myStartSecAbs - 10 && trueStart <= myStartSecAbs + 20 && predictedSex == trueSex) //myStart is close to trueStart AND same sex THERFORE TRUE POSTIIVE
            {
                isTP = true;
                trueEvent["Begin Time (s)"] = double.NaN; //mark so that will not use again
                opRow[header_Quality] = trueEvent[header_Quality];
                opRow[header_trueSex] = trueEvent["Sex"];
                opRow[header_Harmonics] = trueEvent[header_Harmonics];
                break;
            }
        } //foreach - AD loop

        if (isTP)
        {
            opRow["TP"] = 1;
            TP++;
        }
        else //FALSE POSITIVE
        {
            opRow["FP"] = 1;
            FP++;
        }
        dtOutput.Rows.Add(opRow);
    } //foreach - MY loop

    //now add in the false negatives
    // Pass 2: any ground-truth event whose start was not NaN-ed out above was never matched.
    foreach (DataRow trueEvent in dtGroundTruth.Rows)
    {
        double trueStart = (double)trueEvent["Begin Time (s)"];
        if (!double.IsNaN(trueStart))
        {
            DataRow row = dtOutput.NewRow();
            row[AnalysisKeys.EventStartAbs] = trueStart;
            row[AnalysisKeys.EventStartMin] = (int)(trueStart / 60);
            row[AnalysisKeys.EventStartSec] = trueStart % 60;
            //row[Keys.EVENT_INTENSITY] = 0.0;
            //row[LSKiwiHelper.key_PEAKS_SNR_SCORE] = 0.0;
            //row[LSKiwiHelper.key_PEAKS_STD_SCORE] = 0.0;
            //row[LSKiwiHelper.key_DELTA_SCORE] = 0.0;
            //row[LSKiwiHelper.key_BANDWIDTH_SCORE] = 0.0;
            //row[Keys.EVENT_NORMSCORE] = 0.0;
            //row[LSKiwiHelper.key_NEW_COMBO_SCORE] = 0.0;
            row[header_predictedSex] = "???";
            row["Harmonics"] = trueEvent["Harmonics"];
            row["Quality"] = trueEvent["Quality"];
            row[header_trueSex] = trueEvent["Sex"];
            row["TP"] = 0;
            row["FP"] = 0;
            row["FN"] = 1;
            dtOutput.Rows.Add(row);
            FN++;
        }
    }

    double recall = TP / (double)(TP + FN);
    // NOTE(review): TP/(TP+FP) is conventionally called PRECISION, not specificity — confirm
    // the intended metric before relying on the label printed below.
    double specificity = TP / (double)(TP + FP);
    LoggedConsole.WriteLine("TP={0}, FP={1}, FN={2}", TP, FP, FN);
    LoggedConsole.WriteLine("RECALL={0:f3}, SPECIFICITY={1:f3}", recall, specificity);

    //use normalised score as the threshold to determine area under ROC curve
    int totalPositiveCount = dtGroundTruth.Rows.Count;
    int totalNegativeCount = FP;
    string sortString = AnalysisKeys.EventNormscore + " desc";
    ROCCurve(dtOutput, totalPositiveCount, totalNegativeCount, sortString); //write ROC area above curve

    return(dtOutput);
} //CalculateRecallPrecision()
/// <summary>
/// Downloads missing daily price history from the Yahoo Finance CSV endpoint for every
/// company reported by the "YahooDataNeeds" stored procedure, and bulk-inserts the rows
/// through the "InsertYahooHistoricalData" stored procedure inside a transaction.
/// </summary>
public override void Execute()
{
    // Yahoo CSV uses '.' decimal separators; force a compatible culture for parsing downstream.
    Thread.CurrentThread.CurrentCulture = new System.Globalization.CultureInfo("en-US");

    // Open once and hold the connection for the whole run: both
    // SqlCommandBuilder.DeriveParameters and BeginTransaction require an open connection,
    // and the using-block guarantees disposal on any exit path.
    using (SqlConnection conn = new SqlConnection(Connstring))
    {
        conn.Open();

        // Ask the database which companies need data and since when.
        SqlCommand getNeedsCmd = new SqlCommand("YahooDataNeeds", conn);
        getNeedsCmd.CommandType = CommandType.StoredProcedure;
        SqlDataAdapter selectAdapter = new SqlDataAdapter(getNeedsCmd);
        DataTable dataNeeds = new DataTable();
        selectAdapter.Fill(dataNeeds);

        foreach (DataRow companyRequestRow in dataNeeds.Rows)
        {
            DateTime fromDate = (DateTime)companyRequestRow["NeedSince"];
            DateTime now = DateTime.Now;
            string abbreviation = companyRequestRow["Abbreviation"] as string;
            int companyId = (int)companyRequestRow["CompanyId"];

            // Yahoo's a/d parameters are ZERO-BASED months expressed in decimal.
            // Bug fix: the old code formatted them with "X2" (hexadecimal), producing e.g.
            // "0A" for November and breaking every request from October onward.
            string csv = CsvTools.GetCsvFromUrl(String.Format(
                "http://ichart.finance.yahoo.com/table.csv?s={0}&a={1}&b={2}&c={3}&d={4}&e={5}&f={6}&g=d&ignore=.csv",
                abbreviation, fromDate.Month - 1, fromDate.Day, fromDate.Year, now.Month - 1, now.Day, now.Year));

            DataTable dt = CsvTools.Csv2DataTable(csv);
            dt.Columns[0].ColumnName = "Date";
            dt.Columns[1].ColumnName = "Open";
            dt.Columns[2].ColumnName = "High";
            dt.Columns[3].ColumnName = "Low";
            dt.Columns[4].ColumnName = "Close";
            dt.Columns[5].ColumnName = "Volume";
            dt.Columns[6].ColumnName = "AdjClose";

            SqlCommand insertCommand = new SqlCommand("InsertYahooHistoricalData", conn);
            insertCommand.CommandType = CommandType.StoredProcedure;
            SqlCommandBuilder.DeriveParameters(insertCommand); // queries the proc's parameter list from the server
            SqlCommandTools.MapParamsToColumnsExact(dt, insertCommand);

            // Re-type the parsed (string) CSV columns and tag every row with the company id.
            DataTable typedDt = dt.Clone();
            typedDt.Columns[0].DataType = typeof(DateTime);
            typedDt.Columns[1].DataType = typeof(float);
            typedDt.Columns[2].DataType = typeof(float);
            typedDt.Columns[3].DataType = typeof(float);
            typedDt.Columns[4].DataType = typeof(float);
            typedDt.Columns[5].DataType = typeof(long);
            typedDt.Columns[6].DataType = typeof(float);
            DataColumn companyColumn = new DataColumn("CompanyId", typeof(int));
            typedDt.Columns.Add(companyColumn);
            foreach (DataRow row in dt.Rows)
            {
                typedDt.ImportRow(row);
            }
            foreach (DataRow typedRow in typedDt.Rows)
            {
                typedRow["CompanyId"] = companyId;
            }

            insertCommand.Connection = conn;
            SqlDataAdapter insertAdapter = new SqlDataAdapter();
            insertAdapter.InsertCommand = insertCommand;

            SqlTransaction transaction = conn.BeginTransaction(IsolationLevel.RepeatableRead);
            try
            {
                // Bug fix: a command executed while a transaction is open on its connection
                // must be enlisted explicitly, otherwise ADO.NET throws at execution time.
                insertCommand.Transaction = transaction;
                // NOTE(review): LastId issues its own command on this connection — confirm the
                // helper enlists in (or tolerates) the open transaction.
                _lastOldId = SqlCommandTools.LastId(conn, "YahooHistoricalPrices");
                insertAdapter.UpdateBatchSize = 1000;
                insertAdapter.InsertCommand.UpdatedRowSource = UpdateRowSource.None;
                insertAdapter.Update(typedDt);
                transaction.Commit();
                _lastInsertedId = SqlCommandTools.LastId(conn, "YahooHistoricalPrices");
            }
            catch
            {
                transaction.Rollback();
                throw; // preserve the original stack trace
            }
        }
    }
}
} //AddContext2Table()

/// <summary>
/// Loads an analysis-results CSV and builds a second, display-oriented table from it.
/// The columns to display come from the config file's DISPLAY_COLUMNS entry when present,
/// otherwise all source columns are shown; every display column is typed double and columns
/// absent from the source are zero-filled. The source table is sorted by the first
/// recognised ordering column before being returned.
/// </summary>
/// <param name="fiCsvFile">CSV file of results; first row is a header.</param>
/// <param name="fiConfigFile">Optional config file that may list display columns; may be null.</param>
/// <returns>Tuple of (sorted source table, normalised display table), or null when the CSV is empty.</returns>
public Tuple <DataTable, DataTable> ProcessCsvFile(FileInfo fiCsvFile, FileInfo fiConfigFile)
{
    DataTable dt = CsvTools.ReadCSVToTable(fiCsvFile.FullName, true); //get original data table
    if ((dt == null) || (dt.Rows.Count == 0))
    {
        return(null);
    }

    // Collect the source table's column headers.
    // (A parallel list of column types was gathered here previously but never used — removed.)
    var dtHeaders = new List <string>();
    foreach (DataColumn col in dt.Columns)
    {
        dtHeaders.Add(col.ColumnName);
    }

    List <string> displayHeaders = null;
    //check if config file contains list of display headers
    if (fiConfigFile != null)
    {
        var configuration = new ConfigDictionary(fiConfigFile.FullName);
        Dictionary <string, string> configDict = configuration.GetTable();
        if (configDict.ContainsKey(Keys.DISPLAY_COLUMNS))
        {
            displayHeaders = configDict[Keys.DISPLAY_COLUMNS].Split(',').ToList();
        }
    }

    //if config file does not exist or does not contain display headers then use the original headers
    if (displayHeaders == null)
    {
        displayHeaders = dtHeaders; //use existing headers if user supplies none.
    }

    //now determine how to display tracks in display datatable
    Type[] displayTypes = new Type[displayHeaders.Count];
    bool[] canDisplay = new bool[displayHeaders.Count];
    for (int i = 0; i < displayTypes.Length; i++)
    {
        displayTypes[i] = typeof(double);
        canDisplay[i] = dtHeaders.Contains(displayHeaders[i]);
    }

    DataTable table2Display = DataTableTools.CreateTable(displayHeaders.ToArray(), displayTypes);
    foreach (DataRow row in dt.Rows)
    {
        DataRow newRow = table2Display.NewRow();
        for (int i = 0; i < canDisplay.Length; i++)
        {
            if (canDisplay[i])
            {
                newRow[displayHeaders[i]] = row[displayHeaders[i]];
            }
            else
            {
                newRow[displayHeaders[i]] = 0.0; // column missing from source: zero-fill
            }
        }
        table2Display.Rows.Add(newRow);
    }

    //order the table if possible
    if (dt.Columns.Contains(AudioAnalysisTools.Keys.EVENT_START_ABS))
    {
        dt = DataTableTools.SortTable(dt, AudioAnalysisTools.Keys.EVENT_START_ABS + " ASC");
    }
    else if (dt.Columns.Contains(AudioAnalysisTools.Keys.EVENT_COUNT))
    {
        dt = DataTableTools.SortTable(dt, AudioAnalysisTools.Keys.EVENT_COUNT + " ASC");
    }
    else if (dt.Columns.Contains(AudioAnalysisTools.Keys.INDICES_COUNT))
    {
        dt = DataTableTools.SortTable(dt, AudioAnalysisTools.Keys.INDICES_COUNT + " ASC");
    }
    else if (dt.Columns.Contains(AudioAnalysisTools.Keys.START_MIN))
    {
        dt = DataTableTools.SortTable(dt, AudioAnalysisTools.Keys.START_MIN + " ASC");
    }

    table2Display = NormaliseColumnsOfDataTable(table2Display);
    return(System.Tuple.Create(dt, table2Display));
} // ProcessCsvFile()
/// <summary>
/// A WRAPPER AROUND THE analyser.Analyse(analysisSettings) METHOD.
/// To be called as an executable with command line arguments:
/// args[0] = source recording, args[1] = config file, args[2] = output directory;
/// remaining arguments are "-name:value" options (-tmpwav, -events, -indices, -sgram, -start, -duration).
/// </summary>
/// <param name="args">Command line arguments as described above.</param>
/// <returns>0 on success; 1 = too few arguments; 2 = a required path does not exist;
/// 3 = requested segment exceeds 10 minutes; -993 = analysis produced no data.</returns>
public static int Execute(string[] args)
{
    int status = 0;
    if (args.Length < 4)
    {
        Console.WriteLine("Require at least 4 command line arguments.");
        status = 1;
        return(status);
    }

    //GET FIRST THREE OBLIGATORY COMMAND LINE ARGUMENTS
    string recordingPath = args[0];
    string configPath = args[1];
    string outputDir = args[2];
    FileInfo fiSource = new FileInfo(recordingPath);
    if (!fiSource.Exists)
    {
        Console.WriteLine("Source file does not exist: " + recordingPath);
        status = 2;
        return(status);
    }
    FileInfo fiConfig = new FileInfo(configPath);
    if (!fiConfig.Exists)
    {
        Console.WriteLine("Config file does not exist: " + configPath);
        status = 2;
        return(status);
    }
    DirectoryInfo diOP = new DirectoryInfo(outputDir);
    if (!diOP.Exists)
    {
        Console.WriteLine("Output directory does not exist: " + outputDir);
        status = 2;
        return(status);
    }

    //INIT SETTINGS
    AnalysisSettings analysisSettings = new AnalysisSettings();
    analysisSettings.ConfigFile = fiConfig;
    analysisSettings.AnalysisRunDirectory = diOP;
    analysisSettings.AudioFile = null;
    analysisSettings.EventsFile = null;
    analysisSettings.IndicesFile = null;
    analysisSettings.ImageFile = null;
    TimeSpan tsStart = new TimeSpan(0, 0, 0);
    TimeSpan tsDuration = new TimeSpan(0, 0, 0);
    var configuration = new ConfigDictionary(analysisSettings.ConfigFile.FullName);
    analysisSettings.ConfigDict = configuration.GetTable();

    //PROCESS REMAINDER OF THE OPTIONAL COMMAND LINE ARGUMENTS
    for (int i = 3; i < args.Length; i++)
    {
        // Bug fix: split on the FIRST ':' only, so option values that themselves
        // contain a colon (e.g. absolute Windows paths) are not truncated.
        string[] parts = args[i].Split(new char[] { ':' }, 2);
        if (parts.Length < 2)
        {
            continue; // malformed option with no value; previously threw IndexOutOfRangeException
        }
        if (parts[0].StartsWith("-tmpwav"))
        {
            var outputWavPath = Path.Combine(outputDir, parts[1]);
            analysisSettings.AudioFile = new FileInfo(outputWavPath);
        }
        else if (parts[0].StartsWith("-events"))
        {
            string eventsPath = Path.Combine(outputDir, parts[1]);
            analysisSettings.EventsFile = new FileInfo(eventsPath);
        }
        else if (parts[0].StartsWith("-indices"))
        {
            string indicesPath = Path.Combine(outputDir, parts[1]);
            analysisSettings.IndicesFile = new FileInfo(indicesPath);
        }
        else if (parts[0].StartsWith("-sgram"))
        {
            string sonoImagePath = Path.Combine(outputDir, parts[1]);
            analysisSettings.ImageFile = new FileInfo(sonoImagePath);
        }
        else if (parts[0].StartsWith("-start"))
        {
            int s = Int32.Parse(parts[1]);
            tsStart = new TimeSpan(0, 0, s);
        }
        else if (parts[0].StartsWith("-duration"))
        {
            int s = Int32.Parse(parts[1]);
            tsDuration = new TimeSpan(0, 0, s);
            if (tsDuration.TotalMinutes > 10)
            {
                Console.WriteLine("Segment duration cannot exceed 10 minutes.");
                status = 3;
                return(status);
            }
        }
    }

    //EXTRACT THE REQUIRED RECORDING SEGMENT
    // NOTE(review): tempF is null when "-tmpwav" was not supplied — confirm that
    // AudioFilePreparer.PrepareFile tolerates a null target file.
    FileInfo tempF = analysisSettings.AudioFile;
    if (tsDuration.TotalSeconds == 0) //Process entire file
    {
        AudioFilePreparer.PrepareFile(fiSource, tempF, new AudioUtilityRequest {
            SampleRate = LSKiwiHelper.RESAMPLE_RATE
        });
    }
    else
    {
        AudioFilePreparer.PrepareFile(fiSource, tempF, new AudioUtilityRequest {
            SampleRate = LSKiwiHelper.RESAMPLE_RATE, OffsetStart = tsStart, OffsetEnd = tsStart.Add(tsDuration)
        });
    }

    //DO THE ANALYSIS
    //#############################################################################################################################################
    IAnalyser analyser = new LSKiwi2();
    AnalysisResult result = analyser.Analyse(analysisSettings);
    DataTable dt = result.Data;
    //#############################################################################################################################################

    //ADD IN ADDITIONAL INFO TO RESULTS TABLE
    if (dt != null)
    {
        AddContext2Table(dt, tsStart, result.AudioDuration);
        // Bug fix: EventsFile is only set when "-events" was supplied; the old code
        // dereferenced it unconditionally and threw NullReferenceException otherwise.
        if (analysisSettings.EventsFile != null)
        {
            CsvTools.DataTable2CSV(dt, analysisSettings.EventsFile.FullName);
        }
    }
    else
    {
        return(-993); //error!!
    }
    return(status);
}
/// <summary>
/// IAnalyser entry point: runs Analysis() on the prepared audio segment, then writes
/// whichever outputs the caller requested (events CSV, per-minute indices CSV, sonogram
/// image) and returns the populated AnalysisResult.
/// </summary>
/// <param name="analysisSettings">Paths and configuration for this segment; null output
/// file members mean "do not write that output".</param>
/// <returns>AnalysisResult whose Data is null when the analysis produced nothing.</returns>
public AnalysisResult Analyse(AnalysisSettings analysisSettings)
{
    //var configuration = new ConfigDictionary(analysisSettings.ConfigFile.FullName);
    //Dictionary<string, string> configDict = configuration.GetTable();
    var fiAudioF = analysisSettings.AudioFile;
    var diOutputDir = analysisSettings.AnalysisRunDirectory; // currently unused here; kept for parity with other analysers

    var analysisResults = new AnalysisResult();
    analysisResults.AnalysisIdentifier = this.Identifier;
    analysisResults.SettingsUsed = analysisSettings;
    analysisResults.Data = null;

    //######################################################################
    var results = Analysis(fiAudioF, analysisSettings.ConfigDict);
    //######################################################################

    if (results == null)
    {
        return(analysisResults); //nothing to process
    }
    // Unpack the analysis tuple: (sonogram, hits matrix, score plots, events, recording duration).
    var sonogram = results.Item1;
    var hits = results.Item2;
    var scores = results.Item3;
    var predictedEvents = results.Item4;
    var recordingTimeSpan = results.Item5;
    analysisResults.AudioDuration = recordingTimeSpan;

    DataTable dataTableOfEvents = null;

    if ((predictedEvents != null) && (predictedEvents.Count != 0))
    {
        string analysisName = analysisSettings.ConfigDict[AudioAnalysisTools.Keys.ANALYSIS_NAME]; // unused below; event names were set earlier
        string fName = Path.GetFileNameWithoutExtension(fiAudioF.Name);
        // Stamp every event with its source file so merged result tables stay traceable.
        foreach (AcousticEvent ev in predictedEvents)
        {
            ev.SourceFileName = fName;
            //ev.Name = analysisName; //name was added previously
            ev.SourceFileDuration = recordingTimeSpan.TotalSeconds;
        }
        //write events to a data table to return.
        dataTableOfEvents = WriteEvents2DataTable(predictedEvents);
        string sortString = AudioAnalysisTools.Keys.EVENT_START_SEC + " ASC";
        dataTableOfEvents = DataTableTools.SortTable(dataTableOfEvents, sortString); //sort by start time before returning
    }

    if ((analysisSettings.EventsFile != null) && (dataTableOfEvents != null))
    {
        CsvTools.DataTable2CSV(dataTableOfEvents, analysisSettings.EventsFile.FullName);
    }

    if ((analysisSettings.IndicesFile != null) && (dataTableOfEvents != null))
    {
        double scoreThreshold = 0.1; // events scoring below this do not contribute to the indices
        TimeSpan unitTime = TimeSpan.FromSeconds(60); //index for each time span of i minute
        var indicesDT = ConvertEvents2Indices(dataTableOfEvents, unitTime, recordingTimeSpan, scoreThreshold);
        CsvTools.DataTable2CSV(indicesDT, analysisSettings.IndicesFile.FullName);
    }

    //save image of sonograms
    if ((sonogram != null) && (analysisSettings.ImageFile != null))
    {
        var fileExists = File.Exists(analysisSettings.ImageFile.FullName); // unused; left over from earlier overwrite handling
        string imagePath = analysisSettings.ImageFile.FullName; // unused; the full name is re-read below
        double eventThreshold = 0.1;
        Image image = DrawSonogram(sonogram, hits, scores, predictedEvents, eventThreshold);
        //image.Save(imagePath, ImageFormat.Png);
        // Serialise image writes: analysers may run concurrently over segments of one file.
        lock (imageWriteLock)
        {
            //try
            //{
            image.Save(analysisSettings.ImageFile.FullName, ImageFormat.Png);
            //}
            //catch (Exception ex)
            //{
            //}
        }
    }

    analysisResults.Data = dataTableOfEvents;
    analysisResults.ImageFile = analysisSettings.ImageFile;
    analysisResults.AudioDuration = recordingTimeSpan;
    //result.DisplayItems = { { 0, "example" }, { 1, "example 2" }, }
    //result.OutputFiles = { { "exmaple file key", new FileInfo("Where's that file?") } }
    return(analysisResults);
} //Analyse()
/// <summary>
/// Receives two multipart-form CSV files ("ADDR_CSV" = addresses, "TIME_CSV" = opening
/// times), parses and validates the addresses (auto-correcting categories/tags/city
/// details via the repository), attaches the pivoted opening-time data, and returns the
/// whole result as JSON. Parse errors are captured per row in AddressCSV.ErrorMessage.
/// </summary>
/// <returns>200 OK with addresses + validation results, or 400 on an unhandled exception.</returns>
public async Task <IActionResult> UploadCSV()
{
    string errorMessage = null;
    var addresses = new List <AddressCSV>();
    var lines = new List <string[]>();
    IFormFile fileAddrCSV;
    IFormFile fileTimeCSV;
    string[] header = null;
    DaysCsv[] daysCsv;
    PropertyInfo[] props;
    List <CategoryCSV> csvCategories = null;
    List <TagCSV> csvTags = null;
    List <CityDetailCSV> csvCityDetails = null;
    CategoryCSV[] processedCsvCatgs = null;
    TagCSV[] processedCsvTags = null;
    CityDetailCSV[] processedCsvCityDetails = null;
    try
    {
        fileAddrCSV = Request.Form.Files.FirstOrDefault(l => l.Name == "ADDR_CSV");
        fileTimeCSV = Request.Form.Files.FirstOrDefault(l => l.Name == "TIME_CSV");
        if (fileAddrCSV == null)
        {
            throw new NullReferenceException("ADDR_CSV");
        }
        props = typeof(AddressCSV).GetProperties();
        // NOTE(review): Encoding.UTF7 is obsolete and insecure (SYSLIB0001); kept only
        // because existing upload files may depend on it — confirm before moving to UTF8.
        using (var reader = new StreamReader(fileAddrCSV.OpenReadStream(), Encoding.UTF7))
        {
            header = reader.ReadLine().Split(';');
            if (header.Length != (props.Length + AddressCSV.ADDITIONAL_PROPERTIES_COUNT_COMPARE))
            {
                errorMessage = $"There are {header.Length} column instead of {props.Length + AddressCSV.ADDITIONAL_PROPERTIES_COUNT_COMPARE}.";
            }
            if (string.IsNullOrEmpty(errorMessage))
            {
                // Accumulate physical lines until the field count matches the header:
                // a record whose field contains a line break spans several physical lines.
                string data = "";
                string[] item = null;
                while (reader.Peek() >= 0)
                {
                    data += reader.ReadLine();
                    item = data.Split(';');
                    if (item.Length == header.Length)
                    {
                        lines.Add(item);
                        data = "";
                    }
                    else
                    {
                        data += "\r\n";
                    }
                }
            }
        }
        if (string.IsNullOrEmpty(errorMessage))
        {
            csvCategories = new List <CategoryCSV>();
            csvTags = new List <TagCSV>();
            csvCityDetails = new List <CityDetailCSV>();
            foreach (var item in lines)
            {
                var addr = new AddressCSV();
                try
                {
                    // Map the semicolon-separated fields onto the AddressCSV properties.
                    // The string argument is the human-readable column label used in error messages.
                    addr.AddressUUID = GetColumnValue <string>(false, "Address UUID", item[0]);
                    addr.AddressName = GetColumnValue <string>(true, "Address Name", item[1]);
                    addr.ClientName = GetColumnValue <string>(false, "Client Name", item[2], addr.AddressName);
                    addr.CategoryName = GetColumnValue <string>(true, "Category Name", item[3]);
                    addr.AddressSlug = GetColumnValue <string>(true, "Address Slug", item[4], CsvTools.Slugify(addr.AddressName));
                    addr.CityName = GetColumnValue <string>(true, "City Name", item[5]);
                    addr.Latitude = GetColumnValue <double>(false, "Latitude", item[6]);
                    addr.Longitude = GetColumnValue <double>(false, "Longitude", item[7]);
                    addr.Passport = GetColumnValue <bool>(false, "Passport", item[8]);
                    addr.RecByFb = GetColumnValue <bool>(false, "Recommended By Facebook", item[9]);
                    addr.DescriptionEN = GetColumnValue <string>(false, "Description EN", item[10]);
                    addr.DescriptionFR = GetColumnValue <string>(false, "Description FR", item[11]);
                    addr.PhysicalAddr1 = GetColumnValue <string>(false, "PhysicalAddr 1", item[12]);
                    addr.PhysicalAddr2 = GetColumnValue <string>(false, "PhysicalAddr 2", item[13]);
                    addr.PhysicalAddr3 = GetColumnValue <string>(false, "PhysicalAddr 3", item[14]);
                    addr.PhysicalAddr4 = GetColumnValue <string>(false, "PhysicalAddr 4", item[15]);
                    addr.PhysicalAddr5 = GetColumnValue <string>(false, "PhysicalAddr 5", item[16]);
                    addr.PhysicalAddr6 = GetColumnValue <string>(false, "PhysicalAddr 6", item[17]);
                    addr.PhoneNumber1 = GetColumnValue <string>(false, "PhoneNumber 1", item[18]);
                    addr.PhoneNumber2 = GetColumnValue <string>(false, "PhoneNumber 2", item[19]);
                    addr.PhoneNumber3 = GetColumnValue <string>(false, "PhoneNumber 3", item[20]);
                    addr.MobileNumber1 = GetColumnValue <string>(false, "MobileNumber 1", item[21]);
                    addr.MobileNumber2 = GetColumnValue <string>(false, "MobileNumber 2", item[22]);
                    addr.MobileNumber3 = GetColumnValue <string>(false, "MobileNumber 3", item[23]);
                    addr.FaxNumber1 = GetColumnValue <string>(false, "Fax Number 1", item[24]);
                    addr.FaxNumber2 = GetColumnValue <string>(false, "Fax Number 2", item[25]);
                    addr.FaxNumber3 = GetColumnValue <string>(false, "Fax Number 3", item[26]);
                    addr.FbPageUrl = GetColumnValue <string>(false, "Facebook Page Url", item[27]);
                    addr.WebsiteUrl1 = GetColumnValue <string>(false, "Website Url 1", item[28]);
                    addr.WebsiteUrl2 = GetColumnValue <string>(false, "Website Url 2", item[29]);
                    addr.WebsiteUrl3 = GetColumnValue <string>(false, "Website Url 3", item[30]);
                    addr.Email1 = GetColumnValue <string>(false, "Email 1", item[31]);
                    addr.Email2 = GetColumnValue <string>(false, "Email 2", item[32]);
                    addr.Email3 = GetColumnValue <string>(false, "Email 3", item[33]);
                    addr.AddressLogoUrl = GetColumnValue <string>(false, "Address Logo Url", item[34]);
                    // NOTE(review): "Imageg" typo kept verbatim in case these labels are matched elsewhere.
                    addr.AddressImgUrl1 = GetColumnValue <string>(false, "Address Imageg Url 1", item[35]);
                    addr.AddressImgUrl2 = GetColumnValue <string>(false, "Address Imageg Url 2", item[36]);
                    addr.AddressImgUrl3 = GetColumnValue <string>(false, "Address Imageg Url 3", item[37]);
                    addr.AddressImgUrl4 = GetColumnValue <string>(false, "Address Imageg Url 4", item[38]);
                    addr.AddressImgUrl5 = GetColumnValue <string>(false, "Address Imageg Url 5", item[39]);
                    addr.AddressImgUrl6 = GetColumnValue <string>(false, "Address Imageg Url 6", item[40]);
                    addr.AddressImgUrl7 = GetColumnValue <string>(false, "Address Imageg Url 7", item[41]);
                    addr.AddressImgUrl8 = GetColumnValue <string>(false, "Address Imageg Url 8", item[42]);
                    addr.AddressImgUrl9 = GetColumnValue <string>(false, "Address Imageg Url 9", item[43]);
                    addr.AddressImgUrl10 = GetColumnValue <string>(false, "Address Imageg Url 10", item[44]);
                    addr.AddressImgUrl11 = GetColumnValue <string>(false, "Address Imageg Url 11", item[45]);
                    addr.AddressImgUrl12 = GetColumnValue <string>(false, "Address Imageg Url 12", item[46]);
                    addr.AddressImgUrl13 = GetColumnValue <string>(false, "Address Imageg Url 13", item[47]);
                    addr.AddressImgUrl14 = GetColumnValue <string>(false, "Address Imageg Url 14", item[48]);
                    addr.AddressImgUrl15 = GetColumnValue <string>(false, "Address Imageg Url 15", item[49]);
                    addr.AddressImgUrl16 = GetColumnValue <string>(false, "Address Imageg Url 16", item[50]);
                    addr.AddressImgUrl17 = GetColumnValue <string>(false, "Address Imageg Url 17", item[51]);
                    addr.AddressImgUrl18 = GetColumnValue <string>(false, "Address Imageg Url 18", item[52]);
                    addr.AddressImgUrl19 = GetColumnValue <string>(false, "Address Imageg Url 19", item[53]);
                    addr.AddressImgUrl20 = GetColumnValue <string>(false, "Address Imageg Url 20", item[54]);
                    addr.AddressDocUrl1 = GetColumnValue <string>(false, "Address Document Url 1", item[55]);
                    addr.AddressDocUrl2 = GetColumnValue <string>(false, "Address Document Url 2", item[56]);
                    addr.AddressDocUrl3 = GetColumnValue <string>(false, "Address Document Url 3", item[57]);
                    // Bug fix: labels for documents 4-10 were all copy-pasted as
                    // "Address Document Url 1", so parse errors reported the wrong column.
                    addr.AddressDocUrl4 = GetColumnValue <string>(false, "Address Document Url 4", item[58]);
                    addr.AddressDocUrl5 = GetColumnValue <string>(false, "Address Document Url 5", item[59]);
                    addr.AddressDocUrl6 = GetColumnValue <string>(false, "Address Document Url 6", item[60]);
                    addr.AddressDocUrl7 = GetColumnValue <string>(false, "Address Document Url 7", item[61]);
                    addr.AddressDocUrl8 = GetColumnValue <string>(false, "Address Document Url 8", item[62]);
                    addr.AddressDocUrl9 = GetColumnValue <string>(false, "Address Document Url 9", item[63]);
                    addr.AddressDocUrl10 = GetColumnValue <string>(false, "Address Document Url 10", item[64]);
                    addr.AddressVideoUrl1 = GetColumnValue <string>(false, "Address Video Url 1", item[65]);
                    addr.AddressVideoUrl2 = GetColumnValue <string>(false, "Address Video Url 2", item[66]);
                    addr.AddressVideoUrl3 = GetColumnValue <string>(false, "Address Video Url 3", item[67]);
                    addr.AddressVideoUrl4 = GetColumnValue <string>(false, "Address Video Url 4", item[68]);
                    addr.AddressVideoUrl5 = GetColumnValue <string>(false, "Address Video Url 5", item[69]);
                    addr.AddressVideoUrl6 = GetColumnValue <string>(false, "Address Video Url 6", item[70]);
                    addr.AddressVideoUrl7 = GetColumnValue <string>(false, "Address Video Url 7", item[71]);
                    addr.AddressVideoUrl8 = GetColumnValue <string>(false, "Address Video Url 8", item[72]);
                    // Bug fix: label was "Address Video Url 19".
                    addr.AddressVideoUrl9 = GetColumnValue <string>(false, "Address Video Url 9", item[73]);
                    addr.AddressVideoUrl10 = GetColumnValue <string>(false, "Address Video Url 10", item[74]);
                    addr.AddressTags = GetColumnValue <string>(false, "Address Tags", item[75]);
                }
                catch (Exception ex)
                {
                    addr = addr == null ? new AddressCSV() : addr;
                    addr.ErrorMessage = $"Error caught for Row {addr.AddressUUID}:\r\n{ex.Message}";
                }
                // AutoCorrectProperties also registers the row's category/tag/city for batch validation below.
                addresses.Add(addr.AutoCorrectProperties(csvCategories, csvTags, csvCityDetails));
            }
            processedCsvCatgs = await _repo.Execute <CategoryCSV[]>(
                "ValidateCsvCategories",
                Helper.JSonCamelSerializeObject(
                    csvCategories.Select(l => l.Simplify()).ToArray()));
            processedCsvTags = await _repo.Execute <TagCSV[]>(
                "ValidateCsvTags",
                Helper.JSonCamelSerializeObject(
                    csvTags.Select(l => l.Simplify()).ToArray()));
            processedCsvCityDetails = await _repo.Execute <CityDetailCSV[]>(
                "ValidateCsvCityDetails",
                Helper.JSonCamelSerializeObject(
                    csvCityDetails.Select(l => l.Simplify()).ToArray()));
            // Attach the pivoted opening-time rows (grouped by day name) to each address.
            daysCsv = this.GetCsvDays(fileTimeCSV);
            if (daysCsv != null && daysCsv.Length > 0)
            {
                foreach (var addr in addresses)
                {
                    addr._DaysCsv = (from d in daysCsv.Where(l => l.AddressUUID == addr.AddressUUID).SelectMany(l => l.Pivot()).OrderBy(l => l.Seqn)
                                     group d by d.Name into g
                                     select new { name = g.Key, values = g.OrderBy(l => l.HourFrom).ThenBy(l => l.HourTo).Select(l => l.Simplify()).ToArray() }).ToArray();
                }
            }
        }
        return(Ok(
            new {
                addresses = addresses.ToArray(),
                error = errorMessage,
                processedCsvCatgs = processedCsvCatgs == null ? null : processedCsvCatgs.Select(l => l.Simplify()).ToArray(),
                processedCsvTags = processedCsvTags == null ? null : processedCsvTags.Select(l => l.Simplify()).ToArray(),
                processedCsvCityDetails = processedCsvCityDetails == null ? null : processedCsvCityDetails.Select(l => l.Simplify()).ToArray()
            }));
    }
    catch (Exception ex)
    {
        return(BadRequestEx(ex));
    }
    finally
    {
        // Manual release of the larger intermediates (original author's pattern, kept as-is).
        errorMessage = null;
        fileAddrCSV = null;
        fileTimeCSV = null;
        header = null;
        props = null;
        daysCsv = null;
        if (csvCategories != null) { csvCategories.Clear(); csvCategories = null; }
        if (csvTags != null) { csvTags.Clear(); csvTags = null; }
        if (csvCityDetails != null) { csvCityDetails.Clear(); csvCityDetails = null; }
        if (addresses != null) { addresses.Clear(); addresses = null; }
        if (lines != null) { lines.Clear(); lines = null; }
    }
}
/// <summary>
/// Loads an analysis-results CSV and derives a display-oriented copy of it.
/// Display columns come from the config file's DisplayColumns entry when available,
/// otherwise all source columns are used; each display column is typed double, and any
/// display column missing from the source is filled with zeros. The source table is
/// sorted by the first recognised ordering column before being returned.
/// </summary>
/// <param name="fiCsvFile">Results CSV; first row is a header.</param>
/// <param name="fiConfigFile">Optional config file naming the display columns; may be null.</param>
/// <returns>Tuple of (sorted source table, display table), or null when the CSV is empty.</returns>
public Tuple <DataTable, DataTable> ProcessCsvFile(FileInfo fiCsvFile, FileInfo fiConfigFile)
{
    DataTable sourceTable = CsvTools.ReadCSVToTable(fiCsvFile.FullName, true);
    if (sourceTable == null || sourceTable.Rows.Count == 0)
    {
        return null; // nothing to display
    }

    // Names of the columns actually present in the source.
    var sourceHeaders = new List <string>();
    foreach (DataColumn column in sourceTable.Columns)
    {
        sourceHeaders.Add(column.ColumnName);
    }

    // The config file may nominate a subset of columns to display.
    List <string> displayHeaders = null;
    if (fiConfigFile != null)
    {
        var configuration = new ConfigDictionary(fiConfigFile.FullName);
        Dictionary <string, string> configDict = configuration.GetTable();
        if (configDict.ContainsKey(AnalysisKeys.DisplayColumns))
        {
            displayHeaders = configDict[AnalysisKeys.DisplayColumns].Split(',').ToList();
        }
    }

    // Fall back to the full source header list when no display list was supplied.
    displayHeaders = displayHeaders ?? sourceHeaders;

    // Every display column is typed double; track which ones exist in the source.
    int columnCount = displayHeaders.Count;
    Type[] displayTypes = new Type[columnCount];
    bool[] existsInSource = new bool[columnCount];
    for (int i = 0; i < columnCount; i++)
    {
        displayTypes[i] = typeof(double);
        existsInSource[i] = sourceHeaders.Contains(displayHeaders[i]);
    }

    DataTable displayTable = DataTableTools.CreateTable(displayHeaders.ToArray(), displayTypes);
    foreach (DataRow sourceRow in sourceTable.Rows)
    {
        DataRow displayRow = displayTable.NewRow();
        for (int i = 0; i < columnCount; i++)
        {
            // Copy the value across when the column exists; otherwise zero-fill.
            displayRow[displayHeaders[i]] = existsInSource[i] ? sourceRow[displayHeaders[i]] : (object)0.0;
        }
        displayTable.Rows.Add(displayRow);
    }

    // Sort the source table by the first ordering column it contains, if any.
    string orderColumn = null;
    if (sourceTable.Columns.Contains(AnalysisKeys.EventStartAbs))
    {
        orderColumn = AnalysisKeys.EventStartAbs;
    }
    else if (sourceTable.Columns.Contains(AnalysisKeys.EventCount))
    {
        orderColumn = AnalysisKeys.EventCount;
    }
    else if (sourceTable.Columns.Contains(AnalysisKeys.KeyRankOrder))
    {
        orderColumn = AnalysisKeys.KeyRankOrder;
    }
    else if (sourceTable.Columns.Contains(AnalysisKeys.KeyStartMinute))
    {
        orderColumn = AnalysisKeys.KeyStartMinute;
    }
    if (orderColumn != null)
    {
        sourceTable = DataTableTools.SortTable(sourceTable, orderColumn + " ASC");
    }

    // Normalisation of display columns is deprecated: class indexProperties handles it now.
    return Tuple.Create(sourceTable, displayTable);
} // ProcessCsvFile()