public static Csv CarData()
{
    Csv expectedTable = new Csv();
    //List<List<string>> expectedTable = new List<List<string>>();
    expectedTable.Header.Add("cars");
    expectedTable.Header.Add("hhsize");
    expectedTable.Records.Add(new List<string> { "1", "1" });
    expectedTable.Records.Add(new List<string> { "2", "2" });
    expectedTable.Records.Add(new List<string> { "2", "3" });
    expectedTable.Records.Add(new List<string> { "2", "4" });
    expectedTable.Records.Add(new List<string> { "3", "5" });
    return expectedTable;
}
static void Main(string[] args)
{
    String csvContents = LoadCsvContents();
    Csv csv = new Csv(csvContents);
    while (csv.HasMoreRows)
    {
        dynamic row = csv.GetNextRow();
        Console.WriteLine("Id: {0}, Name: {1}, State: {2}, DOB: {3}", row.Id, row["Name"], row["State"], row.DateOfBirth);
    }

    csv.Reset();
    while (csv.HasMoreRows)
    {
        dynamic row = csv.GetNextRow();
        String[] values = row;
        Console.WriteLine(values.PrettyPrint());
    }

    Console.ReadLine();
}
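The snippet above reads each row as dynamic, mixing property access (row.Id), string indexing (row["Name"]), and an implicit conversion to String[]. A minimal sketch of a row type that could support all three styles via DynamicObject; the CsvRow class and its members are illustrative assumptions, not the actual API of the Csv library used above.

// Hypothetical sketch, not the real Csv library's row type.
using System;
using System.Dynamic;

public class CsvRow : DynamicObject
{
    private readonly string[] headers;
    private readonly string[] values;

    public CsvRow(string[] headers, string[] values)
    {
        this.headers = headers;
        this.values = values;
    }

    // supports row.Id, row.DateOfBirth, etc.
    public override bool TryGetMember(GetMemberBinder binder, out object result)
    {
        int i = Array.IndexOf(headers, binder.Name);
        result = i >= 0 ? values[i] : null;
        return i >= 0;
    }

    // supports row["Name"], row["State"], etc.
    public string this[string header]
    {
        get
        {
            int i = Array.IndexOf(headers, header);
            return i >= 0 ? values[i] : null;
        }
    }

    // supports "String[] values = row;" as used in the second loop above
    public static implicit operator string[](CsvRow row) => row.values;
}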
static void Main(string[] args)
{
    Csv csv = new Csv();
    Translate translate = new Translate();
    //var csvDataSet = csv.ReadCsv(trainPath);
    //var translatedText = translate.TranslateDataSet(csvDataSet);
    //var resultShuffle = csv.GetShuffleSymptoms(csvDataSet);
    //csv.Write(@"C:\Users\Dima\Desktop\TrainDataSet4.csv", resultShuffle);
    IDataView dataSet = NeuralNetwork.LoadData(trainPath);
    IEstimator<ITransformer> pipeLine = NeuralNetwork.ProcessData();
    NeuralNetwork.BuildAndTrainModel(dataSet, pipeLine, 0.3);
    NeuralNetwork.Evaluate(dataSet.Schema);
    Diseases dis = new Diseases()
    {
        Sym1 = translate.TranslateText("жгучая боль во время мочеиспускания", "ru-en"), // "burning pain during urination"
        Sym2 = translate.TranslateText("учащенное сердцебиение", "ru-en"), // "rapid heartbeat"
        Sym3 = translate.TranslateText("повышение артериального давления", "ru-en"), // "rise in arterial blood pressure"
        Sym4 = translate.TranslateText("повышенное давление", "ru-en"), // "elevated blood pressure"
        Sym5 = translate.TranslateText("Обезвоживание", "ru-en"), // "Dehydration"
        Sym6 = translate.TranslateText("Лихорадка", "ru-en"), // "Fever"
        Sex = translate.TranslateText("мужской", "ru-en"), // "male"
        Age = 3
    };
    Dictionary<string, float> result = NeuralNetwork.PredictDisease(dis);
    foreach (KeyValuePair<string, float> item in result)
    {
        Console.WriteLine($"Disease: {translate.TranslateText(item.Key)} Score: {item.Value * 100}%");
    }
}
public void CalculTauxProcedure()
{
    var dt = new DataTable("choice");
    dt.Columns.Add("Facteur", typeof(double));
    dt.Columns.Add("TauxProces", typeof(double));
    dt.Columns.Add("Type", typeof(string));
    foreach (var m in multipliersTaux)
    {
        int count = 0, countSans = 0, countAvec = 0, countDemi = 0;
        for (int i = 1; i <= N; i++)
        {
            count++;
            var l = new Litige(m);
            if (l.decidePremiereInstance())
            {
                countAvec++;
            }

            if (l.decidePremiereInstanceSansAvocat())
            {
                countSans++;
            }

            if (l.decidePremiereInstanceDemiAvance())
            {
                countDemi++;
            }
        }

        System.Console.WriteLine("Finished set {0} ; total {1} simulations", m, count);
        dt.Rows.Add(m, ((double)countSans) / N, "CPC sans avocat");
        dt.Rows.Add(m, ((double)countAvec) / N, "CPC avec avocat");
        dt.Rows.Add(m, ((double)countDemi) / N, "P-CPC avec avocat");
    }

    var foTaux = new FileInfo(Path.Combine(doBase.FullName, "results-taux.csv"));
    Csv.Write(dt, foTaux);
}
public void TabComplexCsvAlsoHandlesNaiveCsvTest()
{
    // Arrange
    var headers = new List<string> { "H1", "H2", "H3" };
    var lines = new List<string>() { "H1\tH2\tH3", "A\tB\tC", "D\tE\tF", "G\tH\tI", "J\tK\tL" };
    var rows = new List<List<string>>()
    {
        new List<string> { "A", "B", "C" },
        new List<string> { "D", "E", "F" },
        new List<string> { "G", "H", "I" },
        new List<string> { "J", "K", "L" },
    };

    // Act
    var csv = new Csv(@"Data\TabNaive.csv", true, '\t');

    // Assert
    CollectionAssert.AreEqual(headers, csv.Headers);
    var i = 0;
    foreach (var row in csv.Rows)
    {
        CollectionAssert.AreEqual(rows[i++], row);
    }
}
public void TestTimeSpanRoundTrip()
{
    int randomDataCount = 30;
    var random = TestHelpers.Random.GetRandom();

    // create 30 timespans spanning a wide range of magnitudes (i^10 ticks)
    // to exercise all possible encodings of TimeSpan
    var data = new CsvTestClass[randomDataCount];
    for (int i = 0; i < data.Length; i++)
    {
        var ticks = Math.Pow(i, 10) * random.NextDouble();
        data[i] = new CsvTestClass
        {
            SomeNumber = random.Next(),
            SomeTimeSpan = TimeSpan.FromTicks((long)ticks),
        };
    }

    var file = new FileInfo("testCsvRoundTrip.csv");
    Csv.WriteToCsv(file, data);

    var actual = Csv.ReadFromCsv<CsvTestClass>(file).ToArray();

    Assert.AreEqual(30, actual.Length);
    for (var i = 0; i < data.Length; i++)
    {
        var expectedRow = data[i];
        var actualRow = actual[i];
        Assert.AreEqual(expectedRow.SomeNumber, actualRow.SomeNumber);
        Assert.AreEqual(expectedRow.SomeTimeSpan.Ticks, actualRow.SomeTimeSpan.Ticks);
    }

    file.Delete();
}
public void ComplexCsvFromStreamTest()
{
    // Arrange
    var headers = new List<string> { "H1", "H2", "H3" };
    var rows = new List<List<string>>()
    {
        new List<string>
        {
            string.Format("{0}{3}{1}{3}{2}", "A", "B", "C", NewLine.Windows()),
            string.Format("{0}{3}{1}{3}{2}", "D", "E", "F", NewLine.Windows()),
            string.Format("{0}{3}{1}{3}{2}", "G", "H", "I", NewLine.Windows()),
        },
        new List<string>
        {
            string.Format("{0}{3}{1}{3}{2}", "J", "K", "L", NewLine.Windows()),
            string.Format("{0}{3}{1}{3}{2}", "M", "N", "O", NewLine.Windows()),
            string.Format("{0}{3}{1}{3}{2}", "P", "Q", "R", NewLine.Windows()),
        },
    };

    // Act
    var csvAsString = "H1,H2,H3\r\n\"A\r\nB\r\nC\",\"D\r\nE\r\nF\",\"G\r\nH\r\nI\"\r\n\"J\r\nK\r\nL\",\"M\r\nN\r\nO\",\"P\r\nQ\r\nR\"\r\n";
    var csv = new Csv(csvAsString.AsStream());

    // Assert
    CollectionAssert.AreEqual(headers, csv.Headers);
    Assert.AreEqual(rows.Count, csv.Rows.Count);
    var i = 0;
    foreach (var row in csv.Rows)
    {
        CollectionAssert.AreEqual(rows[i++], row);
    }
}
public static int Write<T>(T obj, FileType? outputType = FileType.CSV)
{
    var queue = ObjectProcessing.ObjectQueue;
    queue.Enqueue(obj);
    int count = 0;
    while (queue.Count > 0)
    {
        count++;
        var (Name, Body) = ObjectProcessing.GetInformation(queue.Dequeue());
        switch (outputType)
        {
            case FileType.CSV:
                Csv.WriteFile(Name, Body);
                break;
            case FileType.Excel:
                Excel.WriteFile(Name, Body);
                break;
            case FileType.Json:
                Json.WriteFile(Name, Body);
                break;
            case FileType.XML:
                Xml.WriteFile(Name, Body);
                break;
            default:
                break;
        }
    }

    return count;
}
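Write<T> dispatches on a FileType value. The enum below is a plausible reconstruction assumed from the switch cases; only the four member names are taken from the code above, everything else is a guess.

// Assumed shape of the FileType enum used by Write<T>; not confirmed by the source.
public enum FileType
{
    CSV,
    Excel,
    Json,
    XML,
}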
public override List<FileInfo> WriteSpectrumIndicesFiles(DirectoryInfo destination, string fileNameBase, IEnumerable<SpectralIndexBase> results)
{
    // get selectors and remove the unwanted ones because these indices were never calculated
    var spectralIndexBases = results.ToList();
    var selectors = spectralIndexBases.First().GetSelectors();

    // TODO: REMOVE unused index filter with new Spectral Indices child class
    foreach (var indexName in ContentSignatures.UnusedIndexNames)
    {
        selectors.Remove(indexName);
    }

    var spectralIndexFiles = new List<FileInfo>(selectors.Count);
    foreach (var kvp in selectors)
    {
        // write spectrogram to disk as CSV file
        var filename = FilenameHelpers.AnalysisResultPath(destination, fileNameBase, TowseyContentDescription + "." + kvp.Key, "csv").ToFileInfo();
        spectralIndexFiles.Add(filename);
        Csv.WriteMatrixToCsv(filename, spectralIndexBases, kvp.Value);
    }

    return spectralIndexFiles;
}
public void TestParseWithNoColumnSpecAndNoHeader()
{
    var csv = @"1997;Ford;E350;2,34
2000;Mercury;Cougar;2,38";

    var result = Csv.Parse(
        new ParseInput()
        {
            ColumnSpecifications = new ColumnSpecification[0],
            Delimiter = ";",
            Csv = csv
        },
        new ParseOption() { ContainsHeaderRow = false, CultureInfo = "fi-FI" });

    var resultJArray = result.ToJson() as JArray;
    var resultXml = result.ToXml();
    var resultData = result.Data;
    Assert.That(resultData.Count, Is.EqualTo(2));
    Assert.That(resultJArray.Count, Is.EqualTo(2));
    Assert.That(resultXml, Does.Contain("<0>2000</0>"));
    Assert.That(resultJArray[0]["3"].Value<string>(), Is.EqualTo("2,34"));
}
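The CultureInfo = "fi-FI" option is what lets "2,34" be treated as a decimal here: Finnish formatting uses a comma as the decimal separator. A self-contained BCL-only illustration of that fact, independent of the Csv.Parse API:

// Minimal demonstration of culture-specific decimal parsing; not part of the test above.
using System;
using System.Globalization;

class CultureParseDemo
{
    static void Main()
    {
        var fi = CultureInfo.GetCultureInfo("fi-FI");
        decimal d = decimal.Parse("2,34", fi); // comma is the Finnish decimal separator
        Console.WriteLine(d.ToString(CultureInfo.InvariantCulture)); // prints 2.34
    }
}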
public static IDictionary<TKey, TValue> LoadDictionary<TKey, TValue>(string file, TypeConverter keyTypeConverter, TypeConverter valueTypeConverter)
{
    IDictionary<TKey, TValue> dict = new Dictionary<TKey, TValue>();
    if (!File.Exists(file))
    {
        return dict;
    }

    var sr = new StreamReader(file);
    while (!sr.EndOfStream)
    {
        var fields = Csv.RecordSplit(sr.ReadLine(), ',', '"');
        TKey key = (TKey)keyTypeConverter.ConvertFromString(fields[0]);
        TValue value = (TValue)valueTypeConverter.ConvertFromString(fields[1]);
        dict.Add(key, value);
    }

    sr.Close();
    return dict;
}
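A hedged usage sketch for LoadDictionary: the built-in TypeDescriptor.GetConverter supplies TypeConverters for common types, so a two-column CSV of int keys and string values could be loaded as below. The file name is hypothetical.

// assumes: using System.Collections.Generic; using System.ComponentModel;
// "lookup.csv" is a hypothetical two-column file of "key,value" records
var keyConverter = TypeDescriptor.GetConverter(typeof(int));
var valueConverter = TypeDescriptor.GetConverter(typeof(string));
IDictionary<int, string> map = LoadDictionary<int, string>("lookup.csv", keyConverter, valueConverter);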
public void TestWriteReadSpectrogram()
{
    var random = TestHelpers.Random.GetRandom();
    var testSpectra = random.NextMatrix(100, 50);

    var testFile = this.outputDirectory.CombineFile("test.matrix.csv");
    Csv.WriteMatrixToCsv(testFile, testSpectra);

    var matrix = IndexMatrices.ReadSpectrogram(testFile, out var binCount);

    Assert.AreEqual(100, matrix.GetLength(0));
    Assert.AreEqual(50, matrix.GetLength(1));
    Assert.AreEqual(50, binCount);

    var actualEnumerator = matrix.GetEnumerator();
    foreach (var expected in testSpectra)
    {
        actualEnumerator.MoveNext();
        Assert.AreEqual(expected, (double)actualEnumerator.Current, 1E-14, $"delta: {expected - (double)actualEnumerator.Current}");
    }
}
private void OpenCSVFile(string csvFileName)
{
    Csv csv = new Csv();
    csv.FileOpen(csvFileName);

    // the CSV file structure must be strict: date, number, planned value, actual value
    // data starts at the first column, first row of the table
    // the data delimiters must be the standard ones: ',' between cells and '"' between rows (OpenOffice offers these by default when saving a csv file)
    for (int i = 0; i < csv.Rows.Count; ++i)
    {
        var csvRow = csv.Rows[i];
        if (csvRow.Count >= 4)
        {
            SourceDataRow dataRow = new SourceDataRow();
            if (!string.IsNullOrWhiteSpace(csvRow[0]))
            {
                dataRow.RegDate = DateTime.Parse(csvRow[0]);
            }

            if (!string.IsNullOrWhiteSpace(csvRow[1]))
            {
                dataRow.Number = int.Parse(csvRow[1]);
            }

            if (!string.IsNullOrWhiteSpace(csvRow[2]))
            {
                dataRow.PlanValue = int.Parse(csvRow[2]);
            }

            if (!string.IsNullOrWhiteSpace(csvRow[3]))
            {
                dataRow.ActualValue = int.Parse(csvRow[3]);
            }

            sourceDataTable.Add(dataRow);
        }
    }
}
//[TestMethod]
private void TestPrintAndReload(params ITestCollection[] collections)
{
    Csv csv = new Csv(testCsvPath);
    foreach (var col in collections)
    {
        foreach (var ele in col)
        {
            csv.SaveObject(ele, collectionId: col.ID);
        }
    }

    csv.Save();

    Csv load = new Csv(testCsvPath);
    load.Load();
    List<string> errors = new List<string>();
    foreach (ITestCollection col in collections)
    {
        var length = col.Count;
        var loadedList = new List<object>();
        for (int i = 0; i < length; ++i)
        {
            var originalObj = col[i];
            var loadedObj = load.GetObject(col.Type, i, col.ID);
            loadedList.Add(loadedObj);
            if (!originalObj.Equals(loadedObj))
            {
                errors.Add($"Error deserializing element '{i}' with collection '{col.ID}'");
            }
        }
    }

    Assert.IsTrue(errors.Count == 0, $"One or more reloads failed: {string.Join(",", errors)}");
}
public CsvResult DownloadCompleted(Csv csv)
{
    return new CsvResult { Data = csv };
}
public void TestParseWillAllKindOfDataTypes()
{
    var csv = @"THIS;is;header;row;with;some;random;stuff ;yes
1997;""Fo;rd"";2,34;true;1;4294967296;f;2008-09-15;2008-05-01 7:34:42Z
2000;Mercury;2,38;false;0;4294967296;g;2009-09-15T06:30:41.7752486;Thu, 01 May 2008 07:34:42 GMT";

    var result = Csv.Parse(
        new ParseInput()
        {
            ColumnSpecifications = new[]
            {
                new ColumnSpecification() { Name = "Int", Type = ColumnType.Int },
                new ColumnSpecification() { Name = "String", Type = ColumnType.String },
                new ColumnSpecification() { Name = "Decimal", Type = ColumnType.Decimal },
                new ColumnSpecification() { Name = "Bool", Type = ColumnType.Boolean },
                new ColumnSpecification() { Name = "Bool2", Type = ColumnType.Boolean },
                new ColumnSpecification() { Name = "Long", Type = ColumnType.Long },
                new ColumnSpecification() { Name = "Char", Type = ColumnType.Char },
                new ColumnSpecification() { Name = "DateTime", Type = ColumnType.DateTime },
                new ColumnSpecification() { Name = "DateTime2", Type = ColumnType.DateTime },
            },
            Delimiter = ";",
            Csv = csv
        },
        new ParseOption() { ContainsHeaderRow = true, CultureInfo = "fi-FI" });

    var resultJson = (JArray)result.ToJson();
    Assert.That(resultJson[0]["Long"].Value<long>(), Is.EqualTo(4294967296));

    var resultXml = result.ToXml();
    Assert.That(resultXml, Does.Contain("<DateTime2>1.5.2008 10.34.42</DateTime2>"));

    var resultData = result.Data;
    var itemArray = resultData[0];
    Assert.That(itemArray[0].GetType(), Is.EqualTo(typeof(int)));
    Assert.That(itemArray[0], Is.EqualTo(1997));
    Assert.That(itemArray[1].GetType(), Is.EqualTo(typeof(string)));
    Assert.That(itemArray[1], Is.EqualTo("Fo;rd"));
    Assert.That(itemArray[2].GetType(), Is.EqualTo(typeof(decimal)));
    Assert.That(itemArray[2], Is.EqualTo(2.34d));
    Assert.That(itemArray[3].GetType(), Is.EqualTo(typeof(bool)));
    Assert.That(itemArray[3], Is.EqualTo(true));
    Assert.That(itemArray[4].GetType(), Is.EqualTo(typeof(bool)));
    Assert.That(itemArray[4], Is.EqualTo(true));
    Assert.That(itemArray[5].GetType(), Is.EqualTo(typeof(long)));
    Assert.That(itemArray[5], Is.EqualTo(4294967296));
    Assert.That(itemArray[6].GetType(), Is.EqualTo(typeof(char)));
    Assert.That(itemArray[6], Is.EqualTo('f'));
    Assert.That(itemArray[7].GetType(), Is.EqualTo(typeof(DateTime)));
    Assert.That(itemArray[7], Is.EqualTo(new DateTime(2008, 9, 15)));
    Assert.That(itemArray[8].GetType(), Is.EqualTo(typeof(DateTime)));
    Assert.That(itemArray[8], Is.EqualTo(new DateTime(2008, 5, 1, 10, 34, 42)));
}
private string rotaCidadeExportar()
{
    DataTable dtRotaCidade;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela de rotas para as cidades");
    qry = @"select * from VW_CIDADES_POCKET";
    dtRotaCidade = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + rotaCidadeArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + rotaCidadeArquivoEnviarNome);
    csv.EscreveCsv(dtRotaCidade, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + rotaCidadeArquivoEnviarNome);
    return msg;
}
private string clientesCadastradosExportar()
{
    DataTable dtClientesCadastrados;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela de CNPJs e CPFs");
    qry = @"select distinct CGC_CPF as CPNJ_CPF from cliente";
    dtClientesCadastrados = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clientesCadastradosArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clientesCadastradosArquivoEnviarNome);
    csv.EscreveCsv(dtClientesCadastrados, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clientesCadastradosArquivoEnviarNome);
    return msg;
}
private string funcionarioTabelaExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela de vendedores ");
    qry = @"SELECT CODIGO, NOME, DESCONTO_MAXIMO, ACRESCIMO_MAXIMO FROM FUNCIONARIO WHERE PARTICIPA_FORCA_VENDA ='1'";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + funcionarioArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + funcionarioArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + funcionarioArquivoEnviarNome);
    return msg;
}
private string tabelaPrecoExportar()
{
    DataTable dtTabelaPreco;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela de preço ");
    qry = @"select CODIGO,DESCRICAO,PERCENTUALAJUSTE,TIPOAJUSTE from TABELAPRECO";
    dtTabelaPreco = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + tabelaPrecoArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + tabelaPrecoArquivoEnviarNome);
    csv.EscreveCsv(dtTabelaPreco, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + tabelaPrecoArquivoEnviarNome);
    return msg;
}
private string tabelaTabelaPrecoFormaPagamentoExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando as formas de pagamento das tabelas de preço");
    qry = @" SELECT COD_TABELAPRECO, COD_FORMAPAGAMENTO FROM FORMAPAGAMENTOTABELAPRECO";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + tabelaPrecoFormaPagamentoArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + tabelaPrecoFormaPagamentoArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + tabelaPrecoFormaPagamentoArquivoEnviarNome);
    return msg;
}
private bool TryGetValue(Csv table, string valueA, string valueB, out string value)
{
    // note: a column index of 0 is valid (the first column); only a negative index means "not found"
    int aIndex = table.ColumnIndex(CategoryAName);
    if (aIndex < 0)
    {
        throw new Exception();
    }

    int bIndex = table.ColumnIndex(CategoryBName);
    if (bIndex < 0)
    {
        throw new Exception();
    }

    for (int recordIndex = 0; recordIndex < table.Records.Count; recordIndex++)
    {
        List<string> record = table.Records[recordIndex];
        string recordValueA = record[aIndex];
        string recordValueB = record[bIndex];
        if (recordValueA == valueA && recordValueB == valueB)
        {
            int valueIndex = table.ColumnIndex(ValueVariableName);
            if (valueIndex < 0)
            {
                throw new Exception();
            }

            value = record[valueIndex];
            return true;
        }
    }

    value = null;
    return false;
}
private string tabelaMotivoExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela de motivos de recusa de pedidos ");
    qry = @"SELECT CODIGO, DESCRICAO FROM MOTIVONAOCOMPRA";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + motivoArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + motivoArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + motivoArquivoEnviarNome);
    return msg;
}
public void Test()
{
    // This test takes some data with a value column and one or more factor labels.
    // Each label is a categorical value and has a set of possible values.
    // The function combines all permutations of factors and fills in missing values.
    const string FillValue = "-1";
    List<string> categoryAValues = new List<string>(new string[] { "A", "B", "C", "D" });
    List<string> categoryBValues = new List<string>(new string[] { "1", "2", "3", });

    Csv table = new Csv();
    table.Header.Add(CategoryAName);
    table.Header.Add(CategoryBName);
    table.Header.Add(ValueVariableName);
    table.Records.Add(new List<string>(new string[] { categoryAValues[0], categoryBValues[0], "1", }));
    table.Records.Add(new List<string>(new string[] { categoryAValues[1], categoryBValues[1], "2", }));
    table.Records.Add(new List<string>(new string[] { categoryAValues[2], categoryBValues[2], "3", }));
    table.Records.Add(new List<string>(new string[] { categoryAValues[3], categoryBValues[0], "4", }));

    string inputFileName = FileFunctions.TempoaryOutputFileName(".csv");
    CsvFunctions.Write(inputFileName, table);
    string outputFileName = FileFunctions.TempoaryOutputFileName(".csv");

    List<Tuple<string, List<string>>> categories = new List<Tuple<string, List<string>>>();
    categories.Add(new Tuple<string, List<string>>(CategoryAName, categoryAValues));
    categories.Add(new Tuple<string, List<string>>(CategoryBName, categoryBValues));

    AdapterFunctions.FillCategoryTimeSeries(inputFileName, outputFileName, categories, ValueVariableName, FillValue);

    Csv newTable = CsvFunctions.Read(outputFileName);
    if (newTable.Header.Count != 3
        || !newTable.Header.Contains(ValueVariableName)
        || !newTable.Header.Contains(CategoryAName)
        || !newTable.Header.Contains(CategoryBName)
        || newTable.Records.Count != categoryAValues.Count * categoryBValues.Count)
    {
        throw new Exception();
    }

    for (int categoryAIndex = 0; categoryAIndex < categoryAValues.Count; categoryAIndex++)
    {
        string valueA = categoryAValues[categoryAIndex];
        for (int categoryBIndex = 0; categoryBIndex < categoryBValues.Count; categoryBIndex++)
        {
            string valueB = categoryBValues[categoryBIndex];
            string newValue;
            if (!TryGetValue(newTable, valueA, valueB, out newValue))
            {
                throw new Exception();
            }

            string value;
            if (!TryGetValue(table, valueA, valueB, out value))
            {
                // combination absent from the input: it must have been filled
                if (newValue != FillValue)
                {
                    throw new Exception();
                }
            }
            else
            {
                // combination present in the input: the original value must be preserved
                if (newValue != value)
                {
                    throw new Exception();
                }
            }
        }
    }
}
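The test above exercises FillCategoryTimeSeries, which emits one record per combination of category values and uses FillValue where the input had no record. A minimal sketch of that core fill logic for two categories, independent of the Csv/AdapterFunctions types; all names here are illustrative assumptions.

// Sketch of the permutation-fill idea, not the real AdapterFunctions implementation.
// assumes: using System; using System.Collections.Generic;
static List<List<string>> FillAllCombinations(
    List<string> categoryA,
    List<string> categoryB,
    Func<string, string, string> tryGetExisting, // returns null when the (a, b) pair is absent
    string fillValue)
{
    var records = new List<List<string>>();
    foreach (string a in categoryA)
    {
        foreach (string b in categoryB)
        {
            // keep the existing value when the input has this (a, b) pair, else fill
            string value = tryGetExisting(a, b) ?? fillValue;
            records.Add(new List<string> { a, b, value });
        }
    }

    return records; // categoryA.Count * categoryB.Count records, as the test asserts
}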
public static async void Initialize()
{
    Logger = new Logger();
    Logger.Log($"Starting [{DateTime.Now.ToLongTimeString()} - {ServerUtils.GetOsName()}]...", null);
    Configuration = new Configuration();
    Configuration.Initialize();
    Fingerprint = new Fingerprint();
    //Levels = new Levels();
    Csv = new Csv();
    //PlayerDb = new PlayerDb();
    //AllianceDb = new AllianceDb();
    /*for (int i = 0; i <= await PlayerDb.CountAsync() + 1; i++)
     * {
     *     await PlayerDb.DeleteAsync(i);
     * }*/
    //PlayerDb = new PlayerDb();
    /*Logger.Log(
     *     $"Successfully loaded MySql with {await PlayerDb.CountAsync()} player(s)",
     *     null);*/
    ObjectCache = new ObjectCache();
    //Players = new Players();
    //Alliances = new Alliances();
    //Leaderboard = new Leaderboard();
    StartTime = DateTime.UtcNow;
    Netty = new NettyService();
    if (Configuration.Name == "")
    {
        Logger.Log("The name must not be empty.", null, Logger.ErrorLevel.Warning);
        Program.Shutdown();
    }
    else
    {
        Name = Configuration.Name;
    }

    Map = 7;
    Brawler = 0;
    Room = 0;
    Trophies = Configuration.Trophies;
    Skin = 0;
    Region = Configuration.Region;
    RoomID = 0;
    Box = 3;
    Tickets = 99999;
    Gold = 99999;
    Gems = 99999;
    Skin = 0;
    ProfileIcon = 0;
    NameColor = 0;
    ChatMessage = string.Empty;
    MessageTick = 0;
    StarPower = 76;
    Gadget = 255;
    UseGadget = true;
    await Task.Run(Netty.RunServerAsync);
}
public CsvTest() { _csv = new Csv(); }
/// <summary>
/// Do your analysis. This method is called once per segment (typically one-minute segments).
/// </summary>
/// <param name="audioRecording"></param>
/// <param name="configuration"></param>
/// <param name="segmentStartOffset"></param>
/// <param name="getSpectralIndexes"></param>
/// <param name="outputDirectory"></param>
/// <param name="imageWidth"></param>
/// <returns></returns>
public override RecognizerResults Recognize(AudioRecording audioRecording, Config configuration, TimeSpan segmentStartOffset, Lazy<IndexCalculateResult[]> getSpectralIndexes, DirectoryInfo outputDirectory, int? imageWidth)
{
    const double minAmplitudeThreshold = 0.1;
    const int percentile = 5;
    const double scoreThreshold = 0.3;
    const bool doFiltering = true;
    const int windowWidth = 1024;
    const int signalBuffer = windowWidth * 2;

    //string path = @"C:\SensorNetworks\WavFiles\Freshwater\savedfortest.wav";
    //audioRecording.Save(path); // this does not work

    int sr = audioRecording.SampleRate;
    int nyquist = audioRecording.Nyquist;

    // Get a value from the config file - with a backup default
    //int minHz = (int?)configuration[AnalysisKeys.MinHz] ?? 600;

    // Get a value from the config file - with no default, throw an exception if value is not present
    //int maxHz = ((int?)configuration[AnalysisKeys.MaxHz]).Value;

    // Get a value from the config file - without a string accessor, as a double
    //double someExampleSettingA = (double?)configuration.someExampleSettingA ?? 0.0;

    // common properties
    //string speciesName = (string)configuration[AnalysisKeys.SpeciesName] ?? "<no species>";
    //string abbreviatedSpeciesName = (string)configuration[AnalysisKeys.AbbreviatedSpeciesName] ?? "<no.sp>";

    // min score for an acceptable event
    double eventThreshold = (double)configuration.GetDoubleOrNull(AnalysisKeys.EventThreshold);

    // get samples
    var samples = audioRecording.WavReader.Samples;
    double[] bandPassFilteredSignal = null;

    if (doFiltering)
    {
        // high pass filter
        int windowLength = 71;
        double[] highPassFilteredSignal;
        DSP_IIRFilter.ApplyMovingAvHighPassFilter(samples, windowLength, out highPassFilteredSignal);
        //DSP_IIRFilter filter2 = new DSP_IIRFilter("Chebyshev_Highpass_400");
        //int order2 = filter2.order;
        //filter2.ApplyIIRFilter(samples, out highPassFilteredSignal);

        // Amplify 40dB and clip to +/-1.0
        double factor = 100; // equiv to 40dB in amplitude
        highPassFilteredSignal = DspFilters.AmplifyAndClip(highPassFilteredSignal, factor);

        // low pass filter
        string filterName = "Chebyshev_Lowpass_5000, scale*5";
        DSP_IIRFilter filter = new DSP_IIRFilter(filterName);
        int order = filter.order;
        //System.LoggedConsole.WriteLine("\nTest " + filterName + ", order=" + order);
        filter.ApplyIIRFilter(highPassFilteredSignal, out bandPassFilteredSignal);
    }
    else
    {
        // do not filter because already filtered - using Chris's filtered recording
        bandPassFilteredSignal = samples;
    }

    // calculate an amplitude threshold that is above the Nth percentile of amplitudes in the subsample
    int[] histogramOfAmplitudes;
    double minAmplitude;
    double maxAmplitude;
    double binWidth;
    int window = 66;
    Histogram.GetHistogramOfWaveAmplitudes(bandPassFilteredSignal, window, out histogramOfAmplitudes, out minAmplitude, out maxAmplitude, out binWidth);
    int percentileBin = Histogram.GetPercentileBin(histogramOfAmplitudes, percentile);
    double amplitudeThreshold = (percentileBin + 1) * binWidth;
    if (amplitudeThreshold < minAmplitudeThreshold)
    {
        amplitudeThreshold = minAmplitudeThreshold;
    }

    bool doAnalysisOfKnownExamples = true;
    if (doAnalysisOfKnownExamples)
    {
        // go to fixed location to check
        //1:02.07, 1:07.67, 1:12.27, 1:12.42, 1:12.59, 1:12.8, 1.34.3, 1:35.3, 1:40.16, 1:50.0, 2:05.9, 2:06.62, 2:17.57, 2:21.0
        //2:26.33, 2:43.07, 2:43.15, 3:16.55, 3:35.09, 4:22.44, 4:29.9, 4:42.6, 4:51.48, 5:01.8, 5:21.15, 5:22.72, 5:32.37, 5.36.1,
        //5:42.82, 6:03.5, 6:19.93, 6:21.55, 6:42.0, 6:42.15, 6:46.44, 7:12.17, 7:42.65, 7:45.86, 7:46.18, 7:52.38, 7:59.11, 8:10.63,
        //8:14.4, 8:14.63, 8_15_240, 8_46_590, 8_56_590, 9_25_77, 9_28_94, 9_30_5, 9_43_9, 10_03_19, 10_24_26, 10_24_36, 10_38_8,
        //10_41_08, 10_50_9, 11_05_13, 11_08_63, 11_44_66, 11_50_36, 11_51_2, 12_04_93, 12_10_05, 12_20_78, 12_27_0, 12_38_5,
        //13_02_25, 13_08_18, 13_12_8, 13_25_24, 13_36_0, 13_50_4, 13_51_2, 13_57_87, 14_15_00, 15_09_74, 15_12_14, 15_25_79
        //double[] times = { 2.2, 26.589, 29.62 };
        //double[] times = { 2.2, 3.68, 10.83, 24.95, 26.589, 27.2, 29.62 };
        //double[] times = { 2.2, 3.68, 10.83, 24.95, 26.589, 27.2, 29.62, 31.39, 62.1, 67.67, 72.27, 72.42, 72.59, 72.8, 94.3, 95.3,
        //                   100.16, 110.0, 125.9, 126.62, 137.57, 141.0, 146.33, 163.07, 163.17, 196.55, 215.09, 262.44, 269.9, 282.6,
        //                   291.48, 301.85, 321.18, 322.72, 332.37, 336.1, 342.82, 363.5, 379.93, 381.55, 402.0, 402.15, 406.44, 432.17,
        //                   462.65, 465.86, 466.18, 472.38, 479.14, 490.63, 494.4, 494.63, 495.240, 526.590, 536.590, 565.82, 568.94,
        //                   570.5, 583.9, 603.19, 624.26, 624.36, 638.8, 641.08, 650.9, 65.13, 68.63, 704.66,
        //                   710.36, 711.2, 724.93, 730.05, 740.78, 747.05, 758.5, 782.25, 788.18, 792.8,
        //                   805.24, 816.03, 830.4, 831.2, 837.87, 855.02, 909.74, 912.14, 925.81 };
        var filePath = new FileInfo(@"C:\SensorNetworks\WavFiles\Freshwater\GruntSummaryRevisedAndEditedByMichael.csv");
        List<CatFishCallData> data = Csv.ReadFromCsv<CatFishCallData>(filePath, true).ToList();
        //var catFishCallDatas = data as IList<CatFishCallData> ?? data.ToList();
        int count = data.Count();

        var subSamplesDirectory = outputDirectory.CreateSubdirectory("testSubsamples_5000LPFilter");

        //for (int t = 0; t < times.Length; t++)
        foreach (var fishCall in data)
        {
            //Image bmp1 = IctalurusFurcatus.AnalyseLocation(bandPassFilteredSignal, sr, times[t], windowWidth);

            // use the following lines where using time in seconds
            //int location = (int)Math.Round(times[t] * sr); //assume location points to start of grunt
            //double[] subsample = DataTools.Subarray(bandPassFilteredSignal, location - signalBuffer, 2 * signalBuffer);

            // use the following line where using sample
            int location1 = fishCall.Sample / 2; //assume Chris's sample location points to centre of grunt. Divide by 2 because original recording was 44100.
            int location = (int)Math.Round(fishCall.TimeSeconds * sr); //assume location points to centre of grunt

            double[] subsample = DataTools.Subarray(bandPassFilteredSignal, location - signalBuffer, 2 * signalBuffer);

            // calculate an amplitude threshold that is above the 95th percentile of amplitudes in the subsample
            //int[] histogramOfAmplitudes;
            //double minAmplitude;
            //double maxAmplitude;
            //double binWidth;
            //int window = 70;
            //int percentile = 90;
            //Histogram.GetHistogramOfWaveAmplitudes(subsample, window, out histogramOfAmplitudes, out minAmplitude, out maxAmplitude, out binWidth);
            //int percentileBin = Histogram.GetPercentileBin(histogramOfAmplitudes, percentile);
            //double amplitudeThreshold = (percentileBin + 1) * binWidth;
            //if (amplitudeThreshold < minAmplitudeThreshold) amplitudeThreshold = minAmplitudeThreshold;

            double[] scores1 = AnalyseWaveformAtLocation(subsample, amplitudeThreshold, scoreThreshold);
            string title1 = $"scores={fishCall.Timehms}";
            Image bmp1 = GraphsAndCharts.DrawGraph(title1, scores1, subsample.Length, 300, 1);
            //bmp1.Save(path1.FullName);

            string title2 = $"tStart={fishCall.Timehms}";
            Image bmp2 = GraphsAndCharts.DrawWaveform(title2, subsample, 1);
            var path1 = subSamplesDirectory.CombineFile($"scoresForTestSubsample_{fishCall.TimeSeconds}secs.png");
            //var path2 = subSamplesDirectory.CombineFile($@"testSubsample_{times[t]}secs.wav.png");
            Image[] imageList = { bmp2, bmp1 };
            Image bmp3 = ImageTools.CombineImagesVertically(imageList);
            bmp3.Save(path1.FullName);

            //write wave form to txt file for later work in XLS
            //var path3 = subSamplesDirectory.CombineFile($@"testSubsample_{times[t]}secs.wav.csv");
            //signalBuffer = 800;
            //double[] subsample2 = DataTools.Subarray(bandPassFilteredSignal, location - signalBuffer, 3 * signalBuffer);
            //FileTools.WriteArray2File(subsample2, path3.FullName);
        }
    }

    int signalLength = bandPassFilteredSignal.Length;

    // count number of 1000 sample segments
    int blockLength = 1000;
    int blockCount = signalLength / blockLength;
    int[] indexOfMax = new int[blockCount];
    double[] maxInBlock = new double[blockCount];
    for (int i = 0; i < blockCount; i++)
    {
        double max = -2.0;
        int blockStart = blockLength * i;
        for (int s = 0; s < blockLength; s++)
        {
            double absValue = Math.Abs(bandPassFilteredSignal[blockStart + s]);
            if (absValue > max)
            {
                max = absValue;
                maxInBlock[i] = max;
                indexOfMax[i] = blockStart + s;
            }
        }
    }

    // transfer max values to a list
    var indexList = new List<int>();
    for (int i = 1; i < blockCount - 1; i++)
    {
        // only keep the blocks whose max value is greater than that of both neighbouring blocks
        if (maxInBlock[i] > maxInBlock[i - 1] && maxInBlock[i] > maxInBlock[i + 1])
        {
            indexList.Add(indexOfMax[i]);
        }

        //ALTERNATIVELY
        // look at max in each block
        //indexList.Add(indexOfMax[i]);
    }

    // now process the neighbourhood of each max
    int binCount = windowWidth / 2;
    FFT.WindowFunc wf = FFT.Hamming;
    var fft = new FFT(windowWidth, wf);

    int maxHz = 1000;
    double hzPerBin = nyquist / (double)binCount;
    int requiredBinCount = (int)Math.Round(maxHz / hzPerBin);

    // init list of events
    List<AcousticEvent> events = new List<AcousticEvent>();

    double[] scores = new double[signalLength]; // init of score array
    int id = 0;
    foreach (int location in indexList)
    {
        //System.LoggedConsole.WriteLine("Location " + location + ", id=" + id);
        int start = location - binCount;
        if (start < 0)
        {
            continue;
        }

        int end = location + binCount;
        if (end >= signalLength)
        {
            continue;
        }

        double[] subsampleWav = DataTools.Subarray(bandPassFilteredSignal, start, windowWidth);
        var spectrum = fft.Invoke(subsampleWav);

        // convert to power
        spectrum = DataTools.SquareValues(spectrum);
        spectrum = DataTools.filterMovingAverageOdd(spectrum, 3);
        spectrum = DataTools.normalise(spectrum);
        var subBandSpectrum = DataTools.Subarray(spectrum, 1, requiredBinCount); // ignore DC in bin zero

        // now do some tests on the spectrum to determine if it is a candidate grunt
        bool eventFound = false;

        double[] scoreArray = CalculateScores(subBandSpectrum, windowWidth);
        double score = scoreArray[0];

        if (score > scoreThreshold)
        {
            eventFound = true;
        }

        if (eventFound)
        {
            for (int i = location - binCount; i < location + binCount; i++)
            {
                scores[i] = score;
            }

            var startTime = TimeSpan.FromSeconds((location - binCount) / (double)sr);
            string startLabel = startTime.Minutes + "." + startTime.Seconds + "." + startTime.Milliseconds;
            Image image4 = GraphsAndCharts.DrawWaveAndFft(subsampleWav, sr, startTime, spectrum, maxHz * 2, scoreArray);
            var path4 = outputDirectory.CreateSubdirectory("subsamples").CombineFile($@"subsample_{location}_{startLabel}.png");
            image4.Save(path4.FullName);

            // have an event, store the data in the AcousticEvent class
            double duration = 0.2;
            int minFreq = 50;
            int maxFreq = 1000;
            var anEvent = new AcousticEvent(segmentStartOffset, startTime.TotalSeconds, duration, minFreq, maxFreq);
            anEvent.Name = "grunt";
            //anEvent.Name = DataTools.WriteArrayAsCsvLine(subBandSpectrum, "f4");
            anEvent.Score = score;
            events.Add(anEvent);
        }

        id++;
    }

    // make a spectrogram
    var config = new SonogramConfig
    {
        NoiseReductionType = NoiseReductionType.Standard,
        NoiseReductionParameter = configuration.GetDoubleOrNull(AnalysisKeys.NoiseBgThreshold) ?? 0.0,
    };
    var sonogram = (BaseSonogram)new SpectrogramStandard(config, audioRecording.WavReader);

    //// when the value is accessed, the indices are calculated
    //var indices = getSpectralIndexes.Value;

    //// check if the indices have been calculated - you shouldn't actually need this
    //if (getSpectralIndexes.IsValueCreated)
    //{
    //    // then indices have been calculated before
    //}

    var plot = new Plot(this.DisplayName, scores, eventThreshold);

    return new RecognizerResults()
    {
        Events = events,
        Hits = null,
        //ScoreTrack = null,
        Plots = plot.AsList(),
        Sonogram = sonogram,
    };
}
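The recognizer above turns a histogram percentile into an absolute amplitude threshold ((percentileBin + 1) * binWidth, clamped to a floor). A standalone sketch of just that arithmetic; the Histogram class itself is not reproduced, and all names below are illustrative.

// Sketch of percentile-bin thresholding over a precomputed amplitude histogram.
static double PercentileThreshold(int[] histogram, int percentile, double binWidth, double floor)
{
    long total = 0;
    foreach (int count in histogram)
    {
        total += count;
    }

    long target = (long)(total * (percentile / 100.0));

    // walk the histogram until the cumulative count reaches the percentile
    long cumulative = 0;
    int bin = 0;
    for (; bin < histogram.Length - 1; bin++)
    {
        cumulative += histogram[bin];
        if (cumulative >= target)
        {
            break;
        }
    }

    // same shape as the code above: the threshold is the upper edge of the
    // percentile bin, clamped to a minimum floor value
    double threshold = (bin + 1) * binWidth;
    return threshold < floor ? floor : threshold;
}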
/// <summary>
/// Apply the feature learning process to a set of target (1-minute) recordings (inputPath)
/// according to a set of centroids learned by the feature learning process.
/// Output feature vectors (outputPath).
/// </summary>
public static void UnsupervisedFeatureExtraction(FeatureLearningSettings config, List<double[][]> allCentroids, string inputPath, string outputPath)
{
    var simVecDir = Directory.CreateDirectory(Path.Combine(outputPath, "SimilarityVectors"));
    int frameSize = config.FrameSize;
    int finalBinCount = config.FinalBinCount;
    FreqScaleType scaleType = config.FrequencyScaleType;
    var settings = new SpectrogramSettings()
    {
        WindowSize = frameSize,

        // the duration of each frame (according to the default frame size, i.e., 1024) is 0.04644 seconds.
        // The question is how many single frames (i.e., patch height equal to 1) should be selected to form one second;
        // the "WindowOverlap" is calculated to answer this question:
        // each 24 single-frames duration is equal to 1 second.
        // note that the "WindowOverlap" value should be recalculated if the frame size is changed;
        // this has not yet been considered in the config file!
        WindowOverlap = 0.10725204,
        DoMelScale = scaleType == FreqScaleType.Mel,
        MelBinCount = (scaleType == FreqScaleType.Mel) ? finalBinCount : frameSize / 2,
        NoiseReductionType = NoiseReductionType.None,
        NoiseReductionParameter = 0.0,
    };
    double frameStep = frameSize * (1 - settings.WindowOverlap);
    int minFreqBin = config.MinFreqBin;
    int maxFreqBin = config.MaxFreqBin;
    int numFreqBand = config.NumFreqBand;
    int patchWidth = (maxFreqBin - minFreqBin + 1) / numFreqBand;
    int patchHeight = config.PatchHeight;

    // the number of frames whose feature vectors will be concatenated in order to preserve temporal information
    int frameWindowLength = config.FrameWindowLength;

    // the step size to make a window of frames
    int stepSize = config.StepSize;

    // the factor of downsampling
    int maxPoolingFactor = config.MaxPoolingFactor;

    // check whether there is any file in the folder/subfolders
    if (Directory.GetFiles(inputPath, "*", SearchOption.AllDirectories).Length == 0)
    {
        throw new ArgumentException("The folder of recordings is empty...");
    }

    // lists of features for all processed files
    // the key is the file name, and the value is the features for different bands
    Dictionary<string, List<double[,]>> allFilesMinFeatureVectors = new Dictionary<string, List<double[,]>>();
    Dictionary<string, List<double[,]>> allFilesMeanFeatureVectors = new Dictionary<string, List<double[,]>>();
    Dictionary<string, List<double[,]>> allFilesMaxFeatureVectors = new Dictionary<string, List<double[,]>>();
    Dictionary<string, List<double[,]>> allFilesStdFeatureVectors = new Dictionary<string, List<double[,]>>();
    Dictionary<string, List<double[,]>> allFilesSkewnessFeatureVectors = new Dictionary<string, List<double[,]>>();
    double[,] inputMatrix;
    List<AudioRecording> recordings = new List<AudioRecording>();
    foreach (string filePath in Directory.GetFiles(inputPath, "*.wav"))
    {
        FileInfo fileInfo = filePath.ToFileInfo();

        // process the wav file if it is not empty
        if (fileInfo.Length != 0)
        {
            var recording = new AudioRecording(filePath);
            settings.SourceFileName = recording.BaseName;
            if (config.DoSegmentation)
            {
                recordings = PatchSampling.GetSubsegmentsSamples(recording, config.SubsegmentDurationInSeconds, frameStep);
            }
            else
            {
                recordings.Add(recording);
            }

            for (int s = 0; s < recordings.Count; s++)
            {
                string pathToSimilarityVectorsFile = Path.Combine(simVecDir.FullName, fileInfo.Name + "-" + s.ToString() + ".csv");
                var amplitudeSpectrogram = new AmplitudeSpectrogram(settings, recordings[s].WavReader);
                var decibelSpectrogram = new DecibelSpectrogram(amplitudeSpectrogram);

                // DO RMS NORMALIZATION
                //sonogram.Data = SNR.RmsNormalization(sonogram.Data);

                // DO NOISE REDUCTION
                if (config.DoNoiseReduction)
                {
                    decibelSpectrogram.Data = PcaWhitening.NoiseReduction(decibelSpectrogram.Data);
                }

                // check whether the full band spectrogram is needed or a matrix with arbitrary freq bins
                if (minFreqBin != 1 || maxFreqBin != finalBinCount)
                {
                    inputMatrix = PatchSampling.GetArbitraryFreqBandMatrix(decibelSpectrogram.Data, minFreqBin, maxFreqBin);
                }
                else
                {
                    inputMatrix = decibelSpectrogram.Data;
                }

                // creating matrices from different freq bands of the source spectrogram
                List<double[,]> allSubmatrices2 = PatchSampling.GetFreqBandMatrices(inputMatrix, numFreqBand);
                double[][,] matrices2 = allSubmatrices2.ToArray();
                List<double[,]> allSequentialPatchMatrix = new List<double[,]>();
                for (int i = 0; i < matrices2.GetLength(0); i++)
                {
                    // downsampling the input matrix by a factor of n (MaxPoolingFactor) using max pooling
                    double[,] downsampledMatrix = FeatureLearning.MaxPooling(matrices2[i], config.MaxPoolingFactor);
                    int rows = downsampledMatrix.GetLength(0);
                    int columns = downsampledMatrix.GetLength(1);
                    var sequentialPatches = PatchSampling.GetPatches(downsampledMatrix, patchWidth, patchHeight, (rows / patchHeight) * (columns / patchWidth), PatchSampling.SamplingMethod.Sequential);
                    allSequentialPatchMatrix.Add(sequentialPatches.ToMatrix());
                }

                // +++++++++++++++++++++++++++++++++++ Feature Transformation
                // to do the feature transformation, we normalize centroids and
                // sequential patches from the input spectrogram to unit length.
                // Then, we calculate the dot product of each patch with the centroids' matrix
                List<double[][]> allNormCentroids = new List<double[][]>();
                for (int i = 0; i < allCentroids.Count; i++)
                {
                    // double check the index of the list
                    double[][] normCentroids = new double[allCentroids.ToArray()[i].GetLength(0)][];
                    for (int j = 0; j < allCentroids.ToArray()[i].GetLength(0); j++)
                    {
                        normCentroids[j] = ART_2A.NormaliseVector(allCentroids.ToArray()[i][j]);
                    }

                    allNormCentroids.Add(normCentroids);
                }

                List<double[][]> allFeatureTransVectors = new List<double[][]>();

                // processing the sequential patch matrix for each band
                for (int i = 0; i < allSequentialPatchMatrix.Count; i++)
                {
                    List<double[]> featureTransVectors = new List<double[]>();
                    double[][] similarityVectors = new double[allSequentialPatchMatrix.ToArray()[i].GetLength(0)][];
                    for (int j = 0; j < allSequentialPatchMatrix.ToArray()[i].GetLength(0); j++)
                    {
                        // normalize each patch to unit length
                        var inputVector = allSequentialPatchMatrix.ToArray()[i].ToJagged()[j];
                        var normVector = inputVector;

                        // to avoid vectors with NaN values, only normalize those whose norm is not zero
                        if (inputVector.Euclidean() != 0)
                        {
                            normVector = ART_2A.NormaliseVector(inputVector);
                        }

                        similarityVectors[j] = allNormCentroids.ToArray()[i].ToMatrix().Dot(normVector);
                    }

                    Csv.WriteMatrixToCsv(pathToSimilarityVectorsFile.ToFileInfo(), similarityVectors.ToMatrix());

                    // To preserve the temporal information, we can concatenate the similarity vectors of a group of frames
                    // using FrameWindowLength.
                    // patchId refers to the patch id that has been processed so far according to the step size;
                    // if we want no overlap between different frame windows, then stepSize = frameWindowLength
                    int patchId = 0;
                    while (patchId + frameWindowLength - 1 < similarityVectors.GetLength(0))
                    {
                        List<double[]> patchGroup = new List<double[]>();
                        for (int k = 0; k < frameWindowLength; k++)
                        {
                            patchGroup.Add(similarityVectors[k + patchId]);
                        }

                        featureTransVectors.Add(DataTools.ConcatenateVectors(patchGroup));
                        patchId = patchId + stepSize;
                    }

                    allFeatureTransVectors.Add(featureTransVectors.ToArray());
                }

                // +++++++++++++++++++++++++++++++++++ Feature Transformation

                // +++++++++++++++++++++++++++++++++++ Temporal Summarization
                // Based on the resolution to generate features, the "numFrames" parameter will be set.
                // Each 24 single-frame patches form 1 second;
                // for each 24 patches, we generate vectors of min, mean, std, and max (plus skewness from Accord.net).
                // The pre-assumption is that each input recording is 1 minute long.

                // store features of different bands in lists
                List<double[,]> allMinFeatureVectors = new List<double[,]>();
                List<double[,]> allMeanFeatureVectors = new List<double[,]>();
                List<double[,]> allMaxFeatureVectors = new List<double[,]>();
                List<double[,]> allStdFeatureVectors = new List<double[,]>();
                List<double[,]> allSkewnessFeatureVectors = new List<double[,]>();

                // Each 24 frames form 1 second using WindowOverlap;
                // factors such as stepSize and maxPoolingFactor should be considered in temporal summarization
                int numFrames = 24 / (patchHeight * stepSize * maxPoolingFactor);

                foreach (var freqBandFeature in allFeatureTransVectors)
                {
                    List<double[]> minFeatureVectors = new List<double[]>();
                    List<double[]> meanFeatureVectors = new List<double[]>();
                    List<double[]> maxFeatureVectors = new List<double[]>();
                    List<double[]> stdFeatureVectors = new List<double[]>();
                    List<double[]> skewnessFeatureVectors = new List<double[]>();
                    int c = 0;
                    while (c + numFrames <= freqBandFeature.GetLength(0))
                    {
                        // First, make a list of patches that would be equal to the needed resolution (1 second, 60 seconds, etc.)
                        List<double[]> sequencesOfFramesList = new List<double[]>();
                        for (int i = c; i < c + numFrames; i++)
                        {
                            sequencesOfFramesList.Add(freqBandFeature[i]);
                        }

                        List<double> min = new List<double>();
                        List<double> mean = new List<double>();
                        List<double> std = new List<double>();
                        List<double> max = new List<double>();
                        List<double> skewness = new List<double>();
                        double[,] sequencesOfFrames = sequencesOfFramesList.ToArray().ToMatrix();

                        // Second, calculate min, mean, max, and standard deviation (plus skewness) of the vectors element-wise
                        for (int j = 0; j < sequencesOfFrames.GetLength(1); j++)
                        {
                            double[] temp = new double[sequencesOfFrames.GetLength(0)];
                            for (int k = 0; k < sequencesOfFrames.GetLength(0); k++)
                            {
                                temp[k] = sequencesOfFrames[k, j];
                            }

                            min.Add(temp.GetMinValue());
                            mean.Add(AutoAndCrossCorrelation.GetAverage(temp));
                            std.Add(AutoAndCrossCorrelation.GetStdev(temp));
                            max.Add(temp.GetMaxValue());
                            skewness.Add(temp.Skewness());
                        }

                        minFeatureVectors.Add(min.ToArray());
                        meanFeatureVectors.Add(mean.ToArray());
                        maxFeatureVectors.Add(max.ToArray());
                        stdFeatureVectors.Add(std.ToArray());
                        skewnessFeatureVectors.Add(skewness.ToArray());
                        c += numFrames;
                    }

                    // when (freqBandFeature.GetLength(0) % numFrames) != 0, a number of frames (< numFrames)
                    // (or the whole) at the end of the target recording are left unprocessed.
                    // this would be problematic when the resolution to generate the feature vector is 1 min,
                    // but the length of the target recording is a bit less than one min.
                    if (freqBandFeature.GetLength(0) % numFrames != 0 && freqBandFeature.GetLength(0) % numFrames > 1)
                    {
                        // First, make a list of patches that would be less than the required resolution
                        List<double[]> sequencesOfFramesList = new List<double[]>();
                        int unprocessedFrames = freqBandFeature.GetLength(0) % numFrames;
                        for (int i = freqBandFeature.GetLength(0) - unprocessedFrames; i < freqBandFeature.GetLength(0); i++)
                        {
                            sequencesOfFramesList.Add(freqBandFeature[i]);
                        }

                        List<double> min = new List<double>();
                        List<double> mean = new List<double>();
                        List<double> std = new List<double>();
                        List<double> max = new List<double>();
                        List<double> skewness = new List<double>();
                        double[,] sequencesOfFrames = sequencesOfFramesList.ToArray().ToMatrix();

                        // Second, calculate min, mean, max, and standard deviation (plus skewness) of the vectors element-wise
                        for (int j = 0; j < sequencesOfFrames.GetLength(1); j++)
                        {
                            double[] temp = new double[sequencesOfFrames.GetLength(0)];
                            for (int k = 0; k < sequencesOfFrames.GetLength(0); k++)
                            {
                                temp[k] = sequencesOfFrames[k, j];
                            }

                            min.Add(temp.GetMinValue());
                            mean.Add(AutoAndCrossCorrelation.GetAverage(temp));
                            std.Add(AutoAndCrossCorrelation.GetStdev(temp));
                            max.Add(temp.GetMaxValue());
                            skewness.Add(temp.Skewness());
                        }

                        minFeatureVectors.Add(min.ToArray());
                        meanFeatureVectors.Add(mean.ToArray());
                        maxFeatureVectors.Add(max.ToArray());
                        stdFeatureVectors.Add(std.ToArray());
                        skewnessFeatureVectors.Add(skewness.ToArray());
                    }

                    allMinFeatureVectors.Add(minFeatureVectors.ToArray().ToMatrix());
                    allMeanFeatureVectors.Add(meanFeatureVectors.ToArray().ToMatrix());
                    allMaxFeatureVectors.Add(maxFeatureVectors.ToArray().ToMatrix());
                    allStdFeatureVectors.Add(stdFeatureVectors.ToArray().ToMatrix());
                    allSkewnessFeatureVectors.Add(skewnessFeatureVectors.ToArray().ToMatrix());
                }

                // the keys of the following dictionaries are the file names,
                // and their values are a List<double[,]> whose Count is the number of subsegments
                // for which features were extracted; the number of freq bands is a user-defined parameter.
                // the 2D-arrays are the feature vectors.
                allFilesMinFeatureVectors.Add(fileInfo.Name + "-" + s.ToString(), allMinFeatureVectors);
                allFilesMeanFeatureVectors.Add(fileInfo.Name + "-" + s.ToString(), allMeanFeatureVectors);
                allFilesMaxFeatureVectors.Add(fileInfo.Name + "-" + s.ToString(), allMaxFeatureVectors);
                allFilesStdFeatureVectors.Add(fileInfo.Name + "-" + s.ToString(), allStdFeatureVectors);
                allFilesSkewnessFeatureVectors.Add(fileInfo.Name + "-" + s.ToString(), allSkewnessFeatureVectors);

                // +++++++++++++++++++++++++++++++++++ Temporal Summarization
            }
        }
    }

    // ++++++++++++++++++++++++++++++++++ Writing features to one file
    // First, concatenate mean, max, std for each second.
    // Then, write the features of each pre-defined frequency band into a separate CSV file.
    var filesName = allFilesMeanFeatureVectors.Keys.ToArray();
    var minFeatures = allFilesMinFeatureVectors.Values.ToArray();
    var meanFeatures = allFilesMeanFeatureVectors.Values.ToArray();
    var maxFeatures = allFilesMaxFeatureVectors.Values.ToArray();
    var stdFeatures = allFilesStdFeatureVectors.Values.ToArray();
    var skewnessFeatures = allFilesSkewnessFeatureVectors.Values.ToArray();

    // The number of elements in each list is the number of freq bands;
    // the size of each element in a list is the number of files for which features were generated;
    // the dimensions of each matrix are the number of feature vectors generated per file and the length of a feature vector.
    var allMins = new List<double[][,]>();
    var allMeans = new List<double[][,]>();
    var allMaxs = new List<double[][,]>();
    var allStds = new List<double[][,]>();
    var allSkewness = new List<double[][,]>();

    // looping over freq bands
    for (int i = 0; i < meanFeatures[0].Count; i++)
    {
        var mins = new List<double[,]>();
        var means = new List<double[,]>();
        var maxs = new List<double[,]>();
        var stds = new List<double[,]>();
        var skewnesses = new List<double[,]>();

        // looping over all files
        for (int k = 0; k < meanFeatures.Length; k++)
        {
            mins.Add(minFeatures[k].ToArray()[i]);
            means.Add(meanFeatures[k].ToArray()[i]);
            maxs.Add(maxFeatures[k].ToArray()[i]);
            stds.Add(stdFeatures[k].ToArray()[i]);
            skewnesses.Add(skewnessFeatures[k].ToArray()[i]);
        }

        allMins.Add(mins.ToArray());
        allMeans.Add(means.ToArray());
        allMaxs.Add(maxs.ToArray());
        allStds.Add(stds.ToArray());
        allSkewness.Add(skewnesses.ToArray());
    }

    // each element of the meanFeatures array is a list of features for different frequency bands.
    // looping over the number of freq bands
    for (int i = 0; i < allMeans.ToArray().GetLength(0); i++)
    {
        // creating output feature file based on the number of freq bands
        var outputFeatureFile = Path.Combine(outputPath, "FeatureVectors-" + i.ToString() + ".csv");

        // creating the header for the CSV file
        List<string> header = new List<string>();
        header.Add("file name");
        for (int j = 0; j < allMins.ToArray()[i][0].GetLength(1); j++)
        {
            header.Add("min" + j.ToString());
        }

        for (int j = 0; j < allMeans.ToArray()[i][0].GetLength(1); j++)
        {
            header.Add("mean" + j.ToString());
        }

        for (int j = 0; j < allMaxs.ToArray()[i][0].GetLength(1); j++)
        {
            header.Add("max" + j.ToString());
        }

        for (int j = 0; j < allStds.ToArray()[i][0].GetLength(1); j++)
        {
            header.Add("std" + j.ToString());
        }

        for (int j = 0; j < allSkewness.ToArray()[i][0].GetLength(1); j++)
        {
            header.Add("skewness" + j.ToString());
        }

        var csv = new StringBuilder();
        string content = string.Empty;
        foreach (var entry in header.ToArray())
        {
            content += entry.ToString() + ",";
        }

        csv.AppendLine(content);

        var allFilesFeatureVectors = new Dictionary<string, double[,]>();

        // looping over files
        for (int j = 0; j < allMeans.ToArray()[i].GetLength(0); j++)
        {
            // concatenating the min, mean, max, std, and skewness vectors together for the pre-defined resolution
            List<double[]> featureVectors = new List<double[]>();
            for (int k = 0; k < allMeans.ToArray()[i][j].ToJagged().GetLength(0); k++)
            {
                List<double[]> featureList = new List<double[]>
                {
                    allMins.ToArray()[i][j].ToJagged()[k],
                    allMeans.ToArray()[i][j].ToJagged()[k],
                    allMaxs.ToArray()[i][j].ToJagged()[k],
                    allStds.ToArray()[i][j].ToJagged()[k],
                    allSkewness.ToArray()[i][j].ToJagged()[k],
                };
                double[] featureVector = DataTools.ConcatenateVectors(featureList);
                featureVectors.Add(featureVector);
            }

            allFilesFeatureVectors.Add(filesName[j], featureVectors.ToArray().ToMatrix());
        }

        // writing feature vectors to the CSV file
        foreach (var entry in allFilesFeatureVectors)
        {
            content = string.Empty;
            content += entry.Key.ToString() + ",";
            foreach (var cent in entry.Value)
            {
                content += cent.ToString() + ",";
            }

            csv.AppendLine(content);
        }

        File.WriteAllText(outputFeatureFile, csv.ToString());
    }
}
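UnsupervisedFeatureExtraction above downsamples each band matrix with FeatureLearning.MaxPooling before cutting patches. That implementation is not reproduced here; the sketch below only illustrates the general idea of max pooling along the time (row) axis by an integer factor, with all names assumed.

// Illustrative max-pooling sketch, not the FeatureLearning.MaxPooling implementation.
// assumes: using System;
static double[,] MaxPoolRows(double[,] matrix, int factor)
{
    int rows = matrix.GetLength(0) / factor; // trailing remainder rows are dropped
    int cols = matrix.GetLength(1);
    var pooled = new double[rows, cols];
    for (int r = 0; r < rows; r++)
    {
        for (int c = 0; c < cols; c++)
        {
            // each output cell keeps the maximum of `factor` consecutive input rows
            double max = double.MinValue;
            for (int k = 0; k < factor; k++)
            {
                max = Math.Max(max, matrix[r * factor + k, c]);
            }

            pooled[r, c] = max;
        }
    }

    return pooled;
}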
private string tabelaParametroExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela de parametros");
    qry = @"select NOME, TIPO, VALOR from PARAMETRO where substring(NOME from 1 for 6) = 'POCKET'";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + parametroArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + parametroArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + parametroArquivoEnviarNome);
    return msg;
}
public override void WriteEventsFile(FileInfo destination, IEnumerable<EventBase> results)
{
    Csv.WriteToCsv(destination, results.Select(x => (AcousticEvent)x));
}
private string tabelaSaldoGradeExportar()
{
    DataTable dt;
    string msg = "";
    string qry = "";
    janela.MsgAppend("Exportando informações do saldo da grade ");
    if (D.Loja != "000000")
    {
        qry = @"
            SELECT G.COD_PRODUTO, G.COD_GRADE, G.COD_ITEMGRADE, G.COD_ATRIBUTO, G.COD_ITEMATRIBUTO,
                   (G.QUANTIDADEESTOQUE - G.QUANTIDADEEMPENHADA) AS QUANTIDADEESTOQUE, P.CODIGO
            FROM PRODUTO P
            INNER JOIN SALDO S ON S.COD_PRODUTO = P.CODIGO
            INNER JOIN SALDOGRADE G ON P.CODIGO = G.COD_PRODUTO
            WHERE (G.QUANTIDADEESTOQUE - G.QUANTIDADEEMPENHADA > 0)
              AND (S.PRECOVENDA IS NOT NULL)
              AND (P.APLICACAO IN ('A', 'V', 'B'))
              AND (P.ATIVO = '1')
              AND (S.QUANTIDADEESTOQUE - S.QUANTIDADEEMPENHADA > 0)
              and S.COD_LOJA='" + D.Loja + "'";
    }

    // else
    // {
    //     qry = @"
    //         SELECT P.CODIGO, P.DESCRICAO, P.COD_UNIDADE_VENDA, P.COD_GRADE, u.fracionada,
    //         P.REFERENCIA, S.PRECOVENDA, S.PRECOPROMOCAO, S.DATAINICIOPROMOCAO, S.DATAFIMPROMOCAO,
    //         (S.QUANTIDADEESTOQUE - S.QUANTIDADEEMPENHADA) as QUANTIDADEESTOQUE,
    //         sum(s.quantidadeestoque - s.quantidadeempenhada) as estoque
    //         FROM saldo s
    //         join produto p on (p.codigo = s.cod_produto)
    //         join unidade u on (u.codigo = p.cod_unidade_venda)
    //         WHERE PRECOVENDA is NOT NULL and P.APLICACAO IN ('A','V','B') and
    //         P.ATIVO ='1' and
    //         (S.QUANTIDADEESTOQUE - S.QUANTIDADEEMPENHADA) > 0
    //         group by P.CODIGO, P.DESCRICAO, P.COD_UNIDADE_VENDA, P.COD_GRADE, u.fracionada, P.REFERENCIA, S.PRECOVENDA, S.PRECOPROMOCAO, S.DATAINICIOPROMOCAO, S.DATAFIMPROMOCAO,
    //         (S.QUANTIDADEESTOQUE - S.QUANTIDADEEMPENHADA)";
    // }

    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + saldoGradeArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + saldoGradeArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + saldoGradeArquivoEnviarNome);
    return msg;
}
private string tabelaAtributoExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando informações dos atributos ");
    qry = @"SELECT CODIGO, DESCRICAO FROM ATRIBUTO";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + atributoArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + atributoArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + atributoArquivoEnviarNome);
    return msg;
}
/// <summary>
/// Exports a customer's open receivables.
/// If codFuncionario is null, exports them for all customers.
/// </summary>
/// <param name="codFuncionario"></param>
/// <returns></returns>
private string titulosEmAbertoExportar(string codFuncionario)
{
    List<string> titulosEmAbertoLst;
    string msg = "";
    string qryTitulos = @"
        select c.codigo as id_cliente,
               tr.COD_ESPECIEFINANCEIRA as id_especie_financeira,
               tr.VALOR as valor,
               tr.DATAVENCIMENTO as vencimento_data,
               tr.VALOR_PAGO as pago,
               tr.VALORJUROS_DEVIDO as juros_dinheiro,
               tr.SALDO_RECEBER_JUROS as a_receber
        from VW_TITULOS_ABERTOS_RECEBER tr, CLIENTE c
        where tr.VERIFICAR_CREDITO = 1
          and tr.cod_sacado = 'C' || c.codigo
          and c.listanegra = 1
          and current_date > tr.datavencimento ";
    if (Parametro.EnviarTodosClientesParaTodosOsVendedores == false)
    {
        qryTitulos += " and c.COD_FUNCIONARIO= '" + codFuncionario + "'";
    }

    StringBuilder TitulosAbertosEnviarNome = new StringBuilder("TITULOS_ABERTOS");
    DataTable dtTitulosAbertos = D.Bd.DataTablePreenche(qryTitulos);
    TitulosAbertosEnviarNome.Append(codFuncionario);
    titulosEmAbertoLst = new List<string>();
    titulosEmAbertoLst.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + TitulosAbertosEnviarNome + ".csv");
    Csv csvCliente = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + TitulosAbertosEnviarNome + ".csv");
    csvCliente.EscreveCsv(dtTitulosAbertos, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + TitulosAbertosEnviarNome + ".csv");
    NeoZip.Zip.ZipFiles(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + TitulosAbertosEnviarNome + ".zip", titulosEmAbertoLst);
    lstTituloAbertoEnviar.Add(TitulosAbertosEnviarNome + ".zip");
    return msg;
}
/// <summary>
/// Opens the file path specified as a CSV.
/// </summary>
/// <param name="strPath"></param>
/// <param name="csvDelimiter"></param>
/// <param name="fileHasHeaders"></param>
/// <returns></returns>
public static IDataReader OpenCsvFile(this string strPath, char csvDelimiter = ',', bool fileHasHeaders = true)
{
    return Csv.CreateDataReader(strPath, csvDelimiter, fileHasHeaders);
}
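A hedged usage sketch for the OpenCsvFile extension above; the file name and column layout are hypothetical, and the returned IDataReader is consumed with the standard ADO.NET pattern.

// assumes: using System; using System.Data;
// "data.csv" is a hypothetical semicolon-delimited file with a header row
using (IDataReader reader = "data.csv".OpenCsvFile(';', fileHasHeaders: true))
{
    while (reader.Read())
    {
        // print the first column of each record
        Console.WriteLine(reader.GetString(0));
    }
}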
private string clientesExportar()
{
    DataTable dtClientes;
    string msg = "";
    StringBuilder clienteArquivoEnviarNome;

    // only holds one client file at a time, but that is better than writing another function that zips a single file
    List<string> lstClienteCompactar;
    StringBuilder qryCliente;
    List<string> lstVendedor = new List<string>();
    lstVendedor = D.Bd.LstT("select CODIGO from funcionario where PARTICIPA_FORCA_VENDA='1'");
    if (Parametro.EnviarTodosClientesParaTodosOsVendedores == false)
    {
        janela.MsgAppend("Exportando " + lstVendedor.Count + " arquivos de clientes");
        qryCliente = new StringBuilder(@" select * from VW_CLIENTE_POCKET where COD_FUNCIONARIO=");
        StringBuilder qryCodFuncionario;
        for (int i = 0; i < lstVendedor.Count; ++i)
        {
            // exporting clients
            clienteArquivoEnviarNome = new StringBuilder("CLR");
            qryCodFuncionario = new StringBuilder("'");
            qryCodFuncionario.Append(lstVendedor[i]).Append("'");
            dtClientes = D.Bd.DataTablePreenche(qryCliente.ToString() + qryCodFuncionario.ToString());
            clienteArquivoEnviarNome.Append(lstVendedor[i]);
            lstClienteCompactar = new List<string>();
            lstClienteCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clienteArquivoEnviarNome + ".csv");
            Csv csvCliente = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clienteArquivoEnviarNome + ".csv");
            csvCliente.EscreveCsv(dtClientes, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clienteArquivoEnviarNome + ".csv");
            NeoZip.Zip.ZipFiles(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clienteArquivoEnviarNome + ".zip", lstClienteCompactar);
            lstClienteEnviar.Add(clienteArquivoEnviarNome + ".zip");
            titulosEmAbertoExportar(lstVendedor[i]);
        }
    }
    else
    {
        // janela.MsgAppend("Exportando todos os clientes para todos os " + lstVendedor.Count + " vendedores");
        qryCliente = new StringBuilder(@" select * from VW_CLIENTE_POCKET");
        for (int i = 0; i < lstVendedor.Count; ++i)
        {
            // exporting clients
            clienteArquivoEnviarNome = new StringBuilder("CLR");
            dtClientes = D.Bd.DataTablePreenche(qryCliente.ToString());
            clienteArquivoEnviarNome.Append(lstVendedor[i]);
            lstClienteCompactar = new List<string>();
            lstClienteCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clienteArquivoEnviarNome + ".csv");
            Csv csvCliente = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clienteArquivoEnviarNome + ".csv");
            csvCliente.EscreveCsv(dtClientes, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clienteArquivoEnviarNome + ".csv");
            NeoZip.Zip.ZipFiles(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + clienteArquivoEnviarNome + ".zip", lstClienteCompactar);
            lstClienteEnviar.Add(clienteArquivoEnviarNome + ".zip");
            titulosEmAbertoExportar(lstVendedor[i]);
        }
    }

    return msg;
}
public override void WriteEventsFile(FileInfo destination, IEnumerable<EventBase> results)
{
    Csv.WriteToCsv(destination, results);
}
private string produtoExportar()
{
    DataTable dtProdutos;
    string msg = "";
    string qry = "";
    string ApenasProdutoComEstoquePositivo = " and (S.QUANTIDADEESTOQUE - S.QUANTIDADEEMPENHADA) > 0 ";
    if (Parametro.VenderSemEstoque)
    {
        ApenasProdutoComEstoquePositivo = "";
    }
    janela.MsgAppend("Exportando produtos ");
    if (D.Loja != "000000")
    {
        qry = @"
            SELECT P.CODIGO, P.DESCRICAO, P.COD_UNIDADE_VENDA, P.COD_GRADE, P.REFERENCIA,
                   P.PERMITIR_VENDER_PESSOA_FISICA as PERMITIR_VENDA_NAO_CONTRIBUINTE,
                   COALESCE(S.PRECOVENDA, 0.0) as PRECOVENDA,
                   COALESCE(S.PRECOPROMOCAO, 0.0) as PRECOPROMOCAO,
                   S.DATAINICIOPROMOCAO, S.DATAFIMPROMOCAO,
                   S.QUANTIDADEESTOQUE - S.QUANTIDADEEMPENHADA AS QUANTIDADEESTOQUE,
                   U.FRACIONADA,
                   (case when (U.fator is null) then '1'
                         else case when (U.fator in ('0')) then '1' else U.fator end
                    end) as UNIDADE_FATOR
              FROM PRODUTO P
             INNER JOIN SALDO S ON S.COD_PRODUTO = P.CODIGO
             INNER JOIN UNIDADE U ON P.COD_UNIDADE_VENDA = U.CODIGO
             WHERE P.APLICACAO IN ('A','V','B') and P.ATIVO ='1' " + ApenasProdutoComEstoquePositivo + @" and S.COD_LOJA='" + D.Loja + "'";
    }
    dtProdutos = D.Bd.DataTablePreenche(qry);
    // Queue the CSV file for compression
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + produtoArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + produtoArquivoEnviarNome);
    csv.EscreveCsv(dtProdutos, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + produtoArquivoEnviarNome);
    return msg;
}
public void Execute(Arguments arguments)
{
    LoggedConsole.WriteLine("feature learning process...");
    var inputDir = @"D:\Mahnoosh\Liz\Least_Bittern\";
    var inputPath = Path.Combine(inputDir, "TrainSet\\one_min_recordings");
    var trainSetPath = Path.Combine(inputDir, "TrainSet\\train_data");
    // var testSetPath = Path.Combine(inputDir, "TestSet");
    var configPath = @"D:\Mahnoosh\Liz\Least_Bittern\FeatureLearningConfig.yml";
    var resultDir = Path.Combine(inputDir, "FeatureLearning");
    Directory.CreateDirectory(resultDir);
    // var outputMelImagePath = Path.Combine(resultDir, "MelScaleSpectrogram.png");
    // var outputNormMelImagePath = Path.Combine(resultDir, "NormalizedMelScaleSpectrogram.png");
    // var outputNoiseReducedMelImagePath = Path.Combine(resultDir, "NoiseReducedMelSpectrogram.png");
    // var outputReSpecImagePath = Path.Combine(resultDir, "ReconstructedSpectrogram.png");
    // var outputClusterImagePath = Path.Combine(resultDir, "Clusters.bmp");

    // Patch sampling from the 1-min recordings
    var configFile = configPath.ToFileInfo();
    if (configFile == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!configFile.Exists)
    {
        throw new ArgumentException($"Config file {configFile.FullName} not found");
    }

    var configuration = ConfigFile.Deserialize<FeatureLearningSettings>(configFile);
    int patchWidth = (configuration.MaxFreqBin - configuration.MinFreqBin + 1) / configuration.NumFreqBand;
    var clusteringOutputList = FeatureLearning.UnsupervisedFeatureLearning(configuration, inputPath);
    List<double[][]> allBandsCentroids = new List<double[][]>();
    for (int i = 0; i < clusteringOutputList.Count; i++)
    {
        var clusteringOutput = clusteringOutputList[i];

        // Write the centroids to a CSV file.
        // Note that Csv.WriteToCsv cannot write types like Dictionary<int, double[]>
        // (it has problems with arrays), so the dictionary values are converted to a
        // matrix and written with Csv.WriteMatrixToCsv. There may be a better way to do this.
        string pathToClusterCsvFile = Path.Combine(resultDir, "ClusterCentroids" + i.ToString() + ".csv");
        var clusterCentroids = clusteringOutput.ClusterIdCentroid.Values.ToArray();
        Csv.WriteMatrixToCsv(pathToClusterCsvFile.ToFileInfo(), clusterCentroids.ToMatrix());

        // Sort the clusters by size and write cluster ID and size to a CSV file
        Dictionary<int, double> clusterIdSize = clusteringOutput.ClusterIdSize;
        int[] sortOrder = KmeansClustering.SortClustersBasedOnSize(clusterIdSize);
        string pathToClusterSizeCsvFile = Path.Combine(resultDir, "ClusterSize" + i.ToString() + ".csv");
        Csv.WriteToCsv(pathToClusterSizeCsvFile.ToFileInfo(), clusterIdSize);

        // Draw the cluster image directly from the clustering output
        List<KeyValuePair<int, double[]>> list = clusteringOutput.ClusterIdCentroid.ToList();
        double[][] centroids = new double[list.Count][];
        for (int j = 0; j < list.Count; j++)
        {
            centroids[j] = list[j].Value;
        }

        allBandsCentroids.Add(centroids);
        List<double[,]> allCentroids = new List<double[,]>();
        for (int k = 0; k < centroids.Length; k++)
        {
            // convert each centroid to a matrix in order of cluster ID
            // double[,] cent = PatchSampling.ArrayToMatrixByColumn(centroids[i], patchWidth, patchHeight);
            // OR: in order of cluster size
            double[,] cent = MatrixTools.ArrayToMatrixByColumn(centroids[sortOrder[k]], patchWidth, configuration.PatchHeight);

            // normalize each centroid
            double[,] normCent = DataTools.normalise(cent);

            // add a row of zeros to each centroid
            double[,] cent2 = PatchSampling.AddRow(normCent);
            allCentroids.Add(cent2);
        }

        // concatenate all centroids
        double[,] mergedCentroidMatrix = PatchSampling.ListOf2DArrayToOne2DArray(allCentroids);

        // draw the clusters
        var clusterImage = ImageTools.DrawMatrixWithoutNormalisation(mergedCentroidMatrix);
        clusterImage.RotateFlip(RotateFlipType.Rotate270FlipNone);
        var outputClusteringImage = Path.Combine(resultDir, "ClustersWithGrid" + i.ToString() + ".bmp");
        clusterImage.Save(outputClusteringImage);
    }

    // extract features
    FeatureExtraction.UnsupervisedFeatureExtraction(configuration, allBandsCentroids, trainSetPath, resultDir);
    LoggedConsole.WriteLine("Done...");
}
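The comment inside the loop above explains that Csv.WriteToCsv cannot serialize Dictionary<int, double[]>, so centroids are flattened into a matrix before Csv.WriteMatrixToCsv is called. A minimal standalone sketch of that conversion; the helper name is invented, and it assumes every centroid array has the same length.

// Hedged sketch: flatten Dictionary<int, double[]> centroids into a double[,]
// so a matrix-oriented CSV writer can serialize them. Rows follow ascending cluster id.
using System.Collections.Generic;
using System.Linq;

static double[,] CentroidsToMatrix(Dictionary<int, double[]> clusterIdCentroid)
{
    int[] ids = clusterIdCentroid.Keys.OrderBy(id => id).ToArray();
    int cols = clusterIdCentroid[ids[0]].Length; // assumes equal-length centroids
    var matrix = new double[ids.Length, cols];
    for (int r = 0; r < ids.Length; r++)
    {
        double[] centroid = clusterIdCentroid[ids[r]];
        for (int c = 0; c < cols; c++)
        {
            matrix[r, c] = centroid[c];
        }
    }
    return matrix;
}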
public static void Execute(Arguments arguments)
{
    if (arguments == null)
    {
        throw new NoDeveloperMethodException();
    }

    string date = "# DATE AND TIME: " + DateTime.Now;
    LoggedConsole.WriteLine("# DRAW LONG DURATION SPECTROGRAMS DERIVED FROM CSV FILES OF SPECTRAL INDICES OBTAINED FROM AN AUDIO RECORDING");
    LoggedConsole.WriteLine(date);
    LoggedConsole.WriteLine("# Spectrogram Config file: " + arguments.FalseColourSpectrogramConfig);
    LoggedConsole.WriteLine("# Index Properties Config file: " + arguments.IndexPropertiesConfig);
    LoggedConsole.WriteLine();

    (FileInfo indexGenerationDataFile, FileInfo indexDistributionsFile) = ZoomParameters.CheckNeededFilesExist(arguments.InputDataDirectory.ToDirectoryInfo());
    var indexGenerationData = Json.Deserialize<IndexGenerationData>(indexGenerationDataFile);

    // Spectral distribution statistics are required only when calculating difference spectrograms.
    Dictionary<string, IndexDistributions.SpectralStats> indexDistributionsData = null;
    if (indexDistributionsFile != null && indexDistributionsFile.Exists)
    {
        indexDistributionsData = IndexDistributions.Deserialize(indexDistributionsFile);
    }

    // This config can be found in IndexGenerationData. If the config argument is not specified, simply take it from the icd file.
    LdSpectrogramConfig config;
    if (arguments.FalseColourSpectrogramConfig == null)
    {
        config = indexGenerationData.LongDurationSpectrogramConfig;
    }
    else
    {
        config = LdSpectrogramConfig.ReadYamlToConfig(arguments.FalseColourSpectrogramConfig.ToFileInfo());
    }

    FilenameHelpers.ParseAnalysisFileName(indexGenerationDataFile, out var originalBaseName, out var _, out var _);

    // CHECK FOR ERROR SEGMENTS - get zero signal array
    var input = arguments.InputDataDirectory.ToDirectoryInfo();
    var csvFile = new FileInfo(Path.Combine(input.FullName, originalBaseName + "__Towsey.Acoustic.Indices.csv"));
    //Dictionary<string, double[]> summaryIndices = CsvTools.ReadCSVFile2Dictionary(csvFile.FullName);
    //var summaryIndices = Csv.ReadFromCsv<Dictionary<string, double[]>>(csvFile);
    var summaryIndices = Csv.ReadFromCsv<SummaryIndexValues>(csvFile);
    var indexErrors = GapsAndJoins.DataIntegrityCheckForZeroSignal(summaryIndices);

    //config.IndexCalculationDuration = TimeSpan.FromSeconds(1.0);
    //config.XAxisTicInterval = TimeSpan.FromSeconds(60.0);
    //config.IndexCalculationDuration = TimeSpan.FromSeconds(60.0);
    //config.XAxisTicInterval = TimeSpan.FromSeconds(3600.0);
    LDSpectrogramRGB.DrawSpectrogramsFromSpectralIndices(
        inputDirectory: input,
        outputDirectory: arguments.OutputDirectory.ToDirectoryInfo(),
        ldSpectrogramConfig: config,
        indexPropertiesConfigPath: arguments.IndexPropertiesConfig.ToFileInfo(),
        indexGenerationData: indexGenerationData,
        basename: originalBaseName,
        analysisType: AcousticIndices.TowseyAcoustic,
        indexSpectrograms: null,
        indexStatistics: indexDistributionsData,
        segmentErrors: indexErrors,
        imageChrome: false.ToImageChrome());

    Log.Success("Draw Long Duration Spectrograms complete!");
}
public AnalysisResult2 Analyze<T>(AnalysisSettings analysisSettings, SegmentSettings<T> segmentSettings)
{
    var acousticIndicesConfiguration = (AcousticIndicesConfig)analysisSettings.AnalysisAnalyzerSpecificConfiguration;
    var indexCalculationDuration = acousticIndicesConfiguration.IndexCalculationDuration.Seconds();
    var audioFile = segmentSettings.SegmentAudioFile;
    var recording = new AudioRecording(audioFile.FullName);
    var outputDirectory = segmentSettings.SegmentOutputDirectory;
    var analysisResults = new AnalysisResult2(analysisSettings, segmentSettings, recording.Duration);
    analysisResults.AnalysisIdentifier = this.Identifier;

    // calculate indices for each subsegment
    IndexCalculateResult[] subsegmentResults = CalculateIndicesInSubsegments(
        recording,
        segmentSettings.SegmentStartOffset,
        segmentSettings.AnalysisIdealSegmentDuration,
        indexCalculationDuration,
        acousticIndicesConfiguration.IndexProperties,
        segmentSettings.Segment.SourceMetadata.SampleRate,
        acousticIndicesConfiguration);

    var trackScores = new List<Plot>(subsegmentResults.Length);
    var tracks = new List<Track>(subsegmentResults.Length);
    analysisResults.SummaryIndices = new SummaryIndexBase[subsegmentResults.Length];
    analysisResults.SpectralIndices = new SpectralIndexBase[subsegmentResults.Length];
    for (int i = 0; i < subsegmentResults.Length; i++)
    {
        var indexCalculateResult = subsegmentResults[i];
        indexCalculateResult.SummaryIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;
        indexCalculateResult.SpectralIndexValues.FileName = segmentSettings.Segment.SourceMetadata.Identifier;
        analysisResults.SummaryIndices[i] = indexCalculateResult.SummaryIndexValues;
        analysisResults.SpectralIndices[i] = indexCalculateResult.SpectralIndexValues;
        trackScores.AddRange(indexCalculateResult.TrackScores);
        if (indexCalculateResult.Tracks != null)
        {
            tracks.AddRange(indexCalculateResult.Tracks);
        }
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        this.WriteSummaryIndicesFile(segmentSettings.SegmentSummaryIndicesFile, analysisResults.SummaryIndices);
        analysisResults.SummaryIndicesFile = segmentSettings.SegmentSummaryIndicesFile;
    }

    if (analysisSettings.AnalysisDataSaveBehavior)
    {
        analysisResults.SpectraIndicesFiles = WriteSpectrumIndicesFilesCustom(
            segmentSettings.SegmentSpectrumIndicesDirectory,
            Path.GetFileNameWithoutExtension(segmentSettings.SegmentAudioFile.Name),
            analysisResults.SpectralIndices);
    }

    // Write the segment spectrogram (typically of one minute duration) to CSV.
    // This is required to produce zoomed spectrograms at a resolution greater than 0.2 seconds/pixel.
    bool saveSonogramData = analysisSettings.Configuration.GetBoolOrNull(AnalysisKeys.SaveSonogramData) ?? false;
    if (saveSonogramData || analysisSettings.AnalysisImageSaveBehavior.ShouldSave(analysisResults.Events.Length))
    {
        var sonoConfig = new SonogramConfig(); // default values config
        sonoConfig.SourceFName = recording.FilePath;
        sonoConfig.WindowSize = acousticIndicesConfiguration.FrameLength;
        sonoConfig.WindowStep = analysisSettings.Configuration.GetIntOrNull(AnalysisKeys.FrameStep) ?? sonoConfig.WindowSize; // default = no overlap
        sonoConfig.WindowOverlap = (sonoConfig.WindowSize - sonoConfig.WindowStep) / (double)sonoConfig.WindowSize;

        // Linear or Octave frequency scale?
        bool octaveScale = analysisSettings.Configuration.GetBoolOrNull(AnalysisKeys.KeyOctaveFreqScale) ?? false;
        if (octaveScale)
        {
            sonoConfig.WindowStep = sonoConfig.WindowSize;
            sonoConfig.WindowOverlap = (sonoConfig.WindowSize - sonoConfig.WindowStep) / (double)sonoConfig.WindowSize;
        }

        ////sonoConfig.NoiseReductionType = NoiseReductionType.NONE; // the default
        ////sonoConfig.NoiseReductionType = NoiseReductionType.STANDARD;
        var sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);

        // remove the DC row of the spectrogram
        sonogram.Data = MatrixTools.Submatrix(sonogram.Data, 0, 1, sonogram.Data.GetLength(0) - 1, sonogram.Data.GetLength(1) - 1);
        if (analysisSettings.AnalysisImageSaveBehavior.ShouldSave())
        {
            string imagePath = Path.Combine(outputDirectory.FullName, segmentSettings.SegmentImageFile.Name);

            // NOTE: hits (SPT in this case) is intentionally not supported
            var image = DrawSonogram(sonogram, null, trackScores, tracks);
            image.Save(imagePath);
            analysisResults.ImageFile = new FileInfo(imagePath);
        }

        if (saveSonogramData)
        {
            string csvPath = Path.Combine(outputDirectory.FullName, recording.BaseName + ".csv");
            Csv.WriteMatrixToCsv(csvPath.ToFileInfo(), sonogram.Data);
        }
    }

    return analysisResults;
}
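The WindowOverlap expression above relies on a cast to avoid integer truncation. A quick standalone check of the same formula with hypothetical values:

// Quick check of the overlap formula used above (values are hypothetical).
using System;

class OverlapCheck
{
    static void Main()
    {
        int windowSize = 1024;
        int windowStep = 512; // a 50% hop
        // Same expression as in the snippet; the (double) cast matters:
        double overlap = (windowSize - windowStep) / (double)windowSize;
        Console.WriteLine(overlap); // prints 0.5
        // Without the cast, 512 / 1024 would truncate to 0 in integer division.
    }
}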
public void TestLoadCsv()
{
    Csv csv = new Csv(testCsvPath);
    csv.Load();
}
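The test above only verifies that Load() does not throw. A hedged sketch of a stronger variant, assuming MSTest; the Headers property is an assumption about this Csv class's API and may need renaming.

[TestMethod]
public void TestLoadCsvPopulatesHeaders()
{
    // Hedged sketch: same arrange as above, plus a minimal assertion so the test
    // fails if parsing silently produces nothing. 'Headers' is an assumed property.
    Csv csv = new Csv(testCsvPath);
    csv.Load();
    Assert.IsTrue(csv.Headers.Count > 0, "expected at least one header after Load()");
}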
private string tabelaItemTabelaPrecoExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela de preço específica ");
    qry = @"select COD_TABELAPRECO, COD_PRODUTO, VALOR, QTD_MINIMA,
                   VALOR1, QTD_MINIMA1, VALOR2, QTD_MINIMA2, VALOR3, QTD_MINIMA3,
                   DESCONTO_MAXIMO, ACRESCIMO_MAXIMO, TIPOVALOR
              from ITEMTABELAPRECOPRODUTO";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemTabelaPrecoArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemTabelaPrecoArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemTabelaPrecoArquivoEnviarNome);
    return msg;
}
private string tabelaCidadeExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela de cidades ");
    qry = @" SELECT CODIGO, DESCRICAO, COD_UF
               FROM CIDADE
              WHERE COD_UF='BA'
              UNION
             SELECT CIDADE.CODIGO, CIDADE.descricao, CIDADE.COD_UF
               from CIDADE, cliente
              WHERE cliente.cod_cidade = cidade.codigo ";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + cidadeArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + cidadeArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + cidadeArquivoEnviarNome);
    return msg;
}
public static void WriteEventsFileStatic(FileInfo destination, IEnumerable<EventBase> results)
{
    Csv.WriteToCsv(destination, results);
}
/// <summary>
/// Reads a CSV string into an IDataReader.
/// </summary>
/// <param name="data">The CSV content to parse.</param>
/// <param name="csvDelimiter">Field delimiter; a comma by default.</param>
/// <param name="hasHeaders">Whether the first row holds column headers.</param>
/// <returns>An IDataReader over the parsed records.</returns>
public static IDataReader ReadCsvString(this string data, char csvDelimiter = ',', bool hasHeaders = true)
{
    return Csv.ReadString(data, hasHeaders, csvDelimiter);
}
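A hedged usage sketch for the extension above, with made-up CSV content; it assumes the extension method's namespace is imported.

// Hedged usage sketch for ReadCsvString; the CSV text is invented.
using System;
using System.Data;

class ReadCsvStringDemo
{
    static void Main()
    {
        string data = "Id,Name\n1,Ann\n2,Bo";
        using (IDataReader reader = data.ReadCsvString())
        {
            while (reader.Read())
            {
                // Access fields by ordinal; the header row named the columns.
                Console.WriteLine("{0}: {1}", reader.GetValue(0), reader.GetValue(1));
            }
        }
    }
}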
private string tabelaEspecieFinaceiraExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela espécie financeira");
    qry = @"SELECT CODIGO, DESCRICAO, VERIFICA_CREDITO FROM ESPECIEFINANCEIRA";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + especieFinanceiraArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + especieFinanceiraArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + especieFinanceiraArquivoEnviarNome);
    return msg;
}
public override void WriteSummaryIndicesFile(FileInfo destination, IEnumerable<SummaryIndexBase> results)
{
    Csv.WriteToCsv(destination, results);
}
private string tabelaFormaPagamentoExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela forma de pagamento ");
    qry = @"select f.codigo, f.prazo_medio, f.descricao,
                   (select count(i.cod_formapagamento)
                      from itemformapagamento i
                     where i.cod_formapagamento = f.codigo) as parcelas,
                   parcela_minima
              from formapagamento f
             WHERE PERMITIRRECEBIMENTO='1'";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + formaPagamentoArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + formaPagamentoArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + formaPagamentoArquivoEnviarNome);
    return msg;
}
public static void GenerateSpectrograms()
{
    var recordingDir = @"M:\Liz\SupervisedPatchSamplingSet\Recordings\";
    var resultDir = @"M:\Liz\SupervisedPatchSamplingSet\";

    // check whether there is any file in the folder/subfolders
    if (Directory.GetFiles(recordingDir, "*", SearchOption.AllDirectories).Length == 0)
    {
        throw new ArgumentException("The folder of recordings is empty...");
    }

    int frameSize = 1024;
    int finalBinCount = 256;
    FreqScaleType scaleType = FreqScaleType.Mel;
    var settings = new SpectrogramSettings()
    {
        WindowSize = frameSize,

        // With the default frame size of 1024, each frame lasts 0.04644 seconds.
        // The question is how many single frames (i.e., patch height equal to 1)
        // should be selected to form one second; "WindowOverlap" is chosen so that
        // 24 single frames span one second. Note that this value must be recalculated
        // whenever the frame size changes; the config file does not yet handle this.
        WindowOverlap = 0.10725204,
        DoMelScale = scaleType == FreqScaleType.Mel,
        MelBinCount = (scaleType == FreqScaleType.Mel) ? finalBinCount : frameSize / 2,
        NoiseReductionType = NoiseReductionType.None,
        NoiseReductionParameter = 0.0,
    };

    foreach (string filePath in Directory.GetFiles(recordingDir, "*.wav"))
    {
        FileInfo fileInfo = filePath.ToFileInfo();

        // process the wav file only if it is not empty
        if (fileInfo.Length != 0)
        {
            var recording = new AudioRecording(filePath);
            settings.SourceFileName = recording.BaseName;
            var amplitudeSpectrogram = new AmplitudeSpectrogram(settings, recording.WavReader);
            var decibelSpectrogram = new DecibelSpectrogram(amplitudeSpectrogram);

            // DO NOISE REDUCTION
            decibelSpectrogram.Data = PcaWhitening.NoiseReduction(decibelSpectrogram.Data);

            // draw the spectrogram
            var attributes = new SpectrogramAttributes()
            {
                NyquistFrequency = decibelSpectrogram.Attributes.NyquistFrequency,
                Duration = decibelSpectrogram.Attributes.Duration,
            };
            Image image = DecibelSpectrogram.DrawSpectrogramAnnotated(decibelSpectrogram.Data, settings, attributes);
            string pathToSpectrogramFiles = Path.Combine(resultDir, "Spectrograms", settings.SourceFileName + ".bmp");
            image.Save(pathToSpectrogramFiles);

            // write the matrix to a csv file
            string pathToMatrixFiles = Path.Combine(resultDir, "Matrices", settings.SourceFileName + ".csv");
            Csv.WriteMatrixToCsv(pathToMatrixFiles.ToFileInfo(), decibelSpectrogram.Data);
        }
    }
}
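The WindowOverlap comment above asks for a recalculation whenever the frame size changes. A hedged sketch of that arithmetic, assuming a 22050 Hz sample rate (consistent with the 0.04644 s frame duration the comment cites) and a 24-frames-per-second target; both inputs are assumptions, and the snippet's exact constant implies slightly different inputs were used.

// Hedged sketch: derive WindowOverlap from a frames-per-second target.
using System;

class OverlapFromFps
{
    static double OverlapFor(int windowSize, int sampleRate, double framesPerSecond)
    {
        double hopSamples = sampleRate / framesPerSecond; // samples between frame starts
        return 1.0 - (hopSamples / windowSize);
    }

    static void Main()
    {
        // With these assumed inputs the result is ~0.1028; the snippet's
        // 0.10725204 was presumably derived from slightly different inputs.
        Console.WriteLine(OverlapFor(1024, 22050, 24.0));
    }
}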
private string tabelaItemFormaPagamentoExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando tabela item forma pagamento");
    qry = @"SELECT i.COD_FORMAPAGAMENTO, i.CODIGO, i.COD_ESPECIEFINANCEIRA,
                   i.PRAZOVENCIMENTO, i.PERCENTUALPAGAMENTO
              FROM ITEMFORMAPAGAMENTO i, FORMAPAGAMENTO F
             where i.COD_FORMAPAGAMENTO = f.CODIGO
               AND f.PERMITIRRECEBIMENTO = 1";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemFormaPagamentoArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemFormaPagamentoArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemFormaPagamentoArquivoEnviarNome);
    return msg;
}
public void WriteSummaryIndicesFile(FileInfo destination, IEnumerable<SummaryIndexBase> results)
{
    Csv.WriteToCsv(destination, results.Cast<SummaryIndexValues>());
}
private string tabelaItemGradeExportar()
{
    DataTable dt;
    string msg = "";
    string qry;
    janela.MsgAppend("Exportando informações dos itens das grades ");
    qry = @"SELECT COD_GRADE, CODIGO, DESCRICAO FROM ITEMGRADE";
    dt = D.Bd.DataTablePreenche(qry);
    lstArquivosCompactar.Add(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemGradeArquivoEnviarNome);
    Csv csv = new Csv(D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemGradeArquivoEnviarNome);
    csv.EscreveCsv(dt, D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + itemGradeArquivoEnviarNome);
    return msg;
}
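tabelaItemGradeExportar and the other export methods above all repeat the same query → DataTable → EscreveCsv → queue-for-zip sequence. A hedged consolidation sketch: every member it touches (janela, D.Bd, lstArquivosCompactar, Csv.EscreveCsv) appears in the snippets, but the helper itself and its name are invented.

// Hedged sketch: one helper for the repeated table-export pattern.
private string ExportaTabela(string mensagem, string qry, string arquivoEnviarNome)
{
    janela.MsgAppend(mensagem);
    DataTable dt = D.Bd.DataTablePreenche(qry);
    string caminho = D.ApplicationDirectory + D.TabelasSincronizacaoDiretorio + arquivoEnviarNome;
    lstArquivosCompactar.Add(caminho); // queue the CSV for compression
    Csv csv = new Csv(caminho);
    csv.EscreveCsv(dt, caminho);
    return "";
}

// Usage, mirroring tabelaItemGradeExportar:
// return ExportaTabela("Exportando informações dos itens das grades ",
//     "SELECT COD_GRADE, CODIGO, DESCRICAO FROM ITEMGRADE",
//     itemGradeArquivoEnviarNome);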