public void TestKmeans()
{
    FloatMatrixIndexer data = new FloatMatrixIndexer(new float[,] { { 1, 2, 3, 1 }, { 2, 3, 4, 2 } });
    KmeansClustering.GenerateClusters(data.Transpose(), 2, 100, 1, i => { }, out var clusterCenters, out var clusterIndices);
    Assert.AreEqual(2, clusterCenters.GetLength(0));
}
/* K-MEANS CLUSTERING ALGORITHM - custom implementation */
public Cluster[] computeClusters(List<Frame> framesToSearch)
{
    // Build the dataset: one two-feature entity (frame centre x and z) per frame.
    KmeansClustering.Entity[] dataset = new KmeansClustering.Entity[framesToSearch.Count];
    int K = this.UAVs.Length;
    int i = 0;
    foreach (Frame f in framesToSearch)
    {
        dataset[i] = new KmeansClustering.Entity(2);
        dataset[i].setIthFeature(0, f.center.x);
        dataset[i].setIthFeature(1, f.center.z);
        // Debug.Log(dataset[i]);
        i++;
    }

    // dataset = centres of the frames, K = number of UAVs
    KmeansClustering clusterer = new KmeansClustering(dataset, K);
    KmeansClustering.ClusteringSolution sol = clusterer.runKmeans(5); // 5 random starts

    // The final number of clusters can be less than K when there are fewer entities than UAVs.
    Cluster[] clustersArray = new Cluster[sol.finalK];
    for (i = 0; i < sol.finalK; i++)
    {
        clustersArray[i] = new Cluster();
    }

    // Assign every frame to the cluster its data point was mapped to.
    i = 0;
    foreach (Frame f in framesToSearch)
    {
        // Debug.Log("Point: " + dataset[i] + " - Cluster: " + sol.mapDataPointToCluster[i]);
        clustersArray[sol.mapDataPointToCluster[i]].addFrame(f);
        i++;
    }

    // Debug output: draw each cluster centroid and its frames in a distinct colour.
    foreach (Cluster c in clustersArray)
    {
        c.debugCentroid();
        foreach (Frame f in c.frames)
        {
            f.displayDebug(colorList[currentColor], f.profitDensity);
        }

        currentColor++;
        currentColor %= colorList.Length;
    }

    return clustersArray;
}
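The internals of the custom KmeansClustering class above are not shown. For reference, here is a minimal, dependency-free sketch of the underlying Lloyd iteration; it is not the author's implementation, and the stopping rule, seeding, and empty-cluster handling are assumptions.

// Minimal k-means (Lloyd's algorithm) sketch, for reference only.
using System;

public static class KmeansSketch
{
    // Returns the cluster index of each point; the final centroids are written to 'centroids'.
    public static int[] Run(double[][] points, int k, int maxIterations, out double[][] centroids)
    {
        var rng = new Random(0);                      // assumption: fixed seed for repeatability
        int n = points.Length;
        int dim = points[0].Length;
        centroids = new double[k][];
        for (int c = 0; c < k; c++)
        {
            centroids[c] = (double[])points[rng.Next(n)].Clone();  // random initial centroids
        }

        int[] assignment = new int[n];
        for (int iter = 0; iter < maxIterations; iter++)
        {
            bool changed = false;

            // Assignment step: each point goes to its nearest centroid (squared Euclidean distance).
            for (int i = 0; i < n; i++)
            {
                int best = 0;
                double bestDist = double.MaxValue;
                for (int c = 0; c < k; c++)
                {
                    double dist = 0.0;
                    for (int d = 0; d < dim; d++)
                    {
                        double diff = points[i][d] - centroids[c][d];
                        dist += diff * diff;
                    }

                    if (dist < bestDist) { bestDist = dist; best = c; }
                }

                if (assignment[i] != best) { assignment[i] = best; changed = true; }
            }

            if (!changed && iter > 0) { break; }      // converged: no assignment changed

            // Update step: each centroid becomes the mean of its assigned points.
            var sums = new double[k][];
            var counts = new int[k];
            for (int c = 0; c < k; c++) { sums[c] = new double[dim]; }
            for (int i = 0; i < n; i++)
            {
                counts[assignment[i]]++;
                for (int d = 0; d < dim; d++) { sums[assignment[i]][d] += points[i][d]; }
            }

            for (int c = 0; c < k; c++)
            {
                if (counts[c] == 0) { continue; }     // empty cluster: keep the previous centroid
                for (int d = 0; d < dim; d++) { centroids[c][d] = sums[c][d] / counts[c]; }
            }
        }

        return assignment;
    }
}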
/* K-MEANS CLUSTERING ALGORITHM - ALGLIB */
public Cluster[] computeClustersWithAlgLib(List<Frame> framesToSearch)
{
    int i, K = this.UAVs.Length;
    Cluster[] clustersArray = new Cluster[K];
    for (i = 0; i < K; i++)
    {
        clustersArray[i] = new Cluster();
    }

    // array of points (frames' centers used in the clustering algorithm)
    double[,] dataset = new double[framesToSearch.Count, 2]; // e.g. {{1,1},{1,2},{4,1},{2,3},{4,1.5}}
    i = 0;
    foreach (Frame f in framesToSearch)
    {
        dataset[i, 0] = f.center.x;
        dataset[i, 1] = f.center.z;
        i++;
    }

    alglib.kmeansreport rep = KmeansClustering.computeClusters(dataset, K);

    i = 0;
    foreach (Frame f in framesToSearch)
    {
        // Debug.Log(dataset[i, 0] + "," + dataset[i, 1] + " - cluster:" + rep.cidx[i]);
        clustersArray[rep.cidx[i]].addFrame(f);
        i++;
    }

    foreach (Cluster c in clustersArray)
    {
        c.debugCentroid();
        foreach (Frame f in c.frames)
        {
            f.displayDebug(colorList[currentColor], f.profitDensity);
        }

        currentColor++;
        currentColor %= colorList.Length;
    }

    return clustersArray;
}
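The KmeansClustering.computeClusters wrapper called above is not shown. A minimal sketch of such a wrapper over ALGLIB's clusterizer API might look like the following; the restart count and iteration limit are assumptions, not values from the original code.

// Hypothetical wrapper around ALGLIB k-means; not the original helper.
public static class KmeansClusteringAlglibSketch
{
    public static alglib.kmeansreport computeClusters(double[,] dataset, int k)
    {
        alglib.clusterizercreate(out alglib.clusterizerstate state);

        // disttype = 2 selects Euclidean distance, the only metric ALGLIB k-means supports.
        alglib.clusterizersetpoints(state, dataset, 2);

        // Assumed settings: 5 random restarts, 0 = default iteration limit.
        alglib.clusterizersetkmeanslimits(state, 5, 0);

        alglib.clusterizerrunkmeans(state, k, out alglib.kmeansreport report);

        // report.cidx[i] is the cluster index of point i; report.c holds the cluster centers.
        return report;
    }
}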
public void Execute(Arguments arguments)
{
    LoggedConsole.WriteLine("feature learning process...");

    var inputDir = @"D:\Mahnoosh\Liz\Least_Bittern\";
    var inputPath = Path.Combine(inputDir, "TrainSet\\one_min_recordings");
    var trainSetPath = Path.Combine(inputDir, "TrainSet\\train_data");
    // var testSetPath = Path.Combine(inputDir, "TestSet");
    var configPath = @"D:\Mahnoosh\Liz\Least_Bittern\FeatureLearningConfig.yml";
    var resultDir = Path.Combine(inputDir, "FeatureLearning");
    Directory.CreateDirectory(resultDir);

    // var outputMelImagePath = Path.Combine(resultDir, "MelScaleSpectrogram.png");
    // var outputNormMelImagePath = Path.Combine(resultDir, "NormalizedMelScaleSpectrogram.png");
    // var outputNoiseReducedMelImagePath = Path.Combine(resultDir, "NoiseReducedMelSpectrogram.png");
    // var outputReSpecImagePath = Path.Combine(resultDir, "ReconstrcutedSpectrogram.png");
    // var outputClusterImagePath = Path.Combine(resultDir, "Clusters.bmp");

    // +++++++++++++++++++++++++++++++++++++++++++++++++ patch sampling from 1-min recordings
    var configFile = configPath.ToFileInfo();
    if (configFile == null)
    {
        throw new FileNotFoundException("No config file argument provided");
    }
    else if (!configFile.Exists)
    {
        throw new ArgumentException($"Config file {configFile.FullName} not found");
    }

    var configuration = ConfigFile.Deserialize<FeatureLearningSettings>(configFile);
    int patchWidth = (configuration.MaxFreqBin - configuration.MinFreqBin + 1) / configuration.NumFreqBand;

    var clusteringOutputList = FeatureLearning.UnsupervisedFeatureLearning(configuration, inputPath);

    List<double[][]> allBandsCentroids = new List<double[][]>();
    for (int i = 0; i < clusteringOutputList.Count; i++)
    {
        var clusteringOutput = clusteringOutputList[i];

        // Write the cluster centroids to a CSV file.
        // Note: Csv.WriteToCsv cannot write types like Dictionary<int, double[]> (problems with arrays),
        // so the dictionary values are converted to a matrix and written with Csv.WriteMatrixToCsv.
        // There might be a better way to do this.
        string pathToClusterCsvFile = Path.Combine(resultDir, "ClusterCentroids" + i.ToString() + ".csv");
        var clusterCentroids = clusteringOutput.ClusterIdCentroid.Values.ToArray();
        Csv.WriteMatrixToCsv(pathToClusterCsvFile.ToFileInfo(), clusterCentroids.ToMatrix());

        // Sort clusters based on size and write cluster ID and size to a CSV file.
        Dictionary<int, double> clusterIdSize = clusteringOutput.ClusterIdSize;
        int[] sortOrder = KmeansClustering.SortClustersBasedOnSize(clusterIdSize);
        string pathToClusterSizeCsvFile = Path.Combine(resultDir, "ClusterSize" + i.ToString() + ".csv");
        Csv.WriteToCsv(pathToClusterSizeCsvFile.ToFileInfo(), clusterIdSize);

        // Draw the cluster image directly from the clustering output.
        List<KeyValuePair<int, double[]>> list = clusteringOutput.ClusterIdCentroid.ToList();
        double[][] centroids = new double[list.Count][];
        for (int j = 0; j < list.Count; j++)
        {
            centroids[j] = list[j].Value;
        }

        allBandsCentroids.Add(centroids);

        List<double[,]> allCentroids = new List<double[,]>();
        for (int k = 0; k < centroids.Length; k++)
        {
            // convert each centroid to a matrix, here in order of cluster size
            // for cluster-ID order use: PatchSampling.ArrayToMatrixByColumn(centroids[k], patchWidth, patchHeight);
            double[,] cent = MatrixTools.ArrayToMatrixByColumn(centroids[sortOrder[k]], patchWidth, configuration.PatchHeight);

            // normalize each centroid
            double[,] normCent = DataTools.normalise(cent);

            // add a row of zeros to each centroid
            double[,] cent2 = PatchSampling.AddRow(normCent);

            allCentroids.Add(cent2);
        }

        // concatenate all centroids
        double[,] mergedCentroidMatrix = PatchSampling.ListOf2DArrayToOne2DArray(allCentroids);

        // Draw clusters
        var clusterImage = ImageTools.DrawMatrixWithoutNormalisation(mergedCentroidMatrix);
        clusterImage.RotateFlip(RotateFlipType.Rotate270FlipNone);
        var outputClusteringImage = Path.Combine(resultDir, "ClustersWithGrid" + i.ToString() + ".bmp");
        clusterImage.Save(outputClusteringImage);
    }

    // extracting features
    FeatureExtraction.UnsupervisedFeatureExtraction(configuration, allBandsCentroids, trainSetPath, resultDir);
    LoggedConsole.WriteLine("Done...");
}
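KmeansClustering.SortClustersBasedOnSize is used above but not shown; its signature follows from the call site, while the body below is only a guess. In particular, the descending sort direction and the assumption that cluster IDs double as centroid indices are assumptions.

// Hypothetical body for the sort helper (requires System.Linq and System.Collections.Generic):
// order cluster IDs by cluster size so that centroids can be laid out largest-first
// when the cluster image is drawn.
public static int[] SortClustersBasedOnSize(Dictionary<int, double> clusterIdSize)
{
    return clusterIdSize
        .OrderByDescending(pair => pair.Value)   // assumption: largest clusters first
        .Select(pair => pair.Key)
        .ToArray();
}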
public void TestFeatureLearning()
{
    // var outputDir = this.outputDirectory;
    var resultDir = PathHelper.ResolveAssetPath("FeatureLearning");
    var folderPath = Path.Combine(resultDir, "random_audio_segments"); // Liz
    // PathHelper.ResolveAssetPath(@"C:\Users\kholghim\Mahnoosh\PcaWhitening\random_audio_segments\1192_1000");
    // var resultDir = PathHelper.ResolveAssetPath(@"C:\Users\kholghim\Mahnoosh\PcaWhitening");
    var outputMelImagePath = Path.Combine(resultDir, "MelScaleSpectrogram.png");
    var outputNormMelImagePath = Path.Combine(resultDir, "NormalizedMelScaleSpectrogram.png");
    var outputNoiseReducedMelImagePath = Path.Combine(resultDir, "NoiseReducedMelSpectrogram.png");
    var outputReSpecImagePath = Path.Combine(resultDir, "ReconstrcutedSpectrogram.png");
    // var outputClusterImagePath = Path.Combine(resultDir, "Clusters.bmp");

    // +++++++++++++++++++++++++++++++++++++++++++++++++ patch sampling from 1000 random 1-min recordings from Gympie

    // check whether there is any file in the folder/subfolders
    if (Directory.GetFiles(folderPath, "*", SearchOption.AllDirectories).Length == 0)
    {
        throw new ArgumentException("The folder of recordings is empty...");
    }

    // get the nyquist value from the first wav file in the folder of recordings
    int nq = new AudioRecording(Directory.GetFiles(folderPath, "*.wav")[0]).Nyquist;
    int nyquist = nq; // 11025;
    int frameSize = 1024;
    int finalBinCount = 128; // 256; // 100; // 40; // 200;
    int hertzInterval = 1000;
    FreqScaleType scaleType = FreqScaleType.Mel;
    var freqScale = new FrequencyScale(scaleType, nyquist, frameSize, finalBinCount, hertzInterval);
    var fst = freqScale.ScaleType;

    var sonoConfig = new SonogramConfig
    {
        WindowSize = frameSize,

        // since each 24 frames duration is equal to 1 second
        WindowOverlap = 0.1028,
        DoMelScale = (scaleType == FreqScaleType.Mel) ? true : false,
        MelBinCount = (scaleType == FreqScaleType.Mel) ? finalBinCount : frameSize / 2,
        NoiseReductionType = NoiseReductionType.None,
    };

    /*
     * // testing
     * var recordingPath3 = PathHelper.ResolveAsset(folderPath, "SM304264_0+1_20160421_024539_46-47min.wav");
     * var recording3 = new AudioRecording(recordingPath3);
     * var sonogram3 = new SpectrogramStandard(sonoConfig, recording3.WavReader);
     *
     * // DO DRAW SPECTROGRAM
     * var image4 = sonogram3.GetImageFullyAnnotated(sonogram3.GetImage(), "MELSPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
     * image4.Save(outputMelImagePath);
     *
     * // Do RMS normalization
     * sonogram3.Data = SNR.RmsNormalization(sonogram3.Data);
     * var image5 = sonogram3.GetImageFullyAnnotated(sonogram3.GetImage(), "NORMALISEDMELSPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
     * image5.Save(outputNormMelImagePath);
     *
     * // NOISE REDUCTION
     * sonogram3.Data = PcaWhitening.NoiseReduction(sonogram3.Data);
     * var image6 = sonogram3.GetImageFullyAnnotated(sonogram3.GetImage(), "NOISEREDUCEDMELSPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
     * image6.Save(outputNoiseReducedMelImagePath);
     * // testing
     */

    // Define minFreqBin and maxFreqBin to be able to work at arbitrary frequency bin bounds.
    // The default values are minFreqBin = 1 and maxFreqBin = finalBinCount.
    // To work with arbitrary frequency bin bounds, these two parameters must be set manually.
    int minFreqBin = 40; // 1;
    int maxFreqBin = 80; // finalBinCount;
    int numFreqBand = 1; // 4;
    int patchWidth = (maxFreqBin - minFreqBin + 1) / numFreqBand; // finalBinCount / numFreqBand;
    int patchHeight = 1; // 2; // 4; // 16; // 6; // frame size
    int numRandomPatches = 20; // 40; // 80; // 30; // 100; // 500;
    // int fileCount = Directory.GetFiles(folderPath, "*.wav").Length;

    // Define a variable number of "randomPatch" lists based on "numFreqBand"
    Dictionary<string, List<double[,]>> randomPatchLists = new Dictionary<string, List<double[,]>>();
    for (int i = 0; i < numFreqBand; i++)
    {
        randomPatchLists.Add(string.Format("randomPatch{0}", i.ToString()), new List<double[,]>());
    }

    List<double[,]> randomPatches = new List<double[,]>();

    /*
     * foreach (string filePath in Directory.GetFiles(folderPath, "*.wav"))
     * {
     *     FileInfo f = filePath.ToFileInfo();
     *     if (f.Length == 0)
     *     {
     *         Debug.WriteLine(f.Name);
     *     }
     * }
     */

    double[,] inputMatrix;
    foreach (string filePath in Directory.GetFiles(folderPath, "*.wav"))
    {
        FileInfo fileInfo = filePath.ToFileInfo();

        // process the wav file if it is not empty
        if (fileInfo.Length != 0)
        {
            var recording = new AudioRecording(filePath);
            sonoConfig.SourceFName = recording.BaseName;
            var sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);

            // DO RMS NORMALIZATION
            sonogram.Data = SNR.RmsNormalization(sonogram.Data);

            // DO NOISE REDUCTION
            // sonogram.Data = SNR.NoiseReduce_Median(sonogram.Data, nhBackgroundThreshold: 2.0);
            sonogram.Data = PcaWhitening.NoiseReduction(sonogram.Data);

            // check whether the full-band spectrogram is needed or a matrix with arbitrary freq bins
            if (minFreqBin != 1 || maxFreqBin != finalBinCount)
            {
                inputMatrix = PatchSampling.GetArbitraryFreqBandMatrix(sonogram.Data, minFreqBin, maxFreqBin);
            }
            else
            {
                inputMatrix = sonogram.Data;
            }

            // create matrices from different freq bands of the source spectrogram
            List<double[,]> allSubmatrices = PatchSampling.GetFreqBandMatrices(inputMatrix, numFreqBand);

            // select random patches from each freq band matrix and add them to the corresponding patch list
            int count = 0;
            while (count < allSubmatrices.Count)
            {
                randomPatchLists[$"randomPatch{count.ToString()}"].Add(PatchSampling
                    .GetPatches(allSubmatrices.ToArray()[count], patchWidth, patchHeight, numRandomPatches, PatchSampling.SamplingMethod.Random)
                    .ToMatrix());
                count++;
            }
        }
    }

    // convert the list of random patch matrices to one matrix per freq band
    foreach (string key in randomPatchLists.Keys)
    {
        randomPatches.Add(PatchSampling.ListOf2DArrayToOne2DArray(randomPatchLists[key]));
    }

    int numberOfClusters = 50; // 256; // 128; // 64; // 32; // 10;
    List<double[][]> allBandsCentroids = new List<double[][]>();
    List<KMeansClusterCollection> allClusteringOutput = new List<KMeansClusterCollection>();

    for (int i = 0; i < randomPatches.Count; i++)
    {
        double[,] patchMatrix = randomPatches[i];

        // Apply PCA whitening
        var whitenedSpectrogram = PcaWhitening.Whitening(true, patchMatrix);

        // Do k-means clustering
        var clusteringOutput = KmeansClustering.Clustering(whitenedSpectrogram.Reversion, numberOfClusters);
        // var clusteringOutput = KmeansClustering.Clustering(patchMatrix, noOfClusters, pathToClusterCsvFile);

        // Write the cluster centroids to a CSV file.
        // Note: Csv.WriteToCsv cannot write types like Dictionary<int, double[]> (problems with arrays),
        // so the dictionary values are converted to a matrix and written with Csv.WriteMatrixToCsv.
        // There might be a better way to do this.
        string pathToClusterCsvFile = Path.Combine(resultDir, "ClusterCentroids" + i.ToString() + ".csv");
        var clusterCentroids = clusteringOutput.ClusterIdCentroid.Values.ToArray();
        Csv.WriteMatrixToCsv(pathToClusterCsvFile.ToFileInfo(), clusterCentroids.ToMatrix());
        // Csv.WriteToCsv(pathToClusterCsvFile.ToFileInfo(), clusterCentroids);

        // Sort clusters based on size and write cluster ID and size to a CSV file.
        Dictionary<int, double> clusterIdSize = clusteringOutput.ClusterIdSize;
        int[] sortOrder = KmeansClustering.SortClustersBasedOnSize(clusterIdSize);
        string pathToClusterSizeCsvFile = Path.Combine(resultDir, "ClusterSize" + i.ToString() + ".csv");
        Csv.WriteToCsv(pathToClusterSizeCsvFile.ToFileInfo(), clusterIdSize);

        // Draw the cluster image directly from the clustering output.
        List<KeyValuePair<int, double[]>> list = clusteringOutput.ClusterIdCentroid.ToList();
        double[][] centroids = new double[list.Count][];
        for (int j = 0; j < list.Count; j++)
        {
            centroids[j] = list[j].Value;
        }

        allBandsCentroids.Add(centroids);
        allClusteringOutput.Add(clusteringOutput.Clusters);

        List<double[,]> allCentroids = new List<double[,]>();
        for (int k = 0; k < centroids.Length; k++)
        {
            // convert each centroid to a matrix, here in order of cluster size
            // for cluster-ID order use: PatchSampling.ArrayToMatrixByColumn(centroids[k], patchWidth, patchHeight);
            double[,] cent = MatrixTools.ArrayToMatrixByColumn(centroids[sortOrder[k]], patchWidth, patchHeight);

            // normalize each centroid
            double[,] normCent = DataTools.normalise(cent);

            // add a row of zeros to each centroid
            double[,] cent2 = PatchSampling.AddRow(normCent);

            allCentroids.Add(cent2);
        }

        // concatenate all centroids
        double[,] mergedCentroidMatrix = PatchSampling.ListOf2DArrayToOne2DArray(allCentroids);

        // Draw clusters
        // int gridInterval = 1000;
        // var freqScale = new FrequencyScale(FreqScaleType.Mel, nyquist, frameSize, finalBinCount, gridInterval);
        var clusterImage = ImageTools.DrawMatrixWithoutNormalisation(mergedCentroidMatrix);
        clusterImage.RotateFlip(RotateFlipType.Rotate270FlipNone);

        // clusterImage.Save(outputClusterImagePath, ImageFormat.Bmp);
        var outputClusteringImage = Path.Combine(resultDir, "ClustersWithGrid" + i.ToString() + ".bmp");

        // Image bmp = Image.Load<Rgb24>(filename);
        FrequencyScale.DrawFrequencyLinesOnImage((Image<Rgb24>)clusterImage, freqScale, includeLabels: false);
        clusterImage.Save(outputClusteringImage);
    }

    // +++++++++++++++++++++++++++++++++++++++++++++++++++++ Processing and generating features for the target recording
    var recording2Path = PathHelper.ResolveAsset("Recordings", "BAC2_20071008-085040.wav");
    // var recording2Path = PathHelper.ResolveAsset(folderPath, "gympie_np_1192_353972_20160303_055854_60_0.wav"); // folder with 1000 files
    // var recording2Path = PathHelper.ResolveAsset(folderPath, "gympie_np_1192_353887_20151230_042625_60_0.wav"); // folder with 1000 files
    // var recording2Path = PathHelper.ResolveAsset(folderPath, "gympie_np_1192_354744_20151018_053923_60_0.wav"); // folder with 100 files
    var recording2 = new AudioRecording(recording2Path);
    var sonogram2 = new SpectrogramStandard(sonoConfig, recording2.WavReader);

    // DO DRAW SPECTROGRAM
    var image = sonogram2.GetImageFullyAnnotated(sonogram2.GetImage(), "MELSPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
    image.Save(outputMelImagePath);

    // Do RMS normalization
    sonogram2.Data = SNR.RmsNormalization(sonogram2.Data);
    var image2 = sonogram2.GetImageFullyAnnotated(sonogram2.GetImage(), "NORMALISEDMELSPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
    image2.Save(outputNormMelImagePath);
    // NOISE REDUCTION
    sonogram2.Data = PcaWhitening.NoiseReduction(sonogram2.Data);
    var image3 = sonogram2.GetImageFullyAnnotated(sonogram2.GetImage(), "NOISEREDUCEDMELSPECTROGRAM: " + fst.ToString(), freqScale.GridLineLocations);
    image3.Save(outputNoiseReducedMelImagePath);

    // check whether the full-band spectrogram is needed or a matrix with arbitrary freq bins
    if (minFreqBin != 1 || maxFreqBin != finalBinCount)
    {
        inputMatrix = PatchSampling.GetArbitraryFreqBandMatrix(sonogram2.Data, minFreqBin, maxFreqBin);
    }
    else
    {
        inputMatrix = sonogram2.Data;
    }

    // extract sequential patches from the target spectrogram
    List<double[,]> allSubmatrices2 = PatchSampling.GetFreqBandMatrices(inputMatrix, numFreqBand);
    double[][,] matrices2 = allSubmatrices2.ToArray();
    List<double[,]> allSequentialPatchMatrix = new List<double[,]>();
    for (int i = 0; i < matrices2.GetLength(0); i++)
    {
        int rows = matrices2[i].GetLength(0);
        int columns = matrices2[i].GetLength(1);
        var sequentialPatches = PatchSampling.GetPatches(matrices2[i], patchWidth, patchHeight, (rows / patchHeight) * (columns / patchWidth), PatchSampling.SamplingMethod.Sequential);
        allSequentialPatchMatrix.Add(sequentialPatches.ToMatrix());
    }

    // +++++++++++++++++++++++++++++++++++ Feature Transformation
    // To do the feature transformation, we normalise the centroids and the
    // sequential patches from the input spectrogram to unit length,
    // then calculate the dot product of each patch with the centroid matrix.
    List<double[][]> allNormCentroids = new List<double[][]>();
    for (int i = 0; i < allBandsCentroids.Count; i++)
    {
        // double-check the index of the list
        double[][] normCentroids = new double[allBandsCentroids.ToArray()[i].GetLength(0)][];
        for (int j = 0; j < allBandsCentroids.ToArray()[i].GetLength(0); j++)
        {
            normCentroids[j] = ART_2A.NormaliseVector(allBandsCentroids.ToArray()[i][j]);
        }

        allNormCentroids.Add(normCentroids);
    }

    List<double[][]> allFeatureTransVectors = new List<double[][]>();
    for (int i = 0; i < allSequentialPatchMatrix.Count; i++)
    {
        double[][] featureTransVectors = new double[allSequentialPatchMatrix.ToArray()[i].GetLength(0)][];
        for (int j = 0; j < allSequentialPatchMatrix.ToArray()[i].GetLength(0); j++)
        {
            // normalize each patch to unit length
            var normVector = ART_2A.NormaliseVector(allSequentialPatchMatrix.ToArray()[i].ToJagged()[j]);
            featureTransVectors[j] = allNormCentroids.ToArray()[i].ToMatrix().Dot(normVector);
        }

        allFeatureTransVectors.Add(featureTransVectors);
    }

    // +++++++++++++++++++++++++++++++++++ Feature Transformation

    // +++++++++++++++++++++++++++++++++++ Temporal Summarization
    // The resolution to generate features is 1 second.
    // Each 24 single-frame patches form 1 second,
    // and for each group of 24 patches we generate three vectors: mean, std, and max.
    // The pre-assumption is that each input spectrogram is 1 minute long.
    List<double[,]> allMeanFeatureVectors = new List<double[,]>();
    List<double[,]> allMaxFeatureVectors = new List<double[,]>();
    List<double[,]> allStdFeatureVectors = new List<double[,]>();

    // number of frames that need to be concatenated to form 1 second; each 24 frames make 1 second
    int numFrames = (24 / patchHeight) * 60;

    foreach (var freqBandFeature in allFeatureTransVectors)
    {
        // store the features of different bands in separate lists
        List<double[]> meanFeatureVectors = new List<double[]>();
        List<double[]> maxFeatureVectors = new List<double[]>();
        List<double[]> stdFeatureVectors = new List<double[]>();
        int c = 0;
        while (c + numFrames < freqBandFeature.GetLength(0))
        {
            // First, make a list of patches equal to 1 second.
            List<double[]> sequencesOfFramesList = new List<double[]>();
            for (int i = c; i < c + numFrames; i++)
            {
                sequencesOfFramesList.Add(freqBandFeature[i]);
            }

            List<double> mean = new List<double>();
            List<double> std = new List<double>();
            List<double> max = new List<double>();
            double[,] sequencesOfFrames = sequencesOfFramesList.ToArray().ToMatrix();
            // int len = sequencesOfFrames.GetLength(1);

            // Second, calculate the mean, max, and standard deviation of the frame vectors element-wise.
            for (int j = 0; j < sequencesOfFrames.GetLength(1); j++)
            {
                double[] temp = new double[sequencesOfFrames.GetLength(0)];
                for (int k = 0; k < sequencesOfFrames.GetLength(0); k++)
                {
                    temp[k] = sequencesOfFrames[k, j];
                }

                mean.Add(AutoAndCrossCorrelation.GetAverage(temp));
                std.Add(AutoAndCrossCorrelation.GetStdev(temp));
                max.Add(temp.GetMaxValue());
            }

            meanFeatureVectors.Add(mean.ToArray());
            maxFeatureVectors.Add(max.ToArray());
            stdFeatureVectors.Add(std.ToArray());
            c += numFrames;
        }

        allMeanFeatureVectors.Add(meanFeatureVectors.ToArray().ToMatrix());
        allMaxFeatureVectors.Add(maxFeatureVectors.ToArray().ToMatrix());
        allStdFeatureVectors.Add(stdFeatureVectors.ToArray().ToMatrix());
    }

    // +++++++++++++++++++++++++++++++++++ Temporal Summarization

    // ++++++++++++++++++++++++++++++++++ Writing features to file
    // First, concatenate mean, max, and std for each second, then write to a CSV file.
    for (int j = 0; j < allMeanFeatureVectors.Count; j++)
    {
        // write the features of each pre-defined frequency band into a separate CSV file
        var outputFeatureFile = Path.Combine(resultDir, "FeatureVectors" + j.ToString() + ".csv");

        // create the header for the CSV file
        List<string> header = new List<string>();
        for (int i = 0; i < allMeanFeatureVectors.ToArray()[j].GetLength(1); i++)
        {
            header.Add("mean" + i.ToString());
        }

        for (int i = 0; i < allMaxFeatureVectors.ToArray()[j].GetLength(1); i++)
        {
            header.Add("max" + i.ToString());
        }

        for (int i = 0; i < allStdFeatureVectors.ToArray()[j].GetLength(1); i++)
        {
            header.Add("std" + i.ToString());
        }

        // concatenate the mean, std, and max vectors for each 1 second
        List<double[]> featureVectors = new List<double[]>();
        for (int i = 0; i < allMeanFeatureVectors.ToArray()[j].ToJagged().GetLength(0); i++)
        {
            List<double[]> featureList = new List<double[]>
            {
                allMeanFeatureVectors.ToArray()[j].ToJagged()[i],
                allMaxFeatureVectors.ToArray()[j].ToJagged()[i],
                allStdFeatureVectors.ToArray()[j].ToJagged()[i],
            };
            double[] featureVector = DataTools.ConcatenateVectors(featureList);
            featureVectors.Add(featureVector);
        }

        // write the feature vectors to the CSV file
        using (StreamWriter file = new StreamWriter(outputFeatureFile))
        {
            // write the header
            foreach (var entry in header.ToArray())
            {
                file.Write(entry + ",");
            }

            file.Write(Environment.NewLine);

            foreach (var entry in featureVectors.ToArray())
            {
                foreach (var value in entry)
                {
                    file.Write(value + ",");
                }

                file.Write(Environment.NewLine);
            }
        }
    }

    /*
     * // Reconstructing the target spectrogram based on clusters' centroids
     * List<double[,]> convertedSpec = new List<double[,]>();
     * int columnPerFreqBand = sonogram2.Data.GetLength(1) / numFreqBand;
     * for (int i = 0; i < allSequentialPatchMatrix.Count; i++)
     * {
     *     double[,] reconstructedSpec2 = KmeansClustering.ReconstructSpectrogram(allSequentialPatchMatrix.ToArray()[i], allClusteringOutput.ToArray()[i]);
     *     convertedSpec.Add(PatchSampling.ConvertPatches(reconstructedSpec2, patchWidth, patchHeight, columnPerFreqBand));
     * }
     *
     * sonogram2.Data = PatchSampling.ConcatFreqBandMatrices(convertedSpec);
     *
     * // DO DRAW SPECTROGRAM
     * var reconstructedSpecImage = sonogram2.GetImageFullyAnnotated(sonogram2.GetImage(), "RECONSTRUCTEDSPECTROGRAM: " + freqScale.ScaleType.ToString(), freqScale.GridLineLocations);
     * reconstructedSpecImage.Save(outputReSpecImagePath);
     */
}
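As a standalone illustration of the feature-transformation step above (unit-length normalisation followed by a dot product with each centroid), here is a minimal sketch that avoids the library helpers (ART_2A.NormaliseVector, ToMatrix().Dot). It is an illustration of the idea described in the comments, not the library code itself.

// Minimal sketch of the feature-transformation step.
using System;
using System.Linq;

public static class FeatureTransformSketch
{
    // Project one patch onto a set of centroids: both are normalised to unit length,
    // so each output feature is the cosine similarity between the patch and a centroid.
    public static double[] TransformPatch(double[] patch, double[][] centroids)
    {
        double[] unitPatch = Normalise(patch);
        double[] features = new double[centroids.Length];
        for (int c = 0; c < centroids.Length; c++)
        {
            double[] unitCentroid = Normalise(centroids[c]);
            double dot = 0.0;
            for (int d = 0; d < patch.Length; d++)
            {
                dot += unitCentroid[d] * unitPatch[d];
            }

            features[c] = dot;
        }

        return features;
    }

    // Scale a vector to unit Euclidean length (zero vectors are returned unchanged as zeros).
    private static double[] Normalise(double[] vector)
    {
        double norm = Math.Sqrt(vector.Sum(x => x * x));
        return vector.Select(x => norm > 0 ? x / norm : 0.0).ToArray();
    }
}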
public void TestKmeansClustering()
{
    var outputDir = this.outputDirectory;
    var recordingsPath = PathHelper.ResolveAssetPath("FeatureLearning");
    var folderPath = Path.Combine(recordingsPath, "random_audio_segments");
    var outputImagePath = Path.Combine(outputDir.FullName, "ReconstrcutedSpectrogram.png");

    // check whether there is any file in the folder/subfolders
    if (Directory.GetFiles(folderPath, "*", SearchOption.AllDirectories).Length == 0)
    {
        throw new ArgumentException("The folder of recordings is empty. Test will fail!");
    }

    // get the nyquist value from the first wav file in the folder of recordings
    int nq = new AudioRecording(Directory.GetFiles(folderPath, "*.wav")[0]).Nyquist;
    int nyquist = nq;
    int frameSize = 1024;
    int finalBinCount = 128;
    int hertzInterval = 1000;
    FreqScaleType scaleType = FreqScaleType.Mel;
    var freqScale = new FrequencyScale(scaleType, nyquist, frameSize, finalBinCount, hertzInterval);

    var sonoConfig = new SonogramConfig
    {
        WindowSize = frameSize,

        // WindowOverlap is set based on the fact that each 24 frames is equal to 1 second
        WindowOverlap = 0.1028,
        DoMelScale = (scaleType == FreqScaleType.Mel) ? true : false,
        MelBinCount = (scaleType == FreqScaleType.Mel) ? finalBinCount : frameSize / 2,
        NoiseReductionType = NoiseReductionType.None,
    };

    int numberOfFreqBand = 4;
    int patchWidth = finalBinCount / numberOfFreqBand;
    int patchHeight = 1;
    int numberOfRandomPatches = 20;

    // Define a variable number of "randomPatch" lists based on "numberOfFreqBand"
    Dictionary<string, List<double[,]>> randomPatchLists = new Dictionary<string, List<double[,]>>();
    for (int i = 0; i < numberOfFreqBand; i++)
    {
        randomPatchLists.Add(string.Format("randomPatch{0}", i.ToString()), new List<double[,]>());
    }

    List<double[,]> randomPatches = new List<double[,]>();
    foreach (string filePath in Directory.GetFiles(folderPath, "*.wav"))
    {
        FileInfo fileInfo = filePath.ToFileInfo();

        // process the wav file if it is not empty
        if (fileInfo.Length != 0)
        {
            var recording = new AudioRecording(filePath);
            sonoConfig.SourceFName = recording.BaseName;
            var sonogram = new SpectrogramStandard(sonoConfig, recording.WavReader);

            // DO RMS NORMALIZATION
            sonogram.Data = SNR.RmsNormalization(sonogram.Data);

            // DO NOISE REDUCTION
            sonogram.Data = PcaWhitening.NoiseReduction(sonogram.Data);

            // create matrices from different freq bands of the source spectrogram
            List<double[,]> allSubmatrices = PatchSampling.GetFreqBandMatrices(sonogram.Data, numberOfFreqBand);

            // select random patches from each freq band matrix and add them to the corresponding patch list
            int count = 0;
            while (count < allSubmatrices.Count)
            {
                randomPatchLists[string.Format("randomPatch{0}", count.ToString())].Add(PatchSampling
                    .GetPatches(allSubmatrices.ToArray()[count], patchWidth, patchHeight, numberOfRandomPatches, PatchSampling.SamplingMethod.Random)
                    .ToMatrix());
                count++;
            }
        }
    }

    // convert the list of random patch matrices to one matrix per freq band
    foreach (string key in randomPatchLists.Keys)
    {
        randomPatches.Add(PatchSampling.ListOf2DArrayToOne2DArray(randomPatchLists[key]));
    }

    int numberOfClusters = 32;
    List<KMeansClusterCollection> allClusteringOutput = new List<KMeansClusterCollection>();
    for (int i = 0; i < randomPatches.Count; i++)
    {
        double[,] patchMatrix = randomPatches[i];

        // Do k-means clustering
        string pathToClusterCsvFile = Path.Combine(outputDir.FullName, "ClusterCentroids" + i.ToString() + ".csv");
        var clusteringOutput = KmeansClustering.Clustering(patchMatrix, numberOfClusters);
        // Sort clusters based on size and write cluster ID and size to a CSV file.
        Dictionary<int, double> clusterIdSize = clusteringOutput.ClusterIdSize;
        int[] sortOrder = KmeansClustering.SortClustersBasedOnSize(clusterIdSize);
        string pathToClusterSizeCsvFile = Path.Combine(outputDir.FullName, "ClusterSize" + i.ToString() + ".csv");
        Csv.WriteToCsv(pathToClusterSizeCsvFile.ToFileInfo(), clusterIdSize);

        // Draw the cluster image directly from the clustering output.
        List<KeyValuePair<int, double[]>> listCluster = clusteringOutput.ClusterIdCentroid.ToList();
        double[][] centroids = new double[listCluster.Count][];
        for (int j = 0; j < listCluster.Count; j++)
        {
            centroids[j] = listCluster[j].Value;
        }

        allClusteringOutput.Add(clusteringOutput.Clusters);

        List<double[,]> allCentroids = new List<double[,]>();
        for (int k = 0; k < centroids.Length; k++)
        {
            // convert each centroid to a matrix, either in order of cluster ID or (as here) in order of cluster size
            double[,] centroid = MatrixTools.ArrayToMatrixByColumn(centroids[sortOrder[k]], patchWidth, patchHeight);

            // normalize each centroid
            double[,] normalizedCentroid = DataTools.normalise(centroid);

            // add a row of zeros to each centroid
            double[,] newCentroid = PatchSampling.AddRow(normalizedCentroid);

            allCentroids.Add(newCentroid);
        }

        // concatenate all centroids
        double[,] mergedCentroidMatrix = PatchSampling.ListOf2DArrayToOne2DArray(allCentroids);

        // Draw clusters
        var clusterImage = ImageTools.DrawMatrixWithoutNormalisation(mergedCentroidMatrix);
        clusterImage.RotateFlip(RotateFlipType.Rotate270FlipNone);
        var outputClusteringImage = Path.Combine(outputDir.FullName, "ClustersWithGrid" + i.ToString() + ".bmp");
        FrequencyScale.DrawFrequencyLinesOnImage((Bitmap)clusterImage, freqScale, includeLabels: false);
        clusterImage.Save(outputClusteringImage);
    }

    // +++++++++++++++++++++++++++++++++++++++++++ Reconstructing a target spectrogram from sequential patches and the cluster centroids
    var recording2Path = PathHelper.ResolveAsset("Recordings", "BAC2_20071008-085040.wav");
    var recording2 = new AudioRecording(recording2Path);
    var sonogram2 = new SpectrogramStandard(sonoConfig, recording2.WavReader);
    var targetSpec = sonogram2.Data;

    // Do RMS normalization
    sonogram2.Data = SNR.RmsNormalization(sonogram2.Data);

    // NOISE REDUCTION
    sonogram2.Data = PcaWhitening.NoiseReduction(sonogram2.Data);

    // extract sequential patches from the target spectrogram
    List<double[,]> allSubmatrices2 = PatchSampling.GetFreqBandMatrices(sonogram2.Data, numberOfFreqBand);
    double[][,] matrices2 = allSubmatrices2.ToArray();
    List<double[,]> allSequentialPatchMatrix = new List<double[,]>();
    for (int i = 0; i < matrices2.GetLength(0); i++)
    {
        int rows = matrices2[i].GetLength(0);
        int columns = matrices2[i].GetLength(1);
        var sequentialPatches = PatchSampling.GetPatches(matrices2[i], patchWidth, patchHeight, (rows / patchHeight) * (columns / patchWidth), PatchSampling.SamplingMethod.Sequential);
        allSequentialPatchMatrix.Add(sequentialPatches.ToMatrix());
    }

    // reconstruct the spectrogram from the cluster centroids, band by band
    List<double[,]> convertedSpectrogram = new List<double[,]>();
    int columnPerFreqBand = sonogram2.Data.GetLength(1) / numberOfFreqBand;
    for (int i = 0; i < allSequentialPatchMatrix.Count; i++)
    {
        double[,] reconstructedSpec2 = KmeansClustering.ReconstructSpectrogram(allSequentialPatchMatrix.ToArray()[i], allClusteringOutput.ToArray()[i]);
        convertedSpectrogram.Add(PatchSampling.ConvertPatches(reconstructedSpec2, patchWidth, patchHeight, columnPerFreqBand));
    }

    sonogram2.Data = PatchSampling.ConcatFreqBandMatrices(convertedSpectrogram);
    // DO DRAW SPECTROGRAM
    var reconstructedSpecImage = sonogram2.GetImageFullyAnnotated(sonogram2.GetImage(), "RECONSTRUCTEDSPECTROGRAM: " + freqScale.ScaleType.ToString(), freqScale.GridLineLocations);
    reconstructedSpecImage.Save(outputImagePath, ImageFormat.Png);

    // DO UNIT TESTING: the reconstructed spectrogram must have the same dimensions as the target spectrogram
    Assert.AreEqual(targetSpec.GetLength(0), sonogram2.Data.GetLength(0));
    Assert.AreEqual(targetSpec.GetLength(1), sonogram2.Data.GetLength(1));
}
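KmeansClustering.ReconstructSpectrogram is not shown here. Based on the comments above ("reconstructing the target spectrogram based on clusters' centroids"), a plausible reading is that each patch is replaced by its nearest centroid. The sketch below illustrates that idea on a plain centroid array rather than the KMeansClusterCollection used above; it is an assumption, not the library code.

// Hypothetical nearest-centroid reconstruction sketch.
using System;

public static class ReconstructionSketch
{
    // Replace every row (patch) of 'patches' with the closest centroid (squared Euclidean distance).
    public static double[][] ReconstructFromCentroids(double[][] patches, double[][] centroids)
    {
        var reconstructed = new double[patches.Length][];
        for (int p = 0; p < patches.Length; p++)
        {
            int best = 0;
            double bestDist = double.MaxValue;
            for (int c = 0; c < centroids.Length; c++)
            {
                double dist = 0.0;
                for (int d = 0; d < patches[p].Length; d++)
                {
                    double diff = patches[p][d] - centroids[c][d];
                    dist += diff * diff;
                }

                if (dist < bestDist)
                {
                    bestDist = dist;
                    best = c;
                }
            }

            reconstructed[p] = (double[])centroids[best].Clone();
        }

        return reconstructed;
    }
}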