public void GaussNormalizationTests_test1()
{
    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadMetaData1());

    // The raw dataset must be provided as object[][], because columns can hold numeric, binary, or categorical (classification) values.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return GetRealDataSample();
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseGaussNormalizer();

    var result = api.Run() as double[][];

    // Compare the normalized result against the expected values.
    var expected = GetGaussNormalizedDataSample();
    for (int i = 0; i < expected.Length; i++)
    {
        for (int j = 0; j < expected[0].Length; j++)
        {
            Assert.Equal(expected[i][j], Math.Round(result[i][j], 4));
        }
    }
}
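// All tests in this section build their pipeline from a loadMetaData1() helper that is not shown here.
// The sketch below is only a rough orientation, assuming the LearningFoundation DataDescriptor / Column /
// ColumnType types with two numeric features "x" and "y"; the concrete property values are illustrative,
// not the original metadata.
private DataDescriptor loadMetaData1()
{
    var des = new DataDescriptor();

    // Two numeric features, matching the "x"/"y" attributes used by the clustering tests (assumed).
    des.Features = new Column[2];
    des.Features[0] = new Column { Id = 1, Name = "x", Index = 0, Type = ColumnType.NUMERIC, DefaultMissingValue = 0.0, Values = null };
    des.Features[1] = new Column { Id = 2, Name = "y", Index = 1, Type = ColumnType.NUMERIC, DefaultMissingValue = 0.0, Values = null };

    return des;
}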
public void ContinuousTrainData()
{
    double[][] initialCentroids = new double[4][];
    initialCentroids[0] = new double[] { 0.2, -4.0 };
    initialCentroids[1] = new double[] { 0.2, -6.0 };
    initialCentroids[2] = new double[] { 0.4, -4.0 };
    initialCentroids[3] = new double[] { 0.4, -6.0 };

    string[] attributes = new string[] { "x", "y" };

    int numAttributes = attributes.Length;  // 2 in this demo (x, y)
    int numClusters = 4;
    int maxCount = 300;

    SaveLoadSettings sett;
    var resp = SaveLoadSettings.JSON_Settings(@"C:\Data\Function1.json", out sett, true);

    AnomalyDetectionAPI kmeanApi = new AnomalyDetectionAPI(null, numClusters);

    LearningApi api = new LearningApi(loadMetaData1());

    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        var rawDatalist = getRealDataSample(@"C:\Data\Function1.csv").ToList();

        double[][] oldSamples;
        var nn = kmeanApi.GetPreviousSamples(sett, out oldSamples);

        if (oldSamples != null)
        {
            foreach (var old in oldSamples)
            {
                var row = old.Cast<object>().ToArray();
                rawDatalist.Add(row);
            }
        }

        return rawDatalist.ToArray();
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseGaussNormalizer();

    var rawData = api.Run() as double[][];

    Helpers.WriteToCSVFile(rawData);

    ClusteringSettings Settings = new ClusteringSettings(rawData, maxCount, numClusters, numAttributes, sett, KmeansAlgorithm: 1, InitialGuess: true, Replace: true);

    AnomalyDetectionResponse response = kmeanApi.ImportNewDataForClustering(Settings);
}
public void Training()
{
    int cnt = 0;

    double[][] initialCentroids = new double[4][];
    initialCentroids[0] = new double[] { 0.4, 25.0 };
    initialCentroids[1] = new double[] { 0.4, 15.0 };
    initialCentroids[2] = new double[] { 0.6, 15.0 };
    initialCentroids[3] = new double[] { 0.6, 25.0 };

    string[] attributes = new string[] { "x", "y" };

    int numAttributes = attributes.Length;
    int numClusters = 4;
    int maxCount = 300;

    ClusteringSettings clusterSettings = new ClusteringSettings(maxCount, numClusters, numAttributes, KmeansAlgorithm: 1, Replace: true);

    // The constructor argument (clusterSettings) must not be null when Training() is run.
    AnomalyDetectionAPI kmeanApi = new AnomalyDetectionAPI(clusterSettings);

    AnomalyDetectionResponse response;

    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadMetaData1());

    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        var rawDataArray = getData(cnt);
        return rawDataArray;
    });

    api.UseDefaultDataMapper();
    api.UseGaussNormalizer();

    // Trains incrementally on 15 batches and writes the centroids of each run to a CSV file.
    for (int i = 0; i < 15; i++)
    {
        cnt = i;

        var rawData = api.Run() as double[][];

        response = kmeanApi.Training(rawData, initialCentroids);

        Helpers.WriteToCSVFile(kmeanApi.GetCentroid(), $"Data\\Centroid{i}.csv");

        //response = kmeanApi.Save($"Function{i}.json");
    }
}
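// Training() and ContinuousTrainData2() pull a fresh batch on every iteration through getData(cnt),
// which is not shown in this section. The stand-in below is purely hypothetical: it generates a small
// object[][] batch of (x, y) points scattered around the four initial centroids used above, whereas the
// real helper presumably loads the cnt-th slice of the recorded function data.
private object[][] getData(int cnt)
{
    // Deterministic seed so the same cnt always yields the same batch.
    var rnd = new Random(cnt);
    var batch = new object[40][];

    for (int i = 0; i < batch.Length; i++)
    {
        double x = 0.4 + 0.2 * (i % 2);          // roughly 0.4 or 0.6
        double y = 15.0 + 10.0 * ((i / 2) % 2);  // roughly 15.0 or 25.0
        batch[i] = new object[] { x + rnd.NextDouble() * 0.05, y + rnd.NextDouble() * 0.5 };
    }

    return batch;
}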
public void ContinuousTrainData2()
{
    int cnt = 0;

    double[][] initialCentroids = new double[4][];
    initialCentroids[0] = new double[] { 40.0, 10.0 };
    initialCentroids[1] = new double[] { 20.0, 10.0 };
    initialCentroids[2] = new double[] { 40.0, 20.0 };
    initialCentroids[3] = new double[] { 20.0, 20.0 };

    string[] attributes = new string[] { "x", "y" };

    int numAttributes = attributes.Length;  // 2 in this demo (x, y)
    int numClusters = 4;
    int maxCount = 300;

    SaveLoadSettings sett;
    var resp = SaveLoadSettings.JSON_Settings(@"C:\Data\Function1.json", out sett, true);

    AnomalyDetectionAPI kmeanApi = new AnomalyDetectionAPI(null, numClusters, initialCentroids);

    LearningApi api = new LearningApi(loadMetaData1());

    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        var rawDatalist = getData(cnt);
        return rawDatalist;
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseGaussNormalizer();

    for (int i = 0; i < 15; i++)
    {
        cnt = i;

        var rawData = api.Run() as double[][];

        ClusteringSettings Settings = new ClusteringSettings(rawData, maxCount, numClusters, numAttributes, sett, KmeansAlgorithm: 1, InitialGuess: true, Replace: true);

        AnomalyDetectionResponse response = kmeanApi.ImportNewDataForClustering(Settings);
    }
}
public void TestWithNormalize_GaussAndCentroid()
{
    double[][] initialCentroids = new double[4][];
    initialCentroids[0] = new double[] { 0.2, -4.0 };
    initialCentroids[1] = new double[] { 0.2, -6.0 };
    initialCentroids[2] = new double[] { 0.4, -4.0 };
    initialCentroids[3] = new double[] { 0.4, -6.0 };

    string[] attributes = new string[] { "x", "y" };

    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadMetaData1());

    // The raw dataset must be provided as object[][], because columns can hold numeric, binary, or categorical (classification) values.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return getRealDataSample(@"C:\Data\Function15.csv");
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseGaussNormalizer();

    var rawData = api.Run() as double[][];

    int numAttributes = attributes.Length;  // 2 in this demo (x, y)
    int numClusters = 4;                    // vary this to experiment (must be between 2 and the number of data tuples)
    int maxCount = 300;                     // trial and error

    SaveLoadSettings sett;
    var resp = SaveLoadSettings.JSON_Settings(@"C:\Data\Function15.json", out sett, true);

    AnomalyDetectionAPI kmeanApi = new AnomalyDetectionAPI(rawData, numClusters);

    ClusteringSettings Settings = new ClusteringSettings(rawData, maxCount, numClusters, numAttributes, sett, KmeansAlgorithm: 1, InitialGuess: true, Replace: true);

    AnomalyDetectionResponse response = kmeanApi.ImportNewDataForClustering(Settings);
}
public void TestWithNormalize_Gauss()
{
    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadMetaData1());

    // The raw dataset must be provided as object[][], because columns can hold numeric, binary, or categorical (classification) values.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return getRealDataSample(@"C:\Data\First.csv");
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseGaussNormalizer();

    var result = api.Run() as double[][];

    Helpers.WriteToCSVFile(result);
}
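// Several of the tests read their input through getRealDataSample(path), which is also not shown in this
// section. A minimal sketch, assuming a headerless two-column CSV with comma-separated, invariant-culture
// numbers (and using System.Linq, which the surrounding tests already rely on), could look like the
// following; the real helper may differ in delimiter and header handling.
private object[][] getRealDataSample(string path)
{
    return System.IO.File.ReadAllLines(path)
        .Where(line => !string.IsNullOrWhiteSpace(line))
        .Select(line => line.Split(',')
            .Select(v => (object)double.Parse(v.Trim(), System.Globalization.CultureInfo.InvariantCulture))
            .ToArray())
        .ToArray();
}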