public void Test_FunctionRecognitionModuleSave(int MinNoiseForPrediction, int MaxNoiseForPrediction)
{
    #region Train and Save
    var batch = 100;

    var funcData = FunctionGenerator.CreateFunction(500, 2, 2 * Math.PI / 100);

    LearningApi api = new LearningApi();
    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        var similarFuncData = FunctionGenerator.CreateSimilarFromReferenceFunc(funcData.ToArray(), 7, 10);

        double[][] formattedData = formatData(similarFuncData);

        return formattedData;
    });

    double[][] initCentroids = new double[4][];
    initCentroids[0] = new double[] { 1.53, 0.63 };
    initCentroids[1] = new double[] { 4.68, -0.63 };
    initCentroids[2] = new double[] { 7.85, 0.62 };
    initCentroids[3] = new double[] { 10.99, -0.64 };

    ClusteringSettings settings = new ClusteringSettings(0, numClusters: 4, numDims: 2, KmeansAlgorithm: 2, initialCentroids: initCentroids, tolerance: 0)
    {
        KmeansMaxIterations = 1000
    };

    api.UseKMeansFunctionRecognitionModule(settings);

    KMeansFunctionRecognitonScore res;

    while (batch-- > 0)
    {
        res = api.RunBatch() as KMeansFunctionRecognitonScore;
    }

    api.Save("sinusmodel");
    #endregion

    #region Load And Predict
    var api2 = LearningApi.Load("sinusmodel");
    var noisedFunc = FunctionGenerator.CreateSimilarFromReferenceFunc(funcData.ToArray(), MinNoiseForPrediction, MaxNoiseForPrediction);
    double[][] data = formatData(noisedFunc);

    var predictionResult = api2.Algorithm.Predict(data, null) as KMeansFunctionRecognitionResult;

    // True positives.
    if (MaxNoiseForPrediction <= 10)
    {
        Assert.True(predictionResult.Loss == 1.0);
    }
    // True negatives.
    else if (MaxNoiseForPrediction >= 25)
    {
        Assert.False(predictionResult.Loss == 1.0);
    }
    #endregion
}
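The formatData helper is defined elsewhere in the test class and is not part of this listing. Judging from its call sites (and from the transposeFunction call in the optimal-clusters test further below), it presumably transposes the generated function from one-array-per-dimension into one point per row. A minimal sketch under that assumption; the body is guessed, only the name and signature come from the call sites:

// Hypothetical sketch of the formatData helper used above. Assumes the
// function generator returns one array per dimension (here X and Y) and
// that K-Means expects one { x, y } point per row.
private static double[][] formatData(double[][] function)
{
    int numPoints = function[0].Length;

    double[][] points = new double[numPoints][];
    for (int i = 0; i < numPoints; i++)
    {
        points[i] = new double[function.Length];
        for (int dim = 0; dim < function.Length; dim++)
        {
            points[i][dim] = function[dim][i];
        }
    }

    return points;
}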
public void RBMBinaryDataCreation()
{
    // Size of the image.
    int size = 40;

    var context = getImgRecognitionDescriptor(size * size);

    LearningApi api = new LearningApi(context);

    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        double[][] trainData = new double[22][];

        for (int j = 0; j < 1; j++)
        {
            // Path of the training images.
            //return getImageData(size, $"{Directory.GetCurrentDirectory()}\\RestrictedBolzmannMachine2\\TrainingImages");
            trainData = getImageData(size, imagePath + j);
        }

        return trainData;
    });

    IScore score = api.Run() as IScore;

    //DigitDatasetCSVFile.Close();
    SmileyCSVFile.Close();
}
public void MinMaxNormalization_test1()
{
    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadMetaData1());

    // The real dataset must be defined as object type, because data can be numeric, binary, or classification data.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return GetRealDataSample();
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseMinMaxNormalizer();

    var result = api.Run() as double[][];

    // Test the result of the normalization.
    var expected = GetNormalizedDataSample();
    for (int i = 0; i < expected.Length; i++)
    {
        for (int j = 0; j < expected[0].Length; j++)
        {
            Assert.Equal(Math.Round(result[i][j], 5), expected[i][j]);
        }
    }
}
public void TestWithNormalize_MinMax()
{
    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadMetaData1());

    // The real dataset must be defined as object type, because data can be numeric, binary, or classification data.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return getRealDataSample(@"C:\Data\First.csv");
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseMinMaxNormalizer();

    //// Use the denormalizer on the normalized data.
    //api.UseMinMaxDeNormalizer();

    var result = api.Run() as double[][];

    Helpers.WriteToCSVFile(result);
}
public void SobelFilterTestForGrayscaleConversionImage()
{
    LearningApi api = new LearningApi();

    api.UseActionModule<double[,,], double[,,]>((input, ctx) =>
    {
        string baseDirectory = AppDomain.CurrentDomain.BaseDirectory;
        string path = Path.Combine(baseDirectory, "TestInputimages\\TIM3.jpg"); // Path to the bin directory of the project.

        Bitmap bitmap = new Bitmap(path);
        double[,,] data = helper.ConvertBitmapToDouble(bitmap); // Convert bitmap to double.
        return data;
    });

    api.AddModule(new SobelConvolutionFilter());

    double[,,] result = api.Run() as double[,,];

    Bitmap bitresult = helper.ConvertDoubleToBitmap(result); // Convert double back to bitmap.

    string baseDirectory2 = AppDomain.CurrentDomain.BaseDirectory;
    string outpath = baseDirectory2 + "\\TestOutputImages\\";
    if (!Directory.Exists(outpath))
    {
        Directory.CreateDirectory(outpath);
    }

    bitresult.Save(outpath + "Output3.jpg");
}
public void OR_Test()
{
    LearningApi api = new LearningApi();

    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        ctx.DataDescriptor = get2DDescriptor();

        double[][] data = new double[4][];
        data[0] = new double[] { 0, 0, 0, 0.0 };
        data[1] = new double[] { 0, 1, 1, 0.0 };
        data[2] = new double[] { 1, 0, 1, 0.0 };
        data[3] = new double[] { 1, 1, 1, 0.0 };
        return data;
    });

    api.UseDeltaLearning(0.2, 1000);

    IScore score = api.Run() as IScore;

    double[][] testData = new double[3][];
    testData[0] = new double[] { 0, 0, 0.0 };
    testData[1] = new double[] { 1, 1, 0.0 };
    testData[2] = new double[] { 0, 1, 0.0 };

    var result = api.Algorithm.Predict(testData, api.Context) as DeltaLearningResult;

    Assert.True(result.PredictedResults[0] == 0);
    Assert.True(result.PredictedResults[1] == 1);
    Assert.True(result.PredictedResults[2] == 1);
}
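get2DDescriptor() is another helper of the test class that is not shown in this listing. A plausible sketch, assuming the LearningApi DataDescriptor/Column types, two numeric feature columns, and the label at index 2; the exact column metadata in the original may differ, and the purpose of the trailing 0.0 column in the training rows above is not visible from this listing:

// Hypothetical sketch of get2DDescriptor(): two numeric features,
// with column 2 of every row holding the expected output (the label).
private DataDescriptor get2DDescriptor()
{
    var desc = new DataDescriptor();

    desc.Features = new Column[2];
    desc.Features[0] = new Column { Id = 1, Name = "X", Index = 0, Type = ColumnType.NUMERIC };
    desc.Features[1] = new Column { Id = 2, Name = "Y", Index = 1, Type = ColumnType.NUMERIC };

    desc.LabelIndex = 2;

    return desc;
}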
public void Feature_and_LabelIndex_Mapping_Test2()
{
    var desc = loadMetaData_with_CategoricFeature();

    // Creates the LearningApi object.
    LearningApi api = new LearningApi(desc);

    // The real dataset must be defined as object type, because data can be numeric, binary, or classification data.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return loadRealDataSample();
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    var result = api.Run() as double[][];

    var featureNumber = result[0].Count() - 1; // Minus the label column.

    // The string column is not counted, because it is an ignored column.
    // There is one categorical column in the features, so the number of features is increased by (classCount - 1):
    // featureCount = columnCount - stringColumnCount - labelColumn + classCount - 1
    var featureCount = 4 - 1 - 1 + (3 - 1);

    Assert.Equal(featureNumber, featureCount);
}
public void Test_LoadSave()
{
    string moduleName = "test-action";

    double[][] clusterCenters = new double[3][];
    clusterCenters[0] = new double[] { 5.0, 5.0 };
    clusterCenters[1] = new double[] { 15.0, 15.0 };
    clusterCenters[2] = new double[] { 30.0, 30.0 };

    string[] attributes = new string[] { "Height", "Weight" };

    int numAttributes = attributes.Length; // 2 in this demo (height, weight).
    int numClusters = 3;                   // Vary this to experiment (must be between 2 and the number of data tuples).
    int maxCount = 300;                    // Trial and error.

    ClusteringSettings settings = new ClusteringSettings(maxCount, numClusters, numAttributes, KmeansAlgorithm: 1);

    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadDescriptor());

    // Defines the action method, which will generate the training data.
    api.UseActionModule<object, double[][]>((data, ctx) =>
    {
        var rawData = Helpers.CreateSampleData(clusterCenters, 2, 10000, 0.5);
        return rawData;
    }, moduleName);

    api.UseKMeans(settings);

    var resp = api.Run() as KMeansScore;

    Assert.True(resp.Model.Clusters != null);
    Assert.True(resp.Model.Clusters.Length == clusterCenters.Length);

    var result = api.Algorithm.Predict(clusterCenters, api.Context) as KMeansResult;

    Assert.True(result.PredictedClusters[0] == 0);
    Assert.True(result.PredictedClusters[1] == 1);
    Assert.True(result.PredictedClusters[2] == 2);

    // This is where the trained model is saved.
    api.Save(nameof(TestLoadSave));

    // Loads the saved model.
    var loadedApi = LearningApi.Load(nameof(TestLoadSave));

    // Because we have used an action method in the LearningApi, we have to set it up again after loading.
    // This is not an API design limitation; it is a restriction of the .NET framework, which cannot persist code.
    loadedApi.ReplaceActionModule<object, double[][]>(moduleName, (data, ctx) =>
    {
        var rawData = Helpers.CreateSampleData(clusterCenters, 2, 10000, 0.5);
        return rawData;
    });

    loadedApi.Run();
}
public void calculateCorrelation_test1()
{
    LearningApi api = new LearningApi(null);

    // Initialize data provider.
    api.UseCsvDataProvider(@"CorrelationMatrix/corellation_data.csv", ',', true, 0);

    // Custom action on the dataset.
    api.UseActionModule<object[][], double[][]>((input, ctx) =>
    {
        return toColumnVector(input);
    });

    // api.UseMinMaxNormalizer();

    var data = api.Run() as double[][];

    var prov = api.GetModule<CsvDataProvider>("CsvDataProvider");

    var strData = new List<string>();
    var hed = prov.Header.ToList();
    hed.Insert(0, "");
    strData.Add(string.Join(",", hed.ToArray()));

    for (int i = 0; i < data.Length; i++)
    {
        var lst = new List<string>();
        lst.Add(prov.Header[i]);

        for (int k = 0; k < i; k++)
        {
            lst.Add(" ");
        }

        for (int j = i; j < data.Length; j++)
        {
            var corValue = data[i].CorrCoeffOf(data[j]);
            if (double.IsNaN(corValue))
            {
                continue;
            }

            lst.Add(corValue.ToString("n5", CultureInfo.InvariantCulture));
        }

        strData.Add(string.Join(",", lst));
    }

    Assert.True("Col1,1.00000,0.16892,0.99111,0.75077,-0.82354,-0.85164" == strData[1]);

    System.IO.File.WriteAllLines(@"CorrelationMatrix/strCorrlation.txt", strData);
}
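CorrCoeffOf is an extension method supplied by the library under test; judging from the test's expected matrix, it presumably computes Pearson's correlation coefficient. For reference, a self-contained sketch of that computation (the name and placement are hypothetical; assumes System and System.Linq are imported):

// Hypothetical sketch of a Pearson correlation coefficient, which is
// presumably what CorrCoeffOf computes:
// r = cov(X, Y) / (stdDev(X) * stdDev(Y)).
private static double PearsonCorrelation(double[] x, double[] y)
{
    double meanX = x.Average();
    double meanY = y.Average();

    double cov = 0.0, varX = 0.0, varY = 0.0;
    for (int i = 0; i < x.Length; i++)
    {
        cov += (x[i] - meanX) * (y[i] - meanY);
        varX += (x[i] - meanX) * (x[i] - meanX);
        varY += (y[i] - meanY) * (y[i] - meanY);
    }

    // Yields NaN for constant inputs (zero variance), which matches
    // the double.IsNaN check in the test above.
    return cov / Math.Sqrt(varX * varY);
}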
public void SimpleSequenceWithGapsTest()
{
    LearningApi api = new LearningApi();

    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        const int maxSamples = 1000000;
        ctx.DataDescriptor = getDescriptor();
        double[][] data = new double[maxSamples / 3][];

        //
        // We generate the following input vectors:
        // IN Val - Expected OUT Val
        // Every 3rd number is given.
        for (int i = 0; i < maxSamples / 3; i++)
        {
            data[i] = new double[2];
            data[i][0] = i * 3;
            if ((i * 3) > (maxSamples / 2))
            {
                data[i][1] = 1;
            }
            else
            {
                data[i][1] = 0;
            }
        }

        return data;
    });

    api.UsePerceptron(0.05, 1000);

    IScore score = api.Run() as IScore;

    double[][] testData = new double[6][];
    testData[0] = new double[] { 2.0, 0.0 };
    testData[1] = new double[] { 1, 0.0 };
    testData[2] = new double[] { 3, 0.0 };
    testData[3] = new double[] { 3002, 0.0 };
    testData[4] = new double[] { 6002.0, 0.0 };
    testData[5] = new double[] { 9005, 0.0 };

    var result = api.Algorithm.Predict(testData, api.Context) as PerceptronResult;

    // TODO: This test is not completed.
    //Assert.True(result.PredictedValues[0] == 0);
    //Assert.True(result.PredictedValues[1] == 0);
    //Assert.True(result.PredictedValues[2] == 0);
    //Assert.True(result.PredictedValues[3] == 1);
    //Assert.True(result.PredictedValues[4] == 1);
    //Assert.True(result.PredictedValues[5] == 1);
}
public void LogisticRegression_Test_iterations_20_learningrate_015()
{
    var desc = loadMetaData();
    LearningApi api = new LearningApi(desc);

    // The real dataset must be defined as object type, because data can be numeric, binary, or classification data.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return loadRealDataSample();
    });

    // Use the data mapper, which will extract (map) the required columns.
    api.UseDefaultDataMapper();
    api.UseMinMaxNormalizer();

    // Run logistic regression for 20 iterations with learningRate = 0.15.
    api.UseLogisticRegression(0.15, 20);

    api.Run();

    IScore score = api.GetScore();

    // Errors during each iteration.
    Assert.Equal(Math.Round(score.Errors[0], 5), 0.24236);
    Assert.Equal(Math.Round(score.Errors[1], 5), 0.23707);
    Assert.Equal(Math.Round(score.Errors[2], 5), 0.23358);
    Assert.Equal(Math.Round(score.Errors[3], 5), 0.23001);
    Assert.Equal(Math.Round(score.Errors[4], 5), 0.22806);
    Assert.Equal(Math.Round(score.Errors[5], 5), 0.22506);
    Assert.Equal(Math.Round(score.Errors[6], 5), 0.22409);
    Assert.Equal(Math.Round(score.Errors[7], 5), 0.22134);
    Assert.Equal(Math.Round(score.Errors[8], 5), 0.22105);
    Assert.Equal(Math.Round(score.Errors[9], 5), 0.21840);
    Assert.Equal(Math.Round(score.Errors[10], 5), 0.21857);
    Assert.Equal(Math.Round(score.Errors[11], 5), 0.21595);
    Assert.Equal(Math.Round(score.Errors[12], 5), 0.21640);
    Assert.Equal(Math.Round(score.Errors[13], 5), 0.21381);
    Assert.Equal(Math.Round(score.Errors[14], 5), 0.21439);
    Assert.Equal(Math.Round(score.Errors[15], 5), 0.21189);
    Assert.Equal(Math.Round(score.Errors[16], 5), 0.21251);
    Assert.Equal(Math.Round(score.Errors[17], 5), 0.21015);
    Assert.Equal(Math.Round(score.Errors[18], 5), 0.21076);
    Assert.Equal(Math.Round(score.Errors[19], 5), 0.20860);

    // Best model found by logistic regression in 20 iterations.
    Assert.Equal(Math.Round(score.Weights[0], 5), 0.28363);
    Assert.Equal(Math.Round(score.Weights[1], 5), 0.37424);
    Assert.Equal(Math.Round(score.Weights[2], 5), 1.41890);
    Assert.Equal(Math.Round(score.Weights[3], 5), 1.01207);
    Assert.Equal(Math.Round(score.Weights[4], 5), -0.33841);
    Assert.Equal(Math.Round(score.Weights[5], 5), -0.33841);
    Assert.Equal(Math.Round(score.Weights[6], 5), -1.62489);
}
public void ContinuousTrainData()
{
    double[][] initialCentroids = new double[4][];
    initialCentroids[0] = new double[] { 0.2, -4.0 };
    initialCentroids[1] = new double[] { 0.2, -6.0 };
    initialCentroids[2] = new double[] { 0.4, -4.0 };
    initialCentroids[3] = new double[] { 0.4, -6.0 };

    string[] attributes = new string[] { "x", "y" };

    int numAttributes = attributes.Length; // 2 in this demo (x, y).
    int numClusters = 4;
    int maxCount = 300;

    SaveLoadSettings sett;
    var resp = SaveLoadSettings.JSON_Settings(@"C:\Data\Function1.json", out sett, true);

    AnomalyDetectionAPI kmeanApi = new AnomalyDetectionAPI(null, numClusters);

    LearningApi api = new LearningApi(loadMetaData1());

    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        var rawDatalist = getRealDataSample(@"C:\Data\Function1.csv").ToList();

        double[][] oldSamples;
        var nn = kmeanApi.GetPreviousSamples(sett, out oldSamples);

        if (oldSamples != null)
        {
            foreach (var old in oldSamples)
            {
                var row = old.Cast<object>().ToArray();
                rawDatalist.Add(row);
            }
        }

        return rawDatalist.ToArray();
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseGaussNormalizer();

    var rawData = api.Run() as double[][];

    Helpers.WriteToCSVFile(rawData);

    ClusteringSettings Settings = new ClusteringSettings(rawData, maxCount, numClusters, numAttributes, sett, KmeansAlgorithm: 1, InitialGuess: true, Replace: true);

    AnomalyDetectionResponse response = kmeanApi.ImportNewDataForClustering(Settings);
}
public void LogisticRegression_Test_Real_Example()
{
    string m_binary_data_path = @"SampleData\binary\admit_binary.csv";

    var binary_path = System.IO.Path.Combine(Directory.GetCurrentDirectory(), m_binary_data_path);

    LearningApi api = new LearningApi(loadMetaData1());
    api.UseCsvDataProvider(binary_path, ',', false, 1);

    // Use the data mapper, which will extract (map) the required columns.
    api.UseDefaultDataMapper();
    api.UseMinMaxNormalizer();

    // Run logistic regression for 200 iterations with learningRate = 0.00012.
    api.UseLogisticRegression(0.00012, 200);

    var score = api.Run();

    //************** PREDICTION AFTER MODEL IS CREATED **************//

    // Define the data for testing (prediction).
    LearningApi apiPrediction = new LearningApi(loadMetaData1());

    // The real dataset must be defined as object type, because data can be numeric, binary, or classification data.
    apiPrediction.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        var data = new object[5][]
        {
            new object[] { 660, 3.88, 2, 1 },
            new object[] { 580, 3.36, 2, 0 },
            new object[] { 640, 3.17, 2, 0 },
            new object[] { 640, 3.51, 2, 0 },
            new object[] { 800, 3.05, 2, 1 },
        };
        return data;
    });

    // Use the data mapper, which will extract (map) the required columns.
    apiPrediction.UseDefaultDataMapper();
    apiPrediction.UseMinMaxNormalizer();

    var testData = apiPrediction.Run();

    // Use the previously trained model.
    var result = api.Algorithm.Predict(testData as double[][], api.Context) as LogisticRegressionResult;

    Assert.Equal(Math.Round(result.PredictedValues[0], 0), 0);
    Assert.Equal(Math.Round(result.PredictedValues[1], 0), 0);
    Assert.Equal(Math.Round(result.PredictedValues[2], 0), 0);
    Assert.Equal(Math.Round(result.PredictedValues[3], 0), 0);
    Assert.Equal(Math.Round(result.PredictedValues[4], 0), 0);
}
public void ActionModuleChainTest()
{
    LearningApi api = new LearningApi(null);

    api.UseActionModule<double[], double[]>((input, ctx) =>
    {
        // This module provides some data.
        return new double[] { 1.1, 2.2, 3.3, 4.4 };
    });

    api.UseActionModule<double[], double[]>((input, ctx) =>
    {
        // This module manipulates the data.
        return new double[] { input[0] + 1, input[1] + 1, input[2] + 1, input[3] + 1 };
    });

    var result = api.Run();

    Assert.Equal(2.1, ((double[])result)[0]);
    Assert.Equal(5.4, ((double[])result)[3]);
}
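Worth noting as a worked example of the pipeline semantics: action modules run in registration order, so the first module's output { 1.1, 2.2, 3.3, 4.4 } becomes the second module's input, and Run() returns the last module's output { 2.1, 3.2, 4.3, 5.4 }, whose first and last elements are exactly what the two assertions check.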
public void FullDataSetRBMTest()
{
    const int bits = 10;

    LearningApi api = new LearningApi();

    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        var maxSamples = (int)Math.Pow(2, bits);

        double[][] data = new double[maxSamples][];

        for (int i = 0; i < maxSamples; i++)
        {
            data[i] = new double[bits];

            var val = 1;
            for (int j = 0; j < bits; j++)
            {
                if ((val & i) >= 1)
                {
                    data[i][j] = 1;
                }

                val = val << 1;
            }
        }

        ctx.DataDescriptor = getDescriptorForRbm_sample1();
        return data;
    });

    api.UseRbm(0.01, 1000, bits, 7);

    RbmScore score = api.Run() as RbmScore;

    double[][] testData = new double[4][];
    testData[0] = new double[] { 1, 1, 0, 0, 0, 0, 0, 0, 0, 0 };
    testData[1] = new double[] { 0, 0, 0, 0, 1, 1, 0, 0, 0, 0 };
    testData[2] = new double[] { 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 };
    testData[3] = new double[] { 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 };

    var result = api.Algorithm.Predict(testData, api.Context);

    // NOT FINISHED.
    //Assert.True(result[0] == 1);
    //Assert.True(result[1] == 0);
    //Assert.True(result[2] == 0);
    //Assert.True(result[3] == 0);
    //Assert.True(result[4] == 1);
    //Assert.True(result[5] == 0);
}
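A short worked example of the data generation above: for bits = 10, the action module emits all 1024 possible binary vectors. Row i is the little-endian binary encoding of i, because (val & i) tests bit j of i with val = 1 << j. For instance, i = 5 (binary 101) yields the row 1, 0, 1, 0, 0, 0, 0, 0, 0, 0.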
public void SimpleRBMDeepTest()
{
    LearningApi api = new LearningApi();

    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        const int maxSamples = 12;
        ctx.DataDescriptor = getDescriptorForRbm_sample1();
        double[][] data = new double[maxSamples][];

        data[0] = new double[] { 1, 1, 0, 0, 0, 0 };  // A
        data[1] = new double[] { 0, 0, 1, 1, 0, 0 };  // B
        data[2] = new double[] { 0, 0, 0, 0, 1, 1 };  // C
        data[3] = new double[] { 1, 1, 0, 0, 0, 1 };  // Noisy A
        data[4] = new double[] { 0, 0, 1, 1, 0, 0 };  // B
        data[5] = new double[] { 0, 0, 0, 0, 1, 1 };  // C
        data[6] = new double[] { 1, 0, 0, 0, 0, 0 };  // Weak A
        data[7] = new double[] { 0, 0, 1, 0, 0, 0 };  // Weak B
        data[8] = new double[] { 0, 0, 0, 0, 1, 0 };  // Weak C
        data[9] = new double[] { 1, 1, 0, 1, 0, 0 };  // Noisy A
        data[10] = new double[] { 1, 0, 1, 1, 0, 0 }; // Noisy B
        data[11] = new double[] { 0, 0, 1, 0, 1, 1 }; // Noisy C

        return data;
    });

    api.UseRbm(0.01, 1000, 6, 4);

    RbmScore score = api.Run() as RbmScore;

    double[][] testData = new double[4][];

    Assert.True(score.Loss < 1.0);

    testData[0] = new double[] { 1, 1, 0, 0, 0, 0 };
    testData[1] = new double[] { 0, 0, 0, 0, 1, 1 };
    testData[2] = new double[] { 0, 1, 0, 0, 0, 0 };
    testData[3] = new double[] { 0, 0, 0, 0, 1, 0 };

    var result = api.Algorithm.Predict(testData, api.Context);

    // NOT FINISHED.
    //Assert.True(result[0] == 1);
    //Assert.True(result[1] == 0);
    //Assert.True(result[2] == 0);
    //Assert.True(result[3] == 0);
    //Assert.True(result[4] == 1);
    //Assert.True(result[5] == 0);
}
public void DeltaRuleLearning_Test_iterations_1000_learningrate_02()
{
    loadRealDataSample(); // System coefficients initialization.

    var desc = loadMetaData(); // Description of the system.
    LearningApi api = new LearningApi(desc);

    // The real dataset must be defined as object type, because data can be numeric, binary, or classification data.
    api.UseActionModule<double[,], double[,]>((input, ctx) =>
    {
        return loadRealDataSample(); // Return the actual system coefficient data.
    }); // The run input equals the UseActionModule output.

    // Run the delta rule for 1000 iterations with learningRate = 0.2.
    api.UseDeltaRuleLearning(0.2, 1000);

    var result = api.Run() as double[];

    Debug.WriteLine("************ Output Predictions ***********");
    for (int i = 0; i < result.Length; i++)
    {
        if (result[i] != 0)
        {
            Debug.WriteLine(result[i]);
        }
    }

    using (var fs = File.OpenRead(@"H_Test.csv"))
    using (var reader = new StreamReader(fs))
    {
        while (!reader.EndOfStream)
        {
            var line = reader.ReadLine();
            var values = line.Split(',');

            ti = new double[values.Length];
            to = new double[values.Length];

            for (int i = 0; i < values.Length; i++)
            {
                var val = values[i].Split(' ');
                double.TryParse(val[0], out x);
                double.TryParse(val[1], out y);
                ti[i] = x;
                to[i] = y;
            }
        }
    }

    // Test the test data against the predicted system model.
    for (int i = 0; i < to.Length; i++)
    {
        Assert.Equal(Math.Round(result[i], 4), Math.Round(to[i], 4));
    }
}
public void Test_LoadSave()
{
    string moduleName = "test-action";

    double[][] clusterCenters = new double[3][];
    clusterCenters[0] = new double[] { 5.0, 5.0 };
    clusterCenters[1] = new double[] { 15.0, 15.0 };
    clusterCenters[2] = new double[] { 30.0, 30.0 };

    string[] attributes = new string[] { "Height", "Weight" };

    int numAttributes = attributes.Length; // 2 in this demo (height, weight).
    int numClusters = 3;                   // Vary this to experiment (must be between 2 and the number of data tuples).
    int maxCount = 300;                    // Trial and error.

    ClusteringSettings settings = new ClusteringSettings(maxCount, numClusters, numAttributes, KmeansAlgorithm: 1);

    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadDescriptor());

    api.UseActionModule<object, double[][]>((data, ctx) =>
    {
        var rawData = Helpers.CreateSampleData(clusterCenters, 2, 10000, 0.5);
        return rawData;
    }, moduleName);

    api.UseKMeans(settings);

    var resp = api.Run() as KMeansScore;

    Assert.True(resp.Model.Clusters != null);
    Assert.True(resp.Model.Clusters.Length == clusterCenters.Length);

    var result = api.Algorithm.Predict(clusterCenters, api.Context) as KMeansResult;

    Assert.True(result.PredictedClusters[0] == 0);
    Assert.True(result.PredictedClusters[1] == 1);
    Assert.True(result.PredictedClusters[2] == 2);

    api.Save(nameof(LoadSaveTests));

    var loadedApi = LearningApi.Load(nameof(LoadSaveTests));

    loadedApi.ReplaceActionModule<object, double[][]>(moduleName, (data, ctx) =>
    {
        var rawData = Helpers.CreateSampleData(clusterCenters, 2, 10000, 0.5);
        return rawData;
    });

    loadedApi.Run();
}
public void ActionModuleTest()
{
    LearningApi api = new LearningApi(null);

    api.UseActionModule<double[], double[]>((input, ctx) =>
    {
        return new double[] { 1.1, 2.2, 3.3, 4.4 };
    });

    var result = api.Run();

    Assert.Equal(1.1, ((double[])result)[0]);
    Assert.Equal(4.4, ((double[])result)[3]);
}
public void SimpleSequenceTest()
{
    LearningApi api = new LearningApi();

    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        const int maxSamples = 10;
        ctx.DataDescriptor = getDescriptor();
        double[][] data = new double[maxSamples][];

        //
        // We generate the following input vectors:
        // IN Val - Expected OUT Val
        // 1 - 0
        // 2 - 0
        // ...
        // maxSamples / 2     - 1
        // maxSamples / 2 + 1 - 1
        // maxSamples / 2 + 2 - 1
        for (int i = 0; i < maxSamples; i++)
        {
            data[i] = new double[2];
            data[i][0] = i;
            data[i][1] = (i > (maxSamples / 2)) ? 1 : 0;
        }

        return data;
    });

    api.UseDeltaLearning(0.2, 1000);

    IScore score = api.Run() as IScore;

    double[][] testData = new double[4][];
    testData[0] = new double[] { 2.0, 0.0 };
    testData[1] = new double[] { 4.0, 0.0 };
    testData[2] = new double[] { 6.0, 0.0 };
    testData[3] = new double[] { 8.0, 0.0 };

    var result = api.Algorithm.Predict(testData, api.Context) as DeltaLearningResult;

    Assert.True(result.PredictedResults[0] == 0);
    Assert.True(result.PredictedResults[1] == 0);
    Assert.True(result.PredictedResults[2] == 1);
    Assert.True(result.PredictedResults[3] == 1);
}
public void Training()
{
    int cnt = 0;

    double[][] initialCentroids = new double[4][];
    initialCentroids[0] = new double[] { 0.4, 25.0 };
    initialCentroids[1] = new double[] { 0.4, 15.0 };
    initialCentroids[2] = new double[] { 0.6, 15.0 };
    initialCentroids[3] = new double[] { 0.6, 25.0 };

    string[] attributes = new string[] { "x", "y" };

    int numAttributes = attributes.Length;
    int numClusters = 4;
    int maxCount = 300;

    ClusteringSettings clusterSettings = new ClusteringSettings(maxCount, numClusters, numAttributes, KmeansAlgorithm: 1, Replace: true);

    // The AnomalyDetectionAPI(clusterSettings) constructor argument must not be null when Training() is run.
    AnomalyDetectionAPI kmeanApi = new AnomalyDetectionAPI(clusterSettings);

    AnomalyDetectionResponse response;

    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadMetaData1());

    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        var rawDataArray = getData(cnt);
        return rawDataArray;
    });

    api.UseDefaultDataMapper();
    api.UseGaussNormalizer();

    for (int i = 0; i < 15; i++)
    {
        cnt = i;

        var rawData = api.Run() as double[][];

        response = kmeanApi.Training(rawData, initialCentroids);

        Helpers.WriteToCSVFile(kmeanApi.GetCentroid(), $"Data\\Centroid{i}.csv");

        //response = kmeanApi.Save($"Function{i}.json");
    }
}
public void ContinuousTrainData2()
{
    int cnt = 0;

    double[][] initialCentroids = new double[4][];
    initialCentroids[0] = new double[] { 40.0, 10.0 };
    initialCentroids[1] = new double[] { 20.0, 10.0 };
    initialCentroids[2] = new double[] { 40.0, 20.0 };
    initialCentroids[3] = new double[] { 20.0, 20.0 };

    string[] attributes = new string[] { "x", "y" };

    int numAttributes = attributes.Length; // 2 in this demo (x, y).
    int numClusters = 4;
    int maxCount = 300;

    SaveLoadSettings sett;
    var resp = SaveLoadSettings.JSON_Settings(@"C:\Data\Function1.json", out sett, true);

    AnomalyDetectionAPI kmeanApi = new AnomalyDetectionAPI(null, numClusters, initialCentroids);

    LearningApi api = new LearningApi(loadMetaData1());

    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        var rawDatalist = getData(cnt);
        return rawDatalist;
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseGaussNormalizer();

    for (int i = 0; i < 15; i++)
    {
        cnt = i;

        var rawData = api.Run() as double[][];

        ClusteringSettings Settings = new ClusteringSettings(rawData, maxCount, numClusters, numAttributes, sett, KmeansAlgorithm: 1, InitialGuess: true, Replace: true);

        AnomalyDetectionResponse response = kmeanApi.ImportNewDataForClustering(Settings);
    }
}
public void GaussianAndMean()
{
    LearningApi lApi = new LearningApi();

    lApi.UseActionModule<double[,,], double[,,]>((input, ctx) =>
    {
        Bitmap myBitmap = new Bitmap($"{appPath}/TestPicture/test.gif");

        double[,,] data = new double[myBitmap.Width, myBitmap.Height, 3];

        for (int x = 0; x < myBitmap.Width; x++)
        {
            for (int y = 0; y < myBitmap.Height; y++)
            {
                Color pixelColor = myBitmap.GetPixel(x, y);

                data[x, y, 0] = pixelColor.R;
                data[x, y, 1] = pixelColor.G;
                data[x, y, 2] = pixelColor.B;
            }
        }

        return data;
    });

    lApi.AddModule(new GaussianFilter());
    lApi.AddModule(new MeanFilter());

    double[,,] result = lApi.Run() as double[,,];

    Assert.True(result != null);

    Bitmap blurBitmap = new Bitmap(result.GetLength(0), result.GetLength(1));

    for (int x = 0; x < result.GetLength(0); x++)
    {
        for (int y = 0; y < result.GetLength(1); y++)
        {
            Color pixelColor = Color.FromArgb((int)result[x, y, 0], (int)result[x, y, 1], (int)result[x, y, 2]);
            blurBitmap.SetPixel(x, y, pixelColor);
        }
    }

    SaveImage(blurBitmap, "GaussianAndMean.jpg");
}
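SaveImage is a small helper of the test class that is not shown here. A minimal sketch, assuming it mirrors the output-folder handling of the Sobel filter test above (the folder name and base path are assumptions):

// Hypothetical sketch of the SaveImage helper: writes the bitmap into an
// output folder next to the test binaries, creating it on first use.
private static void SaveImage(Bitmap bitmap, string fileName)
{
    string outDir = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "TestOutputImages");
    if (!Directory.Exists(outDir))
    {
        Directory.CreateDirectory(outDir);
    }

    bitmap.Save(Path.Combine(outDir, fileName));
}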
/// <summary>
/// Provides the settings for the K-Means algorithm.
/// </summary>
public MouseGestureRecognizer()
{
    m_LearningApi = new LearningApi();

    m_LearningApi.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        return m_CurrentData;
    });

    double[][] initCentroids = new double[2][];
    initCentroids[0] = new double[] { 0.0, 0.0, 0.0 };
    initCentroids[1] = new double[] { 400.0, 0.0, 0.0 };

    m_Settings = new ClusteringSettings(KmeansMaxIterations: 1000, numClusters: 2, numDims: 3, KmeansAlgorithm: 2, initialCentroids: initCentroids, tolerance: 0);

    m_LearningApi.UseKMeansFunctionRecognitionModule(m_Settings);
}
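Since the action module simply hands back m_CurrentData, the surrounding class presumably assigns the captured gesture points to that field and then trains in batches, following the RunBatch() pattern from Test_FunctionRecognitionModuleSave above. A hypothetical sketch of such a training step (the method name and field usage are assumptions):

// Hypothetical: feed one captured gesture into the recognizer.
public void TrainGesture(double[][] capturedGesturePoints)
{
    m_CurrentData = capturedGesturePoints;
    var score = m_LearningApi.RunBatch() as KMeansFunctionRecognitonScore;
}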
public void SimpleSequence2DTest()
{
    LearningApi api = new LearningApi();

    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        const int maxSamples = 10000;
        ctx.DataDescriptor = get2DDescriptor();
        double[][] data = new double[maxSamples][];

        for (int i = 0; i < maxSamples / 2; i++)
        {
            data[2 * i] = new double[3];
            data[2 * i][0] = i;
            data[2 * i][1] = 5.0;
            data[2 * i][2] = 1.0;

            data[2 * i + 1] = new double[3];
            data[2 * i + 1][0] = i;
            data[2 * i + 1][1] = -5.0;
            data[2 * i + 1][2] = 0.0;
        }

        return data;
    });

    api.UseDeltaLearning(0.2, 1000);

    IScore score = api.Run() as IScore;

    double[][] testData = new double[6][];
    testData[0] = new double[] { 2.0, 5.0, 0.0 };
    testData[1] = new double[] { 2, -5.0, 0.0 };
    testData[2] = new double[] { 100, -5.0, 0.0 };
    testData[3] = new double[] { 100, -5.0, 0.0 };
    testData[4] = new double[] { 490, 5.0, 0.0 };
    testData[5] = new double[] { 490, -5.0, 0.0 };

    var result = api.Algorithm.Predict(testData, api.Context) as DeltaLearningResult;

    Assert.True(result.PredictedResults[0] == 1);
    Assert.True(result.PredictedResults[1] == 0);
    Assert.True(result.PredictedResults[2] == 0);
    Assert.True(result.PredictedResults[3] == 0);
    Assert.True(result.PredictedResults[4] == 1);
    Assert.True(result.PredictedResults[5] == 0);
}
public void LogisticRegression_Test_iterations_10_learningrate_013()
{
    var desc = loadMetaData();
    LearningApi api = new LearningApi(desc);

    // The real dataset must be defined as object type, because data can be numeric, binary, or classification data.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return loadRealDataSample();
    });

    // Use the data mapper, which will extract (map) the required columns.
    api.UseDefaultDataMapper();
    api.UseMinMaxNormalizer();

    // Run logistic regression for 10 iterations with learningRate = 0.13.
    api.UseLogisticRegression(0.13, 10);

    api.Run();

    IScore score = api.GetScore();

    // Errors during each iteration. If the learning rate is suitable, the error decreases with every iteration.
    Assert.Equal(Math.Round(score.Errors[0], 5), 0.24278);
    Assert.Equal(Math.Round(score.Errors[1], 5), 0.23749);
    Assert.Equal(Math.Round(score.Errors[2], 5), 0.23359);
    Assert.Equal(Math.Round(score.Errors[3], 5), 0.23010);
    Assert.Equal(Math.Round(score.Errors[4], 5), 0.22740);
    Assert.Equal(Math.Round(score.Errors[5], 5), 0.22476);
    Assert.Equal(Math.Round(score.Errors[6], 5), 0.22271);
    Assert.Equal(Math.Round(score.Errors[7], 5), 0.22065);
    Assert.Equal(Math.Round(score.Errors[8], 5), 0.21902);
    Assert.Equal(Math.Round(score.Errors[9], 5), 0.21739);

    // Best model found by logistic regression in 10 iterations.
    Assert.Equal(Math.Round(score.Weights[0], 5), 0.06494);
    Assert.Equal(Math.Round(score.Weights[1], 5), 0.21584);
    Assert.Equal(Math.Round(score.Weights[2], 5), 0.89901);
    Assert.Equal(Math.Round(score.Weights[3], 5), 0.51497);
    Assert.Equal(Math.Round(score.Weights[4], 5), -0.30213);
    Assert.Equal(Math.Round(score.Weights[5], 5), -0.30213);
    Assert.Equal(Math.Round(score.Weights[6], 5), -0.85624);
}
public void Test_OptimalNumberOfClusters()
{
    // Directory to load from.
    string loadDirectory = rootFolder + "Functions\\";
    string FunctionName = "SIN X"; // Without extension.
    string savePath = rootFolder + "Optimal Clusters\\" + FunctionName + " Results.csv";

    double[][] function = Helpers.LoadFunctionData(loadDirectory + FunctionName + "\\" + FunctionName + ".csv");
    function = TestFunctionGenerators.normalizeData(function);

    int numAttributes = 2; // 2 in this demo.
    int numClusters = 0;   // 0 triggers the search for the optimal number of clusters.
    int maxCount = 300;    // Trial and error.

    ClusteringSettings settings = new ClusteringSettings(maxCount, numClusters, numAttributes, KmeansAlgorithm: 2);

    // Creates the LearningApi object.
    LearningApi api = new LearningApi();

    api.UseActionModule<object, double[][]>((data, ctx) =>
    {
        return KMeansAlgorithm.transposeFunction(function);
    });

    api.UseKMeans(settings);

    // Train.
    var resp = api.Run() as KMeansScore;

    Assert.True(resp.Model.NumberOfClusters > 1);

    double[][] OptimalClustersResults = new double[4][];
    OptimalClustersResults[0] = new double[] { 2, 3, 4, 5, 6, 7, 8, 9, 10 };
    OptimalClustersResults[1] = resp.Model.D;
    OptimalClustersResults[2] = resp.Model.DPrime;
    OptimalClustersResults[3] = resp.Model.Fmin;

    Helpers.Write2CSVFile(OptimalClustersResults, savePath);
}
//[InlineData(new int[] { 6, 3 })]
public void ImageRecognitionTest()
{
    int size = 64;
    int numberOfOutputs = 1;
    int numberOfInputs = size * size;

    var context = getImgRecognitionDescriptor(numberOfInputs);

    LearningApi api = new LearningApi(context);

    api.UseActionModule<object, double[][]>((notUsed, ctx) =>
    {
        return getImageData(size, $"{Directory.GetCurrentDirectory()}\\MLPerceptron\\TrainingImages");
        //return getSomOtherData($"{Directory.GetCurrentDirectory()}\\MLPerceptron\\TestFiles\\TrainingData.csv");
    });

    // A high number of hidden neurons in the first layer brings the network to a constant result for everything.
    // var hiddenLayerNeurons = new int[] { size * size, 3 };
    var hiddenLayerNeurons = new int[] { 6000, 9, 3 };

    api.UseMLPerceptron(0.01, 6, 1, 1, hiddenLayerNeurons);

    Stopwatch sw = new Stopwatch();
    sw.Start();

    IScore score = api.Run() as IScore;

    sw.Stop();
    Trace.WriteLine($"Duration: {sw.ElapsedMilliseconds / 1000.0 / 60.0} min");

    var testImageData = getImageData(size, $"{Directory.GetCurrentDirectory()}\\MLPerceptron\\TestingImages");
    //var testImageData = getSomOtherData($"{Directory.GetCurrentDirectory()}\\MLPerceptron\\TestFiles\\TestData.csv");

    MLPerceptronResult result = api.Algorithm.Predict(testImageData, api.Context) as MLPerceptronResult;

    float accuracy = MLPHelpers.GetAccuracy(testImageData, result.results, numberOfOutputs);
}
public void TestWithNormalize_GaussAndCentroid()
{
    double[][] initialCentroids = new double[4][];
    initialCentroids[0] = new double[] { 0.2, -4.0 };
    initialCentroids[1] = new double[] { 0.2, -6.0 };
    initialCentroids[2] = new double[] { 0.4, -4.0 };
    initialCentroids[3] = new double[] { 0.4, -6.0 };

    string[] attributes = new string[] { "x", "y" };

    // Creates the LearningApi object.
    LearningApi api = new LearningApi(loadMetaData1());

    // The real dataset must be defined as object type, because data can be numeric, binary, or classification data.
    api.UseActionModule<object[][], object[][]>((input, ctx) =>
    {
        return getRealDataSample(@"C:\Data\Function15.csv");
    });

    // This call must be first in the pipeline.
    api.UseDefaultDataMapper();

    api.UseGaussNormalizer();

    var rawData = api.Run() as double[][];

    int numAttributes = attributes.Length; // 2 in this demo (x, y).
    int numClusters = 4;                   // Vary this to experiment (must be between 2 and the number of data tuples).
    int maxCount = 300;                    // Trial and error.

    SaveLoadSettings sett;
    var resp = SaveLoadSettings.JSON_Settings(@"C:\Data\Function15.json", out sett, true);

    AnomalyDetectionAPI kmeanApi = new AnomalyDetectionAPI(rawData, numClusters);

    ClusteringSettings Settings = new ClusteringSettings(rawData, maxCount, numClusters, numAttributes, sett, KmeansAlgorithm: 1, InitialGuess: true, Replace: true);

    AnomalyDetectionResponse response = kmeanApi.ImportNewDataForClustering(Settings);
}
public void TestMapWithLearningAPI()
{
    List<string> labels = new List<string>();

    var api = new LearningApi();

    api.UseActionModule<List<double[]>, List<double[]>>((data, context) =>
    {
        List<double[]> patterns = new List<double[]>();
        var dimensions = 3;

        StreamReader reader = File.OpenText(path + "\\SelfOrganizingMap\\Food.csv");

        // Skip the header line.
        reader.ReadLine();

        while (!reader.EndOfStream)
        {
            string[] line = reader.ReadLine().Split(',');
            labels.Add(line[0]);

            double[] inputs = new double[dimensions];
            for (int i = 0; i < dimensions; i++)
            {
                inputs[i] = double.Parse(line[i + 1]);
            }

            patterns.Add(inputs);
        }

        reader.Dispose();

        return patterns;
    });

    api.AddModule(new Map(3, 10, 0.000001));

    var r = api.Run() as Neuron[];

    for (int i = 0; i < r.Length; i++)
    {
        System.Diagnostics.Debug.WriteLine("{0},{1},{2}", labels[i], r[i].m_X, r[i].m_Y);
    }
}
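As the parsing loop shows, each row of Food.csv is expected to carry a label in the first column followed by three numeric features, one per dimension of the self-organizing map, with the header line skipped. A hypothetical line would look like `Butter,0.8,0.1,0.0`; the actual values in the dataset are not part of this listing.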