private void LoadAnnButton_Click(object sender, RoutedEventArgs e)
{
    // Prompt the user for a previously saved network file (.klu).
    var dialog = new Microsoft.Win32.OpenFileDialog
    {
        FileName = "NeuralNetwork",
        DefaultExt = ".klu",
        Filter = "KLU ANN (.klu)|*.klu",
        Title = "Load Neural Network"
    };

    if (dialog.ShowDialog() != true)
    {
        return; // user cancelled the dialog
    }

    // Deserialize the network and rebuild the editor state from its topology.
    _Network = (ActivationNetwork)ActivationNetwork.Load(dialog.FileName);
    _DataSetANN.Clear();

    // The UI layer count includes the input layer, hence the +1.
    _ANN.NumLayers = _Network.LayersCount + 1;
    _ANN.SetNumNeurons(0, _Network.InputsCount);

    for (int layer = 0; layer < _Network.LayersCount; layer++)
    {
        _ANN.SetNumNeurons(layer + 1, _Network[layer].NeuronsCount);

        // Every layer except the last gets a row in the grid
        // (presumably the hidden-layer table — TODO confirm).
        if (layer < _Network.LayersCount - 1)
        {
            _DataSetANN.Tables[0].Rows.Add(_Network[layer].NeuronsCount);
        }
    }

    DrawANN();
}
private void btnTraining_Click(object sender, EventArgs e)
{
    // BUG FIX: the original showed this warning and then fell through,
    // training (or loading) the network anyway with no images. Bail out.
    if (Data.instance.images.Count == 0)
    {
        MessageBox.Show("No picture to learn!!");
        return;
    }

    if (!File.Exists("an.bin"))
    {
        // No saved network yet — train from scratch.
        Data.instance.bpnnTraining();
        MessageBox.Show("Computing BPNN finished");
    }
    else
    {
        // A saved network exists: let the user choose between retraining
        // and reusing the saved weights.
        DialogResult confirm = MessageBox.Show(
            "Are you sure to re-train the classsification network?",
            "Confirmation",
            MessageBoxButtons.YesNo);
        if (confirm == DialogResult.Yes)
        {
            Data.instance.bpnnTraining();
            MessageBox.Show("Computing BPNN finished");
        }
        else
        {
            Data.instance.bpnnNetwork = (ActivationNetwork)ActivationNetwork.Load("an.bin");
        }
    }

    // Either way a usable network is now in place.
    btnPredict.Enabled = true;
}
public Form1()
{
    InitializeComponent();

    // Prefer the back-propagation brain; fall back to the SOM brain.
    if (File.Exists("BPNNBrain.net"))
    {
        an = (ActivationNetwork)ActivationNetwork.Load("BPNNBrain.net");
    }
    else if (File.Exists("SOMBrain.net"))
    {
        // BUG FIX: the original tested for "SOMBrain.net" but then loaded
        // "SOMBrain.ann", which throws FileNotFoundException whenever only
        // the checked file exists. Load the same file we checked for.
        dn = (DistanceNetwork)DistanceNetwork.Load("SOMBrain.net");
    }
}
public void LoadNetwork(string fileName)
{
    // Restore a previously saved network and attach a fresh trainer to it.
    network = (ActivationNetwork)ActivationNetwork.Load(fileName);
    teacher = new BackPropagationLearning(network);

    // Mirror the loaded topology into the bookkeeping fields.
    inputsCount = network.Layers[0].InputsCount;

    int layerTotal = network.Layers.Length;
    neuronsCount = new int[layerTotal];
    for (int layer = 0; layer < layerTotal; layer++)
    {
        neuronsCount[layer] = network.Layers[layer].Neurons.Length;
    }
}
//Going to Quiz Form
private void btnQuiz_Click(object sender, EventArgs e)
{
    // The quiz form needs a trained network; refuse to open without one.
    if (!System.IO.File.Exists(savedANNetwork))
    {
        MessageBox.Show($"{savedANNetwork} not found");
        return;
    }

    // Load the saved network and hand it to the quiz form.
    an = (ActivationNetwork)ActivationNetwork.Load(savedANNetwork);
    var form = new quizForm(an);
    form.ShowDialog();
}
public Form1()
{
    InitializeComponent();

    // Fill the classes' fixed positions based on the combobox entries.
    foreach (String item in comboBox1.Items)
    {
        Data.instance.classes.Add(item);
    }

    // Try to locate and load the activation network from file "an.bin".
    if (File.Exists("an.bin"))
    {
        Data.instance.bpnnNetwork = (ActivationNetwork)ActivationNetwork.Load("an.bin");

        // Rebuild the training image list from the assets folder,
        // one subdirectory per class.
        if (Directory.Exists(Environment.CurrentDirectory + @"\assets"))
        {
            String[] categories = Directory.GetDirectories("assets");
            foreach (var item in categories)
            {
                String category = new DirectoryInfo(item).Name;
                String[] images = Directory.GetFiles(@"assets\" + category);
                foreach (var item2 in images)
                {
                    Bitmap img = new Bitmap(item2);
                    Data.instance.images.Add(img);
                    Data.instance.indexClasses.Add(Data.instance.classes.IndexOf(category));
                }
            }
        }

        // BUG FIX: both messages referred to "an.net" while the file that is
        // actually checked and loaded is "an.bin".
        MessageBox.Show("\"an.bin\" loaded successfully");
    }
    else
    {
        MessageBox.Show("There's no \"an.bin\" file");
    }

    // The images' extension must be either "jpg", "jpeg", or "png".
    openFileDialog1.Filter = "Image |*.jpg;*.png;*.jpeg";
    comboBox1.SelectedIndex = 0;
}
public MainForm()
{
    InitializeComponent();

    picturesPath = Application.StartupPath + @"\pictures";
    listAllImages = new List<String>();

    // Restore the back-propagation network if a saved brain file exists.
    if (File.Exists("BPNNBrain.net"))
    {
        Network.loadActivationNetwork = (ActivationNetwork)ActivationNetwork.Load("BPNNBrain.net");
    }

    // Restore the SOM (distance) network if a saved brain file exists.
    if (File.Exists("SOMBrain.net"))
    {
        Network.loadDistanceNetwork = (DistanceNetwork)DistanceNetwork.Load("SOMBrain.net");
    }

    // Preload every picture found under the pictures directory.
    if (Directory.Exists(picturesPath))
    {
        listAllImages = LoadImages();
        // True when the user has added some art of their own.
        checkListAllImages = Network.checkAddArt;
    }
}
/// <summary>
/// Loads a trained network from <paramref name="networkPath"/>, classifies
/// every record in <paramref name="inputData"/>, and prints
/// "correct total accuracy" to the console.
/// </summary>
private static void TestNetwork(Record[] inputData, String networkPath)
{
    Network network = ActivationNetwork.Load(networkPath);
    Int32 correct = 0;

    Array.ForEach(inputData, record =>
    {
        Double[] output = network.Compute(record.Input);
        Int32 decodedResult = DecodeResult(output);
        // Removed the original's dead empty `else { }` branch.
        if (decodedResult == record.Label)
        {
            correct++;
        }
    });

    // `correct * 1.0` forces floating-point division for the accuracy.
    Console.WriteLine("{0} {1} {2}", correct, inputData.Length, (correct * 1.0) / inputData.Length);
}
/// <summary>
/// Loads the neural network from the given file path
/// (Polish: "load network from file").
/// </summary>
public void ZaladujSiecZPliku(string plik) =>
    siecNeuronowa = (ActivationNetwork)ActivationNetwork.Load(plik);
private void btnTestUseenData_Click(object sender, RoutedEventArgs e)
{
    string srDataPath = "testing/iris.data";

    // PERF FIX: the original called File.ReadAllLines on the same path three
    // times (once for the count, twice to iterate). Read the file once.
    string[] srLines = File.ReadAllLines(srDataPath);
    int irNumberOfExamples = srLines.Length;

    // input holds all instances; output holds their one-hot encoded classes.
    double[][] input = new double[irNumberOfExamples][];
    double[][] output = new double[irNumberOfExamples][];
    List<double> lstCorrectOutPutclass = new List<double>();
    List<double> lstOutPutClasses = new List<double>();

    // "." is the decimal separator regardless of the machine locale.
    NumberFormatInfo formatProvider = new NumberFormatInfo();
    formatProvider.NumberDecimalSeparator = ".";
    formatProvider.NumberGroupSeparator = ",";

    // First pass: record every row's class label (last CSV field) and build
    // the list of distinct classes in order of first appearance.
    foreach (var vrPerLine in srLines)
    {
        var vrOutPut = Convert.ToDouble(vrPerLine.Split(',').Last(), formatProvider);
        lstCorrectOutPutclass.Add(vrOutPut);
        if (!lstOutPutClasses.Contains(vrOutPut))
        {
            lstOutPutClasses.Add(vrOutPut);
        }
    }

    // Second pass: build the input vectors and one-hot output vectors.
    int irCounter = 0;
    foreach (var vrPerLine in srLines)
    {
        // I/M/F sex codes are mapped to numeric stand-ins before parsing.
        input[irCounter] = vrPerLine.Split(',').SkipLast(1)
            .Select(pr => Convert.ToDouble(pr.Replace("I", "0.0").Replace("M", "0.5").Replace("F", "1.0"), formatProvider))
            .ToArray();
        output[irCounter] = new double[lstOutPutClasses.Count];
        var vrCurrentOutClass = Convert.ToDouble(vrPerLine.Split(',').Last(), formatProvider);
        output[irCounter][lstOutPutClasses.IndexOf(vrCurrentOutClass)] = 1;
        irCounter++;
    }

    // Score the saved model on this unseen data set.
    ActivationNetwork savedModel = (ActivationNetwork)ActivationNetwork.Load("iris_model.txt");
    List<double> lstGuessedClasses = new List<double>();
    var vrAccuracy = calculateAcurracy(savedModel, input, output, lstOutPutClasses, lstGuessedClasses);

    // Write a per-row real-vs-predicted report plus the overall accuracy.
    StringBuilder srBuild = new StringBuilder();
    srBuild.AppendLine("accuracy of unseen data testing: " + vrAccuracy.ToString("N2").ToString());
    int irIndexCounter = 0;
    foreach (var vrGuess in lstGuessedClasses)
    {
        srBuild.AppendLine($"test data index: {irIndexCounter}\t\tReal Class: {lstCorrectOutPutclass[irIndexCounter]}\t\tPredicted Class: {vrGuess}");
        irIndexCounter++;
    }
    File.WriteAllText("unseen_data_test_results.txt", srBuild.ToString());
}
static void Main(string[] args)
{
    int VERBOSITY = 0;

    // FIX: wrap the log writer in `using` — the original only called Close()
    // on the happy path, so any exception mid-run lost buffered log lines.
    using (StreamWriter logFile = new StreamWriter("logFileParallel.csv"))
    {
        logFile.WriteLine("maze,trainingSet,trainSetSize,testSet,performance,lengthPerformance,trainingError,sizeOfSetUniqueElements");

        for (int run = 0; run < 10; run++)
        {
            Console.WriteLine("****************************************************RUN NUMBER " + run);

            // --------------------------- CREATE MAZE ---------------------------
            Maze maze = null;
            string mazeType = "";
            Dictionary<string, List<Sequence>> setsToUse = new Dictionary<string, List<Sequence>>();

            // Only maze 1 (Maze-21) is exercised in this configuration;
            // widen the loop bounds to run the T and 25 mazes too.
            for (int mazeToUse = 1; mazeToUse < 2; mazeToUse++)
            {
                if (mazeToUse == 0) { Generate_T(out maze, out setsToUse); mazeType = "Maze-T"; }
                if (mazeToUse == 1) { Generate_21(out maze, out setsToUse); mazeType = "Maze-21"; }
                if (mazeToUse == 2) { Generate_25(out maze, out setsToUse); mazeType = "Maze-25"; }

                // Loop through all the training sets for this maze.
                foreach (string trainSetName in setsToUse.Keys)
                {
                    Console.WriteLine("------------------");
                    Console.WriteLine(mazeType + " " + trainSetName);
                    List<Sequence> setToUse = setsToUse[trainSetName];

                    // --------------------------- PREPARE DATA ---------------------------
                    bool forceBidirectionality = true;
                    List<Triplet> triplets = GetTripletsFromSequences(setToUse, forceBidirectionality);
                    if (VERBOSITY > 1)
                    {
                        Console.WriteLine("\n---------------------------------------------------------");
                        Console.WriteLine("TRAINING SET : " + trainSetName);
                        Console.WriteLine("---------------------------------------------------------");
                        Console.WriteLine("Training set elements");
                        foreach (Triplet item in triplets)
                        {
                            Console.WriteLine(item.ToString());
                        }
                        Console.WriteLine("---------------------------------------------------------");
                    }

                    double[][] input;
                    double[][] output;
                    GetTrainingSet(maze, triplets, out input, out output);

                    // --------------------------- CREATE NETWORK ---------------------------
                    // (BipolarSigmoidFunction was a previously tried alternative.)
                    var function = new SigmoidFunction(2.0);
                    // FIX: removed the original's redundant double assignment
                    // (`goalNetwork = goalNetwork = new ActivationNetwork(...)`).
                    ActivationNetwork goalNetwork = new ActivationNetwork(function, 2 * maze.StatesCount, 20, maze.StatesCount);
                    ParallelResilientBackpropagationLearning goalTeacher = new ParallelResilientBackpropagationLearning(goalNetwork);

                    int epoch = 0;
                    double stopError = 0.1;
                    int resets = 0;
                    double minimumErrorReached = double.PositiveInfinity;

                    // Restart training (up to 5 times) until the error target is met,
                    // checkpointing the best network seen so far to disk.
                    while (minimumErrorReached > stopError && resets < 5)
                    {
                        goalNetwork.Randomize();
                        goalTeacher.Reset(0.0125);

                        double error = double.PositiveInfinity;
                        for (epoch = 0; epoch < 500 && error > stopError; epoch++)
                        {
                            error = goalTeacher.RunEpoch(input, output);
                            if (error < minimumErrorReached)
                            {
                                minimumErrorReached = error;
                                goalNetwork.Save("goalNetwork.mlp");
                            }
                        }
                        Console.Write(".(" + error.ToString("N2") + ") ");
                        resets++;
                    }
                    Console.WriteLine();

                    // Reload the best checkpoint rather than the last epoch's weights.
                    goalNetwork = ActivationNetwork.Load("goalNetwork.mlp") as ActivationNetwork;
                    if (VERBOSITY > 0)
                    {
                        GenerateReport(maze, triplets, goalNetwork);
                    }

                    // --------------------------- TEST ---------------------------
                    double score, lengthScore;
                    int totalElements;
                    double[,] pathMatrix;
                    ComputePathMatrix(maze, goalNetwork, out score, out lengthScore, out pathMatrix, trainSetName, mazeType);

                    // Evaluate on the training set itself.
                    List<Triplet> setToEvaluate = triplets;
                    EvaluateSpecificSet(maze, pathMatrix, setToEvaluate, out score, out lengthScore, out totalElements);
                    Console.WriteLine("Success percentage over training set = " + score.ToString("N2") + "% and lengthScore=" + lengthScore.ToString("N2") + " over " + totalElements + "elements");
                    logFile.WriteLine(mazeType + "," + trainSetName + "," + triplets.Count + "," + "training-set" + "," + score + "," + lengthScore + "," + minimumErrorReached + "," + totalElements);

                    // Evaluate on everything except the training set (generalization).
                    EvaluateWithoutSpecificSet(maze, pathMatrix, setToEvaluate, out score, out lengthScore, out totalElements);
                    Console.WriteLine("Success percentage over generalization set = " + score.ToString("N2") + "% and lengthScore=" + lengthScore.ToString("N2") + " over " + totalElements + "elements");
                    logFile.WriteLine(mazeType + "," + trainSetName + "," + triplets.Count + "," + "generalization-set" + "," + score + "," + lengthScore + "," + minimumErrorReached + "," + totalElements);

                    logFile.Flush();
                }
            }
        }
    }
    Console.ReadKey();
}
// Explicit IMachineLearning implementation: restore the network from disk.
void IMachineLearning.load(string path) =>
    theNetwork = ActivationNetwork.Load(path) as ActivationNetwork;
/// <summary>Restores the bot's trained network from its fixed data file.</summary>
public override void Load() =>
    _network = ActivationNetwork.Load("NeuralNetworkBot2.dat") as ActivationNetwork;
/// <summary>
/// Runs the trained neural network over unseen points and returns the
/// predicted series.
/// </summary>
/// <param name="dadosBase">Data the predictions will be compared against.</param>
/// <param name="dadosAuxiliares">Data preceding the base data. For validation: the training data. For test: the validation data.</param>
/// <param name="indiceID">Index where the base data starts within the full data set. For validation: training size. For test: training size + validation size.</param>
/// <returns>The de-differenced predicted series.</returns>
private List<double> Prever(List<double> dadosBase, List<double> dadosAuxiliares, int indiceID)
{
    // TODO(review): hard-coded absolute path — should come from configuration.
    network = (ActivationNetwork)ActivationNetwork.Load(@"C:\Users\Paulo\Desktop\NetworkTest.bin");

    // Working buffers used during prediction.
    List<double> dadosPrevisao = new List<double>();
    List<double> diferenca = new List<double>();
    // All ids from 1 to 52.
    List<int> ids = Serie.Ids;
    // Holds the binary-encoded id.
    int[] id = new int[6];
    int tamanhoAux = dadosAuxiliares.Count;

    // Seed the sliding window so the first predicted point lines up with
    // the last points of the auxiliary data.
    int con = dadosAuxiliares.Count - windowSize - 1;
    for (int i = con; i < tamanhoAux; i++)
    {
        dadosPrevisao.Add(dadosAuxiliares[i]);
    }

    // The solution must be one element longer than the base data.
    int solutionSize = dadosBase.Count + 1;
    List<double> solution = new List<double>();

    // Network input: the value window plus the binary ids of the predicted steps.
    double[] networkInput = new double[windowSize + predictionSize * 6];
    int contador = 0;
    con = indiceID - windowSize - 1;

    // Slide the prediction window forward predictionSize points at a time.
    for (int i = 0, n = dadosBase.Count + 1; i < n; i = i + predictionSize)
    {
        int a = windowSize;
        contador = 0;

        // Fill the network input from the current prediction window.
        for (int j = 0; j < windowSize + predictionSize; j++)
        {
            if (j < windowSize)
            {
                // Values are normalized into [-1, 1] before entering the network.
                networkInput[j] = (dadosPrevisao[i + j] - Serie.Min) * fatorNormal - 1.0;
            }
            else
            {
                id = CUtil.ConversaoBinario(ids[con + i + a]);
                a++;
                for (int c = 0; c < 6; c++)
                {
                    networkInput[windowSize + contador] = id[c];
                    contador++;
                }
            }
        }

        // PERF FIX: compute once per window — the original re-ran the full
        // forward pass in the loop condition and twice per output element,
        // all on the same unchanged input.
        double[] saida = network.Compute(networkInput);
        for (int k = 0; k < saida.Length; k++)
        {
            if ((i + k) < solutionSize)
            {
                // De-normalize back to the original value range.
                double valor = (saida[k] + 1.0) / fatorNormal + Serie.Min;
                diferenca.Add(valor);
                dadosPrevisao.Add(valor);
            }
        }
    }

    // Undo the differencing and drop the seed point.
    solution = Serie.DiferencaInversa(diferenca, Serie.Dados[indiceID, 1]);
    solution.RemoveAt(0);
    return solution;
}