// Structural mutation: replaces one randomly chosen active connection with a
// new hidden node wired between the old connection's endpoints.
// Does nothing when the network has no active connections.
public void MutateAddNode(NeuroNet nn)
{
    // Bail out early unless at least one connection is active.
    bool hasActive = false;
    foreach (var c in nn.Connections)
    {
        if (c.Active)
        {
            hasActive = true;
            break;
        }
    }
    if (!hasActive)
    {
        return;
    }

    // Draw random indices until we land on an active connection.
    int index;
    do
    {
        index = GetRandomInt(nn.Connections.Count);
    } while (!nn.Connections[index].Active);

    Connection old = nn.Connections[index];
    old.Active = false;

    // Insert a fresh node in place of the disabled connection and bridge it
    // to the old endpoints with two randomly weighted connections.
    Node inserted = new Node(nn.Nodes.Count);
    Connection inbound = new Connection(GetNewID(), old.From, inserted.ID, GetRandom(-WeightDispersion, WeightDispersion));
    Connection outbound = new Connection(GetNewID(), inserted.ID, old.To, GetRandom(-WeightDispersion, WeightDispersion));
    nn.Nodes.Add(inserted);
    nn.Connections.Add(inbound);
    nn.Connections.Add(outbound);
}
// Loads a serialized NeuroNet from disk and makes it the active network.
// fileName: path to a file previously written with BinaryFormatter.
// Throws FileNotFoundException / SerializationException on bad input.
internal void OpenNeuroNet(string fileName)
{
    // SECURITY NOTE: BinaryFormatter is unsafe on untrusted data and removed
    // in .NET 9 — consider migrating the save format (e.g. System.Text.Json).
    BinaryFormatter formatter = new BinaryFormatter();
    // 'using' guarantees the file handle is released even if Deserialize
    // throws (the original leaked the FileStream).
    using (FileStream stream = new FileStream(fileName, FileMode.Open))
    {
        neuralNetwork = (NeuroNet)formatter.Deserialize(stream);
    }
}
// Builds a new network with the configured input/output counts, plus the
// requested number of extra hidden nodes (each wired in with one inbound and
// one outbound connection) and extra random connections.
public NeuroNet GenerateNeuroNet(int additionalNodes = 0, int additionalConnections = 1)
{
    NeuroNet net = new NeuroNet(Inputs, Outputs);

    // Create the extra hidden nodes first so connection endpoints exist.
    List<Node> hidden = new List<Node>();
    for (int i = 0; i < additionalNodes; ++i)
    {
        Node node = new Node(net.Nodes.Count);
        hidden.Add(node);
        net.AddNode(node);
    }

    // Any node may act as a source; only non-inputs (ID >= Inputs) as targets.
    List<Node> sources = net.Nodes;
    List<Node> targets = net.Nodes.FindAll(x => x.ID >= Inputs);

    // Wire each new hidden node in with one inbound and one outbound link.
    for (int i = 0; i < additionalNodes; ++i)
    {
        net.AddConnection(GetNewID(), sources[GetRandomInt(sources.Count)].ID, hidden[i].ID, GetRandom(-WeightDispersion, WeightDispersion));
        net.AddConnection(GetNewID(), hidden[i].ID, targets[GetRandomInt(targets.Count)].ID, GetRandom(-WeightDispersion, WeightDispersion));
    }

    // Sprinkle in the requested number of purely random connections.
    for (int i = 0; i < additionalConnections; ++i)
    {
        net.AddConnection(GetNewID(), sources[GetRandomInt(sources.Count)].ID, targets[GetRandomInt(targets.Count)].ID, GetRandom(-WeightDispersion, WeightDispersion));
    }
    return net;
}
// Lays out one prefab marker per network node: inputs in the first column,
// each hidden layer in its own column, outputs in the last column. Nodes in a
// column are stacked along -Z; each new column advances along +X and resets Z.
private void NodeVizualization(NeuroNet neuroNet)
{
    Vector3 cursor = this.transform.position;

    // Input column.
    foreach (var node in neuroNet.Input)
    {
        GameObject marker = Instantiate(Prefab, cursor, Quaternion.identity);
        marker.transform.SetParent(Parent.transform);
        Nodes.Add(node.Index, marker);
        cursor -= new Vector3(0, 0, Prefab.transform.localScale.magnitude * 1.5f);
    }
    cursor += new Vector3(Prefab.transform.localScale.magnitude * 2, 0, 0);
    cursor.z = this.transform.position.z;

    // One column per hidden layer.
    foreach (var layer in neuroNet.HiddenLayer)
    {
        foreach (var node in layer)
        {
            GameObject marker = Instantiate(Prefab, cursor, Quaternion.identity);
            marker.transform.SetParent(Parent.transform);
            Nodes.Add(node.Index, marker);
            cursor -= new Vector3(0, 0, Prefab.transform.localScale.magnitude * 1.5f);
        }
        cursor += new Vector3(Prefab.transform.localScale.magnitude * 2, 0, 0);
        cursor.z = this.transform.position.z;
    }

    // Output column.
    foreach (var node in neuroNet.Output)
    {
        GameObject marker = Instantiate(Prefab, cursor, Quaternion.identity);
        marker.transform.SetParent(Parent.transform);
        Nodes.Add(node.Index, marker);
        cursor -= new Vector3(0, 0, Prefab.transform.localScale.magnitude * 1.5f);
    }
}
// Captures everything a training run needs: the training set, the learning
// rate (alpha), the momentum term, and the network being trained.
public Learning(List<KeyValuePair<List<double>, List<double>>> dataSet, double learnCoef, NeuroNet neuroNet, double moment)
{
    learningSet = dataSet;
    alpha = learnCoef;
    this.neuroNet = neuroNet;
    this.moment = moment;
}
// Applies one structural mutation to nn and records it on nn.MutationsDone so
// it can later be undone. With probability that shrinks as the net approaches
// full connectivity, a new connection is added; otherwise a random connection
// is replaced by a new hidden node bridged by two fresh connections.
// fun: the fitness just achieved, stored as the new ParentResult.
public void StructureMutation(NeuroNet nn, double fun)
{
    Mutation m = new Mutation();
    m.PrevParentFun = nn.ParentResult;

    // maxLen = upper bound on connection count (inputs->non-inputs plus
    // non-inputs->non-inputs); 'fully' rises toward 1 as the net saturates.
    long maxLen = nn.Inputs * (nn.Nodes.Count - nn.Inputs) + (nn.Nodes.Count - nn.Inputs) * (nn.Nodes.Count - nn.Inputs);
    double fully = (double)Math.Pow(((double)nn.Connections.Count / (double)maxLen), 4);
    double rands = GetRandom(0.0, 1.0);
    if (rands > fully)
    {
        // Add a brand-new connection between a random, not-yet-connected
        // pair; targets are restricted to non-input nodes.
        m.Type = MutationType.AddConnection;
        var avT = nn.Nodes.Where(x => !x.IsInput).ToList();
        int f;
        int t;
        do
        {
            f = nn.Nodes[GetRandomInt(nn.Nodes.Count)].ID;
            t = avT[GetRandomInt(avT.Count)].ID;
        } while (nn.Connections.Exists(x => x.From == f && x.To == t));
        Connection con = new Connection(GetNewID(), f, t, GetRandom(-WeightDispersion, WeightDispersion));
        m.NewConnection = con;
        nn.Connections.Add(con);
    }
    else
    {
        // Split a random connection: remove it and insert a new node bridged
        // by two new randomly weighted connections.
        // (An unused 'avT' LINQ list the original built here was removed.)
        m.Type = MutationType.AddNode;
        Connection sel = nn.Connections[GetRandomInt(nn.Connections.Count)];
        nn.Connections.Remove(sel);
        Node n = new Node(nn.Nodes.Count);
        Connection con1 = new Connection(GetNewID(), sel.From, n.ID, GetRandom(-WeightDispersion, WeightDispersion));
        Connection con2 = new Connection(GetNewID(), n.ID, sel.To, GetRandom(-WeightDispersion, WeightDispersion));
        nn.Connections.Add(con1);
        nn.Connections.Add(con2);
        nn.Nodes.Add(n);
        m.RemovedConnection = sel;
        m.AdditionalNode = n;
        m.NewConnection = con1;
        m.NewConnection2 = con2;
    }

    // Reset the per-individual counters tracking life since the last
    // structural change.
    nn.Copies = 0;
    nn.OperationsAfterSM = 1;
    m.PreviousAge = nn.Age;
    nn.ParentResult = fun;
    nn.Age = 0;
    nn.MutationsDone.Push(m);
}
// Adds an externally supplied network to the population, provided its
// input/output shape matches this trainer's configuration; otherwise ignored.
public void LoadNet(NeuroNet nn)
{
    // Reject nets whose topology does not match.
    if (nn.Inputs != this.Inputs || nn.Outputs != this.Outputs)
    {
        return;
    }
    lock (_currentNets)
    {
        _currentNets.Add(nn);
    }
    Individuals += 1;
}
// Renders the network once to establish its extent, re-centres the
// visualization root so the node columns are centred on this object's Z
// position, then renders again at the corrected position.
private void VisualizeNet(NeuroNet neuroNet)
{
    NetVisualization visualization = Visual.GetComponent<NetVisualization>();
    visualization.Visualize(neuroNet);

    // Half the distance from the root to the farthest node along Z.
    float farthestZ = visualization.MaxZPosition();
    Vector3 rootPos = Visual.transform.position;
    float halfSpan = (rootPos - new Vector3(rootPos.x, rootPos.y, farthestZ)).magnitude / 2;

    // Shift the root so the layout is centred, then draw the final version.
    Visual.transform.position = new Vector3(rootPos.x, rootPos.y, this.transform.position.z + halfSpan);
    visualization.Visualize(neuroNet);
}
// Unity lifecycle hook: prepare the IO buffers, remember the spawn position,
// and build the default evolvable network (4 inputs, 3 outputs).
void Start()
{
    input = new List<float>();
    output = new List<float>() { 0, 0, 0 };
    lastPos = this.transform.position;
    Control = new EvolveNeuroNet(4, 3);
    Control.GenerateDefaultNet();
}
// Prepares a recognition test harness: stores the alphabet/test parameters,
// allocates the test-string buffer, sets up image processing, and builds the
// letter network sized for width*height pixel inputs.
public Test(int width, int height, string alphabet, int length, int tests_length)
{
    this.alphabet = alphabet;
    this.tests_length = tests_length;
    this.length = length;
    tests = new string[tests_length];
    img_process = new ImageProcess(width, height);
    System.Console.WriteLine("Initialize network...");
    // (The original constructed a System.Random here that was never used —
    // removed as dead code.)
    letters_network = new NeuroNet(width * height, new int[] { 0, 2000, alphabet.Length * length }, new int[] { 0, 0 }, 1);
    read_filenames();
}
// Produces an offspring network combining parents a and b: copies a's hidden
// nodes and all of a's connections, merges in b's nodes/connections; where
// both parents share a connection, the weights are averaged and the activity
// flags reconciled (with a small mutation chance).
public NeuroNet MergeNeuroNets(NeuroNet a, NeuroNet b)
{
    NeuroNet nn = new NeuroNet(Inputs, Outputs);
    // NOTE(review): if IDs 0..Inputs+Outputs-1 are the fixed IO nodes, this
    // '>' skips the first hidden node (ID == Inputs+Outputs) — possibly an
    // off-by-one; confirm against NeuroNet's ID scheme before changing.
    a.Nodes.ForEach(x => { if (x.ID > Inputs + Outputs) { nn.AddNode(new Node(x.ID)); } });
    a.Connections.ForEach(x => nn.AddConnection(x.ID, x.From, x.To, x.Weight, x.Active));
    b.Nodes.ForEach(x => { if (!nn.Nodes.Exists(y => y.ID == x.ID)) { nn.AddNode(x); } });
    b.Connections.ForEach(x =>
    {
        Connection con = nn.Connections.Find(y => y.From == x.From && y.To == x.To);
        if (con == null)
        {
            // Connection exists only in b: inherit it as-is.
            nn.AddConnection(x);
        }
        else
        {
            // Shared connection: average the two parents' weights.
            con.Weight = (con.Weight / 2 + x.Weight / 2);
            con.ID = GetNewID();
            if (con.Active && !x.Active)
            {
                con.Active = false;
            }
            // NOTE(review): '!con.Active == !x.Active' compares the two flags
            // for equality, and the branch mutates x (parent b's connection)
            // rather than con — verify mutating the parent is intended.
            else if (!con.Active == !x.Active)
            {
                if (GetRandom(0.0, 1.0) < MergeMutationChance)
                {
                    x.Active = !x.Active;
                }
            }
        }
    });
    return(nn);
}
// Full redraw: clears previous markers, places a marker per node, then draws
// the outgoing connections of every input and hidden node.
public void Visualize(NeuroNet neuroNet)
{
    DeleteNodes();
    NodeVizualization(neuroNet);

    foreach (var inputNode in neuroNet.Input)
    {
        ConnectionVizualization(inputNode);
    }
    foreach (var layer in neuroNet.HiddenLayer)
    {
        foreach (var hiddenNode in layer)
        {
            ConnectionVizualization(hiddenNode);
        }
    }
}
// Reconstructs the selected learned network from the database and opens a
// solving window for it.
private void btnUse_Click(object sender, EventArgs e)
{
    int countInputNeurons = dbHandler.SelectCountInputParametersInTask(lbTaskSelected.Text);
    int countOutputNeurons = 1;

    // Restore the activation function and its stored parameter values.
    ActivateFunction af = LibraryOfActivateFunctions.GetActivateFunction(dbHandler.SelectActivateFunctionTypeByNeuroNet(lbNetSelected.Text), LibraryOfActivateFunctions.GetterParameter.TypeOfActivateFunctionName);
    List<double> valuesOfParametersAF = dbHandler.SelectValuesOfParametersOfAF(lbNetSelected.Text);
    int k = 0;
    foreach (double item in valuesOfParametersAF)
    {
        af.SetValueOfParameter(k, item);
        k++;
    }

    // Rebuild the adjacency/weight matrices from the stored topology.
    // (Newly allocated bool[,]/double[,] are already all-false/all-zero, so
    // the original's explicit zeroing loops were redundant and are removed.)
    int countNeurons = dbHandler.SelectCountNeuronsInNet(lbNetSelected.Text);
    bool[,] connections = new bool[countNeurons, countNeurons];
    double[,] weights = new double[countNeurons, countNeurons];
    List<Tuple<int, int, double>> ls = dbHandler.SelectLearnedTopology(lbNetSelected.Text, lbSelSelected.Text, LearningAlgorithmsLibrary.GetNameOfTypeOfAlgoritm(lbLASelected.Text));
    foreach (Tuple<int, int, double> item in ls)
    {
        // Stored tuples index the matrices as [Item2, Item1] with weight
        // Item3 — row/column meaning comes from NeuroNet's matrix convention.
        connections[item.Item2, item.Item1] = true;
        weights[item.Item2, item.Item1] = item.Item3;
    }

    int[] neuronsInLayers = dbHandler.SelectNeuronsInLayers(lbNetSelected.Text);
    NeuroNet net = new NeuroNet(countInputNeurons, countOutputNeurons, neuronsInLayers, connections, weights, af);
    NeuroNetSolvingWindow solvingWnd = new NeuroNetSolvingWindow(net);
    solvingWnd.Show();
}
// Perturbs every connection weight by a random delta whose magnitude decays
// slowly (1 / OperationsAfterSM^0.05) as operations accumulate since the last
// structural mutation. All deltas are recorded so the mutation can be undone.
public void WeightMutation(NeuroNet nn)
{
    Mutation m = new Mutation
    {
        WeightChanges = new List<Tuple<Connection, double>>(),
        Type = MutationType.WeightChange
    };
    m.PrevParentFun = nn.LastResult;

    foreach (Connection c in nn.Connections)
    {
        double delta = GetRandom(-this.WeightDispersion, this.WeightDispersion) * (double)(1 / Math.Pow(nn.OperationsAfterSM, 0.05));
        m.WeightChanges.Add(new Tuple<Connection, double>(c, delta));
        c.Weight += delta;
    }

    nn.OperationsAfterSM++;
    nn.MutationsDone.Push(m);
    nn.Age += 1;
}
// Mutation: adds one new random connection. Any node may be the source; the
// target must be a non-input node (ID >= Inputs). Connections discovered this
// generation are cached in _generationConnections so the same innovation
// keeps the same ID across individuals.
// WARNING: the do/while cannot terminate if every allowed (from, to) pair
// already exists — callers are expected to avoid fully connected nets.
public void MutateAddConnection(NeuroNet nn)
{
    List<Node> sources = nn.Nodes;
    List<Node> targets = nn.Nodes.FindAll(x => x.ID >= Inputs);

    // Keep drawing random endpoint pairs until we find one not yet connected.
    int from = 0;
    int to = 0;
    do
    {
        from = sources[GetRandomInt(sources.Count)].ID;
        to = targets[GetRandomInt(targets.Count)].ID;
    } while (nn.Connections.Exists(x => x.From == from && x.To == to));

    Connection known = _generationConnections.Find(x => x.From == from && x.To == to);
    if (known == null)
    {
        // New innovation: mint an ID and remember it for this generation.
        Connection con = new Connection(GetNewID(), from, to, GetRandom(-WeightDispersion, WeightDispersion));
        nn.AddConnection(con);
        _generationConnections.Add(con);
    }
    else
    {
        // Same innovation already appeared this generation: clone it.
        nn.AddConnection(new Connection(known));
    }
}
// Entry point: trains the fracture-recognition network on several threads,
// saves the learned weights, then runs the test suite.
static void Main(string[] args)
{
    try
    {
        // Image pre-processing / cropping helper.
        ImageProcess imgprocess = new ImageProcess(width, height, win_width, win_height);
        // Random delays de-synchronize the training threads.
        System.Random rnd = new System.Random((int)System.DateTime.Now.Ticks);

        // Fracture-recognition network.
        neuronet_fractures = new NeuroNet(width * height, new int[] { 0, 300, 36 }, 1);
        fast_test_fractures();

        // Spin up three background training threads with random start delays.
        for (int i = 0; i < 3; i++)
        {
            new Thread(() => teach_fracture_network(3000)).Start();
            System.Threading.Thread.Sleep(rnd.Next(100, 5000));
        }

        // Keep the main thread busy training as well.
        teach_fracture_network(3000, 1);
        System.Threading.Thread.Sleep(rnd.Next(100, 200));
        // Grace period so the worker threads can finish.
        System.Threading.Thread.Sleep(500);

        // Persist the learned weights, then run the tests.
        neuronet_fractures.write_weights();
        test_fractures();

        System.Threading.Thread.Sleep(10000000);
    }
    catch (System.IO.FileNotFoundException e)
    {
        // BUG FIX: the original passed e.Source to a format string with no
        // placeholder, so the offending source was never printed.
        Console.WriteLine("File does not exist: {0}", e.Source);
    }
}
// Entry point: trains the fracture-recognition network on several threads,
// saves the learned weights, then runs the test suite.
static void Main(string[] args)
{
    try
    {
        // Image pre-processing / cropping helper.
        ImageProcess imgprocess = new ImageProcess(width, height, win_width, win_height);
        // Random delays de-synchronize the training threads.
        System.Random rnd = new System.Random((int)System.DateTime.Now.Ticks);

        // Fracture-recognition network.
        neuronet_fractures = new NeuroNet(width * height, new int[] { 0, 300, 36 }, 1);
        fast_test_fractures();

        // Spin up three background training threads with random start delays.
        for (int i = 0; i < 3; i++)
        {
            new Thread(() => teach_fracture_network(3000)).Start();
            System.Threading.Thread.Sleep(rnd.Next(100, 5000));
        }

        // Keep the main thread busy training as well.
        teach_fracture_network(3000, 1);
        System.Threading.Thread.Sleep(rnd.Next(100, 200));
        // Grace period so the worker threads can finish.
        System.Threading.Thread.Sleep(500);

        // Persist the learned weights, then run the tests.
        neuronet_fractures.write_weights();
        test_fractures();

        System.Threading.Thread.Sleep(10000000);
    }
    catch (System.IO.FileNotFoundException e)
    {
        // BUG FIX: the original passed e.Source to a format string with no
        // placeholder, so the offending source was never printed.
        Console.WriteLine("File does not exist: {0}", e.Source);
    }
}
// Builds the solving window for the supplied network: one label+textbox row
// per input neuron (in gbInputs) and per output neuron (in gbOutputs).
// The original duplicated the row-building code for both loops; it is now
// extracted into CreateValueRow with identical layout arithmetic.
public NeuroNetSolvingWindow(NeuroNet net)
{
    InitializeComponent();
    currentNet = net;
    inputs = new List<Tuple<Label, TextBox>>();
    outputs = new List<Tuple<Label, TextBox>>();
    for (int i = 0; i < net.InputNeuronsCount; i++)
    {
        inputs.Add(CreateValueRow("x", i, gbInputs));
    }
    for (int i = 0; i < net.OutputNeuronsCount; i++)
    {
        outputs.Add(CreateValueRow("y", i, gbOutputs));
    }
}

// Creates one "name[i]=" label plus a textbox initialised to "0,0", adds both
// to the given group box, and returns the pair.
private static Tuple<Label, TextBox> CreateValueRow(string name, int i, GroupBox container)
{
    Label lb = new Label();
    lb.Text = name + "[" + i + "]=";
    lb.Location = new Point(15, 17 + i * 25);
    lb.Size = new Size(lb.Text.Length * 8, 20);
    container.Controls.Add(lb);
    TextBox tb = new TextBox();
    tb.Text = "0,0";
    tb.Location = new Point(lb.Text.Length * 10 + 5, 15 + i * 25);
    tb.Size = new Size(100, 20);
    container.Controls.Add(tb);
    return new Tuple<Label, TextBox>(lb, tb);
}
// Loads the stored sequence, normalizes it to its maximum, holds out the last
// 1000 samples, and trains a 20-input network on the rest.
public Form1()
{
    InitializeComponent();

    double[] sequence = WorkWithFile.LoadingSequence();
    double maxValue = sequence.Max();
    double[] data = normalizing(sequence, maxValue);

    // Everything except the final 1000 points is used for training.
    int learnCount = data.Length - 1000;
    int neuronCount = 20;
    var learnSet = convertToLearnSet(data.Take(learnCount).ToArray(), neuronCount);

    double lambda = 0.001;
    NeuroNet nn = new NeuroNet(neuronCount, lambda);
    nn.Learning(learnSet, 200);
}
// Creates and initializes the global network, then refreshes dependent state.
// (The public name keeps its historical "Ner" typo to avoid breaking callers.)
public static void initNeuroNer(int countInNeurons, int countOutNeurons, List<int> countHiddenNeurons, bool bias = false)
{
    NeuroNet network = new NeuroNet(countInNeurons, countOutNeurons, countHiddenNeurons, bias);
    NNCore.nn = network;
    network.Initialize();
    initVars();
}
// Builds a fresh network sized for the classification task and stores it as
// the active network. Note the argument reorder: NeuroNet's ctor is called as
// (inputsCount, hiddenNeuronsCount, classCount).
internal void CreateNeuronet(int classCount, int inputsCount, int hiddenNeuronsCount)
{
    neuralNetwork = new NeuroNet(inputsCount, hiddenNeuronsCount, classCount);
}
// Runs one evolutionary generation: scores every individual, optionally rolls
// back unprofitable weight mutations, clones record-setters into the
// population, and applies the next mutation (structural or weight) to each.
public void PassGeneration()
{
    var results = PassTests();
    // Sort ascending by fitness (Pair.a = fitness, Pair.b = the net).
    results.Sort((x, y) => { if (x.a > y.a) { return(1); } else if (x.a == y.a) { return(0); } else { return(-1); } });
    LastResult = new List<Pair<double, NeuroNet>>();
    Parallel.ForEach(results, x =>
    {
        bool undone = false;
        bool rec = false;
        // Fitness delta since this net's previous evaluation.
        double mdF = x.a - x.b.LastResult;
        if (mdF < scoreLim && UndoneEnabled)
        {
            // Revert an unprofitable weight mutation; structural mutations
            // are never undone here.
            if (x.b.MutationsDone.Peek().Type == MutationType.WeightChange)
            {
                x.b.UndoChanges();
                undone = true;
                x.a = x.b.LastResult;
            }
        }
        if (x.a > record)
        {
            // New all-time record: clone the net into the population.
            // NOTE(review): results[0].b (current worst) is removed, which
            // looks like it keeps the population size constant — confirm.
            record = x.a;
            lock (_currentNets)
            {
                x.b.Copies += 1;
                NeuroNet nn = new NeuroNet(x.b);
                nn.Age = 0;
                nn.OperationsAfterSM = 1;
                this._currentNets.Add(nn);
                this._currentNets.Remove(results[0].b);
            }
            rec = true;
        }

        /*if (x.b == results[results.Count - 1].b)
         * {
         * rec = true;
         * }*/

        // NOTE(review): 'true ||' makes this condition always pass, so the
        // else branch below is dead code — likely a leftover experiment.
        if (true || !undone || x.b.OperationsAfterSM > (x.b.Connections.Count + x.b.Nodes.Count))
        {
            if (rec)
            {
                // A record-setter may clone itself again while its copies
                // remain under 10% of the population.
                if (x.b.Copies / (double)Individuals < 0.1)
                {
                    lock (_currentNets)
                    {
                        x.b.Copies += 1;
                        NeuroNet nn = new NeuroNet(x.b);
                        nn.Age = 0;
                        nn.OperationsAfterSM = 1;
                        this._currentNets.Add(nn);
                        this._currentNets.Remove(results[0].b);
                    }
                }
            }
            // Psm grows with net size and |fitness change| and shrinks with
            // age. NOTE(review): the name suggests Psm is the probability of
            // a structural mutation, but structure triggers when the uniform
            // draw EXCEEDS Psm — verify the inequality direction is intended.
            double dF = x.a - x.b.ParentResult;
            double Psm = ((x.b.Connections.Count + x.b.Nodes.Count) + Math.Abs(this.a * dF)) / (this.b * (x.b.Age + 1));
            if (GetRandom(0.0, 1.0) > Psm)
            {
                StructureMutation(x.b, x.a);
            }
            else
            {
                WeightMutation(x.b);
            }
            x.b.LastResult = x.a;
        }
        else
        {
            // Unreachable while the 'true ||' above remains.
            WeightMutation(x.b);
        }
    });
    results.ForEach(x => LastResult.Add(x));
    // Publish the results re-sorted ascending by fitness.
    LastResult.Sort((x, y) => { if (x.a > y.a) { return(1); } else if (x.a == y.a) { return(0); } else { return(-1); } });
    _currentGeneration++;
}