/// <summary>Resets the learning state of every neuron in this layer before a training pass.</summary>
/// <param name="net">Owning network (not used directly; the layer passes itself to each neuron).</param>
public void InitializeLearning(INeuralNet net)
{
    foreach (INeuron neuron in m_neurons)
    {
        neuron.InitializeLearning(this);
    }
}
/// <summary>Explicit interface member: commits accumulated learning on each neuron.</summary>
void INeuralLayer.ApplyLearning(INeuralNet net)
{
    // The layer itself is the learning context handed to each neuron.
    foreach (INeuron neuron in m_neurons)
        neuron.ApplyLearning(this);
}
/// <summary>
/// Serializes <paramref name="net"/> via <c>InnerToString</c> and writes it to
/// <paramref name="path"/> as UTF-8 text.
/// </summary>
/// <param name="path">Destination file path.</param>
/// <param name="net">Network to serialize.</param>
/// <param name="override">When false, an existing file is left untouched and false is returned.</param>
/// <returns>
/// True on success; false when the file exists and may not be overwritten,
/// or when serialization produces no data.
/// </returns>
public static async Task <bool> ToFile(string path, INeuralNet net, bool @override = true)
{
    // Refuse to clobber an existing file unless overwriting was requested.
    if (File.Exists(path) && !@override)
    {
        return(false);
    }
    var data = InnerToString(net);
    if (string.IsNullOrEmpty(data))
    {
        return(false);
    }
    // FileMode.Create truncates/overwrites atomically, so the previous explicit
    // File.Delete was redundant and opened a delete/recreate race window.
    using (var ws = new FileStream(path, FileMode.Create, FileAccess.Write))
    {
        var bytes = Encoding.UTF8.GetBytes(data);
        await ws.WriteAsync(bytes, 0, bytes.Length);
    }
    return(true);
}
/// <summary>Commits accumulated learning on every neuron in the layer.</summary>
public void ApplyLearning(INeuralNet net)
{
    foreach (INeuron n in m_neurons)
        n.ApplyLearning(this);
}
/// <summary>Commits learning on each neuron using the network's current learning rate.</summary>
public void ApplyLearning(INeuralNet net)
{
    // Snapshot the rate into a local so it can be passed by reference.
    double rate = net.LearningRate;
    foreach (INeuron neuron in m_neurons)
    {
        neuron.ApplyLearning(this, ref rate);
    }
}
/// <summary>Explicit interface member: propagates a forward pulse through every neuron.</summary>
void INeuralLayer.Pulse(INeuralNet net)
{
    foreach (INeuron neuron in m_neurons)
        neuron.Pulse(this);
}
/// <summary>Propagates a forward pulse through every neuron in the layer.</summary>
public void Pulse(INeuralNet net)
{
    foreach (INeuron n in m_neurons)
        n.Pulse(this);
}
/// <summary>Commits learning on each neuron, threading the network's learning rate through by reference.</summary>
public void ApplyLearning(INeuralNet net)
{
    // Local copy required because the per-neuron call takes the rate by ref.
    double rate = net.LearningRate;
    foreach (INeuron neuron in m_neurons)
        neuron.ApplyLearning(this, ref rate);
}
/// <summary>
/// Full trainer constructor: adds an optional logging cycle and a log callback
/// on top of the core training parameters.
/// </summary>
public NeuralNetTrainer(INeuralNet net, Tuple <double[], double[]>[] dataSets, int?epochCount, double?tolerableError, double?learnRate, int?logCycle, ProgressLog logMethod)
    : this(net, dataSets, epochCount, tolerableError, learnRate)
{
    // Only override the default write cycle when a value was supplied.
    if (logCycle != null)
    {
        ErrorWriteCycle = logCycle.Value;
    }
    Log = logMethod;
}
/// <summary>Creates a dodger bounded by the given arena limits and driven by the supplied brain.</summary>
public SweeperDodger(double maxX, double maxY, double maxSpeed, double maxRotation, INeuralNet brain)
{
    Brain = brain;
    _maxX = maxX;
    _maxY = maxY;
    _maxSpeed = maxSpeed;
    _maxRotation = maxRotation;
    // Start from a randomized heading/velocity once all limits are in place.
    SetRandomMotion();
}
/// <summary>
/// Overwrites <paramref name="destination"/> with <paramref name="source"/> by
/// round-tripping the source network through the destination's on-disk slot,
/// then copies the source's fitness entry onto the destination.
/// </summary>
protected void Replace(INeuralNet destination, INeuralNet source)
{
    int destinationIndex = population.IndexOf(destination);
    // Build the slot path once; Path.Combine handles separators portably
    // instead of the previous hard-coded "\\" concatenation.
    string slotPath = Path.Combine(populationPath, destinationIndex.ToString(), neuralNetName);
    source.Save(slotPath);
    destination.Load(slotPath);
    fitness[destination] = fitness[source];
}
/// <summary>Creates, initializes, and shows an ANN editor form on the given window.</summary>
/// <returns>The displayed form instance.</returns>
public static EditANNForm ShowDialogue(Window window, INeuralNet _ann, CloseEvent closeFunction = null, string category = null, ISkinFile file = null)
{
    var dialog = new EditANNForm();
    dialog.Initialize(_ann, closeFunction, "Edit ANN", true, true, category, file);
    dialog.Show(window);
    return dialog;
}
/// <summary>Rebinds the layer to its network after deserialization and notifies each neuron.</summary>
public virtual void OnDeserialize(INeuralNet net)
{
    _net = net;
    // Nothing to propagate when the neuron array was never populated.
    if (_neurons == null)
    {
        return;
    }
    foreach (var neuron in _neurons)
    {
        neuron.OnDeserialize(this);
    }
}
/// <summary>
/// Scores the network on each parity test case and returns the mean fitness.
/// The expected output for a case is the parity (sum mod 2) of its inputs.
/// </summary>
double Evaluate(INeuralNet network)
{
    // Enumerable pipeline replaces the manual accumulator array;
    // .Average() still throws on an empty range, matching the original.
    return Enumerable.Range(0, tests)
        .Select(caseIndex =>
        {
            double[] inputs = testInputs[caseIndex];
            double[] outs = network.Calculate(inputs);
            return Fitness(outs[0], inputs.Sum() % 2);
        })
        .Average();
}
/// <summary>
/// Produces a child network whose weight matrix and output-layer biases are
/// crossovers of this network's and <paramref name="other"/>'s.
/// </summary>
/// <exception cref="ApplicationException">
/// Thrown when <paramref name="other"/> is not a <see cref="OneLayerNeuralNet"/>.
/// </exception>
public INeuralNet Crossover(INeuralNet other)
{
    // Single pattern-matched cast replaces the repeated unchecked 'as' casts;
    // the guard message previously named a nonexistent 'COneLayerNeuralNet'.
    if (!(other is OneLayerNeuralNet typedOther))
    {
        throw new ApplicationException("'other' should be of type OneLayerNeuralNet to be able to crossover with OneLayerNeuralNet.");
    }
    var child = new OneLayerNeuralNet(this.wages.GetLength(0), this.wages.GetLength(1));
    child.wages = CrossoverHelper.Crossover(this.wages, typedOther.wages);
    child.biasesInOutputLayer = CrossoverHelper.Crossover(this.biasesInOutputLayer, typedOther.biasesInOutputLayer);
    return(child);
}
/// <summary>
/// Scores the network against every stored input/output pair and returns the
/// mean of the per-case mean fitness values.
/// </summary>
double Evaluate(INeuralNet network)
{
    double[] caseScores = new double[testInputs.Count];
    for (int caseIndex = 0; caseIndex < caseScores.Length; caseIndex++)
    {
        double[] outs = network.Calculate(testInputs[caseIndex]);
        // Replace each raw output in place with its fitness versus the expected value.
        for (int node = 0; node < outs.Length; node++)
        {
            outs[node] = Fitness(testOutputs[caseIndex][node], outs[node]);
        }
        caseScores[caseIndex] = outs.Average();
    }
    return caseScores.Average();
}
/// <summary>
/// Builds a layer of <paramref name="neuronsCount"/> cells created from the
/// given cell type, with a bias instance created from the bias type.
/// </summary>
/// <exception cref="NeuralTypeException">
/// Thrown when the bias type does not yield a usable <see cref="INeuralBias"/>.
/// </exception>
public NeuralLayer(INeuralNet net, Type cell, Type bias, Type func, int neuronsCount, int connectionsCount, double skew)
{
    _net = net;
    _neurons = new INeuralCell[neuronsCount];
    AnswerHistory = new List <double[]>();
    _inputLength = neuronsCount;
    // A single prototype cell stamps out every neuron in the layer.
    var prototype = (INeuralCell)Activator.CreateInstance(cell);
    // The bias must both instantiate and bind itself to this layer;
    // a null at either step is a type-configuration error.
    var boundBias = ((INeuralBias)Activator.CreateInstance(bias))?.Create(this);
    _bias = boundBias ?? throw new NeuralTypeException(typeof(INeuralBias), bias);
    for (var slot = 0; slot < neuronsCount; slot++)
    {
        _neurons[slot] = prototype.Create(this, func, connectionsCount, skew);
    }
}
/// <summary>
/// Trainer constructor with optional overrides; any null argument keeps the
/// corresponding default set by the chained constructor.
/// </summary>
public NeuralNetTrainer(INeuralNet net, Tuple <double[], double[]>[] dataSets, int?epochCount, double?tolerableError, double?learnRate)
    : this(net, dataSets)
{
    if (epochCount != null)
    {
        EpochsCount = epochCount.Value;
    }
    if (tolerableError != null)
    {
        TolerableError = tolerableError.Value;
    }
    if (learnRate != null)
    {
        LearnRate = learnRate.Value;
    }
}
// Trains _neuralNet for E epochs, tracking the per-epoch training error on the
// clean samples and the evaluation error on noisy copies, then pushes both
// curves to the Lines chart.
// NOTE(review): assumes Trainigs is non-empty and _expectations is index-aligned
// with it — confirm against callers. ("Trainigs" is the field's actual name.)
public void Train()
{
    // Topology: inputs -> X -> Y -> 4 outputs; learning rate L, sigmoid
    // activation, mean-square loss. The 0 and 1 arguments' meaning is not
    // visible here — see the NeuralNet constructor.
    _neuralNet = new NeuralNet(new int[] { Trainigs[0].Length, X, Y, 4 }, L, 0, 1, new Sigmoid(), new MeanSquare());
    // Two independent noisy copies of every training sample form the test set.
    var noises = new double[2][][];
    for (var j = 0; j < noises.Length; j++)
    {
        noises[j] = new double[Trainigs.Length][];
        for (var k = 0; k < Trainigs.Length; k++)
        {
            noises[j][k] = AddNoise(Trainigs[k], LearnNoise);
        }
    }
    var learnErrors = new double[E];
    var testErrors = new double[E];
    for (var i = 0; i < E; i++)
    {
        // Evaluate (Run, no weight update) on every noisy sample; average the error.
        testErrors[i] = 0;
        for (var j = 0; j < noises.Length; j++)
        {
            for (var k = 0; k < Trainigs.Length; k++)
            {
                testErrors[i] += _neuralNet.Run(noises[j][k], _expectations[k], out double[] outputs);
            }
        }
        testErrors[i] /= noises.Length * Trainigs.Length;
        // One training pass over the clean samples; average the training error.
        learnErrors[i] = 0;
        for (var j = 0; j < Trainigs.Length; j++)
        {
            learnErrors[i] += _neuralNet.Train(Trainigs[j], _expectations[j]);
        }
        learnErrors[i] /= Trainigs.Length;
    }
    // Publish both error curves for display.
    Lines.SetLines(learnErrors, testErrors);
}
/// <summary>
/// Produces a child multi-layer network whose weight matrices and hidden/output
/// biases are element-wise crossovers of this network's and <paramref name="other"/>'s.
/// </summary>
/// <exception cref="ApplicationException">
/// Thrown when <paramref name="other"/> is not a <see cref="MultiLayerNeuralNet"/>.
/// </exception>
public INeuralNet Crossover(INeuralNet other)
{
    // Bug fix: the guard message previously named the unrelated one-layer type
    // ("COneLayerNeuralNet"); it now reports the type actually required here.
    if (!(other is MultiLayerNeuralNet castedOther))
    {
        throw new ApplicationException("'other' should be of type MultiLayerNeuralNet to be able to crossover with MultiLayerNeuralNet.");
    }
    // Child dimensions mirror this network: input width, output width,
    // hidden-layer width, and hidden-layer count (+1 converts gaps to layers).
    var child = new MultiLayerNeuralNet(
        this.wagesBetweenInputAndFirstHiddenLayer.GetLength(0),
        this.wagesBetweenLastHiddenAndOutputLayer.GetLength(1),
        this.wagesBetweenInputAndFirstHiddenLayer.GetLength(1),
        this.wagesBetweenHiddenLayers.Length + 1);
    child.wagesBetweenInputAndFirstHiddenLayer = CrossoverHelper.Crossover(this.wagesBetweenInputAndFirstHiddenLayer, castedOther.wagesBetweenInputAndFirstHiddenLayer);
    for (int i = 0; i < this.wagesBetweenHiddenLayers.Length; i++)
    {
        child.wagesBetweenHiddenLayers[i] = CrossoverHelper.Crossover(this.wagesBetweenHiddenLayers[i], castedOther.wagesBetweenHiddenLayers[i]);
    }
    for (int i = 0; i < this.biasesInHiddenLayers.Length; i++)
    {
        child.biasesInHiddenLayers[i] = CrossoverHelper.Crossover(this.biasesInHiddenLayers[i], castedOther.biasesInHiddenLayers[i]);
    }
    child.wagesBetweenLastHiddenAndOutputLayer = CrossoverHelper.Crossover(this.wagesBetweenLastHiddenAndOutputLayer, castedOther.wagesBetweenLastHiddenAndOutputLayer);
    child.biasesInOutputLayer = CrossoverHelper.Crossover(this.biasesInOutputLayer, castedOther.biasesInOutputLayer);
    return(child);
}
/// <summary>
/// Evaluates every network in parallel on the thread pool, records each fitness,
/// then caches the best-scoring network and its fitness.
/// </summary>
public async Task Evaluate()
{
    fitnesses.Clear();
    // One Task.Run per network. Fix: the original wrapped the evaluation in a
    // second nested Task.Run inside an async lambda, adding a pointless extra
    // thread-pool hop with no behavioral benefit.
    await Task.WhenAll(networks.Select(kvp => Task.Run(() =>
    {
        double fitness = evaluationFunc(kvp.Value);
        lock (fitnesses)
        {
            // Indexer assignment overwrites any existing entry, replacing the
            // previous ContainsKey/Remove/Add sequence.
            fitnesses[kvp.Key] = fitness;
        }
    })));
    // Sort descending by fitness (negated ascending comparison).
    List <NeuralNetwork> networkList = networks.Select(kvp => kvp.Value).ToList();
    networkList.Sort((t1, t2) => -fitnesses[t1.GetGuid()].CompareTo(fitnesses[t2.GetGuid()]));
    maxFitness = fitnesses[networkList[0].GetGuid()];
    bestNetwork = networkList[0];
}
/// <summary>Trainer constructor that supplies the training data after core setup.</summary>
public NeuralNetTrainer(INeuralNet net, Tuple <double[], double[]>[] dataSets)
    : this(net) => DataSets = dataSets;
/// <summary>Resets each neuron's learning state against this layer.</summary>
public void InitializeLearning(INeuralNet net)
{
    foreach (INeuron neuron in m_neurons)
    {
        neuron.InitializeLearning(this);
    }
}
// Interactive console loop: reads a customer profile from stdin, runs it
// through the trained network, and reports whether the predicted destination
// matches the user's stated one. Loops forever — there is no exit condition;
// the process must be terminated externally.
// NOTE(review): user input is parsed with Convert.ToInt32, which throws
// FormatException on non-numeric input — confirm whether that is acceptable.
private void ManualTestOfNetwork(INeuralNet net, int numberOfInputNodes, int numberOfOutputNodes)
{
    while (true)
    {
        // Asking user for input
        int age, annualIncome, workStatus, destination;
        Console.WriteLine("\nManual test");
        Console.Write("Age: ");
        age = Convert.ToInt32(Console.ReadLine());
        Console.Write("Annual income: ");
        annualIncome = Convert.ToInt32(Console.ReadLine());
        Console.Write("1: Student, 2: Employed, 3: Unemployed, 4: Retired\n" + "Work status: ");
        workStatus = Convert.ToInt32(Console.ReadLine());
        Console.Write("1: Prag, 2: Budapest, 3: Berlin, 4: Stockholm, 5: Oslo, 6: London, 7: New York, 8: Grønland, 9: Bora Bora, 10: Dubai\n" + "Destination:");
        destination = Convert.ToInt32(Console.ReadLine());
        // Normalize raw answers into the network's input encoding.
        Customer customer = new Customer(age, annualIncome, workStatus, destination);
        NormalizedCustomer nc = new NormalizedCustomer(customer);
        // Converting normalized customer to input and output arrays
        // (work status and destination are one-hot encoded by NormalizedCustomer).
        double[][] userInput = new double[1][];
        userInput[0] = new double[] { nc.Age, nc.AnnualIncome, nc.WorkStatusStudent, nc.WorkStatusEmployed, nc.WorkStatusUnemployed, nc.WorkStatusRetired };
        double[][] userOutput = new double[1][];
        userOutput[0] = new double[] { nc.DestinationPrag, nc.DestinationBudapest, nc.DestinationBerlin, nc.DestinationStockholm, nc.DestinationOslo, nc.DestinationLondon, nc.DestinationNewYork, nc.DestinationGreenland, nc.DestinationBoraBora, nc.DestinationDubai };
        // Testing the input customer: load the perception layer, then pulse.
        for (int j = 0; j < numberOfInputNodes; j++)
        {
            net.PerceptionLayer[j].Output = userInput[0][j];
        }
        net.Pulse();
        double[][] actualTestDataResults = new double[1][];
        actualTestDataResults[0] = new double[numberOfOutputNodes];
        for (int i = 0; i < numberOfOutputNodes; i++)
        {
            actualTestDataResults[0][i] = net.OutputLayer[i].Output;
        }
        // Checking accuracy of the test: the LAST output above the 0.5
        // threshold wins (later indices overwrite earlier ones); 0 means
        // no output fired.
        int actualResult = 0;
        for (int i = 0; i < numberOfOutputNodes; i++)
        {
            if (actualTestDataResults[0][i] > 0.5)
            {
                actualResult = i + 1;
            }
        }
        // Conclusion
        Console.WriteLine("Expected result: {0}, Actual 
result: {1}", destination, actualResult);
        if (destination == actualResult)
        {
            Console.WriteLine("The neural network reached the expected result.");
        }
        else if (actualResult == 0)
        {
            Console.WriteLine("The neural network couldn't find a destination.");
        }
        else
        {
            Console.WriteLine("The neural network did not reach the expected result.");
        }
        Console.WriteLine("Press any key to manually test again...");
        Console.ReadKey();
    }
}
/// <summary>Propagates a forward pulse through each neuron of this layer.</summary>
public void Pulse(INeuralNet net)
{
    foreach (INeuron neuron in m_neurons)
    {
        neuron.Pulse(this);
    }
}
/// <summary>Stores the network being edited, then defers window setup to the base form.</summary>
public void Initialize(INeuralNet _ann, CloseEvent closeFunction = null, string title = null, bool resizable = false, bool isDialog = true, string category = null, ISkinFile file = null)
{
    this.ann = _ann;
    base.Initialize(closeFunction, title, resizable, isDialog, category, file);
}
/// <summary>Factory method: builds a standard <c>NeuralLayer</c> with the given configuration.</summary>
public virtual INeuralLayer Create(INeuralNet net, Type cell, Type bias, Type func, int neuronsCount, int connectionsCount, double skew)
{
    return new NeuralLayer(net, cell, bias, func, neuronsCount, connectionsCount, skew);
}
/// <summary>Core trainer constructor: binds the network to be trained.</summary>
public NeuralNetTrainer(INeuralNet net) => Net = net;
/// <summary>Replaces the trainer's network and returns the trainer for fluent chaining.</summary>
public NeuralNetTrainer SetNet(INeuralNet net)
{
    Net = net;
    return this;
}