// Runs a batch of SOM training steps on randomly drawn samples, then rebuilds
// the 2-D display map from the neurons' first two weights and flags every map
// cell whose neuron is the best-matching unit for at least one input.
static void Learning()
{
    // Train between redraws so the UI stays responsive.
    for (int step = 0; step < iterationsBetweenDrawing; step++)
    {
        learning.Run(Inputs[rnd.Next(Inputs.Length)]);
    }

    // Rebuild the map: weight[0]/weight[1] of each neuron give its (X, Y).
    map = new MapElement[task.NetworkWidth, task.NetworkHeight];
    int id = 0;
    for (int col = 0; col < task.NetworkWidth; col++)
    {
        for (int row = 0; row < task.NetworkHeight; row++)
        {
            // Neurons are laid out column-major: index = col * height + row.
            var unit = network.Layers[0].Neurons[col * task.NetworkHeight + row];
            map[col, row] = new MapElement
            {
                X = (float)unit.Weights[0],
                Y = (float)unit.Weights[1],
                Id = id++
            };
        }
    }

    // Mark each cell that wins the competition for some input sample.
    foreach (var sample in Inputs)
    {
        network.Compute(sample);
        var best = network.GetWinner();
        map[best / task.NetworkHeight, best % task.NetworkHeight].IsActive = true;
    }
}
/// <summary>
/// Implementation of the divergence operation: feeds the concatenated real
/// signal through the network, takes the winning neuron's weight vector as
/// the prediction, distributes it back over the modalities, and optionally
/// runs one learning step.
/// </summary>
protected override void diverge()
{
    // Convert all the modalities into a single vector.
    double[] realSignal;
    double[] predictedSignal;
    getConcatenatedModalities(out realSignal, out predictedSignal);

    // Compute for its side effect only: it updates the network's internal
    // activations so GetWinner() is valid. The returned array itself is not
    // needed (the original stored it in an unused local).
    network.Compute(realSignal);
    Neuron winner = network.Layers[0].Neurons[network.GetWinner()];

    // The prediction is the winner's weight vector.
    double[] networkPrediction = new double[InputCount];
    for (int i = 0; i < InputCount; i++)
    {
        networkPrediction[i] = winner.Weights[i];
    }

    // Distribute the prediction over the modalities.
    setConcatenatedModalities(null, networkPrediction);

    // Proceed to learning unless locked or the rate is zero.
    if (!learningLocked && learningRate > 0)
    {
        teacher.Run(realSignal);
    }

    // TODO: copy the neuron activities to MNNode output.
}
// Worker thread: trains the SOM on random RGB triples until the iteration
// limit is reached or a stop is requested.
void SearchSolution()
{
    // Create the learning algorithm.
    SOMLearning trainer = new SOMLearning(network);

    // Reusable sample buffer (one RGB color).
    double[] sample = new double[3];

    // 10% of the configured rate stays fixed; the remaining 90% decays
    // linearly to zero over the course of training.
    double baseRate = learningRate / 10;
    double decayingRate = baseRate * 9;

    int step = 0;
    while (!needToStop)
    {
        // Anneal learning rate and neighborhood radius with progress.
        trainer.LearningRate = decayingRate * (iterations - step) / iterations + baseRate;
        trainer.LearningRadius = (double)radius * (iterations - step) / iterations;

        // Present one random color.
        sample[0] = rand.Next(256);
        sample[1] = rand.Next(256);
        sample[2] = rand.Next(256);
        trainer.Run(sample);

        // Refresh the map once every 10 iterations.
        if (step % 10 == 9)
        {
            UpdateMap();
        }

        step++;

        // Publish the current iteration number.
        SetText(currentIterationBox, step.ToString());

        if (step >= iterations)
        {
            break;
        }
    }

    // Re-enable the settings controls.
    EnableControls(true);
}
// Worker thread: runs SOM training on random RGB samples, updating the map
// display periodically, until the iteration limit or a stop request.
void SearchSolution()
{
    var trainer = new SOMLearning(this.network);
    var sample = new double[3];

    // Split the rate: 10% constant floor + 90% linearly decaying portion.
    var floorRate = this.learningRate / 10;
    var slopeRate = floorRate * 9;

    var step = 0;
    while (!this.needToStop)
    {
        // Linearly anneal rate and radius toward their end-of-training values.
        trainer.LearningRate = slopeRate * (this.iterations - step) / this.iterations + floorRate;
        trainer.LearningRadius = (double)this.radius * (this.iterations - step) / this.iterations;

        // One random color per step.
        sample[0] = this.rand.Next(256);
        sample[1] = this.rand.Next(256);
        sample[2] = this.rand.Next(256);
        trainer.Run(sample);

        // Redraw the map once every 10 iterations.
        if (step % 10 == 9)
        {
            UpdateMap();
        }

        step++;
        this.currentIterationBox.Text = step.ToString();

        if (step >= this.iterations)
        {
            break;
        }
    }

    EnableControls(true);
}
// Worker thread: SOM training loop over random RGB samples. If the random
// source is unavailable the previous sample is re-presented unchanged.
void SearchSolution()
{
    var trainer = new SOMLearning(network);
    var sample = new double[3];

    // Constant 10% floor plus a 90% share that decays linearly to zero.
    var floorRate = learningRate / 10;
    var slopeRate = floorRate * 9;

    for (int step = 0; !needToStop; )
    {
        trainer.LearningRate = slopeRate * (iterations - step) / iterations + floorRate;
        trainer.LearningRadius = radius * (iterations - step) / iterations;

        // Guard kept from the original: only refresh the sample when a
        // random source exists; otherwise the buffer is reused as-is.
        if (rand != null)
        {
            sample[0] = rand.Next(256);
            sample[1] = rand.Next(256);
            sample[2] = rand.Next(256);
        }
        trainer.Run(sample);

        // Redraw the map every 10 iterations.
        if (step % 10 == 9)
        {
            UpdateMap();
        }

        step++;
        SetText(currentIterationBox, step.ToString());

        if (step >= iterations)
        {
            break;
        }
    }

    EnableControls(true);
}
// Trains a SOM color map from the pixels of the source image and renders the
// trained neuron weights as a small bitmap in imgMap.
// NOTE(review): the pixel-handling below assumes a 32-bit BGRA source format
// (4 bytes per pixel) — confirm imgOrig always provides that.
private void StartCommandHandler(object sender, ExecutedRoutedEventArgs e)
{
    PrepareParams();
    lblStatus.Text = "Обучение";
    this.Cursor = Cursors.Wait;

    // Copy the source image's raw pixel bytes.
    BitmapSource bmp = (BitmapSource)imgOrig.Source;
    int H = bmp.PixelHeight,
        W = bmp.PixelWidth,
        stride = (W * bmp.Format.BitsPerPixel + 7) / 8,
        len = H * stride;
    byte[] pixels = new byte[len];
    bmp.CopyPixels(pixels, stride, 0);

    // Reusable 4-channel sample buffer.
    double[] input = new double[4];

    // Build the network: one 4-weight neuron per map cell.
    Neuron.RandRange = new Range<double>(0, 255);
    nt = new DistanceNetwork(4, _nx * _ny);
    SOMLearning trainer = new SOMLearning(nt, _nx, _ny);

    // 10% constant floor plus a 90% linearly decaying portion.
    double fixedLearningRate = _rate / 10;
    double driftingLearningRate = fixedLearningRate * 9;

    int k = 0;
    Random rand = new Random();
    while (true)
    {
        trainer.LearningRate = driftingLearningRate * (_iterations - k) / _iterations + fixedLearningRate;
        trainer.LearningRadius = (double)_radius * (_iterations - k) / _iterations;

        // BUG FIX: pick a random PIXEL and convert it to a byte offset
        // (4 bytes per pixel). The original used rand.Next(H * W) directly
        // as a byte index, which sampled only the first quarter of the image
        // and read channel quadruples at arbitrary (misaligned) offsets.
        int i = 4 * rand.Next(H * W);
        input[0] = pixels[i];
        input[1] = pixels[i + 1];
        input[2] = pixels[i + 2];
        input[3] = pixels[i + 3];
        trainer.Run(input);

        k++;
        if (k >= _iterations)
        {
            break;
        }
    }

    // Render the trained map: each neuron's 4 weights become one pixel,
    // clamped to the valid byte range.
    stride = 4 * _nx;
    byte[] array = new byte[_ny * stride];
    Layer layer = nt[0];
    for (int y = 0, i = 0; y < _ny; y++)
    {
        for (int x = 0; x < stride; i++, x += 4)
        {
            Neuron neuron = layer[i];
            array[stride * y + x] = (byte)Math.Max(0, Math.Min(255, neuron[0]));
            array[stride * y + x + 1] = (byte)Math.Max(0, Math.Min(255, neuron[1]));
            array[stride * y + x + 2] = (byte)Math.Max(0, Math.Min(255, neuron[2]));
            array[stride * y + x + 3] = (byte)Math.Max(0, Math.Min(255, neuron[3]));
        }
    }

    try
    {
        // Dispose the Graphics object (original leaked it); it is only
        // needed to query the screen DPI.
        using (System.Drawing.Graphics g = System.Drawing.Graphics.FromHwnd(IntPtr.Zero))
        {
            WriteableBitmap bm1 = new WriteableBitmap(_nx, _ny, g.DpiX, g.DpiY, bmp.Format, null);
            bm1.WritePixels(new Int32Rect(0, 0, _nx, _ny), array, stride, 0);
            imgMap.Source = bm1;
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }

    lblStatus.Text = "";
    this.Cursor = Cursors.Arrow;
}