Example 1
        public NeuralPatternClassificator(bool learn)
        {
            List <int> dimensions = new List <int>();
            int        inputLayerSize;

            if (Intelligence.Intelligence.Configurator.GetIntProperty("char_featuresExtractionMethod") == 0)
            {
                inputLayerSize = NormalizeX * NormalizeY;
            }
            else
            {
                inputLayerSize = features.Length * 4;
            }

            dimensions.Add(inputLayerSize);
            dimensions.Add(Intelligence.Intelligence.Configurator.GetIntProperty("neural_topology"));
            dimensions.Add(Alphabet.Length);
            Network = new NeuralNetwork.NeuralNetwork(dimensions);

            if (learn)
            {
                LearnAlphabet(Intelligence.Intelligence.Configurator.GetStringProperty("char_learnAlphabetPath"));
            }
            else
            {
                Network = new NeuralNetwork.NeuralNetwork(
                    Intelligence.Intelligence.Configurator.GetPathProperty("char_neuralNetworkPath"));
            }
        }
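A minimal usage sketch (hypothetical, assuming the configurator keys referenced above are already populated): passing true trains a fresh network from the learn-alphabet folder, while false loads the serialized network from "char_neuralNetworkPath".

        // Hypothetical usage; both calls assume Intelligence.Intelligence.Configurator is initialized.
        var trainedClassifier = new NeuralPatternClassificator(learn: true);   // trains via LearnAlphabet(...)
        var loadedClassifier  = new NeuralPatternClassificator(learn: false);  // deserializes an existing network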
Example 2
 public Neuron(Scene scene, NeuralNetwork.NeuralNetwork nn, int i, int j)
     : base(scene)
 {
     _nn = nn;
     _i  = i;
     _j  = j;
 }
Example 3
        private static NeuralPlayer MakeNeuralPlayer(int inputCount, int outputCount, int depth, int hiddenNeuronsPerLayer, DNA individual)
        {
            NeuralNetwork.NeuralNetwork network = new NeuralNetwork.NeuralNetwork();

            network.BuildFromDNA($"{inputCount}||{outputCount}||{depth}||{hiddenNeuronsPerLayer}||{individual.ToString()}");

            NeuralPlayer p2 = new NeuralPlayer(network, SquareTypes.O);

            return(p2);
        }
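The BuildFromDNA call above encodes topology and genome in a single "||"-separated string: input count, output count, depth, hidden neurons per layer, then the DNA's string form. A hypothetical call, assuming a DNA instance named individual produced by the genetic algorithm:

            // Sketch only: a 9-input, 9-output player with 2 hidden layers of 16 neurons each.
            NeuralPlayer player = MakeNeuralPlayer(9, 9, 2, 16, individual);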
        public void OnNext(TrainingErrorMessage value)
        {
            NeuralNetwork.NeuralNetwork neuralNetwork            = value.NeuralNetwork;
            Matrix <double>             trainingSet              = value.TrainingSet,
                                        trainingSetOutput        = value.TrainingSetOutput,
                                        crossValidationSet       = value.CrossValidationSet,
                                        crossValidationSetOutput = value.CrossValidationSetOutput;

            //var error = trainingSet.EnumerateRows().Select(row =>
            //{
            //    var temp = neuralNetwork.ForwardInput(row);
            //    return temp.Item1[temp.Item1.Count - 1];
            //}).Zip(trainingSetOutput.EnumerateColumns(), (first, second) => computeError(first, second)).Sum();
            //TrainingErrors.Add(error);
            //CrossValidationErrors.Add(crossValidationSet.EnumerateRows().Select(row =>
            //{
            //    var temp = neuralNetwork.ForwardInput(row);
            //    return temp.Item1[temp.Item1.Count - 1];
            //}).Zip(crossValidationSetOutput.EnumerateColumns(), (first, second) => computeError(first, second)).Sum());
            //value.Error = error;
            if (_isLeven)
            {
                TrainingErrors.Add(value.TrainError);
                CrossValidationErrors.Add(value.CrossError);
                NotifyResults(new ResultMesssage(value.TrainError, value.CrossError, value.Epochs));
                return;
            }
            /*Trainer as observer*/
            double validationERR = 0, trainingERR = 0;
            //int epochsNUM=0;
            var Terror = trainingSet.EnumerateRows().Select(row =>
            {
                var temp = neuralNetwork.ForwardInput(row);
                return(temp.Item1[temp.Item1.Count - 1]);
            }).Zip(trainingSetOutput.EnumerateColumns(), (first, second) => computeError(first, second)).Sum();

            TrainingErrors.Add(Terror);
            trainingERR = Terror;
            var Verror = crossValidationSet.EnumerateRows().Select(row =>
            {
                var temp = neuralNetwork.ForwardInput(row);
                //epochsNUM = temp.Item1.Count;
                return(temp.Item1[temp.Item1.Count - 1]);
            }).Zip(crossValidationSetOutput.EnumerateColumns(), (first, second) => computeError(first, second)).Sum();

            value.Error = Terror;
            CrossValidationErrors.Add(Verror);
            validationERR = Verror;
            Console.WriteLine("{0}\t{1}\t*****{2}", value.Epochs, validationERR, trainingERR);
            //Trainer as observer
            ResultMesssage messsage = new ResultMesssage(trainingERR, validationERR, value.Epochs);

            NotifyResults(messsage);
            /*Trainer as observer*/
        }
Example 5
        /// <summary>
        /// Creates a <see cref="PredictionModel"/> from the specified file.
        /// </summary>
        /// <param name="filename">The filename.</param>
        /// <returns>The deserialized prediction model.</returns>
        public static PredictionModel FromFile(string filename)
        {
            Contracts.FileExists(filename, nameof(filename));
            Contracts.FileExtensionValid(filename, ".nn", nameof(filename));

            var deserializer = new NnDeserializer();

            NeuralNetwork.NeuralNetwork neuralNetwork = deserializer.Deserialize(filename);

            return(new PredictionModel(neuralNetwork));
        }
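A short usage sketch; the contract checks above require an existing file with the ".nn" extension (the path below is illustrative only):

            // Deserializes the stored network and wraps it in a PredictionModel.
            PredictionModel model = PredictionModel.FromFile("digits.nn");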
Example 6
 public BirdAIController(NeuralNetwork.NeuralNetwork net = null)
 {
     if (net == null)
     {
         rand = new UnityEngine.Random();
         List <int> inputs = new List <int>()
         {
             4, 4, 2
         };
         this.network = new NeuralNetwork.NeuralNetwork(inputs);
     }
     else
     {
         this.network = net;
     }
 }
Example 7
        /* Initializes the DNA from a neural network */
        public void Initialize(NeuralNetwork.NeuralNetwork with)
        {
            List <float> _genes = new List <float> ();

            for (int layer = 0; layer < with.Weight.Length; layer++)
            {
                for (int neuron = 0; neuron < with.Weight[layer].Length; neuron++)
                {
                    for (int axon = 0; axon < with.Weight[layer][neuron].Length; axon++)
                    {
                        _genes.Add(with.Weight[layer][neuron][axon]);
                    }
                }
            }
            this.genes = _genes.ToArray();
        }
Example 8
        private void Calculate()
        {
            int[] layersconfig = new int[33];
            layersconfig[32] = 1;

            for (int i = 0; i < 32; i++)
            {
                layersconfig[i] = 12;
            }

            NeuralNetwork.NeuralNetwork net = new NeuralNetwork.NeuralNetwork(2, layersconfig);

            float[][] X = new float[DataSet.Count][];
            float[][] Y = new float[DataSet.Count][];

            // prepare data
        }
        public void Train(NeuralNetwork.NeuralNetwork neuralNetwork, Matrix <double> inputs, Matrix <double> outputs, HyperParameters hyperParamters = null)
        {
            if (TrainingAlgorithm is LevenbergAlgorithm)
            {
                _isLeven = true;
            }
            else
            {
                _isLeven = false;
            }
            TrainingAlgorithm.Subscribe(this);
            var temp = Divider.Divide(inputs, outputs);

            Console.WriteLine(temp.Item1);
            Console.WriteLine(temp.Item2);
            _train(neuralNetwork, temp.Item1, temp.Item3, temp.Item2, temp.Item4, hyperParamters);
        }
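A hedged sketch of how this trainer might be invoked; the concrete TrainingAlgorithm and Divider are configured elsewhere, and the HyperParameters initializer below assumes that the MaxEpochs and MaxError members read in the Train overrides further down are settable:

            // Sketch: the trainer splits inputs/outputs via its Divider and
            // forwards the four partitions plus hyper-parameters to _train.
            var hyperParameters = new HyperParameters { MaxEpochs = 500, MaxError = 0.05 };
            trainer.Train(neuralNetwork, inputs, outputs, hyperParameters);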
Example 10
        public static trainerParams BuildNet(FlowLayoutPanel layers, FileDialog LoadData_dlg, Result r, Telerik.WinControls.UI.RadDiagram radDiagram1)
        {
            Training.Trainer     trainer = new Training.Trainer();
            NeuralNetworkBuilder b       = new NeuralNetworkBuilder();
            L_ctrl_mat           temp;
            int neuronsnumber;
            IActivatorFunction AF = null;
            FunctionApplier    functionApplier = new FunctionApplier();

            foreach (var layer in layers.Controls)
            {
                temp          = layer as L_ctrl_mat;
                neuronsnumber = Convert.ToInt16(temp.NN_drpdn.Value);
                if (ActivatorFunctions.FunctionName.SIGMOID.ToString() == temp.AF_drpdn.SelectedItem.Text)
                {
                    AF = new SigmoidFunction();
                    imgs.Add(Resources.Layer__Sigmoid);
                }
                if (ActivatorFunctions.FunctionName.TANH.ToString() == temp.AF_drpdn.SelectedItem.Text)
                {
                    AF = new TanhFunction();
                    imgs.Add(Resources.Layer_Tan_H);
                }
                functionApplier.ActivatorFunction = AF;
                b.Layer(neuronsnumber, functionApplier, (double)temp.Lr_drpdn.Value);
            }
            NeuralNetwork.NeuralNetwork nn = b.Build();
            string FileName = LoadData_dlg.FileName;
            var    tuples   = DataReader.DataReader.Instance.ReadFromFile(FileName);

            var inputs  = tuples.Item1;
            var outputs = tuples.Item2;

            // StaticDivider Divider = new StaticDivider(.6,.3);
            //var temp2 = Divider.Divide(inputs, outputs);
            //ActivatorFunctions.FunctionName applier ;

            //// test case (should belong to the second class = [0 1 0])
            //var tt = nn.ForwardInput(Vector<double>.Build.DenseOfArray(new double[] { 5.5, 2.5, 4.0, 1.3 }));
            Params       = new trainerParams();
            Params.nn    = nn;
            Params.Tuple = tuples;
            NetGraph(nn, radDiagram1);

            return(Params);
        }
Example 11
        List <NeuralNetwork.NeuralNetwork> EvolveAgents()
        {
            List <NeuralNetwork.NeuralNetwork> nets = _savedBirds.Select(b => b.controller.GetNetwork()).ToList();

            nets.Sort();

            for (int i = 0; i < _savedBirds.Count / 2; i++)
            {
                nets[i] = new NeuralNetwork.NeuralNetwork(nets[i + (_savedBirds.Count / 2)]);
                nets[i].Mutate();
                nets[i + (_savedBirds.Count / 2)] = new NeuralNetwork.NeuralNetwork(nets[i + (_savedBirds.Count / 2)]);

                //  _birdDNA[i] = new NeuralNetwork.NeuralNetwork(_birds[i+(_birdCount / 2)].controller.GetNetwork());
                //  _birdDNA[i].Mutate();
                //  _birdDNA[i + (_birdCount / 2)] = new NeuralNetwork.NeuralNetwork(_birds[i + (_birdCount / 2)].controller.GetNetwork());
            }
            //_birds.ForEach(b => b.controller.GetNetwork().fitness.Set(0));

            return(nets);
        }
Example 12
        static void Main(string[] args)
        {
            NeuralNetwork.NeuralNetwork nn    = new NeuralNetwork.NeuralNetwork();
            List <double[]>             input = new List <double[]>()
            {
                new double[] { 0, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 }
            };
            List <double[]> output = new List <double[]>()
            {
                new double[] { 0 },
                new double[] { 1 },
                new double[] { 0 },
                new double[] { 1 }
            };

            for (int i = 0; i < 100000; i++)
            {
                nn.Train(input, output);
            }
        }
Example 13
        private void Form1_Load(object sender, EventArgs e)
        {
            Matrix.Matrix output  = new Matrix.Matrix(1, 1);
            double        sum     = 0;
            double        sumTest = 0;
            int           epochs  = 0;
            int           epoch   = 5000;


            LoadFromFile(data, target);
            LoadFromFileTest(dataTest, targetTest);
            SortX(data, target);
            this.Controls.Add(pv);
            this.Controls.Add(pv2);
            PlotModel pm = new PlotModel();

            pv.Model = pm;
            PlotModel pm2 = new PlotModel();

            pv2.Model = pm2;

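            // Series/axis titles below are Polish: "Punkty treningowe" = training points,
            // "Punkty wejściowe" = input points, "Punkty wyjściowe" = output points,
            // "Zbior treningowy" = training set, "Zbior testowy" = test set,
            // "Epoka" = epoch, "Błąd" = error.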
            LineSeries punktySerii = new LineSeries
            {
                LineStyle    = LineStyle.None,
                MarkerType   = MarkerType.Diamond,
                MarkerSize   = 2,
                MarkerStroke = OxyColors.Red,
                Title        = "Punkty treningowe"
            };
            LineSeries punktySeriiTestu = new LineSeries
            {
                LineStyle    = LineStyle.None,
                MarkerType   = MarkerType.Circle,
                MarkerSize   = 2,
                MarkerStroke = OxyColors.Red,
                Title        = "Punkty wejściowe"
            };

            LineSeries punktySieci = new LineSeries()
            {
                LineStyle    = LineStyle.None,
                MarkerType   = MarkerType.Plus,
                MarkerSize   = 3,
                MarkerStroke = OxyColors.Blue,
                Title        = "Punkty wyjściowe"
            };

            LineSeries punktyBledu = new LineSeries()
            {
                LineStyle    = LineStyle.Solid,
                MarkerType   = MarkerType.Diamond,
                Color        = OxyColors.Red,
                MarkerSize   = 1,
                MarkerStroke = OxyColors.Red,
                Title        = "Zbior treningowy"
            };

            LineSeries punktyBleduTestowego = new LineSeries()
            {
                LineStyle    = LineStyle.Solid,
                MarkerType   = MarkerType.Diamond,
                Color        = OxyColors.Blue,
                MarkerSize   = 1,
                MarkerStroke = OxyColors.Blue,
                Title        = "Zbior testowy"
            };

            LineSeries punktyKresek = new LineSeries
            {
                Color           = OxyColors.Blue,
                StrokeThickness = 1
            };

            /*
             * for (int i = 0; i < data.Length; i++)
             * {
             *  if (data[i] >= 1)
             *      target[i] = data[i] * -2;
             * }
             */

            for (int i = 0; i < data.Length; i++)
            {
                punktySerii.Points.Add(new DataPoint(data[i], target[i]));
            }

            for (int i = 0; i < dataTest.Length; i++)
            {
                punktySeriiTestu.Points.Add(new DataPoint(dataTest[i], targetTest[i]));
            }



            Matrix.Matrix outputFF         = new Matrix.Matrix(1, 1);
            double[]      d                = new double[1];
            NeuralNetwork.NeuralNetwork nn = new NeuralNetwork.NeuralNetwork(1, 10, 1, true);

            // training

            for (int i = 0; i < epoch; ++i)
            {
                sum     = 0;
                sumTest = 0;

                foreach (int j in Enumerable.Range(0, 81).OrderBy(x => rnd.Next()))
                {
                    d[0] = data[j];
                    double[] y = new double[1] {
                        target[j]
                    };
                    nn.Train(d, y);
                    // Console.Write("x: " + data[j]+ "y= ");
                    // nn.FeedForward(d).DisplayMatrix();
                    // Console.WriteLine();
                }
                Console.WriteLine("epoka: " + epochs);



                for (int j = 0; j < data.Length; j++)
                {
                    d[0]     = data[j];
                    outputFF = nn.FeedForward(d);
                    sum     += (outputFF.tab[0, 0] - target[j]) * (outputFF.tab[0, 0] - target[j]) / 2;
                }

                for (int j = 0; j < dataTest.Length; j++)
                {
                    d[0]     = dataTest[j];
                    outputFF = nn.FeedForward(d);
                    sumTest += (outputFF.tab[0, 0] - targetTest[j]) * (outputFF.tab[0, 0] - targetTest[j]) / 2;
                }

                EpochErrorTest.Add(new DataPoint(epochs, sumTest / dataTest.Length));
                EpochError.Add(new DataPoint(epochs, sum / data.Length));
                ++epochs;
            }

            /*
             * for (int i = 0; i < data.Length; ++i)
             * {
             *  d[0] = data[i];
             *  outputFF = nn.FeedForward(d);
             *  punktySieci.Points.Add(new DataPoint(d[0], outputFF.tab[0, 0]));
             *  outerSpace[i] = outputFF.tab[0, 0];
             *  //Console.WriteLine("x: " + d[0] + " y: " + outputFF.tab[0, 0]);
             *  //outputFF.DisplayMatrix();
             * }
             */

            for (int i = 0; i < dataTest.Length; ++i)
            {
                d[0]     = dataTest[i];
                outputFF = nn.FeedForward(d);
                punktySieci.Points.Add(new DataPoint(d[0], outputFF.tab[0, 0]));
                outerSpaceTest[i] = outputFF.tab[0, 0];
            }

            SortX(data, outerSpace);
            SortX(dataTest, outerSpaceTest);

            /*
             * for(int i = 0; i < data.Length; ++i)
             * {
             *  punktyKresek.Points.Add(new DataPoint(data[i], outerSpace[i]));
             * }
             *
             * for (int i = 0; i < dataTest.Length; ++i)
             * {
             *  punktyKresek.Points.Add(new DataPoint(dataTest[i], outerSpaceTest[i]));
             * }
             */
            //for error:

            for (int i = 0; i < epoch; ++i)
            {
                punktyBleduTestowego.Points.Add(EpochErrorTest[i]);
                punktyBledu.Points.Add(EpochError[i]);
            }

            pm2.Series.Add(punktyBleduTestowego);
            pm2.Series.Add(punktyBledu);
            pm2.Axes.Add(new OxyPlot.Axes.LinearAxis {
                Position = OxyPlot.Axes.AxisPosition.Bottom, MinimumPadding = 0.1, MaximumPadding = 0.1, MajorGridlineStyle = LineStyle.Solid, MinorGridlineStyle = LineStyle.Dot, Title = "Epoka"
            });
            pm2.Axes.Add(new OxyPlot.Axes.LinearAxis {
                Position = OxyPlot.Axes.AxisPosition.Left, MinimumPadding = 0.1, MaximumPadding = 0.1, MajorGridlineStyle = LineStyle.Solid, MinorGridlineStyle = LineStyle.Dot, Title = "Błąd"
            });

            //for points

            pm.Series.Add(punktySerii);
            pm.Series.Add(punktySieci);
            pm.Series.Add(punktySeriiTestu);
            //pm.Series.Add(punktyKresekTestu);
            //pm.Series.Add(punktyKresek);
        }
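In formula form, the per-epoch values pushed into EpochError and EpochErrorTest above are mean half squared errors over the training and test sets respectively:

    E_{\text{train}} = \frac{1}{N}\sum_{j=1}^{N}\frac{\big(nn(x_j) - t_j\big)^2}{2}, \qquad
    E_{\text{test}}  = \frac{1}{M}\sum_{j=1}^{M}\frac{\big(nn(\hat{x}_j) - \hat{t}_j\big)^2}{2}

where nn(x) is the single output of nn.FeedForward, N = data.Length and M = dataTest.Length.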
Example 14
 /* Creates a new DNA whose genes hold the axon weights of the neural network */
 public DNA(NeuralNetwork.NeuralNetwork neuralNetwork)
 {
     genes = new float[0];
     neuralNetworkStructure = neuralNetwork.Layers;
     Initialize(neuralNetwork);
 }
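Taken together with Example 7, this constructor snapshots every axon weight into a flat gene array. A minimal sketch, assuming an already constructed and initialized NeuralNetwork exposing the jagged Weight array used by Initialize:

     // Sketch: network construction omitted; the DNA flattens its weights
     // into one float per axon, ordered layer -> neuron -> axon.
     DNA dna = new DNA(network);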
Example 15
        public static void NetGraph(NeuralNetwork.NeuralNetwork nn, Telerik.WinControls.UI.RadDiagram radDiagram1)
        {
            List <Feature_shape> featureShapes = new List <Feature_shape>();
            List <AF_shape>      afShapes      = new List <AF_shape>();
            double x = 0;
            double y = 90;

            radDiagram1.AddShape(new Input_shape(), null, new Point(x, y));
            for (int i = 0; i < nn.Layers.Count; i++)
            {
                x = i * 250 + 100;
                y = 0;
                //nn.Layers[i].Applier.ToString();
                Point location = new Point(x, y);
                radDiagram1.AddShape(new Layer_shape("\n\t" + nn.Layers[i].NeuronsNumber, imgs[i])
                {
                    //Label = { Text ="Layer "+i , }
                    //LayerBitmap=imgs[i]
                }
                                     , null, location);

                /**
                 * featureShapes.Add(new Feature_shape()
                 * {
                 *  Location = new Point(i * 250 + 100, 30),
                 *  Text = Convert.ToString(nn.Layers[i].NeuronsNumber)
                 * }
                 * );
                 * /**
                 * afShapes.Add(new AF_shape()
                 * {
                 *  Location = new Point(i * 250 + 200, 75),
                 *  Text = Convert.ToString(nn.Layers[i].Applier)
                 *
                 * }
                 * );
                 * /**/
                //(radDiagram1.Shapes[radDiagram1.Shapes.Count-1] as Layer_shape).addnodes();
            }
            x = nn.Layers.Count * 250 + 100;
            y = 90;
            radDiagram1.AddShape(new Output_shape(), null, new Point(x, y));
            Weight_edge edge;

            for (int i = 1; i < radDiagram1.Shapes.Count; i++)
            {
                radDiagram1.AddConnection((IShape)radDiagram1.Shapes[i - 1], (IShape)radDiagram1.Shapes[i]);
                radDiagram1.Connections[i - 1].TargetCapType     = CapType.Arrow2Filled;
                radDiagram1.Connections[i - 1].AllowDelete       = false;
                radDiagram1.Connections[i - 1].IsDraggingEnabled = false;
                radDiagram1.Connections[i - 1].IsEditable        = false;
                radDiagram1.Connections[i - 1].TargetCapSize     = new SizeF(20, 25);
                //I don't know how!!
                RadDiagramConnection connection1 = (RadDiagramConnection)radDiagram1.Connections[i - 1];
                connection1.BackColor = Color.LightSalmon;
                //    radDiagram1.AddConnection(
                //        radDiagram1,i
                //        //new Weight_edge(
                //        ////    (IShape)radDiagram1.Shapes[i],
                //        ////(IShape)radDiagram1.Shapes[i - 1]
                //)
                //       // )

                //        ;
            }
            for (int i = 0; i < featureShapes.Count; i++)
            {
                radDiagram1.AddShape(featureShapes[i]);
                //radDiagram1.AddShape(afShapes[i]);
                //radDiagram1.AddConnection((IShape) radDiagram1.Shapes[radDiagram1.Shapes.Count - 1],
                //   (IShape) radDiagram1.Shapes[radDiagram1.Shapes.Count - 2]);
            }
        }
Example 16
 public NeuralNetworkScene(NeuralNetwork.NeuralNetwork nn)
 {
     _nn = nn;
 }
Example 17
 public abstract void Train(NeuralNetwork.NeuralNetwork neuralNetwork, Matrix <double> trainingSet, Matrix <double> crossValidationSet, Matrix <double> trainingSetOutput, Matrix <double> crossValidationSetOutput, HyperParameters hyperParameters = null);
Example 18
 public Bird(Vector2 position, Texture2D texture, NeuralNetwork.NeuralNetwork brain) : base(position, texture)
 {
     Brain = brain;
 }
        public override void Train(NeuralNetwork.NeuralNetwork neuralNetwork, Matrix <double> trainingSet, Matrix <double> crossValidationSet, Matrix <double> trainingSetOutput, Matrix <double> crossValidationSetOutput, HyperParameters hyperParameters = null)
        {
            double maxError = 0.01, error = 5, momentum = 0.9;
            int    maxEpochs = 1000, epochs = 0;

            if (hyperParameters != null)
            {
                if (hyperParameters.MaxEpochs <= 0)
                {
                    throw new ArgumentException("Max Epochs must be positive");
                }
                if (hyperParameters.MaxError > 2 || hyperParameters.MaxError < 0)
                {
                    throw new ArgumentException("Max error cannot be negative or very large");
                }
                maxError  = hyperParameters.MaxError;
                maxEpochs = hyperParameters.MaxEpochs;
                momentum  = hyperParameters.Momentum;
            }

            TrainingErrorMessage message = new TrainingErrorMessage()
            {
                NeuralNetwork = neuralNetwork, TrainingSet = trainingSet, CrossValidationSet = crossValidationSet, TrainingSetOutput = trainingSetOutput, CrossValidationSetOutput = crossValidationSetOutput
            };
            var layers  = neuralNetwork.Layers;
            var weights = neuralNetwork.HiddenWeights;

            //for momentum
            List <Matrix <double> > prevDeltaW = new List <Matrix <double> >();

            for (int i = 0; i < weights.Count; ++i)
            {
                prevDeltaW.Add(Matrix <double> .Build.Dense(layers[i + 1].NeuronsNumber, layers[i].NeuronsNumber + 1));
            }
            //end
            //epochs++ => ++epochs
            while (error >= maxError && ++epochs <= maxEpochs)
            {
                prevDeltaW.ForEach(e => e.Clear());
                for (int i = 0; i < trainingSet.RowCount; i++)
                {
                    Vector <double> input  = trainingSet.Row(i),
                                    output = trainingSetOutput.Column(i);
                    var temp = neuralNetwork.ForwardInput(input);
                    IList <Vector <double> > acs = temp.Item1, gs = temp.Item2;
                    var D      = (output - acs[acs.Count - 1]).PointwiseMultiply(gs[gs.Count - 1]).ToColumnMatrix(); // n(output) * 1
                    var deltaW = D * acs[acs.Count - 2].ToRowMatrix() * layers[layers.Count - 1].LearningRate;       // (n(output) * 1) * ((n(output-1)+1) * 1)' = n(output) * (n(output-1)+1)
                    //for momentum
                    deltaW += computeAdditionalTerms(prevDeltaW[weights.Count - 1], momentum);
                    prevDeltaW[weights.Count - 1] = deltaW;
                    //end
                    neuralNetwork.UpdateWeightsAt(deltaW, weights.Count - 1);
                    for (int j = layers.Count - 2; j > 0; j--)
                    {
                        D      = (weights[j].Transpose() * D).RemoveRow(0).PointwiseMultiply(gs[j - 1].ToColumnMatrix()); // (n(j+1) * (n(j)+1))' * (n(j+1) * 1) = (n(j)+1) * 1, then => (n(j) * 1) .* (n(j) * 1)
                        deltaW = D * acs[j - 1].ToRowMatrix() * layers[j].LearningRate;                                   // (n(j) * 1) * ((n(j-1)+1) * 1)' = n(j) * (n(j-1)+1)
                        //for momentum
                        deltaW           += computeAdditionalTerms(prevDeltaW[j - 1], momentum);
                        prevDeltaW[j - 1] = deltaW;
                        //end
                        neuralNetwork.UpdateWeightsAt(deltaW, j - 1);
                    }
                }
                message.Epochs = epochs;
                base.Notify(message);
                error = message.Error;
                Console.WriteLine(error);
            }
            base.OnComplete();
        }
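In formula form, each iteration above applies the delta rule with a momentum term (assuming computeAdditionalTerms scales the previous update by the momentum coefficient):

    \delta_L = (t - a_L) \odot g_L, \qquad
    \delta_j = \big(W_j^{\top}\,\delta_{j+1}\big)_{\text{bias row removed}} \odot g_j, \qquad
    \Delta W_j(t) = \eta_j\,\delta_j\,a_{j-1}^{\top} + \mu\,\Delta W_j(t-1)

where a_j and g_j are the activations and derivatives returned by ForwardInput, \eta_j is the layer's LearningRate and \mu the momentum.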
Example 20
 private void _train(NeuralNetwork.NeuralNetwork neuralNetwork, Matrix <double> trainingSet,
                     Matrix <double> crossValidationSet, Matrix <double> trainingSetOutput, Matrix <double> crossValidationSetOutput, HyperParameters hyperParamters = null)
 {
     TrainingAlgorithm.Train(neuralNetwork, trainingSet, crossValidationSet, trainingSetOutput,
                             crossValidationSetOutput, hyperParamters);
 }
        public override void Train(NeuralNetwork.NeuralNetwork neuralNetwork, Matrix <double> trainingSet,
                                   Matrix <double> crossValidationSet, Matrix <double> trainingSetOutput, Matrix <double> crossValidationSetOutput,
                                   HyperParameters hyperParameters = null)
        {
            //Func<Math.IActivatorFunction, IActivationFunction> to = (fun) =>
            //{
            //    if (fun is Math.SigmoidFunction)
            //        return new SigmoidFunction();
            //    if (fun is Math.IdentityFunction)
            //        return new IdentityFunction();
            //    return new SigmoidFunction();
            //};
            //IList<ActivationLayer> layers = new List<ActivationLayer>();
            //layers.Add(new ActivationLayer(neuralNetwork.HiddenWeights[0].RowCount, trainingSet.ColumnCount, to(neuralNetwork.Layers[0].Applier.ActivatorFunction)));
            //for (int i = 1; i < neuralNetwork.HiddenWeights.Count; i++)
            //{
            //    layers.Add(new ActivationLayer(neuralNetwork.HiddenWeights[i - 1].RowCount, trainingSet.ColumnCount, to(neuralNetwork.Layers[i].Applier.ActivatorFunction)));
            //}
            double maxError = 0.01, error = 5, lr = 0.01;
            int    maxEpochs = 1000, epochs = 0;

            if (hyperParameters != null)
            {
                if (hyperParameters.MaxEpochs <= 0)
                {
                    throw new ArgumentException("Max Epochs must be positive");
                }
                if (hyperParameters.MaxError > 2 || hyperParameters.MaxError < 0)
                {
                    throw new ArgumentException("Max error cannot be negative or very large");
                }
                maxError  = hyperParameters.MaxError;
                maxEpochs = hyperParameters.MaxEpochs;
                lr        = hyperParameters.Lr;
            }
            ActivationNetwork          network = new ActivationNetwork(new SigmoidFunction(2), trainingSet.ColumnCount, neuralNetwork.Layers.Select(x => x.NeuronsNumber).ToArray());
            LevenbergMarquardtLearning teacher = new LevenbergMarquardtLearning(network, true)
            {
                LearningRate = lr
            };
            TrainingErrorMessage message       = new TrainingErrorMessage()
            {
                NeuralNetwork            = neuralNetwork,
                TrainingSet              = trainingSet,
                CrossValidationSet       = crossValidationSet,
                TrainingSetOutput        = trainingSetOutput,
                CrossValidationSetOutput = crossValidationSetOutput
            };
            int iterations = 1;

            double[][] inputs = new double[trainingSet.RowCount][],
            crossInputs  = new double[crossValidationSet.RowCount][],
            outputs      = new double[trainingSetOutput.RowCount][],
            crossOutputs = new double[crossValidationSetOutput.RowCount][];
            for (int i = 0; i < trainingSet.RowCount; i++)
            {
                inputs[i] = new double[trainingSet.ColumnCount];
                for (int j = 0; j < trainingSet.ColumnCount; j++)
                {
                    inputs[i][j] = trainingSet[i, j];
                }
            }
            for (int i = 0; i < trainingSetOutput.RowCount; i++)
            {
                outputs[i] = new double[trainingSetOutput.ColumnCount];
                for (int j = 0; j < trainingSetOutput.ColumnCount; j++)
                {
                    outputs[i][j] = trainingSetOutput[i, j];
                }
            }
            for (int i = 0; i < crossValidationSet.RowCount; i++)
            {
                crossInputs[i] = new double[crossValidationSet.ColumnCount];
                for (int j = 0; j < crossValidationSet.ColumnCount; j++)
                {
                    crossInputs[i][j] = crossValidationSet[i, j];
                }
            }
            for (int i = 0; i < crossValidationSetOutput.RowCount; i++)
            {
                crossOutputs[i] = new double[crossValidationSetOutput.ColumnCount];
                for (int j = 0; j < crossValidationSetOutput.ColumnCount; j++)
                {
                    crossOutputs[i][j] = crossValidationSetOutput[i, j];
                }
            }
            while (error > maxError && iterations++ <= maxEpochs)
            {
                message.Epochs     = iterations;
                error              = teacher.RunEpoch(inputs, outputs);
                message.TrainError = error;
                message.CrossError = teacher.ComputeError(crossInputs, crossOutputs);
                base.Notify(message);
            }

            //double mue = 0.001, mue_adj = 10, max_mue = 1e10;

            //base.Notify(message);

            //double currentError = message.Error;
            //while (currentError >= maxError && epochs++ < maxEpochs)
            //{
            //    message.Epochs = epochs;

            //    var temp = HissienAndGragient(neuralNetwork, trainingSet, trainingSetOutput);
            //    var hessien = temp.Item1;
            //    var gradient = temp.Item2;
            //    Matrix<double> blendingMatrix = Matrix<double>.Build.DenseDiagonal(hessien.RowCount, hessien.ColumnCount);
            //    var prevW = neuralNetwork.HiddenWeights.ToList();
            //    //Console.WriteLine("prev :");
            //    //prevW.ForEach(Console.WriteLine);
            //    double nextError = 100000;
            //    while (true)
            //    {
            //        var term = hessien + mue*blendingMatrix;
            //        var det = term.Determinant();

            //        if (System.Math.Abs(det) > 0)
            //        {
            //            var deltaW = term*gradient;
            //            neuralNetwork.UpdateWeightsFromVector(deltaW);
            //            //Console.WriteLine("updated :");
            //            //neuralNetwork.HiddenWeights.ForEach(Console.WriteLine);
            //            base.Notify(message);
            //            nextError = message.Error;
            //        }

            //        if (!(System.Math.Abs(det) > 0) || nextError >= currentError)
            //        {
            //            neuralNetwork.SetWeights(prevW);
            //            //Console.WriteLine("set to prev :");
            //            //neuralNetwork.HiddenWeights.ForEach(Console.WriteLine);
            //            mue *= mue_adj;
            //            if (mue > max_mue)
            //            {
            //                mue = max_mue;
            //                break;
            //            }
            //        }
            //        else
            //        {
            //            mue /= mue_adj;
            //            currentError = nextError;
            //            //Console.WriteLine("the shit is here");
            //            break;
            //        }
            //    }
            //}
        }
        private Tuple <Matrix <double>, Vector <double> > HissienAndGragient(NeuralNetwork.NeuralNetwork nn, Matrix <double> trainingSet, Matrix <double> trainingSetOutput)
        {
            var weights = nn.HiddenWeights;
            var layers  = nn.Layers;
            List <Matrix <double> > gradient2D = new List <Matrix <double> >();

            for (int i = 0; i < weights.Count; ++i)
            {
                gradient2D.Add(Matrix <double> .Build.Dense(layers[i + 1].NeuronsNumber, layers[i].NeuronsNumber + 1));
            }
            Dictionary <int, List <double> > gradients = new Dictionary <int, List <double> >();
            List <double> errors = new List <double>();

            for (int i = 0; i < trainingSet.RowCount; i++)
            {
                //compute gradient for one training example
                Vector <double> input  = trainingSet.Row(i),
                                output = trainingSetOutput.Column(i);
                var temp = nn.ForwardInput(input);
                IList <Vector <double> > acs = temp.Item1, gs = temp.Item2;
                var D      = (output - acs[acs.Count - 1]).PointwiseMultiply(gs[gs.Count - 1]).ToColumnMatrix();
                var deltaW = D * acs[acs.Count - 2].ToRowMatrix();
                gradient2D[weights.Count - 1] = deltaW;
                for (int j = layers.Count - 2; j > 0; j--)
                {
                    D                 = (nn.HiddenWeights[j].Transpose() * D).RemoveRow(0).PointwiseMultiply(gs[j - 1].ToColumnMatrix());
                    deltaW            = D * acs[j - 1].ToRowMatrix();
                    gradient2D[j - 1] = deltaW;
                }
                //end

                //put the gradient in 1D list
                int m = i;
                gradients.Add(m, new List <double>());
                for (int j = 0; j < weights.Count; j++)
                {
                    gradient2D[j].Enumerate().ForEach(e => gradients[m].Add(e));
                }
                //end

                //compute error for one training example
                var t = output - acs[acs.Count - 1];
                errors.Add(t.PointwiseMultiply(t).Sum() / 2);
                //end
            }

            //initialize Jacobian matrix
            Matrix <double> jac = Matrix <double> .Build.Dense(trainingSet.RowCount, gradients[0].Count);

            for (int i = 0; i < jac.RowCount; i++)
            {
                for (int k = 0; k < jac.ColumnCount; ++k)
                {
                    jac[i, k] = gradients[i][k];
                }
            }
            //end

            //errors vector
            var err = Vector <double> .Build.DenseOfEnumerable(errors.AsEnumerable());


            return(new Tuple <Matrix <double>, Vector <double> >(jac.Transpose() * jac, jac.Transpose() * err));
        }
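The returned pair is the Gauss-Newton approximation used by Levenberg-Marquardt: with J the Jacobian assembled above (one row of flattened weight gradients per training example) and e the vector of per-example half squared errors,

    H \approx J^{\top} J, \qquad g = J^{\top} e,

and the standard damped step solves (J^{\top}J + \mu I)\,\Delta W = J^{\top} e, increasing \mu when the error grows and decreasing it when it drops, which is the strategy sketched in the commented-out loop above.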