Ejemplo n.º 1
0
 /// <summary>
 /// Rebuilds the third (deep, bipolar-sigmoid) network and its evolutionary
 /// teacher, then randomizes the new network's weights.
 /// </summary>
 private void button4_Click_1(object sender, EventArgs e)
 {
     button4.Enabled = false;
     network3        = null;
     evteacher       = null;
     network3        = new ActivationNetwork(new BipolarSigmoidFunction(param3), 100, 100, 90, 80, 70, 60, 50, 40, 30, 20, 10, 1);
     // BUG FIX: the teacher was constructed around network2, so the freshly
     // created network3 was never the one being trained. Bind it to network3.
     evteacher       = new EvolutionaryLearning(network3, 100);
     network3.Randomize();
 }
        /// <summary>
        /// Wires up the background workers, configures the save/load file
        /// dialogs, builds the XOR training data, and creates the network and
        /// its evolutionary teacher (population size 100).
        /// </summary>
        public EvolutionaryLearningForm()
        {
            InitializeComponent();

            watch = new Stopwatch();

            backgroundWorkerTrainer.Disposed                  += backgroundWorkerTrainer_Disposed;
            backgroundWorkerTrainer.DoWork                    += backgroundWorkerTrainer_DoWork;
            backgroundWorkerTrainer.ProgressChanged           += backgroundWorkerTrainer_ProgressChanged;
            backgroundWorkerTrainer.WorkerSupportsCancellation = true;
            backgroundWorkerTrainer.WorkerReportsProgress      = true;

            // BUG FIX: the filter advertised "*.efn" files but actually matched
            // "*.ffn" (pattern after the '|'); also fixed the "Evelutionary"
            // misspelling in the user-visible strings.
            saveFileDialog1.Filter           = "Evolutionary network files (*.efn)|*.efn";
            saveFileDialog1.Title            = "Save Evolutionary network files";
            saveFileDialog1.InitialDirectory = null;
            saveFileDialog1.FileName         = null;

            openFileDialog1.Filter           = "Evolutionary network files (*.efn)|*.efn";
            openFileDialog1.Title            = "Load Evolutionary network file";
            openFileDialog1.InitialDirectory = null;
            openFileDialog1.FileName         = null;

            backgroundWorkerSignal.Disposed += backgroundWorkerSignal_Disposed;
            backgroundWorkerSignal.WorkerSupportsCancellation = true;
            backgroundWorkerSignal.WorkerReportsProgress      = true;
            backgroundWorkerSignal.DoWork             += backgroundWorkerSignal_DoWork;
            backgroundWorkerSignal.RunWorkerCompleted += backgroundWorkerSignal_RunWorkerCompleted;

            // Initialize input and output values (XOR truth table).
            input = new double[4][] {
                new double[] { 0, 0 }, new double[] { 0, 1 },
                new double[] { 1, 0 }, new double[] { 1, 1 }
            };
            output = new double[4][] {
                new double[] { 0 }, new double[] { 1 },
                new double[] { 1 }, new double[] { 0 }
            };

            // 2 inputs -> 2 hidden neurons -> 1 output.
            networkEV = new ActivationNetwork(new SigmoidFunction(2), 2, 2, 1);

            teacherEV = new EvolutionaryLearning(networkEV, 100);

            //logg.Show();

            // Pane used to draw the chart.
            myPane = new GraphPane();

            // Point pair lists.
            listPointsOne = new PointPairList();
        }
Ejemplo n.º 3
0
        /* NETWORK INPUTS (translated from Polish)
         *
         * Data for each incoming road segment of the crossing x 4   = 32
         * - ideal travel time of the segment
         * - average travel time of the segment over the last hour
         * - average travel time of the segment at this hour on the previous day
         * - number of vehicles on the segment
         * - average travel time of the segment at this hour on the previous day
         *   (NOTE(review): this bullet is duplicated in the original comment —
         *   verify which quantity was actually intended)
         * - whether anything is present per direction [0, 1] x 3 (left straight right)
         *
         * Time since the change of the light state x 6
         *
         * Neighbour data x 4                                        = 48
         * - incoming segment data x 4
         * - - ideal travel time of the segment
         * - - average travel time of the segment over the last hour
         * - - count on the segment
         *
         * TOTAL 86
         */
        /// <summary>
        /// Creates the traffic-control neural network, randomizes its weights,
        /// builds the evolutionary teacher (population size 100) and runs the
        /// initial training pass.
        /// </summary>
        public SterowanieSi()
        {
            zestawyUczace = new List <ZestawDanychSieci>();   // training data sets
            siecNeuronowa = new ActivationNetwork(
                new SigmoidFunction(),   // activation function
                WIELKOSC_WEJSCIE,        // input size
                WIELKOSC_UKRYTE,         // size of the 1st hidden layer (further layers comma-separated)
                WIELKOSC_WYJSCIE         // output size
                );

            siecNeuronowa.Randomize();

            //nauczyciel = new BackPropagationLearning(siecNeuronowa);
            //nauczyciel.LearningRate = 0.1;

            ewolutor = new EvolutionaryLearning(siecNeuronowa, 100);

            // Initial (pre-)training pass.
            UczenieWstepne();
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Trains the network with evolutionary learning on every situation in
        /// <paramref name="trainSet"/>, repeating over the whole set for up to
        /// 10000 epochs. The error of the last sample of each pass is written
        /// to the debug output.
        /// </summary>
        /// <param name="trainSet">Recorded situations and the decisions taken for them.</param>
        /// <param name="teacher">Evolutionary teacher bound to the target network.</param>
        private void TrainNetworkE(TrainSet trainSet, EvolutionaryLearning teacher)
        {
            // Reusable per-sample buffers, refilled on every iteration.
            double[] input  = new double[5];
            double[] output = new double[4];

            double error = 10;
            int    epoch = 10000;

            while (epoch-- > 0)
            {
                for (int s = 0; s < trainSet.Situation.Count; s++)
                {
                    // FIX: 'sides' and 'direction' were pre-allocated and then
                    // immediately overwritten on every iteration; take the
                    // helper results directly instead.
                    double[] sides     = SimplifyEnvironment(trainSet.Situation[s].Environment);
                    double[] direction = VectorFromDirection(trainSet.Decision[s].Direction);

                    // INPUT: colony population plus the simplified surroundings.
                    input[0] = trainSet.Situation[s].ColonyPopulation;
                    input[1] = sides[0]; // UP
                    input[2] = sides[1]; // RIGHT
                    input[3] = sides[2]; // DOWN
                    input[4] = sides[3]; // LEFT

                    // OUTPUT: the decision the network should reproduce.
                    output[0] = trainSet.Decision[s].IsLeaving ? 1 : -1;
                    output[1] = trainSet.Decision[s].PopulationToMove;
                    output[2] = direction[0]; // X
                    output[3] = direction[1]; // Y

                    // One-sample epoch: trains on each example individually.
                    error = teacher.RunEpoch(new double[][] { input }, new double[][] { output });
                }

                Debug.Print(error.ToString());
            }
        }
            /// <summary>
            /// Builds the activation network from the configured layer structure
            /// and activation function, creates the selected supervised teacher,
            /// then runs learning epochs until the target error or the iteration
            /// limit is reached. For PSOGSA learning the best solution and chart
            /// are copied back and applied to the network.
            /// </summary>
            /// <exception cref="Exception">
            /// Thrown when required activation-function or learning-algorithm
            /// parameters are missing.
            /// </exception>
            /// <exception cref="NotImplementedException">
            /// Thrown for learning algorithms whose implementation is not finished.
            /// </exception>
            public void Learn()
            {
                try
                {
                    //if (object.Equals(networkStruct, null)) { networkStruct = GetLayersStruct(LayersStruct); }

                    // Nothing to do without a layer structure or valid I/O sizes.
                    if (Equals(networkStruct, null))
                    {
                        return;
                    }

                    // FIX: use short-circuit '||' instead of the bitwise '|'.
                    if (ANN_InputsCount == -1 || ANN_OuputsCount == -1)
                    {
                        return;
                    }

                    if (Equals(ActiveFunction_Params, null) || ActiveFunction_Params.Length < 1)
                    {
                        throw new Exception("No activation function parameters are specified !!!");
                    }

                    if (Equals(LearningAlgorithm_Params, null))
                    {
                        throw new Exception("No learning algorithm parameters are specified !!!");
                    }

                    // Create the neural network. The unknown-enum case falls back
                    // to a sigmoid activation, matching the original default.
                    switch (ActivationFunction)
                    {
                    case ActivationFunctionEnum.LinearFunction:
                        Network = new ActivationNetwork(new LinearFunction(ActiveFunction_Params[0]), ANN_InputsCount, networkStruct);
                        break;

                    case ActivationFunctionEnum.BipolarSigmoidFunction:
                        Network = new ActivationNetwork(new BipolarSigmoidFunction(ActiveFunction_Params[0]), ANN_InputsCount, networkStruct);
                        break;

                    case ActivationFunctionEnum.SigmoidFunction:
                    default:
                        Network = new ActivationNetwork(new SigmoidFunction(ActiveFunction_Params[0]), ANN_InputsCount, networkStruct);
                        break;
                    }

                    // Create the teacher.
                    ISupervisedLearning teacher = null;

                    switch (LearningAlgorithm)
                    {
                    case LearningAlgorithmEnum.BackPropagationLearning:
                        // FIX: these messages previously (mis)reported missing
                        // *activation function* parameters and contained typos.
                        if (LearningAlgorithm_Params.Length < 2)
                        {
                            throw new Exception("Not enough learning algorithm parameters are specified !!!");
                        }

                        teacher = new BackPropagationLearning(Network)
                        {
                            LearningRate = LearningAlgorithm_Params[0],
                            Momentum     = LearningAlgorithm_Params[1]
                        };
                        break;

                    case LearningAlgorithmEnum.LevenbergMarquardtLearning:
                        if (LearningAlgorithm_Params.Length < 2)
                        {
                            throw new Exception("Not enough learning algorithm parameters are specified !!!");
                        }

                        teacher = new LevenbergMarquardtLearning(Network)
                        {
                            LearningRate      = LearningAlgorithm_Params[0],
                            Adjustment        = LearningAlgorithm_Params[1],
                            UseRegularization = false
                        };
                        break;

                    case LearningAlgorithmEnum.BayesianLevenbergMarquardtLearning:
                        // FIX: the unreachable setup code that followed this throw
                        // (regularized LM with Alpha/Beta from params 2 and 3) was
                        // removed; restore it here when the implementation is done.
                        throw new NotImplementedException("The implementation is not finished yet.");

                    case LearningAlgorithmEnum.EvolutionaryLearningGA:
                        if (LearningAlgorithm_Params.Length < 1)
                        {
                            throw new Exception("Not enough learning algorithm parameters are specified !!!");
                        }

                        teacher = new EvolutionaryLearning(Network, (int)LearningAlgorithm_Params[0]);
                        break;

                    case LearningAlgorithmEnum.RGA_Learning:
                        // teacher = new RGA_Learning(Network, EOA_PopulationSize, RGA_MutationPhrequency);
                        throw new NotImplementedException();

                    case LearningAlgorithmEnum.GSA_Learning:
                        // teacher = new GSA_Learning(Network, EOA_PopulationSize, MaxIteration, GSA_Go, GSA_Alpha);
                        throw new NotImplementedException();

                    case LearningAlgorithmEnum.GWO_Learning:
                        // teacher = new GWO_Learning(Network, EOA_PopulationSize, MaxIteration, GWO_Version, IGWO_uParameter);
                        throw new NotImplementedException();

                    case LearningAlgorithmEnum.HPSOGWO_Learning:
                        //teacher = new HPSOGWO_Learning(Network, EOA_PopulationSize, MaxIteration, HPSOGWO_C1, HPSOGWO_C2, HPSOGWO_C3);
                        throw new NotImplementedException();

                    case LearningAlgorithmEnum.mHPSOGWO_Learning:
                        //teacher = new HPSOGWO_Learning(Network, EOA_PopulationSize, MaxIteration, HPSOGWO_C1, HPSOGWO_C2, HPSOGWO_C3);
                        throw new NotImplementedException();

                    case LearningAlgorithmEnum.PSOGSA_Learning:
                        if (LearningAlgorithm_Params.Length < 6)
                        {
                            throw new Exception("Not enough learning algorithm parameters are specified !!!");
                        }

                        teacher = new PSOGSA_Learning(Network, (int)LearningAlgorithm_Params[0], (int)LearningAlgorithm_Params[1], (int)LearningAlgorithm_Params[2], (int)LearningAlgorithm_Params[3], (int)LearningAlgorithm_Params[4], (int)LearningAlgorithm_Params[5]);
                        break;
                    }

                    // FIX: guard against an enum value no case handled — the
                    // original would have thrown NullReferenceException below.
                    if (Equals(teacher, null))
                    {
                        throw new Exception("The selected learning algorithm is not supported !!!");
                    }

                    bool needToStop = false;
                    IterationCounter = 0;
                    double error = double.NaN;

                    // Learning loop: one epoch per iteration until the teaching
                    // error target or the iteration cap is reached.
                    while (!needToStop)
                    {
                        error = teacher.RunEpoch(mTraining_Inputs, mTraining_Outputs);

                        IterationCounter += 1;

                        if (error <= mTeachingError || IterationCounter >= MaxIteration)
                        {
                            needToStop = true;
                        }
                    }

                    FinalTeachingErr = error;

                    //----------------------------------
                    // Post-processing for algorithms that expose a best solution.
                    // The not-implemented algorithms cannot reach this point (their
                    // cases above already threw), so only PSOGSA is handled here.
                    switch (LearningAlgorithm)
                    {
                    case LearningAlgorithmEnum.PSOGSA_Learning:
                        PSOGSA_Learning psogsaL = (PSOGSA_Learning)teacher;
                        BestChart   = psogsaL.Best_Chart;
                        BestWeights = psogsaL.BestSolution;

                        // Set best weights parameters to the network.
                        SetBestWeightsToTheNetwork();
                        break;
                    }
                }
                catch
                {
                    // FIX: rethrow with 'throw;' to preserve the original stack
                    // trace ('throw ex;' reset it).
                    throw;
                }
            }
Ejemplo n.º 6
0
        /// <summary>
        /// Builds the comparison form: one shared deep network plus two small
        /// single-layer networks, one teacher per learning algorithm, the
        /// background workers that drive training, the file dialogs, and the
        /// ZedGraph error-vs-time chart.
        /// </summary>
        public BackPropogation()
        {
            InitializeComponent();

            // Shared deep network: 50 inputs -> 50 -> 10 -> 1.
            activation_nework = new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 10, 1);

            watch1 = new Stopwatch();

            watch2 = new Stopwatch();

            watch3 = new Stopwatch();

            backgroundWorkerTrainer.Disposed                  += backgroundWorkerTrainer_Disposed;
            backgroundWorkerTrainer.DoWork                    += backgroundWorkerTrainer_DoWork;
            backgroundWorkerTrainer.ProgressChanged           += backgroundWorkerTrainer_ProgressChanged;
            backgroundWorkerTrainer.WorkerSupportsCancellation = true;
            backgroundWorkerTrainer.WorkerReportsProgress      = true;
            saveFileDialog1.Filter           = "feed forward network files (*.ffn)|*.ffn";
            saveFileDialog1.Title            = "Save neural networkfile";
            saveFileDialog1.InitialDirectory = null;
            saveFileDialog1.FileName         = null;

            openFileDialog1.Filter           = "feed forward network files (*.ffn)|*.ffn";
            openFileDialog1.Title            = "Load neural network file";
            openFileDialog1.InitialDirectory = null;
            openFileDialog1.FileName         = null;

            backgroundWorkerSignal.Disposed += backgroundWorkerSignal_Disposed;
            backgroundWorkerSignal.WorkerSupportsCancellation = true;
            backgroundWorkerSignal.WorkerReportsProgress      = true;
            backgroundWorkerSignal.DoWork             += backgroundWorkerSignal_DoWork;
            backgroundWorkerSignal.RunWorkerCompleted += backgroundWorkerSignal_RunWorkerCompleted; //80, 70, 60, 50, 40,
            // NOTE(review): network1..network4 all alias the SAME instance
            // (activation_nework), so the four teachers below all train one
            // network, not four independent ones — confirm this is intended
            // (the commented-out constructors suggest separate networks once
            // existed here).
            network1                = activation_nework;
            network2                = activation_nework;                                            // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
            network3                = activation_nework;                                            // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
            network4                = activation_nework;                                            // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
            // Single-layer networks for the delta-rule and perceptron teachers.
            network5                = new ActivationNetwork(new BipolarSigmoidFunction(), 50, 1);
            network6                = new ActivationNetwork(new BipolarSigmoidFunction(), 50, 1);
            teacher                 = new BackPropagationLearning(network1);
            evteacher               = new EvolutionaryLearning(network2, 100);
            reprop                  = new ResilientBackpropagationLearning(network3);
            lbteacher               = new LevenbergMarquardtLearning(network4);
            delta                   = new DeltaRuleLearning(network5);
            perceptron              = new PerceptronLearning(network6);
            delta.LearningRate      = 1;
            perceptron.LearningRate = 0.1;
            myPane                  = new GraphPane();
            listPointsOne           = new PointPairList();

            // Replace the fresh pane with the control's own pane for drawing.
            myPane = zedGraphControl1.GraphPane;

            // set a title
            myPane.Title.Text = "Error VS Time";

            // set X and Y axis titles
            myPane.XAxis.Title.Text = "Time in Milliseconds";
            myPane.YAxis.Title.Text = "Error";
            myCurveOne = myPane.AddCurve("Learning curve", listPointsOne, Color.Red, SymbolType.None);
            // myCurveOne = myPane.AddCurve("Resilient Back Propagation", listPointstwo, Color.Green, SymbolType.None);
            // myCurveOne = myPane.AddCurve("Genetic Learning", listPointsthree, Color.Blue, SymbolType.None);
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Trains <c>Global.Model</c> on a background task with the teacher
        /// selected in <c>comboBox1</c>, reporting per-epoch error, epoch count
        /// and elapsed time through <paramref name="progress"/> until
        /// <c>IsTraining</c> is cleared or the user declines to continue after
        /// the error increases.
        /// </summary>
        /// <param name="progress">Sink for per-epoch training progress updates.</param>
        private async Task TrainModel(IProgress <TrainingProgress> progress)
        {
            if (Global.Model != null)
            {
                // Parse the learning rate from the UI; TryParse leaves 0 on
                // failure, and the clamp below enforces a minimum of 0.1.
                double learningRate = 1;
                double.TryParse(txLearnRate.Text, out learningRate);

                learningRate = Math.Max(learningRate, 0.1);

                //                BackPropagationLearning
                //              LevenbergMarquardtLearning
                //              ResilientBackpropagationLearning
                ISupervisedLearning teacher;

                // Teacher selection mirrors the combo-box item order.
                switch (comboBox1.SelectedIndex)
                {
                case 0:
                    teacher = new BackPropagationLearning(Global.Model);
                    ((BackPropagationLearning)teacher).LearningRate = learningRate;
                    break;

                case 1:
                    teacher = new LevenbergMarquardtLearning(Global.Model, true);
                    ((LevenbergMarquardtLearning)teacher).LearningRate = learningRate;
                    break;

                case 2:
                    teacher = new ResilientBackpropagationLearning(Global.Model);
                    ((ResilientBackpropagationLearning)teacher).LearningRate = learningRate;
                    break;

                case 3:
                    // Evolutionary learning has no learning rate; population 100.
                    teacher = new EvolutionaryLearning(Global.Model, 100);
                    break;

                default:
                    teacher = new LevenbergMarquardtLearning(Global.Model, true);
                    ((LevenbergMarquardtLearning)teacher).LearningRate = learningRate;
                    break;
                }
                //var teacher = new ResilientBackpropagationLearning(Global.Model);

                this.IsTraining = true;

                bool _training = true;
                var  sw        = Stopwatch.StartNew();

                // Once true, rising error no longer prompts the user again.
                bool isKeepRunning = false;

                await Task.Run(() => {
                    var retVal   = new TrainingProgress();
                    retVal.error = double.PositiveInfinity;

                    while (_training)
                    {
                        lock (syncLock)
                        {
                            // IsTraining is re-read each epoch so the UI can stop us.
                            _training = this.IsTraining;
                            var error = teacher.RunEpoch(Global.inputs, Global.outputs) / Global.inputs.Length;
                            // NOTE(review): this MessageBox is shown from the
                            // background task while syncLock is held — it blocks
                            // the epoch loop (and any other user of syncLock)
                            // until the user answers; confirm this is intended.
                            if (!isKeepRunning && (error > retVal.error))
                            {
                                if (MessageBox.Show("Training increases error. Continue?",
                                                    "Confirmation", MessageBoxButtons.YesNo) == DialogResult.Yes)
                                {
                                    isKeepRunning = true;
                                }
                                else
                                {
                                    break;
                                }
                            }
                            retVal.error = error;
                        }

                        retVal.epochs++;
                        retVal.timeElapsed = sw.ElapsedMilliseconds;

                        //if (retVal.epochs % 50 == 0) // updates score every 50 epochs
                        //{
                        //    retVal.error = GetCurrentScore();
                        //}

                        progress.Report(retVal);
                    }
                });

                sw.Stop();
            }
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Builds a deep belief network over the drug-consumption data set,
        /// pre-trains it layer-wise with contrastive divergence, fine-tunes it
        /// with evolutionary learning followed by back-propagation, and returns
        /// the last training error. When <paramref name="show"/> is true, the
        /// running errors and the test-set accuracy are printed to the console.
        /// </summary>
        /// <param name="show">Whether to print progress and the final accuracy.</param>
        /// <returns>The error of the last back-propagation epoch.</returns>
        static double Neural_Network(bool show)
        {
            double       error      = 0; // FIX: 'new double()' was a roundabout way to write 0
            DataTable    entireData = DataController.MakeDataTable("../../drug_consumption.txt");
            Codification codebook   = new Codification(entireData);
            //"Alcohol", "Amfet", !!"Amyl", "Benzos", "Cofeine", "Cannabis", "Chocolate", "Coke", (1)"Crac", ///"Ecstasy", !!"Heroine",
            //    !!"Ketamine", //"LegalH", "LSD", !!"Meth", //"Mushrooms", "Nicotine", lol "Semeron", "VSA"
            string LookingFor = "Heroine";
            int    good       = 0;

            // NOTE(review): train and "test" splits are read from the SAME file,
            // so the accuracy below is measured on the training data — confirm.
            string[][] outputs;
            string[][] inputs = DataController.MakeString("../../drug_consumption_500.txt", out outputs);
            string[][] testOutputs;
            string[][] testInputs = DataController.MakeString("../../drug_consumption_500.txt", out testOutputs);

            DataTable outputs1     = DataController.MakeDataFromString(outputs, "output");
            DataTable inputs1      = DataController.MakeDataFromString(inputs, "input");
            DataTable testOutputs1 = DataController.MakeDataFromString(testOutputs, "output");
            DataTable testInputs1  = DataController.MakeDataFromString(testInputs, "input");

            DataTable Isymbols  = codebook.Apply(inputs1);
            DataTable Osymbols  = codebook.Apply(outputs1);
            DataTable TIsymbols = codebook.Apply(testInputs1);
            DataTable TOsymbols = codebook.Apply(testOutputs1);

            double[][] inputsD  = Isymbols.ToJagged <double>("Age", "Gender", "Education", "Country", "Eticnity", "Nscore", "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS");
            double[][] outputsD = Osymbols.ToJagged <double>(LookingFor);
            outputsD = DataController.convertDT(outputsD);
            double[][] inputsT  = TIsymbols.ToJagged <double>("Age", "Gender", "Education", "Country", "Eticnity", "Nscore", "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS");
            double[][] outputsT = TOsymbols.ToJagged <double>(LookingFor);
            outputsT = DataController.convertDT(outputsT);

            DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 7);

            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();
            DeepBeliefNetworkLearning FirstLearner = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };

            int batchCount = Math.Max(1, inputs.Length / 100);

            int[]        groupsNew  = Accord.Statistics.Classes.Random(inputsD.Length, batchCount);
            double[][][] batchesNew = Accord.Statistics.Classes.Separate(inputsD, groupsNew);
            double[][][] layerData;

            // Unsupervised layer-wise pre-training (all but the output layer).
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                FirstLearner.LayerIndex = layerIndex;
                layerData = FirstLearner.GetLayerInput(batchesNew);
                for (int i = 0; i < 500; i++)
                {
                    error = FirstLearner.RunEpoch(layerData) / inputsD.Length;
                    if (i % 10 == 0 && show)
                    {
                        Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
                    }
                }
            }

            var SecondLearner = new BackPropagationLearning(network)
            {
                LearningRate = 0.15,
                Momentum     = 0.7
            };
            EvolutionaryLearning teacher = new EvolutionaryLearning(network, 100);

            // Supervised fine-tuning: evolutionary learning first...
            for (int i = 0; i < 800; i++)
            {
                error = teacher.RunEpoch(inputsD, outputsD) / inputsD.Length;
                if (i % 50 == 0 && show)
                {
                    Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
                }
            }

            // ...then back-propagation.
            for (int i = 0; i < 800; i++)
            {
                error = SecondLearner.RunEpoch(inputsD, outputsD) / inputsD.Length;
                if (i % 10 == 0 && show)
                {
                    Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
                }
            }

            // FIX: the loop previously iterated over inputsD.Length while
            // indexing the test arrays (inputsT/outputsT) — an out-of-range
            // access if the splits ever differ in size. Use the test length,
            // and divide the accuracy by it as well.
            for (int i = 0; i < inputsT.Length; i++)
            {
                double[] outputValues = network.Compute(inputsT[i]);
                if (outputValues.ToList().IndexOf(outputValues.Max()) == outputsT[i].ToList().IndexOf(outputsT[i].Max()))
                {
                    good++;
                }
            }
            if (show)
            {
                Console.WriteLine("Poprawność - " + Math.Round(((double)good / (double)inputsT.Length * 100), 4) + "%");
                Console.ReadKey();
            }

            return(error);
        }
 /// <summary>
 /// Rebuilds the third (deep, bipolar-sigmoid) network and its evolutionary
 /// teacher, then randomizes the new network's weights.
 /// </summary>
 private void button4_Click_1(object sender, EventArgs e)
 {
     button4.Enabled = false;
     network3 = null;
     evteacher = null;
     network3 = new ActivationNetwork(new BipolarSigmoidFunction(param3), 100, 100, 90, 80, 70, 60, 50, 40, 30, 20, 10, 1);
     // BUG FIX: the teacher was constructed around network2, so the freshly
     // created network3 was never the one being trained. Bind it to network3.
     evteacher = new EvolutionaryLearning(network3, 100);
     network3.Randomize();
 }
        /// <summary>
        /// Builds the comparison form: one shared deep network plus two small
        /// single-layer networks, one teacher per learning algorithm, the
        /// background workers that drive training, the file dialogs, and the
        /// ZedGraph error-vs-time chart.
        /// </summary>
        public BackPropogation()
        {
            InitializeComponent();

            // Shared deep network: 50 inputs -> 50 -> 10 -> 1.
            activation_nework = new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50,  10, 1);

            watch1 = new Stopwatch();

            watch2 = new Stopwatch();

            watch3 = new Stopwatch();

            backgroundWorkerTrainer.Disposed += backgroundWorkerTrainer_Disposed;
            backgroundWorkerTrainer.DoWork += backgroundWorkerTrainer_DoWork;
            backgroundWorkerTrainer.ProgressChanged += backgroundWorkerTrainer_ProgressChanged;
            backgroundWorkerTrainer.WorkerSupportsCancellation = true;
            backgroundWorkerTrainer.WorkerReportsProgress = true;
            saveFileDialog1.Filter = "feed forward network files (*.ffn)|*.ffn";
            saveFileDialog1.Title = "Save neural networkfile";
            saveFileDialog1.InitialDirectory = null;
            saveFileDialog1.FileName = null;

            openFileDialog1.Filter = "feed forward network files (*.ffn)|*.ffn";
            openFileDialog1.Title = "Load neural network file";
            openFileDialog1.InitialDirectory = null;
            openFileDialog1.FileName = null;

            backgroundWorkerSignal.Disposed += backgroundWorkerSignal_Disposed;
            backgroundWorkerSignal.WorkerSupportsCancellation = true;
            backgroundWorkerSignal.WorkerReportsProgress = true;
            backgroundWorkerSignal.DoWork += backgroundWorkerSignal_DoWork;
            backgroundWorkerSignal.RunWorkerCompleted += backgroundWorkerSignal_RunWorkerCompleted;//80, 70, 60, 50, 40,
            // NOTE(review): network1..network4 all alias the SAME instance
            // (activation_nework), so the four teachers below all train one
            // network, not four independent ones — confirm this is intended
            // (the commented-out constructors suggest separate networks once
            // existed here).
            network1 = activation_nework;
            network2 = activation_nework; // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
            network3 = activation_nework; // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
            network4 = activation_nework; // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
            // Single-layer networks for the delta-rule and perceptron teachers.
            network5 = new ActivationNetwork(new BipolarSigmoidFunction(), 50,1);
            network6 = new ActivationNetwork(new BipolarSigmoidFunction(), 50, 1);
            teacher = new BackPropagationLearning(network1);
            evteacher = new EvolutionaryLearning(network2, 100);
            reprop = new ResilientBackpropagationLearning(network3);
            lbteacher = new LevenbergMarquardtLearning(network4);
            delta = new DeltaRuleLearning(network5);
            perceptron = new PerceptronLearning(network6);
            delta.LearningRate = 1;
            perceptron.LearningRate = 0.1;
            myPane = new GraphPane();
            listPointsOne = new PointPairList();

            // Replace the fresh pane with the control's own pane for drawing.
            myPane = zedGraphControl1.GraphPane;

            // set a title
            myPane.Title.Text = "Error VS Time";

            // set X and Y axis titles
            myPane.XAxis.Title.Text = "Time in Milliseconds";
            myPane.YAxis.Title.Text = "Error";
            myCurveOne = myPane.AddCurve("Learning curve", listPointsOne, Color.Red, SymbolType.None);
               // myCurveOne = myPane.AddCurve("Resilient Back Propagation", listPointstwo, Color.Green, SymbolType.None);
               // myCurveOne = myPane.AddCurve("Genetic Learning", listPointsthree, Color.Blue, SymbolType.None);
        }
Ejemplo n.º 11
0
            /// <summary>
            /// Builds the configured activation network, trains it on the
            /// standardized training data with the selected learning algorithm,
            /// and stores the final error and (for metaheuristics) the best
            /// weights / convergence chart.
            /// </summary>
            public void LuanchLearning()
            {
                try
                {
                    // Nothing to do without complete training inputs/outputs.
                    if (Equals(mTrainingInputs, null) || Equals(mTrainingInputs.Data, null))
                    {
                        return;
                    }
                    if (Equals(mTrainingOutputs_DS1, null) || Equals(mTrainingOutputs_DS1.Data, null))
                    {
                        return;
                    }

                    //StandardizeData(TrainingInputs,mActivationFunction);
                    //StandardizeData(TrainingOutputs_DS1, mActivationFunction);

                    StandardizeData(TrainingInputs);
                    StandardizeData(TrainingOutputs_DS1);

                    mTraining_Inputs  = Convert(mTrainingInputs);
                    mTraining_Outputs = Convert(mTrainingOutputs_DS1);

                    // Input layer size is inferred from the first data row.
                    mInputsCount = mTrainingInputs.Data[0].List.Count();

                    int[] networkStruct = GetLayersStruct(LayersStruct);
                    if (Equals(networkStruct, null))
                    {
                        return;
                    }

                    // Create the neural network with the configured activation
                    // function; unknown values fall back to a sigmoid.
                    switch (mActivationFunction)
                    {
                    case ActivationFunctionEnum.SigmoidFunction:
                        Network = new ActivationNetwork(new SigmoidFunction(ActiveFunction_Alpha), mInputsCount, networkStruct);
                        break;

                    case ActivationFunctionEnum.BipolarSigmoidFunction:
                        Network = new ActivationNetwork(new BipolarSigmoidFunction(ActiveFunction_Alpha), mInputsCount, networkStruct);
                        break;

                    case ActivationFunctionEnum.RectifiedLinearFunction:
                        Network = new ActivationNetwork(new RectifiedLinearFunction(), mInputsCount, networkStruct);
                        break;

                    default:
                        Network = new ActivationNetwork(new SigmoidFunction(ActiveFunction_Alpha), mInputsCount, networkStruct);
                        break;
                    }

                    // Create the teacher for the selected learning algorithm.
                    ISupervisedLearning teacher = null;

                    switch (mLearningAlgorithm)
                    {
                    case LearningAlgorithmEnum.BackPropagationLearning:
                        teacher = new BackPropagationLearning(Network);
                        break;

                    case LearningAlgorithmEnum.LevenbergMarquardtLearning:
                        teacher = new LevenbergMarquardtLearning(Network);
                        break;

                    case LearningAlgorithmEnum.EvolutionaryLearningGA:
                        teacher = new EvolutionaryLearning(Network, EOA_PopulationSize);
                        break;

                    case LearningAlgorithmEnum.RGA_Learning:
                        teacher = new RGA_Learning(Network, EOA_PopulationSize, RGA_MutationPhrequency);
                        break;

                    case LearningAlgorithmEnum.GSA_Learning:
                        teacher = new GSA_Learning(Network, EOA_PopulationSize, MaxIteration, GSA_Go, GSA_Alpha);
                        break;

                    case LearningAlgorithmEnum.GWO_Learning:
                        teacher = new GWO_Learning(Network, EOA_PopulationSize, MaxIteration, GWO_Version, IGWO_uParameter);
                        break;

                    // NOTE(review): HPSOGWO and mHPSOGWO create identical
                    // teachers — confirm the modified variant should not differ.
                    case LearningAlgorithmEnum.HPSOGWO_Learning:
                        teacher = new HPSOGWO_Learning(Network, EOA_PopulationSize, MaxIteration, HPSOGWO_C1, HPSOGWO_C2, HPSOGWO_C3);
                        break;

                    case LearningAlgorithmEnum.mHPSOGWO_Learning:
                        teacher = new HPSOGWO_Learning(Network, EOA_PopulationSize, MaxIteration, HPSOGWO_C1, HPSOGWO_C2, HPSOGWO_C3);
                        break;

                    case LearningAlgorithmEnum.PSOGSA_Learning:
                        teacher = new PSOGSA_Learning(Network, EOA_PopulationSize, MaxIteration, PSOGSA_Go, PSOGSA_Alpha, PSOGSA_C1, PSOGSA_C2);
                        break;
                    }

                    // BUG FIX: an unhandled enum value used to leave teacher null
                    // and crash with a NullReferenceException inside RunEpoch.
                    if (Equals(teacher, null))
                    {
                        throw new InvalidOperationException("Unsupported learning algorithm: " + mLearningAlgorithm);
                    }

                    bool needToStop = false;
                    IterationCounter = 0;
                    double error = double.NaN;

                    // Train until the target error or the iteration cap is reached.
                    while (!needToStop)
                    {
                        // run one epoch of the learning procedure
                        error = teacher.RunEpoch(mTraining_Inputs, mTraining_Outputs);

                        IterationCounter += 1;

                        if (error <= mTeachingError)
                        {
                            needToStop = true;
                        }

                        if (IterationCounter >= MaxIteration)
                        {
                            needToStop = true;
                        }
                    }

                    FinalTeachingErr = error;

                    // For the metaheuristic teachers, pull out the convergence
                    // chart and best weight vector, then load the weights into
                    // the network.
                    switch (LearningAlgorithm)
                    {
                    case LearningAlgorithmEnum.GSA_Learning:
                        GSA_Learning gsaL = (GSA_Learning)teacher;
                        this.BestChart   = gsaL.Best_Chart;
                        this.BestWeights = gsaL.BestSolution;
                        SetBestWeightsToTheNetwork();
                        break;

                    case LearningAlgorithmEnum.HPSOGWO_Learning:
                        HPSOGWO_Learning hpgwoL = (HPSOGWO_Learning)teacher;
                        this.BestChart   = hpgwoL.Best_Chart;
                        this.BestWeights = hpgwoL.BestSolution;
                        SetBestWeightsToTheNetwork();
                        break;

                    case LearningAlgorithmEnum.mHPSOGWO_Learning:
                        HPSOGWO_Learning hpsgwoL = (HPSOGWO_Learning)teacher;
                        this.BestChart   = hpsgwoL.Best_Chart;
                        this.BestWeights = hpsgwoL.BestSolution;
                        SetBestWeightsToTheNetwork();
                        break;

                    case LearningAlgorithmEnum.GWO_Learning:
                        GWO_Learning gwoL = (GWO_Learning)teacher;
                        this.BestChart   = gwoL.Best_Chart;
                        this.BestWeights = gwoL.BestSolution;
                        SetBestWeightsToTheNetwork();
                        break;

                    case LearningAlgorithmEnum.RGA_Learning:
                        RGA_Learning rgaL = (RGA_Learning)teacher;
                        this.BestChart   = rgaL.Best_Chart;
                        this.BestWeights = rgaL.BestSolution;
                        SetBestWeightsToTheNetwork();
                        break;

                    case LearningAlgorithmEnum.PSOGSA_Learning:
                        PSOGSA_Learning psogsaL = (PSOGSA_Learning)teacher;
                        BestChart   = psogsaL.Best_Chart;
                        BestWeights = psogsaL.BestSolution;
                        SetBestWeightsToTheNetwork();
                        break;
                    }
                }
                catch (Exception)
                {
                    // BUG FIX: "throw ex;" reset the stack trace; rethrow
                    // with "throw;" to preserve the original failure point.
                    throw;
                }
            }
Ejemplo n.º 12
0
 /// <summary>
 /// Creates a trainer that optimizes the given network's weights with an
 /// evolutionary (genetic) algorithm of the specified population size.
 /// </summary>
 public EvolutionaryTrainer(NeuralNetwork neuralNetwork, int populationSize)
 {
     var activationNetwork = neuralNetwork.ActivationNetwork;
     teacher = new EvolutionaryLearning(activationNetwork, populationSize);
 }
        /// <summary>
        /// Form constructor: wires the trainer/signal background workers and
        /// file dialogs, sets up the XOR training set, builds the network and
        /// its evolutionary teacher, and prepares the chart data structures.
        /// </summary>
        public EvolutionaryLearningForm()
        {
            InitializeComponent();

            watch = new Stopwatch();

            // Trainer worker: cancellable, reports progress back to the UI.
            backgroundWorkerTrainer.Disposed += backgroundWorkerTrainer_Disposed;
            backgroundWorkerTrainer.DoWork += backgroundWorkerTrainer_DoWork;
            backgroundWorkerTrainer.ProgressChanged += backgroundWorkerTrainer_ProgressChanged;
            backgroundWorkerTrainer.WorkerSupportsCancellation = true;
            backgroundWorkerTrainer.WorkerReportsProgress = true;
            // BUG FIX: the filter text advertised *.efn but the actual pattern
            // was *.ffn, so the dialogs filtered the wrong extension (and
            // "Evelutionary" was misspelled).
            saveFileDialog1.Filter = "Evolutionary network files (*.efn)|*.efn";
            saveFileDialog1.Title = "Save Evolutionary network file";
            saveFileDialog1.InitialDirectory = null;
            saveFileDialog1.FileName = null;

            openFileDialog1.Filter = "Evolutionary network files (*.efn)|*.efn";
            openFileDialog1.Title = "Load Evolutionary network file";
            openFileDialog1.InitialDirectory = null;
            openFileDialog1.FileName = null;

            // Signal worker: cancellable, reports progress, completion handled below.
            backgroundWorkerSignal.Disposed += backgroundWorkerSignal_Disposed;
            backgroundWorkerSignal.WorkerSupportsCancellation = true;
            backgroundWorkerSignal.WorkerReportsProgress = true;
            backgroundWorkerSignal.DoWork += backgroundWorkerSignal_DoWork;
            backgroundWorkerSignal.RunWorkerCompleted += backgroundWorkerSignal_RunWorkerCompleted;

            // Training set: the XOR truth table.
            input = new double[4][] {
                new double[] {0, 0}, new double[] {0, 1},
                new double[] {1, 0}, new double[] {1, 1}
            };
            output = new double[4][] {
                new double[] {0}, new double[] {1},
                new double[] {1}, new double[] {0}
            };

            // 2 inputs, one hidden layer of 2 neurons, 1 output.
            networkEV = new ActivationNetwork(new SigmoidFunction(2), 2, 2, 1);

            // Evolutionary teacher with a population of 100 chromosomes.
            teacherEV = new EvolutionaryLearning(networkEV, 100);

            //logg.Show();

            // pane used to draw the chart
            myPane = new GraphPane();

            // point pair list backing the learning curve
            listPointsOne = new PointPairList();
        }
        /// <summary>
        /// Builds the supervised teacher for this trainer: an evolutionary
        /// (genetic) learning algorithm over the configured network and
        /// population size.
        /// </summary>
        protected override ISupervisedLearning CreateTeacher()
        {
            ISupervisedLearning evolutionaryTeacher = new EvolutionaryLearning(_network, _populationSize);
            return evolutionaryTeacher;
        }