Example No. 1
        public double TrainClassifierWithArticle(LabeledArticle article)
        {
            double[] input  = GetRawFeatures(article.Article);
            double[] output = GetOutput(article);

            double error = _teacher.Run(input, output);

            Console.WriteLine(error);
            return(error);
        }
Example No. 2
    // Update is called once per frame
    void Update()
    {
        if (Input.GetKeyDown(KeyCode.A))
        {
            agent_play = !agent_play;
        }

        double[] input  = get_input();
        double[] output = get_output();

        double[][] outputs = agent_output(input);
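        // agent_output returns the intermediate outputs of each stage; when not in play mode,
        // each stage is trained against the next stage's current output, and the final stage
        // against the desired output.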

        if (!agent_play)
        {
            double error4 = teacher4.Run(outputs[2], output);
            double error3 = teacher3.Run(outputs[1], outputs[2]);
            double error2 = teacher2.Run(outputs[0], outputs[1]);
            double error1 = teacher.Run(input, outputs[0]);
            TM.text = error4.ToString("F4");
        }
        TM2.text = outputs[3][0].ToString("F2");

        if (agent_play)
        {
            agent_work(input);
        }
    }
Example No. 3
        public void Train(int cycleCount)
        {
            double meanError = 0;

            if (trainingSet.Count < 20)
            {
                Initialize();
            }
            ActivationNetwork       trainingNetwork = new ActivationNetwork(new SigmoidFunction(2), 6, 6, 4, 4);
            BackPropagationLearning teacher         = new BackPropagationLearning(trainingNetwork);

            teacher.Momentum = 0.1;

            //teacher.Run(trainingSet[0].Dataset,trainingSet[0].Genre);
            for (int i = 0; i < cycleCount; i++)
            {
                foreach (SoundSnippet snippet in trainingSet)
                {
                    meanError += teacher.Run(
                        snippet.Dataset,
                        snippet.Genre);
                }
            }

            meanError = meanError / (cycleCount * trainingSet.Count);

            network = trainingNetwork;
            network.Save(networkPath);
            Console.WriteLine("Done!");
            Console.WriteLine("Error rate: " + Convert.ToString(meanError));
        }
Example No. 4
        public List <double> TeachNetwork(List <double> Input, List <double> Output)
        {
            var teacher = new BackPropagationLearning(network)
            {
                LearningRate = 0.1f,
                Momentum     = 0.9f
            };

            /*var teacher = new Accord.Neuro.Learning.DeepNeuralNetworkLearning(network)
             * {
             *  Algorithm = (ann, i) => new ParallelResilientBackpropagationLearning(ann),
             *  LayerIndex = network.Layers.Length - 1,
             * };*/

            //double[][] inputs, outputs;
            //Main.Database.Training.GetInstances(out inputs, out outputs);

            // Start running the learning procedure
            //for (int i = 0; i < Epochs && !shouldStop; i++)
            {
                teacher.Run(Input.ToArray(), Output.ToArray());
                //double error = teacher.RunEpoch(inputs, outputs);
            }

            //network.UpdateVisibleWeights();

            return(new List <double>(network.Compute(Input.ToArray())));
        }
Example No. 5
        public override void TrainNextMove(GridModel grid, Direction direction)
        {
            var    input  = GridToInput(grid);
            var    output = DirectionToOutput(direction);
            double error  = _teacher.Run(input, output);

            // also train rotated boards at the same time!
            input  = RotateLeft(input);
            output = DirectionToOutput(RotateLeft(direction));
            error  = Math.Max(_teacher.Run(input, output), error);

            input  = RotateLeft(input);
            output = DirectionToOutput(RotateLeft(direction));
            error  = Math.Max(_teacher.Run(input, output), error);

            input  = RotateLeft(input);
            output = DirectionToOutput(RotateLeft(direction));
            error  = Math.Max(_teacher.Run(input, output), error);
            Console.WriteLine("Error: {0}", error);
        }
Example No. 6
    // Update is called once per frame
    void Update()
    {
        //double[] input = { Random.Range(-100.0f, 100.0f) };
        //double[] output = { (input[0]+100.0f) * 2.0f / 400.0f };

        double[] input = { Random.Range(-10.0f, 10.0f) };
        //double[] output = input;
        double[] output = { (input[0] + 10.0f) / 20.0f };
        Debug.Log("output " +
                  ((output[0] > 1.0d || output[0] < -1.0d) ? output[0].ToString() : ""));
        //double[] input = { 0.0d };
        //double[] output = { 1.0d };
        double error = teacher.Run(input, output);

        TestText_TRANS.GetComponent <TextMesh>().text = error.ToString("F4");
    }
Example No. 7
        private void RunEpoch(BackPropagationLearning teacher, double[][] input, double[][] output, bool isParallel)
        {
            if (isParallel)
            {
                var data = input.Zip(output, (n, w) => new { singleInput = n, singleOutput = w });

                //var q = from v in data.AsParallel()
                //        select teacher.Run(v.singleInput, v.singleOutput);
                //q.ToArray();
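                // Caution: Run() updates shared weight/error buffers on the single teacher instance,
                // so calling it concurrently from Parallel.ForEach may produce racy weight updates.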
                Parallel.ForEach(data, v =>
                {
                    teacher.Run(v.singleInput, v.singleOutput);
                });
            }
            else
            {
                teacher.RunEpoch(input, output);
            }
        }
Example No. 8
        public void Train(int games)
        {
            ActivationNetwork       net     = new ActivationNetwork(new BipolarSigmoidFunction(), 9, 18, 9, 3, 1);
            BackPropagationLearning teacher = new BackPropagationLearning(net);

            net.Randomize();

            teacher.LearningRate = 0.025;

            for (int i = 0; i < games; i++)
            {
                var        trainingGame = PlayTrainingGame(net, i % 2 == 0, i % 3 == 0);
                double[][] inputs       = trainingGame.Item1;
                double[][] outputs      = trainingGame.Item2;
                for (int j = 0; j < inputs.Length; j++)
                {
                    teacher.Run(inputs[j], outputs[j]);
                }
            }

            Model = new Model(net);
        }
Example No. 9
        private void ZnajdźPropagacja(List <List <Osoba> > listaOsób, int IlośćUczeń, out int[,] Mapa, Size WIelkość)
        {
            int L = 0;
            Dictionary <List <Osoba>, int> Dz = new Dictionary <List <Osoba>, int>();

            AForge.Neuro.Network Sieć = new ActivationNetwork(new SigmoidFunction(1), 2, 3, 3);
            Sieć.Randomize();
            AForge.Neuro.Learning.BackPropagationLearning br = new BackPropagationLearning((ActivationNetwork)Sieć);

            for (int i = 0; i < IlośćUczeń; i++)
            {
                int R = 0;
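                // Build a one-hot target vector for the current group of people, then train on every sample in that group.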
                foreach (var item in listaOsób)
                {
                    double[] ftp = new double[listaOsób.Count];
                    ftp[R] = 1;
                    R++;
                    foreach (var item2 in item)
                    {
                        br.Run(item2.TablicaUczącaD, ftp);
                    }
                }
            }

            int   Wn  = WIelkość.Width - 1;
            int   Hn  = WIelkość.Height - 1;
            float Skx = 1f / Wn;
            float Sky = 1f / Hn;

            Mapa = new int[WIelkość.Width, WIelkość.Height];
            for (int i = 0; i < WIelkość.Width; i++)
            {
                for (int j = 0; j < WIelkość.Height; j++)
                {
                    double[] t = new double[] { (Skx * i) - 0.5f, (Sky * j) - 0.5f };
                    Mapa[i, j] = SprawdźPewnośćSieci(Sieć.Compute(t));
                }
            }
        }
Example No. 10
        private Move LearnFromMinimax(Virus percept)
        {
            //learn from MiniMax
            Move       move         = teacher.Move(percept);
            VirusBoard currentState = percept.GetBoardCopy();

            backProp.LearningRate = 0.1;
            backProp.Momentum     = 0.1;
            Move   annMove = OutputsToMove(network.Compute(BoardToInput(currentState)));
            double error   = backProp.Run(BoardToInput(currentState), MoveToOutputs(move, currentState.Size));

            if (move.Equals(annMove))
            {
                using (StreamWriter writer = new StreamWriter("ann" + percept.Size + "log.txt", true))
                    writer.WriteLine("using right move. E: " + error);
            }
            else
            {
                using (StreamWriter writer = new StreamWriter("ann" + percept.Size + "log.txt", true))
                    writer.WriteLine("using wrong move. E: " + error);
            }
            return(move);
        }
Example No. 11
        public void Fit(double[][] inputs, double[][] outputs, double validationSplit = 0)
        {
            trainAccQueue = new Queue <double>();
            cvAccQueue    = new Queue <double>();
            errorQueue    = new Queue <double>();

            int trainSetSize = (int)Math.Round(inputs.GetLength(0) * (1 - validationSplit));
            int cvSetSize    = (int)Math.Round(inputs.GetLength(0) * validationSplit);

            double[][] trainSet        = new double[trainSetSize][];
            double[][] trainSetAnswers = new double[trainSetSize][];
            double[][] cvSet           = new double[cvSetSize][];
            double[][] cvSetAnswers    = new double[cvSetSize][];

            //Shuffle data (note: no shuffling is actually performed here; samples are used in their original order)

            //Divide:
            for (int i = 0; i < trainSetSize; i++)
            {
                trainSet[i]        = inputs[i];
                trainSetAnswers[i] = outputs[i];
            }
            for (int i = trainSetSize; i < trainSetSize + cvSetSize; i++)
            {
                cvSet[i - trainSetSize]        = inputs[i];
                cvSetAnswers[i - trainSetSize] = outputs[i];
            }

            //Run epochs:
            double trainError;
            double trainAcc;
            double cvAcc;
            double picturedError = 0;

            for (int i = 0; i < epochs; i++)
            {
                if (BREAKER)
                {
                    BREAKER = false;
                    trainingFinished(this, null);
                    return;
                }
                Console.WriteLine("Running epoch {0}/{1}\n", i + 1, epochs);

                trainError = 0;
                trainAcc   = 0;
                cvAcc      = 0;

                //run train set, compute error on train set
                for (int k = 0; k < trainSetSize; k++)
                {
                    double error = teacher.Run(trainSet[k], Normalize(trainSetAnswers[k]));
                    trainError    += error;
                    picturedError += error;
                    if ((k + 1) % (trainSetSize / 10) == 0)
                    {
                        errorQueue.Enqueue(picturedError);
                        picturedError = 0;
                        errorQueueUpdated(this, null);
                    }
                }

                //compute accuracy on train set
                for (int k = 0; k < trainSetSize; k++)
                {
                    double[] output = network.Compute(trainSet[k]);
                    if (output.Argmax() == trainSetAnswers[k].Argmax())
                    {
                        trainAcc++;
                    }
                }

                //compute accuracy on cv set
                for (int k = 0; k < cvSetSize; k++)
                {
                    double[] output = network.Compute(cvSet[k]);
                    if (output.Argmax() == cvSetAnswers[k].Argmax())
                    {
                        cvAcc++;
                    }
                }

                //plots
                trainAccQueue.Enqueue(trainAcc / trainSetSize);
                cvAccQueue.Enqueue(cvAcc / cvSetSize);
                epochFinished(this, null);
                //log
                //Console.WriteLine("train set error = {0:0.0000} | train set accuracy = {1:0.0000} | validation set accuracy = {2:0.0000}\n", trainError, trainAcc/trainSetSize, cvAcc/cvSetSize);
                //MessageBox.Show("train set error = " + trainError + " \n train set accuracy = " + (trainAcc / trainSetSize) + " \n validation set accuracy = " + (cvAcc / cvSetSize));
            }
            trainingFinished(this, null);
        }
Example No. 12
        public static void RunXOR(string[] args)
        {
            string            path       = Environment.CurrentDirectory + @"\XORNetwork.txt";
            ActivationNetwork XORNetwork = new ActivationNetwork(new SigmoidFunction(1), 2, 2, 1);

            Console.WriteLine(path);
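            // The four XOR input pairs are stored flattened in trainingSet; trainingAnswers holds the corresponding XOR outputs.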
            int[] trainingSet = new int[8] {
                0, 0, 0, 1, 1, 0, 1, 1
            };
            int[] trainingAnswers = new int[4] {
                0, 1, 1, 0
            };

Start:
            Console.WriteLine("What would you like to do?");
            switch (Console.ReadLine().ToLower())
            {
            case ("train"):
                BackPropagationLearning teacher = new BackPropagationLearning(XORNetwork);
                while (true)
                {
                    int iter = 0;
                    for (iter = 0; iter < 100000000; iter++)
                    {
                        for (int i = 0; i < 4; i++)
                        {
                            //Console.WriteLine(Convert.ToString(trainingSet[i * 2]) + " " + Convert.ToString(trainingSet[i * 2 + 1]));
                            //Console.WriteLine(Convert.ToString(teacher.Run(new double[2] { trainingSet[i * 2], trainingSet[i * 2 + 1] }, new double[1] { trainingAnswers[i] })));
                            teacher.Run(new double[2] {
                                trainingSet[i * 2], trainingSet[i * 2 + 1]
                            }, new double[1] {
                                trainingAnswers[i]
                            });
                        }
                        //Console.WriteLine(iter.ToString());
                    }
                    //if (Console.ReadLine() == "stop")
                    //{
                    Console.WriteLine("Done");
                    XORNetwork.Save(path);
                    goto Start;
                    //}
                }

            case ("read"):
                Network network;
                if (File.Exists(path))
                {
                    network = Network.Load(path);
                }
                else
                {
                    network = XORNetwork;
                }

                for (int i = 0; i < 4; i++)
                {
                    Console.WriteLine(Convert.ToString(trainingSet[i * 2]) + " " + Convert.ToString(trainingSet[i * 2 + 1]));
                    Console.WriteLine(network.Compute((new double[2] {
                        trainingSet[i * 2], trainingSet[i * 2 + 1]
                    }))[0].ToString());
                    Console.WriteLine(Math.Round(network.Compute((new double[2] {
                        trainingSet[i * 2], trainingSet[i * 2 + 1]
                    }))[0]).ToString());
                }

                /*double[] userValues = new double[2];
                 * userValues[0] = Convert.ToDouble(Console.ReadLine());
                 * userValues[1] = Convert.ToDouble(Console.Read());
                 * Console.WriteLine(XORNetwork.Compute(userValues)[0].ToString());*/
                break;

            case "randomize":
                XORNetwork.Randomize();
                XORNetwork.Save(path);
                Console.WriteLine("done");
                break;

            case "status":
                break;
            }
            goto Start;
        }
Example No. 13
        private void button2_Click(object sender, EventArgs e)
        {
            List <WynikNeuroneowej> wk = new List <WynikNeuroneowej>();
            Random r = new Random();
            int    Najlepsza = 0;
            int    Długość = 0, IlośćPetli = 0;
            double WSPUczenia = 0, WspPendu = 0, Bias = 0;
            float  OstatniaPróbaUcząca, UczeniePopranego;
            try
            {
                Pobierz(out Długość, out IlośćPetli, out WSPUczenia, out WspPendu, out Bias, out OstatniaPróbaUcząca, out UczeniePopranego);
            }
            catch (Exception)
            {
                MessageBox.Show("źle wpisane dane");
                return;
            }
            Maks = Convert.ToInt32(textBox6.Text);
            listBox1.Items.Clear();
            t = new Thread(new ThreadStart(() =>
            {
                for (int i = 0; i < IlośćPetli; i++)
                {
                    ActivationNetwork network = null;
                    if (DomyślnaSiec == null)
                    {
                        network = KontrukcjaSieci(Bias);

                        Neuron.RandRange = new AForge.Range(-1, 1);
                        network.Randomize();
                    }
                    else
                    {
                        network = DomyślnaSiec;
                    }
                    BackPropagationLearning teacher = new BackPropagationLearning(network);
                    teacher.Momentum = WspPendu;

                    for (int j = 0; j < Długość; j++)
                    {
                        float Współczynik    = ((float)(Długość - j)) / Długość;
                        teacher.LearningRate = WSPUczenia * Współczynik + OstatniaPróbaUcząca;
                        TabelaUcząca rt      = ZbiórUczący[r.Next(ZbiórUczący.Count)];
                        double[] UczWyjście  = (double[])rt.Wyjście.Clone();
                        int p = 0;
                        bool CzyPoprawny;
                        while (p++ < Maks)
                        {
                            CzyPoprawny = (Loto.Matematyka.ZnajdźMaksymalną(network.Compute(rt.Wejście)) == rt.Nr);
                            if (!CzyPoprawny)
                            {
                                teacher.Run(rt.Wejście, rt.Wyjście);
                            }
                            else if (UczPoprawne)
                            {
                                teacher.LearningRate = UczeniePopranego * WSPUczenia;
                                teacher.Run(rt.Wejście, rt.Wyjście);
                                break;
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                    int IlośćPoprawnych = 0;
                    double Odhylenie    = 0;
                    StreamWriter sw     = null;
                    if (checkBox2.Checked)
                    {
                        sw = new StreamWriter(new FileStream($"zap{i}.txt", FileMode.Create));
                    }
                    foreach (var item in ZbiórUczący)
                    {
                        double[] tb = network.Compute(item.Wejście);
                        Odhylenie  += OdchylenieStadardowe(tb, item.Wyjście);
                        if (Loto.Matematyka.ZnajdźMaksymalną(tb) == item.Nr)
                        {
                            IlośćPoprawnych++;
                        }
                        foreach (var item2 in ListaNajlepszych(tb))
                        {
                            sw?.Write(item2);
                            sw?.Write('\t');
                        }
                        sw?.WriteLine(item.Nazwa);
                    }
                    sw?.Close();
                    Odhylenie /= ZbiórUczący.Count;
                    if (Najlepsza < IlośćPoprawnych)
                    {
                        network.Save("siec.tv");
                        Najlepsza = IlośćPoprawnych;
                        Console.WriteLine(IlośćPoprawnych);
                    }

                    wk.Add(new WynikNeuroneowej()
                    {
                        błąd = Odhylenie, Poprawne = IlośćPoprawnych
                    });
                    listBox1.Invoke(new TR(() => { listBox1.Items.Add(IlośćPoprawnych.ToString() + " odchylenie stadardowe " + Odhylenie); }));
                }
                this.Invoke(new TR(() => { this.Text = WynikNeuroneowej.Drukuj(wk); }));
            }));
            t.Start();
        }
Example No. 14
        void Learn()
        {
            //move this into the button click handler with a try/catch
            inputLayer = man.AngleDiscretization * 2 + 4;
            if (layers == null)
            {
                layers = new int[] { 10, 5, 1 };
            }
            //if (layers[layers.Length-1] != 1)


            AN                   = new ActivationNetwork(new BipolarSigmoidFunction(sigmoidAlphaValue), inputLayer, layers);
            teacher              = new BackPropagationLearning(AN);
            teacher.Momentum     = momentum;
            teacher.LearningRate = learningRate;

            string dir = Path.Combine(Application.StartupPath, "samples.txt");

            //GenerateSamples(@"C:\samples.txt", samplesAmount);
            //Sample[] samples = GetSamples(@"C:\samples.txt");
            GenerateSamples(dir, samplesAmount);
            Sample[] samples = GetSamples(dir);

            double    summaryError = 0;
            ArrayList errorsList   = new ArrayList();

            for (int i = 0; i < iterationsAmount; i++)
            {
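                // Optionally decay the learning rate as training progresses (controlled by the checkbox).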
                if (cb_ConnectLRtoIt.Checked)
                {
                    teacher.LearningRate = learningRate / (1 + i * 10f / iterationsAmount);
                }

                double error = 0;
                for (int j = 0; j < samples.Length; j++)
                {
                    double[] input  = samples[j].GetInput();
                    double[] output = new double[1] {
                        samples[j].reward
                    };

                    error = teacher.Run(input, output);
                }

                errorsList.Add(error);
                summaryError += error;
                if (needToStop)
                {
                    break;
                }
                tb_CurrentIteration.Text   = i.ToString();
                tb_currenAverageError.Text = (summaryError / (i + 1)).ToString();
                Application.DoEvents();
                Simulation();
            }

            // show error's dynamics
            errors = new double[errorsList.Count, 2];

            for (int i = 0, n = errorsList.Count; i < n; i++)
            {
                errors[i, 0] = i;
                errors[i, 1] = (double)errorsList[i];
            }

            errorChart.RangeX = new Range(0, errorsList.Count - 1);
            errorChart.UpdateDataSeries("error", errors);

            EnableControls(true);
        }
Example No. 15
        //training the ANN
        public void Train()
        {
            //clear the textbox
            content_box.Text = "";
            try
            {
                double gaussian = Convert.ToDouble(gw.Text);
                //use a Gaussian distribution to initialize the weights
                new GaussianWeights(network, gaussian).Randomize();
                bp.LearningRate = Convert.ToDouble(lr.Text);
                bp.Momentum     = Convert.ToDouble(momentum.Text);
            }
            //if a non-numeric string was entered, fall back to default values
            catch
            {
                new GaussianWeights(network, 0.1).Randomize();
                bp.LearningRate = 0.1;
                bp.Momentum     = 0.0;
            }
            for (int epoch_num = 0; epoch_num < epoch; epoch_num++)
            {
                int count = 0;
                for (int i = 0; i < train_number; i++)
                {
                    double[] input = new double[30];
                    for (int sensor_i = 0; sensor_i < 30; sensor_i++)
                    {
                        input[sensor_i] = Convert.ToDouble(dt.Rows[i][sensor_i]);
                    }
                    double[] output = new double[6];
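                    // One-hot encode the class label stored in column 30 into a 6-element target vector.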
                    switch (Convert.ToInt32(dt.Rows[i][30]))
                    {
                    case 0:
                        output = new double[6] {
                            1, 0, 0, 0, 0, 0
                        };
                        break;

                    case 1:
                        output = new double[6] {
                            0, 1, 0, 0, 0, 0
                        };
                        break;

                    case 2:
                        output = new double[6] {
                            0, 0, 1, 0, 0, 0
                        };
                        break;

                    case 3:
                        output = new double[6] {
                            0, 0, 0, 1, 0, 0
                        };
                        break;

                    case 4:
                        output = new double[6] {
                            0, 0, 0, 0, 1, 0
                        };
                        break;

                    case 5:
                        output = new double[6] {
                            0, 0, 0, 0, 0, 1
                        };
                        break;
                    }
                    error = bp.Run(input, output);
                    count++;
                }
                content_box.Text += "Epoch " + (epoch_num + 1) + ",Error:" + error + "\r\n";
            }
        }