Example #1
        public void TeachWithConjugateGradients(DataCollection teacherList, int batchSize, int batchAmmount, double accuracy)
        {
            int epochCount = 0;

            simpleNet.eps = 0;
            double error = double.MaxValue;

            while (error > accuracy)
            {
                simpleNet.eps = 0;
                error         = 0;
                DataCollection batch = new DataCollection();
                Random         rd    = new Random();

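                // Build the batch: draw batchSize examples (with replacement) from the teacher set.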
                batch.BatchLength = batchSize;
                for (int i = 0; i < batchSize; i++)
                {
                    batch.List.Add(teacherList.List[rd.Next(0, teacherList.BatchLength)]);
                }

                // Reuse the batch-sampling Random for dropout as well; creating a second
                // Random back-to-back can produce the same time-based seed and sequence.
                for (int btch = 0; btch < batchAmmount; btch++)
                {
                    for (int m = 0; m < batch.BatchLength; m++)
                    {
                        simpleNet.SetDropout(rd, 0);

                        // Run the forward pass through both networks; the returned outputs are not
                        // used directly here, but the pass populates the neuron sums referenced below.
                        List<double> result = Result(batch.List[m].image);

                        string[] standardResult = batch.List[m].label.Split();

                        List<List<double>> simpleNetMistake = simpleNet.CalculateMistakeBackProp(standardResult);

                        // Connection between the two networks: compute the error on the last layer of the convolutional net.
                        int p = 0;
                        for (int l = 0; l < convNet.LayersList.Last().TablesCount; l++)
                        {
                            for (int i = 0; i < convNet.LayersList.Last().TablesList[l].NeuronsList.Count; i++)
                            {
                                for (int j = 0; j < convNet.LayersList.Last().TablesList[l].NeuronsList[i].Count; j++)
                                {
                                    double sum = 0;

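                                    // Sum the weighted errors flowing back from the first hidden layer of the
                                    // fully connected net; link index p + 1 presumably skips the bias link at index 0.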
                                    for (int k = 1; k < simpleNet.LayersList[1].NeuronsCount; k++)
                                    {
                                        sum += simpleNet.LayersList[1].NeuronsList[k].IncomingLinksList[p + 1].Weight * simpleNetMistake[0][k];
                                    }

                                    p++;
                                    var neuron = convNet.LayersList.Last().TablesList[l].NeuronsList[i][j];
                                    neuron.Mistake = sum;
                                    neuron.Delta   = sum * neuron.Derivate(neuron.Sum);
                                }
                            }
                        }

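                        // Propagate the error back through the remaining convolutional layers.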
                        for (int l = convNet.layersCount - 2; l > -1; l--)
                        {
                            convNet.CountMistakeForLayer(l);
                        }
                    }
                }

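                // Note: error stays 0 above and is never recomputed, so this while loop performs
                // a single pass; the conjugate gradient weight update is not implemented in this method.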
                epochCount++;

                if (epochCount % 3 == 0)
                {
                    SaveToFile("C://1/svertka");
                }
            }
        }
Example #2
        public void TeachWithBFGS(DataCollection teacherList, int batchLength, int batchAmmount, double accuracy)
        {
            int    epochCount = 0;
            double error      = double.MaxValue;

            while (error > accuracy)
            {
                error         = 0;
                simpleNet.eps = 0;

                DataCollection batchCollection = new DataCollection();
                Random         rd = new Random();

                batchCollection.BatchLength = batchLength;
                for (int i = 0; i < batchLength; i++)
                {
                    batchCollection.List.Add(teacherList.List[rd.Next(0, teacherList.BatchLength)]);
                }

                for (int btch = 0; btch < batchAmmount; btch++)
                {
                    for (int m = 0; m < batchLength; m++)
                    {
                        simpleNet.SetDropout(rd, 0);

                        List<double> result = Result(batchCollection.List[m].image);

                        string[] standardResult = batchCollection.List[m].label.Split();

                        List<List<double>> simpleNetMistake = simpleNet.CalculateMistakeBackProp(standardResult);

                        // Connection between the two networks: compute the error on the last layer of the convolutional net.
                        int p = 0;
                        for (int l = 0; l < convNet.LayersList.Last().TablesCount; l++)
                        {
                            for (int i = 0; i < convNet.LayersList.Last().TablesList[l].NeuronsList.Count; i++)
                            {
                                for (int j = 0; j < convNet.LayersList.Last().TablesList[l].NeuronsList[i].Count; j++)
                                {
                                    double sum = 0;

                                    for (int k = 1; k < simpleNet.LayersList[1].NeuronsCount; k++)
                                    {
                                        sum += simpleNet.LayersList[1].NeuronsList[k].IncomingLinksList[p + 1].Weight * simpleNetMistake[0][k];
                                    }

                                    p++;
                                    var neuron = convNet.LayersList.Last().TablesList[l].NeuronsList[i][j];
                                    neuron.Mistake = sum;
                                    neuron.Delta   = sum * neuron.Derivate(neuron.Sum);
                                }
                            }
                        }

                        for (int l = convNet.layersCount - 2; l > -1; l--)
                        {
                            convNet.CountMistakeForLayer(l);
                        }

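                        // Accumulate curvature (Hessian) information on the convolutional layer;
                        // the BFGS weight update itself is not performed in this method.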
                        for (int l = simpleNet.layersCount - 1; l > -1; l--)
                        {
                            // Note: the loop variable l is unused; every iteration takes convNet.LayersList[1],
                            // so CountHessian runs repeatedly on that same layer.
                            var r = convNet.LayersList[1] as Convolutional_Layer;
                            r?.CountHessian();
                        }
                    }
                }
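
                // Note: error stays 0 above and is never recomputed, so the while loop exits after a single pass.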
            }
        }
Example #3
        public void TeachWithBackPropagation(DataCollection teacherList, int batchSize, int batchAmmount, double accuracy)
        {
            int epochCount = 0;

            simpleNet.eps = 0;
            double error = double.MaxValue;

            while (error > accuracy)
            {
                simpleNet.eps = 0;
                error         = 0;
                DataCollection batch = new DataCollection();
                Random         rd    = new Random();

                batch.BatchLength = batchSize;
                for (int i = 0; i < batchSize; i++)
                {
                    batch.List.Add(teacherList.List[rd.Next(0, teacherList.BatchLength)]);
                }

                // Reuse the batch-sampling Random for dropout as well; creating a second
                // Random back-to-back can produce the same time-based seed and sequence.
                for (int btch = 0; btch < batchAmmount; btch++)
                {
                    for (int m = 0; m < batch.BatchLength; m++)
                    {
                        simpleNet.SetDropout(rd, 0);

                        List<double> result = Result(batch.List[m].image);

                        string[] standardResult = batch.List[m].label.Split();

                        List<List<double>> simpleNetMistake = simpleNet.CalculateMistakeBackProp(standardResult);

                        // Connection between the two networks: compute the error on the last layer of the convolutional net.
                        int p = 0;
                        for (int l = 0; l < convNet.LayersList.Last().TablesCount; l++)
                        {
                            for (int i = 0; i < convNet.LayersList.Last().TablesList[l].NeuronsList.Count; i++)
                            {
                                for (int j = 0; j < convNet.LayersList.Last().TablesList[l].NeuronsList[i].Count; j++)
                                {
                                    double sum = 0;

                                    for (int k = 1; k < simpleNet.LayersList[1].NeuronsCount; k++)
                                    {
                                        sum += simpleNet.LayersList[1].NeuronsList[k].IncomingLinksList[p + 1].Weight * simpleNetMistake[0][k];
                                    }

                                    p++;
                                    var neuron = convNet.LayersList.Last().TablesList[l].NeuronsList[i][j];
                                    neuron.Mistake = sum;
                                    neuron.Delta   = sum * neuron.Derivate(neuron.Sum);
                                }
                            }
                        }

                        for (int l = convNet.layersCount - 2; l > -1; l--)
                        {
                            convNet.CountMistakeForLayer(l);
                        }

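                        // Apply the accumulated kernel gradients to the convolution cores (Type == 2 marks
                        // convolutional layers). al appears to be the learning-rate field defined elsewhere;
                        // each update is rounded to five decimals and the stored gradient is then cleared.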
                        foreach (ILayersList ll in convNet.LayersList)
                        {
                            if (ll.Type == 2)
                            {
                                foreach (ITables l in ll.TablesList)
                                {
                                    for (int i = 0; i < l.Core.height; i++)
                                    {
                                        for (int j = 0; j < l.Core.width; j++)
                                        {
                                            l.Core.weigth[i][j] += Math.Round(al * l.Core.mistake[i][j], 5);
                                            l.Core.mistake[i][j] = 0;
                                        }
                                    }
                                }
                            }
                        }

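                        // Update the fully connected net's weights from its back-propagated error,
                        // then clear the stored per-neuron errors before the next sample.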
                        simpleNet.RecalculateWeights(simpleNetMistake);

                        NullifyNeuronsMistake();
                    }
                }

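                // Mean error over the batchSize * batchAmmount samples processed this epoch.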
                error = simpleNet.eps / (batchSize * batchAmmount);

                // Append this epoch's error to the log file; the using block ensures the writer
                // is flushed and closed even if WriteLine throws.
                using (System.IO.StreamWriter sw = new System.IO.StreamWriter("C://1/svertka/eps.txt", true))
                {
                    sw.WriteLine(epochCount + "\t" + error);
                }

                epochCount++;

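                // Save the network state to disk every three epochs.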
                if (epochCount % 3 == 0)
                {
                    SaveToFile("C://1/svertka");
                }
            }
        }