        private void Conv()
        {
            Random random = new Random();
            Vector dat    = GetSigXY(random).Item1;
            Vector nnwOut = networkMeneger.Forward(dat);


            GraphCPU graph = new GraphCPU(false);

            // Neural network that produces the embeddings
            NNW net = new NNW();

            // Copy the first eight layers of nnw to form the embedding sub-network
            for (int i = 0; i < 8; i++)
            {
                net.Layers.Add(nnw.Layers[i]);
            }

            Vector hl = net.Activate(new NNValue(dat), graph).ToVector();

            sigChart.PlotBlack(t, dat);
            hChart.BarBlack(hl);
            outChart.PlotBlack(t, nnwOut);
        }
Example #2
        private void ShDat()
        {
            Vector   dat    = random.NextDouble() > 0.5 ? GetSig(2) : GetSig(4);
            GraphCPU graph  = new GraphCPU(false);
            Vector   nnwOut = NNW.Activate(new NNValue(dat), graph).ToVector();

            chartVisual1.PlotBlack(t, dat);
            chartVisual2.BarBlack(nnwOut);
        }
Example #3
        public Agent(NNW model, int degreesOfFreedom, Random random)
        {
            lifes         = new List <Life>();
            graphForward  = new GraphCPU(false); // forward-only graph, no gradient bookkeeping
            graphBackward = new GraphCPU(true);  // records operations for backpropagation

            this.degreesOfFreedom = degreesOfFreedom;
            this.model            = model;
            this.random           = random;
        }
Example #4
        static void Main(string[] args)
        {
            Random random = new Random(13);



            NeuralNetwork cNN = new NeuralNetwork(random, 0.2);

            var conv = new ConvolutionLayer(new RectifiedLinearUnit(0.01), 8, 3, 3);

            conv.IsSame = true;


            cNN.AddNewLayer(new Shape(28, 28), conv);
            cNN.AddNewLayer(new MaxPooling(2, 2));

            cNN.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 16, 3, 3));
            cNN.AddNewLayer(new MaxPooling(2, 2));

            cNN.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 32, 3, 3));
            cNN.AddNewLayer(new UnPooling(2, 2));

            cNN.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 16, 3, 3));
            cNN.AddNewLayer(new MaxPooling(2, 2));

            cNN.AddNewLayer(new Flatten());

            cNN.AddNewLayer(new FeedForwardLayer(20, new RectifiedLinearUnit(0.01)));
            cNN.AddNewLayer(new FeedForwardLayer(2, new SoftmaxUnit()));

            Console.WriteLine(cNN);



            GraphCPU graph = new GraphCPU(false);

            // Two random 28x28x2 inputs with one-hot targets for a two-class problem
            NNValue nValue  = NNValue.Random(28, 28, 2, random);
            NNValue nValue1 = NNValue.Random(28, 28, 2, random);
            NNValue outp    = new NNValue(new double[] { 0, 1 });
            NNValue outp1   = new NNValue(new double[] { 1, 0 });



            DataSetNoReccurent data = new DataSetNoReccurent(new NNValue[] { nValue, nValue1 }, new NNValue[] { outp, outp1 }, new CrossEntropyWithSoftmax());



            TrainerCPU trainer = new TrainerCPU(TrainType.MiniBatch, new Adam());

            trainer.BatchSize = 2;
            trainer.Train(10000, 0.001, cNN, data, 2, 0.0001);
            // Outputs of the trained network for the two training inputs
            double[] dbs  = cNN.Activate(nValue, graph).DataInTensor;
            double[] dbs1 = cNN.Activate(nValue1, graph).DataInTensor;
        }
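A minimal follow-up sketch for inspecting the resulting probabilities (dbs and dbs1 are the arrays from the example above; the printing itself is illustrative only):

        // If training converged, dbs should be close to { 0, 1 } and dbs1 to { 1, 0 }
        Console.WriteLine(string.Join(", ", dbs));
        Console.WriteLine(string.Join(", ", dbs1));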
Example #5
        private void Lin()
        {
            Random random = new Random();
            Vector dat    = GetSigXY(random).Item1;
            Vector nnwOut = networkMeneger.Forward(dat);


            GraphCPU graph = new GraphCPU(false);
            Vector   hl    = nnw.Layers[0].Activate(new NNValue(dat), graph).ToVector();

            sigChart.PlotBlack(t, dat);
            hChart.BarBlack(hl);
            outChart.PlotBlack(t, nnwOut);
        }
Example #6
        /// <summary>
        /// One pass of mini-batch training
        /// </summary>
        /// <param name="learningRate">Learning rate</param>
        /// <param name="network">Neural network</param>
        /// <param name="sequences">Dataset</param>
        /// <param name="isTraining">Whether training is performed</param>
        /// <param name="lossFunction">Loss function</param>
        /// <returns>Average loss over the processed steps</returns>
        public double PassBatch(double learningRate, INetwork network, List <DataSequence> sequences,
                                bool isTraining, ILoss lossFunction)
        {
            double numerLoss = 0;
            double denomLoss = 0;
            int    index;
            // Number of mini-batches needed to cover the dataset (rounded up)
            int    passes = (sequences.Count % BatchSize == 0) ? sequences.Count / BatchSize : sequences.Count / BatchSize + 1;

            for (int j = 0; j < passes; j++)
            {
                GraphCPU g = new GraphCPU(isTraining);

                for (int i = 0; i < BatchSize; i++)
                {
                    index = random.Next(sequences.Count);
                    var seq = sequences[index];

                    network.ResetState();
                    foreach (DataStep step in seq.Steps)
                    {
                        NNValue output = network.Activate(step.Input, g);
                        if (step.TargetOutput != null)
                        {
                            double loss = lossFunction.Measure(output, step.TargetOutput);
                            if (Double.IsNaN(loss) || Double.IsInfinity(loss))
                            {
                                return(loss);
                            }
                            numerLoss += loss;
                            denomLoss++;
                            if (isTraining)
                            {
                                lossFunction.Backward(output, step.TargetOutput);
                            }
                        }
                    }
                }

                if (isTraining)
                {
                    g.Backward();                             //backprop dw values
                    UpdateModelParams(network, learningRate); //update params
                }
            }
            return(numerLoss / denomLoss);
        }
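A minimal usage sketch for PassBatch, assuming a trainer object that exposes it (with BatchSize already set, as in Example #4), an already built INetwork net, a prepared List<DataSequence> trainSet, and CrossEntropyWithSoftmax as the loss; the epoch count and learning rate are placeholder values:

        // Hypothetical training loop around PassBatch
        for (int epoch = 0; epoch < 100; epoch++)
        {
            // isTraining = true: losses are backpropagated and parameters updated
            double loss = trainer.PassBatch(0.001, net, trainSet, true, new CrossEntropyWithSoftmax());
            if (Double.IsNaN(loss) || Double.IsInfinity(loss))
            {
                break; // PassBatch returns the bad value immediately, see above
            }
            Console.WriteLine("epoch " + epoch + ": loss = " + loss);
        }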
Example #7
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="model">Neural network model</param>
        /// <param name="degreesOfFreedom">Number of possible actions</param>
        /// <param name="random">Random number generator</param>
        /// <param name="optimizer">Optimizer (Adam is used when null)</param>
        public MLAgent(NNW model, int degreesOfFreedom, Random random, IOptimizer optimizer = null)
        {
            Generations           = new List <Generation>();
            graphForward          = new GraphCPU(false);
            graphBackward         = new GraphCPU(true);
            this.degreesOfFreedom = degreesOfFreedom;
            this.model            = model;
            this.random           = random;

            if (optimizer == null)
            {
                optimizer = new Adam();
            }

            this.optimizer = optimizer;

            trainer = new Trainer(graphBackward, TrainType.Online, optimizer);
        }
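A minimal construction sketch, assuming model is an NNW assembled elsewhere; the action count and seed are placeholder values, and Adam is used because the optimizer argument is omitted:

        // Hypothetical instantiation: 4 possible actions, fixed seed, default Adam optimizer
        MLAgent agent = new MLAgent(model, 4, new Random(42));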
Example #8
        /// <summary>
        /// Online training (one update per sequence)
        /// </summary>
        /// <param name="learningRate">Learning rate</param>
        /// <param name="network">Neural network</param>
        /// <param name="sequences">Dataset</param>
        /// <param name="applyTraining">Whether training is performed</param>
        /// <param name="lossTraining">Loss function</param>
        /// <returns>Average loss over the processed steps</returns>
        public double Pass(double learningRate, INetwork network, List <DataSequence> sequences,
                           bool applyTraining, ILoss lossTraining)
        {
            double numerLoss = 0;
            double denomLoss = 0;



            foreach (DataSequence seq in sequences)
            {
                GraphCPU g = new GraphCPU(applyTraining);

                network.ResetState();
                foreach (DataStep step in seq.Steps)
                {
                    NNValue output = network.Activate(step.Input, g);
                    if (step.TargetOutput != null)
                    {
                        double loss = lossTraining.Measure(output, step.TargetOutput);
                        if (Double.IsNaN(loss) || Double.IsInfinity(loss))
                        {
                            return(loss);
                        }
                        numerLoss += loss;
                        denomLoss++;
                        if (applyTraining)
                        {
                            lossTraining.Backward(output, step.TargetOutput);
                        }
                    }
                }

                if (applyTraining)
                {
                    g.Backward();                             //backprop dw values
                    UpdateModelParams(network, learningRate); //update params
                }
            }



            return(numerLoss / denomLoss);
        }
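Pass differs from PassBatch in Example #6 only in how the data is traversed: it walks every sequence exactly once per call and updates the parameters after each sequence, whereas PassBatch samples BatchSize random sequences per mini-batch and updates once per batch.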
Example #9
        public void Forward(int[] inp)
        {
            GraphCPU graph = new GraphCPU(false);

            // 10 acts as the initial "previous symbol" index (the vocabulary has 11 symbols, 0..10)
            int indOld = 10;

            network.ResetState();
            // Encoding pass: feed the whole input sequence through the network
            for (int i = 0; i < inp.Length; i++)
            {
                NNValue valueMatrix = new NNValue(DataSetSeq2Seq.GetValue(inp[i], 11));
                network.Activate(valueMatrix, graph);
            }


            // Decoding pass: feed the previous prediction back in and print each predicted index
            for (int i = 0; i < inp.Length; i++)
            {
                NNValue valueMatrix = new NNValue(DataSetSeq2Seq.GetValue(indOld, 11));
                indOld = GetInd(network.Activate(valueMatrix, graph));
                Console.Write(indOld);
            }

            Console.WriteLine();
        }
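GetInd is not shown in this snippet; a plausible sketch, assuming it simply returns the index of the largest component of the network output, read through DataInTensor as in Example #4:

        // Assumed implementation: argmax over the output activations
        private int GetInd(NNValue output)
        {
            double[] data = output.DataInTensor;
            int best = 0;
            for (int i = 1; i < data.Length; i++)
            {
                if (data[i] > data[best])
                {
                    best = i;
                }
            }
            return best;
        }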