public void Generate(Shape inpShape, Random random, double бесполезный_аргумент)
        {
            // Re-initializes this layer's parameters for the given input shape.
            // NOTE(review): the third parameter is never used (its name is Russian
            // for "useless argument"); kept only for signature compatibility.
            InputShape = inpShape;

            // Weight std = 1/sqrt(fan-out), matching the other layer initializers.
            double stdDev = 1.0 / Math.Sqrt(OutputShape.H);

            _w = NNValue.Random(OutputShape.H, inpShape.H, stdDev, random);
            _b = new NNValue(OutputShape.H);
        }
Example #2
 public RnnLayer(int inputDimension, int outputDimension, INonlinearity hiddenUnit, double initParamsStdDev,
                 Random rng)
 {
     // Simple recurrent layer. The single weight matrix spans the concatenation
     // of the input and the previous hidden state, hence its column count is
     // inputDimension + outputDimension.
     InputShape  = new Shape(inputDimension);
     OutputShape = new Shape(outputDimension);

     _f = hiddenUnit;
     _w = NNValue.Random(outputDimension, inputDimension + outputDimension, initParamsStdDev, rng);
     _b = new NNValue(outputDimension);

     // Start from a clean hidden state.
     ResetState();
 }
        public FeedForwardLayer(Shape inputShape, int outputDimension, INonlinearity f, Random rng)
        {
            // Fully connected layer; weights are drawn with std = 1/sqrt(fan-out).
            double stdDev = 1.0 / Math.Sqrt(outputDimension);

            InputShape  = inputShape;
            OutputShape = new Shape(outputDimension);

            _w = NNValue.Random(OutputShape.H, InputShape.H, stdDev, rng);
            _b = new NNValue(outputDimension);
            _f = f;
        }
Example #4
        static void Main(string[] args)
        {
            // Fixed seed so the demo is reproducible.
            Random rng = new Random(13);

            // Assemble the network: conv/pool stack -> flatten -> dense head.
            NeuralNetwork net = new NeuralNetwork(rng, 0.2);

            var firstConv = new ConvolutionLayer(new RectifiedLinearUnit(0.01), 8, 3, 3)
            {
                IsSame = true // "same" padding on the first convolution
            };

            net.AddNewLayer(new Shape(28, 28), firstConv);
            net.AddNewLayer(new MaxPooling(2, 2));

            net.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 16, 3, 3));
            net.AddNewLayer(new MaxPooling(2, 2));

            net.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 32, 3, 3));
            net.AddNewLayer(new UnPooling(2, 2));

            net.AddNewLayer(new ConvolutionLayer(new RectifiedLinearUnit(0.01), 16, 3, 3));
            net.AddNewLayer(new MaxPooling(2, 2));

            net.AddNewLayer(new Flatten());

            net.AddNewLayer(new FeedForwardLayer(20, new RectifiedLinearUnit(0.01)));
            net.AddNewLayer(new FeedForwardLayer(2, new SoftmaxUnit()));

            Console.WriteLine(net);

            GraphCPU graph = new GraphCPU(false);

            // Two random 28x28x2 samples with one-hot targets.
            NNValue sampleA = NNValue.Random(28, 28, 2, rng);
            NNValue sampleB = NNValue.Random(28, 28, 2, rng);
            NNValue targetA = new NNValue(new double[] { 0, 1 });
            NNValue targetB = new NNValue(new double[] { 1, 0 });

            DataSetNoReccurent data = new DataSetNoReccurent(
                new NNValue[] { sampleA, sampleB },
                new NNValue[] { targetA, targetB },
                new CrossEntropyWithSoftmax());

            TrainerCPU trainer = new TrainerCPU(TrainType.MiniBatch, new Adam())
            {
                BatchSize = 2
            };
            trainer.Train(10000, 0.001, net, data, 2, 0.0001);

            // Forward passes after training (results inspected in the debugger).
            double[] predictionA = net.Activate(sampleA, graph).DataInTensor;
            double[] predictionB = net.Activate(sampleB, graph).DataInTensor;
        }
Example #5
        public GruLayer(int inputDimension, int outputDimension, Random rng)
        {
            // GRU layer: three gate groups (mix/update, new/candidate, reset), each
            // with an input->hidden matrix, a hidden->hidden matrix and a bias.
            InputShape  = new Shape(inputDimension);
            OutputShape = new Shape(outputDimension);

            // Weight std = 1/sqrt(fan-out), as in the other layers.
            double stdDev = 1.0 / Math.Sqrt(outputDimension);

            _hmix  = NNValue.Random(outputDimension, inputDimension,  stdDev, rng);
            _hHmix = NNValue.Random(outputDimension, outputDimension, stdDev, rng);
            _bmix  = new NNValue(outputDimension);

            _hnew  = NNValue.Random(outputDimension, inputDimension,  stdDev, rng);
            _hHnew = NNValue.Random(outputDimension, outputDimension, stdDev, rng);
            _bnew  = new NNValue(outputDimension);

            _hreset  = NNValue.Random(outputDimension, inputDimension,  stdDev, rng);
            _hHreset = NNValue.Random(outputDimension, outputDimension, stdDev, rng);
            _breset  = new NNValue(outputDimension);
        }
Example #6
        void Init(Shape inputShape, int outputDimension, Random rng)
        {
            // Shape-based overload of the GRU initializer: only the height of the
            // input shape is used as the input dimension.
            int inputDimension = inputShape.H;
            double stdDev = 1.0 / Math.Sqrt(outputDimension);

            InputShape  = new Shape(inputDimension);
            OutputShape = new Shape(outputDimension);

            _hmix  = NNValue.Random(outputDimension, inputDimension,  stdDev, rng);
            _hHmix = NNValue.Random(outputDimension, outputDimension, stdDev, rng);
            _bmix  = new NNValue(outputDimension);

            _hnew  = NNValue.Random(outputDimension, inputDimension,  stdDev, rng);
            _hHnew = NNValue.Random(outputDimension, outputDimension, stdDev, rng);
            _bnew  = new NNValue(outputDimension);

            _hreset  = NNValue.Random(outputDimension, inputDimension,  stdDev, rng);
            _hHreset = NNValue.Random(outputDimension, outputDimension, stdDev, rng);
            _breset  = new NNValue(outputDimension);
        }
Example #7
 public LstmLayer(int inputDimension, int outputDimension, double initParamsStdDev, Random rnd)
 {
     // LSTM layer: input, forget, output and cell-write gates, each with an
     // input->hidden matrix, a hidden->hidden matrix and a bias vector.
     InputShape  = new Shape(inputDimension);
     OutputShape = new Shape(outputDimension);

     _wix       = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rnd);
     _wih       = NNValue.Random(outputDimension, outputDimension, initParamsStdDev, rnd);
     _inputBias = new NNValue(outputDimension);

     _wfx = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rnd);
     _wfh = NNValue.Random(outputDimension, outputDimension, initParamsStdDev, rnd);
     // Forget bias starts at 1.0, as recommended by
     // http://jmlr.org/proceedings/papers/v37/jozefowicz15.pdf
     _forgetBias = NNValue.Ones(outputDimension, 1);

     _wox        = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rnd);
     _woh        = NNValue.Random(outputDimension, outputDimension, initParamsStdDev, rnd);
     _outputBias = new NNValue(outputDimension);

     _wcx           = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rnd);
     _wch           = NNValue.Random(outputDimension, outputDimension, initParamsStdDev, rnd);
     _cellWriteBias = new NNValue(outputDimension);

     ResetState(); // start the network from a clean hidden/cell state
 }
Example #8
        /// <summary>
        /// Initializes the LSTM parameters (input/forget/output/cell-write gates)
        /// from an input shape, using its height as the input dimension.
        /// </summary>
        void Init(Shape inputShape, int outputDimension, double initParamsStdDev, Random rnd)
        {
            int inputDimension = inputShape.H;

            InputShape  = new Shape(inputDimension);
            OutputShape = new Shape(outputDimension);

            _wix       = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rnd);
            _wih       = NNValue.Random(outputDimension, outputDimension, initParamsStdDev, rnd);
            _inputBias = new NNValue(outputDimension);

            _wfx = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rnd);
            _wfh = NNValue.Random(outputDimension, outputDimension, initParamsStdDev, rnd);
            // Forget bias starts at 1.0 (Jozefowicz et al., 2015).
            _forgetBias = NNValue.Ones(outputDimension, 1);

            _wox        = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rnd);
            _woh        = NNValue.Random(outputDimension, outputDimension, initParamsStdDev, rnd);
            _outputBias = new NNValue(outputDimension);

            _wcx           = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rnd);
            _wch           = NNValue.Random(outputDimension, outputDimension, initParamsStdDev, rnd);
            _cellWriteBias = new NNValue(outputDimension);

            ResetState();
        }
        void Init(Shape inputShape, FilterStruct filterStruct, INonlinearity func, Random rnd)
        {
            // Convolution layer initializer: "valid" convolution output size
            // (in - filter + 1) plus any configured padding.
            InputShape = inputShape;
            fs         = filterStruct;

            int outputH = inputShape.H - filterStruct.FilterH + 1 + _padY;
            int outputW = inputShape.W - filterStruct.FilterW + 1 + _padX;
            OutputShape = new Shape(outputH, outputW, filterStruct.FilterCount);

            // Weight std = 1/sqrt(number of output elements).
            double stdDev = 1.0 / Math.Sqrt(OutputShape.Len);
            int depth = InputShape.D;

            Function = func;
            Filters  = new NNValue[filterStruct.FilterCount];
            for (int i = 0; i < Filters.Length; i++)
            {
                Filters[i] = NNValue.Random(filterStruct.FilterH, filterStruct.FilterW, depth, stdDev, rnd);
            }
        }
Example #10
 void Init(Shape inputShape, int outputDimension, double initParamsStdDev, Random rng)
 {
     // Linear layer initializer: a single weight matrix, no bias term.
     InputShape  = inputShape;
     OutputShape = new Shape(outputDimension);

     _w = NNValue.Random(outputDimension, inputShape.H, initParamsStdDev, rng);
 }
Example #11
        /// <summary>
        /// Linear projection layer — intentionally has no bias term.
        /// </summary>
        public LinearLayer(int inputDimension, int outputDimension, double initParamsStdDev, Random rng)
        {
            InputShape  = new Shape(inputDimension);
            OutputShape = new Shape(outputDimension);

            _w = NNValue.Random(outputDimension, inputDimension, initParamsStdDev, rng);
        }