Beispiel #1
0
        /// <summary>
        /// Applies <paramref name="neuron"/> element-wise to <paramref name="m"/>,
        /// skipping units flagged in <c>m.Dropped</c>. When backprop is enabled,
        /// registers a step that accumulates f'(x) * upstream-gradient into <c>m.Dw</c>.
        /// </summary>
        /// <param name="neuron">Element-wise nonlinearity (forward + derivative).</param>
        /// <param name="m">Input matrix; dropped entries keep a zero activation.</param>
        /// <returns>A new matrix holding the activated values.</returns>
        public Matrix Nonlin(INonlinearity neuron, Matrix m)
        {
            int    count  = m.W.Length;
            Matrix output = new Matrix(m.Rows, m.Cols);

            for (int idx = 0; idx < count; idx++)
            {
                // Dropped units retain the matrix's default (zero) value.
                if (m.Dropped[idx])
                {
                    continue;
                }
                output.W[idx] = neuron.Forward(m.W[idx]);
            }

            if (this.ApplyBackprop)
            {
                Runnable step = new Runnable();
                step.Run = delegate()
                {
                    // Chain rule: dL/dx += f'(x) * dL/dy, again skipping dropped units.
                    for (int idx = 0; idx < count; idx++)
                    {
                        if (m.Dropped[idx])
                        {
                            continue;
                        }
                        m.Dw[idx] += neuron.Backward(m.W[idx]) * output.Dw[idx];
                    }
                };
                Backprop.Add(step);
            }
            return output;
        }
Beispiel #2
0
 /// <summary>
 /// Builds a simple recurrent layer. The weight matrix spans the concatenation
 /// of the current input and the previous hidden state, hence the
 /// <c>inputDimension + outputDimension</c> column count.
 /// </summary>
 public RnnLayer(int inputDimension, int outputDimension, INonlinearity hiddenUnit, double initParamsStdDev, Random rng, double dropout) : base(dropout, inputDimension, outputDimension, rng)
 {
     _f = hiddenUnit;
     _inputDimension = inputDimension;
     _outputDimension = outputDimension;

     // Recurrent weights act on [input ; previous hidden]; bias starts at zero.
     _w = Matrix.Random(outputDimension, inputDimension + outputDimension, initParamsStdDev, rng);
     _b = new Matrix(outputDimension);
 }
        /// <summary>
        /// Builds a fully-connected layer with 1/sqrt(fanIn) weight scaling and a
        /// zero bias, then snapshots the initial weights via <c>SaveWeights()</c>.
        /// NOTE(review): <paramref name="initParamsStdDev"/> is accepted but not used
        /// here — the fixed 1/sqrt(inputDimension) scale wins; confirm intentional.
        /// </summary>
        public FeedForwardLayer(int inputDimension, int outputDimension, INonlinearity f, double initParamsStdDev, Random rng, double dropout) : base(dropout, inputDimension, outputDimension, rng)
        {
            this._f = f;
            _w = Matrix.Random(outputDimension, inputDimension, 1 / Math.Sqrt(inputDimension), rng);
            _b = new Matrix(outputDimension);

            SaveWeights();
        }
 /// <summary>
 /// Builds a simple recurrent layer (no dropout / base wiring in this variant).
 /// Weights cover the concatenated [input ; previous hidden] vector.
 /// </summary>
 public RnnLayer(int inputDimension, int outputDimension, INonlinearity hiddenUnit, double initParamsStdDev,
     Random rng)
 {
     _f = hiddenUnit;
     this._inputDimension  = inputDimension;
     this._outputDimension = outputDimension;

     // Column count is inputDimension + outputDimension because the layer
     // consumes the input stacked with the previous hidden state.
     _w = Matrix.Random(outputDimension, inputDimension + outputDimension, initParamsStdDev, rng);
     _b = new Matrix(outputDimension);
 }
        /// <summary>
        /// Builds a fully-connected layer from a <see cref="Shape"/> description.
        /// Weights are drawn with standard deviation 1/sqrt(outputDimension).
        /// </summary>
        public FeedForwardLayer(Shape inputShape, int outputDimension, INonlinearity f, Random rng)
        {
            InputShape  = inputShape;
            OutputShape = new Shape(outputDimension);
            this._f     = f;

            // Scale initial weights by 1/sqrt(fanOut).
            double stdDev = 1.0 / Math.Sqrt(outputDimension);
            _w = NNValue.Random(OutputShape.H, InputShape.H, stdDev, rng);
            _b = new NNValue(outputDimension);
        }
 /// <summary>
 /// Builds a simple recurrent layer (Shape-based variant) and clears its
 /// hidden state via <c>ResetState()</c>.
 /// </summary>
 public RnnLayer(int inputDimension, int outputDimension, INonlinearity hiddenUnit, double initParamsStdDev,
                 Random rng)
 {
     _f = hiddenUnit;
     InputShape = new Shape(inputDimension);
     OutputShape = new Shape(outputDimension);

     // Weights act on the input concatenated with the previous hidden state.
     _w = NNValue.Random(outputDimension, inputDimension + outputDimension, initParamsStdDev, rng);
     _b = new NNValue(outputDimension);

     ResetState();
 }
        /// <summary>
        /// Builds a convolution layer from an explicit filter description.
        /// Each filter starts as a zero-initialized H×W kernel.
        /// </summary>
        public ConvolutionLayer(FilterStruct filterStruct, INonlinearity func)
        {
            fs       = filterStruct;
            Function = func;

            int filterCount = fs.FilterCount;
            Filters = new NNValue[filterCount];
            for (int k = 0; k < filterCount; k++)
            {
                Filters[k] = new NNValue(fs.FilterH, fs.FilterW);
            }
        }
        /// <summary>
        /// Convenience constructor: builds <paramref name="count"/> zero-initialized
        /// h×w filters (3×3 by default) and records the geometry in a FilterStruct.
        /// </summary>
        public ConvolutionLayer(INonlinearity func, int count, int h = 3, int w = 3)
        {
            Function = func;
            fs = new FilterStruct()
            {
                FilterH = h, FilterW = w, FilterCount = count
            };

            Filters = new NNValue[count];
            for (int k = 0; k < count; k++)
            {
                Filters[k] = new NNValue(h, w);
            }
        }
Beispiel #9
0
 /// <summary>
 /// Loads a feed-forward layer from <paramref name="dir"/>: the activation name
 /// comes from the first line of info.csv, the weights and bias from W.csv and
 /// B.csv. Returns null (after logging) if anything goes wrong.
 /// </summary>
 static FeedForwardLayer loadFFLayer(string dir)
 {
     try
     {
         // First line of info.csv names the nonlinearity type.
         string noLinearity;
         using (StreamReader sr = new StreamReader(Path.Combine(dir, "info.csv")))
         {
             noLinearity = sr.ReadLine();
         }

         // An unrecognized (or missing) name leaves lin null, as before.
         INonlinearity lin = null;
         switch (noLinearity)
         {
             case "LinearUnit":
                 lin = new LinearUnit();
                 break;

             case "RectifiedLinearUnit":
                 lin = new RectifiedLinearUnit();
                 break;

             case "SigmoidUnit":
                 lin = new SigmoidUnit();
                 break;

             case "SineUnit":
                 lin = new SineUnit();
                 break;

             case "TanhUnit":
                 lin = new TanhUnit();
                 break;
         }

         Matrix w = loadMatrix(Path.Combine(dir, "W.csv"));
         Matrix b = loadMatrix(Path.Combine(dir, "B.csv"));
         return new FeedForwardLayer(w, b, lin);
     }
     catch (Exception ex)
     {
         // Best-effort loader: report the failure and fall through to null.
         Console.WriteLine(ex.Message);
     }
     return null;
 }
        /// <summary>
        /// Shared constructor body: derives the output geometry from the input
        /// shape, filter size, and padding, then allocates randomly initialized
        /// filters scaled by 1/sqrt(outputLen).
        /// </summary>
        void Init(Shape inputShape, FilterStruct filterStruct, INonlinearity func, Random rnd)
        {
            InputShape = inputShape;
            Function   = func;
            fs         = filterStruct;

            // Valid convolution plus padding: out = in - filter + 1 + pad.
            int outH = inputShape.H - filterStruct.FilterH + 1 + _padY;
            int outW = inputShape.W - filterStruct.FilterW + 1 + _padX;
            OutputShape = new Shape(outH, outW, filterStruct.FilterCount);

            // Scale initial filter weights by 1/sqrt(number of output elements).
            double stdDev = 1.0 / Math.Sqrt(OutputShape.Len);
            int    depth  = InputShape.D;

            Filters = new NNValue[filterStruct.FilterCount];
            for (int k = 0; k < filterStruct.FilterCount; k++)
            {
                Filters[k] = NNValue.Random(filterStruct.FilterH, filterStruct.FilterW, depth, stdDev, rnd);
            }
        }
Beispiel #11
0
        /// <summary>
        /// Applies <paramref name="neuron"/> to the whole tensor and, when backprop
        /// is enabled, registers a step that accumulates f'(x) * upstream-gradient
        /// into <c>m.DifData</c>.
        /// </summary>
        /// <param name="neuron">Nonlinearity providing tensor-level Forward/Backward.</param>
        /// <param name="m">Input tensor.</param>
        /// <returns>The activated tensor produced by <c>neuron.Forward</c>.</returns>
        public NNValue Nonlin(INonlinearity neuron, NNValue m)
        {
            // Fix: the previous version allocated `new NNValue(m.H, m.W)` and then
            // immediately overwrote the reference with Forward's result — a dead
            // allocation. Forward already yields the output tensor.
            NNValue returnObj = neuron.Forward(m);
            int     n         = m.DataInTensor.Length;

            if (this.ApplyBackprop)
            {
                Runnable bp = new Runnable();
                bp.Run = delegate()
                {
                    var data = neuron.Backward(m);

                    // Chain rule: dL/dx_i += f'(x)_i * dL/dy_i.
                    for (int i = 0; i < n; i++)
                    {
                        m.DifData[i] += data.DataInTensor[i] * returnObj.DifData[i];
                    }
                };
                Backprop.Add(bp);
            }
            return(returnObj);
        }
Beispiel #12
0
        /// <summary>
        /// Builds an LSTM network whose input first passes through a linear
        /// bottleneck: Linear(in→bottleneck) → LSTM stack → FeedForward decoder.
        /// The decoder layer is created with dropout 1 (i.e. keep-all), as before.
        /// </summary>
        public static NeuralNetwork MakeLstmWithInputBottleneck(int inputDimension, int bottleneckDimension, int hiddenDimension, int hiddenLayers, int outputDimension, INonlinearity decoderUnit, double initParamsStdDev, Random rng, double dropout, double inDropout)
        {
            List<ILayer> layers = new List<ILayer>();

            layers.Add(new LinearLayer(inputDimension, bottleneckDimension, initParamsStdDev, rng, dropout));

            // First LSTM consumes the bottleneck; the rest are hidden→hidden.
            int prevSize = bottleneckDimension;
            for (int h = 0; h < hiddenLayers; h++)
            {
                layers.Add(new LstmLayer(prevSize, hiddenDimension, initParamsStdDev, rng, dropout));
                prevSize = hiddenDimension;
            }

            layers.Add(new FeedForwardLayer(hiddenDimension, outputDimension, decoderUnit, initParamsStdDev, rng, 1));
            return new NeuralNetwork(layers, inputDimension, inDropout, rng);
        }
Beispiel #13
0
        /// <summary>
        /// Applies <paramref name="neuron"/> element-wise to every entry of
        /// <paramref name="m"/> (no dropout mask in this variant). When backprop is
        /// enabled, registers a step accumulating f'(x) * upstream-gradient into m.Dw.
        /// </summary>
        public Matrix Nonlin(INonlinearity neuron, Matrix m)
        {
            int    count  = m.W.Length;
            Matrix output = new Matrix(m.Rows, m.Cols);

            for (int idx = 0; idx < count; idx++)
            {
                output.W[idx] = neuron.Forward(m.W[idx]);
            }

            if (this.ApplyBackprop)
            {
                Runnable step = new Runnable();
                step.Run = delegate()
                {
                    // Chain rule: dL/dx += f'(x) * dL/dy.
                    for (int idx = 0; idx < count; idx++)
                    {
                        m.Dw[idx] += neuron.Backward(m.W[idx]) * output.Dw[idx];
                    }
                };
                Backprop.Add(step);
            }
            return output;
        }
Beispiel #14
0
 /// <summary>
 /// Wraps pre-built weights, bias, and activation into a layer — used when
 /// restoring a layer from disk rather than initializing randomly.
 /// </summary>
 public FeedForwardLayer(Matrix w, Matrix b, INonlinearity f)
 {
     _f = f;
     _w = w;
     _b = b;
 }
 /// <summary>
 /// Minimal recurrent-layer constructor: records only the output shape and
 /// hidden-unit nonlinearity; weights are expected to be supplied later.
 /// </summary>
 public RnnLayer(int outputDimension, INonlinearity hiddenUnit)
 {
     this._f     = hiddenUnit;
     OutputShape = new Shape(outputDimension);
 }
 /// <summary>
 /// Minimal feed-forward constructor: records only the output shape and
 /// activation; weights are expected to be supplied later.
 /// </summary>
 public FeedForwardLayer(int outputDimension, INonlinearity f)
 {
     _f          = f;
     OutputShape = new Shape(outputDimension);
 }
        /// <summary>
        /// Builds a plain RNN: a stack of <paramref name="hiddenLayers"/> recurrent
        /// layers followed by a feed-forward decoder.
        /// </summary>
        public static NeuralNetwork MakeRnn(int inputDimension, int hiddenDimension, int hiddenLayers, int outputDimension, INonlinearity hiddenUnit, INonlinearity decoderUnit, double initParamsStdDev, Random rng)
        {
            List<ILayer> layers = new List<ILayer>();

            // First layer consumes the raw input; the rest are hidden→hidden.
            int prevSize = inputDimension;
            for (int h = 0; h < hiddenLayers; h++)
            {
                layers.Add(new RnnLayer(prevSize, hiddenDimension, hiddenUnit, initParamsStdDev, rng));
                prevSize = hiddenDimension;
            }

            layers.Add(new FeedForwardLayer(hiddenDimension, outputDimension, decoderUnit, initParamsStdDev, rng));
            return new NeuralNetwork(layers);
        }
 /// <summary>
 /// Builds a fully-connected layer with randomly initialized weights
 /// (std dev <paramref name="initParamsStdDev"/>) and a zero bias.
 /// </summary>
 public FeedForwardLayer(int inputDimension, int outputDimension, INonlinearity f, double initParamsStdDev, Random rng)
 {
     this._f = f;
     _w = Matrix.Random(outputDimension, inputDimension, initParamsStdDev, rng);
     _b = new Matrix(outputDimension);
 }