예제 #1
0
        /// <summary>
        /// Builds a network from the head of an already-linked layer chain,
        /// collecting every layer reachable through <see cref="Layer.Next"/>.
        /// </summary>
        /// <param name="layer">Head (input) layer of a linked layer chain; must not be null.</param>
        /// <param name="optimizer">Optimizer used during training; must not be null.</param>
        /// <param name="lossFunction">Loss function used during training; must not be null.</param>
        /// <exception cref="ArgumentNullException">Any argument is null.</exception>
        public Network(Layer layer, IOptimizer optimizer, ILossFunction lossFunction)
        {
            // Validate up front: the do-while below dereferences layer.Next,
            // so a null layer would otherwise surface as an opaque
            // NullReferenceException mid-traversal.
            if (layer == null)
            {
                throw new ArgumentNullException(nameof(layer));
            }

            if (optimizer == null)
            {
                throw new ArgumentNullException(nameof(optimizer));
            }

            if (lossFunction == null)
            {
                throw new ArgumentNullException(nameof(lossFunction));
            }

            this.random          = RandomProvider.GetRandom();
            this.inputLayer      = layer;
            this.layerCollection = new Collection <Layer>();
            this.optimizer       = optimizer;
            this.lossFunction    = lossFunction;

            // Walk the chain from the head, appending each layer until the
            // tail (Next == null) is reached.
            do
            {
                this.layerCollection.Add(layer);
                layer = layer.Next;
            } while (layer != null);
        }
예제 #2
0
        /// <summary>
        /// Builds a model from an ordered sequence of layers, wiring each
        /// layer's <see cref="Layer.Previous"/>/<see cref="Layer.Next"/>
        /// links to form a doubly-linked chain in sequence order.
        /// </summary>
        /// <param name="collection">Layers in forward order; must not be null.</param>
        /// <param name="optimizer">Optimizer used during training; must not be null.</param>
        /// <param name="lossFunction">Loss function used during training; must not be null.</param>
        /// <exception cref="ArgumentNullException">Any argument is null.</exception>
        public Model(IEnumerable <Layer> collection, IOptimizer optimizer, ILossFunction lossFunction)
        {
            // Validate up front so a null argument fails here with a clear
            // exception instead of a NullReferenceException in the loop or
            // much later during training.
            if (collection == null)
            {
                throw new ArgumentNullException(nameof(collection));
            }

            if (optimizer == null)
            {
                throw new ArgumentNullException(nameof(optimizer));
            }

            if (lossFunction == null)
            {
                throw new ArgumentNullException(nameof(lossFunction));
            }

            this.random          = RandomProvider.GetRandom();
            this.layerCollection = new Collection <Layer>();
            this.optimizer       = optimizer;
            this.lossFunction    = lossFunction;

            foreach (Layer layer in collection)
            {
                // Link the new layer to the current tail (if any) before
                // appending it, keeping Previous/Next consistent.
                if (this.layerCollection.Count > 0)
                {
                    var previousLayer = this.layerCollection[this.layerCollection.Count - 1];

                    previousLayer.Next = layer;
                    layer.Previous     = previousLayer;
                }

                this.layerCollection.Add(layer);
            }
        }
예제 #3
0
 /// <summary>
 /// LeCun-style weight initialization: a U(-1, 1) sample scaled by
 /// sqrt(1 / fanIn).
 /// </summary>
 /// <param name="fanIn">Number of input units feeding the weight.</param>
 /// <returns>A random initial weight in (-sqrt(1/fanIn), sqrt(1/fanIn)).</returns>
 public static double LecunNormal(int fanIn)
 {
     // NOTE(review): despite the "Normal" name this draws from a uniform
     // distribution, giving variance 1/(3*fanIn) rather than the 1/fanIn of
     // canonical LeCun normal — confirm whether a Gaussian sampler was intended.
     var scale = Math.Sqrt(1.0 / fanIn);

     return RandomProvider.GetRandom().Uniform(-1, 1) * scale;
 }
예제 #4
0
        /// <summary>
        /// He (Kaiming) uniform weight initialization: samples uniformly from
        /// (-limit, limit) with limit = sqrt(6 / fanIn).
        /// </summary>
        /// <param name="fanIn">Number of input units feeding the weight.</param>
        /// <returns>A random initial weight in (-sqrt(6/fanIn), sqrt(6/fanIn)).</returns>
        public static double HeUniform(int fanIn)
        {
            var limit = Math.Sqrt(6.0 / fanIn);

            return RandomProvider.GetRandom().Uniform(-limit, limit);
        }
예제 #5
0
        /// <summary>
        /// Glorot (Xavier) uniform weight initialization: samples uniformly
        /// from (-limit, limit) with limit = sqrt(6 / (fanIn + fanOut)).
        /// </summary>
        /// <param name="fanIn">Number of input units feeding the weight.</param>
        /// <param name="fanOut">Number of output units fed by the weight.</param>
        /// <returns>A random initial weight in (-limit, limit).</returns>
        public static double GlorotUniform(int fanIn, int fanOut)
        {
            var limit = Math.Sqrt(6.0 / (fanIn + fanOut));

            return RandomProvider.GetRandom().Uniform(-limit, limit);
        }
예제 #6
0
 /// <summary>
 /// Glorot-style weight initialization: a U(-1, 1) sample scaled by
 /// sqrt(2 / (fanIn + fanOut)).
 /// </summary>
 /// <param name="fanIn">Number of input units feeding the weight.</param>
 /// <param name="fanOut">Number of output units fed by the weight.</param>
 /// <returns>A random initial weight in (-scale, scale).</returns>
 public static double GlorotNormal(int fanIn, int fanOut)
 {
     // NOTE(review): despite the "Normal" name this draws from a uniform
     // distribution, giving variance 2/(3*(fanIn+fanOut)) rather than the
     // 2/(fanIn+fanOut) of canonical Glorot normal — confirm intent.
     var scale = Math.Sqrt(2.0 / (fanIn + fanOut));

     return RandomProvider.GetRandom().Uniform(-1, 1) * scale;
 }