/// <summary>
/// Builds a network from a chain of layers linked via <c>Layer.Next</c>.
/// </summary>
/// <param name="layer">The input (head) layer of an already-linked layer chain.</param>
/// <param name="optimizer">Optimizer used for parameter updates.</param>
/// <param name="lossFunction">Loss function used for training.</param>
public Network(Layer layer, IOptimizer optimizer, ILossFunction lossFunction)
{
    this.random = RandomProvider.GetRandom();
    this.inputLayer = layer;
    this.layerCollection = new Collection<Layer>();
    this.optimizer = optimizer;
    this.lossFunction = lossFunction;

    // Walk the linked chain starting at the head and register every
    // layer in order. The head is always added (do-while), matching the
    // assumption that a network has at least one layer.
    var current = layer;
    do
    {
        this.layerCollection.Add(current);
        current = current.Next;
    } while (current != null);
}
/// <summary>
/// Builds a model from a sequence of layers, wiring each consecutive pair
/// together via <c>Next</c>/<c>Previous</c> so they form a doubly-linked chain.
/// </summary>
/// <param name="collection">Layers in forward order (input first).</param>
/// <param name="optimizer">Optimizer used for parameter updates.</param>
/// <param name="lossFunction">Loss function used for training.</param>
public Model(IEnumerable<Layer> collection, IOptimizer optimizer, ILossFunction lossFunction)
{
    this.random = RandomProvider.GetRandom();
    this.layerCollection = new Collection<Layer>();
    this.optimizer = optimizer;
    this.lossFunction = lossFunction;

    // Track the previously seen layer instead of re-indexing the
    // collection; link each new layer to its predecessor.
    Layer previous = null;

    foreach (Layer current in collection)
    {
        if (previous != null)
        {
            previous.Next = current;
            current.Previous = previous;
        }

        this.layerCollection.Add(current);
        previous = current;
    }
}
/// <summary>
/// LeCun normal initialization: returns a sample from N(0, 1/fanIn).
/// </summary>
/// <param name="fanIn">Number of input units feeding the weight; must be positive.</param>
/// <returns>A normally distributed weight with standard deviation sqrt(1/fanIn).</returns>
public static double LecunNormal(int fanIn)
{
    // BUG FIX: the previous implementation returned Uniform(-1, 1) * sqrt(1/fanIn),
    // which is neither normally distributed nor of the required variance
    // (Var[U(-1,1)] = 1/3, giving 1/(3*fanIn) instead of 1/fanIn).
    // Generate a proper standard-normal deviate via the Box-Muller transform.
    var random = RandomProvider.GetRandom();
    double u1;

    do
    {
        // assumes Uniform(0.0, 1.0) yields a double in [0, 1) — reject 0 so Log is finite
        u1 = random.Uniform(0.0, 1.0);
    } while (u1 <= 0.0);

    var u2 = random.Uniform(0.0, 1.0);
    var standardNormal = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);

    return standardNormal * Math.Sqrt(1.0 / fanIn);
}
/// <summary>
/// He (Kaiming) uniform initialization: returns a sample from
/// U(-limit, limit) with limit = sqrt(6/fanIn).
/// </summary>
/// <param name="fanIn">Number of input units feeding the weight.</param>
/// <returns>A uniformly distributed weight in [-sqrt(6/fanIn), sqrt(6/fanIn)].</returns>
public static double HeUniform(int fanIn)
{
    var limit = Math.Sqrt(6.0 / fanIn);
    var random = RandomProvider.GetRandom();

    return random.Uniform(-limit, limit);
}
/// <summary>
/// Glorot (Xavier) uniform initialization: returns a sample from
/// U(-limit, limit) with limit = sqrt(6/(fanIn + fanOut)).
/// </summary>
/// <param name="fanIn">Number of input units feeding the weight.</param>
/// <param name="fanOut">Number of output units fed by the weight.</param>
/// <returns>A uniformly distributed weight in [-limit, limit].</returns>
public static double GlorotUniform(int fanIn, int fanOut)
{
    var fanSum = (double)(fanIn + fanOut);
    var limit = Math.Sqrt(6.0 / fanSum);
    var random = RandomProvider.GetRandom();

    return random.Uniform(-limit, limit);
}
/// <summary>
/// Glorot (Xavier) normal initialization: returns a sample from
/// N(0, 2/(fanIn + fanOut)).
/// </summary>
/// <param name="fanIn">Number of input units feeding the weight.</param>
/// <param name="fanOut">Number of output units fed by the weight.</param>
/// <returns>A normally distributed weight with standard deviation sqrt(2/(fanIn + fanOut)).</returns>
public static double GlorotNormal(int fanIn, int fanOut)
{
    // BUG FIX: the previous implementation returned Uniform(-1, 1) * sqrt(2/(fanIn+fanOut)),
    // which is not normally distributed and has only one third of the required
    // variance (Var[U(-1,1)] = 1/3). Generate a proper standard-normal deviate
    // via the Box-Muller transform instead.
    var random = RandomProvider.GetRandom();
    double u1;

    do
    {
        // assumes Uniform(0.0, 1.0) yields a double in [0, 1) — reject 0 so Log is finite
        u1 = random.Uniform(0.0, 1.0);
    } while (u1 <= 0.0);

    var u2 = random.Uniform(0.0, 1.0);
    var standardNormal = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);

    return standardNormal * Math.Sqrt(2.0 / (fanIn + fanOut));
}