Example #1
public DenseFullLayer(
    MessageShape inputMessageShape,
    int numberOfNeurons,
    bool enableBiases,
    Optimizer optimizer)
    : base(inputMessageShape, numberOfNeurons, enableBiases)
{
    // One independent optimizer clone per weight.
    _weightOptimizers = new Optimizer[inputMessageShape.Size, numberOfNeurons];
    _weightOptimizers.UpdateForEach<Optimizer>((q, i) => optimizer.Clone() as Optimizer);

    // Likewise, one independent optimizer clone per bias.
    _biasOptimizers = new Optimizer[numberOfNeurons];
    _biasOptimizers.UpdateForEach<Optimizer>((q, i) => optimizer.Clone() as Optimizer);

    // Pre-allocated dense vector sized to the layer input.
    _cache = Vector.Build.Dense(inputMessageShape.Size);
}
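
All three examples rely on an UpdateForEach extension method that is not shown on this page. Below is a minimal sketch of what such a helper could look like, assuming it walks any System.Array in flat row-major order and replaces each element with the selector's result; the signatures are inferred from the call sites above, not taken from the actual library.

using System;

public static class ArrayUpdateExtensions
{
    // Replaces every element of the array with the value returned by the selector.
    public static void UpdateForEach<T>(this Array array, Func<T, T> selector)
    {
        array.UpdateForEach<T>((element, _) => selector(element));
    }

    // Overload that also hands the flat (row-major) element index to the selector.
    public static void UpdateForEach<T>(this Array array, Func<T, int, T> selector)
    {
        for (int flat = 0; flat < array.Length; flat++)
        {
            int[] indices = ToIndices(array, flat);
            var current = (T)array.GetValue(indices);
            array.SetValue(selector(current, flat), indices);
        }
    }

    // Converts a flat row-major index into one index per dimension.
    private static int[] ToIndices(Array array, int flatIndex)
    {
        var indices = new int[array.Rank];
        for (int dim = array.Rank - 1; dim >= 0; dim--)
        {
            int length = array.GetLength(dim);
            indices[dim] = array.GetLowerBound(dim) + flatIndex % length;
            flatIndex /= length;
        }
        return indices;
    }
}

Under that row-major assumption, the (q, i) overload used in Example #1 would see i run once over every weight in the inputMessageShape.Size x numberOfNeurons grid.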
Example #2
public static Optimizer[] InitializeBiasOptimizers(
    int numberOfkernels,
    Optimizer optimizer)
{
    var optimizers = new Optimizer[numberOfkernels];

    // Fill the array with one independent clone of the template optimizer per kernel bias.
    optimizers.UpdateForEach<Optimizer>(q => optimizer.Clone() as Optimizer);

    return optimizers;
}
Example #3
public static Optimizer[][,,] InitializeKernelOptimizers(
    int depth,
    int numberOfkernels,
    int kernelSize,
    Optimizer optimizer)
{
    // Jagged outer array: one 3D block (depth x kernelSize x kernelSize) per kernel.
    var optimizers = new Optimizer[numberOfkernels][,,];

    optimizers.UpdateForEach<Optimizer[,,]>(q => new Optimizer[depth, kernelSize, kernelSize]);

    // Fill every 3D block with independent clones of the template optimizer.
    optimizers.ForEach(q => q.UpdateForEach<Optimizer>(w => optimizer.Clone() as Optimizer));

    return optimizers;
}
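
Example #3 additionally calls a ForEach extension to visit each 3D block of the jagged outer array. A minimal sketch under the same assumption that these helpers are plain array extensions (again inferred from the call site, not taken from the library):

using System;
using System.Collections.Generic;

public static class ArrayForEachExtensions
{
    // Invokes the action once for every element; elements are not replaced.
    public static void ForEach<T>(this IEnumerable<T> source, Action<T> action)
    {
        foreach (var item in source)
        {
            action(item);
        }
    }
}

Since Optimizer[][,,] is a one-dimensional array of Optimizer[,,] blocks, it implements IEnumerable<Optimizer[,,]>, so this extension applies directly. The resulting structure is indexed as optimizers[kernel][d, r, c], where d runs over depth and r, c over kernelSize, giving every kernel weight its own optimizer clone.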