Example 1
0
        /// <summary>
        /// Creates a convolutional layer with the given filter geometry.
        /// </summary>
        /// <param name="shouldBackpropagate">True to propagate the error signal to parent nodes</param>
        /// <param name="weightInitialisation">Strategy used to create the initial filter weights and bias</param>
        /// <param name="updater">Factory that creates the gradient descent optimiser for the filter matrix</param>
        /// <param name="inputDepth">Number of input channels</param>
        /// <param name="filterCount">Number of filters (output channels)</param>
        /// <param name="padding">Zero padding applied around the input</param>
        /// <param name="filterWidth">Filter width</param>
        /// <param name="filterHeight">Filter height</param>
        /// <param name="xStride">Horizontal stride</param>
        /// <param name="yStride">Vertical stride</param>
        /// <param name="name">Optional node name</param>
        public Convolutional(
            bool shouldBackpropagate,
            IWeightInitialisation weightInitialisation,
            Func<IMatrix, IGradientDescentOptimisation> updater,
            int inputDepth,
            int filterCount,
            int padding,
            int filterWidth,
            int filterHeight,
            int xStride,
            int yStride,
            string name = null) : base(name)
        {
            // Cache the convolution configuration
            _shouldBackpropagate = shouldBackpropagate;
            _inputDepth = inputDepth;
            _padding = padding;
            _filterWidth = filterWidth;
            _filterHeight = filterHeight;
            _xStride = xStride;
            _yStride = yStride;

            // One bias value per filter; each filter connects to width * height * depth inputs
            var filterSize = filterWidth * filterHeight * inputDepth;
            _bias = weightInitialisation.CreateBias(filterCount);
            _filter = weightInitialisation.CreateWeight(filterSize, filterCount);

            // The optimiser is bound to the filter matrix it will update
            _updater = updater(_filter);
        }
Example 2
0
 /// <summary>
 /// Creates a feed forward layer that shares (ties) its weights with another layer.
 /// </summary>
 /// <param name="layer">The feed forward layer whose weights are tied</param>
 /// <param name="weightInit">Strategy used to create this layer's own bias</param>
 /// <param name="name">Optional node name</param>
 public TiedFeedForward(IFeedForward layer, IWeightInitialisation weightInit, string name = null)
     : base(name)
 {
     // Keep both the layer reference and its id (the id survives serialisation)
     _layer = layer;
     _layerId = layer.Id;

     // The bias is sized to the tied layer's input size
     _bias = weightInit.CreateBias(layer.InputSize);
 }
 /// <summary>
 /// Builds the standard set of weight initialisation strategies
 /// against the given linear algebra provider.
 /// </summary>
 /// <param name="lap">Linear algebra provider used by every strategy</param>
 internal WeightInitialisationProvider(ILinearAlgebraProvider lap)
 {
     // Constant strategies (bias, weight)
     Zeroes = new Constant(lap, 0f, 0f);
     Ones = new Constant(lap, 0f, 1f);

     // Random strategies
     Gaussian = new Gaussian(lap);
     Xavier = new Xavier(lap);

     // Identity matrices at full and reduced scale
     Identity = new Identity(lap, 1f);
     Identity01 = new Identity(lap, 0.1f);
 }
Example 4
0
 /// <summary>
 /// Creates a layer whose weight matrix is the transpose of another layer's weights.
 /// </summary>
 /// <param name="lap">Linear algebra provider used to create the bias vector</param>
 /// <param name="layer">The layer whose weights are shared</param>
 /// <param name="weightInit">Strategy used to initialise this layer's bias</param>
 public TiedLayer(ILinearAlgebraProvider lap, INeuralNetworkLayer layer, IWeightInitialisation weightInit)
 {
     _layer = layer;
     _weight = layer.Weight;
     _weightTranspose = _weight.Transpose();

     // Input/output sizes are swapped relative to the source layer
     _inputSize = layer.OutputSize;
     _outputSize = layer.InputSize;

     // Only the bias is freshly initialised; the weights are shared
     _bias = lap.Create(_outputSize, x => weightInit.GetBias());
 }
Example 5
0
        /// <summary>
        /// Batch normalisation layer: learns a per-input scale (gamma) and shift (beta)
        /// and tracks running mean/variance with the given momentum.
        /// </summary>
        /// <param name="graph">Graph factory that supplies the linear algebra provider</param>
        /// <param name="inputSize">Number of inputs to normalise</param>
        /// <param name="weightInit">Currently unused; gamma/beta start from the conventional constants (1 and 0)</param>
        /// <param name="momentum">Momentum for the running statistics (default 0.9)</param>
        /// <param name="name">Optional node name</param>
        public BatchNormalisation(GraphFactory graph, int inputSize, IWeightInitialisation weightInit, float momentum = 0.9f, string name = null) : base(name)
        {
            _inputSize = inputSize;
            _momentum  = momentum;
            var lap = graph.LinearAlgebraProvider;

            // Standard initialisation: gamma = 1, beta = 0, running mean = 0, running variance = 1.
            // NOTE(review): the previous version also created (and immediately disposed) a matrix
            // via weightInit.CreateWeight without ever using it — its only consumer (matrix.Row(0)
            // for gamma) was commented out. That dead allocation has been removed; weightInit is
            // kept in the signature for compatibility with existing callers.
            _Create(graph,
                    gamma: lap.CreateVector(Enumerable.Repeat(1f, _inputSize)),
                    beta: lap.CreateVector(Enumerable.Repeat(0f, _inputSize)),
                    mean: lap.CreateVector(Enumerable.Repeat(0f, _inputSize)),
                    variance: lap.CreateVector(Enumerable.Repeat(1f, _inputSize))
                    );
        }
Example 6
0
 /// <summary>
 /// Fluently sets the weight initialisation strategy.
 /// </summary>
 /// <param name="weightInit">Weight initialisation strategy to use</param>
 /// <returns>This property set, for method chaining</returns>
 public IPropertySet Use(IWeightInitialisation weightInit)
 {
     WeightInitialisation = weightInit;
     return this;
 }
Example 7
0
        /// <summary>
        /// Standard fully connected layer: creates the weight matrix and bias vector
        /// from the supplied weight initialisation strategy.
        /// </summary>
        /// <param name="lap">Linear algebra provider used to create the parameters</param>
        /// <param name="inputSize">Number of layer inputs</param>
        /// <param name="outputSize">Number of layer outputs</param>
        /// <param name="init">Layer descriptor to retain</param>
        /// <param name="activation">Activation function applied by this layer</param>
        /// <param name="weightInit">Strategy that supplies each initial weight and bias value</param>
        public Standard(ILinearAlgebraProvider lap, int inputSize, int outputSize, LayerDescriptor init, IActivationFunction activation, IWeightInitialisation weightInit)
        {
            _activation = activation;
            _descriptor = init;

            // Weight matrix is inputSize x outputSize; bias has one value per output
            _weight = lap.Create(inputSize, outputSize, (x, y) => weightInit.GetWeight(inputSize, outputSize, x, y));
            _bias = lap.Create(outputSize, x => weightInit.GetBias());
        }