Example #1
        public IGradientDescentOptimisation Create(IGradientDescentOptimisation prev, IMatrix template, IPropertySet propertySet)
        {
            // zero-filled caches with the same shape as the weight matrix being optimised
            var cache  = propertySet.LinearAlgebraProvider.CreateZeroMatrix(template.RowCount, template.ColumnCount);
            var cache2 = propertySet.LinearAlgebraProvider.CreateZeroMatrix(template.RowCount, template.ColumnCount);

            return new Adam(_decay, _decay2, cache, cache2, prev);
        }
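Both caches are zero matrices shaped like the weight matrix they will update, and the new Adam instance wraps the previously configured optimisation (prev); the constructors below follow the same chaining pattern, taking an existing updater and passing it on to their base class.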
Example #2
        public override void ReadFrom(GraphFactory factory, BinaryReader reader)
        {
            var lap = factory?.LinearAlgebraProvider;

            InputSize  = reader.ReadInt32();
            OutputSize = reader.ReadInt32();

            // read the bias parameters
            var bias = FloatVector.ReadFrom(reader);

            if (Bias == null)
            {
                Bias = lap.CreateVector(bias);
            }
            else
            {
                Bias.Data = bias;
            }

            // read the weight parameters
            var weight = FloatMatrix.ReadFrom(reader);

            if (Weight == null)
            {
                Weight = lap.CreateMatrix(weight);
            }
            else
            {
                Weight.Data = weight;
            }

            // create the weight updater if one has not already been attached
            if (_updater == null)
            {
                _updater = factory?.CreateWeightUpdater(Weight);
            }
        }
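ReadFrom reuses any existing Bias and Weight objects by assigning their Data rather than recreating them, so an already-built graph can be reloaded in place; a weight updater is only created when none has been attached yet.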
Example #3
        public Convolutional(
            bool shouldBackpropagate,
            IWeightInitialisation weightInitialisation,
            Func<IMatrix, IGradientDescentOptimisation> updater,
            int inputDepth,
            int filterCount,
            int padding,
            int filterWidth,
            int filterHeight,
            int xStride,
            int yStride,
            string name = null) : base(name)
        {
            _shouldBackpropagate = shouldBackpropagate;
            _padding             = padding;
            _filterWidth         = filterWidth;
            _filterHeight        = filterHeight;
            _xStride             = xStride;
            _yStride             = yStride;
            _inputDepth          = inputDepth;

            // one bias value per filter; each filter column flattens a filterWidth x filterHeight x inputDepth kernel
            _bias    = weightInitialisation.CreateBias(filterCount);
            _filter  = weightInitialisation.CreateWeight(_filterWidth * _filterHeight * _inputDepth, filterCount);
            _updater = updater(_filter);
        }
Example #4
 public FeedForward(int inputSize, int outputSize, IVector bias, IMatrix weight, IGradientDescentOptimisation updater, string name = null) : base(name)
 {
     _bias       = bias;
     _weight     = weight;
     _updater    = updater;
     _inputSize  = inputSize;
     _outputSize = outputSize;
 }
Example #5
        public virtual IGradientDescentOptimisation Create(IGradientDescentOptimisation prev,
                                                           IMatrix template, IPropertySet propertySet)
        {
            // zero-filled cache with the same shape as the weight matrix being optimised
            var cache = propertySet.LinearAlgebraProvider.CreateZeroMatrix(template.RowCount, template.ColumnCount);

            return new Momentum(_momentum, cache, prev);
        }
Example #6
        public virtual void ReadFrom(GraphFactory factory, BinaryReader reader)
        {
            var rows    = reader.ReadInt32();
            var columns = reader.ReadInt32();

            _cache   = factory.LinearAlgebraProvider.CreateZeroMatrix(rows, columns);
            _updater = factory.CreateGradientDescentOptimisation(reader);
        }
Example #7
 public DropConnect(float dropOutPercentage, int inputSize, int outputSize, IVector bias,
                    IMatrix weight, bool stochastic, IGradientDescentOptimisation updater, string name = null) :
     base(inputSize, outputSize, bias, weight, updater, name)
 {
     _dropOutPercentage = dropOutPercentage;
     // a fixed seed (Random(0)) keeps the drop mask repeatable when not running stochastically
     _probabilityToDrop = stochastic
         ? new Bernoulli(_dropOutPercentage)
         : new Bernoulli(_dropOutPercentage, new Random(0));
 }
Example #8
        public override void ReadFrom(GraphFactory factory, BinaryReader reader)
        {
            var lap = factory?.LinearAlgebraProvider;

            _padding             = reader.ReadInt32();
            _filterWidth         = reader.ReadInt32();
            _filterHeight        = reader.ReadInt32();
            _xStride             = reader.ReadInt32();
            _yStride             = reader.ReadInt32();
            _inputDepth          = reader.ReadInt32();
            _shouldBackpropagate = reader.ReadBoolean();

            // read the bias parameters
            var bias = FloatVector.ReadFrom(reader);

            if (_bias == null)
            {
                _bias = lap.CreateVector(bias);
            }
            else
            {
                _bias.Data = bias;
            }

            // read the weight parameters
            var weight = FloatMatrix.ReadFrom(reader);

            if (_filter == null)
            {
                _filter = lap.CreateMatrix(weight);
            }
            else
            {
                _filter.Data = weight;
            }

            // read the updater
            if (_updater == null)
            {
                _updater = factory?.CreateWeightUpdater(_filter);
            }
        }
Example #9
 public NesterovMomentum(float momentum, IMatrix cache, IGradientDescentOptimisation updater) :
     base(momentum, cache, updater) { }
Example #10
 public RmsProp(float decayRate, IMatrix cache, IGradientDescentOptimisation updater) : base(cache, updater)
 {
     _decayRate = decayRate;
 }
Example #11
 internal static void Write(this BinaryWriter writer, IGradientDescentOptimisation optimisation)
 {
     // record the concrete type name so the optimisation can be re-created during deserialisation
     writer.Write(optimisation.GetType().AssemblyQualifiedName);
     optimisation.WriteTo(writer);
 }
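The extension writes the optimisation's assembly-qualified type name ahead of its own serialised state; the read side in Example #6 calls factory.CreateGradientDescentOptimisation(reader), which presumably uses that stored type name to instantiate the matching class before handing it the reader.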
Example #12
 public Adam(float decay, float decay2, IMatrix cache, IMatrix cache2,
             IGradientDescentOptimisation updater) : base(decay, cache, updater)
 {
     _decayRate2 = decay2;
     _cache2     = cache2;
 }
Example #13
 public IPropertySet Use(IGradientDescentOptimisation optimisation)
 {
     GradientDescent = optimisation;
     return this;
 }
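Pulling the pieces together, a minimal usage sketch (propertySet, template and an existing prev optimisation are assumed to come from the surrounding graph setup, and the decay rates are illustrative rather than values taken from the library):

 // sketch only: propertySet, template and prev are assumed from the surrounding setup;
 // the 0.9f / 0.99f decay rates are illustrative values
 var lap    = propertySet.LinearAlgebraProvider;
 var cache  = lap.CreateZeroMatrix(template.RowCount, template.ColumnCount);
 var cache2 = lap.CreateZeroMatrix(template.RowCount, template.ColumnCount);

 var adam = new Adam(0.9f, 0.99f, cache, cache2, prev); // wraps the existing optimisation (Example #12)
 propertySet.Use(adam);                                 // registers it on the property set (Example #13)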
Example #14
 public Momentum(float momentum, IMatrix cache, IGradientDescentOptimisation updater) : base(cache, updater)
 {
     _momentum = momentum;
 }
Example #15
 public AdaGrad(IMatrix cache, IGradientDescentOptimisation updater)
 {
     _cache   = cache;
     _updater = updater;
 }