public INeuralNetworkLayerUpdater CreateUpdater(INeuralNetworkLayer layer, LayerDescriptor descriptor)
        {
            // Build the regularisation (primary) updater first, then wrap it in the
            // adaptive weight-update strategy requested by the descriptor.
            var primary = _CreatePrimaryUpdater(layer, descriptor.Regularisation, descriptor.Lambda);
            var updateType = descriptor.WeightUpdate;

            if (updateType == WeightUpdateType.Adagrad)
                return _weightUpdater.Adagrad(primary);
            if (updateType == WeightUpdateType.Momentum)
                return _weightUpdater.Momentum(primary, descriptor.Momentum);
            if (updateType == WeightUpdateType.NesterovMomentum)
                return _weightUpdater.NesterovMomentum(primary, descriptor.Momentum);
            if (updateType == WeightUpdateType.RMSprop)
                return _weightUpdater.RMSprop(primary, descriptor.DecayRate);
            if (updateType == WeightUpdateType.Adam)
                return _weightUpdater.Adam(primary, descriptor.DecayRate, descriptor.DecayRate2);

            // No adaptive strategy requested: use the primary updater directly.
            return primary;
        }
Example #2
0
 protected internal IMatrix Combine(IMatrix input, IMatrix memory, INeuralNetworkLayer inputLayer, INeuralNetworkLayer memoryLayer, Func <IMatrix, IMatrix> activation)
 {
     // Run both layers, sum their outputs element-wise, and apply the activation.
     // Both intermediate matrices are disposed before returning.
     using (var fromInput = inputLayer.Execute(input)) {
         using (var fromMemory = memoryLayer.Execute(memory)) {
             fromInput.AddInPlace(fromMemory);
             return activation(fromInput);
         }
     }
 }
Example #3
0
 public TiedLayer(ILinearAlgebraProvider lap, INeuralNetworkLayer layer, IWeightInitialisation weightInit)
 {
     // A tied layer mirrors an existing layer: its input/output sizes are
     // swapped and it shares the source layer's weight matrix (transposed).
     _layer = layer;
     _inputSize = layer.OutputSize;
     _outputSize = layer.InputSize;
     _weight = layer.Weight;
     _weightTranspose = _weight.Transpose();

     // Only the bias is freshly initialised; the weights come from the tied layer.
     _bias = lap.Create(_outputSize, _ => weightInit.GetBias());
 }
        INeuralNetworkLayerUpdater _CreatePrimaryUpdater(INeuralNetworkLayer layer, RegularisationType type, float lambda)
        {
            // Select the regularisation applied by the base weight updater.
            // NOTE: MaxNorm regularisation is not currently implemented and
            // falls through to the simple (unregularised) updater.
            if (type == RegularisationType.L1)
                return _weightUpdater.L1(layer, lambda);

            if (type == RegularisationType.L2)
                return _weightUpdater.L2(layer, lambda);

            return _weightUpdater.Simple(layer);
        }
Example #5
0
 // Explicit interface stub: this layer implementation does not build
 // per-neuron synapse objects.
 void INeuralNetworkLayer.BuildSynapses(INeuralNetworkLayer previousLayer) =>
     throw new NotImplementedException();
Example #6
0
 /// <summary>
 /// Creates an L1 (lasso) regularisation updater for the given layer.
 /// The layer is held by the base updater; only the penalty coefficient is stored here.
 /// </summary>
 public L1RegularisationUpdater(INeuralNetworkLayer layer, float lambda) : base(layer) =>
     _lambda = lambda;
Example #7
0
 /// <summary>
 /// Factory for an updater that applies L1 regularisation with the given lambda.
 /// </summary>
 public INeuralNetworkLayerUpdater L1(INeuralNetworkLayer layer, float lambda) =>
     new L1RegularisationUpdater(layer, lambda);
Example #8
0
 /// <summary>
 /// Factory for the basic, unregularised weight updater.
 /// </summary>
 public INeuralNetworkLayerUpdater Simple(INeuralNetworkLayer layer) =>
     new UpdaterBase(layer);
Example #9
0
 /// <summary>
 /// Captures the layer whose weights this updater will modify.
 /// </summary>
 public UpdaterBase(INeuralNetworkLayer layer) => _layer = layer;
        public INeuralNetworkLayerTrainer CreateTrainer(INeuralNetworkLayer layer, LayerDescriptor descriptor)
        {
            // Compose the full updater from the descriptor, then wrap it in a trainer.
            return _CreateLayerUpdater(CreateUpdater(layer, descriptor), descriptor);
        }
 public INeuralNetworkLayer CreateTiedLayer(INeuralNetworkLayer layer, LayerDescriptor descriptor)
 {
     // Look up the weight initialiser named by the descriptor, then tie the
     // new layer to the supplied one (shared, transposed weights).
     var weightInit = _weightInitialisation[descriptor.WeightInitialisation];
     return new TiedLayer(_lap, layer, weightInit);
 }
Example #12
0
 /// <summary>
 /// Connects this neuron to every neuron of the previous layer.
 /// </summary>
 public Neuron(INeuralNetworkLayer previousLayer) => synapses = previousLayer.Neurons;