Example #1
        public INeuralNetworkLayerUpdater CreateUpdater(INeuralNetworkLayer layer, LayerDescriptor descriptor)
        {
            // Build the base (regularisation) updater first, then wrap it in the
            // weight-update strategy selected by the descriptor.
            var primary = _CreatePrimaryUpdater(layer, descriptor.Regularisation, descriptor.Lambda);

            switch (descriptor.WeightUpdate)
            {
                case WeightUpdateType.Adagrad:
                    return _weightUpdater.Adagrad(primary);

                case WeightUpdateType.Momentum:
                    return _weightUpdater.Momentum(primary, descriptor.Momentum);

                case WeightUpdateType.NesterovMomentum:
                    return _weightUpdater.NesterovMomentum(primary, descriptor.Momentum);

                case WeightUpdateType.RMSprop:
                    return _weightUpdater.RMSprop(primary, descriptor.DecayRate);

                case WeightUpdateType.Adam:
                    return _weightUpdater.Adam(primary, descriptor.DecayRate, descriptor.DecayRate2);

                default:
                    return primary;
            }
        }
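For context, a minimal usage sketch (not taken from the source project): it assumes `factory` is the object exposing CreateUpdater, that `layer` already exists, and that LayerDescriptor exposes the fields read above as settable properties (if not, they would be supplied some other way, e.g. via its constructor).

        // Hypothetical usage of the factory method above.
        var descriptor = new LayerDescriptor {
            Regularisation = RegularisationType.L2,   // handled by _CreatePrimaryUpdater (Example #4)
            Lambda         = 1e-3f,
            WeightUpdate   = WeightUpdateType.Adam,   // selects the Adam branch of the switch
            DecayRate      = 0.9f,
            DecayRate2     = 0.99f
        };
        INeuralNetworkLayerUpdater updater = factory.CreateUpdater(layer, descriptor);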
Example #2
 protected internal IMatrix Combine(IMatrix input, IMatrix memory, INeuralNetworkLayer inputLayer, INeuralNetworkLayer memoryLayer, Func<IMatrix, IMatrix> activation)
 {
     // Pass the current input and the previous memory through their own layers,
     // sum the two results element-wise, then apply the activation function.
     using (var inputOutput = inputLayer.Execute(input))
     using (var memoryOutput = memoryLayer.Execute(memory)) {
         inputOutput.AddInPlace(memoryOutput);
         return activation(inputOutput);
     }
 }
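In other words, Combine computes activation(inputLayer(input) + memoryLayer(memory)), the usual way a recurrent cell merges the current input with its memory. A standalone sketch of the same computation with plain arrays (illustrative only, not the library's API; assumes the two layers produce vectors of equal length):

     // Same shape of computation as Combine, using float[] instead of IMatrix.
     // Requires "using System;" for Func<,>.
     static float[] CombineSketch(float[] input, float[] memory,
         Func<float[], float[]> inputLayer, Func<float[], float[]> memoryLayer,
         Func<float[], float[]> activation)
     {
         var combined   = inputLayer(input);
         var fromMemory = memoryLayer(memory);
         for (var i = 0; i < combined.Length; i++)
             combined[i] += fromMemory[i];   // element-wise sum, like AddInPlace
         return activation(combined);
     }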
Example #3
 public TiedLayer(ILinearAlgebraProvider lap, INeuralNetworkLayer layer, IWeightInitialisation weightInit)
 {
     // A tied layer shares the weight matrix of an existing layer, used transposed,
     // so its input and output sizes are swapped relative to that layer; only the
     // bias vector is newly initialised.
     _inputSize       = layer.OutputSize;
     _outputSize      = layer.InputSize;
     _layer           = layer;
     _weight          = layer.Weight;
     _bias            = lap.Create(_outputSize, x => weightInit.GetBias());
     _weightTranspose = _weight.Transpose();
 }
Example #4
        INeuralNetworkLayerUpdater _CreatePrimaryUpdater(INeuralNetworkLayer layer, RegularisationType type, float lambda)
        {
            // Choose the regularisation applied during the weight update; with no
            // regularisation a plain updater is returned.
            switch (type)
            {
                case RegularisationType.L1:
                    return _weightUpdater.L1(layer, lambda);

                case RegularisationType.L2:
                    return _weightUpdater.L2(layer, lambda);

                //case RegularisationType.MaxNorm:
                //    return _weightUpdater.MaxNorm(layer, lambda);

                default:
                    return _weightUpdater.Simple(layer);
            }
        }
Example #5
 void INeuralNetworkLayer.BuildSynapses(INeuralNetworkLayer previousLayer)
 {
     throw new NotImplementedException();
 }
Example #6
 public L1RegularisationUpdater(INeuralNetworkLayer layer, float lambda) : base(layer)
 {
     _lambda = lambda;
 }
Example #7
 public INeuralNetworkLayerUpdater L1(INeuralNetworkLayer layer, float lambda)
 {
     return new L1RegularisationUpdater(layer, lambda);
 }
Example #8
 public INeuralNetworkLayerUpdater Simple(INeuralNetworkLayer layer)
 {
     return new UpdaterBase(layer);
 }
Example #9
 public UpdaterBase(INeuralNetworkLayer layer)
 {
     _layer = layer;
 }
Example #10
        public INeuralNetworkLayerTrainer CreateTrainer(INeuralNetworkLayer layer, LayerDescriptor descriptor)
        {
            // Compose the layer's updater (regularisation + weight-update strategy),
            // then wrap it in a trainer built from the same descriptor.
            var layerUpdater = CreateUpdater(layer, descriptor);

            return _CreateLayerUpdater(layerUpdater, descriptor);
        }
Example #11
 public INeuralNetworkLayer CreateTiedLayer(INeuralNetworkLayer layer, LayerDescriptor descriptor)
 {
     return new TiedLayer(_lap, layer, _weightInitialisation[descriptor.WeightInitialisation]);
 }
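A tied layer of this kind is typically the decoder half of an autoencoder, reusing the encoder's weight matrix transposed (see Example #3). A minimal sketch, assuming `factory` exposes CreateTiedLayer as above and that `encoderLayer` and `descriptor` already exist:

     // Hypothetical: decoder layer sharing the encoder's weight matrix (transposed).
     INeuralNetworkLayer decoderLayer = factory.CreateTiedLayer(encoderLayer, descriptor);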
Example #12
 public Neuron(INeuralNetworkLayer previousLayer)
 {
     synapses = previousLayer.Neurons;
 }