示例#1
0
 private Mutator(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer, MutationConfigurationSettings config)
 {
     // Private: callers obtain instances via the static factory method.
     _config = config;
     _weightInitializer = weightInitializer;
     _networkFactory = networkFactory;
     _random = new Random();
 }
示例#2
0
 private Mutator(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer, MutationConfigurationSettings config)
 {
     // Capture the injected collaborators and create this mutator's RNG.
     _networkFactory = networkFactory;
     _config = config;
     _weightInitializer = weightInitializer;
     _random = new Random();
 }
示例#3
0
        public RadialBasisHiddenLayer(
            int hiddenNeuronCount,
            int inputVectorDimension,
            int outputVectorDimension,
            ICenterInitializer centerInitializer,
            IRadiusInitializer radiusInitializer,
            IWeightInitializer weightInitializer,
            IHiddenLayerInitializer hiddenLayerInitializer,
            bool isOffsetNeuron,
            IRBFActivationFunction activationFunction)
        {
            // Record the layer geometry.
            HiddenNeuronCount = hiddenNeuronCount;
            InputVectorDimension = inputVectorDimension;
            OutputVectorDimension = outputVectorDimension;
            IsOffsetNeuron = isOffsetNeuron;

            ActivationFunction = activationFunction;

            // Delegate population of centers, radii, weights and the offset-neuron
            // weight to the supplied initializer strategies.
            hiddenLayerInitializer.Initialize(
                inputVectorDimension,
                outputVectorDimension,
                hiddenNeuronCount,
                isOffsetNeuron,
                centerInitializer,
                radiusInitializer,
                weightInitializer,
                ref Centers,
                ref Radiuses,
                ref Weights,
                out OffsetNeuronWeight);
        }
示例#4
0
File: Network.cs  Project: koryakinp/cnn
 public Network(NetworkConfiguration networkConfig)
 {
     _networkConfig = networkConfig;
     _weightInitializer = new WeightInitializer();
     _layers = new List<Layer>();
     // Resolve the cost function up front from the configured type.
     _costFunction = CostFunctionFactory.Produce(_networkConfig.CostFunctionType);
 }
示例#5
0
 // Convenience overload: resolves the activation function by name, then
 // defers to the IActivationFunction-based constructor.
 public Conv2D(int filters,
               int[] kernel_size = null,
               int[] strides = null,
               PaddingType padding = PaddingType.Valid,
               DataFormatType? data_format = null,
               int[] dilation_rate = null,
               string activation = null,
               bool use_bias = true,
               IWeightInitializer kernel_initializer = null,
               IWeightInitializer bias_initializer = null,
               IWeightRegularizer kernel_regularizer = null,
               IWeightRegularizer bias_regularizer = null,
               IWeightRegularizer activity_regularizer = null,
               IWeightConstraint kernel_constraint = null,
               IWeightConstraint bias_constraint = null,
               int?[] input_shape = null)
     : this(filters: filters,
            kernel_size: kernel_size,
            strides: strides,
            padding: padding,
            data_format: data_format,
            dilation_rate: dilation_rate,
            activation: Activation.Create(activation),
            use_bias: use_bias,
            kernel_initializer: kernel_initializer,
            bias_initializer: bias_initializer,
            kernel_regularizer: kernel_regularizer,
            bias_regularizer: bias_regularizer,
            activity_regularizer: activity_regularizer,
            kernel_constraint: kernel_constraint,
            bias_constraint: bias_constraint,
            input_shape: input_shape)
 {
 }
示例#6
0
 private Layer(string id, int length = 1, bool biased = true,
               IActivationFunction aFunc = null, IWeightFunction wFunc = null,
               IInputFunction iFunc = null, IWeightInitializer wInit = null)
 {
     this.id     = id;
     this.length = length;
     this.biased = biased;

     // A null argument means "keep the field's current default"; only
     // non-null strategies replace the existing ones.
     this.aFunc = aFunc ?? this.aFunc;
     this.wFunc = wFunc ?? this.wFunc;
     this.iFunc = iFunc ?? this.iFunc;
     this.wInit = wInit ?? this.wInit;

     if (biased)
     {
         // Biases come from the configured initializer when one is set,
         // otherwise from the zero initializer.
         this.biases = (this.wInit != null)
             ? this.wInit.Initialize(this.length)
             : Utils.WeightInitializers.InitZero.Initialize(this.length);
     }
 }
示例#7
0
File: Kernel.cs  Project: koryakinp/cnn
        public void RandomizeWeights(IWeightInitializer weightInitializer)
        {
            // Fan-in across all three kernel dimensions.
            int fanIn = Weights.GetLength(0) * Weights.GetLength(1) * Weights.GetLength(2);

            // Scale random draws by 1/sqrt(fan-in).
            double magnitude = Math.Sqrt(1.0 / fanIn);
            Weights.ForEach((i, j, k) => Weights[i, j, k] = weightInitializer.GenerateRandom(magnitude));
        }
示例#8
0
        /// <summary>
        /// Initializes a new instance of the <see cref="Dense"/> class.
        /// </summary>
        ///
        /// <param name="units">Positive integer, dimensionality of the output space.</param>
        /// <param name="activation">The activation function to use.</param>
        /// <param name="use_bias">Whether the layer uses a bias vector.</param>
        /// <param name="kernel_initializer">Initializer for the kernel weights; <see cref="GlorotUniform"/> when null.</param>
        /// <param name="bias_initializer">Initializer for the bias vector; <see cref="Zeros"/> when null.</param>
        /// <param name="kernel_regularizer">Regularizer applied to the kernel weights.</param>
        /// <param name="bias_regularizer">Regularizer applied to the bias vector.</param>
        /// <param name="activity_regularizer">Regularizer applied to the layer output.</param>
        /// <param name="kernel_constraint">Constraint applied to the kernel weights.</param>
        /// <param name="bias_constraint">Constraint applied to the bias vector.</param>
        /// <param name="input_dim">The input dim.</param>
        /// <param name="input_shape">The input shape.</param>
        /// <param name="batch_input_shape">The batch input shape.</param>
        ///
        public Dense(int units, IActivationFunction activation = null, bool use_bias = true,
                     IWeightInitializer kernel_initializer     = null, IWeightInitializer bias_initializer = null,
                     IWeightRegularizer kernel_regularizer     = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null,
                     IWeightConstraint kernel_constraint       = null, IWeightConstraint bias_constraint   = null,
                     int?input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null)
            : base(input_dim: input_dim, input_shape: input_shape, batch_input_shape: batch_input_shape)
        {
            // Fall back to the standard defaults when no initializers are supplied.
            if (kernel_initializer == null)
            {
                kernel_initializer = new GlorotUniform();
            }
            if (bias_initializer == null)
            {
                bias_initializer = new Zeros();
            }

            this.units = units;
            this.activation = activation;
            this.use_bias = use_bias;
            this.kernel_initializer = kernel_initializer;
            this.bias_initializer = bias_initializer;
            this.kernel_regularizer = kernel_regularizer;
            this.bias_regularizer = bias_regularizer;
            this.activity_regularizer = activity_regularizer;
            this.kernel_constraint = kernel_constraint;
            this.bias_constraint = bias_constraint;

            // Dense layers accept inputs with at least two dimensions.
            this.input_spec.Add(new InputSpec(min_ndim: 2));
            this.supports_masking = true;
        }
 protected ErrorBackPropagationStepsBase(IWeightSetter weightSetter, IWeightChangeApplier weightChangeApplier, IPerceptronUnderTraining perceptronUnderTraining, double learningRate, double momentum)
 {
     // Reject invalid learning rates before wiring anything up.
     CheckLearningRate(learningRate);

     Perceptron = perceptronUnderTraining;
     _weightInitializer = new WeightInitializer(Perceptron.Network, weightSetter: weightSetter);
     _errorValueBackPropagator = new ErrorValueBackPropagator(Perceptron.Network);
     _weightChangeBackPropagator = new WeightChangeBackPropagator(weightChangeApplier, Perceptron.Network, learningRate, momentum);
 }
示例#10
0
 private NeuralNetworkFactory(ISomaFactory somaFactory, IAxonFactory axonFactory, ISynapseFactory hiddenSynapseFactory, ISynapseFactory inputOutputSynapseFactory, IWeightInitializer biasInitializer)
 {
     // Keep references to all injected factories and initializers.
     _somaFactory = somaFactory;
     _axonFactory = axonFactory;
     _inputOutputSynapseFactory = inputOutputSynapseFactory;
     _hiddenSynapseFactory = hiddenSynapseFactory;
     _biasInitiliazer = biasInitializer;
 }
 private NeuralNetworkFactory(ISomaFactory somaFactory, IAxonFactory axonFactory, ISynapseFactory hiddenSynapseFactory, ISynapseFactory inputOutputSynapseFactory, IWeightInitializer biasInitializer)
 {
     // Store the injected collaborators for later network construction.
     _biasInitiliazer           = biasInitializer;
     _somaFactory               = somaFactory;
     _axonFactory               = axonFactory;
     _hiddenSynapseFactory      = hiddenSynapseFactory;
     _inputOutputSynapseFactory = inputOutputSynapseFactory;
 }
示例#12
0
 public AutoencoderWeights(int PNumLayers, RBMLayer[] PLayers, IWeightInitializer PWInitializer)
 {
     // One weight set connects each pair of adjacent layers.
     numweightsets = PNumLayers - 1;
     weights = new RBMWeightSet[numweightsets];
     for (int layer = 0; layer < numweightsets; layer++)
     {
         weights[layer] = new RBMWeightSet(PLayers[layer].Count, PLayers[layer + 1].Count, PWInitializer);
     }
 }
 private void InitBiases(IWeightInitializer PWInitializer)
 {
     // Assign a freshly drawn bias to every neuron in every layer.
     for (int layer = 0; layer < numlayers; layer++)
     {
         for (int neuron = 0; neuron < layers[layer].Count; neuron++)
         {
             layers[layer].SetBias(neuron, PWInitializer.InitializeBias());
         }
     }
 }
示例#14
0
 private void InitWeights(IWeightInitializer PWeightInit)
 {
     // Build the [visible x hidden] weight set; presumably the RBMWeightSet
     // constructor populates it from PWeightInit -- see that class.
     weights = new RBMWeightSet(numvisibles, numhiddens, PWeightInit);
     // NOTE(review): the loop below overwrites every weight with a Gaussian
     // N(0, 0.1) sample, so the injected PWeightInit currently has no effect
     // on the final weights here -- confirm that this is intentional.
     for (int i = 0; i < numvisibles; i++)
     {
         for (int j = 0; j < numhiddens; j++)
         {
             weights.SetWeight(i, j, Utility.NextGaussian(0, 0.1));
         }
     }
 }
 internal Autoencoder(List <RBMLayer> PLayers, AutoencoderLearningRate PTrainingInfo,
                      IWeightInitializer PWInitializer)
 {
     numlayers = PLayers.Count;
     layers = PLayers.ToArray();
     learnrate = PTrainingInfo;

     // Two independent weight stacks, both seeded from the same initializer.
     recognitionweights = new AutoencoderWeights(numlayers, layers, PWInitializer);
     generativeweights = new AutoencoderWeights(numlayers, layers, PWInitializer);

     errorobservers = new List<IErrorObserver>();
     InitBiases(PWInitializer);
     InitTrainingData();
 }
示例#16
0
        /// <summary>
        /// Builds a <see cref="Layer"/> from the key/value parameters attached to a
        /// parse-tree node. Recognised keys: id (required), length, biased,
        /// activationFunction, weightFunction, inputFunction, weightInitializer.
        /// </summary>
        /// <param name="node">Parse-tree node whose first child holds the layer parameters.</param>
        /// <returns>A tuple of the created object's type and the layer instance.</returns>
        /// <exception cref="Exception">Thrown when no "id" parameter is present, or a
        /// named function/initializer does not exist on its holder type.</exception>
        private Tuple <Type, object> EvaluateCreateLayer(ParseTreeNode node)
        {
            var layerParams = EvaluateKeyValuePair(node.ChildNodes[0]);

            int  length = 1;
            bool biased = true;
            IActivationFunction aFunc = null;
            IInputFunction      iFunc = null;
            IWeightFunction     wFunc = null;
            IWeightInitializer  wInit = null;

            if (!layerParams.ContainsKey("id"))
            {
                throw new Exception("ID is necessary to initialize a layer!");
            }
            if (layerParams.ContainsKey("length"))
            {
                length = Int32.Parse(layerParams["length"]);
            }
            if (layerParams.ContainsKey("biased"))
            {
                biased = bool.Parse(layerParams["biased"]);
            }
            if (layerParams.ContainsKey("activationFunction"))
            {
                aFunc = (IActivationFunction)GetRequiredStaticField(
                    typeof(ActivationFunctions), layerParams["activationFunction"]);
            }
            if (layerParams.ContainsKey("weightFunction"))
            {
                wFunc = (IWeightFunction)GetRequiredStaticField(
                    typeof(WeightFunctions), layerParams["weightFunction"]);
            }
            if (layerParams.ContainsKey("inputFunction"))
            {
                iFunc = (IInputFunction)GetRequiredStaticField(
                    typeof(InputFunctions), layerParams["inputFunction"]);
            }
            if (layerParams.ContainsKey("weightInitializer"))
            {
                wInit = (IWeightInitializer)GetRequiredStaticField(
                    typeof(WeightInitializers), layerParams["weightInitializer"]);
            }

            var layer = Layer.Create(layerParams["id"], length: length, aFunc: aFunc,
                                     wFunc: wFunc, iFunc: iFunc, wInit: wInit, biased: biased);

            return(new Tuple <Type, object>(typeof(Layer), layer));
        }

        /// <summary>
        /// Looks up a public static field by name and returns its value. A missing
        /// field previously surfaced as a NullReferenceException; failing with the
        /// holder type and field name makes bad script input diagnosable.
        /// </summary>
        private static object GetRequiredStaticField(Type holder, string fieldName)
        {
            var field = holder.GetField(fieldName);
            if (field == null)
            {
                throw new Exception("Unknown " + holder.Name + " field: " + fieldName);
            }
            return field.GetValue(null);
        }
示例#17
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Dense"/> class, resolving the
 /// activation function from its name before delegating to the
 /// <see cref="IActivationFunction"/>-based constructor.
 /// </summary>
 ///
 /// <param name="units">Positive integer, dimensionality of the output space.</param>
 /// <param name="activation">Name of the activation function to use.</param>
 /// <param name="use_bias">Whether the layer uses a bias vector.</param>
 /// <param name="kernel_initializer">Initializer for the kernel weights.</param>
 /// <param name="bias_initializer">Initializer for the bias vector.</param>
 /// <param name="kernel_regularizer">Regularizer applied to the kernel weights.</param>
 /// <param name="bias_regularizer">Regularizer applied to the bias vector.</param>
 /// <param name="activity_regularizer">Regularizer applied to the layer output.</param>
 /// <param name="kernel_constraint">Constraint applied to the kernel weights.</param>
 /// <param name="bias_constraint">Constraint applied to the bias vector.</param>
 /// <param name="input_dim">The input dim.</param>
 /// <param name="input_shape">The input shape.</param>
 /// <param name="batch_input_shape">The batch input shape.</param>
 ///
 public Dense(int units, string activation, bool use_bias = true,
              IWeightInitializer kernel_initializer = null, IWeightInitializer bias_initializer = null,
              IWeightRegularizer kernel_regularizer = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null,
              IWeightConstraint kernel_constraint = null, IWeightConstraint bias_constraint = null,
              int? input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null)
     : this(units: units, activation: Activation.Create(activation), use_bias: use_bias,
            kernel_initializer: kernel_initializer, bias_initializer: bias_initializer,
            kernel_regularizer: kernel_regularizer, bias_regularizer: bias_regularizer, activity_regularizer: activity_regularizer,
            kernel_constraint: kernel_constraint, bias_constraint: bias_constraint,
            input_dim: input_dim, input_shape: input_shape, batch_input_shape: batch_input_shape)
 {
 }
示例#18
0
        public Neuron(
            IActivator activator,
            IWeightInitializer weightInitializer,
            int numberOfConnections,
            LearningRateAnnealerType lrat)
        {
            _activator = activator;

            // One learning-rate annealer per incoming connection, plus one for the bias.
            _learningRateAnnealers = new ILearningRateAnnealer[numberOfConnections];
            _learningRateAnnealers.ForEach((q, i) => _learningRateAnnealers[i] = LearningRateAnnealerFactory.Produce(lrat));
            _biasLearningRateAnnealer = LearningRateAnnealerFactory.Produce(lrat);

            // Draw initial weights scaled by 1/sqrt(fan-in).
            double magnitude = 1 / Math.Sqrt(numberOfConnections);
            Weights = new double[numberOfConnections];
            Weights.ForEach((q, i) => Weights[i] = weightInitializer.GenerateRandom(magnitude));
        }
示例#19
0
 public RBMWeightSet(int PPreSynapticLayerSize, int PPostSynapticLayerSize, IWeightInitializer PWeightInit)
 {
     presize = PPreSynapticLayerSize;
     postsize = PPostSynapticLayerSize;

     // Jagged [pre][post] matrices: weights start from the initializer,
     // accumulated weight changes start at zero.
     weights = new double[presize][];
     weightchanges = new double[presize][];
     for (int pre = 0; pre < presize; pre++)
     {
         weights[pre] = new double[postsize];
         weightchanges[pre] = new double[postsize];
         Utility.ZeroArray(weightchanges[pre]);
         for (int post = 0; post < postsize; post++)
         {
             weights[pre][post] = PWeightInit.InitializeWeight();
         }
     }
 }
示例#20
0
        public void InitializeWeights(IWeightInitializer weightInitializer)
        {
            // Each container seeds its own layer at most once.
            if (!isWeightInitialized)
            {
                isWeightInitialized = true;
                var weightable = CurrentLayer as IWeightInitializable;
                if (weightable != null)
                {
                    weightable.SetWeights(weightInitializer);
                }
            }

            // Propagate the same initializer into every downstream container.
            for (int i = 0; i < OutputLayers.Count; i++)
            {
                var child = OutputLayers[i] as LayerContainer;
                if (child != null)
                {
                    child.InitializeWeights(weightInitializer);
                }
            }
        }
示例#21
0
        /// <summary>
        /// Fills this layer's weight and bias buffers using the supplied initializer.
        /// </summary>
        /// <param name="weightInitializer">Source of weight and bias values; it is
        /// told the weight matrix's column and row counts for each weight draw.</param>
        public void SetWeights(IWeightInitializer weightInitializer)
        {
            // Weights are written directly into the backing memory.
            // (The previous version also allocated an unused float[Weights.Rows].)
            for (int j = 0; j < Weights.Columns * Weights.Rows; j++)
            {
                Weights.Memory[j] = (float)weightInitializer.GetWeight(Weights.Columns, Weights.Rows);
            }

            // Biases are staged in a temporary array because Biases exposes a bulk Write.
            float[] b_ws = new float[Biases.Rows];
            for (int i = 0; i < b_ws.Length; i++)
            {
                b_ws[i] = weightInitializer.GetBias();
            }

            Biases.Write(b_ws);
        }
        public INeuralNetworkFactory BuildBackpropagationNetworkFactory(IWeightInitializer weightInitializer,
                                                                        ISomaFactory somaFactory,
                                                                        IActivationFunctionDerivative activationFunctionDerivative,
                                                                        IActivationFunction inputActivationFunction,
                                                                        INeuronFactory neuronFactory)
        {
            // Hidden synapses use the caller-supplied initializer; input synapses
            // are pinned to a constant weight of 1.0.
            var axonFactory = BackpropagationAxonFactory.GetInstance(activationFunctionDerivative);
            var hiddenSynapseFactory = DecoratedSynapseFactory.GetInstance(
                weightInitializer, AxonFactory.GetInstance(activationFunctionDerivative));
            var inputSynapseFactory = DecoratedSynapseFactory.GetInstance(
                new ConstantWeightInitializer(1.0), AxonFactory.GetInstance(inputActivationFunction));
            var decoratedNeuronFactory = BackpropagationNeuronFactory.GetInstance(neuronFactory);

            INeuralNetworkFactory factory = NeuralNetworkFactory.GetInstance(
                somaFactory, axonFactory, hiddenSynapseFactory, inputSynapseFactory,
                weightInitializer, decoratedNeuronFactory);

            // Wrap the plain factory so it produces backpropagation-capable networks.
            return new BackpropagationNetworkFactoryDecorator(factory);
        }
示例#23
0
        public FullyConnectedLayer(
            IActivator activator,
            int numberOfNeurons,
            int numberOfNeuronsInPreviouseLayer,
            int layerIndex,
            IWeightInitializer weightInitializer,
            LearningRateAnnealerType lrat)
            : base(layerIndex)
        {
            _numberOfNeuronsInPreviouseLayer = numberOfNeuronsInPreviouseLayer;

            // Every neuron is fully connected to the previous layer.
            var neurons = new List<Neuron>();
            for (int n = 0; n < numberOfNeurons; n++)
            {
                neurons.Add(new Neuron(activator, weightInitializer, numberOfNeuronsInPreviouseLayer, lrat));
            }

            Neurons = new List<Neuron>(neurons);
        }
示例#24
0
        public ConvolutionalLayer(int nk, int ks, int li, FilterMeta ifm, IWeightInitializer wi, LearningRateAnnealerType lrat)
            : base(li, ifm)
        {
            _numberOfKernels = nk;
            _kernelSize = ks;

            // Build and randomize one kernel per requested filter.
            var kernels = new List<Kernel>();
            for (int n = 0; n < _numberOfKernels; n++)
            {
                var kernel = new Kernel(ks, ifm.Channels, lrat);
                kernel.RandomizeWeights(wi);
                kernels.Add(kernel);
            }

            _kernels = new List<Kernel>(kernels);
            _inputeFm = ifm;
            _outputFm = GetOutputFilterMeta();
            _featureMaps = new double[_outputFm.Channels, _outputFm.Size, _outputFm.Size];
        }
示例#25
0
 // 2-D convolution layer: fixes rank = 2 and constrains inputs to 4-D tensors.
 public Conv2D(int filters,
               int[] kernel_size = null,
               int[] strides = null,
               PaddingType padding = PaddingType.Valid,
               DataFormatType? data_format = null,
               int[] dilation_rate = null,
               IActivationFunction activation = null,
               bool use_bias = true,
               IWeightInitializer kernel_initializer = null,
               IWeightInitializer bias_initializer = null,
               IWeightRegularizer kernel_regularizer = null,
               IWeightRegularizer bias_regularizer = null,
               IWeightRegularizer activity_regularizer = null,
               IWeightConstraint kernel_constraint = null,
               IWeightConstraint bias_constraint = null,
               int?[] input_shape = null)
     : base(rank: 2,
            filters: filters,
            kernel_size: kernel_size,
            strides: strides,
            padding: padding,
            data_format: data_format,
            dilation_rate: dilation_rate,
            activation: activation,
            use_bias: use_bias,
            kernel_initializer: kernel_initializer,
            bias_initializer: bias_initializer,
            kernel_regularizer: kernel_regularizer,
            bias_regularizer: bias_regularizer,
            activity_regularizer: activity_regularizer,
            kernel_constraint: kernel_constraint,
            bias_constraint: bias_constraint,
            input_shape: input_shape)
 {
     this.input_spec = new List<InputSpec>
     {
         new InputSpec(ndim: 4)
     };
 }
示例#26
0
        /// <summary>
        /// Initializes a new instance of the <see cref="Dense"/> class.
        /// </summary>
        ///
        /// <param name="units">Positive integer, dimensionality of the output space.</param>
        /// <param name="input_dim">The input dim.</param>
        /// <param name="batch_input_shape">The batch input shape.</param>
        /// <param name="input_shape">The input shape.</param>
        /// <param name="activation">The activation function to use.</param>
        /// <param name="use_bias">Whether the layer uses a bias vector.</param>
        /// <param name="kernel_initializer">Initializer for the kernel weights; <see cref="GlorotUniform"/> when null.</param>
        /// <param name="bias_initializer">Initializer for the bias vector; <see cref="Zeros"/> when null.</param>
        /// <param name="kernel_regularizer">Regularizer applied to the kernel weights.</param>
        /// <param name="bias_regularizer">Regularizer applied to the bias vector.</param>
        /// <param name="activity_regularizer">Regularizer applied to the layer output.</param>
        /// <param name="kernel_constraint">Constraint applied to the kernel weights.</param>
        /// <param name="bias_constraint">Constraint applied to the bias vector.</param>
        /// <param name="spectral_norm_iteration">Number of spectral-normalization iterations to use
        /// (the constructor only stores this value; how it is consumed is not visible here).</param>
        ///
        public Dense(int units, IActivationFunction activation = null, bool use_bias = true,
                     IWeightInitializer kernel_initializer     = null, IWeightInitializer bias_initializer = null,
                     IWeightRegularizer kernel_regularizer     = null, IWeightRegularizer bias_regularizer = null, IWeightRegularizer activity_regularizer = null,
                     IWeightConstraint kernel_constraint       = null, IWeightConstraint bias_constraint   = null,
                     int?input_dim = null, int?[] input_shape = null, int?[] batch_input_shape = null, int spectral_norm_iteration = 0)
            : base(input_dim: input_dim, input_shape: input_shape, batch_input_shape: batch_input_shape)
        {
            // https://github.com/fchollet/keras/blob/f65a56fb65062c8d14d215c9f4b1015b97cc5bf3/keras/layers/core.py#L791

            //if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            //    kwargs['input_shape'] = (kwargs.pop('input_dim'),)

            // Default to the standard initializers when none are supplied.
            if (bias_initializer == null)
            {
                bias_initializer = new Zeros();
            }
            if (kernel_initializer == null)
            {
                kernel_initializer = new GlorotUniform();
            }

            this.units                = units;
            this.activation           = activation;
            this.use_bias             = use_bias;
            this.kernel_initializer   = kernel_initializer;
            this.bias_initializer     = bias_initializer;
            this.kernel_regularizer   = kernel_regularizer;
            this.bias_regularizer     = bias_regularizer;
            this.activity_regularizer = activity_regularizer;
            this.kernel_constraint    = kernel_constraint;
            this.bias_constraint      = bias_constraint;

            // Dense layers accept inputs with at least two dimensions.
            this.input_spec = new List <InputSpec>();
            this.input_spec.Add(new InputSpec(min_ndim: 2));
            this.supports_masking        = true;
            this.spectral_norm_iteration = spectral_norm_iteration;
        }
示例#27
0
 public static ISynapseFactory GetInstance(IWeightInitializer weightInitializer, IAxonFactory axonFactory)
 {
     // Simple factory method over the private constructor.
     ISynapseFactory synapseFactory = new SynapseFactory(weightInitializer, axonFactory);
     return synapseFactory;
 }
示例#28
0
        /// <summary>
        /// Create a network with random weights and biases
        /// </summary>
        /// <param name="layerConfig">The layer configuration containing the number of neurons in each layer in this order: [input layer][1st hidden layer][2nd hidden layer]...[nth hidden layer][output layer]</param>
        /// <param name="activationFunction">The activation function to use</param>
        /// <param name="weightInitializer">The weight and bias initializer to use; defaults to <see cref="DefaultWeightInitializer"/> when null</param>
        /// <returns>The constructed network</returns>
        public static Network CreateNetworkInitRandom(int[] layerConfig, IActivationFunction activationFunction, IWeightInitializer weightInitializer = null)
        {
            if (weightInitializer == null)
            {
                weightInitializer = new DefaultWeightInitializer();
            }

            // Sizes are known up front, so presize every list to avoid regrowth.
            var inputLayers = new List <List <Tuple <List <float>, float> > >(Math.Max(0, layerConfig.Length - 1));

            // Layer 0 is the input layer and carries no incoming weights.
            for (int layId = 1; layId < layerConfig.Length; ++layId)
            {
                int prevLayerSize = layerConfig[layId - 1];
                int layerSize     = layerConfig[layId];
                var neuronList    = new List <Tuple <List <float>, float> >(layerSize);
                for (int i = 0; i < layerSize; i++)
                {
                    // One weight per neuron in the previous layer, plus a bias.
                    var weights = new List <float>(prevLayerSize);
                    for (int j = 0; j < prevLayerSize; ++j)
                    {
                        weights.Add(weightInitializer.GetRandomWeight(prevLayerSize));
                    }
                    neuronList.Add(new Tuple <List <float>, float>(weights, weightInitializer.GetRandomBias()));
                }
                inputLayers.Add(neuronList);
            }

            return(CreateNetwork(inputLayers, activationFunction));
        }
示例#29
0
 public static NeuralNetworkFactory GetInstance(ISomaFactory somaFactory, IAxonFactory axonFactory, ISynapseFactory hiddenSynapseFactory, ISynapseFactory inputOutputSynapseFactory, IWeightInitializer biasInitializer, INeuronFactory neuronFactory)
 {
     // Factory method wrapping the private six-argument constructor.
     return new NeuralNetworkFactory(somaFactory, axonFactory, hiddenSynapseFactory, inputOutputSynapseFactory, biasInitializer, neuronFactory);
 }
示例#30
0
 public static ISynapseFactory GetInstance(IWeightInitializer weightInitializer, IAxonFactory axonFactory)
 {
     // Hide the concrete SynapseFactory behind its interface.
     var factory = new SynapseFactory(weightInitializer, axonFactory);
     return factory;
 }
 public static NeuralNetworkFactory GetInstance(ISomaFactory somaFactory, IAxonFactory axonFactory, ISynapseFactory hiddenSynapseFactory, ISynapseFactory inputOutputSynapseFactory, IWeightInitializer biasInitializer)
 {
     // Factory method over the private five-argument constructor.
     return new NeuralNetworkFactory(
         somaFactory, axonFactory, hiddenSynapseFactory, inputOutputSynapseFactory, biasInitializer);
 }
        ////////////////////////////////////////////////////////////////////////////////////////////////////
        /// <summary>
        /// Initializes a new instance of the Autoencoders.AutoencoderWeights class.
        /// One weight set is created per pair of adjacent layers, so PNumLayers
        /// layers yield PNumLayers - 1 weight sets.
        /// </summary>
        ///
        /// <param name="PNumLayers">       Number of layers. </param>
        /// <param name="PLayers">          The layers. </param>
        /// <param name="PWInitializer">    The weight initializer. </param>
        ////////////////////////////////////////////////////////////////////////////////////////////////////

        public AutoencoderWeights(int PNumLayers, RestrictedBoltzmannMachineLayer[] PLayers, IWeightInitializer PWInitializer)
        {
            numweightsets = PNumLayers - 1;
            weights       = new RestrictedBoltzmannMachineWeightSet[numweightsets];
            for (int i = 0; i < numweightsets; i++)
            {
                weights[i] = new RestrictedBoltzmannMachineWeightSet(PLayers[i].Count, PLayers[i + 1].Count, PWInitializer);
            }
        }
示例#33
0
 private SynapseFactory(IWeightInitializer weightInitializer, IAxonFactory axonFactory)
 {
     // Keep references to the injected collaborators.
     _axonFactory = axonFactory;
     _weightInitializer = weightInitializer;
 }
示例#34
0
 private SynapseFactory(IWeightInitializer weightInitializer, IAxonFactory axonFactory)
 {
     // Private: store the injected collaborators.
     _weightInitializer = weightInitializer;
     _axonFactory       = axonFactory;
 }
        ////////////////////////////////////////////////////////////////////////////////////////////////////
        /// <summary>
        /// Initializes a new instance of the Autoencoders.RestrictedBoltzmannMachineWeightSet class.
        /// Allocates the [pre x post] weight and weight-change matrices; weights are
        /// drawn from the initializer and weight changes start at zero.
        /// </summary>
        ///
        /// <param name="PPreSynapticLayerSize">    Size of the pre synaptic layer. </param>
        /// <param name="PPostSynapticLayerSize">   Size of the post synaptic layer. </param>
        /// <param name="PWeightInit">              The weight initializer. </param>
        ////////////////////////////////////////////////////////////////////////////////////////////////////

        public RestrictedBoltzmannMachineWeightSet(int PPreSynapticLayerSize, int PPostSynapticLayerSize, IWeightInitializer PWeightInit)
        {
            preSize       = PPreSynapticLayerSize;
            postSize      = PPostSynapticLayerSize;
            weights       = new double[preSize][];
            weightChanges = new double[preSize][];
            for (int i = 0; i < preSize; i++)
            {
                weights[i]       = new double[postSize];
                weightChanges[i] = new double[postSize];
                // Accumulated changes start at zero; weights come from the initializer.
                Utility.SetArrayToZero(weightChanges[i]);
                for (int j = 0; j < postSize; j++)
                {
                    weights[i][j] = PWeightInit.InitializeWeight();
                }
            }
        }
示例#36
0
 public static IMutator GetInstance(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer, MutationConfigurationSettings config)
 {
     // Factory method over the private Mutator constructor.
     IMutator mutator = new Mutator(networkFactory, weightInitializer, config);
     return mutator;
 }
示例#37
0
 public static IBreederFactory GetInstance(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer)
 {
     // Factory method over the private BreederFactory constructor.
     IBreederFactory breederFactory = new BreederFactory(networkFactory, weightInitializer);
     return breederFactory;
 }
示例#38
0
 private BreederFactory(INeuralNetworkFactory networkFactory, IWeightInitializer weightInitializer)
 {
     // Keep the injected collaborators.
     _weightInitializer = weightInitializer;
     _networkFactory = networkFactory;
 }