/// <summary>
        ///   Gets the total number of free parameters (weights and biases) in a network.
        /// </summary>
        private static int getNumberOfParameters(ActivationNetwork network)
        {
            int count = 0;

            // Every neuron contributes one weight per input plus one bias term.
            foreach (var layer in network.Layers)
            {
                foreach (var neuron in layer.Neurons)
                    count += neuron.InputsCount + 1;
            }

            return count;
        }
        /// <summary>
        ///   Initializes a new instance of the <see cref="LevenbergMarquardtLearning"/> class.
        /// </summary>
        ///
        /// <param name="network">Network to teach.</param>
        /// <param name="useRegularization">True to use Bayesian regularization, false otherwise.</param>
        /// <param name="method">The method by which the Jacobian matrix will be calculated.</param>
        ///
        /// <exception cref="NotImplementedException">
        ///   Thrown when <paramref name="method"/> is not <see cref="JacobianMethod.ByBackpropagation"/>.
        /// </exception>
        ///
        public LevenbergMarquardtLearning(ActivationNetwork network, bool useRegularization, JacobianMethod method)
        {
            this.ParallelOptions = new ParallelOptions();
            this.network = network;
            this.numberOfParameters = getNumberOfParameters(network);
            this.outputCount = network.Layers[network.Layers.Length - 1].Neurons.Length;

            this.useBayesianRegularization = useRegularization;
            this.method = method;

            // Allocate the solver's working storage: the flattened weight
            // vector, the square Hessian approximation, its diagonal, the
            // gradient, and one Jacobian row per parameter.
            this.weights = new float[numberOfParameters];
            this.diagonal = new float[numberOfParameters];
            this.gradient = new float[numberOfParameters];
            this.jacobian = new float[numberOfParameters][];
            this.hessian = new float[numberOfParameters][];
            for (int row = 0; row < numberOfParameters; row++)
                this.hessian[row] = new float[numberOfParameters];

            if (method == JacobianMethod.ByBackpropagation)
            {
                // The backpropagation path needs per-layer buffers holding
                // the derivative of the error w.r.t. every weight and every
                // neuron threshold (bias).
                int layerCount = network.Layers.Length;
                this.weightDerivatives = new float[layerCount][][];
                this.thresholdsDerivatives = new float[layerCount][];

                for (int li = 0; li < layerCount; li++)
                {
                    ActivationLayer layer = (ActivationLayer)network.Layers[li];
                    int neuronCount = layer.Neurons.Length;

                    this.weightDerivatives[li] = new float[neuronCount][];
                    this.thresholdsDerivatives[li] = new float[neuronCount];

                    for (int ni = 0; ni < neuronCount; ni++)
                        this.weightDerivatives[li][ni] = new float[layer.InputsCount];
                }
            }
            else
            {
                // Only the backpropagation Jacobian is currently supported.
                throw new NotImplementedException("Finite difference method is not implemented");
            }
        }
        // Example 3
        /// <summary>
        ///   Serializes the network's dimensions and layers into a JSON object
        ///   that <c>DeserializeFromJson</c> can read back.
        /// </summary>
        internal static JObject SerializeToJson(ActivationNetwork network)
        {
            // NOTE: the key casing ("InputsCount" vs "layersCount") is uneven
            // but intentional — the deserializer reads exactly these keys.
            JObject result = new JObject();
            result["InputsCount"] = network.inputsCount;
            result["layersCount"] = network.layersCount;

            JArray layers = new JArray();
            foreach (Layer layer in network.layers)
            {
                layers.Add(ActivationLayer.SerializeToJson((ActivationLayer)layer));
            }

            result["Layers"] = layers;
            return result;
        }
        // Example 4
        /// <summary>
        ///   Reconstructs an <see cref="ActivationNetwork"/> from the JSON
        ///   shape produced by <c>SerializeToJson</c>.
        /// </summary>
        internal static ActivationNetwork DeserializeFromJson(JObject jnetwork)
        {
            var network = new ActivationNetwork();

            network.inputsCount = jnetwork["InputsCount"].ToObject<int>();
            network.layersCount = jnetwork["layersCount"].ToObject<int>();

            network.layers = new Layer[network.layersCount];
            int index = 0;
            foreach (JObject jlayer in jnetwork["Layers"].Children<JObject>())
            {
                network.layers[index] = ActivationLayer.DeserializeFromJson(jlayer);
                index++;
            }

            // Restore a sigmoid activation — presumably applied to every
            // layer by SetActivationFunction (activation is not serialized).
            network.SetActivationFunction(new SigmoidFunction());
            return network;
        }
 /// <summary>
 ///   Initializes a new instance of the <see cref="LevenbergMarquardtLearning"/> class.
 /// </summary>
 ///
 /// <param name="network">Network to teach.</param>
 /// <param name="method">The method by which the Jacobian matrix will be calculated.</param>
 ///
 /// <remarks>Bayesian regularization is disabled by default.</remarks>
 ///
 public LevenbergMarquardtLearning(ActivationNetwork network, JacobianMethod method) :
     this(network, false, method)
 {
 }
 /// <summary>
 ///   Initializes a new instance of the <see cref="LevenbergMarquardtLearning"/> class.
 /// </summary>
 ///
 /// <param name="network">Network to teach.</param>
 /// <param name="useRegularization">True to use Bayesian regularization, false otherwise.</param>
 ///
 /// <remarks>The Jacobian is computed by backpropagation by default.</remarks>
 ///
 public LevenbergMarquardtLearning(ActivationNetwork network, bool useRegularization)
     : this(network, useRegularization, JacobianMethod.ByBackpropagation)
 {
 }
 /// <summary>
 ///   Initializes a new instance of the <see cref="LevenbergMarquardtLearning"/> class.
 /// </summary>
 ///
 /// <param name="network">Network to teach.</param>
 ///
 /// <remarks>
 ///   Uses backpropagation for the Jacobian and no Bayesian regularization.
 /// </remarks>
 ///
 public LevenbergMarquardtLearning(ActivationNetwork network)
     : this(network, false, JacobianMethod.ByBackpropagation)
 {
 }
 // Example 8
 /// <summary>
 ///   Serializes the given network to its JSON string representation.
 /// </summary>
 ///
 /// <param name="network">The network to serialize.</param>
 ///
 /// <returns>The JSON text produced by <c>SerializeToJson</c>.</returns>
 ///
 public static string Serialize(ActivationNetwork network)
 {
     string json = SerializeToJson(network).ToString();
     return json;
 }