public void should_back_propagate_correctly()
        {
            // Arrange
            var sigmoid = new Sigmoid(new Size(2, 2));

            var trainingRun = new TrainingRun(1)
            {
                Input = new float[]
                {
                    0.31f, 0.61f, 0.27f, 0.19f
                },
                OutputError = new float[]
                {
                    0.25f * 0.61f + -0.15f * 0.02f,
                    0.25f * 0.96f + -0.15f * 0.23f,
                    0.25f * 0.82f + -0.15f * -0.50f,
                    0.25f * -1.00f + -0.15f * 0.17f
                }
            };

            var expected = new float[, ]
            {
                { 0.0364182f, 0.068628f },
                { 0.04675125f, -0.06818625f }
            };

            // Act
            sigmoid.BackPropagate(trainingRun);

            // Assert
            var actual         = (trainingRun.InputError.ToMatrix() * 100).PointwiseRound() / 100;
            var expectedMatrix = (Matrix <float> .Build.DenseOfArray(expected) * 100).PointwiseRound() / 100;

            Assert.That(actual, Is.EqualTo(expectedMatrix));
        }
Example #2
        public void should_feed_forward_correctly()
        {
            // Arrange
            var sigmoid = new Sigmoid(new Size(2, 2));

            var expected = new float[]
            {
                0.58f, 0.65f, 0.57f, 0.55f
            }.ToVector();

            // Act
            var actual = sigmoid.FeedForwards(new float[, ]
            {
                { 0.31f, 0.27f },
                { 0.61f, 0.19f },
            });

            // Assert
            Assert.That(actual.Size.Dimensions.Length, Is.EqualTo(2));
            Assert.That(actual.Size.Dimensions[0], Is.EqualTo(2));
            Assert.That(actual.Size.Dimensions[1], Is.EqualTo(2));

            actual = (actual.ToMatrix() * 100).PointwiseRound() / 100;
            Assert.That(actual.Value, Is.EqualTo(expected));
        }
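
A note on the two tests above: both follow directly from the sigmoid definition. A minimal sketch of the math they assume (the helper names here are illustrative, not the library's API):

    // Hypothetical helpers, assuming the standard logistic sigmoid.
    static float SigmoidValue(float x) => 1f / (1f + (float)Math.Exp(-x));

    // sigma'(x) = sigma(x) * (1 - sigma(x))
    static float SigmoidDerivative(float x)
    {
        var s = SigmoidValue(x);
        return s * (1f - s);
    }

    // Feed-forward squashes each element independently: 1/(1 + e^-0.31) ≈ 0.58.
    // Back-propagation scales each output error by the derivative at the forward
    // input: (0.25 * 0.61 + -0.15 * 0.02) * SigmoidDerivative(0.31f) ≈ 0.036,
    // which matches expected[0, 0] (0.0364182f) at the test's two-decimal rounding.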
Example #3
        private void Setup(int[] aLayers)
        {
            var xLayers = aLayers;

            //Initialize
            mNetwork = new List <Neuron[]>(xLayers.Length);

            var activationFunction = new Sigmoid();

            foreach (var xCount in xLayers)
            {
                var neurons = new Neuron[xCount];
                for (int i = 0; i < xCount; i++)
                {
                    neurons[i] = new Neuron(activationFunction);
                }
                mNetwork.Add(neurons);
            }

            //Create fully connected network
            Neuron[] neuronLayer = null;
            //MLP Input -> Hidden -> MLP Output
            for (int i = 0; i < xLayers.Length - 1; i++)
            {
                neuronLayer = mNetwork[i];
                var nextLayer = mNetwork[i + 1];
                foreach (var neuron in neuronLayer)
                {
                    foreach (var connectedNeuron in nextLayer)
                    {
                        neuron.Connect(connectedNeuron);
                    }
                }
            }
        }
Example #4
        /// <summary>Processes a single network layer</summary>
        /// <param name="LayerWeights">Matrix of transfer (weight) coefficients</param>
        /// <param name="Offset">Vector of neuron offsets</param>
        /// <param name="OffsetWeight">Vector of offset weight coefficients</param>
        /// <param name="Input">Input vector</param>
        /// <param name="Output">Vector of neuron output values</param>
        /// <param name="Activation">Activation function of the layer (Sigmoid is used if none is given)</param>
        /// <param name="State">Vector of activation-function inputs</param>
        private static void ProcessLayer(
            [NotNull] double[,] LayerWeights,
            [NotNull] double[] Offset,
            [NotNull] double[] OffsetWeight,
            Span <double> Input,
            Span <double> Output,
            [CanBeNull] ActivationFunction Activation = null,
            [CanBeNull] double[] State = null)
        {
            // Compute X_next = f(Net = W * X + Wo * O)
            var layer_outputs_count = LayerWeights.GetLength(0);
            var layer_inputs_count  = LayerWeights.GetLength(1);

            for (var output_index = 0; output_index < layer_outputs_count; output_index++)
            {
                var output = Offset[output_index] * OffsetWeight[output_index];
                for (var input_index = 0; input_index < layer_inputs_count; input_index++)
                {
                    output += LayerWeights[output_index, input_index] * Input[input_index];
                }
                if (State != null)
                {
                    State[output_index] = output;
                }
                Output[output_index] = Activation?.Value(output) ?? Sigmoid.Activation(output);
            }
        }
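
A minimal usage sketch for ProcessLayer, with made-up numbers for a layer of two neurons and three inputs (all values below are illustrative only):

    var weights = new double[2, 3] { { 0.1, 0.2, 0.3 }, { 0.4, 0.5, 0.6 } };
    var offset  = new double[] { 1.0, 1.0 };
    var offsetW = new double[] { 0.05, -0.05 };
    var input   = new double[] { 1.0, 0.5, -0.5 };
    var output  = new double[2];

    // With Activation left null the layer falls back to Sigmoid.Activation, so
    // output[0] = sigmoid(1.0 * 0.05 + 0.1 * 1.0 + 0.2 * 0.5 + 0.3 * -0.5).
    ProcessLayer(weights, offset, offsetW, input, output);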
Example #5
        private IKernel getKernel()
        {
            IKernel kernel;

            if (rbGaussian.Checked)
            {
                kernel = new Gaussian((double)numSigma.Value);
            }
            else if (rbPolynomial.Checked)
            {
                kernel = new Polynomial((int)numDegree.Value, (double)numSigAlpha.Value);
            }
            else if (rbLaplacian.Checked)
            {
                kernel = new Laplacian((double)numLaplacianSigma.Value);
            }
            else if (rbSigmoid.Checked)
            {
                kernel = new Sigmoid((double)numSigAlpha.Value, (double)numSigB.Value);
            }
            else
            {
                throw new InvalidOperationException("No kernel type selected.");
            }

            return(kernel);
        }
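
In this snippet Sigmoid is a kernel function rather than an activation: for Accord.NET-style kernels it computes k(x, y) = tanh(alpha * <x, y> + c). A hypothetical call site, assuming the usual IKernel.Function(double[], double[]) signature:

    double[] x = { 1.0, 2.0 };
    double[] y = { 0.5, -1.0 };
    IKernel kernel = new Sigmoid(0.1, 0.0);  // alpha = 0.1, constant c = 0
    double k = kernel.Function(x, y);        // tanh(0.1 * (1*0.5 + 2*-1) + 0)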
Example #6
    private void _ThrowingManagement()
    {
        if (_thrower.IsReady())
        {
            List <TrajectoryData> collectedData = _thrower.CollectTrajecoryData();

            if (collectedData.Count > 0)
            {
                float throwDistance;
                var   directionToLearn = dataProcessor.findOptimalThrowDirection(collectedData, target, LastThrowDirection, out throwDistance);

                _deviationFactor = (float)Sigmoid.Output((double)throwDistance * 0.5 - 2) * 0.4f;

                var ppDirToLearn = (directionToLearn + Vector3.one) * 0.5f;

                if (!_first)
                {
                    neuralService.NetAdaptation(_lastThrowPosition, target.GetTargetCords(), ppDirToLearn);
                }
            }
            var throwPosition        = _thrower.GetThrowPosition();
            var calculatedDirection  = neuralService.CalculateThrowDirection(throwPosition, target.GetTargetCords());
            var ppDir = calculatedDirection * 2 - Vector3.one;

            _thrower.DataGatteringThrow(ppDir, _deviationFactor);

            LastThrowDirection = ppDir;
            _lastThrowPosition = throwPosition;
            _first             = false;
        }
    }
Example #7
        static void Main(string[] args)
        {
            // create an instance of the activation function
            Sigmoid sigmoid = new Sigmoid();

            network = new NNetwork(sigmoid, new int[] { 2, 4, 2 });

            TrainNetwork();

            TestNetwork();

            Console.WriteLine();

            Console.WriteLine("Save Load Test");

            NNetworkSaver saver = new NNetworkSaver(network);

            saver.SaveNetwork("network.nwk");

            NNetworkLoader loader = new NNetworkLoader("network.nwk", new ConsoleLogger());

            network = loader.LoadNNetwork(sigmoid);

            TestNetwork();

            Console.ReadLine();
        }
Example #8
 public void Propagate(double[] values)
 {
     if (values.Length != this.InputLayer.NeuronsCount)
     {
         throw new Exception("Number of passed values does not match with number of input neurons");
     }
     // passing values to input neurons
     for (int i = 0; i < values.Length; i++)
     {
         this.InputLayer.Neurons[i].Value = values[i];
     }
     // passing values to hidden layers and the output layer
     for (int layerIndex = 1; layerIndex < this.LayersCount; layerIndex++)
     {
         Layer currentLayer  = this.Layers[layerIndex];
         Layer previousLayer = this.Layers[layerIndex - 1];
         for (int currentLayerNeuronIndex = 0; currentLayerNeuronIndex < currentLayer.NeuronsCount; currentLayerNeuronIndex++)
         {
             Neuron currentLayerNeuron = currentLayer.Neurons[currentLayerNeuronIndex];
             currentLayerNeuron.Value = currentLayerNeuron.Bias;
             for (int previousLayerNeuronIndex = 0; previousLayerNeuronIndex < previousLayer.NeuronsCount; previousLayerNeuronIndex++)
             {
                 Neuron previousLayerNeuron = previousLayer.Neurons[previousLayerNeuronIndex];
                 currentLayerNeuron.Value += previousLayerNeuron.Value * previousLayerNeuron.NextDendrites[currentLayerNeuronIndex].Weight;
             }
             currentLayerNeuron.Value = Sigmoid.Count(currentLayerNeuron.Value);
         }
     }
 }
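
Each non-input neuron therefore computes the textbook weighted sum, value_j = Sigmoid.Count(bias_j + sum_i value_i * weight_ij), with the weights stored on the previous layer's outgoing dendrites (NextDendrites) rather than on the receiving neuron.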
Example #9
 public void Visit(Sigmoid sigmoid)
 {
     AddToQueue(sigmoid);
     sigmoid.Arg.Accept(this);
     sigmoid.Mid.Accept(this);
 }
Example #10
        public static IActivationFunction GetActivationFunction(ActivationFunction activationFunction)
        {
            IActivationFunction result;

            switch (activationFunction)
            {
            case ActivationFunction.ELU:
                result = new ELU();
                break;

            case ActivationFunction.ReLU:
                result = new ReLU();
                break;

            case ActivationFunction.Sigmoid:
                result = new Sigmoid();
                break;

            case ActivationFunction.None:
                result = null;
                break;

            case ActivationFunction.Swish:
                result = new Swish();
                break;

            default:
                result = new ReLU();
                break;
            }
            return(result);
        }
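
Two details of this factory worth noting: ActivationFunction.None deliberately maps to null, so callers must treat "no activation" as a valid result, and any unrecognized enum value falls back to ReLU. A hypothetical call site:

    IActivationFunction activation = GetActivationFunction(ActivationFunction.Sigmoid);  // new Sigmoid()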
Example #11
        /// <summary>
        /// The log-likelihood of the Weibull distribution on censored and uncensored arrays
        /// with features.
        /// </summary>
        /// <param name="w">The matrix of parameters.</param>
        /// <param name="fSamples">The features corresponding to the organic recoveries.
        /// Number of rows should be same as this.OrganicRecoveryDurations.Length</param>
        /// <param name="fCensored">The features corresponding to the reboots.
        /// Number of rows should be the same as this.InorganicRecoveryDurations.Length</param>
        /// <returns>The log-likelihood of the data along with features.</returns>
        public double LogLikelihood(Matrix <double> w, Matrix <double> fSamples,
                                    Matrix <double> fCensored)
        {
            List <double> t      = this.OrganicRecoveryDurations;
            List <double> x      = this.InorganicRecoveryDurations;
            double        lik    = 0;
            Sigmoid       sShape = new Sigmoid(this.ShapeUpperBound);
            Sigmoid       sScale = new Sigmoid(this.ScaleUpperBound);

            for (int i = 0; i < fSamples.RowCount; i++)
            {
                Vector <double> currentRow = fSamples.Row(i);
                Vector <double> theta      = w.Multiply(currentRow);
                double          shape      = sShape.Transform(theta[0]);
                double          scale      = sScale.Transform(theta[1]);
                lik += this.LogPdf(t.ElementAt(i), shape, scale);
            }

            for (int i = 0; i < fCensored.RowCount; i++)
            {
                Vector <double> currentRow = fCensored.Row(i);
                Vector <double> theta      = w.Multiply(currentRow);
                double          shape      = sShape.Transform(theta[0]);
                double          scale      = sScale.Transform(theta[1]);
                lik += this.LogSurvival(x.ElementAt(i), shape, scale);
            }

            return(lik);
        }
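
This is the standard likelihood for censored data: uncensored durations contribute log-pdf terms and censored ones log-survival terms, log L = sum_i log f(t_i | shape_i, scale_i) + sum_j log S(x_j | shape_j, scale_j), where each row's shape and scale come from squashing the linear predictor theta = w * x through sigmoids bounded by ShapeUpperBound and ScaleUpperBound.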
Example #12
            public NDArray ForwardPass(NDArray input)
            {
                //input has size numDims x inputFeats
                //output has size numDims x outputFeats
                //weights has size inputFeats x outputFeats

                //TODO create weights, create biases,

                //number of input feats
                int num_input  = input.shape[1];
                int num_hidden = 16;
                int num_output = 1;

                var sigmoid = new Sigmoid();
                var tanh    = new Tanh();

                var fc1 = new Linear(num_input, num_hidden);
                var x   = fc1.Apply(input);

                x = tanh.Apply(x);

                var fc2 = new Linear(num_hidden, num_hidden);

                x = fc2.Apply(x);
                x = tanh.Apply(x);

                var fc3 = new Linear(num_hidden, num_output);

                x = fc3.Apply(x);
                x = sigmoid.Apply(x);

                return(x);
            }
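
Note that the Linear layers are constructed inside ForwardPass itself, so fresh weights are created on every call; as the TODO hints, fc1 through fc3 would normally be built once (in a constructor) and reused so they can actually be trained.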
Example #13
        public void BackpropagatesErrors()
        {
            var last   = new Layer(3);
            var middle = new Layer(3, last, new Sigmoid());
            var first  = new Layer(3, middle);

            last.Errors  = Vec.Build.DenseOfArray(new double[] { -0.2, 0.1, 0.5 });
            last.Weights = Matrix.Build.DenseOfArray(new double[3, 3] {
                { 1, 2, 3 },
                { 4, 5, 6 },
                { 7, 8, 9 }
            });
            last.Biases = Vec.Build.Dense(3, 10);

            middle.Weights = Matrix.Build.DenseIdentity(3, 3);
            middle.Biases  = Vec.Build.Dense(3, 0);

            first.Activations = Vec.Build.DenseOfArray(new double[] { 1, 2, 3 });
            middle.Activate();
            var sigmaPrimeOfZs = new Sigmoid().ActivatePrime(Vec.Build.DenseOfArray(new double[] { 1, 2, 3 }));
            var expectedErrors = Vec.Build.DenseOfArray(new double[] { 3.7, 4.1, 4.5 }).PointwiseMultiply(sigmaPrimeOfZs);

            var errors = middle.Backpropagate();

            Assert.Equal(expectedErrors, errors);
        }
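
The expected value encodes the standard backpropagation identity, errors_middle = (W_last^T * errors_last) ⊙ sigma'(z_middle): multiplying the transpose of last.Weights by the errors (-0.2, 0.1, 0.5) gives (3.7, 4.1, 4.5), which the test then multiplies pointwise by the sigmoid derivative at the middle layer's weighted inputs.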
Example #14
 public static Word[] Predict(IEnumerable <Word> Model, float[] Re, int max)
 {
     Word[] best = new Word[max];
     foreach (Word c in Model)
     {
         int b = 0;
         for (int j = 0; j < best.Length; j++)
         {
             if (best[j] == null)
             {
                 b = j;
                 break;
             }
             if (best[j].Re < best[b].Re)
             {
                 b = j;
             }
         }
         float dot = 0,
               score;
         for (int j = 0; j < Re.Length; j++)
         {
             dot += c.Elements[j].Im * Re[j];
         }
         score = (float)Sigmoid.f(dot);
         if (best[b] == null || best[b].Re < score)
         {
             best[b] = new Word(c.Id, c.HashCode)
             {
                 Re = score
             };
         }
     }
     return(best);
 }
Example #15
 static void ScoreMelModel(Set sourceFiles, Matrix Model)
 {
     if (Model == null)
     {
         Console.WriteLine("Model not loaded.");
         return;
     }
     string[] Shuffle = ((IEnumerable <string>)sourceFiles).ToArray();
     foreach (string file in Shuffle)
     {
         Matrix Data = System.Ai.Model.LoadFromFile(file, SIZE, out string fmt, out CBOW.DIMS);
         Debug.Assert(fmt == "MIDI");
         var wo = Model["a"];
         if (wo == null)
         {
             continue;
         }
         foreach (var it in Data)
         {
             double dot = 0.0,
                      score;
             for (int j = 0; j < it.Axis.Length; j++)
             {
                 dot += it.Axis[j].Re * wo.Axis[j].Im;
             }
             score       = Sigmoid.f(dot);
             it.Score.Im = score;
             it.Axis     = null;
         }
         SaveMidi(Data.GetBuffer(), fmt, Path.ChangeExtension(file, ".score"));
     }
 }
Example #16
    public void CalcHiddenStates(float[] in_visible_vector, float[] out_hidden)
    {
        // val for our prng
        // http://en.wikipedia.org/wiki/Linear_congruential_generator (Numerical Recipes vals)
        const uint a = 1664525;
        const uint c = 1013904223;

        fixed(float *v = in_visible_vector, h = out_hidden)
        {
            for (uint j = 0; j < _hidden; j++)
            {
                // activation
                h[j] = _hidden_biases[j];
                for (uint i = 0; i < _visible; i++)
                {
                    h[j] += _visible_features[j][i] * v[i];
                }
                // probability
                h[j]       = Sigmoid.Calc(h[j]);
                _random[j] = _random[j] * a + c;
                // state
                h[j] = (_random[j] < h[j] * uint.MaxValue) ? 1.0f : 0.0f;
            }
        }
    }
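
Two tricks make the sampling here cheap: each hidden unit keeps its own linear congruential generator state in _random (the Numerical Recipes constants a and c above), and the comparison _random[j] < h[j] * uint.MaxValue draws a Bernoulli sample with probability h[j] by scaling the probability up to the uint range instead of converting the PRNG output to a float.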
Example #17
        private void btn_createNetwork_Click(object sender, RoutedEventArgs e)
        {
            int[] layers = new int[this.wp_layers.Items.Count + 2];
            layers[0] = Convert.ToInt32((this.ucInputLayer.Content as NetwokLayerSizeControl).GetLayerSize());

            for (int i = 1; i < layers.Length - 1; i++)
            {
                layers[i] = Convert.ToInt32((this.wp_layers.Items[i - 1] as NetwokLayerSizeControl).GetLayerSize());
            }

            layers[layers.Length - 1] = Convert.ToInt32((this.ucOutputLayer.Content as NetwokLayerSizeControl).GetLayerSize());

            ActivationStrategy acti = new Sigmoid();

            if (this.cb_activationFunctions.SelectedIndex == 0)
            {
                acti = new Sigmoid();
            }
            else if (this.cb_activationFunctions.SelectedIndex == 1)
            {
                acti = new TanH();
            }
            else if (this.cb_activationFunctions.SelectedIndex == 2)
            {
                acti = new ReLU();
            }

            NetworkHelper.SaveNetToFile(new NeuralNetwork(layers, acti), "neuralNetwork.txt");
            MessageBox.Show("Success!");
        }
Example #18
        public static void SpeedInt32Test()
        {
            Console.WriteLine("================ Int Test ============== \n\n");
            int count = 10000;

            var Fc  = new FullConnect <int>(100, 3, 1);
            var sig = new Sigmoid <int>();
            var net = new Network <int>(OptimiserType.LevenbergMarquardt);

            net.Add(Fc);
            net.Add(sig);
            var rnd = new Random();

            var sw = new Stopwatch();

            var           tInp = new Tensor4 <int>(1, 1, 100, 1, rnd);
            Tensor4 <int> outp = new Tensor4 <int>(1, 1, 1, 1);

            sw.Start();
            for (int j = 0; j < count; j++)
            {
                outp = net.Forward(tInp);
            }

            Console.WriteLine("Прямой проход слоя составляет: " + sw.ElapsedMilliseconds / (double)count + " мс");

            sw.Stop();

            Console.WriteLine("\n\n");

            for (int i = 0; i < outp.D; i++)
            {
                Console.WriteLine(outp[0, 0, i, 0]);
            }
        }
Example #19
        public void DistanceTest()
        {
            Sigmoid       dense  = new Sigmoid(3.6, 1);
            SparseSigmoid target = new SparseSigmoid(3.6, 1);

            double[] sx = { 1, -0.555556, 2, +0.250000, 3, -0.864407, 4, -0.916667 };
            double[] sy = { 1, -0.666667, 2, -0.166667, 3, -0.864407, 4, -0.916667 };
            double[] sz = { 1, -0.944444, 3, -0.898305, 4, -0.916667 };

            double[] dx = { -0.555556, +0.250000, -0.864407, -0.916667 };
            double[] dy = { -0.666667, -0.166667, -0.864407, -0.916667 };
            double[] dz = { -0.944444, +0.000000, -0.898305, -0.916667 };

            double expected, actual;

            expected = dense.Distance(dx, dy);
            actual   = target.Distance(sx, sy);
            Assert.AreEqual(expected, actual);

            expected = dense.Distance(dx, dz);
            actual   = target.Distance(sx, sz);
            Assert.AreEqual(expected, actual);

            expected = dense.Distance(dy, dz);
            actual   = target.Distance(sy, sz);
            Assert.AreEqual(expected, actual);
        }
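
The sparse arrays interleave 1-based indices with values, so sx encodes the same vector as dx; sz omits index 2, which is why dz carries an explicit +0.000000 in that slot. The test's point is that SparseSigmoid must reproduce the dense Sigmoid distances exactly on equivalent inputs.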
Example #20
        public void NN_activation_function_sigmoid()
        {
            Sigmoid aFunc = new Sigmoid();
            double  value = aFunc.GetValue(3.0);

            Assert.IsTrue(SupportFunctions.DoubleCompare(value, 0.9526));
        }
Example #21
        //IMPLEMENTATION OF SHIFT - page 9
        internal ShiftedAddressing(Unit shift, GatedAddressing gatedAddressing)
        {
            _shift          = shift;
            GatedAddressing = gatedAddressing;
            _gatedVector    = GatedAddressing.GatedVector;
            _cellCount      = _gatedVector.Length;

            ShiftedVector = UnitFactory.GetVector(_cellCount);
            double cellCountDbl = _cellCount;

            //Max shift is from range -1 to 1
            _shiftWeight = Sigmoid.GetValue(_shift.Value);
            double maxShift       = ((2 * _shiftWeight) - 1);
            double convolutionDbl = (maxShift + cellCountDbl) % cellCountDbl;

            _simj         = 1 - (convolutionDbl - Math.Floor(convolutionDbl));
            _oneMinusSimj = (1 - _simj);
            _convolution  = (int)convolutionDbl;

            for (int i = 0; i < _cellCount; i++)
            {
                int imj = (i + _convolution) % _cellCount;

                Unit vectorItem = ShiftedVector[i];

                vectorItem.Value = (_gatedVector[imj].Value * _simj) +
                                   (_gatedVector[(imj + 1) % _cellCount].Value * _oneMinusSimj);
                if (vectorItem.Value < 0 || double.IsNaN(vectorItem.Value))
                {
                    throw new Exception("Error - weight should not be smaller than zero or nan");
                }
            }
        }
Example #22
 public bool Visit(Sigmoid sigmoid)
 {
     sigmoid.Arg.Parents.Add(sigmoid);
     sigmoid.Mid.Parents.Add(sigmoid);
     //sigmoid.Arg.Accept(this);
     //sigmoid.Mid.Accept(this);
     return(false);
 }
Example #23
 public bool Visit(Sigmoid sigmoid)
 {
     sigmoid.Parents.Clear();
     sigmoid.Arg.Accept(this);
     sigmoid.Mid.Accept(this);
     UpdateInterval(sigmoid, 0, 1);
     return(true);
 }
Example #24
    public float CalculateGradient(float? target = null)
    {
        if (target == null)
        {
            return(Gradient = OutputSynapses.Sum(x => x.OutputNeuton.Gradient * x.Weight) * Sigmoid.Derivative(Value));
        }

        return(Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value));
    }
Example #25
        private (Vector diffTanhGate, Vector diffInputGate, Vector diffForgetGate, Vector diffOutputGate) GetDiffForGates(Vector diffForget, Vector diffOutput)
        {
            var diffTanhGate   = diffForget * InputLayerGateResultI * Tanh.DeriveFunc(TanhLayerGateResultG);
            var diffInputGate  = diffForget * TanhLayerGateResultG * Sigmoid.DeriveFunc(InputLayerGateResultI);
            var diffForgetGate = diffForget * ForgetFromPreviousLayer * Sigmoid.DeriveFunc(ForgetGateResultF);
            var diffOutputGate = diffOutput * Tanh.Func(Forget) * Sigmoid.DeriveFunc(OutputLayerGateResultO);

            return(diffTanhGate, diffInputGate, diffForgetGate, diffOutputGate);
        }
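
These are the usual LSTM gate gradients for a cell c_t = f_t ⊙ c_{t-1} + i_t ⊙ g_t with output h_t = o_t ⊙ tanh(c_t): the incoming cell-state error (diffForget) is routed to the tanh, input, and forget gates, the hidden-state error (diffOutput) goes to the output gate, and each contribution is scaled by the derivative of that gate's own activation via Tanh.DeriveFunc / Sigmoid.DeriveFunc.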
Example #26
    public double CalculateGradient(double? target = null)
    {
        if (target == null)
        {
            return(Gradient = OutputSynapses.Sum(a => a.OutputNeuron.Gradient * a.Weight) * Sigmoid.Derivative(Value));
        }

        return(Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value));
    }
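
Both CalculateGradient overloads (Examples #24 and #26) implement the delta rule: for output neurons the error against the target is scaled by Sigmoid.Derivative(Value), while hidden neurons (target == null) instead back-propagate the weighted sum of their downstream gradients through the same derivative.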
Example #27
    public double ActivationFunction(double value)
    {
        Sigmoid sigm = (x) =>
        {
            double k = (double)System.Math.Exp(x);
            return(k / (1.0f + k));
        };

        return(sigm(value));
    }
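
Here Sigmoid is evidently a delegate type rather than a class. A minimal declaration this snippet assumes:

    public delegate double Sigmoid(double x);

Note that the lambda computes e^x / (1 + e^x), which is algebraically identical to the usual 1 / (1 + e^-x).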
Example #28
 private void collectLearningData()
 {
     using (var writer = File.AppendText(LearnignDataFileName))
     {
         var expectedOutput = Sigmoid.Output(DistanceToRightWall - DistanceToLeftWall);
         Debug.Log(expectedOutput);
         writer.WriteLine(string.Format("{0}{3}{1}{3}{2}", DistanceToLeftWall, DistanceToRightWall, expectedOutput, learningFileDelimiter));
         Debug.Log(DistanceToLeftWall + " " + DistanceToRightWall + " " + expectedOutput);
     }
 }
Example #29
        /// =================================================
        /// <summary>
        /// Creates the neural network with the given configuration
        /// </summary>
        ///
        /// <returns></returns>
        public Cerebro Build()
        {
            if (this.inputNeurons <= 0)
            {
                throw new System.InvalidOperationException(
                          $"The input neuron count is invalid: {this.inputNeurons}"
                          );
            }

            if (this.layerConfigs.Count == 0)
            {
                throw new System.InvalidOperationException("The layer configuration is empty");
            }

            Layer[] layers          = new Layer[this.layerConfigs.Count];
            int     lastNeuronCount = this.inputNeurons;

            for (int i = 0; i < this.layerConfigs.Count; i++)
            {
                LayerConfig config = this.layerConfigs[i];

                IActivator activator;
                switch (config.type)
                {
                case LayerType.Sine:
                    activator = new Sine();
                    break;

                case LayerType.Tanh:
                    activator = new Tanh();
                    break;

                case LayerType.Sigmoid:
                default:
                    activator = new Sigmoid();
                    break;
                }

                layers[i]       = new Layer(lastNeuronCount, config.neuronCount, activator);
                lastNeuronCount = config.neuronCount;
            }

            Cerebro net = new Cerebro(layers);

            if (this.genome != null)
            {
                net.SetGenome(this.genome);
            }
            else
            {
                net.Initialize(this.weightsBiasAmplitude);
            }

            return(net);
        }
Example #30
    public void Compute()
    {
        float sum = 0;

        for (int i = 0; i < InputNeurons.Count; i++)
        {
            sum += InputNeurons[i].OutputValue * Weights[i];
        }

        OutputValue = Sigmoid.Output(sum + Weights[Weights.Count - 1]);
    }
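
The final element of Weights is not paired with an input neuron: adding Weights[Weights.Count - 1] directly to the sum makes it act as the bias term, i.e. a weight on an implicit constant input of 1.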
Example #31
        /// <summary>
        /// Instantiates a new Logistic Regression object.
        /// </summary>
        /// <param name="numVariables">The number of input features to develop a hypothesis with.</param>
        /// <param name="learningRate">Learning rate modifier. Setting this too high can cause divergence.</param>
        /// <param name="threshold">Anything above this threshold will be classified as positive.</param>
        public LogisticRegression(int numVariables, double learningRate = 0.05, float threshold = 0.5f, float lambda = 0.0f)
        {
            Theta = new DenseVector(numVariables);
            LearningRate = learningRate;
            Lambda = lambda;

            var rand = new Random();

            for (int i = 0; i < Theta.Count; i++)
            {
                Theta[i] = rand.NextDouble();
            }

            Threshold = threshold;
            sigmoid = new Sigmoid();
        }
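
The resulting hypothesis is the standard logistic one, h(x) = sigmoid(Theta · x), classified as positive when h(x) >= Threshold. Theta starts at random values in [0, 1), and lambda presumably feeds an L2 regularization term during training (not shown in this snippet).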