Code Example #1
 public NeuralNetwork(NeuralNetworkParameters parameters, IContinuousDistribution rng)
 {
     NetworkParameters = parameters;
     HiddenWeights     = CreateMatrix.Random<float>(NetworkParameters.HiddenLayerNeuronCount, NetworkParameters.InputCount, rng);
     HiddenBiases      = CreateVector.Random<float>(NetworkParameters.HiddenLayerNeuronCount, rng);
     OutputWeights     = CreateMatrix.Random<float>(NetworkParameters.OutputCount, NetworkParameters.HiddenLayerNeuronCount, rng);
     OutputBiases      = CreateVector.Random<float>(NetworkParameters.OutputCount, rng);
 }
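
The constructor above relies on the Math.NET Numerics factory methods CreateMatrix.Random<T> and CreateVector.Random<T>, which accept any IContinuousDistribution as the sampling source. The following standalone sketch (not taken from the project; the class name and layer sizes are illustrative) shows how a distribution such as Normal(0, 1) or ContinuousUniform(-1, 1) can be passed to these calls:

 using MathNet.Numerics.Distributions;
 using MathNet.Numerics.LinearAlgebra;

 class RandomFactorySketch
 {
     static void Main()
     {
         // Any IContinuousDistribution can drive the random initialization.
         IContinuousDistribution normal  = new Normal(0.0, 1.0);
         IContinuousDistribution uniform = new ContinuousUniform(-1.0, 1.0);

         // Illustrative sizes: 16 hidden neurons, 4 inputs, 3 outputs.
         Matrix<float>  hiddenWeights = CreateMatrix.Random<float>(16, 4, normal);
         Vector<float>  hiddenBiases  = CreateVector.Random<float>(16, normal);
         Vector<double> outputBiases  = CreateVector.Random<double>(3, uniform);

         System.Console.WriteLine(hiddenWeights.RowCount + "x" + hiddenWeights.ColumnCount);
     }
 }
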
Code Example #2
 public void Reset()
 {
     mBias = CreateVector.Random<double>(mBias.Count, new Normal(0.0, 1.0));
     if (mWeights != null)
     {
         mWeights = CreateMatrix.Random<double>(mWeights.RowCount, mWeights.ColumnCount, new Normal(0.0, 1.0 / Math.Sqrt(mWeights.ColumnCount)));
     }
 }
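
Note the standard deviation 1.0 / Math.Sqrt(mWeights.ColumnCount): the weights are resampled from a Gaussian whose spread shrinks with the layer's fan-in, which keeps the initial weighted sums at roughly unit scale for unit-variance inputs. A minimal standalone sketch of the same scaled initialization (not from the project; the 30x10 shape is illustrative):

 using System;
 using MathNet.Numerics.Distributions;
 using MathNet.Numerics.LinearAlgebra;
 using MathNet.Numerics.Statistics;

 class ScaledInitSketch
 {
     static void Main()
     {
         int neurons = 30, fanIn = 10;    // illustrative layer shape
         var weights = CreateMatrix.Random<double>(neurons, fanIn, new Normal(0.0, 1.0 / Math.Sqrt(fanIn)));
         var biases  = CreateVector.Random<double>(neurons, new Normal(0.0, 1.0));

         // Sample standard deviation of the weights, roughly 1/sqrt(10) ≈ 0.32.
         Console.WriteLine(weights.Enumerate().StandardDeviation());
     }
 }
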
Code Example #3
File: SimplePerceptron.cs Project: 2021SIA/TP3
 public SimplePerceptron(int N, double learningRate, Func<double, double> activationFunction, Func<double, double> activationFunctionDerivative, bool adaptiveLearningRate)
 {
     this.N = N;
     this.W = CreateVector.Random<double>(N + 1, new ContinuousUniform(-1d, 1d));
     this.ActivationFunction           = activationFunction;
     this.ActivationFunctionDerivative = activationFunctionDerivative;
     this.LearningRate         = learningRate;
     this.AdaptiveLearningRate = adaptiveLearningRate;
 }
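
The weight vector has N + 1 components: the bias appears to be folded into the weights, with the Learn method (see Code Example #6) prepending a constant 1 to every input so that the bias is trained like any other weight. A small standalone sketch of that augmented dot product (not from the project; the input values are made up):

 using System.Linq;
 using MathNet.Numerics.Distributions;
 using MathNet.Numerics.LinearAlgebra;

 class BiasTrickSketch
 {
     static void Main()
     {
         int N = 3;                                              // illustrative input size
         var w = CreateVector.Random<double>(N + 1, new ContinuousUniform(-1d, 1d));
         var x = CreateVector.Dense(new[] { 0.5, -0.2, 0.8 });   // raw input of length N
         var augmented = CreateVector.DenseOfEnumerable(new[] { 1.0 }.Concat(x));
         double h = augmented * w;                               // weighted sum, w[0] acts as the bias
         System.Console.WriteLine(h);
     }
 }
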
Code Example #4
        public OjaNetwork(double learningRate, int epochs, List<Vector<double>> values)
        {
            this.epochs = epochs;

            var columns = CreateMatrix.DenseOfRowVectors(values).EnumerateColumns();
            var mean    = CreateVector.DenseOfEnumerable(columns.Select(Statistics.Mean));
            var std     = CreateVector.DenseOfEnumerable(columns.Select(Statistics.StandardDeviation));

            this.values    = values.Select(v => (v - mean) / std).ToList();
            this.variables = values[1].Count;
            this.W         = CreateVector.Random<double>(variables, new ContinuousUniform(-1, 1));

            this.LearningRate = learningRate;
        }
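
Before the random weight vector is drawn, each input column is standardized to zero mean and unit variance via Statistics.Mean and Statistics.StandardDeviation. A runnable standalone version of that preprocessing step (not project code; the three sample vectors are made up):

 using System.Collections.Generic;
 using System.Linq;
 using MathNet.Numerics.LinearAlgebra;
 using MathNet.Numerics.Statistics;

 class StandardizeSketch
 {
     static void Main()
     {
         var values = new List<Vector<double>>
         {
             CreateVector.Dense(new[] { 1.0, 10.0 }),
             CreateVector.Dense(new[] { 2.0, 20.0 }),
             CreateVector.Dense(new[] { 3.0, 30.0 }),
         };

         // Column-wise mean and standard deviation over all samples.
         var columns = CreateMatrix.DenseOfRowVectors(values).EnumerateColumns();
         var mean    = CreateVector.DenseOfEnumerable(columns.Select(Statistics.Mean));
         var std     = CreateVector.DenseOfEnumerable(columns.Select(Statistics.StandardDeviation));

         // Element-wise standardization of every sample vector.
         var standardized = values.Select(v => (v - mean) / std).ToList();
         System.Console.WriteLine(standardized[0]);
     }
 }
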
Code Example #5
        public KohonenNetwork(List<Vector<double>> values, int n, bool weightEntries)
        {
            this.values = Normalize(values);
            W           = new Vector<double>[n, n];
            N           = n;

            inputLength = this.values[0].Count;
            var indexes = MathNet.Numerics.Combinatorics.GenerateVariationWithRepetition(this.values.Count, n * n);
            var current = 0;

            for (int i = 0; i < n; i++)
            {
                for (int j = 0; j < n; j++)
                {
                    W[i, j] = !weightEntries ? CreateVector.Random<double>(inputLength, new ContinuousUniform(0, 1)) : this.values[indexes[current++]];
                }
            }
        }
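
When weightEntries is true, each grid cell is seeded with a randomly chosen training sample: Combinatorics.GenerateVariationWithRepetition(count, n * n) returns n * n indices drawn with repetition from [0, count). A minimal standalone sketch of that call (not project code; the counts are illustrative):

 using MathNet.Numerics;

 class VariationSketch
 {
     static void Main()
     {
         int sampleCount = 100, n = 4;    // illustrative sizes
         int[] indexes = Combinatorics.GenerateVariationWithRepetition(sampleCount, n * n);
         System.Console.WriteLine(indexes.Length);    // 16 indices, each in [0, 100)
     }
 }
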
Code Example #6
File: SimplePerceptron.cs Project: 2021SIA/TP3
        public void Learn(
            Vector<double>[] trainingInput,
            Vector<double>[] trainingOutput,
            Vector<double>[] testInput,
            Vector<double>[] testOutput,
            int batch,
            double minError,
            int epochs)
        {
            Contract.Requires(trainingInput.Length == trainingOutput.Length);
            Contract.Requires(testInput.Length == testOutput.Length);
            Contract.Requires(trainingInput[0].Count == N + 1);
            Contract.Requires(testInput[0].Count == N + 1);

            double[] desiredTrainingOutput = trainingOutput.Select(o => o.At(0)).ToArray();
            double[] desiredTestOutput     = testOutput.Select(o => o.At(0)).ToArray();

            // Prepend the value 1 to each input (bias term).
            Vector<double>[] input = new Vector<double>[trainingInput.Length];
            for (int i = 0; i < input.Length; i++)
            {
                input[i] = Vector<double>.Build.DenseOfEnumerable(new double[] { 1 }.Concat(trainingInput[i]));
            }
            int             p = input.Length;
            Vector<double>  w = CreateVector.Random<double>(N + 1, new ContinuousUniform(-1d, 1d));
            Vector<double>  deltaW = null;
            double          error = 1, error_min = p * 2;
            Vector<double>  w_min = w;

            for (int i = 0, n = 0; i < epochs && error_min > minError; i++, n++)
            {
                if (n > 100 * p)
                {
                    w = CreateVector.Random<double>(N + 1, new ContinuousUniform(-1d, 1d));
                    n = 0;
                }

                int[]  rand = Combinatorics.GeneratePermutation(input.Length);
                double lr   = this.AdaptiveLearningRate ? optimizing(w, input, desiredTrainingOutput, batch, rand) : LearningRate;
                int    j;
                for (j = 0; j < input.Length; j++)
                {
                    int             ix    = rand[j];
                    double          h     = input[ix] * w;
                    double          act   = ActivationFunction(h);
                    Vector<double>  delta = lr * (desiredTrainingOutput[ix] - act) * input[ix] * ActivationFunctionDerivative(h);
                    deltaW = deltaW == null ? delta : deltaW + delta;
                    if (j % batch == 0)
                    {
                        w     += deltaW;
                        deltaW = null;
                        error  = CalculateError(input, desiredTrainingOutput, w);
                        if (error < error_min)
                        {
                            error_min = error;
                            w_min     = w;
                        }
                    }
                }
                if (j % batch != 0)
                {
                    w     += deltaW;
                    deltaW = null;
                    error  = CalculateError(input, desiredTrainingOutput, w);
                    if (error < error_min)
                    {
                        error_min = error;
                        w_min     = w;
                    }
                }
            }
            W = w_min;
        }
Code Example #7
 public Layer(int size, int sizeofPreviousLayer) : this(CreateVector.Random<double>(size, new Normal(0.0, 1.0)),
                                                        sizeofPreviousLayer > 0 ? CreateMatrix.Random<double>(size, sizeofPreviousLayer, new Normal(0.0, 1.0 / Math.Sqrt(sizeofPreviousLayer))) : null)
 {
 }