Code Example #1
    /// <summary>
    /// This procedure contains the user code. Input parameters are provided as regular arguments,
    /// Output parameters as ref arguments. You don't have to assign output parameters;
    /// they will have a default value.
    /// </summary>
    private void RunScript(List <Point3d> P, Mesh M, double step, bool reset, bool go, ref object Pos, ref object status, ref object Trails)
    {
        if (reset || GD == null)
        {
            GD = new GradientDescent(M, P);
        }

        if (go)
        {
            // update live variables
            GD.MaxSpeed = step;

            // update System
            GD.Update();

            // expiring solution forces the component to update
            Component.ExpireSolution(true);
        }

        // extracts positions and trails (filled in from the complete version in Code Example #18)
        pts   = new Point3d[P.Count];
        trs   = new Polyline[P.Count];
        pStat = new bool[P.Count];
        for (int i = 0; i < GD.parts.Count; i++)
        {
            pts[i]   = GD.parts[i].pos;
            trs[i]   = GD.parts[i].trail.IsValid ? GD.parts[i].trail : null;
            pStat[i] = GD.parts[i].alive;
        }

        Pos    = pts;
        status = pStat;
        Trails = trs;
    }
Code Example #2
        /// <summary>Generate Logistic Regression model based on a set of examples.</summary>
        /// <param name="x">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // copy the examples so the input matrix is not mutated
            Matrix copy = x.Copy();

            copy = PreProcessing.FeatureDimensions.IncreaseDimensions(copy, PolynomialFeatures);

            // add intercept term
            copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);

            Vector theta = Vector.Ones(copy.Cols);

            var run = GradientDescent.Run(theta, copy, y, this.MaxIterations, this.LearningRate, new Functions.CostFunctions.LogisticCostFunction(),
                                          this.Lambda, new Regularization());

            LogisticRegressionModel model = new LogisticRegressionModel()
            {
                Descriptor         = this.Descriptor,
                Theta              = run.Item2,
                LogisticFunction   = new Math.Functions.Logistic(),
                PolynomialFeatures = this.PolynomialFeatures
            };

            return model;
        }
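GradientDescent.Run above returns a tuple whose second item is the fitted theta, which the model then stores. As a rough, self-contained sketch of what such a routine iterates, here is plain batch gradient descent for logistic regression; every name in it (Fit, the hyperparameters) is illustrative and not the numl API used above.

// Plain batch gradient descent for logistic regression:
// theta <- theta - alpha * grad J(theta). Illustrative only.
static double[] Fit(double[][] x, double[] y, double alpha, int maxIterations)
{
    int n = x[0].Length;
    var theta = new double[n];
    for (int iter = 0; iter < maxIterations; iter++)
    {
        var grad = new double[n];
        for (int i = 0; i < x.Length; i++)
        {
            double z = 0;
            for (int j = 0; j < n; j++) z += theta[j] * x[i][j];
            double h = 1.0 / (1.0 + Math.Exp(-z));            // sigmoid hypothesis
            for (int j = 0; j < n; j++) grad[j] += (h - y[i]) * x[i][j];
        }
        for (int j = 0; j < n; j++) theta[j] -= alpha * grad[j] / x.Length;
    }
    return theta;
}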
Code Example #3
        /// <summary>Generate Linear Regression model based on a set of examples.</summary>
        /// <param name="x">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // create initial theta
            Vector theta = Vector.Ones(x.Cols + 1);
            Matrix copy  = x.Copy();

            // normalise features
            for (int i = 0; i < copy.Cols; i++)
            {
                var j = FeatureNormalizer.FeatureScale(copy[i, VectorType.Col]);
                for (int k = 0; k < copy.Rows; k++)
                {
                    copy[k, i] = j[k];
                }
            }

            // add intercept term
            copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);

            // run gradient descent
            var run = GradientDescent.Run(theta, copy, y, this.MaxIterations, this.LearningRate, new LinearCostFunction(), this.Lambda, new Regularization());

            // once converged create model and apply theta

            LinearRegressionModel model = new LinearRegressionModel(x.Mean(VectorType.Row), x.StdDev(VectorType.Row))
            {
                Descriptor = this.Descriptor,
                Theta      = run.Item2
            };

            return model;
        }
Code Example #4
        protected override OptimizationAlgorithm ConstructOptimizationAlgorithm(IChannel ch)
        {
            Contracts.CheckValue(ch, nameof(ch));
            OptimizationAlgorithm optimizationAlgorithm;
            IGradientAdjuster     gradientWrapper = MakeGradientWrapper(ch);

            switch (Args.OptimizationAlgorithm)
            {
            case BoostedTreeArgs.OptimizationAlgorithmType.GradientDescent:
                optimizationAlgorithm = new GradientDescent(Ensemble, TrainSet, InitTrainScores, gradientWrapper);
                break;

            case BoostedTreeArgs.OptimizationAlgorithmType.AcceleratedGradientDescent:
                optimizationAlgorithm = new AcceleratedGradientDescent(Ensemble, TrainSet, InitTrainScores, gradientWrapper);
                break;

            case BoostedTreeArgs.OptimizationAlgorithmType.ConjugateGradientDescent:
                optimizationAlgorithm = new ConjugateGradientDescent(Ensemble, TrainSet, InitTrainScores, gradientWrapper);
                break;

            default:
                throw ch.Except("Unknown optimization algorithm '{0}'", Args.OptimizationAlgorithm);
            }

            optimizationAlgorithm.TreeLearner          = ConstructTreeLearner(ch);
            optimizationAlgorithm.ObjectiveFunction    = ConstructObjFunc(ch);
            optimizationAlgorithm.Smoothing            = Args.Smoothing;
            optimizationAlgorithm.DropoutRate          = Args.DropoutRate;
            optimizationAlgorithm.DropoutRng           = new Random(Args.RngSeed);
            optimizationAlgorithm.PreScoreUpdateEvent += PrintTestGraph;

            return optimizationAlgorithm;
        }
Code Example #5
File: GradientDescentTests.cs Project: gatapia/ml
        [Test] public void TestFromPoints()
        {
            var gd = new GradientDescent(t, new [] { 0.0, 0.0 }, Regression.CostFunction, Regression.Hypothesis);

            gd.Alpha     = 1e-5;
            gd.Precision = 1e-4;
            ValidateParameters(gd.Optimize());
        }
Code Example #6
        public void CanGetBiasesUpdate()
        {
            GradientDescent gd          = new GradientDescent(0.5, 1);
            NetworkVector   testVector  = new NetworkVector(new double[] { 1, 2, 3 });
            NetworkVector   result      = gd.BiasesUpdate(testVector);
            NetworkVector   resultCheck = new NetworkVector(new double[] { -0.5, -1.0, -1.5 });

            Assert.AreEqual(resultCheck, result);
        }
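The expected vector here is consistent with an update of the form -(learningRate / batchSize) * gradient, with learningRate = 0.5 and batchSize = 1. A minimal strategy class that would satisfy this test might look as follows; only the constructor signature and the method name are taken from the test, the body is an inferred assumption.

// Minimal sketch of a strategy satisfying CanGetBiasesUpdate; the rule
// -(rate / batch) * gradient is inferred from the expected values above.
public class GradientDescentSketch
{
    private readonly double rate;
    private readonly int batch;

    public GradientDescentSketch(double learningRate, int batchSize)
    {
        rate  = learningRate;
        batch = batchSize;
    }

    public double[] BiasesUpdate(double[] biasGradient)
    {
        var update = new double[biasGradient.Length];
        for (int i = 0; i < biasGradient.Length; i++)
        {
            update[i] = -(rate / batch) * biasGradient[i];   // e.g. 1 -> -0.5
        }
        return update;
    }
}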
Code Example #7
        // constructor (public if we use it from RunScript)
        public Particle(GradientDescent GraDesc, Point3d pos)
        {
            this.GraDesc = GraDesc;
            this.pos     = pos;
            vel          = Vector3d.Zero;
            trail        = new Polyline();
            trail.Add(new Point3d(pos));

            alive = true;
        }
Code Example #8
        public void OptimizeTest()
        {
            var target   = new TensorOld(new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 3, 3);
            var gradient = new TensorOld(new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 3, 3);
            var expected = new TensorOld(new double[] { 0.9, 1.8, 2.7, 3.6, 4.5, 5.4, 6.3, 7.2, 8.1 }, 3, 3);
            var gd       = new GradientDescent(0.1);

            gd.Optimize(target, gradient);
            Assert.Equal(expected, target);
        }
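Optimize mutates target in place rather than returning a new tensor, which is why the assertion compares expected against target. A sketch of that single step, with plain arrays standing in for TensorOld:

// In-place SGD step: target <- target - learningRate * gradient.
// Plain arrays stand in for TensorOld; this mirrors the behavior, not the API.
static void OptimizeStep(double[] target, double[] gradient, double learningRate)
{
    for (int i = 0; i < target.Length; i++)
    {
        target[i] -= learningRate * gradient[i];   // 1 -> 0.9, 2 -> 1.8, ...
    }
}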
Code Example #9
 public void CanMakeWithStepSize()
 {
     try
     {
         GradientDescent gd = new GradientDescent(0.5, 1);
     }
     catch (Exception e)
     {
         Assert.Fail(string.Format("GradientDescent constructor threw exception: {0}", e));
     }
 }
Code Example #10
        public void CanGetWeightsUpdate()
        {
            GradientDescent gd         = new GradientDescent(0.5, 1);
            WeightsMatrix   testMatrix = new WeightsMatrix(new double[, ] {
                { 1, 2, 3 }, { 2, 3, 4 }
            });
            WeightsMatrix result      = gd.WeightsUpdate(testMatrix);
            WeightsMatrix resultCheck = new WeightsMatrix(new double[, ] {
                { -0.5, -1.0, -1.5 }, { -1.0, -1.5, -2.0 }
            });

            Assert.AreEqual(resultCheck, result);
        }
Code Example #11
        public void TestGradientDescent()
        {
            Vector3[] vectors  = new Vector3[2];
            Vector3[] vectors2 = new Vector3[2];
            var       rand     = new Random();

            for (int i = 0; i < 2; i++)
            {
                vectors[i]  = new Vector3(rand.Next(10, 50), rand.Next(10, 50), rand.Next(10, 50));
                vectors2[i] = new Vector3(rand.Next(50, 100), rand.Next(50, 100), rand.Next(50, 100));
            }
            GradientDescent gradient = new GradientDescent(vectors, vectors2);
            //var values = gradient.calculateGradientDescent(0.0001f,1000000);
            //Assert.IsNotNull(values);
        }
Code Example #12
        public void CanUpdate()
        {
            AdaptationStrategy strategy = new GradientDescent(1.0, 1);

            wc_2.Run(vector_3);
            wc_2.BackPropagate(vector_2);
            wc_2.Update(strategy);

            NetworkVector biasesCheck  = new NetworkVector(new double[] { 0, 0 });
            WeightsMatrix weightsCheck = new WeightsMatrix(new double[, ] {
                { 1 - (11 * 111), 2 - (11 * 112), 3 - (11 * 113) }, { 2 - (12 * 111), 3 - (12 * 112), 4 - (12 * 113) }
            });

            Assert.AreEqual(biasesCheck, wc_2.Biases);
            Assert.AreEqual(weightsCheck, wc_2.Weights);
        }
Code Example #13
        public void TestGradientDescent()
        {
            double expectedPointValue = 2;
            double epsilon            = 0.0001;
            var    minimumPoint       = new GradientDescent().Calculate(new List <double>()
            {
                0, 0, 0, 0, 0
            }, TestFunction);

            Assert.Equal(5, minimumPoint.Count);
            Assert.InRange(minimumPoint[0] - expectedPointValue, -epsilon, epsilon);
            Assert.InRange(minimumPoint[1] - expectedPointValue, -epsilon, epsilon);
            Assert.InRange(minimumPoint[2] - expectedPointValue, -epsilon, epsilon);
            Assert.InRange(minimumPoint[3] - expectedPointValue, -epsilon, epsilon);
            Assert.InRange(minimumPoint[4] - expectedPointValue, -epsilon, epsilon);
        }
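Calculate above takes only a start point and a function, so the gradient is presumably estimated numerically. A sketch of descent with a central-difference gradient; the helper name and hyperparameters (step, h, iterations) are assumptions, not the tested API.

// Gradient descent with a central-difference gradient estimate.
// Hyperparameters are illustrative; a real implementation would expose them.
static List<double> Minimize(List<double> start, Func<List<double>, double> f,
                             double step = 0.01, double h = 1e-6, int iterations = 100000)
{
    var p    = new List<double>(start);
    var grad = new double[p.Count];
    for (int iter = 0; iter < iterations; iter++)
    {
        for (int i = 0; i < p.Count; i++)
        {
            double saved = p[i];
            p[i] = saved + h; double fPlus  = f(p);
            p[i] = saved - h; double fMinus = f(p);
            p[i] = saved;
            grad[i] = (fPlus - fMinus) / (2 * h);   // central difference
        }
        for (int i = 0; i < p.Count; i++) p[i] -= step * grad[i];
    }
    return p;
}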
Code Example #14
        public void GetResult_With_Success_Inside_Anchor_Bounds_Three_Anchors()
        {
            var gradientDescent = new GradientDescent();

            gradientDescent.SetAnchors(_anchors);
            IDistance[] node =
            {
                new DistanceBase(1,  41.4),
                new DistanceBase(3, 37.58),
                new DistanceBase(4, 56.79)
            };
            ICoordinate result = gradientDescent.GetResult(0, node);

            Assert.That(result.X, Is.EqualTo(26).Within(1.5));
            Assert.That(result.Y, Is.EqualTo(0).Within(3));
            Assert.That(result.Z, Is.EqualTo(34).Within(1.5));
        }
Code Example #15
File: CNNTest.cs Project: pxqr/nanon
        public static void Test(string trainImagesPath, string trainLabelsPath, string testImagesPath, string testLabelsPath)
        {
            LoadDataSet(trainImagesPath, trainLabelsPath, testImagesPath, testLabelsPath);
            GC.Collect();

            var network   = NetworkBuilder.CreateMnist(trainDataSet);

            Console.WriteLine("Initial");
            Test(network, testDataSet);
            Console.WriteLine("StartLearning");

            var cost = Double.PositiveInfinity;
            var timer = new Stopwatch();
            timer.Start();

            var optimizer = new GradientDescent<Matrix, Vector>(3, 0.0007, 1,
                x => {
                    timer.Stop();
                    Console.Write("Ignored {0}% of samples ", 100 * NeuralNetwork<Matrix>.counter / (double)trainDataSet.Inputs.Count());
                    Console.WriteLine("and gradient descent step time: {0} ms", timer.ElapsedMilliseconds);
                    NeuralNetwork<Matrix>.counter = 0;
                    Console.Write("trainSet: ");
                    cost = Test(x, trainDataSet, cost);
                    Console.Write(" || ");
                    Console.Write("testSet:  ");
                    Test(x, testDataSet);
                    Console.WriteLine();
                    timer.Reset();
                    timer.Start();
                });

            var trainer = new Trainer<Matrix, Vector>(optimizer);

            for (var i = 0; i < 10; ++i)
            {
                Console.WriteLine("Generation {0}", i);
                trainer.Train(network, trainDataSet.Set);

                optimizer.IterationCount  += 1;
                optimizer.InitialStepSize *= 2;
            }

            Console.WriteLine("EndLearning");
            Test(network, testDataSet);
        }
Code Example #16
        public void GetResult_With_Success_Outside_Anchor_Bounds_Four_Anchors()
        {
            var gradientDescent = new GradientDescent();

            gradientDescent.SetAnchors(_anchors);
            IDistance[] node =
            {
                new DistanceBase(1, 101.11),
                new DistanceBase(3,  89.14),
                new DistanceBase(4,  48.25),
                new DistanceBase(2,   7.07),
            };
            ICoordinate result = gradientDescent.GetResult(0, node);

            Assert.That(result.X, Is.EqualTo(44).Within(4));
            Assert.That(result.Y, Is.EqualTo(0).Within(3));
            Assert.That(result.Z, Is.EqualTo(85).Within(4));
        }
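Both GetResult tests solve the same trilateration problem: find the coordinate whose distances to the known anchors best match the measured ones. The natural cost for gradient descent here is the sum of squared distance residuals; a sketch, with simple tuples standing in for the anchor and IDistance types:

// Sum of squared residuals between predicted and measured anchor distances;
// this is the kind of cost the GetResult calls above would minimize.
// The tuple type is a stand-in, not the IDistance/anchor API.
static double Cost(double x, double y, double z,
                   (double ax, double ay, double az, double d)[] anchors)
{
    double sum = 0;
    foreach (var a in anchors)
    {
        double dist = Math.Sqrt((x - a.ax) * (x - a.ax)
                              + (y - a.ay) * (y - a.ay)
                              + (z - a.az) * (z - a.az));
        double r = dist - a.d;     // residual for this anchor
        sum += r * r;
    }
    return sum;
}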
Code Example #17
        private static void ExponentialGrowth()
        {
            IActivationFunction activationFunction = new SimpleActivationFunction();
            decimal             alpha = 0.01m;

            GradientDescent gradientDescent = new GradientDescent(activationFunction, alpha);

            decimal[][] rooms = new decimal[][] { new decimal[] { 0 }, new decimal[] { 1 },
                                                  new decimal[] { 2 }, new decimal[] { 3 }, new decimal[] { 4 }, new decimal[] { 5 },
                                                  new decimal[] { 6 }, new decimal[] { 7 }, new decimal[] { 8 }, new decimal[] { 9 } };

            decimal[] prices = new decimal[] { 0, 100, 200, 300, 400, 500, 600, 700, 800, 900 };

            var thetas = gradientDescent.Train(rooms, prices);

            var sixRoomPrice = activationFunction.Calculate(new decimal[] { 6 }, thetas);

            Console.WriteLine(sixRoomPrice);
        }
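Since the training data is exactly linear (price = 100 × rooms), the learned parameters should approach an intercept of 0 and a slope of 100, so the printed six-room price should come out close to 600.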
Code Example #18
    /// <summary>
    /// This procedure contains the user code. Input parameters are provided as regular arguments,
    /// Output parameters as ref arguments. You don't have to assign output parameters;
    /// they will have a default value.
    /// </summary>
    private void RunScript(List <Point3d> P, Mesh M, double step, bool reset, bool go, ref object Pos, ref object status, ref object Trails)
    {
        if (reset || GD == null || P.Count != GD.parts.Count)
        {
            GD = new GradientDescent(M, P);
        }

        if (go)
        {
            // update live variables
            GD.MaxSpeed = step;


            // update System
            GD.Update();


            // expiring solution forces the component to update
            Component.ExpireSolution(true);
        }

        // extracts positions and trails
        pts   = new Point3d[P.Count];
        trs   = new Polyline[P.Count];
        pStat = new bool[P.Count];
        for (int i = 0; i < GD.parts.Count; i++)
        {
            pts[i] = GD.parts[i].pos;
            if (GD.parts[i].trail.IsValid)
            {
                trs[i] = GD.parts[i].trail;
            }
            else
            {
                trs[i] = null;
            }
            pStat[i] = GD.parts[i].alive;
        }

        Pos    = pts;
        status = pStat;
        Trails = trs;
    }
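GD.Update() presumably advances every particle one descent step across the mesh. A hypothetical sketch of such a Particle.Update, using the RhinoCommon closest-point and normal queries; GraDesc.M (a mesh field) and the flat-spot stopping rule are assumptions, while MaxSpeed and the particle fields do appear in the examples.

// Hypothetical Particle.Update: step along the steepest downhill tangent
// of the mesh. GraDesc.M and the flat-spot stopping rule are assumptions.
public void Update()
{
    if (!alive) return;

    MeshPoint mp = GraDesc.M.ClosestMeshPoint(pos, 0.0);
    Vector3d  n  = GraDesc.M.NormalAt(mp);

    // Steepest descent on a surface: the component of "straight down"
    // lying in the tangent plane at the closest mesh point.
    Vector3d down     = -Vector3d.ZAxis;
    Vector3d downhill = down - (down * n) * n;

    if (!downhill.Unitize())     // zero tangential component: a flat spot
    {
        alive = false;
        return;
    }

    pos = mp.Point + downhill * GraDesc.MaxSpeed;
    trail.Add(pos);
}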
Code Example #19
        public static IServiceCollection AddLocationCalculatorRouter(this IServiceCollection services)
        {
            var locationBuilder = new LocationCalculatorBuilder <GradientDescent, Anchor>();
            var anchors         = new IAnchor[]
            {
                new Anchor(9, 60, 27, 512, 0),
                new Anchor(9, 7, 27, 513, 0),
                new Anchor(35, 96, 27, 514, 0),
                new Anchor(99, 89, 27, 515, 0),
                new Anchor(145, 66, 27, 516, 0),
                new Anchor(160, 9, 27, 517, 0),
                new Anchor(97, 8, 27, 518, 0),
                new Anchor(56, 9, 27, 519, 0),
            };
            // GradientDescent gradientDescent = locationBuilder.WithAnchors(
            //         new Anchor(53.8, 73.4, 92, 208),
            //         new Anchor(905.8, 0, 1.5, 211),
            //         new Anchor(0, 458.5, 76.4, 204),
            //         new Anchor(925.3, 398.3, 92, 206))
            //     .Build();
            GradientDescent gradientDescent = locationBuilder.WithAnchors(anchors).Build();

            var particleFilterBuilder = new LocationCalculatorBuilder <Location.ParticleFilter.ParticleFilter, Anchor>(new Location.ParticleFilter.ParticleFilter(3500, 30));
            var particleFilter        = particleFilterBuilder.WithAnchors(anchors).Build();

            var particleFilter3DBuilder = new LocationCalculatorBuilder <ParticleFilter3D, Anchor>(new ParticleFilter3D(3500, 30));
            var particleFilter3D        = particleFilter3DBuilder.WithAnchors(anchors).Build();

            var particleAreaFilterBuilder = new LocationCalculatorBuilder <ParticleAreaFilter, Anchor>(new ParticleAreaFilter(3500, 30));
            var particleAreaFilter        = particleAreaFilterBuilder.WithAnchors(anchors).Build();

            var comexBuilder = new LocationCalculatorBuilder <ComexCalculator, Anchor>();
            var comex        = comexBuilder.WithAnchors(anchors).Build();

            services.AddLocationCalculatorRouter(builder =>
            {
                builder.AddLocationCalculator <MessageType4>(particleFilter);
            });
            return services;
        }
Code Example #20
File: SNNTest.cs Project: pxqr/nanon
        public static void Test(string trainImagesPath, string trainLabelsPath, string testImagesPath, string testLabelsPath)
        {
            var dataSet   = LoadDataSet(trainImagesPath, trainLabelsPath, testImagesPath, testLabelsPath);
            GC.Collect();

            var network   = NetworkBuilder.Create(dataSet
                            , new Tanh()
                            , new List<int> { 50 }
                            );

            var cost = Double.PositiveInfinity;
            var optimizer = new GradientDescent<Vector, Vector>(5, 0.02, 2,
                x => {
                    Console.Write("trainSet: ");
                    cost = Test(x, dataSet, cost);
                    Console.Write(" || ");
                    Console.Write("testSet:  ");
                    Test(x, testDataSet);
                    Console.WriteLine();
                });

            var trainer   = new Trainer<Vector, Vector>(optimizer);

            Console.WriteLine("Initial");
            Test(network, dataSet);
            Console.WriteLine("StartLearning");

            for (var i = 0; i < 3; ++i)
            {
                Console.WriteLine("next generation");
                trainer.Train(network, dataSet.Set);
                optimizer.IterationCount += 1;
                optimizer.InitialStepSize *= 2;
            }

            Console.WriteLine("EndLearning");
            Test(network, testDataSet);
        }
Code Example #21
        public void GradientDescent_Line()
        {
            var line = new Line(Point3d.WorldOrigin, new Point3d(1, 1, 0));
            var gd   = new GradientDescent(GradientDescentOptions.Default)
            {
                Options = { MaxIterations = 1 }
            };
            var input = 1;

            gd.Options.MaxIterations = 100;
            gd.Options.LearningRate  = 100;

            gd.Minimize(
                values => line.PointAt(values[0]).Y,
                new List <double> { input });
            var err     = gd.Result.Error <= gd.Options.ErrorThreshold;
            var value   = gd.Result.Values[0] <= 0.01;
            var gLength = gd.Result.GradientLength <= gd.Options.Limit;

            Assert.True(err || value || gLength);
        }
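The final assertion passes if any one of three stopping signals fired: the error fell below Options.ErrorThreshold, the minimizing parameter dropped to about t ≤ 0.01 (the low-Y end of the line), or the gradient length fell below Options.Limit.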
Code Example #22
        public void CanUpdateBatch()
        {
            AdaptationStrategy strategy = new GradientDescent(1.0, 1);

            VectorBatch result = wc_2.Run(input_batch);

            wc_2.BackPropagate(gradient_batch);
            VectorBatch inputGradient = wc_2.InputGradient(gradient_batch);

            wc_2.Update(strategy);


            NetworkVector biasesCheck  = new NetworkVector(new double[] { 8, 7 });
            WeightsMatrix weightsCheck = new WeightsMatrix(new double[, ] {
                { -4, -6, -8 }, { -6, -10, -14 }
            });

            Assert.AreEqual(biasesCheck, wc_2.Biases);
            Assert.AreEqual(weightsCheck, wc_2.Weights);
            for (int i = 0; i < inputGradient.Count; i++)
            {
                Assert.AreEqual(inputgradient_check[i], inputGradient[i]);
            }
        }
Code Example #23
        public void MainFunctionToRun()
        {
            var             values2         = FileToArray.Converter(@"C:\Users\emiol\Downloads\TestPir.txt");
            var             values          = FileToArray.Converter(@"C:\Users\emiol\Downloads\TestPir2.txt");
            var             resultPath      = @"C:\Users\emiol\Desktop\2nd year Assignment\Neuro Computing\Neuro Computing\TestNeuroComputing\Result.txt";
            RelateTwoArrays relateTwoArrays = new RelateTwoArrays();

            relateTwoArrays.RelateTwoTuples(values.ToList(), values2.ToList());
            GradientDescent gradientDescent = new GradientDescent(relateTwoArrays.firstVector.ToArray(), relateTwoArrays.secondVector.ToArray());

            gradientDescent.calculateGradientDescent(0.0000001f, 300, 100000);
            var result = gradientDescent.result;

            using (StreamWriter writer = new StreamWriter(resultPath))
            {
                for (int i = 0; i < result.Count; i++)
                {
                    string value = (i + 1) + ". " + result.ElementAt(i).Item1 + " (" + result.ElementAt(i).Item2.alpha + "; " + result.ElementAt(i).Item2.beta + "; " +
                                   result.ElementAt(i).Item2.gamma + "; " + result.ElementAt(i).Item2.deltaX + "; "
                                   + result.ElementAt(i).Item2.deltaY + "; " + result.ElementAt(i).Item2.deltaZ + ")";
                    writer.WriteLine(value);
                }
            }
        }
Code Example #24
        // fields


        // constructor (public if we use it from RunScript)
        public Particle(GradientDescent GraDesc, Point3d pos)
        {
            // body filled in from the complete version in Code Example #7
            this.GraDesc = GraDesc;
            this.pos     = pos;
            vel          = Vector3d.Zero;
            trail        = new Polyline();
            trail.Add(new Point3d(pos));

            alive = true;
        }
Code Example #25
        public MainWindowModel()
        {
            model = new PlotModel()
            {
                Title = "Ejemplo"
            };
            ScatterSeries series = new ScatterSeries();

            double maxX = 0;

            StreamReader sr = new StreamReader("data.txt");

            string[] initParams = sr.ReadLine().Split(' ');
            int      m          = int.Parse(initParams[0]);
            int      n          = int.Parse(initParams[1]);

            float[][] input = new float[m][];
            for (int i = 0; i < m; i++)
            {
                input[i] = new float[n];
            }

            float[] output = new float[m];

            for (int i = 0; i < m; i++)
            {
                string[] xy = sr.ReadLine().Split(' ');

                double x = double.Parse(xy[0]);
                double y = double.Parse(xy[1]);

                series.Points.Add(new ScatterPoint(x, y));
                output[i] = (float)y;
                for (int j = 0; j < n; j++)
                {
                    input[i][j] = (float)Math.Pow(x, j);
                }
                if (x > maxX)
                {
                    maxX = x;
                }
            }
            sr.Close();

            float[][] outputParam = new float[m][];
            for (int i = 0; i < m; i++)
            {
                outputParam[i] = new float[] { output[i] };
            }

            model.Series.Add(series);

            float[] theta = new float[n];

            //TODO: compute the hypothesis pambi
            GradientDescent.Start(RegressionGradient, RegressionCost, input, outputParam, ref theta, .005f, 0, 5000);
            //
            int steps = 50;

            LineSeries hypothesis = new LineSeries();

            for (int i = 0; i <= steps; i++)
            {
                double[] x = new double[n];

                for (int j = 0; j < n; j++)
                {
                    x[j] = Math.Pow((i * maxX) / steps, j);
                }

                double y = 0;
                for (int j = 0; j < n; j++)
                {
                    y += x[j] * theta[j];
                }
                hypothesis.Points.Add(new DataPoint(x[1], y));
            }

            model.Series.Add(hypothesis);
        }
Code Example #26
File: Form1.cs Project: chriswblake/Optimization
        public Form1()
        {
            InitializeComponent();

            //Main objective function
            Func <DV, D> fOBJ = delegate(DV abx)
            {
                // y = a*ln(x) + b
                D a = abx[0];
                D b = abx[1];
                D x = abx[2];

                //Calculate result
                D r = a * AD.Log(x) + b;
                //D r = a * x*x+ b;
                return r;
            };

            //Original Dataset Variables
            const double aOrig = 3.0;
            const double bOrig = 8.0;

            double[] xOrig = new double[100];
            double[] yOrig = new double[100];

            #region Calculate Original data
            //Calculate original dataset
            for (int x = 0; x < 100; x++)
            {
                xOrig[x] = x + 1;
                DV param = new DV(new D[] { aOrig, bOrig, xOrig[x] });
                yOrig[x] = fOBJ(param);
            }

            //Create series
            Series origSeries = new Series("Original")
            {
                ChartType       = SeriesChartType.Line,
                Color           = Color.Black,
                BorderDashStyle = ChartDashStyle.Solid,
                BorderWidth     = 2,
                ChartArea       = chartResults.ChartAreas[0].Name,
                LegendText      = "Original a=" + aOrig.ToString("F3") + " b=" + bOrig.ToString("F3")
            };
            for (int i = 0; i < xOrig.Length; i++)
            {
                origSeries.Points.AddXY(xOrig[i], yOrig[i]);
            }
            #endregion

            #region Calculate random error version
            double[] yNoisy = new double[100];
            Random   rand   = new Random();

            //Noisy version
            for (int i = 0; i < xOrig.Length; i++)
            {
                double percentModifyA = 0.98 + (1.02 - 0.98) * rand.NextDouble();
                double percentModifyB = 0.98 + (1.02 - 0.98) * rand.NextDouble();
                double percentModifyY = 1.0; // 0.98 + (1.02 - 0.98) * rand.NextDouble();

                DV param = new DV(new D[] { aOrig * percentModifyA, bOrig * percentModifyB, xOrig[i] });
                yNoisy[i] = fOBJ(param) * percentModifyY;
            }

            //Create series
            Series noisySeries = new Series("Noisy")
            {
                ChartType   = SeriesChartType.Point,
                Color       = Color.DarkGray,
                MarkerStyle = MarkerStyle.Star4,
                MarkerSize  = 7,
                BorderWidth = 0,
                ChartArea   = chartResults.ChartAreas[0].Name
            };
            for (int i = 0; i < xOrig.Length; i++)
            {
                noisySeries.Points.AddXY(xOrig[i], yNoisy[i]);
            }

            #endregion

            #region Calculate a and b using error function and gradient descent
            //Error equation
            Func <DV, D> fMSE = delegate(DV ab)
            {
                //Get parameters
                D a = ab[0];
                D b = ab[1];

                //Compute squared error for each point
                D errorSum = 0;
                for (int i = 0; i < xOrig.Length; i++)
                {
                    //Compute object function value
                    DV param = new DV(new D[] { a, b, xOrig[i] });
                    D  yCalc = fOBJ(param);

                    //Check for error
                    if (double.IsNaN(yCalc))
                    {
                        continue;
                    }

                    //Compute error between noisy version and calculated version
                    D err = AD.Pow(yNoisy[i] - yCalc, 2);
                    errorSum += err;
                }

                //Square root of the summed squared error (the fitting objective)
                D mse = AD.Pow(errorSum, 0.5);

                //return results
                return mse;
            };

            //Calculate optimization for a and b
            DV     startPoint = new DV(new D[] { 10, 5 });
            int    calcsF;
            int    calcsGradient;
            DV     result      = GradientDescent.FirstOrder_OneDimensionalMethod(fMSE, startPoint, 0.01, out calcsF, out calcsGradient);//, out calcsHessian);
            double aFit        = result[0];
            double bFit        = result[1];
            DV     paramMseFit = new DV(new D[] { aFit, bFit });
            double mseFit      = fMSE(paramMseFit);

            //Create series
            Series fitSeries = new Series("Fit")
            {
                ChartType   = SeriesChartType.Point,
                Color       = Color.Red,
                MarkerSize  = 3,
                MarkerStyle = MarkerStyle.Circle,
                ChartArea   = chartResults.ChartAreas[0].Name,
                LegendText  = "Fit a=" + aFit.ToString("F3") + " b=" + bFit.ToString("F3") + " MSE=" + mseFit.ToString("F3")
            };
            for (int i = 0; i < xOrig.Length; i++)
            {
                DV param = new DV(new D[] { aFit, bFit, xOrig[i] });
                fitSeries.Points.AddXY(xOrig[i], fOBJ(param));
            }
            #endregion

            //Add series to chart
            chartResults.Series.Clear();
            chartResults.Series.Add(origSeries);
            chartResults.Series.Add(noisySeries);
            chartResults.Series.Add(fitSeries);
        }
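FirstOrder_OneDimensionalMethod pairs a steepest-descent direction with a one-dimensional search along it at each iteration. A stripped-down sketch of the same first-order pattern, with a fixed step in place of the line search and a plain delegate gradient; all names and parameters here are illustrative, not the library's API.

// Bare first-order descent: x <- x - alpha * grad f(x), stopping when the
// gradient norm falls below tol. The fixed alpha replaces the library's
// one-dimensional line search; everything here is illustrative.
static double[] Descend(Func<double[], double[]> gradF, double[] start,
                        double alpha, double tol, int maxIter, out int gradientCalls)
{
    var x = (double[])start.Clone();
    gradientCalls = 0;
    for (int k = 0; k < maxIter; k++)
    {
        double[] g = gradF(x);
        gradientCalls++;

        double normSq = 0;
        for (int i = 0; i < g.Length; i++) normSq += g[i] * g[i];
        if (Math.Sqrt(normSq) < tol) break;              // converged

        for (int i = 0; i < x.Length; i++) x[i] -= alpha * g[i];
    }
    return x;
}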
Code Example #27
File: Network.cs Project: xuan2261/XNet
        public void InitNetwork(ECostType costType, CostSettings costSettings, EOptimizerType optimizerType, OptimizerSettings optimizerSettings)
        {
            Utility.Dims InShape;
            Utility.Dims OutShape;
            Utility.Dims WShape;

            for (int i = 1; i < Layers.Count; i++)
            {
                Data.Data["a" + i.ToString()] = new Matrix(Layers[i].NCount, 1);
                InShape = new Utility.Dims(Layers[i].NCount, 1);

                Data.Data["b" + i.ToString()] = Matrix.RandomMatrix(Layers[i].NCount, 1, 1, EDistrubution.Gaussian);

                OutShape = new Utility.Dims(Layers[i].NCount, 1);

                Data.Data["W" + i.ToString()] = Matrix.RandomMatrix(Layers[i - 1].NCount, Layers[i].NCount, 1, EDistrubution.Gaussian);
                WShape = new Utility.Dims(Layers[i - 1].NCount, Layers[i].NCount);

                Layers[i].SetSettings(new LayerSettings(InShape, OutShape, WShape));
            }

            Data.Data["a0"] = new Matrix(Layers[0].NCount, 1);
            InShape         = new Utility.Dims(Layers[0].NCount, 1);

            Data.Data["b0"] = new Matrix(Layers[0].NCount, 1);
            OutShape        = new Utility.Dims(Layers[0].NCount, 1);

            Data.Data["W0"] = new Matrix(Layers[0].NCount * Layers[1].NCount, Layers[1].NCount);
            WShape          = new Utility.Dims(Layers[0].NCount * Layers[1].NCount, Layers[1].NCount);

            Layers[0].SetSettings(new LayerSettings(InShape, OutShape, WShape));

            switch (costType)
            {
            case ECostType.Invalid:
                throw new ArgumentException("Invalid Cost Function Selected!");

            case ECostType.CrossEntropyCost:
                CostFunction = new CrossEntropyCost((CrossEntropyCostSettings)costSettings);
                break;

            case ECostType.ExponentionalCost:
                CostFunction = new ExponentionalCost((ExponentionalCostSettings)costSettings);
                break;

            case ECostType.GeneralizedKullbackLeiblerDivergence:
                CostFunction = new GeneralizedKullbackLeiblerDivergence((GeneralizedKullbackLeiblerDivergenceSettings)costSettings);
                break;

            case ECostType.HellingerDistance:
                CostFunction = new HellingerDistance((HellingerDistanceSettings)costSettings);
                break;

            case ECostType.ItakuraSaitoDistance:
                CostFunction = new ItakuraSaitoDistance((ItakuraSaitoDistanceSettings)costSettings);
                break;

            case ECostType.KullbackLeiblerDivergence:
                CostFunction = new KullbackLeiblerDivergence((KullbackLeiblerDivergenceSettings)costSettings);
                break;

            case ECostType.QuadraticCost:
                CostFunction = new QuadraticCost((QuadraticCostSettings)costSettings);
                break;

            default:
                throw new ArgumentException("Invalid Cost Function Selected!");
            }

            switch (optimizerType)
            {
            case EOptimizerType.Invalid:
                throw new ArgumentException("Invalid Optimizer Function Selected!");

            case EOptimizerType.AdaDelta:
                OptimizerFunction = new AdaDelta((AdaDeltaSettings)optimizerSettings);
                break;

            case EOptimizerType.AdaGrad:
                OptimizerFunction = new AdaGrad((AdaGradSettings)optimizerSettings);
                break;

            case EOptimizerType.Adam:
                OptimizerFunction = new Adam((AdamSettings)optimizerSettings);
                break;

            case EOptimizerType.Adamax:
                OptimizerFunction = new Adamax((AdamaxSettings)optimizerSettings);
                break;

            case EOptimizerType.GradientDescent:
                OptimizerFunction = new GradientDescent((GradientDescentSettings)optimizerSettings);
                break;

            case EOptimizerType.Momentum:
                OptimizerFunction = new Momentum((MomentumSettings)optimizerSettings);
                break;

            case EOptimizerType.Nadam:
                OptimizerFunction = new Nadam((NadamSettings)optimizerSettings);
                break;

            case EOptimizerType.NesterovMomentum:
                OptimizerFunction = new NesterovMomentum((NesterovMomentumSettings)optimizerSettings);
                break;

            case EOptimizerType.RMSProp:
                OptimizerFunction = new RMSProp((RMSPropSettings)optimizerSettings);
                break;

            default:
                throw new ArgumentException("Invalid Optimizer Function Selected!");
            }
        }
Code Example #28
        public static void Test_Compute()
        {
            List <string> movie_titles = LoadMovies();
            int           num_movies   = movie_titles.Count;

            // Step 1: create my ratings with missing entries
            double[] my_ratings   = new double[num_movies];
            int[]    my_ratings_r = new int[num_movies];
            for (int i = 0; i < num_movies; ++i)
            {
                my_ratings[i] = 0;
            }

            my_ratings[1]   = 4;
            my_ratings[98]  = 2;
            my_ratings[7]   = 3;
            my_ratings[12]  = 5;
            my_ratings[54]  = 4;
            my_ratings[64]  = 5;
            my_ratings[66]  = 3;
            my_ratings[69]  = 5;
            my_ratings[183] = 4;
            my_ratings[226] = 5;
            my_ratings[355] = 5;

            for (int i = 0; i < num_movies; ++i)
            {
                my_ratings_r[i] = my_ratings[i] > 0 ? 1 : 0;
            }

            // Step 2: load the current ratings of all users, i.e., Y and R
            List <List <double> > Y = DblDataTableUtil.LoadDataSet("Y.txt");
            List <List <int> >    R = IntDataTableUtil.LoadDataSet("R.txt");

            int num_users;

            DblDataTableUtil.GetSize(Y, out num_movies, out num_users);


            // Step 3: insert my ratings into the existing Y and R (as the first column)
            num_users++;
            List <RatedItem> records = new List <RatedItem>();

            for (int i = 0; i < num_movies; ++i)
            {
                double[] rec_Y = new double[num_users];
                bool[]   rec_R = new bool[num_users];
                for (int j = 0; j < num_users; ++j)
                {
                    if (j == 0)
                    {
                        rec_Y[j] = my_ratings[i];
                        rec_R[j] = my_ratings_r[i] == 1;
                    }
                    else
                    {
                        rec_Y[j] = Y[i][j - 1];
                        rec_R[j] = R[i][j - 1] == 1;
                    }
                }
                RatedItem rec = new RatedItem(null, rec_Y, rec_R);
                records.Add(rec);
            }

            int num_features = 10;

            double lambda = 10;
            CollaborativeFilteringRS <RatedItem> algorithm = new CollaborativeFilteringRS <RatedItem>();

            algorithm.Stepped += (s, step) =>
            {
                Console.WriteLine("#{0}: {1}", step, s.Cost);
            };
            algorithm.RegularizationLambda    = lambda;
            algorithm.MaxLocalSearchIteration = 100;
            GradientDescent local_search = algorithm.LocalSearch as GradientDescent;

            local_search.Alpha = 0.005;

            double[] Ymean;
            algorithm.DoMeanNormalization(records, out Ymean);

            algorithm.Compute(records, num_features);

            algorithm.UndoMeanNormalization(records, Ymean);

            int userId = 0;
            int topK   = 10;
            List <RatedItem> highest_ranks = algorithm.SelectHigestRanked(userId, records, topK);

            for (int i = 0; i < highest_ranks.Count; ++i)
            {
                RatedItem rec = highest_ranks[i];
                Console.WriteLine("#{0}: ({1}) {2}", i + 1, rec.UserRanks[0], movie_titles[rec.ItemIndex]);
            }
        }
Code Example #29
        // Run gradient descent here
        private void GradientDescentOptimization()
        {
            IsThereDataAvailable = false;
            double[] InitialParameters = new double[(VLoopModes.Count - 1) * 3];
            InitialParameters = GettingInitialValues();

            double[] LearningRate = new double[InitialParameters.Length];

            for (int i = 0; i < InitialParameters.Length / 3; i++)
            {
                LearningRate[0 + i * 3] = TryToDouble(textBoxLRateFreq.Text);
                LearningRate[1 + i * 3] = TryToDouble(textBoxLRateZeta.Text);
                LearningRate[2 + i * 3] = TryToDouble(textBoxLRateMass.Text);
            }

            GD = new GradientDescent(
                InitialParameters,
                LearningRate,
                (int)TryToDouble(textBoxIterations.Text)
                );

            GD.GD_Settings.Delta = TryToDouble(textBoxDelta.Text);
            GD.GD_Settings.Error = new Range {
                MaxValue = 0.0001, MinValue = 0
            };
            GD.Solve(CostFucntion);

            #region Writing Results
            textBoxOPFreq.Clear();
            textBoxOPZeta.Clear();
            textBoxOPMass.Clear();
            for (int i = 0; i < GD.OPTResult.Parameters.Length / 3; i++)
            {
                textBoxOPFreq.Text = textBoxOPFreq.Text + "   " + GD.OPTResult.Parameters[0 + i * 3].ToString("00.000");
                textBoxOPZeta.Text = textBoxOPZeta.Text + "   " + GD.OPTResult.Parameters[1 + i * 3].ToString("0.000");
                textBoxOPMass.Text = textBoxOPMass.Text + "   " + GD.OPTResult.Parameters[2 + i * 3].ToString("0.000");
            }
            textBoxOPError.Text = GD.OPTResult.target[0].ToString();
            #endregion

            IsThereDataAvailable = true;
            PlottingGAData();

            newVLoopModes = new List <Mode>();
            newVLoopModes.Add(new Mode
            {
                Freq = VLoopModes[0].Freq,
                Zeta = VLoopModes[0].Zeta,
                Mass = VLoopModes[0].Mass
            });

            for (int i = 0; i < InitialParameters.Length / 3; i++)
            {
                Mode mode = new Mode();
                mode.Freq = GD.OPTResult.Parameters[0 + i * 3];
                mode.Zeta = GD.OPTResult.Parameters[1 + i * 3];
                mode.Mass = GD.OPTResult.Parameters[2 + i * 3];
                newVLoopModes.Add(mode);
            }


            FRF[] Struct_sim = new FRF[FormMain.Struct_ref.Length];
            for (int i = 0; i < FormMain.Struct_ref.Length; i++)
            {
                Complex Res = Tool.GetStructureResponse(FormMain.Struct_ref[i].Freq, newVLoopModes, "Velocity");
                Struct_sim[i] = new FRF(FormMain.Struct_ref[i].Freq, Res);
            }

            FormMain.DrawResult(Struct_sim);
        }
Code Example #30
        public void TrainNetwork()
        {
            Console.WriteLine("Training Network");

            SampleSet samples = GenerateSamples(category.Compositions);

            ann = RNN();

            ann.rebuffer(samples.maxSequenceLength());
            ann.initializeWeights(rnd);

            GradientDescent trainer = new GradientDescent();
            trainer.setNet(ann);
            trainer.setRnd(rnd);
            trainer.setPermute(false);

            trainer.setTrainingSet(samples);

            trainer.setLearningRate(learningrate);
            trainer.setMomentum(momentum);
            trainer.setEpochs(epochs);

            trainer.train();

            Save();
        }
Code Example #31
 public NeuralNetwork UseGradientDescent(double learningRate)
 {
     Optimizer = new GradientDescent(learningRate);
     return this;
 }
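A fluent setter like this is meant to be chained while configuring the network; a hypothetical usage follows, where the NeuralNetwork constructor and any neighboring builder methods are assumptions.

// Hypothetical usage of the fluent setter above.
var net = new NeuralNetwork().UseGradientDescent(0.01);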