This class determines optimal values for the multiple sigmas of a PNN kernel. This is done using a conjugate gradient (CG) method. Some of the algorithms in this class are based on C++ code from: "Advanced Algorithms for Neural Networks: A C++ Sourcebook" by Timothy Masters, John Wiley & Sons Inc. (Computers); April 3, 1995; ISBN: 0471105880.
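A minimal usage sketch follows, assuming the Encog-style API this listing appears to come from; the BasicPNN and TrainBasicPNN constructor signatures, the namespaces, and the class-index ideal format are assumptions rather than a verified reference.

using System;
using Encog.ML.Data.Basic;                  // BasicMLDataSet (assumed namespace)
using Encog.Neural.PNN;                     // BasicPNN, PNNKernelType, PNNOutputMode (assumed)
using Encog.Neural.Networks.Training.PNN;   // TrainBasicPNN (assumed)

public static class PnnSigmaDemo
{
    public static void Main()
    {
        // XOR posed as a two-class problem: two inputs, two output classes.
        double[][] input = { new[] {0.0, 0.0}, new[] {1.0, 0.0},
                             new[] {0.0, 1.0}, new[] {1.0, 1.0} };
        // In classification mode the ideal vector is assumed to hold the class index.
        double[][] ideal = { new[] {0.0}, new[] {1.0},
                             new[] {1.0}, new[] {0.0} };
        var training = new BasicMLDataSet(input, ideal);

        // Gaussian-kernel PNN in classification mode.
        var network = new BasicPNN(PNNKernelType.Gaussian,
                                   PNNOutputMode.Classification, 2, 2);

        // One call to Iteration() runs the whole sigma search shown below:
        // a global range search for a starting value, then CG-style refinement.
        var train = new TrainBasicPNN(network, training);
        train.Iteration();

        Console.WriteLine("Error: " + network.Error);
        Console.WriteLine("Sigma[0]: " + network.Sigma[0]);
    }
}

A single Iteration() call is enough here because the method refines every sigma to convergence internally rather than taking one incremental step.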
        /// <summary>
        /// Perform one training iteration: choose starting sigmas (the network's
        /// current sigmas if it is already trained, otherwise the best value from
        /// a global range search) and then refine them with a derivative-based
        /// minimization.
        /// </summary>
        ///
        public override sealed void Iteration()
        {
            if (!_samplesLoaded)
            {
                _network.Samples = new BasicMLDataSet(_training);
                _samplesLoaded = true;
            }

            var globalMinimum = new GlobalMinimumSearch();
            var dermin = new DeriveMinimum();

            // k sizes the _v and _w work arrays: one block per output for
            // classification, one extra block for other output modes.
            int k;

            if (_network.OutputMode == PNNOutputMode.Classification)
            {
                k = _network.OutputCount;
            }
            else
            {
                k = _network.OutputCount + 1;
            }

            // Scratch buffers: _dsqr and the local arrays hold one entry per
            // input sigma, while _v and _w hold k blocks of that size.
            _dsqr = new double[_network.InputCount];
            _v = new double[_network.InputCount*k];
            _w = new double[_network.InputCount*k];

            var x = new double[_network.InputCount];
            var bs = new double[_network.InputCount];
            var direc = new double[_network.InputCount];
            var g = new double[_network.InputCount];
            var h = new double[_network.InputCount];
            var dwk2 = new double[_network.InputCount];

            // A previously trained network supplies its current sigmas as the
            // starting point for the refinement.
            if (_network.Trained)
            {
                for (int i = 0; i < _network.InputCount; i++)
                {
                    x[i] = _network.Sigma[i];
                }
                globalMinimum.Y2 = 1.0e30d;
            }
            else
            {
                // Otherwise run a coarse global search over [_sigmaLow, _sigmaHigh]
                // and start every sigma from the best single value found.
                globalMinimum.FindBestRange(_sigmaLow, _sigmaHigh,
                                            _numSigmas, true, _maxError, this);

                for (int i = 0; i < _network.InputCount; i++)
                {
                    x[i] = globalMinimum.X2;
                }
            }

            // Refine all sigmas together with the derivative-driven minimizer;
            // the returned value is the best error found.
            double d = dermin.Calculate(32767, _maxError, 1.0e-8d,
                                        _minImprovement, this, _network.InputCount, x,
                                        globalMinimum.Y2, bs, direc, g, h, dwk2);
            globalMinimum.Y2 = d;

            // Copy the optimized sigmas back into the network.
            for (int i = 0; i < _network.InputCount; i++)
            {
                _network.Sigma[i] = x[i];
            }

            _network.Error = Math.Abs(globalMinimum.Y2);
            _network.Trained = true; // Tell other routines net is trained

        }