Example No. 1
    /// <summary>
    /// Processes the spectra in matrix xMatrix for prediction.
    /// </summary>
    /// <param name="xMatrix">The matrix of spectra. Each spectrum is a row of the matrix.</param>
    /// <param name="xMean">Not used.</param>
    /// <param name="xScale">Not used.</param>
    /// <param name="regionstart">Starting index of the region to process.</param>
    /// <param name="regionend">End index of the region to process.</param>
    public void ProcessForPrediction(IMatrix xMatrix, IROVector xMean, IROVector xScale, int regionstart, int regionend)
    {
      int regionlength = regionend - regionstart;

      IVector helpervector = VectorMath.ToVector(new double[regionlength]);
      for (int n = 0; n < xMatrix.Rows; n++)
      {
        IVector vector = MatrixMath.RowToVector(xMatrix, n, regionstart, regionlength);
        _filter.Apply(vector, helpervector);
        VectorMath.Copy(helpervector, vector);
      }
    }
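
A minimal sketch, not part of the library code above, of how this per-region overload might be driven from a regions vector like the one documented in Example No. 2. It assumes the regions vector lists the start index of every region, including the first; the helper name and the fallback to xMatrix.Columns for the end of the last region are illustrative assumptions.

    /// <summary>
    /// Hypothetical helper: applies ProcessForPrediction to each region given by its start index.
    /// </summary>
    public void ProcessAllRegionsForPrediction(IMatrix xMatrix, IROVector xMean, IROVector xScale, int[] regions)
    {
      for (int r = 0; r < regions.Length; r++)
      {
        int regionstart = regions[r];
        // a region ends where the next one starts; the last region runs to the end of the spectrum
        int regionend = (r + 1 < regions.Length) ? regions[r + 1] : xMatrix.Columns;
        ProcessForPrediction(xMatrix, xMean, xScale, regionstart, regionend);
      }
    }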
Example No. 2
        /// <summary>
        /// Processes the spectra in matrix xMatrix.
        /// </summary>
        /// <param name="xMatrix">The matrix of spectra. Each spectrum is a row of the matrix.</param>
        /// <param name="xMean">Output: On return, contains the ensemble mean of the spectra.</param>
        /// <param name="xScale">Not used.</param>
        /// <param name="regions">Vector of spectal regions. Each element is the index of the start of a new region.</param>
        public override void Process(IMatrix xMatrix, IVector xMean, IVector xScale, int[] regions)
        {
            // Note: we deviate slightly from the literature here:
            // we repeat the multiple scattering correction (MSC) until the xMean vector is self-consistent.
            // In detail: after each MSC correction we calculate the new xMean and compare it with the xMean
            // of the previous step. We repeat until the deviation of xMean from xMean_before is
            // reasonably small.
            // The reason for this deviation is that we don't want to store two separate xMean vectors: one used
            // for MSC (the x in the linear regression) and another to center the MSC-corrected spectra.

            IVector xMeanBefore = null;
            double  threshold   = 1E-14 * MatrixMath.SumOfSquares(xMatrix) / xMatrix.Rows;

            for (int cycle = 0; cycle < 50; cycle++)
            {
                // 1.) Get the mean spectrum
                // we want to have the mean of each matrix column, but not center the matrix now, since this
                // is done later on
                int cols = xMatrix.Columns;
                int rows = xMatrix.Rows;
                for (int n = 0; n < cols; n++)
                {
                    double sum = 0;
                    for (int i = 0; i < rows; i++)
                    {
                        sum += xMatrix[i, n];
                    }
                    xMean[n] = sum / rows;
                }

                // 2.) Process the spectra
                ProcessForPrediction(xMatrix, xMean, xScale, regions);

                // 3.) Compare the xMean with the xMean_before
                if (xMeanBefore == null)
                {
                    xMeanBefore = VectorMath.CreateExtensibleVector(xMean.Length);
                    VectorMath.Copy(xMean, xMeanBefore);
                }
                else
                {
                    double sumdiffsquare = VectorMath.SumOfSquaredDifferences(xMean, xMeanBefore);
                    if (sumdiffsquare < threshold)
                    {
                        break;
                    }
                    else
                    {
                        VectorMath.Copy(xMean, xMeanBefore);
                    }
                }
            }
        }
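
For clarity, here is the stopping rule of the loop above restated as a small self-contained check on plain arrays (the helper name is illustrative, not part of the library): the iteration stops once the squared change of the mean spectrum falls below a threshold that scales with the average sum of squares per spectrum.

        /// <summary>
        /// Hypothetical helper restating the convergence test of the MSC loop above.
        /// </summary>
        private static bool MeanIsSelfConsistent(double[] xMean, double[] xMeanBefore, double sumOfSquaresOfMatrix, int numberOfSpectra)
        {
            // same threshold as above: 1E-14 times the average sum of squares per spectrum
            double threshold = 1E-14 * sumOfSquaresOfMatrix / numberOfSpectra;

            double sumdiffsquare = 0;
            for (int i = 0; i < xMean.Length; i++)
            {
                double d = xMean[i] - xMeanBefore[i];
                sumdiffsquare += d * d;
            }
            return sumdiffsquare < threshold;
        }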
Example No. 3
        public void Execute()
        {
            using (var suspendToken = _destinationTable.SuspendGetToken())
            {
                var numRows = _sourceMatrix.RowCount;
                var numCols = _sourceMatrix.ColumnCount;

                int columnNumber = 0;

                var dataCols = _destinationTable.DataColumns;

                foreach (var tuple in _rowHeaderColumns)
                {
                    var col = dataCols.EnsureExistenceAtPositionStrictly<DoubleColumn>(columnNumber, tuple.Item2, GetIndependendVariableColumnKind(columnNumber), 0);
                    col.AssignVector = tuple.Item1;
                    col.CutToMaximumLength(numRows);
                    ++columnNumber;
                }

                for (int i = 0; i < _sourceMatrix.ColumnCount; ++i)
                {
                    string columnName;
                    if (null != ColumnNameGenerator)
                    {
                        columnName = ColumnNameGenerator(i);
                    }
                    else
                    {
                        columnName = string.Format("{0}{1}", string.IsNullOrEmpty(_columnNameBase) ? DefaultColumnBaseName : _columnNameBase, i);
                    }

                    var col = dataCols.EnsureExistenceAtPositionStrictly<DoubleColumn>(columnNumber, columnName, ColumnKind.V, 0);
                    col.AssignVector = MatrixMath.ColumnToROVector(_sourceMatrix, i);
                    col.CutToMaximumLength(numRows);
                    ++columnNumber;
                }

                // property columns
                var numXDataCols     = _rowHeaderColumns.Count;
                int propColumnNumber = 0;
                var propCols         = _destinationTable.PropertyColumns;
                foreach (var tuple in _columnHeaderColumns)
                {
                    var col = propCols.EnsureExistenceAtPositionStrictly<DoubleColumn>(propColumnNumber, tuple.Item2, GetIndependendVariableColumnKind(propColumnNumber), 0);
                    VectorMath.Copy(tuple.Item1, col.ToVector(numXDataCols, _sourceMatrix.ColumnCount));
                    col.CutToMaximumLength(numXDataCols + _sourceMatrix.ColumnCount);
                    ++propColumnNumber;
                }

                suspendToken.Dispose();
            }
        }
Example No. 4
        /// <summary>
        /// Processes the spectra in matrix xMatrix for prediction.
        /// </summary>
        /// <param name="xMatrix">The matrix of spectra. Each spectrum is a row of the matrix.</param>
        /// <param name="xMean">Not used.</param>
        /// <param name="xScale">Not used.</param>
        /// <param name="regionstart">Starting index of the region to process.</param>
        /// <param name="regionend">End index of the region to process.</param>
        public void ProcessForPrediction(IMatrix<double> xMatrix, IReadOnlyList<double> xMean, IReadOnlyList<double> xScale, int regionstart, int regionend)
        {
            int regionlength = regionend - regionstart;

            var helpervector = VectorMath.ToVector(new double[regionlength]);

            for (int n = 0; n < xMatrix.RowCount; n++)
            {
                var vector = MatrixMath.RowToVector(xMatrix, n, regionstart, regionlength);
                _filter.Apply(vector, helpervector);
                VectorMath.Copy(helpervector, vector);
            }
        }
Example No. 5
        public static void GenerateValues(MultivariateLinearFitParameters parameters, LinearFitBySvd fit)
        {
            DataColumn dependentColumn = parameters.Table[parameters.SelectedDataColumns[parameters.DependentColumnIndexIntoSelection]];

            if (parameters.GenerateRegressionValues)
            {
                var col = new DoubleColumn();
                VectorMath.Copy(VectorMath.ToROVector(fit.PredictedValues), DataColumnWrapper.ToVector(col, parameters.SelectedDataRows));
                parameters.Table.Add(col, dependentColumn.Name + "(predicted)", ColumnKind.V, parameters.Table.GetColumnGroup(dependentColumn));
            }

            if (parameters.GenerateResidualValues)
            {
                var col = new DoubleColumn();
                VectorMath.Copy(VectorMath.ToROVector(fit.ResidualValues), DataColumnWrapper.ToVector(col, parameters.SelectedDataRows));
                parameters.Table.Add(col, dependentColumn.Name + "(residual)", ColumnKind.V, parameters.Table.GetColumnGroup(dependentColumn));
            }
        }
Example No. 6
        /// <summary>
        /// Constructs an Akima bivariate spline.
        /// </summary>
        /// <param name="x">ARRAY OF DIMENSION LX STORING THE X COORDINATES OF INPUT GRID POINTS (IN ASCENDING ORDER)</param>
        /// <param name="y">ARRAY OF DIMENSION LY STORING THE Y COORDINATES OF INPUT GRID POINTS (IN ASCENDING ORDER)</param>
        /// <param name="z">DOUBLY-DIMENSIONED ARRAY OF DIMENSION (LX,LY) STORING THE VALUES OF THE FUNCTION (Z VALUES) AT INPUT GRID POINTS</param>
        /// <param name="copyDataLocally">If true, the data where cloned before stored here in this instance. If false, the data
        /// are stored directly. Make sure then, that the data are not changed outside.</param>
        public BivariateAkimaSpline(IReadOnlyList<double> x, IReadOnlyList<double> y, IROMatrix<double> z, bool copyDataLocally)
        {
            if (copyDataLocally)
            {
                _myX = VectorMath.ToVector(new double[x.Count]);
                VectorMath.Copy(x, (IVector<double>)_myX);

                _myY = VectorMath.ToVector(new double[y.Count]);
                VectorMath.Copy(y, (IVector<double>)_myY);

                _myZ = new MatrixMath.LeftSpineJaggedArrayMatrix<double>(z.RowCount, z.ColumnCount); // size taken from z, not from the still-unassigned _myZ
                MatrixMath.Copy(z, (IMatrix<double>)_myZ);
            }
            else
            {
                _myX = x;
                _myY = y;
                _myZ = z;
            }
        }
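
A short usage sketch for the copyDataLocally flag (the variables x, y and z are assumed to be prepared by the caller and are hypothetical): with true the spline keeps its own copies, so the caller may reuse or modify the buffers afterwards; with false the spline only references the data, which avoids the copies but requires the caller to leave them unchanged.

        var splineWithCopy = new BivariateAkimaSpline(x, y, z, copyDataLocally: true);      // safe to modify x, y, z afterwards
        var splineByReference = new BivariateAkimaSpline(x, y, z, copyDataLocally: false);  // x, y, z must not be modified afterwards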
Example No. 7
        /// <summary>
        /// Constructs an Akima bivariate spline.
        /// </summary>
        /// <param name="x">ARRAY OF DIMENSION LX STORING THE X COORDINATES OF INPUT GRID POINTS (IN ASCENDING ORDER)</param>
        /// <param name="y">ARRAY OF DIMENSION LY STORING THE Y COORDINATES OF INPUT GRID POINTS (IN ASCENDING ORDER)</param>
        /// <param name="z">DOUBLY-DIMENSIONED ARRAY OF DIMENSION (LX,LY) STORING THE VALUES OF THE FUNCTION (Z VALUES) AT INPUT GRID POINTS</param>
        /// <param name="copyDataLocally">If true, the data where cloned before stored here in this instance. If false, the data
        /// are stored directly. Make sure then, that the data are not changed outside.</param>
        public BivariateAkimaSpline(IROVector x, IROVector y, IROMatrix z, bool copyDataLocally)
        {
            if (copyDataLocally)
            {
                _myX = VectorMath.ToVector(new double[x.Length]);
                VectorMath.Copy(x, (IVector)_myX);

                _myY = VectorMath.ToVector(new double[y.Length]);
                VectorMath.Copy(y, (IVector)_myY);

                _myZ = new MatrixMath.BEMatrix(z.Rows, z.Columns); // size taken from z, not from the still-unassigned _myZ
                MatrixMath.Copy(z, (IMatrix)_myZ);
            }
            else
            {
                _myX = x;
                _myY = y;
                _myZ = z;
            }
        }
Example No. 8
        private void EvaluateInternally(double? tout, out double t_result, double[] result)
        {
            if (_initializationState == InitializationState.NotInitialized) // not initialized so far
            {
                _initializationState = InitializationState.InitialValueReturned;

                if (null == tout)
                {
                    last_tout = t_result = currstate._tn;
                    currstate._zn.CopyColumn(0, result);
                    return;
                }
            }
            else if (_initializationState == InitializationState.Initialized)
            {
                // we have to duplicate some of the code from below here;
                // this is not good style, but a goto statement jumping into another code block will not work here.

                if (tout.HasValue)
                {
                    // Output data, but only if (i) we have requested a certain time point,
                    // and (ii) we can interpolate this point from the previous point and the current point
                    if (currstate._tn <= tout.Value && tout.Value <= currstate._tn + currstate._dt)
                    {
                        // VectorMath.Lerp(tout.Value, currstate.tn, xout, currstate.tn + currstate.dt, currstate.xn, result);
                        currstate.EvaluateYAtTime(tout.Value, result);
                        last_tout = t_result = tout.Value;
                        return;
                    }
                }
                else
                {
                    if (currstate._tn == last_tout)
                    {
                        last_tout = t_result = currstate._tn + currstate._dt;
                        VectorMath.Copy(currstate._xn, result);
                        return;
                    }
                }

                VectorMath.Copy(currstate._xn, xout); // save x of this step

                currstate._tn = currstate._tn + currstate._dt;

                if (opts.MaxStep < double.MaxValue)
                {
                    r = Math.Min(r, opts.MaxStep / currstate._dt);
                }

                if (opts.MinStep > 0)
                {
                    r = Math.Max(r, opts.MinStep / currstate._dt);
                }

                r = Math.Min(r, opts.MaxScale);
                r = Math.Max(r, opts.MinScale);

                currstate._dt = currstate._dt * r;

                currstate.Rescale(r);
            }

            _initializationState = InitializationState.Initialized;
            //Can produce any number of solution points
            while (true)
            {
                // Reset fail flag
                isIterationFailed = false;

                // Predictor step
                _zn_saved.CopyFrom(currstate._zn);
                currstate.ZNew();
                VectorMath.FillWith(currstate._en, 0); // TODO: find out whether this statement is necessary
                currstate._zn.CopyColumn(0, currstate._xn);

                // Corrector step
                currstate.PredictorCorrectorScheme(ref isIterationFailed, f, _denseJacobianEvaluation, _sparseJacobianEvaluation, opts);

                if (isIterationFailed)                 // If iterations are not finished - bad convergence
                {
                    currstate._zn.CopyFrom(_zn_saved); // copy saved state back
                    currstate._nsuccess = 0;
                    currstate.DivideStepBy2();
                }
                else // Iterations finished, i.e. did not fail
                {
                    r = Math.Min(1.1d, Math.Max(0.2d, currstate._rFactor));

                    if (currstate._delta >= 1.0d)
                    {
                        if (opts.MaxStep < double.MaxValue)
                        {
                            r = Math.Min(r, opts.MaxStep / currstate._dt);
                        }

                        if (opts.MinStep > 0)
                        {
                            r = Math.Max(r, opts.MinStep / currstate._dt);
                        }

                        r = Math.Min(r, opts.MaxScale);
                        r = Math.Max(r, opts.MinScale);

                        currstate._dt = currstate._dt * r; // Decrease step
                        currstate.Rescale(r);
                    }
                    else // Iteration finished successfully
                    {
                        // Output data
                        if (tout.HasValue)
                        {
                            if (currstate._tn <= tout.Value && tout.Value <= currstate._tn + currstate._dt)
                            {
                                // VectorMath.Lerp(tout.Value, currstate.tn, xout, currstate.tn + currstate.dt, currstate.xn, result);
                                currstate.EvaluateYAtTime(tout.Value, result);
                                t_result = tout.Value;

                                return;
                            }
                        }
                        else
                        {
                            VectorMath.Copy(currstate._xn, result);
                            t_result = last_tout = currstate._tn + currstate._dt;
                            return;
                        }

                        VectorMath.Copy(currstate._xn, xout);

                        currstate._tn = currstate._tn + currstate._dt;

                        if (opts.MaxStep < double.MaxValue)
                        {
                            r = Math.Min(r, opts.MaxStep / currstate._dt);
                        }

                        if (opts.MinStep > 0)
                        {
                            r = Math.Max(r, opts.MinStep / currstate._dt);
                        }

                        r = Math.Min(r, opts.MaxScale);
                        r = Math.Max(r, opts.MinScale);

                        currstate._dt = currstate._dt * r;
                        currstate.Rescale(r);
                    }
                }
            }
        }
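
The same step-size clamping block occurs three times in the method above. As a reading aid, here is that logic factored into a hypothetical helper (a sketch only, assuming the GearsBDFOptions properties used above): the raw scale factor r is limited so that the new step dt * r stays within [MinStep, MaxStep] and r itself stays within [MinScale, MaxScale].

        private static double ClampStepScale(double r, double dt, GearsBDFOptions opts)
        {
            if (opts.MaxStep < double.MaxValue)
            {
                r = Math.Min(r, opts.MaxStep / dt); // keep dt * r <= MaxStep
            }

            if (opts.MinStep > 0)
            {
                r = Math.Max(r, opts.MinStep / dt); // keep dt * r >= MinStep
            }

            r = Math.Min(r, opts.MaxScale); // never grow the step by more than MaxScale
            r = Math.Max(r, opts.MinScale); // never shrink the step by more than MinScale
            return r;
        }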
Example No. 9
            /// <summary>
            /// Execute predictor-corrector scheme for Nordsieck's method
            /// </summary>
            /// <param name="flag"></param>
            /// <param name="f">Evaluation of the deriatives. First argument is time, second arg are the state variables, and 3rd arg is the array to accomodate the derivatives.</param>
            /// <param name="denseJacobianEvaluation">Evaluation of the jacobian.</param>
            /// <param name="sparseJacobianEvaluation">Evaluation of the jacobian as a sparse matrix. Either this or the previous arg must be valid.</param>
            /// <param name="opts">current options</param>
            /// <returns>en - current error vector</returns>
            internal void PredictorCorrectorScheme(
                ref bool flag,
                Action<double, double[], double[]> f,
                Func<double, double[], IROMatrix<double>> denseJacobianEvaluation,
                Func<double, double[], SparseDoubleMatrix> sparseJacobianEvaluation,
                GearsBDFOptions opts
                )
            {
                NordsieckState currstate = this;
                NordsieckState newstate  = this;
                int            n         = currstate._xn.Length;

                VectorMath.Copy(currstate._en, ecurr);
                VectorMath.Copy(currstate._xn, xcurr);
                var x0 = currstate._xn;

                MatrixMath.Copy(currstate._zn, zcurr); // zcurr now is old nordsieck matrix
                var qcurr = currstate._qn;             // current degree
                var qmax  = currstate._qmax;           // max degree
                var dt    = currstate._dt;
                var t     = currstate._tn;

                MatrixMath.Copy(currstate._zn, z0); // save Nordsieck matrix

                //Tolerance computation factors
                double Cq  = Math.Pow(qcurr + 1, -1.0);
                double tau = 1.0 / (Cq * Factorial(qcurr) * l[qcurr - 1][qcurr]);

                int count = 0;

                double Dq = 0.0, DqUp = 0.0, DqDown = 0.0;
                double delta = 0.0;

                //Scaling factors for the step size changing
                //with new method order q' = q, q + 1, q - 1, respectively
                double rSame, rUp, rDown;

                if (null != denseJacobianEvaluation)
                {
                    var J = denseJacobianEvaluation(t + dt, xcurr);
                    if (J.GetType() != P?.GetType())
                    {
                        AllocatePMatrixForJacobian(J);
                    }

                    do
                    {
                        MatrixMath.MapIndexed(J, dt * b[qcurr - 1], (i, j, aij, factor) => (i == j ? 1 : 0) - aij * factor, P, Zeros.AllowSkip); // P = Identity - J*dt*b[qcurr-1]
                        VectorMath.Copy(xcurr, xprev);
                        f(t + dt, xcurr, ftdt);
                        MatrixMath.CopyColumn(z0, 1, colExtract);                                                       // 1st derivative/dt
                        VectorMath.Map(ftdt, colExtract, ecurr, dt, (ff, c, e, local_dt) => local_dt * ff - c - e, gm); // gm = dt * f(t + dt, xcurr) - z0.GetColumn(1) - ecurr;
                        gaussSolver.SolveDestructive(P, gm, tmpVec1);
                        VectorMath.Add(ecurr, tmpVec1, ecurr);                                                          //	ecurr = ecurr + P.SolveGE(gm);
                        VectorMath.Map(x0, ecurr, b[qcurr - 1], (x, e, local_b) => x + e * local_b, xcurr);             //	xcurr = x0 + b[qcurr - 1] * ecurr;

                        // The update has fewer rows (ecurr.Length) and columns (l[qcurr - 1].Length) than zcurr
                        int M_Rows    = ecurr.Length;
                        int M_Columns = l[qcurr - 1].Length;
                        // so "expand" the update to the full size of zcurr
                        MatrixMath.MapIndexed(z0, (i, j, z) => z + (i < M_Rows && j < M_Columns ? ecurr[i] * l[qcurr - 1][j] : 0.0d), zcurr);

                        Dq = ToleranceNorm(ecurr, opts.RelativeTolerance, opts.AbsoluteTolerance, xprev);
                        var factor_deltaE = (1.0 / (qcurr + 2) * l[qcurr - 1][qcurr - 1]);
                        VectorMath.Map(ecurr, currstate._en, factor_deltaE, (e, c, factor) => (e - c) * factor, deltaE); // deltaE = (ecurr - currstate.en)*(1.0 / (qcurr + 2) * l[qcurr - 1][qcurr - 1])

                        DqUp = ToleranceNorm(deltaE, opts.RelativeTolerance, opts.AbsoluteTolerance, xcurr);
                        zcurr.CopyColumn(qcurr - 1, colExtract);
                        DqDown = ToleranceNorm(colExtract, opts.RelativeTolerance, opts.AbsoluteTolerance, xcurr);
                        delta  = Dq / (tau / (2 * (qcurr + 2)));
                        count++;
                    } while (delta > 1.0d && count < opts.NumberOfIterations);
                }
                else if (null != sparseJacobianEvaluation)
                {
                    SparseDoubleMatrix J = sparseJacobianEvaluation(t + dt, xcurr);
                    var P = new SparseDoubleMatrix(J.RowCount, J.ColumnCount);

                    do
                    {
                        J.MapSparseIncludingDiagonal((x, i, j) => (i == j ? 1 : 0) - x * dt * b[qcurr - 1], P);
                        VectorMath.Copy(xcurr, xprev);
                        f(t + dt, xcurr, ftdt);
                        MatrixMath.CopyColumn(z0, 1, colExtract);
                        VectorMath.Map(ftdt, colExtract, ecurr, (ff, c, e) => dt * ff - c - e, gm); // gm = dt * f(t + dt, xcurr) - z0.GetColumn(1) - ecurr;
                        gaussSolver.SolveDestructive(P, gm, tmpVec1);
                        VectorMath.Add(ecurr, tmpVec1, ecurr);                                      //	ecurr = ecurr + P.SolveGE(gm);
                        VectorMath.Map(x0, ecurr, (x, e) => x + e * b[qcurr - 1], xcurr);           // xcurr = x0 + b[qcurr - 1] * ecurr;

                        // The update has fewer rows (ecurr.Length) and columns (l[qcurr - 1].Length) than zcurr
                        int M_Rows    = ecurr.Length;
                        int M_Columns = l[qcurr - 1].Length;
                        // so "expand" the update to the full size of zcurr
                        MatrixMath.MapIndexed(z0, (i, j, z) => z + (i < M_Rows && j < M_Columns ? ecurr[i] * l[qcurr - 1][j] : 0.0d), zcurr);

                        Dq = ToleranceNorm(ecurr, opts.RelativeTolerance, opts.AbsoluteTolerance, xprev);
                        var factor_deltaE = (1.0 / (qcurr + 2) * l[qcurr - 1][qcurr - 1]);
                        VectorMath.Map(ecurr, currstate._en, (e, c) => (e - c) * factor_deltaE, deltaE); // deltaE = (ecurr - currstate.en)*(1.0 / (qcurr + 2) * l[qcurr - 1][qcurr - 1])

                        DqUp   = ToleranceNorm(deltaE, opts.RelativeTolerance, opts.AbsoluteTolerance, xcurr);
                        DqDown = ToleranceNorm(zcurr.GetColumn(qcurr - 1), opts.RelativeTolerance, opts.AbsoluteTolerance, xcurr);
                        delta  = Dq / (tau / (2 * (qcurr + 2)));
                        count++;
                    } while (delta > 1.0d && count < opts.NumberOfIterations);
                }
                else // neither denseJacobianEvaluation nor sparseJacobianEvaluation valid
                {
                    throw new ArgumentNullException(nameof(denseJacobianEvaluation), "Either denseJacobianEvaluation or sparseJacobianEvaluation must be set!");
                }

                //======================================

                var nsuccess = count < opts.NumberOfIterations ? currstate._nsuccess + 1 : 0;

                if (count < opts.NumberOfIterations)
                {
                    flag = false;
                    MatrixMath.Copy(zcurr, newstate._zn);
                    zcurr.CopyColumn(0, newstate._xn);
                    VectorMath.Copy(ecurr, newstate._en);
                }
                else
                {
                    flag = true;
                    // MatrixMath.Copy(currstate.zn, newstate.zn); // null operation since currstate and newstate are identical
                    currstate._zn.CopyColumn(0, newstate._xn);
                    VectorMath.Copy(currstate._en, newstate._en); // null operation since currstate and newstate are identical
                }

                //Compute step size scaling factors
                rUp = 0.0;

                if (currstate._qn < currstate._qmax)
                {
                    rUp = 1.0 / 1.4 / (Math.Pow(DqUp, 1.0 / (qcurr + 2)) + 1e-6);
                }

                rSame = 1.0 / 1.2 / (Math.Pow(Dq, 1.0 / (qcurr + 1)) + 1e-6);

                rDown = 0.0;

                if (currstate._qn > 1)
                {
                    rDown = 1.0 / 1.3 / (Math.Pow(DqDown, 1.0 / (qcurr)) + 1e-6);
                }

                //======================================
                _nsuccess = nsuccess >= _qn ? 0 : nsuccess;
                //Step size scale operations

                if (rSame >= rUp)
                {
                    if (rSame <= rDown && nsuccess >= _qn && _qn > 1)
                    {
                        _qn = _qn - 1;
                        _Dq = DqDown;

                        for (int i = 0; i < n; i++)
                        {
                            for (int j = newstate._qn + 1; j < qmax + 1; j++)
                            {
                                _zn[i, j] = 0.0;
                            }
                        }
                        nsuccess = 0;
                        _rFactor = rDown;
                    }
                    else
                    {
                        // _qn = _qn;
                        _Dq      = Dq;
                        _rFactor = rSame;
                    }
                }
                else
                {
                    if (rUp >= rDown)
                    {
                        if (rUp >= rSame && nsuccess >= _qn && _qn < _qmax)
                        {
                            _qn      = _qn + 1;
                            _Dq      = DqUp;
                            _rFactor = rUp;
                            nsuccess = 0;
                        }
                        else
                        {
                            // _qn = _qn;
                            _Dq      = Dq;
                            _rFactor = rSame;
                        }
                    }
                    else
                    {
                        if (nsuccess >= _qn && _qn > 1)
                        {
                            _qn = _qn - 1;
                            _Dq = DqDown;

                            for (int i = 0; i < n; i++)
                            {
                                for (int j = newstate._qn + 1; j < qmax + 1; j++)
                                {
                                    _zn[i, j] = 0.0;
                                }
                            }
                            nsuccess = 0;
                            _rFactor = rDown;
                        }
                        else
                        {
                            // _qn = _qn;
                            _Dq      = Dq;
                            _rFactor = rSame;
                        }
                    }
                }

                _dt = dt;
                _tn = t;
            }
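
As a reading aid, the corrector iteration above (dense-Jacobian branch) can be summarized in formulas that restate its inline comments, with q the current order, b[q-1] and l[q-1] the method coefficients as indexed in the code, e the error vector, z_0 the saved Nordsieck matrix and \Delta t the step size:

    P = I - \Delta t \, b_{q-1} J, \qquad
    P \, \delta = \Delta t \, f(t + \Delta t,\, x) - (z_0)_{\cdot,1} - e, \qquad
    e \leftarrow e + \delta, \qquad
    x \leftarrow x_0 + b_{q-1} \, e, \qquad
    z \leftarrow z_0 + e \, l_{q-1}^{\mathsf T}

The loop repeats while the scaled error measure Dq / (tau / (2 (q + 2))) stays above 1 and the iteration count stays below opts.NumberOfIterations.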