Example #1
    /// <summary>
    /// Warp a matrix M as R * M * R^T
    /// </summary>
    public static MatrixXD WarpMatrix(Quaternion R, MatrixXD M)
    {
        MatrixXD mout = new DenseMatrixXD(3, 3);

        // First pass: rotate each row, giving M * R^T.
        mout.SetRow(0, ToVectorXD(R * ToVector3(M.Row(0))));
        mout.SetRow(1, ToVectorXD(R * ToVector3(M.Row(1))));
        mout.SetRow(2, ToVectorXD(R * ToVector3(M.Row(2))));

        // Second pass: rotate each column of the intermediate result, giving R * M * R^T.
        mout.SetColumn(0, ToVectorXD(R * ToVector3(mout.Column(0))));
        mout.SetColumn(1, ToVectorXD(R * ToVector3(mout.Column(1))));
        mout.SetColumn(2, ToVectorXD(R * ToVector3(mout.Column(2))));
        return mout;
    }
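A minimal usage sketch (hypothetical: it assumes this helper sits in a Unity script where MatrixXD/DenseMatrixXD alias MathNet's Matrix<double>/DenseMatrix, Quaternion is UnityEngine.Quaternion, and ToVector3/ToVectorXD are the surrounding conversion helpers):

    // Hypothetical example: express a body-space inertia tensor in world space.
    Quaternion R = Quaternion.AngleAxis(90.0f, Vector3.up);    // body-to-world rotation
    MatrixXD Ibody = new DenseMatrixXD(3, 3);
    Ibody[0, 0] = 1.0; Ibody[1, 1] = 2.0; Ibody[2, 2] = 3.0;   // diagonal inertia
    MatrixXD Iworld = WarpMatrix(R, Ibody);                    // = R * Ibody * R^T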
Example #2
        private static DenseMatrix Solve(DenseMatrix matrix, ICollection<int> vectorJ, int n)
        {
            var matrixC = DenseMatrix.CreateIdentity(n);
            var matrixB = DenseMatrix.CreateIdentity(n);
            DenseMatrix matrixE = DenseMatrix.CreateIdentity(n);
            var vectorSk = new int[n];

            for (var i = 0; i < n; i++)
            {
                var k = 0;

                //step 1
                foreach (var j in vectorJ)
                {
                    double alphaJ = Math.Abs(matrixE.Column(i) * matrixB * matrix.Column(j));

                    // take the first non-zero alpha
                    if (ZeroComparer.IsBiggerThanZero(alphaJ))
                    {
                        k = j;
                        vectorSk[j] = i;
                        break;
                    }

                    // determinant is zero
                    if (j == vectorJ.Last())
                    {
                        return null;
                    }
                }

                // step 2
                vectorJ.Remove(k);
                matrixC.SetColumn(i, matrix.Column(k));

                var z = matrixB * matrixC.Column(i);
                var zk = z[i];
                z[i] = -1;
                var d = -1 / zk * z;
                var matrixD = DenseMatrix.CreateIdentity(n);
                matrixD.SetColumn(i, d);

                matrixB = matrixD * matrixB;
            }

            var listOfRows = vectorSk.Select(d => matrixB.Row(d)).ToList();

            return DenseMatrix.OfRowVectors(listOfRows);
        }
Example #3
		public void Smooth(ref double[,] inputValues)
		{
			// TODO: Going through the matrix works, but it performs many data accesses; indexing the array directly might reduce them, though that is not certain.
			var inputMatrix = DenseMatrix.OfArray(inputValues);

			for (int i = 0; i < inputMatrix.RowCount; i++)
			{
				inputMatrix.SetRow(i, Smooth(inputMatrix.Row(i).ToArray()));
			}

			for (int i = 0; i < inputMatrix.ColumnCount; i++)
			{
				inputMatrix.SetColumn(i, Smooth(inputMatrix.Column(i).ToArray()));
			}

			inputValues = inputMatrix.ToArray();
		}
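A minimal usage sketch, assuming the surrounding class (called GridSmoother here purely for illustration) also exposes the 1-D Smooth(double[]) overload this method calls:

    // Hypothetical example: smooth a small height map in place.
    double[,] heights =
    {
        { 1.0, 5.0, 1.0 },
        { 5.0, 9.0, 5.0 },
        { 1.0, 5.0, 1.0 }
    };
    var smoother = new GridSmoother();   // hypothetical host of Smooth(ref double[,])
    smoother.Smooth(ref heights);        // rows are smoothed first, then columns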
Example #4
        /// <summary>
        /// Run example
        /// </summary>
        public void Run()
        {
            // Format vector output to console
            var formatProvider = (CultureInfo)CultureInfo.InvariantCulture.Clone();
            formatProvider.TextInfo.ListSeparator = " ";

            // Create new empty square matrix
            var matrix = new DenseMatrix(10);
            Console.WriteLine(@"Empty matrix");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 1. Fill matrix by data using indexer []
            var k = 0;
            for (var i = 0; i < matrix.RowCount; i++)
            {
                for (var j = 0; j < matrix.ColumnCount; j++)
                {
                    matrix[i, j] = k++;
                }
            }

            Console.WriteLine(@"1. Fill matrix by data using indexer []");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 2. Fill matrix by data using At. The element is set without range checking.
            for (var i = 0; i < matrix.RowCount; i++)
            {
                for (var j = 0; j < matrix.ColumnCount; j++)
                {
                    matrix.At(i, j, k--);
                }
            }

            Console.WriteLine(@"2. Fill matrix by data using At");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 3. Clone matrix
            var clone = matrix.Clone();
            Console.WriteLine(@"3. Clone matrix");
            Console.WriteLine(clone.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 4. Clear matrix
            clone.Clear();
            Console.WriteLine(@"4. Clear matrix");
            Console.WriteLine(clone.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 5. Copy matrix into another matrix
            matrix.CopyTo(clone);
            Console.WriteLine(@"5. Copy matrix into another matrix");
            Console.WriteLine(clone.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 6. Get submatrix into another matrix
            var submatrix = matrix.SubMatrix(2, 2, 3, 3);
            Console.WriteLine(@"6. Copy submatrix into another matrix");
            Console.WriteLine(submatrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 7. Get part of the row as vector. In this example: get 4 elements from row 5 starting from column 3
            var row = matrix.Row(5, 3, 4);
            Console.WriteLine(@"7. Get part of the row as vector");
            Console.WriteLine(row.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 8. Get part of the column as vector. In this example: get 3 elements from column 2 starting from row 6
            var column = matrix.Column(2, 6, 3);
            Console.WriteLine(@"8. Get part of the column as vector");
            Console.WriteLine(column.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 9. Get columns using column enumerator. If you need all columns you may use ColumnEnumerator without parameters
            Console.WriteLine(@"9. Get columns using column enumerator");
            foreach (var keyValuePair in matrix.ColumnEnumerator(2, 4))
            {
                Console.WriteLine(@"Column {0}: {1}", keyValuePair.Item1, keyValuePair.Item2.ToString("#0.00\t", formatProvider));
            }

            Console.WriteLine();

            // 10. Get rows using row enumerator. If you need all rows you may use RowEnumerator without parameters
            Console.WriteLine(@"10. Get rows using row enumerator");
            foreach (var keyValuePair in matrix.RowEnumerator(4, 3))
            {
                Console.WriteLine(@"Row {0}: {1}", keyValuePair.Item1, keyValuePair.Item2.ToString("#0.00\t", formatProvider));
            }

            Console.WriteLine();

            // 11. Convert matrix into multidimensional array
            var data = matrix.ToArray();
            Console.WriteLine(@"11. Convert matrix into multidimensional array");
            for (var i = 0; i < data.GetLongLength(0); i++)
            {
                for (var j = 0; j < data.GetLongLength(1); j++)
                {
                    Console.Write(data[i, j].ToString("#0.00\t"));
                }

                Console.WriteLine();
            }

            Console.WriteLine();

            // 12. Convert matrix into row-wise array
            var rowwise = matrix.ToRowWiseArray();
            Console.WriteLine(@"12. Convert matrix into row-wise array");
            for (var i = 0; i < matrix.RowCount; i++)
            {
                for (var j = 0; j < matrix.ColumnCount; j++)
                {
                    Console.Write(rowwise[(i * matrix.ColumnCount) + j].ToString("#0.00\t"));
                }

                Console.WriteLine();
            }

            Console.WriteLine();

            // 13. Convert matrix into column-wise array
            var columnwise = matrix.ToColumnWiseArray();
            Console.WriteLine(@"13. Convert matrix into column-wise array");
            for (var i = 0; i < matrix.RowCount; i++)
            {
                for (var j = 0; j < matrix.ColumnCount; j++)
                {
                    Console.Write(columnwise[(j * matrix.RowCount) + i].ToString("#0.00\t"));
                }

                Console.WriteLine();
            }

            Console.WriteLine();

            // 14. Get matrix diagonal as vector
            var diagonal = matrix.Diagonal();
            Console.WriteLine(@"14. Get matrix diagonal as vector");
            Console.WriteLine(diagonal.ToString("#0.00\t", formatProvider));
            Console.WriteLine();
        }
Example #5
        public static SimplexMethodResult Solve(double[][] array, double[] cVector, double[] bVector, double[] xVector, int[] basisIndexes)
        {
            DenseVector vectorC = DenseVector.OfArray(cVector);
            DenseVector vectorX = DenseVector.OfArray(xVector);
            List<int> jBasisIndexes = basisIndexes.ToList();
            DenseMatrix matrix = new DenseMatrix(1).FromSimpleArray(array);

            //Start
            while (true)
            {
                List<int> jH = new List<int>();

                xVector.ForEach(delegate(double d, int i) {
                    if (!jBasisIndexes.Contains(i)) jH.Add(i);
                });

                DenseMatrix basisMatrix = DenseMatrix.OfColumnVectors(jBasisIndexes.Select(matrix.Column).ToList());
                double basisMatrixDeterminantAbs = Math.Abs(basisMatrix.Determinant());

                if (!(ZeroComparer.IsBiggerThanZero(basisMatrixDeterminantAbs)))
                {
                    return new SimplexMethodResult(null, SimplexMethodResultType.ZeroDeterminant);
                }

                var matrixB = basisMatrix.Inverse();

                //step 1
                Vector<double> cB = DenseVector.OfArray(jBasisIndexes.Select(j => vectorC[j]).ToArray());
                Vector<double> vectorU = cB * matrixB;
                IList<double> deltas = jH.Select(j => vectorU * matrix.Column(j) - vectorC[j]).ToList();

                //step 2
                if (deltas.All(d => d >= 0))
                {
                    return new SimplexMethodResult(vectorX.ToArray(), SimplexMethodResultType.Optimal);
                }

                //step 3
                double firstNegativeDelta = deltas.First(d => d < 0);
                int j0 = jH[deltas.IndexOf(firstNegativeDelta)];
                Vector<double> vectorZ = matrixB * matrix.Column(j0);

                if (vectorZ.All(zI => zI <= 0))
                {
                    return new SimplexMethodResult(null, SimplexMethodResultType.NoSolutions);
                }

                //step 4
                var xToZ = XtoZ(vectorX, vectorZ, jBasisIndexes);
                double teta = xToZ.Min();
                int indexS = vectorZ.Select((zI, i) => vectorX[jBasisIndexes[i]]/zI).ToList().FindIndex(value => Math.Abs(value - teta) < 0.000001);

                //step 5
                for (int i = 0; i < vectorX.Count; i++)
                {
                    if (j0 == i)
                    {
                        vectorX[i] = teta;
                        continue;
                    }

                    if (jH.Contains(i))
                    {
                        vectorX[i] = 0;
                        continue;
                    }

                    var zElementByIndex = vectorZ[jBasisIndexes.FindIndex(element => element == i)];
                    vectorX[i] = vectorX[i] - teta * zElementByIndex;
                }

                jBasisIndexes.Remove(jBasisIndexes[indexS]);
                jBasisIndexes.Add(j0);
            }
        }
Example #6
        /// <summary>
        /// Generate a random n-class classification problem.
        /// </summary>
        /// <param name="nSamples">The number of samples.</param>
        /// <param name="nFeatures">The total number of features. These comprise <paramref name="nInformative"/>
        /// informative features, <paramref name="nRedundant"/> redundant features, <paramref name="nRepeated"/>
        /// dupplicated features and `<paramref name="nFeatures"/>-<paramref name="nInformative"/>-<paramref name="nRedundant"/>-
        /// <paramref name="nRepeated"/>` useless features drawn at random.</param>
        /// <param name="nInformative">The number of informative features. Each class is composed of a number
        /// of gaussian clusters each located around the vertices of a hypercube
        /// in a subspace of dimension <paramref name="nInformative"/>. For each cluster,
        /// informative features are drawn independently from  N(0, 1) and then
        /// randomly linearly combined in order to add covariance. The clusters
        /// are then placed on the vertices of the hypercube.</param>
        /// <param name="nRedundant">The number of redundant features. These features are generated as
        /// random linear combinations of the informative features.</param>
        /// <param name="nRepeated"> The number of dupplicated features, drawn randomly from the informative
        /// and the redundant features.
        /// </param>
        /// <param name="nClasses">The number of classes (or labels) of the classification problem.</param>
        /// <param name="nClustersPerClass">The number of clusters per class.</param>
        /// <param name="weights">The proportions of samples assigned to each class. If None, then
        /// classes are balanced. Note that if `len(weights) == n_classes - 1`,
        /// then the last class weight is automatically inferred.
        /// </param>
        /// <param name="flipY">The fraction of samples whose class are randomly exchanged.</param>
        /// <param name="classSep">The factor multiplying the hypercube dimension.</param>
        /// <param name="hypercube">If True, the clusters are put on the vertices of a hypercube. If
        /// False, the clusters are put on the vertices of a random polytope.</param>
        /// <param name="shift">Shift all features by the specified value. If None, then features
        /// are shifted by a random value drawn in [-class_sep, class_sep].</param>
        /// <param name="scale">Multiply all features by the specified value. If None, then features
        /// are scaled by a random value drawn in [1, 100]. Note that scaling
        /// happens after shifting.
        /// </param>
        /// <param name="shuffle">Shuffle the samples and the features.</param>
        /// <param name="randomState">Random generator.</param>
        /// <returns>A <see cref="Classification"/> holding the generated sample matrix X (shape [nSamples, nFeatures])
        /// and the integer class labels Y (length nSamples).</returns>
        /// <remarks>
        /// The algorithm is adapted from Guyon [1] and was designed to generate
        /// the "Madelon" dataset.
        /// References:
        /// [1] I. Guyon, "Design of experiments for the NIPS 2003 variable
        ///     selection benchmark", 2003.
        /// </remarks>
        public static Classification MakeClassification(
            int nSamples = 100,
            int nFeatures = 20,
            int nInformative = 2,
            int nRedundant = 2,
            int nRepeated = 0,
            int nClasses = 2,
            int nClustersPerClass = 2,
            List<double> weights = null,
            double flipY = 0.01,
            double classSep = 1.0,
            bool hypercube = true,
            double? shift = 0.0,
            double? scale = 1.0,
            bool shuffle = true,
            Random randomState = null)
        {
            var generator = randomState ?? new Random();

            // Count features, clusters and samples
            if (nInformative + nRedundant + nRepeated > nFeatures)
            {
                throw new ArgumentException("Number of informative, redundant and repeated " +
                                            "features must sum to less than the number of total" +
                                            " features");
            }

            if (Math.Pow(2, nInformative) < nClasses * nClustersPerClass)
            {
                throw new ArgumentException(
                    "n_classes * n_clusters_per_class must be smaller " +
                    "than or equal to 2 ** n_informative");
            }

            if (weights != null && !new[] { nClasses, nClasses - 1 }.Contains(weights.Count))
            {
                throw new ArgumentException("Weights specified but incompatible with number of classes.");
            }

            int nUseless = nFeatures - nInformative - nRedundant - nRepeated;
            int nClusters = nClasses * nClustersPerClass;

            if (weights != null && weights.Count == nClasses - 1)
            {
                weights.Add(1.0 - weights.Sum());
            }

            if (weights == null) 
            {
                weights = Enumerable.Repeat(1.0 / nClasses, nClasses).ToList();
                weights[weights.Count - 1] = 1.0 - weights.Take(weights.Count - 1).Sum();
            }

            var nSamplesPerCluster = new List<int>();

            for (int k = 0; k < nClusters; k++)
            {
                nSamplesPerCluster.Add(
                    (int)(nSamples * weights[k % nClasses] / nClustersPerClass));
            }

            for (int i = 0; i < nSamples - nSamplesPerCluster.Sum(); i++)
            {
                nSamplesPerCluster[i % nClusters] += 1;
            }

            // Initialize X and y
            Matrix x = new DenseMatrix(nSamples, nFeatures);
            int[] y = new int[nSamples];

            // Build the polytope
            Matrix c = new DenseMatrix(1 << nInformative, nInformative);
            for (int i = 0; i < 1 << nInformative; i++)
            {
                var row = new DenseVector(nInformative);
                for (int bitN = 0; bitN < nInformative; bitN++)
                {
                    row[bitN] = (i & (1 << bitN)) != 0 ? classSep : -classSep;
                }

                c.SetRow(i, row);
            }

            if (!hypercube)
            {
                for (int k = 0; k < nClusters; k++)
                {
                    c.SetRow(k, c.Row(k) * generator.NextDouble());
                }

                for (int f = 0; f < nInformative; f++)
                {
                    c.SetColumn(f, c.Column(f) * generator.NextDouble());
                }
            }

            // todo:
            // generator.shuffle(C)

            // Loop over all clusters
            int pos = 0;
            int posEnd = 0;

            for (int k = 0; k < nClusters; k++)
            {
                // Number of samples in cluster k
                int nSamplesK = nSamplesPerCluster[k];

                // Define the range of samples
                pos = posEnd;
                posEnd = pos + nSamplesK;

                // Assign labels
                for (int l = pos; l < posEnd; l++)
                {
                    y[l] = k % nClasses;
                }

                // Draw features at random
                var subMatrix = DenseMatrix.CreateRandom(
                    nSamplesK,
                    nInformative,
                    new Normal { RandomSource = generator });

                x.SetSubMatrix(pos, nSamplesK, 0, nInformative, subMatrix);

                // Multiply by a random matrix to create co-variance of the features
                var uniform = new ContinuousUniform(-1, 1) { RandomSource = generator };
                Matrix a = DenseMatrix.CreateRandom(nInformative, nInformative, uniform);

                x.SetSubMatrix(
                    pos,
                    nSamplesK,
                    0,
                    nInformative,
                    x.SubMatrix(pos, nSamplesK, 0, nInformative) * a);

                // Shift the cluster to a vertex
                var v = x.SubMatrix(pos, nSamplesK, 0, nInformative).AddRowVector(c.Row(k));
                x.SetSubMatrix(pos, nSamplesK, 0, nInformative, v);
            }

            // Create redundant features
            if (nRedundant > 0)
            {
                var uniform = new ContinuousUniform(-1, 1) { RandomSource = generator };
                Matrix b = DenseMatrix.CreateRandom(nInformative, nRedundant, uniform);
                x.SetSubMatrix(
                    0,
                    x.RowCount,
                    nInformative,
                    nRedundant,
                    x.SubMatrix(0, x.RowCount, 0, nInformative) * b);
            }

            // Repeat some features
            if (nRepeated > 0)
            {
                int n = nInformative + nRedundant;
                for (int i = 0; i < nRepeated; i++)
                {
                    // pick a random source column among the informative and redundant features
                    int r = (int)((generator.NextDouble() * (n - 1)) + 0.5);
                    x.SetColumn(n + i, x.Column(r));
                }
            }

            // Fill useless features
            var denseMatrix = DenseMatrix.CreateRandom(nSamples, nUseless, new Normal { RandomSource = generator });
            x.SetSubMatrix(0, nSamples, nFeatures - nUseless, nUseless, denseMatrix);

            // Randomly flip labels
            if (flipY >= 0.0)
            {
                for (int i = 0; i < nSamples; i++)
                {
                    if (generator.NextDouble() < flipY)
                    {
                        y[i] = generator.Next(nClasses);
                    }
                }
            }

            // Randomly shift and scale
            bool constantShift = shift != null;
            bool constantScale = scale != null;

            for (int f = 0; f < nFeatures; f++)
            {
                if (!constantShift)
                {
                    shift = ((2 * generator.NextDouble()) - 1) * classSep;
                }

                if (!constantScale)
                {
                    scale = 1 + (100 * generator.NextDouble());
                }

                x.SetColumn(f, (x.Column(f) + shift.Value) * scale.Value);
            }

            // Randomly permute samples and features
            // todo:
            /*
            if (shuffle)
            {
                X, y = util_shuffle(X, y, random_state=generator)

                indices = np.arange(n_features)
                generator.shuffle(indices)
                X[:, :] = X[:, indices]
            }*/

            return new Classification { X = x, Y = y };
        }
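A minimal usage sketch (assuming only what the code above shows: the returned Classification exposes the sample matrix X and the label array Y):

    // Hypothetical example: generate a small, reproducible 3-class problem.
    var data = MakeClassification(
        nSamples: 300,
        nFeatures: 10,
        nInformative: 4,
        nRedundant: 2,
        nClasses: 3,
        randomState: new Random(42));

    Console.WriteLine(data.X.RowCount + " x " + data.X.ColumnCount);   // 300 x 10
    Console.WriteLine(data.Y.Length);                                  // 300 labels in {0, 1, 2}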
Example #7
        private void optimize(DenseMatrix coefficients, DenseVector objFunValues, bool artificial)
        {
            //for calculations on the optimal solution row
            int cCounter,
                width = coefficients.ColumnCount;
            DenseVector cBVect = new DenseVector(basics.Count);

            //Sets up the b matrix
            DenseMatrix b = new DenseMatrix(basics.Count, 1);

            //basics will have values greater than coefficients.ColumnCount - 1 if there are still artificial variables
            //or if Nathan is bad and didn't get rid of them correctly
            foreach (int index in basics)
            {
                b = (DenseMatrix)b.Append(DenseVector.OfVector(coefficients.Column(index)).ToColumnMatrix());
            }
            // removes the first column
            b = (DenseMatrix)b.SubMatrix(0, b.RowCount, 1, b.ColumnCount - 1);

            double[] cPrimes = new double[width];
            double[] rhsOverPPrime;
            DenseMatrix[] pPrimes = new DenseMatrix[width];
            DenseMatrix bInverse;

            int newEntering, exitingRow;

            bool optimal = false;

            if(artificial)
            {
                rhsOverPPrime = new double[numConstraints + 1];
            }
            else
            {
                rhsOverPPrime = new double[numConstraints];
            }

            while (!optimal)
            {
                //calculates the inverse of b for this iteration
                bInverse = (DenseMatrix)b.Inverse();

                //updates the C vector with the most recent basic variables
                cCounter = 0;
                foreach (int index in basics)
                {
                    cBVect[cCounter++] = objFunValues.At(index);
                }

                //calculates the pPrimes and cPrimes
                for (int i = 0; i < coefficients.ColumnCount; i++)
                {
                    if (!basics.Contains(i))
                    {
                        pPrimes[i] = (DenseMatrix)bInverse.Multiply((DenseMatrix)coefficients.Column(i).ToColumnMatrix());

                        //c' = objFunVals - cB * P'n
                        //At(0) to turn it into a double
                        cPrimes[i] = objFunValues.At(i) - (pPrimes[i].LeftMultiply(cBVect)).At(0);
                    }
                    else
                    {
                        pPrimes[i] = null;
                    }
                }

                //RHS'
                xPrime = (DenseMatrix)bInverse.Multiply((DenseMatrix)rhsValues.ToColumnMatrix());

                //Starts newEntering as the first nonbasic
                newEntering = -1;
                int iter = 0;
                while(newEntering == -1)
                {
                    if(!basics.Contains(iter))
                    {
                        newEntering = iter;
                    }

                    iter++;
                }

                //new entering becomes the smallest cPrime that corresponds to a non-basic value
                for (int i = 0; i < cPrimes.Length; i++)
                {
                    if (cPrimes[i] < cPrimes[newEntering] && !basics.Contains(i))
                    {
                        newEntering = i;
                    }
                }

                //if the smallest cPrime is >= 0, i.e. they are all non-negative
                if (cPrimes[newEntering] >= 0)
                {
                    optimal = true;
                }
                else
                {
                    //FIXME: handle the case where all of these values are negative
                    exitingRow = 0;
                    for (int i = 0; i < xPrime.RowCount; i++)
                    {
                        double[,] pPrime = pPrimes[newEntering].ToArray();
                        rhsOverPPrime[i] = xPrime.ToArray()[i, 0] / pPrime[i, 0];

                        if (rhsOverPPrime[i] < rhsOverPPrime[exitingRow] && rhsOverPPrime[i] > 0 )
                        {
                            exitingRow = i;
                        }
                    }

                    //translates from the index in the basics list to the actual row
                    exitingRow = basics[exitingRow];

                    //make sure you're not being stupid here!!!!
                    int tempIndex = basics.IndexOf(exitingRow);
                    basics.Remove(exitingRow);

                    basics.Insert(tempIndex, newEntering);

                    b.SetColumn(basics.IndexOf(newEntering), coefficients.Column(newEntering));
                }
            }
        }
Example #8
File: SOFNN.cs Project: ifzz/QuantSys
        /*
         * Layer 4: weighted Layer.
         * 
        */

        public DenseMatrix CalculateGreatPsi(DenseMatrix X, DenseMatrix Psi)
        {
            int N = Psi.RowCount;
            int U = Psi.ColumnCount;
            int R = X.RowCount; //the number of inputs per observation

            var GreatPsi = new DenseMatrix(U*(R + 1), N);

            //foreach observation
            for (int i = 0; i < N; i++)
            {
                var x = new DenseVector(R);
                X.Column(i, x);

                var GreatPsiCol = new DenseVector(U*(R + 1));

                //foreach neuron
                for (int j = 0; j < U; j++)
                {
                    var temp = new DenseVector(x.Count + 1, 1);
                    temp.SetSubVector(1, x.Count, x);
                    GreatPsiCol.SetSubVector(j*(temp.Count), temp.Count, Psi[i, j]*temp);
                }

                GreatPsi.SetColumn(i, GreatPsiCol);
            }

            return GreatPsi;
        }
Example #9
File: SOFNN.cs Project: ifzz/QuantSys
        /*
         * Layer 3: Normalized Layer.
         * The number of neurons in this layer is equal to that of layer 2.
         * Psi_j = phi_j / sum(phi from k = 1 to u), for j = 1 to u
         * 
        */

        public DenseMatrix CalculatePsi(DenseMatrix X, DenseMatrix c, DenseMatrix sigma)
        {
            int U = c.ColumnCount; //the number of neurons in the structure
            int R = X.RowCount; //the number of inputs per observation
            int N = X.ColumnCount; //the number of observations

            var Psi = new DenseMatrix(N, U);

            for (int i = 0; i < N; i++)
            {
                DenseVector Phi;
                double SumPhi;

                var x = new DenseVector(R);
                X.Column(i, x);

                CalculatePhi(x, c, sigma, out Phi, out SumPhi);

                //for each neuron
                for (int j = 0; j < U; j++)
                {
                    //Psi - In a row you go through the neurons and in a column you go through number of
                    //observations **** Psi(#obs,IndexNeuron) ****
                    Psi[i, j] = (Phi[j]/SumPhi);
                }
            }

            return Psi;
        }
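The layer-3 normalization on its own, as a standalone sketch with a plain MathNet vector (not part of the SOFNN class):

    // Psi_j = phi_j / sum_k(phi_k), so each row of Psi sums to 1.
    var phi = DenseVector.OfArray(new[] { 1.0, 2.0, 1.0 });
    double sumPhi = phi.Sum();
    var psiRow = phi / sumPhi;   // (0.25, 0.5, 0.25)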
Example #10
File: SOFNN.cs Project: ifzz/QuantSys
        public void Train(DenseMatrix X, DenseVector d, DenseVector Kd)
        {
            int R = X.RowCount;
            int N = X.ColumnCount;
            int U = 0; //the number of neurons in the structure


            var c = new DenseMatrix(R, 1);
            var sigma = new DenseMatrix(R, 1);

            var Q = new DenseMatrix((R + 1), (R + 1));
            var O = new DenseMatrix(1, (R + 1));
            var pT_n = new DenseMatrix((R + 1), 1);

            double maxPhi = 0;
            int maxIndex;

            var Psi = new DenseMatrix(N, 1);

            Console.WriteLine("Running...");
            //for each observation n in X
            for (int i = 0; i < N; i++)
            {
                Console.WriteLine(100*(i/(double) N) + "%");

                var x = new DenseVector(R);
                X.Column(i, x);

                //if there are no neurons yet, initialize the structure;
                //otherwise update it recursively.
                if (U == 0)
                {
                    c = (DenseMatrix) x.ToColumnMatrix();
                    sigma = new DenseMatrix(R, 1, SigmaZero);
                    U = 1;
                    Psi = CalculatePsi(X, c, sigma);
                    UpdateStructure(X, Psi, d, ref Q, ref O);
                    pT_n =
                        (DenseMatrix)
                            (CalculateGreatPsi((DenseMatrix) x.ToColumnMatrix(), (DenseMatrix) Psi.Row(i).ToRowMatrix()))
                                .Transpose();
                }
                else
                {
                    StructureRecurse(X, Psi, d, i, ref Q, ref O, ref pT_n);
                }


                bool KeepSpinning = true;
                while (KeepSpinning)
                {
                    //Calculate the error and if-part criteria
                    double ee = pT_n.Multiply(O)[0, 0];

                    double approximationError = Math.Abs(d[i] - ee);

                    DenseVector Phi;
                    double SumPhi;
                    CalculatePhi(x, c, sigma, out Phi, out SumPhi);

                    maxPhi = Phi.Maximum();
                    maxIndex = Phi.MaximumIndex();

                    if (approximationError > delta)
                    {
                        if (maxPhi < threshold)
                        {
                            var tempSigma = new DenseVector(R);
                            sigma.Column(maxIndex, tempSigma);

                            double minSigma = tempSigma.Minimum();
                            int minIndex = tempSigma.MinimumIndex();
                            sigma[minIndex, maxIndex] = k_sigma*minSigma;
                            Psi = CalculatePsi(X, c, sigma);
                            UpdateStructure(X, Psi, d, ref Q, ref O);
                            var psi = new DenseVector(Psi.ColumnCount);
                            Psi.Row(i, psi);

                            pT_n =
                                (DenseMatrix)
                                    CalculateGreatPsi((DenseMatrix) x.ToColumnMatrix(), (DenseMatrix) psi.ToRowMatrix())
                                        .Transpose();
                        }
                        else
                        {
                            //add a new neuron and update structure

                            double distance = 0;
                            var cTemp = new DenseVector(R);
                            var sigmaTemp = new DenseVector(R);

                            //foreach input variable
                            for (int j = 0; j < R; j++)
                            {
                                distance = Math.Abs(x[j] - c[j, 0]);
                                int distanceIndex = 0;

                                //foreach neuron past 1
                                for (int k = 1; k < U; k++)
                                {
                                    if ((Math.Abs(x[j] - c[j, k])) < distance)
                                    {
                                        distanceIndex = k;
                                        distance = Math.Abs(x[j] - c[j, k]);
                                    }
                                }

                                if (distance < Kd[j])
                                {
                                    cTemp[j] = c[j, distanceIndex];
                                    sigmaTemp[j] = sigma[j, distanceIndex];
                                }
                                else
                                {
                                    cTemp[j] = x[j];
                                    sigmaTemp[j] = distance;
                                }
                            }
                            //end foreach

                            c = (DenseMatrix) c.InsertColumn(c.ColumnCount - 1, cTemp);
                            sigma = (DenseMatrix) sigma.InsertColumn(sigma.ColumnCount - 1, sigmaTemp);
                            Psi = CalculatePsi(X, c, sigma);
                            UpdateStructure(X, Psi, d, ref Q, ref O);
                            U++;
                            KeepSpinning = false;
                        }
                    }
                    else
                    {
                        if (maxPhi < threshold)
                        {
                            var tempSigma = new DenseVector(R);
                            sigma.Column(maxIndex, tempSigma);

                            double minSigma = tempSigma.Minimum();
                            int minIndex = tempSigma.MinimumIndex();
                            sigma[minIndex, maxIndex] = k_sigma*minSigma;
                            Psi = CalculatePsi(X, c, sigma);
                            UpdateStructure(X, Psi, d, ref Q, ref O);
                            var psi = new DenseVector(Psi.ColumnCount);
                            Psi.Row(i, psi);

                            pT_n =
                                (DenseMatrix)
                                    CalculateGreatPsi((DenseMatrix) x.ToColumnMatrix(), (DenseMatrix) psi.ToRowMatrix())
                                        .Transpose();
                        }
                        else
                        {
                            KeepSpinning = false;
                        }
                    }
                }
            }

            out_C = c;
            out_O = O;
            out_Sigma = sigma;

            Console.WriteLine("Done.");
        }
Example #11
File: SOFNN.cs Project: ifzz/QuantSys
        public void StructureRecurse(DenseMatrix X, DenseMatrix Psi, DenseVector d, int n, ref DenseMatrix Q,
            ref DenseMatrix O, ref DenseMatrix pT_n)
        {
            //O = O(t-1), O_next = O(t)
            //O should be a column vector (in matrix form)
            var x = new DenseVector(X.RowCount);
            var psi = new DenseVector(Psi.ColumnCount);

            X.Column(n, x);
            Psi.Row(n, psi);

            DenseMatrix p_n = CalculateGreatPsi((DenseMatrix) x.ToColumnMatrix(), (DenseMatrix) psi.ToRowMatrix());

            pT_n = (DenseMatrix) p_n.Transpose();

            double ee = Math.Abs(d[n] - (pT_n.Multiply(O))[0, 0]);
            double temp = 1 + (pT_n.Multiply(Q)).Multiply(p_n)[0, 0];
            double ae = Math.Abs(ee/temp);

            if (ee >= ae)
            {
                var L = (DenseMatrix) Q.Multiply(p_n).Multiply(1/temp);
                Q = (DenseMatrix) ((DenseMatrix.Identity(Q.RowCount).Subtract(L.Multiply(pT_n))).Multiply(Q));
                O = (DenseMatrix) O.Add(L*ee);
            }
            else
            {
                Q = (DenseMatrix) DenseMatrix.Identity(Q.RowCount).Multiply(Q);
            }
        }
Example #12
File: GomoriMethod.cs Project: Kant8/IOp
 private DenseMatrix GetBaseMatrix(DenseMatrix matrix, List<int> baseJ)
 {
     var resM = new DenseMatrix(baseJ.Count);
     int i = 0;
     foreach (var j in baseJ)
     {
         resM.SetColumn(i, matrix.Column(j));
         i++;
     }
     return resM;
 }
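A minimal usage sketch (GetBaseMatrix is private to GomoriMethod; note that the basis must contain exactly as many column indexes as the matrix has rows, because the result is allocated square):

    // Hypothetical example: basis matrix built from columns 0, 2 and 4 of a 3x5 constraint matrix.
    var a = DenseMatrix.OfArray(new double[,]
    {
        { 1, 2, 0, 1, 3 },
        { 0, 1, 1, 4, 0 },
        { 5, 0, 6, 0, 2 }
    });
    var basis = GetBaseMatrix(a, new List<int> { 0, 2, 4 });   // 3x3 matrix of those columns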
Example #13
        public static void WriteDataToJson(string[] symbols, DateTime[] dateTimes, DenseMatrix data, string filename)
        {
            StringBuilder jsonStringBuilder = new StringBuilder();

            jsonStringBuilder.Append("[");
            for (int i = 0; i < data.ColumnCount; i++)
            {
                jsonStringBuilder.Append("[");
                jsonStringBuilder.Append(dateTimes[i].Subtract(new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)).TotalMilliseconds.ToString());
                foreach (double d in data.Column(i))
                {
                    jsonStringBuilder.Append(",");
                    jsonStringBuilder.Append(double.IsNaN(d) ? "null" : Math.Round(d, 8).ToString());
                }
                jsonStringBuilder.Append("]");
                if (i != data.ColumnCount - 1) jsonStringBuilder.Append(",\n");
            }
            jsonStringBuilder.Append("]");

            using (var file = new StreamWriter(QSConstants.DEFAULT_DATA_FILEPATH + @filename))
            {
                file.WriteLine(jsonStringBuilder.ToString());
            }

        }
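A minimal usage sketch (QSConstants.DEFAULT_DATA_FILEPATH comes from the surrounding QuantSys project; each matrix column holds the values for one timestamp, one row per symbol):

    // Hypothetical example: two symbols over two days, one column per timestamp.
    var symbols = new[] { "AAPL", "MSFT" };
    var dates = new[] { new DateTime(2020, 1, 2), new DateTime(2020, 1, 3) };
    var prices = DenseMatrix.OfArray(new double[,]
    {
        { 300.35, 296.24 },   // AAPL
        { 160.62, 158.62 }    // MSFT
    });
    WriteDataToJson(symbols, dates, prices, "prices.json");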
Example #14
        public static DualSimplexMethodResult Solve(double[][] array, double[] cVector, double[] bVector, double[] yVector,
            int[] basisIndexes)
        {
            DenseVector vectorC = DenseVector.OfArray(cVector);
            DenseVector vectorB = DenseVector.OfArray(bVector);

            List<int> jBasisIndexes = new List<int>();

            basisIndexes.ForEach(i => jBasisIndexes.Add(i - 1));

            DenseMatrix matrix = new DenseMatrix(1).FromSimpleArray(array);
            List<int> jH = new List<int>();
            DenseVector cBasisVector = DenseVector.OfEnumerable(jBasisIndexes.Select(cB => cVector[cB]).ToList());

            Vector<double> vectorY = cBasisVector * DenseMatrix.OfColumnVectors(jBasisIndexes.Select(matrix.Column).ToList()).Inverse();
            int iterationNumber = 0;
            while (true)
            {
                iterationNumber++;
                jH.Clear();
                yVector.ForEach(delegate(double d, int i)
                {
                    if (!jBasisIndexes.Contains(i)) jH.Add(i);
                });

                DenseMatrix basisMatrix = DenseMatrix.OfColumnVectors(jBasisIndexes.Select(matrix.Column).ToList());
                double basisMatrixDeterminantAbs = Math.Abs(basisMatrix.Determinant());

                if (!(ZeroComparer.IsBiggerThanZero(basisMatrixDeterminantAbs)))
                {
                    return new DualSimplexMethodResult(null, null, SimplexMethodResultType.ZeroDeterminant);
                }

                Matrix<double> matrixB = basisMatrix.Inverse();

                // step 1
                Vector<double> xiVector = matrixB * vectorB;
                List<double> xi = xiVector.ToList();

                if (xi.All(d => !(d < 0)))
                {
                    double[] xiResult = new double[vectorC.Count];
                    for (var i = 0; i < xi.Count; i++)
                    {
                        xiResult[jBasisIndexes[i]] = xi[i];
                    }

                    return new DualSimplexMethodResult(xiResult, vectorY.ToArray(), SimplexMethodResultType.Optimal);
                }
                // step 2
                double xiJs = xi.First(d => d < 0);
                int js = jBasisIndexes[xi.IndexOf(xiJs)];
                int indexS = jBasisIndexes.IndexOf(js);

                // step 3
                DenseVector eVector = new DenseVector(vectorB.Count);
                eVector[indexS] = 1;

                var deltaY = eVector * matrixB;

                double[] my = jH.Select(j => deltaY * matrix.Column(j)).ToArray();

                if (my.All(zI => zI >= 0))
                {
                    return new DualSimplexMethodResult(null, null, SimplexMethodResultType.NoSolutions);
                }

                //step 4
                double ro = jH.Where(jElem => my[jH.IndexOf(jElem)] < 0).Select(jElem => (vectorC[jElem] - matrix.Column(jElem) * vectorY) / my[jH.ToList().IndexOf(jElem)]).Min();
                int j0 = jH[jH.Select(jElem => (vectorC[jElem] - matrix.Column(jElem) * vectorY) / my[jH.IndexOf(jElem)]).ToList().FindIndex(x => Math.Abs(x - ro) < 0.000001)];

                //step 5
                vectorY = vectorY + ro * deltaY;

                jBasisIndexes.Remove(js);
                jBasisIndexes.Add(j0);
            }
        }
Example #15
        private void ReduceCosts()
        {
            ReducedCosts = (DenseMatrix)Costs.Clone();

            for (int i = 0; i < N; i++)
            {
                var min = ReducedCosts.Row(i).Minimum();
                for (int j = 0; j < N; j++)
                {
                    ReducedCosts[i, j] -= min;
                }
            }
            //todo: maybe round result
            for (int j = 0; j < N; j++)
            {
                var min = ReducedCosts.Column(j).Minimum();
                for (int i = 0; i < N; i++)
                {
                    ReducedCosts[i, j] -= min;
                }
            }
            //todo: maybe round result
        }
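The same row/column reduction, reproduced as a standalone sketch with MathNet only (Costs, ReducedCosts and N are fields of the surrounding class; this is the classic first phase of an assignment-problem reduction):

    // After reducing, every row and every column contains at least one zero.
    var costs = DenseMatrix.OfArray(new double[,]
    {
        { 4, 1, 3 },
        { 2, 0, 5 },
        { 3, 2, 2 }
    });
    var reduced = (DenseMatrix)costs.Clone();
    for (int i = 0; i < reduced.RowCount; i++)
        reduced.SetRow(i, reduced.Row(i).Subtract(reduced.Row(i).Minimum()));
    for (int j = 0; j < reduced.ColumnCount; j++)
        reduced.SetColumn(j, reduced.Column(j).Subtract(reduced.Column(j).Minimum()));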