Example #1
    /// <summary>
    /// Warp a matrix M as R * M * R^T
    /// </summary>
    public static MatrixXD WarpMatrix(Quaternion R, MatrixXD M)
    {
        MatrixXD mout = new DenseMatrixXD(3, 3);

        mout.SetRow(0, ToVectorXD(R * ToVector3(M.Row(0))));
        mout.SetRow(1, ToVectorXD(R * ToVector3(M.Row(1))));
        mout.SetRow(2, ToVectorXD(R * ToVector3(M.Row(2))));
        mout.SetColumn(0, ToVectorXD(R * ToVector3(mout.Column(0))));
        mout.SetColumn(1, ToVectorXD(R * ToVector3(mout.Column(1))));
        mout.SetColumn(2, ToVectorXD(R * ToVector3(mout.Column(2))));
        return mout;
    }
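The snippet above relies on ToVector3/ToVectorXD conversion helpers that are not shown. A minimal sketch of what they might look like, assuming Unity's Vector3/Quaternion types and MathNet's Vector&lt;double&gt; (only the helper names come from the call sites; the bodies are assumptions):

    // Hypothetical conversion helpers (assumed; not part of the original example).
    public static UnityEngine.Vector3 ToVector3(MathNet.Numerics.LinearAlgebra.Vector<double> v)
    {
        return new UnityEngine.Vector3((float)v[0], (float)v[1], (float)v[2]);
    }

    public static MathNet.Numerics.LinearAlgebra.Vector<double> ToVectorXD(UnityEngine.Vector3 v)
    {
        return new MathNet.Numerics.LinearAlgebra.Double.DenseVector(new double[] { v.x, v.y, v.z });
    }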
Example #2
 private void PrepareMatrices(ArrayList kinectCoors, ArrayList projectorCoors)
 {
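     // Each Kinect/projector correspondence contributes two rows to an 11-unknown
     // DLT (direct linear transform) system A*p = b used to estimate the projection matrix.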
     foundCoordinatesMatrix = new DenseMatrix(projectorCoors.Count * 2, 11);
     rightSideMatrix = new DenseMatrix(projectorCoors.Count * 2, 1);
     for (int i = 0; i < projectorCoors.Count * 2; i = i + 2)
     {
         Point3D kc = (Point3D) kinectCoors[i / 2];
         Point2D projC = (Point2D) projectorCoors[i / 2];
         double[] valueArray = new double[] {kc.X, kc.Y, kc.Z, 1, 0, 0, 0, 0, -projC.X * kc.X, -projC.X * kc.Y, -projC.X * kc.Z};
         Vector<double> values = Vector<double>.Build.Dense(valueArray);
         foundCoordinatesMatrix.SetRow(i, values);
         Vector<double> rightSide = Vector<double>.Build.Dense(1, projC.X);
         rightSideMatrix.SetRow(i, rightSide);
         valueArray = new double[] {0, 0, 0, 0, kc.X, kc.Y, kc.Z, 1, -projC.Y * kc.X, -projC.Y * kc.Y, -projC.Y * kc.Z};
         values = Vector<double>.Build.Dense(valueArray);
         foundCoordinatesMatrix.SetRow(i + 1, values);
         rightSide = Vector<double>.Build.Dense(1, projC.Y);
         rightSideMatrix.SetRow(i + 1, rightSide);
     }
 }
Example #3
        public void build_prob_map()
        {
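            // Build a separable 2-D Gaussian probability map: a Gaussian profile along X
            // (rows of M_x) times a Gaussian profile along Y (columns of M_y), normalized so
            // the map sums to 1 (PROB_MAP_ORIG) and then rescaled so the centre value is 1 (PROB_MAP).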
            Normal N_x = new Normal(X / 2, STD_X);
            Normal N_y = new Normal(Y / 2, STD_Y);

            DenseMatrix M_x = new DenseMatrix(Y, X, 0.0);
            DenseMatrix M_y = new DenseMatrix(Y, X, 0.0);

            DenseVector V_x = new DenseVector(X);
            for (int i = 0; i < X; i++)
            {
                V_x[i] = N_x.Density(i);
            }

            for (int j = 0; j < Y; j++)
            {
                M_x.SetRow(j, V_x);
            }

            DenseVector V_y = new DenseVector(Y);
            for (int i = 0; i < Y; i++)
            {
                V_y[i] = N_y.Density(i);
            }

            for (int j = 0; j < X; j++)
            {
                M_y.SetColumn(j, V_y);
            }

            DenseMatrix MULT = (DenseMatrix)M_x.PointwiseMultiply(M_y);
            double s = MULT.Data.Sum();
            MULT = (DenseMatrix)MULT.PointwiseDivide(new DenseMatrix(Y, X, s));
            //this.dataGridView1.DataSource = MULT;
            //Console.WriteLine(MULT.Data.Sum());
            PROB_MAP_ORIG = MULT;

            s = MULT[Y / 2, X / 2];
            MULT = (DenseMatrix)MULT.PointwiseDivide(new DenseMatrix(Y, X, s));

            /*
            for (int i = 0; i < Y; i++)
            {
                Console.Write(i + " - ");
                for (int j = 0; j < X; j++)
                {
                    Console.Write(MULT[i, j] + " ");
                }
                Console.WriteLine();
                Console.WriteLine();
            }
            */
            PROB_MAP = MULT;
        }
Example #4
        public DenseMatrix NormalizeData(DenseMatrix data)
        {
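            // Each row is normalized by its own pre-configured normalizer (normalizeArrayInput[i]),
            // so every feature keeps an independent scaling.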
            var normalizedData = new DenseMatrix(data.RowCount, data.ColumnCount);

            for (int i = 0; i < data.RowCount; i++)
            {
                normalizedData.SetRow(i, normalizeArrayInput[i].Process(data.Row(i).ToArray()));
            }

            return normalizedData;
        }
Example #5
		public void Smooth(ref double[,] inputValues)
		{
			// TODO: Using the matrix works, but performs a lot of data accesses. Indexing the
			// array directly might cut them down, though the gain is uncertain.
		    // Wrap the 2-D input array in a matrix so rows and columns can be smoothed in turn.
		    var inputMatrix = DenseMatrix.OfArray(inputValues);

			for (int i = 0; i < inputMatrix.RowCount; i++)
			{
				inputMatrix.SetRow(i, Smooth(inputMatrix.Row(i).ToArray()));
			}

			for (int i = 0; i < inputMatrix.ColumnCount; i++)
			{
				inputMatrix.SetColumn(i, Smooth(inputMatrix.Column(i).ToArray()));
			}

			inputValues = inputMatrix.ToArray();
		}
Example #6
        public static void Process(FXSession session, string symbol1, string symbol2, string timeframe, int length)
        {


            HistoricPriceEngine h1 = new HistoricPriceEngine(session);
            h1.GetLongHistoricPrices(symbol1, timeframe, length);

            while (!h1.Complete)
            {
                Thread.Sleep(100);
            }

            HistoricPriceEngine h2 = new HistoricPriceEngine(session);
            h2.GetLongHistoricPrices(symbol2, timeframe, length);

            while (!h2.Complete)
            {
                Thread.Sleep(100);
            }
            //-----------------------

            var dateTimeList = new SortedList<DateTime, int>();

            Quantum q1 = h1.Data;
            Quantum q2 = h2.Data;


            var priceData = new DenseMatrix(2, q1.Data.Count);

            for (int j = 0; j < Math.Min(q1.Data.Count, q2.Data.Count); j++)
            {
                dateTimeList.Add(q1.Data.Values[j].Time, 1);
                priceData[0, j] = q1.Data.Values[j].BidClose;
                priceData[1, j] = q2.Data.Values[j].BidClose;
            }

            Vector<double> price1 = priceData.Row(0);
            Vector<double> price2 = priceData.Row(1);
            //Statistics.ApplyFunction((DenseVector)price1, Math.Log);
            //Statistics.ApplyFunction((DenseVector)price2, Math.Log);

            DenseVector norm1 = price1.ToArray().NormalizeZScore();
            DenseVector norm2 = price2.ToArray().NormalizeZScore();

            var newsym = new string[] {symbol1, symbol2, "spread"};

            var m = new DenseMatrix(6, norm1.Count);
            m.SetRow(0, norm1);
            m.SetRow(1, norm2);
            m.SetRow(2, (norm1 - norm2).ToArray().NormalizeZScore());

            string filename = symbol1.Replace('/', '_') + "-" + symbol2.Replace('/', '_') + ".html";


            Visualize.GenerateMultiPaneGraph(newsym, dateTimeList.Keys.ToArray(), m, QSConstants.DEFAULT_DATA_FILEPATH + filename,
                new ChartOption[]{new ChartOption(), new ChartOption(){Layover = true, YPosition = 0}, new ChartOption(){YPosition = 1} }, null, filename + ".json");

            FileUpload.UploadFileToFTP(QSConstants.DEFAULT_DATA_FILEPATH + filename, filename);
            FileUpload.UploadFileToFTP(QSConstants.DEFAULT_DATA_FILEPATH + filename + ".json", filename + ".json");

            double Spread = m[2, m.ColumnCount - 1];

            if (Spread > 2.0 && m[2, m.ColumnCount - 2] <= 2.0)
                Emailer.SendEmail(symbol1 + "-" + symbol2 + " Spread Above 2.0", "Test");

            if (Spread < -2.0 && m[2, m.ColumnCount - 2] >= -2.0)
                Emailer.SendEmail(symbol1 + "-" + symbol2 + " Spread Below -2.0", "Test");

        }
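Examples #6 and #15 both call a NormalizeZScore() extension on double[] that is not included here. A minimal sketch, assuming it is a plain z-score transform (the container class and body are assumptions; only the method name and return type come from the call sites):

        // Requires System, System.Linq and MathNet.Numerics.LinearAlgebra.Double.
        public static class ZScoreExtensions // hypothetical container class
        {
            // (x - mean) / stddev for each element, returned as a DenseVector.
            public static DenseVector NormalizeZScore(this double[] data)
            {
                double mean = data.Average();
                double std = Math.Sqrt(data.Select(v => (v - mean) * (v - mean)).Average());
                return new DenseVector(data.Select(v => std > 0 ? (v - mean) / std : 0.0).ToArray());
            }
        }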
Example #7
        public DenseMatrix createMatrix(Model model)
        {
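            // Builds the initial simplex tableau: one row per constraint, then a slack column
            // for each <= constraint and surplus + artificial columns for >= and = constraints.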
            int numConstraints = model.Constraints.Count;
            int numDecisionVars = model.Goal.Coefficients.Length;
            int varCounter = numDecisionVars;
            //  matrix(rows, columns)
            DenseMatrix coefficients = new DenseMatrix(numConstraints, numDecisionVars);
            DenseMatrix artificialVars = new DenseMatrix(numConstraints, 1);
            var constraintCounter = 0;
            this.rhsValues = new DenseVector(numConstraints);
            this.basics = new List<int>();
            this.artificialRows = new List<int>();
            foreach (var constraint in model.Constraints) {
                rhsValues[constraintCounter] = constraint.Value;

                // if the constraint RHS is negative, invert the coefficients and flip the inequality sign
                if (constraint.Value < 0)
                {
                    for (int i = 0; i< model.Goal.Coefficients.Length; i++) {
                        model.Goal.Coefficients[i] = model.Goal.Coefficients[i] * -1;
                    }
                    if (constraint.Relationship == Relationship.LessThanOrEquals)
                    {
                        constraint.Relationship = Relationship.GreaterThanOrEquals;
                    }
                    else if (constraint.Relationship == Relationship.GreaterThanOrEquals)
                    {
                        constraint.Relationship = Relationship.LessThanOrEquals;
                    }
                    // also flip the rhs value which we already put in the array for the simplex setup
                    rhsValues[constraintCounter] = rhsValues[constraintCounter] * -1;
                }

                coefficients.SetRow(constraintCounter, 0, constraint.Coefficients.Length, new DenseVector(constraint.Coefficients));
                // if it's a less than, add a slack column to the coefs matrix
                if (constraint.Relationship == Relationship.LessThanOrEquals)
                {
                    DenseVector slack = DenseVector.Create(model.Constraints.Count, delegate(int s) { return 0; });
                    slack.At(constraintCounter, 1);
                    coefficients = (DenseMatrix)coefficients.Append(slack.ToColumnMatrix());

                    this.basics.Add(varCounter);
                }
                else
                {
                    // Need to add an artificial variable for >= and = constraints

                    DenseVector surplus = DenseVector.Create(model.Constraints.Count, delegate(int s) { return 0; });
                    surplus.At(constraintCounter, -1);
                    coefficients = (DenseMatrix)coefficients.Append(surplus.ToColumnMatrix());

                    DenseVector artificial = DenseVector.Create(model.Constraints.Count, delegate(int s) { return 0; });
                    artificial.At(constraintCounter, 1);
                    artificialVars = (DenseMatrix)artificialVars.Append(artificial.ToColumnMatrix());

                    // Keeps track of the rows with artificial variable, for setting w
                    artificialRows.Add(constraintCounter);
                }
                varCounter++;
                constraintCounter++;
            }

            // put the constraints and stuff into the matrix
            if (artificialVars.ColumnCount > 1)
            {
                artificialVars = (DenseMatrix)artificialVars.SubMatrix(0, artificialVars.RowCount, 1, artificialVars.ColumnCount - 1);

                for (int i = coefficients.ColumnCount; i < coefficients.ColumnCount + artificialVars.ColumnCount; i++)
                {
                    this.basics.Add(i);
                }

                coefficients = (DenseMatrix)coefficients.Append(artificialVars);

                numArtificial = artificialVars.ColumnCount;
            }
            else
            {
                numArtificial = 0;
            }

            return coefficients;
        }
Example #8
        /// <summary>
        /// Run example
        /// </summary>
        public void Run()
        {
            // Format matrix output to console
            var formatProvider = (CultureInfo)CultureInfo.InvariantCulture.Clone();
            formatProvider.TextInfo.ListSeparator = " ";
            
            // Create square matrix
            var matrix = new DenseMatrix(5);
            var k = 0;
            for (var i = 0; i < matrix.RowCount; i++)
            {
                for (var j = 0; j < matrix.ColumnCount; j++)
                {
                    matrix[i, j] = k++;
                }
            }

            Console.WriteLine(@"Initial matrix");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // Create vector
            var vector = new DenseVector(new[] { 50.0, 51.0, 52.0, 53.0, 54.0 });
            Console.WriteLine(@"Sample vector");
            Console.WriteLine(vector.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 1. Insert new column
            var result = matrix.InsertColumn(3, vector);
            Console.WriteLine(@"1. Insert new column");
            Console.WriteLine(result.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 2. Insert new row
            result = matrix.InsertRow(3, vector);
            Console.WriteLine(@"2. Insert new row");
            Console.WriteLine(result.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 3. Set column values
            matrix.SetColumn(2, (Vector)vector);
            Console.WriteLine(@"3. Set column values");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 4. Set row values. 
            matrix.SetRow(3, (double[])vector);
            Console.WriteLine(@"4. Set row values");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 5. Set diagonal values. SetRow/SetColumn/SetDiagonal accepts Vector and double[] as input parameter
            matrix.SetDiagonal(new[] { 5.0, 4.0, 3.0, 2.0, 1.0 });
            Console.WriteLine(@"5. Set diagonal values");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 6. Set submatrix values
            matrix.SetSubMatrix(1, 3, 1, 3, DenseMatrix.Identity(3));
            Console.WriteLine(@"6. Set submatrix values");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // Permutations. 
            // Initialize a new instance of the Permutation class. An array represents where each integer is permuted to: 
            // indices[i] represents that integer "i" is permuted to location indices[i]
            var permutations = new Permutation(new[] { 0, 1, 3, 2, 4 });
            
            // 7. Permute rows 3 and 4
            matrix.PermuteRows(permutations);
            Console.WriteLine(@"7. Permute rows 3 and 4");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 8. Permute columns 1 and 2, 3 and 5
            permutations = new Permutation(new[] { 1, 0, 4, 3, 2 });
            matrix.PermuteColumns(permutations);
            Console.WriteLine(@"8. Permute columns 1 and 2, 3 and 5");
            Console.WriteLine(matrix.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 9. Concatenate the matrix with the given matrix
            var append = matrix.Append(matrix);

            // Concatenate into result matrix
            matrix.Append(matrix, append);
            Console.WriteLine(@"9. Append matrix to matrix");
            Console.WriteLine(append.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 10. Stack the matrix on top of the given matrix
            var stack = matrix.Stack(matrix);

            // Stack into result matrix
            matrix.Stack(matrix, stack);
            Console.WriteLine(@"10. Stack the matrix on top of the given matrix matrix");
            Console.WriteLine(stack.ToString("#0.00\t", formatProvider));
            Console.WriteLine();

            // 11. Diagonally stack the matrix on top of the given matrix
            var diagoinalStack = matrix.DiagonalStack(matrix);

            // Diagonally stack into result matrix
            matrix.DiagonalStack(matrix, diagoinalStack);
            Console.WriteLine(@"11. Diagonally stack the matrix on top of the given matrix matrix");
            Console.WriteLine(diagoinalStack.ToString("#0.00\t", formatProvider));
            Console.WriteLine();
        }
Example #9
 /// <summary>
 /// adds new row to matrix
 /// </summary>
 /// <param name="dest">matrix which add row</param>
 /// <param name="rowToAdd">row added to matrix</param>
 /// <returns>new matrix</returns>
 private static Matrix AddRow(Matrix dest, Vector rowToAdd)
 {
     Matrix res = new DenseMatrix(dest.RowCount + 1, dest.ColumnCount);
     res.SetSubMatrix(0, dest.RowCount, 0, dest.ColumnCount, dest);
     res.SetRow(res.RowCount - 1, rowToAdd);
     return res;
 }
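Example #18 below also calls a matching AddColumn helper that is not shown; a sketch mirroring AddRow above (assumed, not taken from the original source):

 /// <summary>
 /// adds new column to matrix (hypothetical counterpart of AddRow, inferred from its use in Example #18)
 /// </summary>
 /// <param name="dest">matrix which gets the column</param>
 /// <param name="columnToAdd">column added to matrix</param>
 /// <returns>new matrix</returns>
 private static Matrix AddColumn(Matrix dest, Vector columnToAdd)
 {
     Matrix res = new DenseMatrix(dest.RowCount, dest.ColumnCount + 1);
     res.SetSubMatrix(0, dest.RowCount, 0, dest.ColumnCount, dest);
     res.SetColumn(res.ColumnCount - 1, columnToAdd);
     return res;
 }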
Example #10
        public void Predict(DenseMatrix newPredictData, double[] newPredictPrices)
        {
            double error = 0;
            int c = 0;

            var newNormalizedPredictData = new DenseMatrix(newPredictData.RowCount, newPredictData.ColumnCount,
                double.NaN);

            for (int i = 0; i < newPredictData.RowCount; i++)
            {
                newNormalizedPredictData.SetRow(i, normalizeArrayInput[i].Process(newPredictData.Row(i).ToArray()));
            }

            double[] normalizedPrices = normalizeArrayOutput.Process(newPredictPrices);

            var d = new DenseMatrix(2, normalizedPrices.Length + 1, double.NaN);
            int count = 0;
            for (int i = 0; i < normalizedPrices.Length; i++)
            {
                // calculate based on actual data
                IMLData input = new BasicMLData(inputs);
                for (int j = 0; j < input.Count; j++)
                {
                    input.Data[j] = newNormalizedPredictData[j, i];
                }

                IMLData output = network.Compute(input);
                double prediction = output.Data[0];


                error +=
                    Math.Pow(
                        (normalizeArrayOutput.Stats.DeNormalize(prediction) - newPredictPrices[i])/newPredictPrices[i],
                        2);
                c++;
                d[0, count] = newPredictPrices[i];
                d[1, count] = normalizeArrayOutput.Stats.DeNormalize(prediction);
                count++;
            }

            /////////////////////////////////////////////////////////////////

            IMLData input1 = new BasicMLData(inputs);
            for (int j = 0; j < input1.Count; j++)
            {
                input1.Data[j] = newNormalizedPredictData[j, newNormalizedPredictData.ColumnCount - 1];
            }

            IMLData output1 = network.Compute(input1);
            d[1, count] = normalizeArrayOutput.Stats.DeNormalize(output1.Data[0]);


            /////////////////////////////////////////////////////////////////


            error /= c;
            error = Math.Pow(error, .5);
            Console.WriteLine(error);

            string[] symbols = {"actual", "predicted"};
            Visualize.GeneratePredictionGraph(symbols, d, new DateTime(), new TimeSpan(24, 0, 0),
                "C:\\Sangar\\resultfinal.html");

            outputCorre =
                StatisticsExtension.Correlation(d.Row(0).ToArray().Take(d.ColumnCount - 1).ToArray().RawRateOfReturn(),
                    d.Row(1).ToArray().Take(d.ColumnCount - 1).ToArray().RawRateOfReturn());

            Console.WriteLine("ST2 Correlation: " + outputCorre);

            outputRMSE = error;

            Console.WriteLine("Predicted return for D+1:" +
                              (d[1, d.ColumnCount - 1] - d[1, d.ColumnCount - 2])/d[1, d.ColumnCount - 2]*100 +
                              " percent");
        }
Example #11
        public void Execute()
        {
            var nnSet = new Stage1NeuralNetwork[vectors.Length];

            int trainLength = 2000;
            int validationLength = 500;
            int predictLength = 500;
            int useLength = 3000;

            var totalData = new DenseMatrix(vectors.Length, useLength);
            var outputData = new DenseMatrix(vectors.Length, validationLength - window);

            /////////////////populate the actual price data we want to predict
            var pricingData = new double[useLength];

            for (int i = 2; i < 2 + useLength; i++)
            {
                pricingData[i - 2] = (double) data[useLength + 3 - i, 5];
            }


            double[] returnpricingData = pricingData.RawRateOfReturn();
            for (int i = 0; i < returnpricingData.Length; i++) pricingData[i + 1] = returnpricingData[i];
            pricingData[0] = 0;


            ////////////////////////training and validation////////////////////////
            for (int i = 2; i < 2 + useLength; i++)
            {
                for (int j = 0; j < vectors.Length; j++)
                {
                    totalData[j, i - 2] = (double) data[useLength + 3 - i, vectors[j]];
                }
            }


            for (int j = 0; j < vectors.Length; j++)
            {
                double[] train = totalData.Row(j).ToArray().Take(trainLength).ToArray();
                double[] validate =
                    totalData.Row(j).ToArray().Skip(trainLength).ToArray().Take(validationLength).ToArray();
                nnSet[j] = new Stage1NeuralNetwork(window, cycles, train, validate);
                nnSet[j].Execute(j);
                outputData.SetRow(j, nnSet[j].OutputData);
            }

            var s1 = new Stage2NeuralNetwork(vectors.Length, cycles, outputData,
                pricingData.Skip(trainLength).ToArray().Take(validationLength).ToArray().Skip(window).ToArray());
            s1.Execute();

            //////////////////////////////////////////////////////////////////////////
            //////////////////////////////////prediction/////////////////////////////
            var predictedData = new DenseMatrix(vectors.Length, predictLength - window + 1);

            var lastPredData = new double[vectors.Length];

            for (int j = 0; j < vectors.Length; j++)
            {
                double[] predictData =
                    totalData.Row(j)
                        .ToArray()
                        .Skip(trainLength + validationLength)
                        .ToArray()
                        .Take(predictLength)
                        .ToArray();
                nnSet[j].Predict(predictData);
                predictedData.SetRow(j, nnSet[j].OutputData);
                lastPredData[j] = nnSet[j].NextPrediction;
            }

            s1.Predict(predictedData,
                pricingData.ToArray()
                    .Skip(trainLength + validationLength)
                    .ToArray()
                    .Take(predictLength)
                    .ToArray()
                    .Skip(window)
                    .ToArray());

            correlation = s1.outputCorre;
            RMSE = s1.outputRMSE;
        }
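Predict (Example #10) and Execute above both use a RawRateOfReturn() extension that is not shown. A minimal sketch, assuming it returns simple period-over-period returns, one element shorter than its input (which matches how Execute re-aligns the result); the class name and body are assumptions:

        public static class ReturnExtensions // hypothetical container class
        {
            // r[i] = (p[i+1] - p[i]) / p[i]
            public static double[] RawRateOfReturn(this double[] prices)
            {
                var returns = new double[prices.Length - 1];
                for (int i = 0; i < returns.Length; i++)
                {
                    returns[i] = (prices[i + 1] - prices[i]) / prices[i];
                }
                return returns;
            }
        }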
Example #12
        private void solveActuator2CartesianDisp(double[] adisp)
        {
            bool check = false;
            DenseVector cartDisp = new DenseVector(6);
            DenseVector newAct = new DenseVector(adisp);
            DenseVector actError = (DenseVector)newAct.Subtract(actuatorDisp);
            cartesianDisp.CopyTo(cartDisp);
            int iterations = 0;

            while (check == false)
            {
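                // Newton-Raphson style refinement: build the Jacobian dL/dx from the six
                // actuators, solve J * dx = actuator error via LU, and update the Cartesian estimate.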
                List2String l2s = new List2String();

                DenseMatrix JacobianMatrix = new DenseMatrix(6, 6);

                for (int i = 0; i < 6; i++)
                {
                    DenseVector DL_Dd = actuators[i].calcNewDiffs(cartDisp.Values);
                    JacobianMatrix.SetRow(i, DL_Dd);
                }
                DenseVector diffCart = (DenseVector)JacobianMatrix.LU().Solve(actError);
                log.Debug("Cartesian differences " + l2s.ToString(diffCart.Values));
                cartDisp = (DenseVector)cartDisp.Add(diffCart);
                setCartesianDisp(cartDisp.Values);
                log.Debug("New cartesian estimate " + this);
                actError = (DenseVector)newAct.Subtract(actuatorDisp);
                log.Debug("Actuator error " + l2s.ToString(actError.Values));

                check = withinErrorWindow(actError);
                if (iterations > 20)
                {
                    check = true;
                    log.Error("Calculations for " + label + " won't converge with " + this);
                }
                iterations++;
            }
        }
Example #13
        /// <summary>
        /// Solve the ridge equation by the method of normal equations.
        /// </summary>
        /// <param name="x">[n_samples, n_features]
        /// Training data</param>
        /// <param name="y">[n_samples, n_targets]
        /// Target values</param>
        /// <param name="alpha"></param>
        /// <param name="sampleWeight">Individual weights for each sample.</param>
        /// <param name="solver">Solver to use in the computational routines.</param>
        /// <param name="maxIter">Maximum number of iterations for least squares solver. </param>
        /// <param name="tol">Precision of the solution.</param>
        /// <returns>[n_targets, n_features]
        /// Weight vector(s)</returns>
        /// <remarks>
        /// This function won't compute the intercept;
        /// </remarks>
        public static Matrix<double> RidgeRegression(
            Matrix<double> x,
            Matrix<double> y,
            double alpha,
            Vector<double> sampleWeight = null,
            RidgeSolver solver = RidgeSolver.Auto,
            int? maxIter = null,
            double tol = 1E-3)
        {
            int nSamples = x.RowCount;
            int nFeatures = x.ColumnCount;

            if (solver == RidgeSolver.Auto)
            {
                // cholesky if it's a dense array and lsqr in
                // any other case
                if (x is DenseMatrix)
                {
                    solver = RidgeSolver.DenseCholesky;
                }
                else
                {
                    solver = RidgeSolver.Lsqr;
                }
            }

            if (sampleWeight != null)
            {
                solver = RidgeSolver.DenseCholesky;
            }

            if (solver == RidgeSolver.Lsqr)
            {
                // According to the lsqr documentation, alpha = damp^2.
                double sqrtAlpha = Math.Sqrt(alpha);
                Matrix coefs = new DenseMatrix(y.ColumnCount, x.ColumnCount);
                foreach (var column in y.ColumnEnumerator())
                {
                    Vector<double> c = Lsqr.lsqr(
                        x,
                        column.Item2,
                        damp: sqrtAlpha,
                        atol: tol,
                        btol: tol,
                        iterLim: maxIter).X;

                    coefs.SetRow(column.Item1, c);
                }

                return coefs;
            }

            if (solver == RidgeSolver.DenseCholesky)
            {
                //# normal equations (cholesky) method
                if (nFeatures > nSamples || sampleWeight != null)
                {
                    // kernel ridge
                    // w = X.T * inv(X X^t + alpha*Id) y
                    var k = x.TransposeAndMultiply(x);
                    Vector<double> sw = null;
                    if (sampleWeight != null)
                    {
                        sw = sampleWeight.Sqrt();
                        // We are doing a little dance with the sample weights to
                        // avoid copying the original X, which could be big

                        y = y.MulColumnVector(sw);

                        k = k.PointwiseMultiply(sw.Outer(sw));
                    }

                    k.Add(DenseMatrix.Identity(k.RowCount)*alpha, k);
                    try
                    {
                        var dualCoef = k.Cholesky().Solve(y);
                        if (sampleWeight != null)
                            dualCoef = dualCoef.MulColumnVector(sw);

                        return x.TransposeThisAndMultiply(dualCoef).Transpose();
                    }
                    catch (Exception) //todo:
                    {
                        // use SVD solver if matrix is singular
                        solver = RidgeSolver.Svd;
                    }
                }
                else
                {
                    // ridge
                    // w = inv(X^t X + alpha*Id) * X.T y
                    var a = x.TransposeThisAndMultiply(x);
                    a.Add(DenseMatrix.Identity(a.ColumnCount)*alpha, a);

                    var xy = x.TransposeThisAndMultiply(y);

                    try
                    {
                        return a.Cholesky().Solve(xy).Transpose();
                    }
                    catch (Exception) //todo:
                    {
                        // use SVD solver if matrix is singular
                        solver = RidgeSolver.Svd;
                    }
                }
            }

            if (solver == RidgeSolver.Svd)
            {
                // slower than cholesky but does not break with
                // singular matrices
                var svd = x.Svd(true);
                //U, s, Vt = linalg.svd(X, full_matrices=False)
                int k = Math.Min(x.ColumnCount, x.RowCount);
                var d = svd.S().SubVector(0, k);
                d.MapInplace(v => v > 1e-15 ? v/(v*v + alpha) : 0.0);

                var ud = svd.U().SubMatrix(0, x.RowCount, 0, k).TransposeThisAndMultiply(y).Transpose();
                ud = ud.MulRowVector(d);
                return ud.Multiply(svd.VT().SubMatrix(0, k, 0, x.ColumnCount));
            }

            return null;
        }
Example #14
        public Matrix<double> FindLinearEstimationOfCameraMatrix()
        {
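            // Standard DLT: each 3-D/2-D point pair yields two homogeneous equations in the
            // 12 camera-matrix entries; the least-squares solution is reshaped into the 3x4 matrix.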
            Matrix<double> equationsMat = new DenseMatrix(2 * Points.Count, 12);

            for(int p = 0; p < Points.Count; p++)
            {
                // Fill matrix A with point info
                equationsMat.SetRow(2 * p, new double[12] {
                     0, 0, 0, 0,
                     -RealPoints[0, p], -RealPoints[1, p], -RealPoints[2, p], -1.0f,
                     ImagePoints[1, p] * RealPoints[0, p], ImagePoints[1, p] * RealPoints[1, p],
                     ImagePoints[1, p] * RealPoints[2, p], ImagePoints[1, p] });
                equationsMat.SetRow(2 * p + 1, new double[12] {
                    RealPoints[0, p], RealPoints[1, p], RealPoints[2, p], 1.0f,
                    0, 0, 0, 0,
                    -ImagePoints[0, p] * RealPoints[0, p], -ImagePoints[0, p] * RealPoints[1, p],
                    -ImagePoints[0, p] * RealPoints[2, p], -ImagePoints[0, p]});
            }

            _linearSolver.EquationsMatrix = equationsMat;
            _linearSolver.Solve();
            Vector<double> p_vec = _linearSolver.ResultVector;

            Matrix<double> cameraMatrix = new DenseMatrix(3, 4);
            cameraMatrix.SetRow(0, p_vec.SubVector(0, 4));
            cameraMatrix.SetRow(1, p_vec.SubVector(4, 4));
            cameraMatrix.SetRow(2, p_vec.SubVector(8, 4));

            return cameraMatrix;
        }
Example #15
        public void FinishAndProcess()
        {
            try
            {
                var priceData = new DenseMatrix(symbols.Length, numTicks);

                for (int j = 0; j < symbols.Length; j++)
                {
                    SortedList<DateTime, Tick> d = mktData[j].data.Data;
                    for (int k = 0; k < d.Count; k++)
                    {
                        //if (!symbols[j].Substring(0, 3).Equals("USD")) priceData[j, k] = 1/d.Values[k].BidClose;
                        priceData[j, k] = d.Values[k].BidOpen;
                    }
                }

                Vector<double> price1 = priceData.Row(0);
                Vector<double> price2 = priceData.Row(1);
                //Statistics.ApplyFunction((DenseVector)price1, Math.Log);
                //Statistics.ApplyFunction((DenseVector)price2, Math.Log);

                DenseVector norm1 = price1.ToArray().NormalizeZScore();
                DenseVector norm2 = price2.ToArray().NormalizeZScore();

                var newsym = new string[symbols.Length + 4];
                for (int i = 0; i < symbols.Length; i++) newsym[i] = symbols[i];

                newsym[2] = "spread";
                newsym[3] = "EMA5";
                newsym[4] = "EMA15";
                newsym[5] = "EMA30";


                var m = new DenseMatrix(6, norm1.Count);
                m.SetRow(0, norm1);
                m.SetRow(1, norm2);
                m.SetRow(2, (norm1 - norm2).ToArray().NormalizeZScore());
                m.SetRow(3, EMA.CalcEMA(m.Row(2).ToArray(), 5));
                m.SetRow(4, EMA.CalcEMA(m.Row(2).ToArray(), 15));
                m.SetRow(5, EMA.CalcEMA(m.Row(2).ToArray(), 30));

                string filename = symbols[0].Replace('/', '_') + "-" + symbols[1].Replace('/', '_') + ".html";


                ((DenseVector) m.Row(0)).GenerateSimpleGraph("C:\\Sangar\\result.html");

                Visualize.GenerateMultiSymbolGraph(newsym, m, DateTime.Now.AddSeconds(-60*5*300), new TimeSpan(0, 5, 0),
                    "C:\\Sangar\\" + filename);

                FileUpload.UploadFileToFTP("C:\\Sangar\\" + filename, filename);

                Spread = m[2, m.ColumnCount - 1];

                if (Spread > 2.0 && m[2, m.ColumnCount - 2] <= 2.0)
                    Emailer.SendEmail(symbols[0] + "-" + symbols[1] + " Spread Above 2.0", "Test");

                if (Spread < -2.0 && m[2, m.ColumnCount - 2] >= -2.0)
                    Emailer.SendEmail(symbols[0] + "-" + symbols[1] + " Spread Below -2.0", "Test");

                //if (m[2, m.ColumnCount - 1] < 0.5 && m[2, m.ColumnCount - 2] >= 0.5)
                //    Emailer.SendEmail(symbols[0] + "-" + symbols[1] + " Spread Below 0.5", "Test");

                //if (m[2, m.ColumnCount - 1] > -0.5 && m[2, m.ColumnCount - 2] <= -0.5)
                //    Emailer.SendEmail(symbols[0] + "-" + symbols[1] + " Spread Above -0.5", "Test");
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }
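FinishAndProcess above smooths the spread with an EMA.CalcEMA helper that is not included; a minimal sketch assuming a standard exponential moving average (the class and body are assumptions, only the signature is implied by the calls above):

        public static class EMA // hypothetical sketch
        {
            public static double[] CalcEMA(double[] data, int period)
            {
                // Standard EMA with smoothing factor 2 / (period + 1), seeded with the first value.
                var ema = new double[data.Length];
                double alpha = 2.0 / (period + 1);
                ema[0] = data[0];
                for (int i = 1; i < data.Length; i++)
                {
                    ema[i] = alpha * data[i] + (1 - alpha) * ema[i - 1];
                }
                return ema;
            }
        }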
Example #16
File: GomoriMethod.cs Project: Kant8/IOp
        private bool GomoriIteration()
        {
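            // One Gomory cutting-plane iteration: solve the LP relaxation with the simplex
            // method, pick the basic variable with the largest fractional part, and append
            // a new cutting constraint (extra row and column) for the next iteration.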
            // Step 1
            _writer.WriteLine("Iteration: {0}", iterationNumber);
            var simplexMethod = new SimplexMethod(_writer);
            simplexMethod.Solve(_task);     // Solve the task with the simplex method

            _writer.WriteLine("Optimal plan is found: {0}", _task.xo);
            _writer.WriteLine("Target function value = {0}", _task.c * _task.xo);

            // Step 2
            //var artJToRemoveRow = -1;
            //var artJToRemoveColumn = -1;
            //artJToRemoveRow = -1;
            //artJToRemoveColumn = -1;

            //foreach (var artJ in _artJ)
            //{
            //    if (_task.Jb.Contains(artJ.Column))
            //    {
            //        var rowToRemove = artJ.Row;     // TODO probably need to rewrite row selection

            //        var ai = _task.A.Row(rowToRemove); // Select the row with the artificial constraint
            //        ai = -ai / ai[artJ.Column];
            //        var rowList = ai.ToList();
            //        rowList.RemoveAt(artJ.Column);
            //        ai = DenseVector.OfEnumerable(rowList);

            //        var aj = _task.A.Column(artJ.Column);   // Select the column with the artificial constraint
            //        var columnList = aj.ToList();
            //        var bCoef = _task.b[rowToRemove] / columnList[rowToRemove];
            //        columnList.RemoveAt(rowToRemove);
            //        aj = DenseVector.OfEnumerable(columnList);

            //        var newA = DenseMatrix.Create(_task.A.RowCount - 1, _task.A.ColumnCount - 1,
            //            (i, j) => _task.A[i < rowToRemove ? i : i + 1, j < artJ.Column ? j : j + 1]);

            //        newA += DenseMatrix.OfMatrix(aj.ToColumnMatrix() * ai.ToRowMatrix());   // Remove the artificial row
            //        _task.A = newA;                                                         // and column from matrix A
            //        _task.b = DenseVector.Create(_task.b.Count - 1, i => i < rowToRemove ? _task.b[i] : _task.b[i + 1]);
            //        _task.b += bCoef * aj;

            //        _task.c = DenseVector.Create(_task.c.Count - 1, i => i < artJ.Column ? _task.c[i] : _task.c[i + 1]);    // Remove the artificial variable from vector c

            //        _task.xo = DenseVector.Create(_task.xo.Count - 1, i => i < artJ.Column ? _task.xo[i] : _task.xo[i + 1]);    // Remove the artificial variable from xo

            //        _task.Jb.Remove(artJ.Column);
            //        artJToRemoveColumn = artJ.Column;
            //        artJToRemoveRow = artJ.Row;
            //        break;
            //    }
            //}

            //if (artJToRemoveRow > 0)        // Remove the artificial variable from the basic set
            //{
            //    _artJ.RemoveAll(x => x.Row == artJToRemoveRow);
            //    for (int i = 0; i < _artJ.Count; i++)
            //    {
            //        if (_artJ[i].Row > artJToRemoveRow)
            //        {
            //            _artJ[i].Row--;         // Shift the basic variable indices by one
            //            _artJ[i].Column--;      // after the artificial variable is removed
            //        }
            //    }

            //    for (int i = 0; i < _task.Jb.Count; i++)
            //    {
            //        _task.Jb[i] = _task.Jb[i] > artJToRemoveColumn ? _task.Jb[i] - 1 : _task.Jb[i];
            //    }
            //}

            // Step 3
            var falseIndex = -1;
            var maxFract = 0d;
            for (int i = 0; i < _task.xo.Count(); i++)
            {
                if (Math.Abs(Math.Round(_task.xo[i]) - _task.xo[i]) > Eps)
                {
                    var fract = Math.Abs(_task.xo[i] - Math.Floor(_task.xo[i]));    // Find the basic variable
                    if (_task.Jb.Contains(i) && fract > Eps)                        // with the largest fractional part
                    {                                                               // and remember its index
                        if (fract > maxFract)
                        {
                            maxFract = fract;
                            falseIndex = i;
                        }
                    }
                }
            }

            if (falseIndex < 0)     // If all variables are integer, the solution is found
            {
                return false;   // Stop the method
            }
            _writer.WriteLine("Jk = {0}", falseIndex);

            // Step 4
            var aB = new DenseMatrix(_task.Jb.Count());
            int index = 0;
            foreach (var j in _task.Jb)
            {
                aB.SetColumn(index, _task.A.Column(j));     // Build matrix Ab from the basic columns of A
                index++;
            }
            _writer.Write("Jb: ");
            _task.Jb.ForEach(x => _writer.Write("{0} ", x));
            _writer.WriteLine();
            _writer.WriteLine("Basis matrix: {0}", aB);
            var y = DenseMatrix.Identity(_task.A.RowCount).Column(_task.Jb.IndexOf(falseIndex)) * aB.Inverse(); //Find e'*Ab

            var newRow = new DenseVector(_task.A.ColumnCount + 1);
            newRow.SetSubVector(0, _task.A.ColumnCount, y * _task.A);   // Compute the data for the new cutting constraint

            _writer.WriteLine("Data for new limitation: {0}", newRow);

            for (int i = 0; i < newRow.Count; i++)      // Build the new cutting constraint
            {
                if (i < _task.A.ColumnCount)
                {
                    if (Math.Abs(newRow[i]) < Eps)
                    {
                        newRow[i] = 0;
                    }
                    else
                    {
                        newRow[i] = newRow[i] > 0
                                    ? -(newRow[i] - Math.Floor(newRow[i]))
                                    : -(Math.Ceiling(Math.Abs(newRow[i])) - Math.Abs(newRow[i]));
                    }
                }
                else
                {
                    newRow[i] = 1;
                }
            }
            newRow[falseIndex] = 0;
            _writer.WriteLine("New limitation: {0}", newRow);

            var newb = (y * _task.b);   // Compute the new element of vector b
            newb = newb > 0 ? -(newb - Math.Floor(newb)) : -(Math.Ceiling(Math.Abs(newb)) - Math.Abs(newb)); // TODO probably need to rewrite this

            _writer.WriteLine("New b = {0}", newb);

            // Step 5
            var newMatrix = new DenseMatrix(_task.A.RowCount + 1, _task.A.ColumnCount + 1); // Build the new
            newMatrix.SetSubMatrix(0, _task.A.RowCount, 0, _task.A.ColumnCount, _task.A);   // matrix A
            newMatrix.SetRow(_task.A.RowCount, newRow);
            newMatrix[_task.A.RowCount, _task.A.ColumnCount] = 1;

            var newBVector = new DenseVector(_task.b.Count + 1);    // Build the new
            newBVector.SetSubVector(0, _task.b.Count, _task.b);     // vector b
            newBVector[_task.b.Count] = newb;

            var newCVector = new DenseVector(_task.c.Count + 1);    // Add the new
            newCVector.SetSubVector(0, _task.c.Count, _task.c);     // component of vector c

            var newJb = _task.Jb.ToList();
            newJb.Add(newJb[newJb.Count - 1] + 1);
            _artJ.Add(new ArtJEntry { Column = newMatrix.ColumnCount - 1, Row = newMatrix.RowCount - 1 });

            _task.A = newMatrix.Clone();        // Create
            _task.b = newBVector.Clone();       // a new task
            _task.c = newCVector.Clone();       // for the next iteration
            _task.Jb = newJb;

            iterationNumber++;              // Assign the new iteration number

            return true;
        }
Example #17
        /// <summary>
        /// Generate a random n-class classification problem.
        /// </summary>
        /// <param name="nSamples">The number of samples.</param>
        /// <param name="nFeatures">The total number of features. These comprise <paramref name="nInformative"/>
        /// informative features, <paramref name="nRedundant"/> redundant features, <paramref name="nRepeated"/>
        /// duplicated features and `<paramref name="nFeatures"/>-<paramref name="nInformative"/>-<paramref name="nRedundant"/>-
        /// <paramref name="nRepeated"/>` useless features drawn at random.</param>
        /// <param name="nInformative">The number of informative features. Each class is composed of a number
        /// of gaussian clusters each located around the vertices of a hypercube
        /// in a subspace of dimension <paramref name="nInformative"/>. For each cluster,
        /// informative features are drawn independently from  N(0, 1) and then
        /// randomly linearly combined in order to add covariance. The clusters
        /// are then placed on the vertices of the hypercube.</param>
        /// <param name="nRedundant">The number of redundant features. These features are generated as
        /// random linear combinations of the informative features.</param>
        /// <param name="nRepeated"> The number of dupplicated features, drawn randomly from the informative
        /// and the redundant features.
        /// </param>
        /// <param name="nClasses">The number of classes (or labels) of the classification problem.</param>
        /// <param name="nClustersPerClass">The number of clusters per class.</param>
        /// <param name="weights">The proportions of samples assigned to each class. If None, then
        /// classes are balanced. Note that if `len(weights) == n_classes - 1`,
        /// then the last class weight is automatically inferred.
        /// </param>
        /// <param name="flipY">The fraction of samples whose class are randomly exchanged.</param>
        /// <param name="classSep">The factor multiplying the hypercube dimension.</param>
        /// <param name="hypercube">If True, the clusters are put on the vertices of a hypercube. If
        /// False, the clusters are put on the vertices of a random polytope.</param>
        /// <param name="shift">Shift all features by the specified value. If None, then features
        /// are shifted by a random value drawn in [-class_sep, class_sep].</param>
        /// <param name="scale">Multiply all features by the specified value. If None, then features
        /// are scaled by a random value drawn in [1, 100]. Note that scaling
        /// happens after shifting.
        /// </param>
        /// <param name="shuffle">Shuffle the samples and the features.</param>
        /// <param name="randomState">Random generator.</param>
        /// <returns>array of shape [n_samples]
        /// The integer labels for class membership of each sample.</returns>
        /// <remarks>
        /// The algorithm is adapted from Guyon [1] and was designed to generate
        /// the "Madelon" dataset.
        /// References
        /// ----------
        /// .. [1] I. Guyon, "Design of experiments for the NIPS 2003 variable
        ///   selection benchmark", 2003.
        /// </remarks>
        public static Classification MakeClassification(
            int nSamples = 100,
            int nFeatures = 20,
            int nInformative = 2,
            int nRedundant = 2,
            int nRepeated = 0,
            int nClasses = 2,
            int nClustersPerClass = 2,
            List<double> weights = null,
            double flipY = 0.01,
            double classSep = 1.0,
            bool hypercube = true,
            double? shift = 0.0,
            double? scale = 1.0,
            bool shuffle = true,
            Random randomState = null)
        {
            var generator = randomState ?? new Random();

            // Count features, clusters and samples
            if (nInformative + nRedundant + nRepeated > nFeatures)
            {
                throw new ArgumentException("Number of informative, redundant and repeated " +
                                            "features must sum to less than the number of total" +
                                            " features");
            }

            if (Math.Pow(2, nInformative) < nClasses * nClustersPerClass)
            {
                throw new ArgumentException(
                    "n_classes * n_clusters_per_class must " +
                    "be smaller or equal 2 ** n_informative");
            }

            if (weights != null && !new[] { nClasses, nClasses - 1 }.Contains(weights.Count))
            {
                throw new ArgumentException("Weights specified but incompatible with number of classes.");
            }

            int nUseless = nFeatures - nInformative - nRedundant - nRepeated;
            int nClusters = nClasses * nClustersPerClass;

            if (weights != null && weights.Count == nClasses - 1)
            {
                weights.Add(1.0 - weights.Sum());
            }

            if (weights == null) 
            {
                weights = Enumerable.Repeat(1.0 / nClasses, nClasses).ToList();
                weights[weights.Count - 1] = 1.0 - weights.Take(weights.Count - 1).Sum();
            }

            var nSamplesPerCluster = new List<int>();

            for (int k = 0; k < nClusters; k++)
            {
                nSamplesPerCluster.Add(
                    (int)(nSamples * weights[k % nClasses] / nClustersPerClass));
            }

            for (int i = 0; i < nSamples - nSamplesPerCluster.Sum(); i++)
            {
                nSamplesPerCluster[i % nClusters] += 1;
            }

            // Initialize X and y
            Matrix x = new DenseMatrix(nSamples, nFeatures);
            int[] y = new int[nSamples];

            // Build the polytope
            Matrix c = new DenseMatrix(1 << nInformative, nInformative);
            for (int i = 0; i < 1 << nInformative; i++)
            {
                var row = new DenseVector(nInformative);
                for (int bitN = 0; bitN < nInformative; bitN++)
                {
                    row[bitN] = (i & (1 << bitN)) != 0 ? classSep : -classSep;
                }

                c.SetRow(i, row);
            }

            if (!hypercube)
            {
                for (int k = 0; k < nClusters; k++)
                {
                    c.SetRow(k, c.Row(k) * generator.NextDouble());
                }

                for (int f = 0; f < nInformative; f++)
                {
                    c.SetColumn(f, c.Column(f) * generator.NextDouble());
                }
            }

            // todo:
            // generator.shuffle(C)

            // Loop over all clusters
            int pos = 0;
            int posEnd = 0;

            for (int k = 0; k < nClusters; k++)
            {
                // Number of samples in cluster k
                int nSamplesK = nSamplesPerCluster[k];

                // Define the range of samples
                pos = posEnd;
                posEnd = pos + nSamplesK;

                // Assign labels
                for (int l = pos; l < posEnd; l++)
                {
                    y[l] = k % nClasses;
                }

                // Draw features at random
                var subMatrix = DenseMatrix.CreateRandom(
                    nSamplesK,
                    nInformative,
                    new Normal { RandomSource = generator });

                x.SetSubMatrix(pos, nSamplesK, 0, nInformative, subMatrix);

                // Multiply by a random matrix to create co-variance of the features
                var uniform = new ContinuousUniform(-1, 1) { RandomSource = generator };
                Matrix a = DenseMatrix.CreateRandom(nInformative, nInformative, uniform);

                x.SetSubMatrix(
                    pos,
                    nSamplesK,
                    0,
                    nInformative,
                    x.SubMatrix(pos, nSamplesK, 0, nInformative) * a);

                // Shift the cluster to a vertex
                var v = x.SubMatrix(pos, nSamplesK, 0, nInformative).AddRowVector(c.Row(k));
                x.SetSubMatrix(pos, nSamplesK, 0, nInformative, v);
            }

            // Create redundant features
            if (nRedundant > 0)
            {
                var uniform = new ContinuousUniform(-1, 1) { RandomSource = generator };
                Matrix b = DenseMatrix.CreateRandom(nInformative, nRedundant, uniform);
                x.SetSubMatrix(
                    0,
                    x.RowCount,
                    nInformative,
                    nRedundant,
                    x.SubMatrix(0, x.RowCount, 0, nInformative) * b);
            }

            // Repeat some features
            if (nRepeated > 0)
            {
                int n = nInformative + nRedundant;
                for (int i = 0; i < nRepeated; i++)
                {
                    int r = (int)((generator.NextDouble() * (n - 1)) + 0.5);
                    x.SetColumn(n + i, x.Column(r));
                }
            }

            // Fill useless features
            var denseMatrix = DenseMatrix.CreateRandom(nSamples, nUseless, new Normal { RandomSource = generator });
            x.SetSubMatrix(0, nSamples, nFeatures - nUseless, nUseless, denseMatrix);

            // Randomly flip labels
            if (flipY >= 0.0)
            {
                for (int i = 0; i < nSamples; i++)
                {
                    if (generator.NextDouble() < flipY)
                    {
                        y[i] = generator.Next(nClasses);
                    }
                }
            }

            // Randomly shift and scale
            bool constantShift = shift != null;
            bool constantScale = scale != null;

            for (int f = 0; f < nFeatures; f++)
            {
                if (!constantShift)
                {
                    shift = ((2 * generator.NextDouble()) - 1) * classSep;
                }

                if (!constantScale)
                {
                    scale = 1 + (100 * generator.NextDouble());
                }

                x.SetColumn(f, (x.Column(f) + shift.Value) * scale.Value);
            }

            // Randomly permute samples and features
            // todo:
            /*
            if (shuffle)
            {
                X, y = util_shuffle(X, y, random_state=generator)

                indices = np.arange(n_features)
                generator.shuffle(indices)
                X[:, :] = X[:, indices]
            }*/

            return new Classification { X = x, Y = y };
        }
Example #18
        /// <summary>
        /// Adaptive Cross Approximation (ACA) matrix compression
        /// the result is stored in U and V matrices like U*V
        /// </summary>
        /// <param name="acaThres">Relative error threshold to stop adding rows and columns in ACA iteration</param>
        /// <param name="m">Row indices of Z submatrix to compress</param>
        /// <param name="n">Column indices of Z submatrix to compress</param>
        /// <param name="U">to store result</param>
        /// <param name="V">to store result</param>
        /// <returns>pair with matrix U and V</returns>
        public static Tuple<Matrix, Matrix> Aca(double acaThres, List<int> m, List<int> n, Matrix U, Matrix V)
        {
            int M = m.Count;
            int N = n.Count;
            int Min = Math.Min(M, N);
            U = new DenseMatrix(Min, Min);
            V = new DenseMatrix(Min, Min);
            //if Z is a vector, there is nothing to compress
            if (M == 1 || N == 1)
            {
                U = UserImpedance(m, n);
                V = new DenseMatrix(1, 1);
                V[0, 0] = 1.0;
                return new Tuple<Matrix,Matrix>(U,V);
            }

            //Indices of columns picked up from Z
            //Vector J = new DenseVector(N);
            //List<int> J = new List<int>(N);

            List<int> J = new List<int>(new int [N]);
            //int[] J = new int[N];
            //Indices of rows picked up from Z
            //Vector I = new DenseVector(M);
            List<int> I = new List<int>(new int [M]);
            //int[] I = new int[M];
            //Row indices to search for maximum in R
            //Vector i = new DenseVector(M);
            List<int> i = new List<int>();
            //int[] i = new int[M];
            //Column indices to search for maximum in R
            //Vector j = new DenseVector(N);
            List<int> j = new List<int>();
            //int[] j = new int[N];

            for (int k = 1; k < M; k++)
            {
                i.Add(k);
            }

            for (int k = 0; k < N; k++)
            {
                j.Add(k);
            }

            //Initialization

            //Initialize the 1st row index I(1) = 1
            I[0] = 0;

            //Initialize the 1st row of the approximate error matrix
            List<int> m0 = new List<int>();
            m0.Add(m[I[0]]);
            Matrix Rik = UserImpedance(m0, n);

            //Find the 1st column index J(0)
            double max = -1.0;
            int col = 0;

            foreach (int ind in j)
            {
                if (Math.Abs(Rik[0, ind]) > max)
                {
                    max = Math.Abs(Rik[0, ind]);
                    col = ind;
                }
            }

            //J[0] = j[col];
            J[0] = col;
            j.Remove(J[0]);

            //First row of V
            V = new DenseMatrix(1, Rik.ColumnCount);
            V.SetRow(0, Rik.Row(0).Divide(Rik[0, J[0]]));

            //Initialize the 1st column of the approximate error matrix
            List<int> n0 = new List<int>();
            n0.Add(n[J[0]]);
            Matrix Rjk = UserImpedance(m, n0);

            //First column of U
            U = new DenseMatrix(Rjk.RowCount, 1);
            U.SetColumn(0, Rjk.Column(0));

            // Norm of (approximate) Z, to test error
            double d1 = U.L2Norm();
            double d2 = V.L2Norm();
            double normZ = d1 * d1 * d2 * d2;

            //Find 2nd row index I(2)
            int row = 0;
            max = -1.0;

            foreach (int ind in i)
            {
                if (Math.Abs(Rjk[ind, 0]) > max)
                {
                    max = Math.Abs(Rjk[ind, 0]);
                    row = ind;
                }
            }

            //I[1] = i[row];
            I[1] = row;
            i.Remove(I[1]);

            //ACA iteration: at step k pick the next row/column pivot, append a rank-1
            //term to U and V, and stop once the new term's contribution drops below acaThres
            for (int k = 1; k < Math.Min(M, N); k++)
            {
                //Update (Ik)th row of the approximate error matrix:
                List<int> t1 = new List<int>();
                t1.Add(m[I[k]]);
                Rik = (Matrix)(UserImpedance(t1, n) - U.SubMatrix(I[k], 1, 0, U.ColumnCount).Multiply(V));
                //Find kth column index Jk
                max = -1.0;
                col = 0;

                foreach (int ind in j)
                {
                    if (Math.Abs(Rik[0, ind]) > max)
                    {
                        max = Math.Abs(Rik[0, ind]);
                        col = ind;
                    }
                }

                J[k] = col;
                j.Remove(J[k]);

                //Terminate if R(I(k),J(k)) == 0
                if (Rik[0, J[k]] == 0)
                {
                    break;
                }

                //Set k-th row of V equal to normalized error
                Matrix Vk = (Matrix)Rik.Divide(Rik[0, J[k]]);

                //Update (Jk)th column of the approximate error matrix
                List<int> n1 = new List<int>();
                n1.Add(n[J[k]]);
                Rjk = (Matrix)(UserImpedance(m, n1) - U.Multiply(V.SubMatrix(0, V.RowCount, J[k], 1)));

                // Set k-th column of U equal to updated error
                Matrix Uk = Rjk;

                //Incremental update of the squared norm of the approximate Z:
                //||Z~||^2 += 2 * sum( (U^T * Uk) .* (Vk * V^T)^T ) + ||Uk||^2 * ||Vk||^2
                Matrix a = (Matrix)U.Transpose().Multiply(Uk);
                Matrix b = (Matrix)Vk.Multiply(V.Transpose()).Transpose();
                Matrix s = (Matrix)a.PointwiseMultiply(b);
                double sum = 0;

                for (int i1 = 0; i1 < s.RowCount; i1++)
                {
                    for (int j1 = 0; j1 < s.ColumnCount; j1++)
                    {
                        sum += s[i1, j1];
                    }
                }

                d1 = Uk.L2Norm();
                d2 = Vk.L2Norm();

                normZ += 2 * sum + d1 * d1 * d2 * d2;

                //Update U and V

                U = AddColumn(U, (Vector)Uk.Column(0));
                V = AddRow(V, (Vector)Vk.Row(0));

                if (d1 * d2 <= acaThres * Math.Sqrt(normZ))
                {
                    break;
                }

                if (k == Math.Min(N, M) - 1)
                {
                    break;
                }

                max = -1;
                row = 0;

                foreach (int ind in i)
                {
                    if (Math.Abs(Rjk[ind, 0]) > max)
                    {
                        max = Math.Abs(Rjk[ind, 0]);
                        row = ind;
                    }
                }

                I[k + 1] = row;
                //i = removeIndex(i,I[k+1]);
                i.Remove(I[k + 1]);
            }
            return new Tuple<Matrix, Matrix>(U, V);
        }
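
        A minimal driver sketch for the routine above (not part of the original source): it assumes Aca and UserImpedance live in the same class, that System.Linq is available, and that a relative tolerance of 1e-4 is acceptable; the returned factors should reconstruct the selected block as U * V.

        public static void AcaDemo()
        {
            // Assumed index sets for a 64 x 64 off-diagonal block of Z
            List<int> rows = Enumerable.Range(0, 64).ToList();
            List<int> cols = Enumerable.Range(64, 64).ToList();

            Matrix U = null, V = null;
            Tuple<Matrix, Matrix> uv = Aca(1e-4, rows, cols, U, V);

            // Compare the low-rank product against the full block
            Matrix Zfull = UserImpedance(rows, cols);
            Matrix Zapprox = (Matrix)uv.Item1.Multiply(uv.Item2);
            double relErr = (Zfull - Zapprox).L2Norm() / Zfull.L2Norm();

            Console.WriteLine("ACA rank: " + uv.Item1.ColumnCount + ", relative error: " + relErr);
        }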
示例#19
0
        private void disp_button_Click(object sender, EventArgs e)
        {
            /*
            Normal N = new Normal(0.0, 1.0);
            DenseVector VALS = new DenseVector(20, 0);
            for (int i = -10; i < 10; i++)
            {
                VALS[i + 10] = N.Density(i);
                Console.WriteLine(VALS[i + 10]);
            }
            */

            /*
            //double[] d = new double[]{0,0};
            DenseMatrix mu = new DenseMatrix(2, 1, new[] { 0.0, 0.0 });
            DenseMatrix K = new DenseMatrix(1, 1, new[] { 1.0 });
            DenseMatrix V = new DenseMatrix(2,2, new[] {4.0, 0.0, 0.0, 1.0 });
            MatrixNormal mN = new MatrixNormal(mu, V, K);

            Console.WriteLine(mu.RowCount);
            Console.WriteLine(mu.ColumnCount);
            Console.WriteLine(mN.Sample());
            Console.WriteLine(mN.Sample());
            DenseMatrix sample_pt = new DenseMatrix(2, 1, new[] { 0.0, 0.1 });
            double pt = mN.Density(sample_pt);
            */

            int M = 150;
            int N = 200;

            Normal N_x = new Normal(N/2, 50);
            Normal N_y = new Normal(M/2, 30);

            DenseMatrix M_x = new DenseMatrix(M, N, 0.0);
            DenseMatrix M_y = new DenseMatrix(M, N, 0.0);

            DenseVector V_x = new DenseVector(N);
            for (int i = 0; i < N; i++)
            {
                V_x[i] = N_x.Density(i);
            }

            for (int j = 0; j < M; j++)
            {
                M_x.SetRow(j, V_x);
            }

            DenseVector V_y = new DenseVector(M);
            for (int i = 0; i < M; i++)
            {
                V_y[i] = N_y.Density(i);
            }

            for (int j = 0; j < N; j++)
            {
                M_y.SetColumn(j, V_y);
            }

            DenseMatrix MULT = (DenseMatrix)M_x.PointwiseMultiply(M_y);
            double s = MULT.Data.Sum();
            MULT = (DenseMatrix)MULT.PointwiseDivide(new DenseMatrix(M,N,s));
            //this.dataGridView1.DataSource = MULT;
            //Console.WriteLine(MULT.Data.Sum());

            s = MULT[M / 2, N / 2];
            MULT = (DenseMatrix)MULT.PointwiseDivide(new DenseMatrix(M, N, s));

            /*
            for (int i = 0; i < M; i++)
            {
                Console.Write(i + " - ");
                for (int j = 0; j < N; j++)
                {
                    Console.Write(MULT[i, j] + " ");
                }
                Console.WriteLine();
                Console.WriteLine();
            }
            */
            Console.WriteLine(Environment.ProcessorCount);
        }
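
        Because the map above is a product of two one-dimensional Gaussians, the same M x N matrix can be built in one pass as an outer product of the two density vectors. The helper below is a sketch, not part of the original source: the method name BuildGaussianMap is hypothetical, and it assumes the same Math.NET Numerics types (Normal, DenseVector, DenseMatrix) used in the snippet above.

        private static DenseMatrix BuildGaussianMap(int M, int N, double stdX, double stdY)
        {
            Normal nx = new Normal(N / 2.0, stdX);
            Normal ny = new Normal(M / 2.0, stdY);

            // One-dimensional marginal densities along the columns (x) and rows (y)
            DenseVector vx = new DenseVector(N);
            for (int c = 0; c < N; c++) vx[c] = nx.Density(c);

            DenseVector vy = new DenseVector(M);
            for (int r = 0; r < M; r++) vy[r] = ny.Density(r);

            // Outer product vy * vx^T gives the M x N product-of-marginals map
            DenseMatrix map = new DenseMatrix(M, N);
            double s = 0.0;
            for (int r = 0; r < M; r++)
            {
                for (int c = 0; c < N; c++)
                {
                    map[r, c] = vy[r] * vx[c];
                    s += map[r, c];
                }
            }

            // Normalize so the entries sum to one, matching the normalization above
            return (DenseMatrix)map.Divide(s);
        }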
示例#20
0
        public static void TestChannelLive(this AbstractChannel ind, string symbol, string timeframe, int length)
        {
            //------------grab data
            FXSession session = new FXSession();
            session.InitializeSession();
            while (!session.LoggedIn)
            {
                Thread.Sleep(100);
            }

            HistoricPriceEngine h = new HistoricPriceEngine(session);
            h.GetLongHistoricPrices(symbol, timeframe, length);

            while (!h.Complete)
            {
                Thread.Sleep(100);
            }
            //-----------------------

            var highList = new List<double>();
            var medList = new List<double>();
            var lowList = new List<double>();

            var dataList = new List<double>();
            var dateTimeList = new SortedList<DateTime, int>();

            Quantum q = h.Data;

            int count = 0;
            foreach (Tick t in q)
            {
                try
                {
                    ind.HandleNextTick(t);
                    highList.Add(ind.HI(0));
                    medList.Add(ind.MID(0));
                    lowList.Add(ind.LOW(0));

                    dataList.Add(t.BidClose);
                    dateTimeList.Add(t.Time, 1);
                }
                catch (Exception e)
                {
                    Console.WriteLine(e);
                }
                if (count++ > length) break;
            }

            var dz = new DenseMatrix(4, medList.Count);
            dz.SetRow(0, new DenseVector(dataList.ToArray()));
            dz.SetRow(1, new DenseVector(highList.ToArray()));
            dz.SetRow(2, new DenseVector(medList.ToArray()));
            dz.SetRow(3, new DenseVector(lowList.ToArray()));

            Visualize.GenerateMultiPaneGraph(new[] { "data", "high", ind.ToString(), "low" }, dateTimeList.Keys.ToArray(), dz, QSConstants.DEFAULT_DATA_FILEPATH + @"results.html"
                , new ChartOption[]
                {
                    new ChartOption(){Height = 500}, 
                    new ChartOption(){Height = 0, Layover = true, YPosition = 0},
                    new ChartOption(){Height = 0, Layover = true, YPosition = 0},
                    new ChartOption(){Height = 0, Layover = true, YPosition = 0}
                });

            Console.WriteLine("Done Generating Graph for " + ind.ToString());
        }
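
        A hypothetical call site for the extension method above; the concrete channel type, symbol, timeframe string and length are placeholder assumptions, not taken from the original code.

        public static void RunChannelTest()
        {
            // Any concrete AbstractChannel implementation would do here;
            // "MyChannel" is a placeholder for one defined elsewhere in the project.
            AbstractChannel channel = new MyChannel();
            channel.TestChannelLive("EUR/USD", "m5", 1000);
        }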