Code example #1
        public static void csv_serializedBinary()
        {
            SimpleMatrix <DMatrixRMaj> A = new SimpleMatrix <DMatrixRMaj>(2, 3, true, new double[] { 1, 2, 3, 4, 5, 6 });

            try
            {
                A.saveToFileBinary("matrix_file.data");
                SimpleMatrix <DMatrixRMaj> B = SimpleMatrix <DMatrixRMaj> .loadBinary("matrix_file.data");

                B.print();
            }
            catch (IOException e)
            {
                throw new InvalidOperationException(e.Message, e);
            }
        }
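As a quick sanity check on the round trip above, the reloaded matrix can be compared element-wise against the original. This is only a sketch: it assumes the port also exposes EJML's isIdentical(other, tolerance) helper, which is not shown in the example.

        public static void verifyBinaryRoundTrip()
        {
            SimpleMatrix <DMatrixRMaj> A = new SimpleMatrix <DMatrixRMaj>(2, 3, true, new double[] { 1, 2, 3, 4, 5, 6 });

            A.saveToFileBinary("matrix_file.data");
            SimpleMatrix <DMatrixRMaj> B = SimpleMatrix <DMatrixRMaj> .loadBinary("matrix_file.data");

            // element-wise comparison within a small tolerance
            Console.WriteLine(A.isIdentical(B, 1e-12));
        }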
Code example #2
        /**
         * Returns the R matrix.
         */
        public SimpleMatrix <DMatrixRMaj> getR()
        {
            SimpleMatrix <DMatrixRMaj> R = new SimpleMatrix <DMatrixRMaj>(QR.numRows(), QR.numCols());

            int N = Math.Min(QR.numCols(), QR.numRows());

            for (int i = 0; i < N; i++)
            {
                for (int j = i; j < QR.numCols(); j++)
                {
                    R.set(i, j, QR.get(i, j));
                }
            }

            return(R);
        }
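getR copies only the upper-triangular part of the packed QR storage, so together with getQ (code example #14 below) the decomposition can be verified by multiplying the factors back together. A minimal sketch, assuming the enclosing class mirrors EJML's QRExampleSimple with a decompose(A) method (the class name is an assumption, not shown above):

            SimpleMatrix <DMatrixRMaj> A = new SimpleMatrix <DMatrixRMaj>(3, 2, true, new double[] { 1, 2, 3, 4, 5, 6 });
            QRExampleSimple qr = new QRExampleSimple();   // assumed name of the class containing getQ()/getR()

            qr.decompose(A);
            // Q * R should reproduce A up to floating-point rounding
            qr.getQ().mult(qr.getR()).print();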
Code example #3
        /// <summary>Applies softmax to all of the elements of the matrix.</summary>
        /// <remarks>
        /// Applies softmax to all of the elements of the matrix.  The return
        /// matrix will have all of its elements sum to 1.  If your matrix is
        /// not already a vector, be sure this is what you actually want.
        /// </remarks>
        public static SimpleMatrix Softmax(SimpleMatrix input)
        {
            SimpleMatrix output = new SimpleMatrix(input);

            for (int i = 0; i < output.NumRows(); ++i)
            {
                for (int j = 0; j < output.NumCols(); ++j)
                {
                    output.Set(i, j, Math.Exp(output.Get(i, j)));
                }
            }
            double sum = output.ElementSum();

            // will be safe, since exp should never return 0
            return(output.Scale(1.0 / sum));
        }
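A short usage sketch for Softmax on a 3x1 column vector of raw scores; NeuralUtils as the enclosing class name is an assumption, since the example does not show it:

            SimpleMatrix scores = new SimpleMatrix(3, 1);
            scores.Set(0, 0, 1.0);
            scores.Set(1, 0, 2.0);
            scores.Set(2, 0, 3.0);

            SimpleMatrix probs = NeuralUtils.Softmax(scores);
            Console.WriteLine(probs.ElementSum());   // ~1.0: the softmax output sums to one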
Code example #4
    public double Dot(SimpleMatrix m)
    {
        if (yDim != m.YDim || xDim != 1 || m.XDim != 1)
        {
            throw (new InvalidOperationException
                       ("Dot product is only possible for two n x 1 matrices of equal length"));
        }

        double sum = 0.0;

        for (int y = 0; y < yDim; ++y)
        {
            sum += values[y, 0] * m[y, 0];
        }

        return(sum);
    }
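A usage sketch for Dot, assuming the same (rows, columns) constructor and [row, column] indexer this SimpleMatrix class exposes in the other examples on this page:

    SimpleMatrix a = new SimpleMatrix(3, 1);
    SimpleMatrix b = new SimpleMatrix(3, 1);

    a[0, 0] = 1; a[1, 0] = 2; a[2, 0] = 3;
    b[0, 0] = 4; b[1, 0] = 5; b[2, 0] = 6;

    Console.WriteLine(a.Dot(b));   // 1*4 + 2*5 + 3*6 = 32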
Code example #5
        /// <summary>Return as a double the probability of the predicted class.</summary>
        /// <remarks>
        /// Return as a double the probability of the predicted class. If it is not defined for a node,
        /// it will return -1
        /// </remarks>
        /// <returns>Either the label probability or -1.0 if none</returns>
        public static double GetPredictedClassProb(ILabel label)
        {
            if (!(label is CoreLabel))
            {
                throw new ArgumentException("CoreLabels required to get the attached predicted class probability");
            }
            // the annotation may be missing for a node, so read it as a nullable value
            int?         val         = ((CoreLabel)label).Get(typeof(RNNCoreAnnotations.PredictedClass)) as int?;
            SimpleMatrix predictions = ((CoreLabel)label).Get(typeof(RNNCoreAnnotations.Predictions)) as SimpleMatrix;

            if (val != null)
            {
                return(predictions.Get(val.Value));
            }
            else
            {
                return(-1.0);
            }
        }
Code example #6
        public ForeNetwork(SimpleMatrix input, SimpleMatrix output, int hiddenNum)
        {
            InputData    = input;
            OutputData   = output;
            HiddenNumber = hiddenNum;
            SampleSize   = input.Row;
            InputNumber  = input.Column;
            OutputNumber = output.Column;

            // init matrix
            WeightInput  = new SimpleMatrix(InputNumber, HiddenNumber);
            WeightOutput = new SimpleMatrix(HiddenNumber, OutputNumber);
            WeightInput.Randomize();
            WeightOutput.Randomize();

            //MaxIterNum = 300;
            Trained = false;
        }
Code example #7
        /// <summary>Concatenates several column vectors into one large column vector</summary>
        public static SimpleMatrix Concatenate(params SimpleMatrix[] vectors)
        {
            int size = 0;

            foreach (SimpleMatrix vector in vectors)
            {
                size += vector.NumRows();
            }
            SimpleMatrix result = new SimpleMatrix(size, 1);
            int          index  = 0;

            foreach (SimpleMatrix vector_1 in vectors)
            {
                result.InsertIntoThis(index, 0, vector_1);
                index += vector_1.NumRows();
            }
            return(result);
        }
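Usage sketch for Concatenate, again with NeuralUtils assumed as the enclosing class:

            SimpleMatrix v1 = new SimpleMatrix(2, 1);   // 2x1 column vector
            SimpleMatrix v2 = new SimpleMatrix(3, 1);   // 3x1 column vector

            SimpleMatrix stacked = NeuralUtils.Concatenate(v1, v2);
            Console.WriteLine(stacked.NumRows());   // 5: the inputs are stacked vertically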
Code example #8
        public StaticExtendedKalmanFilter()
        {
            x_pred = new SimpleMatrix <Matrix>(numStates, 1);
            F      = SimpleMatrix <Matrix> .identity(numStates);

            Q      = new SimpleMatrix <Matrix>(numStates, numStates);
            P_pred = new SimpleMatrix <Matrix>(numStates, numStates);
            x_meas = new SimpleMatrix <Matrix>(numStates, 1);
            P_meas = new SimpleMatrix <Matrix>(numStates, numStates);
            S_f    = h_0 / 2.0;
            S_g    = 2.0 * Math.Pow(Constants.PI_ORBIT, 2) * h_2;

            // Initialization of the state transition matrix
            F.set(idxClockBias, idxClockDrift, DELTA_T);

            // Initialization of the process noise matrix
            initQ();
        }
Code example #9
        /*
         * // An example of how you could read in old models with readObject to fix the serialization
         * // You would first read in the old model, then reserialize it
         * private void readObject(ObjectInputStream in)
         * throws IOException, ClassNotFoundException
         * {
         * ObjectInputStream.GetField fields = in.readFields();
         * binaryTransform = ErasureUtils.uncheckedCast(fields.get("binaryTransform", null));
         *
         * // transform binaryTensors
         * binaryTensors = TwoDimensionalMap.treeMap();
         * TwoDimensionalMap<String, String, edu.stanford.nlp.rnn.SimpleTensor> oldTensors = ErasureUtils.uncheckedCast(fields.get("binaryTensors", null));
         * for (String first : oldTensors.firstKeySet()) {
         * for (String second : oldTensors.get(first).keySet()) {
         * binaryTensors.put(first, second, new SimpleTensor(oldTensors.get(first, second).slices));
         * }
         * }
         *
         * binaryClassification = ErasureUtils.uncheckedCast(fields.get("binaryClassification", null));
         * unaryClassification = ErasureUtils.uncheckedCast(fields.get("unaryClassification", null));
         * wordVectors = ErasureUtils.uncheckedCast(fields.get("wordVectors", null));
         *
         * if (fields.defaulted("numClasses")) {
         * throw new RuntimeException();
         * }
         * numClasses = fields.get("numClasses", 0);
         *
         * if (fields.defaulted("numHid")) {
         * throw new RuntimeException();
         * }
         * numHid = fields.get("numHid", 0);
         *
         * if (fields.defaulted("numBinaryMatrices")) {
         * throw new RuntimeException();
         * }
         * numBinaryMatrices = fields.get("numBinaryMatrices", 0);
         *
         * if (fields.defaulted("binaryTransformSize")) {
         * throw new RuntimeException();
         * }
         * binaryTransformSize = fields.get("binaryTransformSize", 0);
         *
         * if (fields.defaulted("binaryTensorSize")) {
         * throw new RuntimeException();
         * }
         * binaryTensorSize = fields.get("binaryTensorSize", 0);
         *
         * if (fields.defaulted("binaryClassificationSize")) {
         * throw new RuntimeException();
         * }
         * binaryClassificationSize = fields.get("binaryClassificationSize", 0);
         *
         * if (fields.defaulted("numUnaryMatrices")) {
         * throw new RuntimeException();
         * }
         * numUnaryMatrices = fields.get("numUnaryMatrices", 0);
         *
         * if (fields.defaulted("unaryClassificationSize")) {
         * throw new RuntimeException();
         * }
         * unaryClassificationSize = fields.get("unaryClassificationSize", 0);
         *
         * rand = ErasureUtils.uncheckedCast(fields.get("rand", null));
         * op = ErasureUtils.uncheckedCast(fields.get("op", null));
         * op.classNames = op.DEFAULT_CLASS_NAMES;
         * op.equivalenceClasses = op.APPROXIMATE_EQUIVALENCE_CLASSES;
         * op.equivalenceClassNames = op.DEFAULT_EQUIVALENCE_CLASS_NAMES;
         * }
         */
        /// <summary>
        /// Given single matrices and sets of options, create the
        /// corresponding SentimentModel.
        /// </summary>
        /// <remarks>
        /// Given single matrices and sets of options, create the
        /// corresponding SentimentModel.  Useful for creating a Java version
        /// of a model trained in some other manner, such as using the
        /// original Matlab code.
        /// </remarks>
        internal static Edu.Stanford.Nlp.Sentiment.SentimentModel ModelFromMatrices(SimpleMatrix W, SimpleMatrix Wcat, SimpleTensor Wt, IDictionary <string, SimpleMatrix> wordVectors, RNNOptions op)
        {
            if (!op.combineClassification || !op.simplifiedModel)
            {
                throw new ArgumentException("Can only create a model using this method if combineClassification and simplifiedModel are turned on");
            }
            TwoDimensionalMap <string, string, SimpleMatrix> binaryTransform = TwoDimensionalMap.TreeMap();

            binaryTransform.Put(string.Empty, string.Empty, W);
            TwoDimensionalMap <string, string, SimpleTensor> binaryTensors = TwoDimensionalMap.TreeMap();

            binaryTensors.Put(string.Empty, string.Empty, Wt);
            TwoDimensionalMap <string, string, SimpleMatrix> binaryClassification = TwoDimensionalMap.TreeMap();
            IDictionary <string, SimpleMatrix> unaryClassification = Generics.NewTreeMap();

            unaryClassification[string.Empty] = Wcat;
            return(new Edu.Stanford.Nlp.Sentiment.SentimentModel(binaryTransform, binaryTensors, binaryClassification, unaryClassification, wordVectors, op));
        }
Code example #10
        public SimpleMatrix Multiply(SimpleMatrix m)
        {
            var result = new double[this.RowCount, m.ColumCount];

            for (int row = 0; row < this.RowCount; row++)
            {
                for (int column = 0; column < m.ColumCount; column++)
                {
                    double sum = 0;
                    for (int iterator = 0; iterator < this.ColumCount; iterator++)
                    {
                        sum += this[row, iterator] * m[iterator, column];
                    }
                    result[row, column] = sum;
                }
            }
            return(new SimpleMatrix(result));
        }
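Usage sketch for Multiply; the double[,] constructor is the same one the method uses to wrap its result:

            var left  = new SimpleMatrix(new double[,] { { 1, 2, 3 }, { 4, 5, 6 } });            // 2x3
            var right = new SimpleMatrix(new double[,] { { 7, 8 }, { 9, 10 }, { 11, 12 } });     // 3x2

            SimpleMatrix product = left.Multiply(right);   // 2x2 result: [[58, 64], [139, 154]]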
Code example #11
        public virtual void Evaluate(Tree guess, Tree gold, PrintWriter pw, double weight)
        {
            IList <ILabel> words = guess.Yield();
            int            pos   = 0;

            foreach (ILabel word in words)
            {
                ++pos;
                SimpleMatrix wv = model.GetWordVector(word.Value());
                // would be faster but more implementation-specific if we
                // removed wv.equals
                if (wv == unk || wv.Equals(unk))
                {
                    pw.Printf("  Unknown word in position %d: %s%n", pos, word.Value());
                    unkWords.Add(word.Value());
                }
            }
        }
Code example #12
        public virtual void RunCoref(Document document)
        {
            IList <Mention> sortedMentions = CorefUtils.GetSortedMentions(document);
            IDictionary <int, IList <Mention> > mentionsByHeadIndex = new Dictionary <int, IList <Mention> >();

            foreach (Mention m in sortedMentions)
            {
                IList <Mention> withIndex = mentionsByHeadIndex.ComputeIfAbsent(m.headIndex, null);
                withIndex.Add(m);
            }
            SimpleMatrix documentEmbedding = embeddingExtractor.GetDocumentEmbedding(document);
            IDictionary <int, SimpleMatrix> antecedentEmbeddings = new Dictionary <int, SimpleMatrix>();
            IDictionary <int, SimpleMatrix> anaphorEmbeddings    = new Dictionary <int, SimpleMatrix>();
            ICounter <int> anaphoricityScores = new ClassicCounter <int>();

            foreach (Mention m_1 in sortedMentions)
            {
                SimpleMatrix mentionEmbedding = embeddingExtractor.GetMentionEmbeddings(m_1, documentEmbedding);
                antecedentEmbeddings[m_1.mentionID] = model.GetAntecedentEmbedding(mentionEmbedding);
                anaphorEmbeddings[m_1.mentionID]    = model.GetAnaphorEmbedding(mentionEmbedding);
                anaphoricityScores.IncrementCount(m_1.mentionID, model.GetAnaphoricityScore(mentionEmbedding, featureExtractor.GetAnaphoricityFeatures(m_1, document, mentionsByHeadIndex)));
            }
            IDictionary <int, IList <int> > mentionToCandidateAntecedents = CorefUtils.HeuristicFilter(sortedMentions, maxMentionDistance, maxMentionDistanceWithStringMatch);

            foreach (KeyValuePair <int, IList <int> > e in mentionToCandidateAntecedents)
            {
                double bestScore  = anaphoricityScores.GetCount(e.Key) - 50 * (greedyness - 0.5);
                int    m_2        = e.Key;
                int?   antecedent = null;   // nullable so that "no antecedent selected" can be represented
                foreach (int ca in e.Value)
                {
                    double score = model.GetPairwiseScore(antecedentEmbeddings[ca], anaphorEmbeddings[m_2], featureExtractor.GetPairFeatures(new Pair <int, int>(ca, m_2), document, mentionsByHeadIndex));
                    if (score > bestScore)
                    {
                        bestScore  = score;
                        antecedent = ca;
                    }
                }
                if (antecedent != null)
                {
                    CorefUtils.MergeCoreferenceClusters(new Pair <int, int>(antecedent.Value, m_2), document);
                }
            }
        }
Code example #13
        public DynamicExtendedKalmanFilter()
        {
            x_pred = new SimpleMatrix <Matrix>(numStates, 1);
            F      = SimpleMatrix <Matrix> .identity(numStates);

            Q      = new SimpleMatrix <Matrix>(numStates, numStates);
            P_pred = new SimpleMatrix <Matrix>(numStates, numStates);
            x_meas = new SimpleMatrix <Matrix>(numStates, 1);
            P_meas = new SimpleMatrix <Matrix>(numStates, numStates);

            // Initialization of the state transition matrix
            F.set(idxX, idxU, DELTA_T);
            F.set(idxY, idxV, DELTA_T);
            F.set(idxZ, idxW, DELTA_T);
            F.set(idxClockBias, idxClockDrift, DELTA_T);

            // Initialization of the process noise matrix
            initQ();
        }
Code example #14
        /**
         * Returns the Q matrix.
         */
        public SimpleMatrix <DMatrixRMaj> getQ()
        {
            SimpleMatrix <DMatrixRMaj> Q = SimpleMatrix <DMatrixRMaj> .identity(QR.numRows());

            int N = Math.Min(QR.numCols(), QR.numRows());

            // compute Q by first extracting the householder vectors from the columns of QR and then applying it to Q
            for (int j = N - 1; j >= 0; j--)
            {
                SimpleMatrix <DMatrixRMaj> u = new SimpleMatrix <DMatrixRMaj>(QR.numRows(), 1);
                u.insertIntoThis(j, 0, QR.extractMatrix(j, SimpleMatrix <DMatrixRMaj> .END, j, j + 1));
                u.set(j, 1.0);

                // A = (I - gamma*u*u^T)*A
                Q = Q.plus(-gammas[j], u.mult(u.transpose()).mult(Q)) as SimpleMatrix <DMatrixRMaj>;
            }

            return(Q);
        }
Code example #15
        private static SimpleMatrix EncodeDistance(int d)
        {
            SimpleMatrix m = new SimpleMatrix(11, 1);

            if (d < 5)
            {
                m.Set(d, 1);
            }
            else if (d < 8)
            {
                m.Set(5, 1);
            }
            else if (d < 16)
            {
                m.Set(6, 1);
            }
            else if (d < 32)
            {
                m.Set(7, 1);
            }
            else if (d < 64)
            {
                m.Set(8, 1);
            }
            else
            {
                m.Set(9, 1);
            }
            m.Set(10, Math.Min(d, 64) / 64.0);
            return(m);
        }
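EncodeDistance builds an 11x1 feature vector: indices 0-4 encode the exact distances 0-4, index 5 covers 5-7, index 6 covers 8-15, index 7 covers 16-31, index 8 covers 32-63, index 9 covers 64 and above, and index 10 always holds min(d, 64) / 64. A sketch of a call (the method is private, so this is for illustration only):

            SimpleMatrix encoded = EncodeDistance(20);
            Console.WriteLine(encoded.Get(7));    // 1.0, because 16 <= 20 < 32
            Console.WriteLine(encoded.Get(10));   // 20 / 64.0 = 0.3125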
Code example #16
        public override void calculateCorrection(Time currentTime, Coordinates <Matrix> approximatedPose, SatellitePosition satelliteCoordinates, NavigationProducer navigationProducer, Location initialLocation)
        {
            // Compute the difference vector between the receiver and the satellite
            SimpleMatrix <Matrix> diff = approximatedPose.minusXYZ(satelliteCoordinates);

            // Compute the geometric distance between the receiver and the satellite

            double geomDist = Math.Sqrt(Math.Pow(diff.get(0), 2) + Math.Pow(diff.get(1), 2) + Math.Pow(diff.get(2), 2));

            // Compute the geocentric distance of the receiver
            double geoDistRx = Math.Sqrt(Math.Pow(approximatedPose.getX(), 2) + Math.Pow(approximatedPose.getY(), 2) + Math.Pow(approximatedPose.getZ(), 2));

            // Compute the geocentric distance of the satellite
            double geoDistSv = Math.Sqrt(Math.Pow(satelliteCoordinates.getX(), 2) + Math.Pow(satelliteCoordinates.getY(), 2) + Math.Pow(satelliteCoordinates.getZ(), 2));


            // Compute the Shapiro correction
            correctionValue = ((2.0 * Constants.EARTH_GRAVITATIONAL_CONSTANT) / Math.Pow(Constants.SPEED_OF_LIGHT, 2)) * Math.Log((geoDistSv + geoDistRx + geomDist) / (geoDistSv + geoDistRx - geomDist));
        }
Code example #17
        private static double ScaleAndRegularize(TwoDimensionalMap <string, string, SimpleMatrix> derivatives, TwoDimensionalMap <string, string, SimpleMatrix> currentMatrices, double scale, double regCost, bool dropBiasColumn)
        {
            double cost = 0.0;

            // the regularization cost
            foreach (TwoDimensionalMap.Entry <string, string, SimpleMatrix> entry in currentMatrices)
            {
                SimpleMatrix D         = derivatives.Get(entry.GetFirstKey(), entry.GetSecondKey());
                SimpleMatrix regMatrix = entry.GetValue();
                if (dropBiasColumn)
                {
                    regMatrix = new SimpleMatrix(regMatrix);
                    regMatrix.InsertIntoThis(0, regMatrix.NumCols() - 1, new SimpleMatrix(regMatrix.NumRows(), 1));
                }
                D = D.Scale(scale).Plus(regMatrix.Scale(regCost));
                derivatives.Put(entry.GetFirstKey(), entry.GetSecondKey(), D);
                cost += regMatrix.ElementMult(regMatrix).ElementSum() * regCost / 2.0;
            }
            return(cost);
        }
Code example #18
 /// <summary>Compute dot product between two vectors.</summary>
 public static double Dot(SimpleMatrix vector1, SimpleMatrix vector2)
 {
     if (vector1.NumRows() == 1)
     {
         // vector1: row vector, assume that vector2 is a row vector too
         return(vector1.Mult(vector2.Transpose()).Get(0));
     }
     else if (vector1.NumCols() == 1)
     {
         // vector1: col vector, assume that vector2 is also a column vector.
         return(vector1.Transpose().Mult(vector2).Get(0));
     }
     else
     {
         throw new AssertionError("Error in neural.Utils.dot: vector1 is a matrix " + vector1.NumRows() + " x " + vector1.NumCols());
     }
 }
Code example #19
    private SimpleMatrix GetAdjustment(ScalePoint point,
                                       int level, int x, int y, out double dp)
    {
        dp = 0.0;
        if (point.Level <= 0 || point.Level >= (spaces.Length - 1))
        {
            throw (new ArgumentException("point.Level is not within [bottom-1;top-1] range"));
        }

        ImageMap below   = spaces[level - 1];
        ImageMap current = spaces[level];
        ImageMap above   = spaces[level + 1];

        SimpleMatrix H = new SimpleMatrix(3, 3);

        H[0, 0] = below[x, y] - 2 * current[x, y] + above[x, y];
        H[0, 1] = H[1, 0] = 0.25 * (above[x, y + 1] - above[x, y - 1] -
                                    (below[x, y + 1] - below[x, y - 1]));
        H[0, 2] = H[2, 0] = 0.25 * (above[x + 1, y] - above[x - 1, y] -
                                    (below[x + 1, y] - below[x - 1, y]));
        H[1, 1] = current[x, y - 1] - 2 * current[x, y] + current[x, y + 1];
        H[1, 2] = H[2, 1] = 0.25 * (current[x + 1, y + 1] - current[x - 1, y + 1] -
                                    (current[x + 1, y - 1] - current[x - 1, y - 1]));
        H[2, 2] = current[x - 1, y] - 2 * current[x, y] + current[x + 1, y];

        SimpleMatrix d = new SimpleMatrix(3, 1);

        d[0, 0] = 0.5 * (above[x, y] - below[x, y]);
        d[1, 0] = 0.5 * (current[x, y + 1] - current[x, y - 1]);
        d[2, 0] = 0.5 * (current[x + 1, y] - current[x - 1, y]);

        SimpleMatrix b = (SimpleMatrix)d.Clone();

        b.Negate();

        H.SolveLinear(b);

        dp = b.Dot(d);

        return(b);
    }
Code example #20
        /// <summary>
        /// Concatenates several column vectors into one large column
        /// vector, adds a 1.0 at the end as a bias term
        /// </summary>
        public static SimpleMatrix ConcatenateWithBias(params SimpleMatrix[] vectors)
        {
            int size = 0;

            foreach (SimpleMatrix vector in vectors)
            {
                size += vector.NumRows();
            }
            // one extra for the bias
            size++;
            SimpleMatrix result = new SimpleMatrix(size, 1);
            int          index  = 0;

            foreach (SimpleMatrix vector_1 in vectors)
            {
                result.InsertIntoThis(index, 0, vector_1);
                index += vector_1.NumRows();
            }
            result.Set(index, 0, 1.0);
            return(result);
        }
Code example #21
        public static void main(string[] args)
        {
            IMersenneTwister rand = new MersenneTwisterFast(234);

            Equation.Equation eq = new Equation.Equation();
            eq.getFunctions().add("multTransA", createMultTransA());

            SimpleMatrix <DMatrixRMaj> A = new SimpleMatrix <DMatrixRMaj>(1, 1); // will be resized
            SimpleMatrix <DMatrixRMaj> B = SimpleMatrix <DMatrixRMaj> .random64(3, 4, -1, 1, rand);

            SimpleMatrix <DMatrixRMaj> C = SimpleMatrix <DMatrixRMaj> .random64(3, 4, -1, 1, rand);

            eq.alias(A, "A", B, "B", C, "C");

            eq.process("A=multTransA(B,C)");

            Console.WriteLine("Found");
            Console.WriteLine(A);
            Console.WriteLine("Expected");
            B.transpose().mult(C).print();
        }
Code example #22
        internal virtual void ReadWordVectors()
        {
            Embedding embedding = new Embedding(op.wordVectors, op.numHid);

            this.wordVectors = Generics.NewTreeMap();
            //    Map<String, SimpleMatrix> rawWordVectors = NeuralUtils.readRawWordVectors(op.wordVectors, op.numHid);
            //    for (String word : rawWordVectors.keySet()) {
            foreach (string word in embedding.KeySet())
            {
                // TODO: factor out unknown word vector code from DVParser
                wordVectors[word] = embedding.Get(word);
            }
            string       unkWord           = op.unkWord;
            SimpleMatrix unknownWordVector = wordVectors[unkWord];

            wordVectors[UnknownWord] = unknownWordVector;
            if (unknownWordVector == null)
            {
                throw new Exception("Unknown word vector not specified in the word vector file");
            }
        }
Code example #23
        /// <summary>Outputs the scores from the tree.</summary>
        /// <remarks>
        /// Outputs the scores from the tree.  Counts the tree nodes the
        /// same as setIndexLabels.
        /// </remarks>
        private static int OutputTreeScores(TextWriter @out, Tree tree, int index)
        {
            if (tree.IsLeaf())
            {
                return(index);
            }
            @out.Write("  " + index + ':');
            SimpleMatrix vector = RNNCoreAnnotations.GetPredictions(tree);

            for (int i = 0; i < vector.GetNumElements(); ++i)
            {
                @out.Write("  " + Nf.Format(vector.Get(i)));
            }
            @out.WriteLine();
            index++;
            foreach (Tree child in tree.Children())
            {
                index = OutputTreeScores(@out, child, index);
            }
            return(index);
        }
Code example #24
File: frSift.cs  Project: zanderphh/Hownet
        private void frSift_Load(object sender, EventArgs e)
        {
            SimpleMatrix A = new SimpleMatrix(4, 4);

            A[0, 0] = 5; A[0, 1] = 3; A[0, 2] = -1; A[0, 3] = 0;
            A[1, 0] = 2; A[1, 1] = 0; A[1, 2] = 4; A[1, 3] = 1;
            A[2, 0] = -3; A[2, 1] = 3; A[2, 2] = -3; A[2, 3] = 5;
            A[3, 0] = 0; A[3, 1] = 6; A[3, 2] = -2; A[3, 3] = 3;

            SimpleMatrix b = new SimpleMatrix(4, 1);

            b[0, 0] = 11; b[1, 0] = 1; b[2, 0] = -2; b[3, 0] = 9;

            Console.WriteLine("Correct results should be: (1.0, 2.0, 0.0, -1.0)\n");
            Console.WriteLine("CALCULATING");
            A.SolveLinear(b);
            Console.WriteLine("Results:");
            for (int n = 0; n < 4; ++n)
            {
                Console.WriteLine("b[{0}] = {1}", n, b[n, 0]);
            }
        }
Code example #25
 private SentimentModel(TwoDimensionalMap <string, string, SimpleMatrix> binaryTransform,
                        TwoDimensionalMap <string, string, SimpleTensor> binaryTensors,
                        TwoDimensionalMap <string, string, SimpleMatrix> binaryClassification,
                        IDictionary <string, SimpleMatrix> unaryClassification,
                        IDictionary <string, SimpleMatrix> wordVectors, RNNOptions op)
 {
     this.op = op;
     this.binaryTransform      = binaryTransform;
     this.binaryTensors        = binaryTensors;
     this.binaryClassification = binaryClassification;
     this.unaryClassification  = unaryClassification;
     this.wordVectors          = wordVectors;
     this.numClasses           = op.numClasses;
     if (op.numHid <= 0)
     {
         int nh = 0;
         foreach (SimpleMatrix wv in wordVectors.Values)
         {
             nh = wv.GetNumElements();
         }
         this.numHid = nh;
     }
     else
     {
         this.numHid = op.numHid;
     }
     this.numBinaryMatrices = binaryTransform.Size();
     binaryTransformSize    = numHid * (2 * numHid + 1);
     if (op.useTensors)
     {
         binaryTensorSize = numHid * numHid * numHid * 4;
     }
     else
     {
         binaryTensorSize = 0;
     }
     binaryClassificationSize = (op.combineClassification) ? 0 : numClasses * (numHid + 1);
     numUnaryMatrices         = unaryClassification.Count;
     unaryClassificationSize  = numClasses * (numHid + 1);
     rand     = new Random(op.randomSeed);
     identity = SimpleMatrix.Identity(numHid);
 }
Code example #26
        public static void VectorToParams(double[] theta, params IEnumerator <SimpleMatrix>[] matrices)
        {
            int index = 0;

            foreach (IEnumerator <SimpleMatrix> matrixIterator in matrices)
            {
                while (matrixIterator.MoveNext())
                {
                    SimpleMatrix matrix      = matrixIterator.Current;
                    int          numElements = matrix.GetNumElements();
                    for (int i = 0; i < numElements; ++i)
                    {
                        matrix.Set(i, theta[index]);
                        ++index;
                    }
                }
            }
            if (index != theta.Length)
            {
                throw new AssertionError("Did not entirely use the theta vector");
            }
        }
Code example #27
        //@Override
        public void update(DMatrixRMaj _z, DMatrixRMaj _R)
        {
            // a fast way to make the matrices usable by SimpleMatrix
            SimpleMatrix <DMatrixRMaj> z = SimpleMatrix <DMatrixRMaj> .wrap(_z);

            SimpleMatrix <DMatrixRMaj> R = SimpleMatrix <DMatrixRMaj> .wrap(_R);

            // y = z - H x
            SimpleMatrix <DMatrixRMaj> y = z.minus(H.mult(x)) as SimpleMatrix <DMatrixRMaj>;

            // S = H P H' + R
            SimpleMatrix <DMatrixRMaj> S = H.mult(P).mult(H.transpose()).plus(R) as SimpleMatrix <DMatrixRMaj>;

            // K = PH'S^(-1)
            SimpleMatrix <DMatrixRMaj> K = P.mult(H.transpose().mult(S.invert())) as SimpleMatrix <DMatrixRMaj>;

            // x = x + Ky
            x = x.plus(K.mult(y)) as SimpleMatrix <DMatrixRMaj>;

            // P = (I-kH)P = P - KHP
            P = P.minus(K.mult(H).mult(P)) as SimpleMatrix <DMatrixRMaj>;
        }
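The example shows only the measurement update; the matching prediction step of a linear Kalman filter usually looks like the sketch below. The field names F (state transition) and Q (process noise) are assumptions here, not taken from the example above.

        public void predict()
        {
            // x = F x
            x = F.mult(x) as SimpleMatrix <DMatrixRMaj>;

            // P = F P F' + Q
            P = F.mult(P).mult(F.transpose()).plus(Q) as SimpleMatrix <DMatrixRMaj>;
        }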
Code example #28
    static public SimpleMatrix operator*(SimpleMatrix m1, SimpleMatrix m2)
    {
        if (m1.XDim != m2.YDim)
        {
            throw (new ArgumentException
                       ("Matrices cannot be multiplied, dimension mismatch"));
        }

        SimpleMatrix res = new SimpleMatrix(m1.YDim, m2.XDim);

        for (int y = 0; y < m1.YDim; ++y)
        {
            for (int x = 0; x < m2.XDim; ++x)
            {
                for (int k = 0; k < m2.YDim; ++k)
                {
                    res[y, x] += m1[y, k] * m2[k, x];
                }
            }
        }

        return(res);
    }
Code example #29
 public DVModel(TwoDimensionalMap <string, string, SimpleMatrix> binaryTransform,
                IDictionary <string, SimpleMatrix> unaryTransform,
                TwoDimensionalMap <string, string, SimpleMatrix> binaryScore,
                IDictionary <string, SimpleMatrix> unaryScore,
                IDictionary <string, SimpleMatrix> wordVectors, Options op)
 {
     this.op = op;
     this.binaryTransform   = binaryTransform;
     this.unaryTransform    = unaryTransform;
     this.binaryScore       = binaryScore;
     this.unaryScore        = unaryScore;
     this.wordVectors       = wordVectors;
     this.numBinaryMatrices = binaryTransform.Size();
     this.numUnaryMatrices  = unaryTransform.Count;
     if (numBinaryMatrices > 0)
     {
         // advance each enumerator to its first entry before reading Current
         var binaryTransformIt = binaryTransform.GetEnumerator();
         binaryTransformIt.MoveNext();
         this.binaryTransformSize = binaryTransformIt.Current.GetValue().GetNumElements();
         var binaryScoreIt = binaryScore.GetEnumerator();
         binaryScoreIt.MoveNext();
         this.binaryScoreSize = binaryScoreIt.Current.GetValue().GetNumElements();
     }
     else
     {
         this.binaryTransformSize = 0;
         this.binaryScoreSize     = 0;
     }
     if (numUnaryMatrices > 0)
     {
         // advance each enumerator to its first entry before reading Current
         var unaryTransformIt = unaryTransform.Values.GetEnumerator();
         unaryTransformIt.MoveNext();
         this.unaryTransformSize = unaryTransformIt.Current.GetNumElements();
         var unaryScoreIt = unaryScore.Values.GetEnumerator();
         unaryScoreIt.MoveNext();
         this.unaryScoreSize = unaryScoreIt.Current.GetNumElements();
     }
     else
     {
         this.unaryTransformSize = 0;
         this.unaryScoreSize     = 0;
     }
     this.numRows  = op.lexOptions.numHid;
     this.numCols  = op.lexOptions.numHid;
     this.identity = SimpleMatrix.Identity(numRows);
     this.rand     = new Random(op.trainOptions.randomSeed);
 }
Code example #30
        /// <summary>
        /// Returns a column vector where each entry is the nth bilinear
        /// product of the nth slices of the two tensors.
        /// </summary>
        public virtual SimpleMatrix BilinearProducts(SimpleMatrix @in)
        {
            if (@in.NumCols() != 1)
            {
                throw new AssertionError("Expected a column vector");
            }
            if (@in.NumRows() != numCols)
            {
                throw new AssertionError("Number of rows in the input does not match number of columns in tensor");
            }
            if (numRows != numCols)
            {
                throw new AssertionError("Can only perform this operation on a SimpleTensor with square slices");
            }
            SimpleMatrix inT  = @in.Transpose();
            SimpleMatrix @out = new SimpleMatrix(numSlices, 1);

            for (int slice = 0; slice < numSlices; ++slice)
            {
                double result = inT.Mult(slices[slice]).Mult(@in).Get(0);
                @out.Set(slice, result);
            }
            return(@out);
        }
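Entry k of the returned vector is therefore in^T * slice_k * in. A usage sketch, assuming the port keeps SimpleTensor's (numRows, numCols, numSlices) constructor:

            SimpleTensor tensor = new SimpleTensor(5, 5, 3);   // three square 5x5 slices (constructor signature assumed)
            SimpleMatrix v = new SimpleMatrix(5, 1);           // column vector, length equal to the slice width
            v.Set(0, 0, 1.0);

            SimpleMatrix products = tensor.BilinearProducts(v);
            Console.WriteLine(products.NumRows());             // 3: one bilinear product per slice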
Code example #31
File: ArcToHelper.cs  Project: furesoft/Perspex
            /// <summary>
            /// ArcTo Helper for StreamGeometryContext
            /// </summary>
            /// <param name="path">Target path</param>
            /// <param name="p1">Start point</param>
            /// <param name="p2">End point</param>
            /// <param name="size">Ellipse radii</param>
            /// <param name="theta">Ellipse theta (angle measured from the abscissa)</param>
            /// <param name="isLargeArc">Large Arc Indicator</param>
            /// <param name="clockwise">Clockwise direction flag</param>
            public static void BuildArc(IStreamGeometryContextImpl path, Point p1, Point p2, Size size, double theta, bool isLargeArc, bool clockwise)
            {

                // var orthogonalizer = new RotateTransform(-theta);
                var orth = new SimpleMatrix(Math.Cos(theta), Math.Sin(theta), -Math.Sin(theta), Math.Cos(theta));
                var rest = new SimpleMatrix(Math.Cos(theta), -Math.Sin(theta), Math.Sin(theta), Math.Cos(theta));

                // var restorer = orthogonalizer.Inverse;
                // if(restorer == null) throw new InvalidOperationException("Can't get a restorer!");

                Point p1S = orth * (new Point((p1.X - p2.X) / 2, (p1.Y - p2.Y) / 2));

                double rx = size.Width;
                double ry = size.Height;
                double rx2 = rx * rx;
                double ry2 = ry * ry;
                double y1S2 = p1S.Y * p1S.Y;
                double x1S2 = p1S.X * p1S.X;

                double numerator = rx2*ry2 - rx2*y1S2 - ry2*x1S2;
                double denominator = rx2*y1S2 + ry2*x1S2;

                if (Math.Abs(denominator) < 1e-8)
                {
                    path.LineTo(p2);
                    return;
                }
                if ((numerator / denominator) < 0)
                {
                    double lambda = x1S2/rx2 + y1S2/ry2;
                    double lambdaSqrt = Math.Sqrt(lambda);
                    if (lambda > 1)
                    {
                        rx *= lambdaSqrt;
                        ry *= lambdaSqrt;
                        rx2 = rx*rx;
                        ry2 = ry*ry;
                        numerator = rx2 * ry2 - rx2 * y1S2 - ry2 * x1S2;
                        if (numerator < 0)
                            numerator = 0;

                        denominator = rx2 * y1S2 + ry2 * x1S2;
                    }

                }

                double multiplier = Math.Sqrt(numerator / denominator);
                Point mulVec = new Point(rx * p1S.Y / ry, -ry * p1S.X / rx);

                int sign = (clockwise != isLargeArc) ? 1 : -1;

                Point cs = new Point(mulVec.X * multiplier * sign, mulVec.Y * multiplier * sign);

                Vector translation = new Vector((p1.X + p2.X) / 2, (p1.Y + p2.Y) / 2);

                Point c = rest * (cs) + translation;

                // See "http://www.w3.org/TR/SVG/implnote.html#ArcConversionEndpointToCenter" to understand
                // how the ellipse center is calculated 


                // from here, W3C recommendations from the above link make less sense than Darth Vader pouring
                // some sea water in a water filter while standing in the water confused 

                // Therefore, we are on our own with our task of finding out lambda1 and lambda2
                // matching our points p1 and p2.

                // Fortunately it is not so difficult now, when we already know the ellipse centre.

                // We eliminate the offset, making our ellipse zero-centered, then we eliminate the theta,
                // making its Y and X axes the same as global axes. Then we can easily get our angles using
                // good old school formula for angles between vectors.

                // We should remember that this class expects true angles, and not the t-values for ellipse equation.
                // To understand how t-values are obtained, one should see Etas calculation in the constructor code.

                var p1NoOffset = orth * (p1-c);
                var p2NoOffset = orth * (p2-c);

                // if the arc is drawn clockwise, we swap start and end points
                var revisedP1 = clockwise ? p1NoOffset : p2NoOffset;
                var revisedP2 = clockwise ? p2NoOffset : p1NoOffset;


                var thetaStart = GetAngle(new Vector(1, 0), revisedP1);
                var thetaEnd = GetAngle(new Vector(1, 0), revisedP2);


                // Uncomment this to draw a pie
                // path.LineTo(c, true, true);
                // path.LineTo(clockwise ? p1 : p2, true,true);

                path.LineTo(clockwise ? p1 : p2);
                var arc = new EllipticalArc(c.X, c.Y, rx, ry, theta, thetaStart, thetaEnd, false);
                arc.BuildArc(path, arc._maxDegree, arc._defaultFlatness, false);

                //uncomment this to draw a pie
                //path.LineTo(c, true, true);
            }
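GetAngle is referenced above but not shown. A minimal sketch of a signed angle between two 2D vectors (the parameter types and sign convention are my assumptions, not the project's actual implementation):

            private static double GetAngle(Vector u, Vector v)
            {
                // signed angle from u to v via atan2 of the 2D cross product and the dot product
                double cross = u.X * v.Y - u.Y * v.X;
                double dot   = u.X * v.X + u.Y * v.Y;

                return Math.Atan2(cross, dot);
            }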