Code example #1
        public virtual void AddRandomBinaryMatrix(string leftBasic, string rightBasic)
        {
            if (binaryTransform.Get(leftBasic, rightBasic) != null)
            {
                return;
            }
            ++numBinaryMatrices;
            // scoring matrix
            SimpleMatrix score = SimpleMatrix.Random(1, numCols, -1.0 / Math.Sqrt((double)numCols), 1.0 / Math.Sqrt((double)numCols), rand);

            binaryScore.Put(leftBasic, rightBasic, score.Scale(op.trainOptions.scalingForInit));
            SimpleMatrix binary;

            if (op.trainOptions.useContextWords)
            {
                binary = new SimpleMatrix(numRows, numCols * 4 + 1);
                // leave room for bias term
                binary.InsertIntoThis(0, numCols * 2 + 1, RandomContextMatrix());
            }
            else
            {
                binary = new SimpleMatrix(numRows, numCols * 2 + 1);
            }
            SimpleMatrix left  = RandomTransformMatrix();
            SimpleMatrix right = RandomTransformMatrix();

            binary.InsertIntoThis(0, 0, left);
            binary.InsertIntoThis(0, numCols, right);
            binaryTransform.Put(leftBasic, rightBasic, binary.Scale(op.trainOptions.scalingForInit));
        }
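
The binary transform built above places the left child in columns 0 through numCols-1, the right child in columns numCols through 2*numCols-1, and the bias in the final column. The sketch below illustrates how such a matrix would be applied in the non-context case; it is not code from the original source. It assumes the port exposes a Mult method mirroring EJML's mult, and the names ComposeChildren, leftVec, and rightVec are hypothetical.

    // Minimal sketch (assumptions noted above): compose two numCols x 1 child
    // vectors with a numRows x (numCols * 2 + 1) binary transform.
    SimpleMatrix ComposeChildren(SimpleMatrix binary, SimpleMatrix leftVec, SimpleMatrix rightVec, int numCols)
    {
        // stack [leftVec; rightVec; 1] so the last column of the transform acts as the bias
        SimpleMatrix children = new SimpleMatrix(numCols * 2 + 1, 1);
        children.InsertIntoThis(0, 0, leftVec);
        children.InsertIntoThis(numCols, 0, rightVec);
        children.Set(numCols * 2, 0, 1.0);
        SimpleMatrix parent = binary.Mult(children);   // Mult assumed to mirror EJML's mult
        // elementwise tanh nonlinearity, written with Get/Set as used elsewhere in these examples
        for (int i = 0; i < parent.NumRows(); ++i)
        {
            parent.Set(i, 0, Math.Tanh(parent.Get(i, 0)));
        }
        return parent;
    }
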
Code example #2
        public virtual void AddRandomUnaryMatrix(string childBasic)
        {
            if (unaryTransform[childBasic] != null)
            {
                return;
            }
            ++numUnaryMatrices;
            // scoring matrix
            SimpleMatrix score = SimpleMatrix.Random(1, numCols, -1.0 / Math.Sqrt((double)numCols), 1.0 / Math.Sqrt((double)numCols), rand);

            unaryScore[childBasic] = score.Scale(op.trainOptions.scalingForInit);
            SimpleMatrix transform;

            if (op.trainOptions.useContextWords)
            {
                transform = new SimpleMatrix(numRows, numCols * 3 + 1);
                // leave room for bias term
                transform.InsertIntoThis(0, numCols + 1, RandomContextMatrix());
            }
            else
            {
                transform = new SimpleMatrix(numRows, numCols + 1);
            }
            SimpleMatrix unary = RandomTransformMatrix();

            transform.InsertIntoThis(0, 0, unary);
            unaryTransform[childBasic] = transform.Scale(op.trainOptions.scalingForInit);
        }
Code example #3
        /// <summary>Creates a random context matrix.</summary>
        /// <remarks>
        /// Creates a random context matrix.  This will be numRows x
        /// 2*numCols big.  These can be appended to the end of either a
        /// unary or binary transform matrix to get the transform matrix
        /// which uses context words.
        /// </remarks>
        private SimpleMatrix RandomContextMatrix()
        {
            SimpleMatrix matrix = new SimpleMatrix(numRows, numCols * 2);

            matrix.InsertIntoThis(0, 0, identity.Scale(op.trainOptions.scalingForInit * 0.1));
            matrix.InsertIntoThis(0, numCols, identity.Scale(op.trainOptions.scalingForInit * 0.1));
            matrix = matrix.Plus(SimpleMatrix.Random(numRows, numCols * 2, -1.0 / Math.Sqrt((double)numCols * 100.0), 1.0 / Math.Sqrt((double)numCols * 100.0), rand));
            return(matrix);
        }
Code example #4
 /// <summary>
 /// Returns a randomly initialized tensor with values drawn from the
 /// uniform distribution between minValue and maxValue.
 /// </summary>
 public static Edu.Stanford.Nlp.Neural.SimpleTensor Random(int numRows, int numCols, int numSlices, double minValue, double maxValue, Java.Util.Random rand)
 {
     Edu.Stanford.Nlp.Neural.SimpleTensor tensor = new Edu.Stanford.Nlp.Neural.SimpleTensor(numRows, numCols, numSlices);
     for (int i = 0; i < numSlices; ++i)
     {
         tensor.slices[i] = SimpleMatrix.Random(numRows, numCols, minValue, maxValue, rand);
     }
     return(tensor);
 }
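
A minimal usage sketch for the factory above, added as an illustration rather than code from the original source; it reuses only members that appear in these examples (Random, NumRows, NumCols, NumSlices) and picks arbitrary dimensions.

    // Draw a small randomly initialized 25 x 50 x 25 tensor and report its shape.
    Java.Util.Random rand = new Java.Util.Random();
    double range = 1.0 / Math.Sqrt(50.0);
    Edu.Stanford.Nlp.Neural.SimpleTensor tensor =
        Edu.Stanford.Nlp.Neural.SimpleTensor.Random(25, 50, 25, -range, range, rand);
    System.Console.WriteLine(tensor.NumRows() + "x" + tensor.NumCols() + "x" + tensor.NumSlices());
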
Code example #5
        /// <summary>Returns matrices of the right size for either binary or unary (terminal) classification</summary>
        internal virtual SimpleMatrix RandomClassificationMatrix()
        {
            SimpleMatrix score = new SimpleMatrix(numClasses, numHid + 1);
            double       range = 1.0 / (Math.Sqrt((double)numHid));

            score.InsertIntoThis(0, 0, SimpleMatrix.Random(numClasses, numHid, -range, range, rand));
            // bias column goes from 0 to 1 initially
            score.InsertIntoThis(0, numHid, SimpleMatrix.Random(numClasses, 1, 0.0, 1.0, rand));
            return(score.Scale(op.trainOptions.scalingForInit));
        }
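
Because the classification matrix is numClasses x (numHid + 1), it is meant to be applied to a node vector with a trailing 1 so that its last column acts as the bias. The sketch below illustrates that under stated assumptions: Mult is assumed to mirror EJML's mult, and ClassDistribution / nodeVector are hypothetical names, not part of the original source.

    SimpleMatrix ClassDistribution(SimpleMatrix scoreW, SimpleMatrix nodeVector, int numHid)
    {
        // stack [nodeVector; 1] so the bias column of scoreW is picked up
        SimpleMatrix withBias = new SimpleMatrix(numHid + 1, 1);
        withBias.InsertIntoThis(0, 0, nodeVector);
        withBias.Set(numHid, 0, 1.0);
        SimpleMatrix logits = scoreW.Mult(withBias);   // numClasses x 1; Mult assumed
        // softmax over the classes
        double sum = 0.0;
        for (int i = 0; i < logits.NumRows(); ++i)
        {
            sum += Math.Exp(logits.Get(i, 0));
        }
        SimpleMatrix probs = new SimpleMatrix(logits.NumRows(), 1);
        for (int i = 0; i < logits.NumRows(); ++i)
        {
            probs.Set(i, 0, Math.Exp(logits.Get(i, 0)) / sum);
        }
        return probs;
    }
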
Code example #6
        /// <summary>
        /// Create a random transform matrix based on the initialization
        /// parameters.
        /// </summary>
        /// <remarks>
        /// Create a random transform matrix based on the initialization
        /// parameters.  This will be numRows x numCols big.  These can be
        /// plugged into either unary or binary transform matrices.
        /// </remarks>
        private SimpleMatrix RandomTransformMatrix()
        {
            SimpleMatrix matrix;

            switch (op.trainOptions.transformMatrixType)
            {
            case TrainOptions.TransformMatrixType.Diagonal:
            {
                matrix = SimpleMatrix.Random(numRows, numCols, -1.0 / Math.Sqrt((double)numCols * 100.0), 1.0 / Math.Sqrt((double)numCols * 100.0), rand).Plus(identity);
                break;
            }

            case TrainOptions.TransformMatrixType.Random:
            {
                matrix = SimpleMatrix.Random(numRows, numCols, -1.0 / Math.Sqrt((double)numCols), 1.0 / Math.Sqrt((double)numCols), rand);
                break;
            }

            case TrainOptions.TransformMatrixType.OffDiagonal:
            {
                matrix = SimpleMatrix.Random(numRows, numCols, -1.0 / Math.Sqrt((double)numCols * 100.0), 1.0 / Math.Sqrt((double)numCols * 100.0), rand).Plus(identity);
                for (int i = 0; i < numCols; ++i)
                {
                    int x     = rand.NextInt(numCols);
                    int y     = rand.NextInt(numCols);
                    int scale = rand.NextInt(3) - 1; // -1, 0, or 1
                    matrix.Set(x, y, matrix.Get(x, y) + scale);
                }
                break;
            }

            case TrainOptions.TransformMatrixType.RandomZeros:
            {
                matrix = SimpleMatrix.Random(numRows, numCols, -1.0 / Math.Sqrt((double)numCols * 100.0), 1.0 / Math.Sqrt((double)numCols * 100.0), rand).Plus(identity);
                for (int i_1 = 0; i_1 < numCols; ++i_1)
                {
                    int x = rand.NextInt(numCols);
                    int y = rand.NextInt(numCols);
                    matrix.Set(x, y, 0.0);
                }
                break;
            }

            default:
            {
                throw new ArgumentException("Unexpected matrix initialization type " + op.trainOptions.transformMatrixType);
            }
            }
            return(matrix);
        }
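
Note that every identity-based case draws its noise from a range ten times narrower than the Random case, because sqrt(numCols * 100.0) equals 10 * sqrt(numCols). A quick illustration (not from the original source), using numCols = 25:

    double wideRange   = 1.0 / Math.Sqrt(25.0);          // 0.2  : Random case
    double narrowRange = 1.0 / Math.Sqrt(25.0 * 100.0);  // 0.02 : Diagonal, OffDiagonal, RandomZeros cases
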
Code example #7
        internal virtual SimpleMatrix RandomTransformBlock()
        {
            double range = 1.0 / (Math.Sqrt((double)numHid) * 2.0);

            return(SimpleMatrix.Random(numHid, numHid, -range, range, rand).Plus(identity));
        }
Code example #8
        public virtual void ReadWordVectors()
        {
            SimpleMatrix unknownNumberVector         = null;
            SimpleMatrix unknownCapsVector           = null;
            SimpleMatrix unknownChineseYearVector    = null;
            SimpleMatrix unknownChineseNumberVector  = null;
            SimpleMatrix unknownChinesePercentVector = null;

            wordVectors = Generics.NewTreeMap();
            int numberCount         = 0;
            int capsCount           = 0;
            int chineseYearCount    = 0;
            int chineseNumberCount  = 0;
            int chinesePercentCount = 0;
            //Map<String, SimpleMatrix> rawWordVectors = NeuralUtils.readRawWordVectors(op.lexOptions.wordVectorFile, op.lexOptions.numHid);
            Embedding rawWordVectors = new Embedding(op.lexOptions.wordVectorFile, op.lexOptions.numHid);

            foreach (string word in rawWordVectors.KeySet())
            {
                SimpleMatrix vector = rawWordVectors.Get(word);
                if (op.wordFunction != null)
                {
                    word = op.wordFunction.Apply(word);
                }
                wordVectors[word] = vector;
                if (op.lexOptions.numHid <= 0)
                {
                    op.lexOptions.numHid = vector.GetNumElements();
                }
                // TODO: factor out all of these identical blobs
                if (op.trainOptions.unknownNumberVector && (NumberPattern.Matcher(word).Matches() || DgPattern.Matcher(word).Matches()))
                {
                    ++numberCount;
                    if (unknownNumberVector == null)
                    {
                        unknownNumberVector = new SimpleMatrix(vector);
                    }
                    else
                    {
                        unknownNumberVector = unknownNumberVector.Plus(vector);
                    }
                }
                if (op.trainOptions.unknownCapsVector && CapsPattern.Matcher(word).Matches())
                {
                    ++capsCount;
                    if (unknownCapsVector == null)
                    {
                        unknownCapsVector = new SimpleMatrix(vector);
                    }
                    else
                    {
                        unknownCapsVector = unknownCapsVector.Plus(vector);
                    }
                }
                if (op.trainOptions.unknownChineseYearVector && ChineseYearPattern.Matcher(word).Matches())
                {
                    ++chineseYearCount;
                    if (unknownChineseYearVector == null)
                    {
                        unknownChineseYearVector = new SimpleMatrix(vector);
                    }
                    else
                    {
                        unknownChineseYearVector = unknownChineseYearVector.Plus(vector);
                    }
                }
                if (op.trainOptions.unknownChineseNumberVector && (ChineseNumberPattern.Matcher(word).Matches() || DgPattern.Matcher(word).Matches()))
                {
                    ++chineseNumberCount;
                    if (unknownChineseNumberVector == null)
                    {
                        unknownChineseNumberVector = new SimpleMatrix(vector);
                    }
                    else
                    {
                        unknownChineseNumberVector = unknownChineseNumberVector.Plus(vector);
                    }
                }
                if (op.trainOptions.unknownChinesePercentVector && ChinesePercentPattern.Matcher(word).Matches())
                {
                    ++chinesePercentCount;
                    if (unknownChinesePercentVector == null)
                    {
                        unknownChinesePercentVector = new SimpleMatrix(vector);
                    }
                    else
                    {
                        unknownChinesePercentVector = unknownChinesePercentVector.Plus(vector);
                    }
                }
            }
            string unkWord = op.trainOptions.unkWord;

            if (op.wordFunction != null)
            {
                unkWord = op.wordFunction.Apply(unkWord);
            }
            SimpleMatrix unknownWordVector = wordVectors[unkWord];

            wordVectors[UnknownWord] = unknownWordVector;
            if (unknownWordVector == null)
            {
                throw new Exception("Unknown word vector not specified in the word vector file");
            }
            if (op.trainOptions.unknownNumberVector)
            {
                if (numberCount > 0)
                {
                    unknownNumberVector = unknownNumberVector.Divide(numberCount);
                }
                else
                {
                    unknownNumberVector = new SimpleMatrix(unknownWordVector);
                }
                wordVectors[UnknownNumber] = unknownNumberVector;
            }
            if (op.trainOptions.unknownCapsVector)
            {
                if (capsCount > 0)
                {
                    unknownCapsVector = unknownCapsVector.Divide(capsCount);
                }
                else
                {
                    unknownCapsVector = new SimpleMatrix(unknownWordVector);
                }
                wordVectors[UnknownCaps] = unknownCapsVector;
            }
            if (op.trainOptions.unknownChineseYearVector)
            {
                log.Info("Matched " + chineseYearCount + " chinese year vectors");
                if (chineseYearCount > 0)
                {
                    unknownChineseYearVector = unknownChineseYearVector.Divide(chineseYearCount);
                }
                else
                {
                    unknownChineseYearVector = new SimpleMatrix(unknownWordVector);
                }
                wordVectors[UnknownChineseYear] = unknownChineseYearVector;
            }
            if (op.trainOptions.unknownChineseNumberVector)
            {
                log.Info("Matched " + chineseNumberCount + " chinese number vectors");
                if (chineseNumberCount > 0)
                {
                    unknownChineseNumberVector = unknownChineseNumberVector.Divide(chineseNumberCount);
                }
                else
                {
                    unknownChineseNumberVector = new SimpleMatrix(unknownWordVector);
                }
                wordVectors[UnknownChineseNumber] = unknownChineseNumberVector;
            }
            if (op.trainOptions.unknownChinesePercentVector)
            {
                log.Info("Matched " + chinesePercentCount + " chinese percent vectors");
                if (chinesePercentCount > 0)
                {
                    unknownChinesePercentVector = unknownChinesePercentVector.Divide(chinesePercentCount);
                }
                else
                {
                    unknownChinesePercentVector = new SimpleMatrix(unknownWordVector);
                }
                wordVectors[UnknownChinesePercent] = unknownChinesePercentVector;
            }
            if (op.trainOptions.useContextWords)
            {
                SimpleMatrix start = SimpleMatrix.Random(op.lexOptions.numHid, 1, -0.5, 0.5, rand);
                SimpleMatrix end   = SimpleMatrix.Random(op.lexOptions.numHid, 1, -0.5, 0.5, rand);
                wordVectors[StartWord] = start;
                wordVectors[EndWord]   = end;
            }
        }
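
The TODO inside the loop above notes that the five unknown-category blocks all follow one pattern: sum every matching vector, then either average by the match count or fall back to a copy of the generic unknown-word vector. A hypothetical helper capturing that pattern might look like the sketch below; it is an assumption, not part of the original source, and uses only the Divide call and copy constructor already seen in this method.

    private static SimpleMatrix AverageOrFallback(SimpleMatrix sum, int count, SimpleMatrix fallback)
    {
        // average the accumulated vectors, or copy the generic unknown-word vector if none matched
        return count > 0 ? sum.Divide(count) : new SimpleMatrix(fallback);
    }
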
Code example #9
        /// <exception cref="System.IO.IOException"/>
        public static void Main(string[] args)
        {
            string basePath         = "/user/socherr/scr/projects/semComp/RNTN/src/params/";
            int    numSlices        = 25;
            bool   useEscapedParens = false;

            for (int argIndex = 0; argIndex < args.Length;)
            {
                if (Sharpen.Runtime.EqualsIgnoreCase(args[argIndex], "-slices"))
                {
                    numSlices = System.Convert.ToInt32(args[argIndex + 1]);
                    argIndex += 2;
                }
                else
                {
                    if (Sharpen.Runtime.EqualsIgnoreCase(args[argIndex], "-path"))
                    {
                        basePath  = args[argIndex + 1];
                        argIndex += 2;
                    }
                    else
                    {
                        if (Sharpen.Runtime.EqualsIgnoreCase(args[argIndex], "-useEscapedParens"))
                        {
                            useEscapedParens = true;
                            argIndex        += 1;
                        }
                        else
                        {
                            log.Info("Unknown argument " + args[argIndex]);
                            System.Environment.Exit(2);
                        }
                    }
                }
            }
            SimpleMatrix[] slices = new SimpleMatrix[numSlices];
            for (int i = 0; i < numSlices; ++i)
            {
                slices[i] = LoadMatrix(basePath + "bin/Wt_" + (i + 1) + ".bin", basePath + "Wt_" + (i + 1) + ".txt");
            }
            SimpleTensor tensor = new SimpleTensor(slices);

            log.Info("W tensor size: " + tensor.NumRows() + "x" + tensor.NumCols() + "x" + tensor.NumSlices());
            SimpleMatrix W = LoadMatrix(basePath + "bin/W.bin", basePath + "W.txt");

            log.Info("W matrix size: " + W.NumRows() + "x" + W.NumCols());
            SimpleMatrix Wcat = LoadMatrix(basePath + "bin/Wcat.bin", basePath + "Wcat.txt");

            log.Info("W cat size: " + Wcat.NumRows() + "x" + Wcat.NumCols());
            SimpleMatrix combinedWV = LoadMatrix(basePath + "bin/Wv.bin", basePath + "Wv.txt");

            log.Info("Word matrix size: " + combinedWV.NumRows() + "x" + combinedWV.NumCols());
            File vocabFile = new File(basePath + "vocab_1.txt");

            if (!vocabFile.Exists())
            {
                vocabFile = new File(basePath + "words.txt");
            }
            IList <string> lines = Generics.NewArrayList();

            foreach (string line in IOUtils.ReadLines(vocabFile))
            {
                lines.Add(line.Trim());
            }
            log.Info("Lines in vocab file: " + lines.Count);
            IDictionary <string, SimpleMatrix> wordVectors = Generics.NewTreeMap();

            for (int i_1 = 0; i_1 < lines.Count && i_1 < combinedWV.NumCols(); ++i_1)
            {
                string[] pieces = lines[i_1].Split(" +");
                if (pieces.Length == 0 || pieces.Length > 1)
                {
                    continue;
                }
                wordVectors[pieces[0]] = combinedWV.ExtractMatrix(0, numSlices, i_1, i_1 + 1);
                if (pieces[0].Equals("UNK"))
                {
                    wordVectors[SentimentModel.UnknownWord] = wordVectors["UNK"];
                }
            }
            // If there is no ",", we first try to look for an HTML escaping,
            // then fall back to "." as better than just a random word vector.
            // Same for "``" and ";"
            CopyWordVector(wordVectors, "&#44", ",");
            CopyWordVector(wordVectors, ".", ",");
            CopyWordVector(wordVectors, "&#59", ";");
            CopyWordVector(wordVectors, ".", ";");
            CopyWordVector(wordVectors, "&#96&#96", "``");
            CopyWordVector(wordVectors, "''", "``");
            if (useEscapedParens)
            {
                ReplaceWordVector(wordVectors, "(", "-LRB-");
                ReplaceWordVector(wordVectors, ")", "-RRB-");
            }
            RNNOptions op = new RNNOptions();

            op.numHid = numSlices;
            op.lowercaseWordVectors = false;
            if (Wcat.NumRows() == 2)
            {
                op.classNames         = new string[] { "Negative", "Positive" };
                op.equivalenceClasses = new int[][] { new int[] { 0 }, new int[] { 1 } };
                // TODO: set to null once old models are updated
                op.numClasses = 2;
            }
            if (!wordVectors.Contains(SentimentModel.UnknownWord))
            {
                wordVectors[SentimentModel.UnknownWord] = SimpleMatrix.Random(numSlices, 1, -0.00001, 0.00001, new Random());
            }
            SentimentModel model = SentimentModel.ModelFromMatrices(W, Wcat, tensor, wordVectors, op);

            model.SaveSerialized("matlab.ser.gz");
        }
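
As an illustration only, the entry point above could be driven with arguments like the following; the parameter directory is a placeholder and the call assumes it is made from within the same class.

    // Hypothetical invocation: convert Matlab-exported RNTN parameters into matlab.ser.gz
    Main(new string[] { "-slices", "25", "-path", "/path/to/params/", "-useEscapedParens" });
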