Example #1
0
        /// <summary>
        /// Allocates the network's weight matrices and seeds the hidden layer with
        /// small deterministic pseudo-random values, then (re)builds the Huffman tree
        /// over the current word collection.
        /// </summary>
        /// <remarks>
        /// The hidden layer is initialised to values in (-0.5, 0.5) / dimensions using
        /// a linear congruential generator so runs are reproducible.
        /// </remarks>
        private void InitNetwork()
        {
            ulong nextRandom = 1;

            var numberOfWords = _wordCollection.GetNumberOfUniqueWords();

            _hiddenLayerWeights = new float[numberOfWords, _numberOfDimensions];
            if (_negative > 0)
            {
                // The CLR zero-initialises newly allocated arrays, so no explicit
                // clearing loop is needed for the output layer.
                _outputLayerWeights = new float[numberOfWords, _numberOfDimensions];
            }

            for (long wordIndex = 0; wordIndex < numberOfWords; wordIndex++)
            {
                for (long dimensionIndex = 0; dimensionIndex < _numberOfDimensions; dimensionIndex++)
                {
                    nextRandom = LinearCongruentialGenerator(nextRandom);
                    // Low 16 bits mapped to [0, 1), shifted to [-0.5, 0.5), scaled by 1/dims.
                    _hiddenLayerWeights[wordIndex, dimensionIndex] =
                        ((nextRandom & 0xFFFF) / 65536f - 0.5f) / _numberOfDimensions;
                }
            }

            var huffmanTree = new HuffmanTree();
            huffmanTree.Create(_wordCollection);

            // NOTE(review): forcing a collection is rarely beneficial in production code;
            // kept to preserve existing behavior — consider removing.
            GC.Collect();
        }
Example #2
0
        /// <summary>
        /// Prepares training state: resets the learning rate, loads and prunes the
        /// word dictionary, builds the Huffman tree, and initialises the network
        /// (plus the unigram table when negative sampling is enabled).
        /// </summary>
        private void Setup()
        {
            _alpha = _startingAlpha;

            _fileHandler.GetWordDictionaryFromFile(_wordCollection, MaxCodeLength);
            _wordCollection.RemoveWordsWithCountLessThanMinCount(_minCount);

            // TODO: refactor the huffman stuff, so we can use it elsewhere
            // NOTE(review): InitNetwork appears to build a Huffman tree as well —
            // verify whether this first Create call is redundant.
            new HuffmanTree().Create(_wordCollection);

            InitNetwork();

            if (_negativeSamples > 0)
            {
                InitUnigramTable();
            }

            GC.Collect();
        }