Example #1
        public LRBMTrainer(MnistDataMgr dataMgr, LRBM lrbm)
        {
            _dataMgr = dataMgr;
            _lrbm    = lrbm;

            _rnd = new Random();
        }
Example #2
        public LRBMTester(LRBM lrbm, MnistDataMgr dataMgr)
        {
            _lrbm    = lrbm;
            _dataMgr = dataMgr;

            _testSetSize = _dataMgr.count;
        }
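The two constructors above only store their dependencies. A minimal usage sketch follows, assuming a MnistDataMgr and an LRBM are built elsewhere in the project (their constructors, and the trainer's and tester's run methods, are not shown in these examples); LoadMnistDataMgr and BuildLrbm are hypothetical placeholders.

        // Hypothetical wiring of the trainer and tester. LoadMnistDataMgr and BuildLrbm
        // are placeholders for however the project actually builds these objects.
        MnistDataMgr dataMgr = LoadMnistDataMgr();
        LRBM         lrbm    = BuildLrbm();

        var trainer = new LRBMTrainer(dataMgr, lrbm);
        var tester  = new LRBMTester(lrbm, dataMgr); // note the reversed argument order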
Example #3
        void InitLRBM(LRBM lrbm)
        {
            int lrbmLayerCnt = lrbm.layerCnt;

            // Copy each RBM's visible layer size, pre-trained weights, and hidden biases
            // into the structures that will seed the MLP.
            for (int i = 0; i < lrbmLayerCnt; i++)
            {
                _layers.Add(new double[lrbm._rbms[i].visibleNbr]);
                _weights.Add(lrbm._rbms[i].GetWeightVisibleHidden());
                _bias.Add(lrbm._rbms[i].GetHiddenBias());
            }

            // The hidden layer of the topmost RBM becomes the last copied layer.
            _layers.Add(new double[lrbm._rbms[lrbmLayerCnt - 1].hiddenNbr]);
        }
Example #4
        public DeepBeliefNet(double learningRate, LRBM lrbm, int outputNbr)
        {
            _rnd          = new Random();
            _learningRate = learningRate;
            //_lrbm = lrbm;
            _outputNbr = outputNbr;

            _layers  = new List<double[]>();
            _weights = new List<double[,]>();
            _bias    = new List<double[]>();

            // Copy the pre-trained RBM stack, then append a randomly initialised output layer.
            InitLRBM(lrbm);
            InitOutput(lrbm);

            _mlp = new MLP(_layers, _weights, _bias, _learningRate);
        }
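A minimal sketch of calling this constructor, assuming lrbm is a pre-trained LRBM as in the earlier examples; the learning rate of 0.05 and the 10 outputs (one per MNIST digit) are illustrative values only.

        // Single output layer of 10 units on top of the pre-trained RBM stack.
        var dbn = new DeepBeliefNet(0.05, lrbm, 10);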
Example #5
        void InitOutputStruct(int[] outputStruct, LRBM lrbm)
        {
            // First output layer: connects the top RBM's hidden layer to outputStruct[0] units.
            int currentLevelNbr = lrbm._rbms[lrbm.layerCnt - 1].hiddenNbr;
            int nextLevelNbr    = outputStruct[0];

            double[,] weight = new double[currentLevelNbr, nextLevelNbr];
            double[] bias = new double[nextLevelNbr];

            // Weights drawn uniformly from [-0.5, 0.5); biases start at zero.
            for (int j = 0; j < nextLevelNbr; j++)
            {
                for (int k = 0; k < currentLevelNbr; k++)
                {
                    weight[k, j] = _rnd.NextDouble() - 0.5;
                }

                bias[j] = 0.0;
            }

            _weights.Add(weight);
            _bias.Add(bias);

            _layers.Add(new double[nextLevelNbr]);


            // Remaining output layers: outputStruct[i - 1] units feeding outputStruct[i] units.
            for (int i = 1; i < outputStruct.Length; i++)
            {
                currentLevelNbr = outputStruct[i - 1];
                nextLevelNbr    = outputStruct[i];
                weight          = new double[currentLevelNbr, nextLevelNbr];
                bias            = new double[nextLevelNbr];

                for (int j = 0; j < nextLevelNbr; j++)
                {
                    for (int k = 0; k < currentLevelNbr; k++)
                    {
                        weight[k, j] = _rnd.NextDouble() - 0.5;
                    }

                    bias[j] = 0.0;
                }

                _weights.Add(weight);
                _bias.Add(bias);

                _layers.Add(new double[nextLevelNbr]);
            }
        }
Example #6
        public DeepBeliefNet(double learningRate, LRBM lrbm, int[] outputStruct)
        {
            _rnd          = new Random();
            _learningRate = learningRate;
            //_lrbm = lrbm;
            int outputDepth = outputStruct.Length;

            // The last entry of outputStruct gives the number of network outputs.
            _outputNbr = outputStruct[outputDepth - 1];

            _layers  = new List<double[]>();
            _weights = new List<double[,]>();
            _bias    = new List<double[]>();

            InitLRBM(lrbm);
            InitOutputStruct(outputStruct, lrbm);

            _mlp = new MLP(_layers, _weights, _bias, _learningRate);
        }
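For the array overload, a sketch with an assumed two-layer output head; the layer sizes and learning rate are illustrative, not taken from the project.

        // Two extra fully connected layers on top of the RBM stack: 100 hidden units, then 10 outputs.
        var dbnDeep = new DeepBeliefNet(0.05, lrbm, new int[] { 100, 10 });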
Example #7
        void InitOutput(LRBM lrbm)
        {
            int lrbmLayerCnt = lrbm.layerCnt;

            // Output layer: the top RBM's hidden layer fully connected to _outputNbr units.
            double[,] weight = new double[lrbm._rbms[lrbmLayerCnt - 1].hiddenNbr, _outputNbr];
            double[] bias = new double[_outputNbr];

            for (int j = 0; j < _outputNbr; j++)
            {
                for (int k = 0; k < lrbm._rbms[lrbmLayerCnt - 1].hiddenNbr; k++)
                {
                    weight[k, j] = _rnd.NextDouble() - 0.5;
                }

                bias[j] = 0.0;
            }

            _weights.Add(weight);
            _bias.Add(bias);

            _layers.Add(new double[_outputNbr]);
        }