/// <summary>
/// Creates a trainer bound to the given MNIST data source and layered RBM model.
/// </summary>
/// <param name="dataMgr">MNIST data provider used during training.</param>
/// <param name="lrbm">Layered RBM stack to be trained.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
public LRBMTrainer(MnistDataMgr dataMgr, LRBM lrbm)
{
    // Fail fast rather than deferring a NullReferenceException to first use.
    if (dataMgr == null) throw new ArgumentNullException(nameof(dataMgr));
    if (lrbm == null) throw new ArgumentNullException(nameof(lrbm));

    _dataMgr = dataMgr;
    _lrbm = lrbm;
    _rnd = new Random();
}
/// <summary>
/// Creates a tester for the given layered RBM; the test-set size is taken
/// from the data manager's full item count.
/// </summary>
/// <param name="lrbm">Trained layered RBM to evaluate.</param>
/// <param name="dataMgr">MNIST data provider supplying the test samples.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
public LRBMTester(LRBM lrbm, MnistDataMgr dataMgr)
{
    if (lrbm == null) throw new ArgumentNullException(nameof(lrbm));
    // Guard before the .count dereference below, which would otherwise NRE.
    if (dataMgr == null) throw new ArgumentNullException(nameof(dataMgr));

    _lrbm = lrbm;
    _dataMgr = dataMgr;
    _testSetSize = _dataMgr.count;
}
/// <summary>
/// Mirrors the stacked RBM structure into the MLP building blocks: one
/// activation buffer per RBM visible layer, plus each RBM's visible-to-hidden
/// weight matrix and hidden-bias vector, and finally a buffer for the top
/// RBM's hidden layer.
/// </summary>
void InitLRBM(LRBM lrbm)
{
    int depth = lrbm.layerCnt;
    for (int level = 0; level < depth; level++)
    {
        var rbm = lrbm._rbms[level];
        _layers.Add(new double[rbm.visibleNbr]);
        _weights.Add(rbm.GetWeightVisibleHidden());
        _bias.Add(rbm.GetHiddenBias());
    }
    // The hidden layer of the topmost RBM becomes one more layer buffer.
    _layers.Add(new double[lrbm._rbms[depth - 1].hiddenNbr]);
}
/// <summary>
/// Builds a deep belief net whose hidden layers come from a pre-trained
/// stacked RBM and whose single output layer has <paramref name="outputNbr"/> units.
/// </summary>
/// <param name="learningRate">Learning rate forwarded to the backing MLP.</param>
/// <param name="lrbm">Pre-trained layered RBM providing the hidden layers.</param>
/// <param name="outputNbr">Number of units in the output layer.</param>
public DeepBeliefNet(double learningRate, LRBM lrbm, int outputNbr)
{
    _rnd = new Random();
    _learningRate = learningRate;
    _outputNbr = outputNbr;

    _layers = new List<double[]>();
    _weights = new List<double[,]>();
    _bias = new List<double[]>();

    InitLRBM(lrbm);    // copy the RBM stack into the layer/weight/bias lists
    InitOutput(lrbm);  // append a randomly initialised output layer on top

    _mlp = new MLP(_layers, _weights, _bias, _learningRate);
}
/// <summary>
/// Appends the output sub-network described by <paramref name="outputStruct"/>
/// on top of the stacked RBM: each entry is the unit count of one additional
/// layer. Weights are drawn uniformly from [-0.5, 0.5); biases start at zero.
/// </summary>
/// <param name="outputStruct">Unit counts of the output layers, in input-to-output order; must be non-empty.</param>
/// <param name="lrbm">Pre-trained RBM stack supplying the size of the layer below the first output layer.</param>
void InitOutputStruct(int[] outputStruct, LRBM lrbm)
{
    // The first output layer connects to the hidden layer of the topmost RBM.
    AppendRandomLayer(lrbm._rbms[lrbm.layerCnt - 1].hiddenNbr, outputStruct[0]);

    // Each remaining output layer connects to the previous outputStruct entry.
    for (int i = 1; i < outputStruct.Length; i++)
    {
        AppendRandomLayer(outputStruct[i - 1], outputStruct[i]);
    }
}

/// <summary>
/// Adds one fully connected layer: weights drawn uniformly from [-0.5, 0.5)
/// (column by column, preserving the original RNG draw order), zero biases,
/// and an activation buffer of <paramref name="nextLevelNbr"/> units.
/// </summary>
/// <param name="currentLevelNbr">Unit count of the layer below.</param>
/// <param name="nextLevelNbr">Unit count of the new layer.</param>
void AppendRandomLayer(int currentLevelNbr, int nextLevelNbr)
{
    double[,] weight = new double[currentLevelNbr, nextLevelNbr];
    double[] bias = new double[nextLevelNbr];
    for (int j = 0; j < nextLevelNbr; j++)
    {
        for (int k = 0; k < currentLevelNbr; k++)
        {
            weight[k, j] = _rnd.NextDouble() - 0.5;
        }
        bias[j] = 0.0; // explicit, though new double[] is already zero-filled
    }
    _weights.Add(weight);
    _bias.Add(bias);
    _layers.Add(new double[nextLevelNbr]);
}
/// <summary>
/// Builds a deep belief net whose hidden layers come from a pre-trained
/// stacked RBM and whose output section is a stack of extra layers, one per
/// entry of <paramref name="outputStruct"/>.
/// </summary>
/// <param name="learningRate">Learning rate forwarded to the backing MLP.</param>
/// <param name="lrbm">Pre-trained layered RBM providing the hidden layers.</param>
/// <param name="outputStruct">Unit counts of the output layers; the last entry is the network's output size.</param>
public DeepBeliefNet(double learningRate, LRBM lrbm, int[] outputStruct)
{
    _rnd = new Random();
    _learningRate = learningRate;
    // The final entry of outputStruct defines the network's output width.
    _outputNbr = outputStruct[outputStruct.Length - 1];

    _layers = new List<double[]>();
    _weights = new List<double[,]>();
    _bias = new List<double[]>();

    InitLRBM(lrbm);                        // copy the RBM stack
    InitOutputStruct(outputStruct, lrbm);  // append the output layer stack

    _mlp = new MLP(_layers, _weights, _bias, _learningRate);
}
/// <summary>
/// Appends a single fully connected output layer of <c>_outputNbr</c> units on
/// top of the stacked RBM. Weights are drawn uniformly from [-0.5, 0.5);
/// biases start at zero.
/// </summary>
/// <param name="lrbm">Pre-trained RBM stack; its top hidden layer feeds the output layer.</param>
void InitOutput(LRBM lrbm)
{
    int lrbmLayerCnt = lrbm.layerCnt;
    // Hoisted out of the loops: the top hidden layer size is loop-invariant,
    // but the original re-evaluated it on every inner iteration.
    int topHiddenNbr = lrbm._rbms[lrbmLayerCnt - 1].hiddenNbr;

    double[,] weight = new double[topHiddenNbr, _outputNbr];
    double[] bias = new double[_outputNbr];
    for (int j = 0; j < _outputNbr; j++)
    {
        for (int k = 0; k < topHiddenNbr; k++)
        {
            weight[k, j] = _rnd.NextDouble() - 0.5;
        }
        bias[j] = 0.0; // explicit, though new double[] is already zero-filled
    }
    _weights.Add(weight);
    _bias.Add(bias);
    _layers.Add(new double[_outputNbr]);
}