Example #1
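Constructor of MnistMLPTrainer: it stores the MNIST data manager and an already-built MLP, along with the flags controlling normalization (input normalization on and output normalization off by default).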
 public MnistMLPTrainer(MnistDataMgr mgr, MLP mlp, bool nInput = true, bool nOutput = false)
 {
     _dataMgr        = mgr;
     _normalizeInput = nInput;
     _normalizeOuput = nOutput;
     _network        = mlp;
 }
Example #2
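InitMLP overload that builds a fresh network from an explicit layer structure and learning rate; it forces input normalization on and output normalization off.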
 public void InitMLP(int[] layerStruct, double learnRate)
 {
     _normalizeOuput = false;
     _normalizeInput = true;
     _layerStruct    = layerStruct;
     _network        = new MLP(_layerStruct, learnRate);
 }
Example #3
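Window constructor that generates a data set, trains an 8-100-1 network for 2500 epochs, and plots the resulting error curves in a UserControl1 graph; an alternative XOR-style run on a small hand-written data set is left commented out.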
        public MainWindow()
        {
            InitializeComponent();

            // Host the plotting control as the window content.
            Graph = new UserControl1();
            this.Content = Graph;

            // Generate() supplies the training inputs (Item1) and labels (Item2).
            var t = Generate();

            //    var s = t.Item1.Svd(true);
            //var nd = t.Item1.Multiply(s.VT().SubMatrix(0, 8, 0, 8));

            // 8 inputs, 100 hidden units, 1 output; train for 2500 epochs.
            var mlp = new MLP(8, 100, 1);
            var r = mlp.Train(t.Item1, t.Item2, null, null, 2500);

            // Plot the training error curves.
            Graph.Set(r.TrainingSquaredError, r.TrainingError);

            // XOR-style inputs and one-hot labels for the commented-out run below.
            DenseMatrix data = new DenseMatrix(new double[,] { { 1, 1 }, { 1, 0 }, { 0, 0 }, { 0, 1 } });
            DenseMatrix labels = new DenseMatrix(new double[,] { { 1, 0 }, { 0, 1 }, { 1, 0 }, { 0, 1 } });

            //var mlp = new MLP(2, 100, 2);
            //var r = mlp.Train(data, labels, data, labels, 5000);
            //Graph.Set(r.TrainingSquaredError, r.TrainingError);
        }
Example #4
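Constructor of MnistMLPTester: it keeps references to the network and the data manager, takes the test-set size from the data manager, and resets the MSE and success-rate counters.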
        public MnistMLPTester(MLP mlp, MnistDataMgr dataMgr)
        {
            _mlp     = mlp;
            _dataMgr = dataMgr;

            _testSetSize = _dataMgr.count;

            _mse         = 0;
            _successRate = 0;
        }
Example #5
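Deserialization constructor of DeepBeliefNet: it restores the layers, weights, biases, learning rate and output count from the SerializationInfo and rebuilds the wrapped MLP from them.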
        public DeepBeliefNet(SerializationInfo info, StreamingContext ctxt)
        {
            // Restore the serialized layers, weights, biases and hyper-parameters,
            // then rebuild the backing MLP from them.
            _layers       = (List<double[]>)info.GetValue("Layers", typeof(List<double[]>));
            _weights      = (List<double[,]>)info.GetValue("Weights", typeof(List<double[,]>));
            _bias         = (List<double[]>)info.GetValue("Bias", typeof(List<double[]>));
            _learningRate = (double)info.GetValue("LearningRate", typeof(double));
            _outputNbr    = (int)info.GetValue("OutputNbr", typeof(int));

            _rnd = new Random();
            _mlp = new MLP(_layers, _weights, _bias, _learningRate);
        }
Example #6
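DeepBeliefNet constructor that initializes its layers, weights and biases from an LRBM plus an output layer of outputNbr units, then wraps the result in an MLP.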
        public DeepBeliefNet(double learningRate, LRBM lrbm, int outputNbr)
        {
            _rnd          = new Random();
            _learningRate = learningRate;
            //_lrbm = lrbm;
            _outputNbr = outputNbr;

            _layers  = new List<double[]>();
            _weights = new List<double[,]>();
            _bias    = new List<double[]>();

            // Build the layer/weight/bias lists from the LRBM, add the output
            // layer, and wrap everything in an MLP.
            InitLRBM(lrbm);
            InitOutput(lrbm);

            _mlp = new MLP(_layers, _weights, _bias, _learningRate);
        }
Example #7
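InitMLP overload that derives the input size from the data manager and picks the output size from the normalization flag (a single normalized output, or 10 outputs, one per MNIST digit class).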
        public void InitMLP(int nrHidden, double learnRate)
        {
            int nrInput = _dataMgr.inputNum;
            int nrOutput;

            // One output unit when the labels are normalized to a single value,
            // otherwise one unit per MNIST digit class (0-9).
            if (_normalizeOuput)
            {
                nrOutput = 1;
            }
            else
            {
                nrOutput = 10;
            }

            _layerStruct = new int[] { nrInput, nrHidden, nrOutput };

            _network = new MLP(_layerStruct, learnRate);
        }
Example #8
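Variant of the previous constructor that takes a full output layer structure; the last entry of outputStruct becomes the number of output units.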
        public DeepBeliefNet(double learningRate, LRBM lrbm, int[] outputStruct)
        {
            _rnd          = new Random();
            _learningRate = learningRate;
            //_lrbm = lrbm;

            // The last entry of outputStruct is the number of output units.
            int outputDepth = outputStruct.Length;
            _outputNbr = outputStruct[outputDepth - 1];

            _layers  = new List<double[]>();
            _weights = new List<double[,]>();
            _bias    = new List<double[]>();

            // Build the layer/weight/bias lists from the LRBM, append the layers
            // described by outputStruct, and wrap them in an MLP.
            InitLRBM(lrbm);
            InitOutputStruct(outputStruct, lrbm);

            _mlp = new MLP(_layers, _weights, _bias, _learningRate);
        }
Example #9
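Init helper that builds a symmetric visible-hidden-visible network with the stored learning rate.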
 void Init()
 {
     // Symmetric visible -> hidden -> visible layer structure.
     int[] netStruct = new int[] { _visibleNbr, _hiddenNbr, _visibleNbr };
     _mlp = new MLP(netStruct, _learningRate);
 }
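For reference, a minimal end-to-end sketch in the style of Example #3, assuming the same MLP(inputs, hidden, outputs) constructor and Train call used there (the third and fourth Train arguments appear to be a validation/test set). It simply spells out the XOR-style run that Example #3 leaves commented out, with the plotting control omitted.

    using MathNet.Numerics.LinearAlgebra.Double;

    // 2-bit inputs and one-hot labels, as in Example #3.
    var data   = new DenseMatrix(new double[,] { { 1, 1 }, { 1, 0 }, { 0, 0 }, { 0, 1 } });
    var labels = new DenseMatrix(new double[,] { { 1, 0 }, { 0, 1 }, { 1, 0 }, { 0, 1 } });

    // 2 inputs, 100 hidden units, 2 outputs.
    var mlp = new MLP(2, 100, 2);

    // Train for 5000 epochs, validating on the training set itself.
    var r = mlp.Train(data, labels, data, labels, 5000);

    // r.TrainingSquaredError and r.TrainingError hold the error values that
    // Example #3 passes to Graph.Set(...).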