Example #1
        // Reads the training file: the first line holds the input and output
        // counts, and every following line holds one sample (inputs, then targets).
        public mythSource StartLoop()
        {
            var strings = File.ReadAllLines(_filename);

            if (strings.Length > 1)
            {
                var str        = strings[0];
                var spstr      = str.Split(' ');
                int _in_count  = int.Parse(spstr[0]);
                int _out_count = int.Parse(spstr[1]);
                mythSource source = new mythSource();
                for (int i = 1; i < strings.Length; i++)
                {
                    spstr = strings[i].Split(' ');

                    // Input values come first on the line...
                    double[] _data = new double[_in_count];
                    int      k     = 0;
                    for (int j = 0; j < _in_count; j++)
                    {
                        _data[j] = double.Parse(spstr[k++]);
                    }

                    // ...followed by the expected output values.
                    double[] _out = new double[_out_count];
                    for (int j = 0; j < _out_count; j++)
                    {
                        _out[j] = double.Parse(spstr[k++]);
                    }

                    mythLayer _inlayer  = new mythLayer(_data);
                    mythLayer _outlayer = new mythLayer(_out);
                    source.Add(_inlayer, _outlayer);
                }
                return source;
            }
            return null;
        }
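Judging from the parser above, the file behind _filename starts with a header line giving the input and output counts, and each later line holds one sample's inputs followed by its target values. A hypothetical file that would satisfy this format (the OR-gate data and the name train.txt are illustrations, not taken from the project):

        // Writes a hypothetical training file for StartLoop (requires System.IO).
        // Header "2 1" = 2 inputs and 1 output per sample; one sample per line.
        File.WriteAllLines("train.txt", new[]
        {
            "2 1",
            "0 0 0",   // inputs 0 0 -> target 0
            "0 1 1",   // inputs 0 1 -> target 1
            "1 0 1",
            "1 1 1"
        });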
Example #2
File: mythNeuron.cs Project: godka/shall
 // Gradient-descent weight update: every weight is decreased by
 // rate * (previous-layer output) * this neuron's error term Delta.
 public void Update(mythLayer layer, double rate)
 {
     for (int i = 0; i < layer.get().Length; i++)
     {
         var __delta = layer.get()[i].Output * this.Delta;
         _w[i] -= __delta * rate;
     }
 }
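This is the standard stochastic gradient-descent rule w[i] -= rate * output_i * Delta, where Delta is the error term computed by CalcDelta in Example #7 below. A standalone illustration of a single update with made-up numbers:

 // One weight update with arbitrary values (not from the project):
 double w = 0.40, input = 0.9, delta = 0.12, rate = 0.5;
 w -= input * delta * rate;   // 0.40 - 0.9 * 0.12 * 0.5, so w is now roughly 0.346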
Example #3
File: mythNeuron.cs Project: godka/shall
 // Lazily allocates one weight per neuron of the incoming layer and fills it
 // with random values on first use, then runs the forward pass
 // (the project spells the method "Foward").
 public double Activate(mythLayer layer)
 {
     if (_w == null)
     {
         _w = new double[layer.Length];
         initRandom(_w);
     }
     return Foward(layer);
 }
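initRandom is not among these examples; a minimal sketch of what such an initialiser might look like, assuming weights drawn uniformly from [-1, 1) with System.Random (both the range and the field _rng are assumptions):

 // Hypothetical weight initialiser, not from the project.
 private static readonly Random _rng = new Random();

 private void initRandom(double[] w)
 {
     for (int i = 0; i < w.Length; i++)
     {
         w[i] = _rng.NextDouble() * 2.0 - 1.0;   // uniform in [-1, 1)
     }
 }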
Example #4
        // Total error of a layer: the sum of every neuron's individual error term.
        private double ETotal(mythLayer layer)
        {
            double _etotal = 0;

            foreach (var t in layer.get())
            {
                _etotal += t.CalcE;
            }
            return _etotal;
        }
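CalcE itself is not shown here. Since CalcDelta in Example #7 uses Output - Target as the output-layer gradient, CalcE is most likely the usual half squared error per neuron; a sketch under that assumption:

        // Assumed definition of the per-neuron error summed by ETotal:
        // E = 1/2 * (Target - Output)^2, whose derivative with respect to the
        // output is (Output - Target), matching CalcDelta's output-layer branch.
        public double CalcE
        {
            get { return 0.5 * (Target - Output) * (Target - Output); }
        }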
Example #5
File: mythNeuron.cs Project: godka/shall
        // Forward pass: weighted sum of the previous layer's outputs,
        // squashed through the sigmoid activation.
        private double Foward(mythLayer layer)
        {
            var _layerVal = layer.get();
            //if (_layerVal.Length != _w.Length)
            double ret = 0;

            for (int i = 0; i < _layerVal.Length; i++)
            {
                ret += _layerVal[i].Output * _w[i];
            }
            //ret += _φ;    // presumably a bias term, left disabled in the source
            _output = sigmoid(ret);
            return _output;
        }
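The sigmoid helper is not shown either; a minimal sketch assuming the standard logistic function 1 / (1 + e^-x):

        // Standard logistic sigmoid (assumed implementation, not from the project).
        private static double sigmoid(double x)
        {
            return 1.0 / (1.0 + Math.Exp(-x));
        }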
Example #6
 // Adds one training pair; the first pair fixes the expected input width,
 // and later pairs with a different width are silently ignored.
 public void Add(mythLayer __in, mythLayer __out)
 {
     if (_array_col < 0)
     {
         _array_col = __in.Length;
     }
     else if (_array_col != __in.Length)
     {
         return;
     }
     _in.Add(__in);
     _out.Add(__out);
 }
Example #7
File: mythNeuron.cs Project: godka/shall
        // Backpropagation error term. Without an argument the neuron is treated as
        // an output neuron (dE/dout = Output - Target); otherwise the deltas of the
        // following layer are summed back through the connecting weights.
        public void CalcDelta(mythLayer layer = null)
        {
            double dedoutput = 0;

            if (layer == null)
            {
                dedoutput = Output - Target;
            }
            else
            {
                //dedoutput = de1/doutput1 + de2/doutput2
                foreach (var t in layer.get())
                {
                    dedoutput += t.Delta * t._w[_index];
                }
            }
            var doutputdnet = disigmoid(Output);

            _delta = dedoutput * doutputdnet;
        }
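disigmoid receives the neuron's stored Output, i.e. the value that already went through the sigmoid, so it is presumably the sigmoid derivative written in terms of that output, output * (1 - output); a sketch under that assumption:

        // Assumed derivative helper: sigmoid'(net) expressed through the
        // already-computed sigmoid output, as used by CalcDelta above.
        private static double disigmoid(double output)
        {
            return output * (1.0 - output);
        }

The apparent call order per training sample would then be: CalcDelta() on every output neuron, CalcDelta(followingLayer) on every hidden neuron, and finally Update(precedingLayer, rate) on each neuron.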