Example #1
        public List<ConectionsInfo> GetMinusGradient()
        {
            var res = new List<ConectionsInfo>();
            var flD = NeuronUtility.FirstLayerDerivatives(mLayers.Last(), mSample);

            // Walk the layers back to front, propagating the derivatives and
            // collecting the per-layer gradient for each connection block.
            for (int i = 0; i < mConections.Count; i++)
            {
                flD = NeuronUtility.LayerDerivatives(out ConectionsInfo c, flD, mConections[mConections.Count - i - 1], mLayers[mLayers.Count - i - 2]);
                res.Add(c); // assumed completion: the original snippet is truncated here
            }

            return res;     // assumed completion: gradients collected in back-to-front layer order
        }
Example #2
        public void Update(Sample s)
        {
            // Re-run the forward pass for a new sample.
            mLayers[0] = s.GetFirstLayer();
            mSample    = s;

            // The first connection block must match the sample's input size.
            if (mConections[0].Back != s.Data.Length)
            {
                throw new Exception("Bad Sample");
            }

            // Recompute every layer's activations from the stored connections.
            for (int i = 0; i < mConections.Count; i++)
            {
                mLayers[i + 1] = NeuronUtility.CountALayer(mLayers[i], mConections[i]);
            }
        }
Example #3
        public NetworkState(List<ConectionsInfo> c, Sample s)
        {
            // Deep-copy the connection blocks so this state owns its own weights.
            mConections = new List<ConectionsInfo>();
            for (int i = 0; i < c.Count; i++)
            {
                mConections.Add((ConectionsInfo)c[i].Clone());
            }

            // The input layer comes straight from the sample.
            mLayers = new List<double[]>
            {
                s.GetFirstLayer()
            };
            mSample = s;

            // The first connection block must match the sample's input size.
            if (mConections[0].Back != s.Data.Length)
            {
                throw new Exception("Bad Sample");
            }

            // Forward pass: compute each layer from the previous one.
            for (int i = 0; i < mConections.Count; i++)
            {
                mLayers.Add(NeuronUtility.CountALayer(mLayers[i], mConections[i]));
            }
        }
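
Usage sketch (not from the source): the three members above combine into a simple forward/backward cycle. Only NetworkState, Update, and GetMinusGradient come from the examples; the helper name Gradients and the assumption that the ConectionsInfo list and Sample instances are built elsewhere are hypothetical.

        // Hypothetical helper: reuse one NetworkState across several samples
        // and collect the negative gradient produced for each of them.
        public static List<List<ConectionsInfo>> Gradients(List<ConectionsInfo> connections, List<Sample> samples)
        {
            var state = new NetworkState(connections, samples[0]);   // forward pass runs in the constructor (Example #3)
            var grads = new List<List<ConectionsInfo>>();

            foreach (var sample in samples)
            {
                state.Update(sample);                 // re-run the forward pass for this sample (Example #2)
                grads.Add(state.GetMinusGradient());  // backward pass (Example #1)
            }

            return grads;
        }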