Example #1
        public virtual void Serialize(Archive ar)
        {
            if (ar.IsStoring())
            {
                // Storing: write the learning rate, then the layer count, then each layer in order
                ar.Write(m_etaLearningRate);
                ar.Write(m_Layers.Count);
                foreach (var lit in m_Layers)
                {
                    lit.Serialize(ar);
                }
            }
            else
            {
                // Loading: read the learning rate and the layer count, then rebuild the layer list

                double eta;
                ar.Read(out eta);
                m_etaLearningRate = eta;              // two-step storage is needed since m_etaLearningRate is "volatile"

                int nLayers;
                var pLayer = (NNLayer)null;

                ar.Read(out nLayers);
                m_Layers.Clear();
                m_Layers = new NNLayerList(nLayers);
                for (int ii = 0; ii < nLayers; ii++)
                {
                    pLayer = new NNLayer("", pLayer);

                    m_Layers.Add(pLayer);
                    pLayer.Serialize(ar);
                }
            }
        }
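
For context, here is a minimal round-trip sketch of how this Serialize method might be driven. The Archive constructor and the file handling shown are assumptions for illustration only; the example above only shows that Archive exposes IsStoring, Read, and Write.

 using System.IO;

 // Hypothetical driver: the Archive constructor taking a stream and a storing
 // flag is an assumption; the real class may be built differently.
 var network = new NeuralNetwork();
 // ... build layers / train the network here ...

 // Store the network to disk.
 using (var stream = File.Create("network.nn"))
 {
     var ar = new Archive(stream, isStoring: true);   // assumed constructor
     network.Serialize(ar);                           // takes the IsStoring() branch
 }

 // Load it back into a fresh instance; Serialize rebuilds m_Layers from the archive.
 var restored = new NeuralNetwork();
 using (var stream = File.OpenRead("network.nn"))
 {
     var ar = new Archive(stream, isStoring: false);  // assumed constructor
     restored.Serialize(ar);                          // takes the loading branch
 }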
Example #2
 public NeuralNetwork()
 {
     m_etaLearningRate = .001;      // arbitrary, so that brand-new NNs can be serialized with a non-ridiculous number
     m_cBackprops      = 0;
     m_Layers          = new NNLayerList();
 }
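
As the inline comment notes, the constructor seeds m_etaLearningRate with a small arbitrary value so that a brand-new network serializes with a sensible number. The layer list starts empty; the loading branch of Serialize in Example #1 replaces it with the layers read from the archive.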