Exemple #1
0
        /// <summary>
        /// Serializes the network to, or deserializes it from, the given archive.
        /// Stored layout: learning rate, layer count, then each layer in order.
        /// </summary>
        /// <param name="ar">Archive to write to (when <c>IsStoring()</c>) or read from.</param>
        public virtual void Serialize(Archive ar)
        {
            if (ar.IsStoring())
            {
                ar.Write(m_etaLearningRate);
                ar.Write(m_Layers.Count);
                foreach (var layer in m_Layers)
                {
                    layer.Serialize(ar);
                }
            }
            else
            {
                double eta;
                ar.Read(out eta);
                m_etaLearningRate = eta;              // two-step storage is needed since m_etaLearningRate is "volatile"

                int nLayers;
                ar.Read(out nLayers);

                // Replace the layer list outright; presizing to nLayers avoids
                // regrowth, and makes the former Clear() call redundant.
                m_Layers = new NNLayerList(nLayers);

                NNLayer pLayer = null;
                for (int ii = 0; ii < nLayers; ii++)
                {
                    // Each new layer is linked back to the previously created one,
                    // rebuilding the forward chain in storage order.
                    pLayer = new NNLayer("", pLayer);

                    m_Layers.Add(pLayer);
                    pLayer.Serialize(ar);
                }
            }
        }
Exemple #2
0
 /// <summary>
 /// Constructs a layer with the given label, chained to the layer feeding it.
 /// </summary>
 /// <param name="str">Display label for the layer.</param>
 /// <param name="pPrev">Preceding layer in the network, or null for the input layer.</param>
 public NNLayer(string str, NNLayer pPrev /* =NULL */)
 {
     m_Neurons = new NNNeuronList();
     m_Weights = new NNWeightList();
     m_sigmoid = new SigmoidFunction();
     m_pPrevLayer = pPrev;
     label = str;
 }
Exemple #3
0
 bool m_bFloatingPointWarning;  // flag for one-time warning (per layer) about potential floating point overflow
 /// <summary>
 /// Constructs a detached, unlabeled layer (no previous layer) and initializes it.
 /// </summary>
 public NNLayer()
 {
     m_Neurons = new NNNeuronList();
     m_Weights = new NNWeightList();
     m_sigmoid = new SigmoidFunction();
     m_pPrevLayer = null;
     label = "";
     // Initialize() runs last so every field above is set before it executes.
     Initialize();
 }