public NeuralNetwork(int[] layers, double learning_rate, double momentum) {
            // layers[i] is the number of units in layer i; layer 0 is the
            // input layer and the last entry is the output layer.
            _eta = learning_rate;
            _alpha = momentum;
            _squash = new Sigmoid();
            _rand = new Random();

            // Build one unit list per layer, each unit seeded with a random weight.
            _neurons = new List<List<NeuralUnit>>();
            foreach (int unitCount in layers) {
                List<NeuralUnit> layer = new List<NeuralUnit>();
                for (int u = 0; u < unitCount; u++)
                    layer.Add(new NeuralUnit(getRandomWeight(), _squash));
                _neurons.Add(layer);
            }
            _last = _neurons.Count - 1;

            // Input-layer units carry a single input: their own bias term x0.
            foreach (NeuralUnit unit in _neurons[0]) {
                unit.Inputs = new Input[1];
                unit.Inputs[0] = unit.XZero;
            }
            // Every later unit takes x0 plus one input per unit of the previous layer.
            for (int layerIdx = 1; layerIdx < _neurons.Count; layerIdx++) {
                foreach (NeuralUnit unit in _neurons[layerIdx]) {
                    unit.Inputs = new Input[_neurons[layerIdx - 1].Count + 1];
                    unit.Inputs[0] = unit.XZero;
                }
            }

            // Wire layer i's outputs into layer i+1's inputs. The output object
            // is shared by reference, so updating a unit's output is immediately
            // visible to every consuming unit. Slot 0 holds x0, so the output of
            // source unit j lands in input slot j + 1. Input weights start random.
            for (int layerIdx = 0; layerIdx < _last; layerIdx++) {
                for (int srcIdx = 0; srcIdx < _neurons[layerIdx].Count; srcIdx++) {
                    NeuralUnit source = _neurons[layerIdx][srcIdx];
                    foreach (NeuralUnit sink in _neurons[layerIdx + 1])
                        sink.Inputs[srcIdx + 1] = new Input(ref source.Oput, getRandomWeight());
                }
            }

            // Name each unit "<layer letter><index>", lettering layers from 'i'
            // (105 is the code point of 'i'); the output shares the unit's name.
            for (int layerIdx = 0; layerIdx < _neurons.Count; layerIdx++) {
                for (int u = 0; u < _neurons[layerIdx].Count; u++) {
                    string unitName = ((char)(layerIdx + 105)).ToString() + u.ToString();
                    _neurons[layerIdx][u].Name = unitName;
                    _neurons[layerIdx][u].Oput.Name = unitName;
                }
            }
        }
        /// <summary>
        /// Rebuilds the network from its serialized XML form: reads the
        /// network-level settings from the &lt;network&gt; element, creates the
        /// units of each &lt;layer&gt; with their stored weights, then reconnects
        /// unit outputs to the inputs that consume them by name.
        /// </summary>
        /// <param name="doc">Parsed XML produced when the network was saved.</param>
        /// <exception cref="InvalidOperationException">
        /// The document contains no &lt;network&gt; element.
        /// </exception>
        private void instantiateAnnFromFile(XmlDocument doc) {
            XmlNode node = null;
            // Locate the <network> element and read the global settings:
            // name, squashing function, layer count, eta and alpha.
            foreach (XmlNode n in doc.ChildNodes) {
                if (n.Name == "network") {
                    _name = n.Attributes[0].Value;
                    string squashFunction = n.Attributes[1].Value;
                    string layers = n.Attributes[2].Value;
                    string learningRate = n.Attributes[3].Value;
                    string learningMomentum = n.Attributes[4].Value;
                    // sigmoid is the default; "tanh" is the only other option
                    _squash = new Sigmoid();
                    if (squashFunction == "tanh")
                        _squash = new Tanh();
                    // one (initially empty) unit list per layer
                    _neurons = new List<List<NeuralUnit>>();
                    int num = int.Parse(layers, System.Globalization.CultureInfo.InvariantCulture);
                    for (int i = 0; i < num; i++)
                        _neurons.Add(new List<NeuralUnit>());
                    // The file stores machine-written numbers, so parse with the
                    // invariant culture; a comma-decimal locale would otherwise
                    // misread (or fail on) "0.5".
                    _eta = double.Parse(learningRate, System.Globalization.CultureInfo.InvariantCulture);
                    _alpha = double.Parse(learningMomentum, System.Globalization.CultureInfo.InvariantCulture);
                    node = n;
                    break;
                }
            }
            // Fail with a clear message instead of a NullReferenceException below.
            if (node == null)
                throw new InvalidOperationException("The XML document contains no <network> element.");

            // Parse every <unit> of every <layer>: its name, its input weights,
            // and the names of the units whose outputs feed it.
            int currentLayer = 0;
            foreach (XmlNode n in node.ChildNodes) {
                if (n.Name != "layer")
                    continue;
                foreach (XmlNode n2 in n.ChildNodes) {
                    if (n2.Name != "unit")
                        continue;
                    List<double> inputWeights = new List<double>();
                    foreach (string weight in splitCsvSegments(n2.Attributes[1].Value))
                        inputWeights.Add(double.Parse(weight, System.Globalization.CultureInfo.InvariantCulture));

                    NeuralUnit nu = new NeuralUnit(_squash);
                    nu.Name = n2.Attributes[0].Value;
                    nu.Oput.Name = nu.Name;
                    nu.Inputs = new Input[inputWeights.Count];
                    if (inputWeights.Count > 0) {
                        // slot 0 is always the unit's own bias input x0
                        nu.Inputs[0] = nu.XZero;
                        nu.Inputs[0].Weight = inputWeights[0];
                        for (int i = 1; i < inputWeights.Count; i++)
                            nu.Inputs[i] = new Input(inputWeights[i]);
                    }

                    nu.ConnectedUnits = splitCsvSegments(n2.Attributes[2].Value).ToArray();
                    _neurons[currentLayer].Add(nu);
                }
                currentLayer++;
            }

            _last = _neurons.Count - 1;

            // Wire the units back together. Index every unit by name once so
            // resolving each connection is a dictionary lookup instead of a
            // full scan over all units (the old code was O(n^2)). When two
            // units share a name the last one wins, matching the old scan.
            List<NeuralUnit> allUnits = new List<NeuralUnit>();
            Dictionary<string, NeuralUnit> unitsByName = new Dictionary<string, NeuralUnit>();
            foreach (List<NeuralUnit> layer in _neurons) {
                foreach (NeuralUnit nu in layer) {
                    allUnits.Add(nu);
                    unitsByName[nu.Name] = nu;
                }
            }
            foreach (NeuralUnit nu in allUnits) {
                for (int j = 0; j < nu.ConnectedUnits.Length; j++) {
                    // input 0 is x0, so connection j feeds input slot j + 1
                    NeuralUnit source;
                    if (unitsByName.TryGetValue(nu.ConnectedUnits[j], out source))
                        nu.Inputs[j + 1].Oput = source.Oput;
                }
            }
        }

        /// <summary>
        /// Splits a comma-terminated list such as "a,b,c," into its non-empty
        /// segments; text after the final comma is ignored, matching the
        /// serialized format. Replaces two duplicated inline loops whose
        /// empty-segment guard compared against "," — a test that could never
        /// fire, letting empty segments reach double.Parse and throw.
        /// </summary>
        private static List<string> splitCsvSegments(string csv) {
            List<string> segments = new List<string>();
            while (csv.Contains(",")) {
                int i = csv.IndexOf(",");
                string segment = csv.Substring(0, i);
                if (segment.Length > 0)
                    segments.Add(segment);
                csv = csv.Remove(0, i + 1);
            }
            return segments;
        }
 /// <summary>
 /// Builds a unit that squashes its net input with the supplied function.
 /// </summary>
 public NeuralUnit(SquashingFunction sq)
 {
     initialize();
     _squash = sq;
 }
 /// <summary>
 /// Builds a unit with the given squashing function and an explicit
 /// starting weight on the bias input x0.
 /// </summary>
 public NeuralUnit(double weight, SquashingFunction sq) : this(sq) {
     // Chaining to the (sq) constructor removes the duplicated
     // initialize()/_squash setup; only the bias weight differs here.
     _xZero.Weight = weight;
 }
 /// <summary>
 /// Default constructor: builds a unit with the sigmoid squashing function.
 /// Chains to the (sq) constructor so the initialize()/_squash setup is
 /// written in exactly one place.
 /// </summary>
 public NeuralUnit() : this(new Sigmoid()) {
 }