        public Layer(Layer previousLayer, object layerData,
                     ref Dictionary <int, Neuron> neurons, ref int neuronCounter,
                     ref Dictionary <int, Connection> connections, ref int connectionCounter,
                     double learningRate,
                     double weightRescaleFactor)
        {
            // Form connections based upon layer type
            if (layerData is LayerData.RELU)
            {
                // One-To-One Connections
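                // Each neuron of the previous layer feeds exactly one new ReLU neuron,
                // with no bias and a connection whose weight is initialised to 1.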
                neuronIdxs = new List <int>();
                tFuncType  = TransferFuncType.RECTILINEAR;
                foreach (int prevNeuronIdx in previousLayer.neuronIdxs)
                {
                    Neuron neuron = new Neuron(tFuncType);
                    neuron.biasAllowed = false;
                    Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor, false);
                    connection.weight = 1;
                    neuron.Idx        = neuronCounter;
                    connection.srcDest[prevNeuronIdx] = neuron.Idx;
                    neuron.incomingConnection.Add(connection.Idx);
                    neurons[prevNeuronIdx].outgoingConnection.Add(connection.Idx);
                    neurons[neuronCounter++] = neuron;
                    neuronIdxs.Add(neuron.Idx);
                }
            }
            else if (layerData is LayerData.FullyConnected)
            {
                // Cross Connections
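                // Every new neuron is connected to all neurons of the previous layer;
                // the Neuron constructor overload used here creates those connections itself.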
                LayerData.FullyConnected currLayerData = (LayerData.FullyConnected)layerData;
                tFuncType  = currLayerData.tFuncType;
                neuronIdxs = new List <int>();
                for (int i = 0; i < currLayerData.cntNeurons; i++)
                {
                    Neuron neuron = new Neuron(tFuncType, previousLayer, ref neurons, ref neuronCounter, ref connections,
                                               ref connectionCounter, learningRate, weightRescaleFactor);
                    neuronIdxs.Add(neuron.Idx);
                }
            }
            else if (layerData is LayerData.Convolutional)
            {
                LayerData.Convolutional currLayerData = (LayerData.Convolutional)layerData;
                tFuncType  = TransferFuncType.LINEAR;
                neuronIdxs = new List <int>();
                int dimIn = (int)Math.Sqrt(previousLayer.cntNeurons);

                // Form connections for each filter
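                // A single Connection object is created per kernel cell and reused at every
                // output position, which is what gives the layer its weight sharing.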
                foreach (int filter in currLayerData.filters)
                {
                    int filterStartIdx, filterEndIdx;
                    if (currLayerData.padding)
                    {
                        filterStartIdx = -filter / 2;
                        filterEndIdx   = filter / 2;
                    }
                    else
                    {
                        filterStartIdx = 0;
                        filterEndIdx   = filter - 1;
                    }
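                    // With padding the kernel offsets run from -filter/2 to +filter/2, centring the
                    // kernel on each input cell; without padding they run from 0 to filter-1 and only
                    // positions fully covered by the filter are generated below.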
                    Dictionary <int, int> filterConnections = new Dictionary <int, int>();
                    for (int k1 = filterStartIdx; k1 <= filterEndIdx; k1++)
                    {
                        for (int k2 = filterStartIdx; k2 <= filterEndIdx; k2++)
                        {
                            Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
                            int        hashIdx    = (k1 + filter / 2) * filter + (k2 + filter / 2);
                            connection.weight          = 1;
                            connection.updateAllowed   = false;
                            filterConnections[hashIdx] = connection.Idx;
                        }
                    }
                    // Zero padding is introduced for areas that are not completely overlapped by the filter
                    for (int i = 0; i + (currLayerData.padding ? 0 : filter - 1) < dimIn; i += currLayerData.stride)
                    {
                        for (int j = 0; j + (currLayerData.padding ? 0 : filter - 1) < dimIn; j += currLayerData.stride)
                        {
                            Neuron neuron = new Neuron(tFuncType);
                            neuron.Idx               = neuronCounter;
                            neuron.biasAllowed       = false;
                            neurons[neuronCounter++] = neuron;
                            neuronIdxs.Add(neuron.Idx);
                            for (int k1 = filterStartIdx; k1 <= filterEndIdx; k1++)
                            {
                                for (int k2 = filterStartIdx; k2 <= filterEndIdx; k2++)
                                {
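                                    // GetIndex returns -1 for coordinates outside the input; skipping them acts as zero padding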
                                    int idx = GetIndex(i + k1, j + k2, dimIn, previousLayer);
                                    if (idx == -1)
                                    {
                                        continue;
                                    }
                                    int hashIdx = (k1 + filter / 2) * filter + (k2 + filter / 2);
                                    int cIdx    = filterConnections[hashIdx];
                                    connections[cIdx].srcDest[idx] = neuron.Idx;
                                    neurons[idx].outgoingConnection.Add(cIdx);
                                    neurons[neuron.Idx].incomingConnection.Add(cIdx);
                                }
                            }
                        }
                    }
                }
            }
            else if (layerData is LayerData.MaxPool)
            {
                LayerData.MaxPool currLayerData = (LayerData.MaxPool)layerData;
                this.tFuncType = TransferFuncType.MAXPOOL;
                neuronIdxs     = new List <int>();
                int dimIn = (int)Math.Sqrt(previousLayer.cntNeurons);

                // Zero padding is introduced for areas that are not completely overlapped by the filter
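                // Each pooling window gets its own set of weight-1 connections; the MAXPOOL
                // transfer function is then responsible for selecting the largest of its inputs.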
                for (int i = 0; i < dimIn; i += currLayerData.stride)
                {
                    for (int j = 0; j < dimIn; j += currLayerData.stride)
                    {
                        Neuron neuron = new Neuron(tFuncType);
                        neuron.biasAllowed       = false;
                        neuron.Idx               = neuronCounter;
                        neurons[neuronCounter++] = neuron;
                        neuronIdxs.Add(neuron.Idx);

                        Dictionary <int, int> filterConnections = new Dictionary <int, int>();
                        for (int k1 = 0; k1 < currLayerData.size; k1++)
                        {
                            for (int k2 = 0; k2 < currLayerData.size; k2++)
                            {
                                Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor, false);
                                connection.weight = 1;
                                int hashIdx = k1 * currLayerData.size + k2;
                                filterConnections[hashIdx] = connection.Idx;
                            }
                        }

                        // Form new connections
                        for (int k1 = 0; k1 < currLayerData.size; k1++)
                        {
                            for (int k2 = 0; k2 < currLayerData.size; k2++)
                            {
                                int idx = GetIndex(i + k1, j + k2, dimIn, previousLayer);
                                if (idx == -1)
                                {
                                    continue;
                                }
                                int hashIdx = k1 * currLayerData.size + k2;
                                int cIdx    = filterConnections[hashIdx];
                                connections[cIdx].srcDest[idx] = neuron.Idx;
                                neurons[idx].outgoingConnection.Add(cIdx);
                                neurons[neuron.Idx].incomingConnection.Add(cIdx);
                            }
                        }
                    }
                }
            }
            else
            {
                throw new Exception("Invalid layer type given to the Layer constructor!");
            }
            cntNeurons = neuronIdxs.Count;
        }
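
        /// <summary>
        /// Reconstructs a NeuralNetwork from its XML representation: network attributes and
        /// layer structure first, then neuron biases, then connection weights.
        /// </summary>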
        public static NeuralNetwork Load(string filePath, bool showNNUI = false)
        {
            XmlDocument doc = new XmlDocument();

            try
            {
                doc.Load(filePath);
            }
            catch (Exception ex)
            {
                throw new Exception("Invalid file path given to NeuralNetwork.Load()!", ex);
            }

            int               trainingCounter;
            int               layerCnt     = 0;
            double            learningRate = 0;
            NeuralNetworkArgs args         = new NeuralNetworkArgs();

            string basePath = "NeuralNetwork/";

            int.TryParse(XPathValue(basePath + "@TrainingDone", ref doc), out trainingCounter);
            int.TryParse(XPathValue(basePath + "@IndendedTrainingCnt", ref doc), out args.intendedTrainingCnt);
            double.TryParse(XPathValue(basePath + "@LearningRate", ref doc), out args.learningRate);
            double.TryParse(XPathValue(basePath + "@Momentum", ref doc), out args.momentum);
            basePath += "Layers/";

            int.TryParse(XPathValue(basePath + "@Count", ref doc), out layerCnt);
            args.layersData = new object[layerCnt];

            XmlNodeList layerList = doc.SelectNodes("NeuralNetwork/Layers/Layer");

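            // Rebuild a LayerData description for each saved layer so that the
            // NeuralNetwork constructor can recreate the original topology.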
            for (int i = 0; i < layerCnt; i++)
            {
                XmlNode layerNode = layerList[i];
                switch (layerNode.Attributes["Type"].Value)
                {
                case "RELU":
                    LayerData.RELU reluLayer = new LayerData.RELU();
                    args.layersData[i] = reluLayer;
                    break;

                case "MAXPOOL":
                    LayerData.MaxPool maxpoolLayer = new LayerData.MaxPool();
                    int.TryParse(layerNode.Attributes["Size"].Value, out maxpoolLayer.size);
                    int.TryParse(layerNode.Attributes["Stride"].Value, out maxpoolLayer.stride);
                    args.layersData[i] = maxpoolLayer;
                    break;

                case "FULLYCONNECTED":
                    LayerData.FullyConnected fullyLayer = new LayerData.FullyConnected();
                    int.TryParse(layerNode.Attributes["Neurons"].Value, out fullyLayer.cntNeurons);
                    Enum.TryParse <TransferFuncType>(layerNode.Attributes["NeuronType"].Value, out fullyLayer.tFuncType);
                    args.layersData[i] = fullyLayer;
                    break;

                case "CONVOLUTIONAL":
                    LayerData.Convolutional convolutionalLayer = new LayerData.Convolutional();
                    int.TryParse(layerNode.Attributes["Stride"].Value, out convolutionalLayer.stride);
                    bool.TryParse(layerNode.Attributes["ZeroPadding"].Value, out convolutionalLayer.padding);
                    XmlNodeList filterList = layerNode.SelectNodes("Filters/Filter");
                    convolutionalLayer.filters = new int[filterList.Count];
                    int j = 0;
                    foreach (XmlNode filterNode in filterList)
                    {
                        int.TryParse(filterNode.Attributes["Size"].Value, out convolutionalLayer.filters[j++]);
                    }
                    args.layersData[i] = convolutionalLayer;
                    break;

                default:
                    throw new Exception("Invalid layer type entry found in the network file!");
                }
            }

            NeuralNetwork nn = new NeuralNetwork(args.intendedTrainingCnt, 1, args.learningRate, args.momentum, showNNUI, args.layersData);

            nn.trainingCounter = trainingCounter;   // already parsed from the @TrainingDone attribute above

            nn.RegisterOutput("Loading Neurons");
            basePath = "NeuralNetwork/Neurons/Neuron[@Index='{0}']/@Bias";
            int neuronCnt;

            int.TryParse(XPathValue("NeuralNetwork/Neurons/@Count", ref doc), out neuronCnt);
            for (int i = 0; i < neuronCnt; i++)
            {
                double.TryParse(XPathValue(string.Format(basePath, i.ToString()), ref doc), out nn.neurons[i].bias);
            }
            nn.RegisterOutput("Loading Connections");

            XmlNodeList connectionList = doc.SelectNodes("NeuralNetwork/Connections/Connection");

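            // Restore each connection's weight, previous weight delta, learning rate
            // and source-to-destination mapping from its saved attributes.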
            foreach (XmlNode connection in connectionList)
            {
                int idx, src, dest;
                int.TryParse(connection.Attributes["Index"].Value, out idx);
                int.TryParse(connection.Attributes["Source"].Value, out src);
                int.TryParse(connection.Attributes["Destination"].Value, out dest);
                double.TryParse(connection.Attributes["Weight"].Value, out nn.connections[idx].weight);
                double.TryParse(connection.Attributes["PreviousWeightDelta"].Value, out nn.connections[idx].previousWeightDelta);
                double.TryParse(connection.Attributes["LearningRate"].Value, out nn.connections[idx].learningRate);
                nn.connections[idx].srcDest[src] = dest;
            }
            nn.RegisterOutput("Neural Network : " + XPathValue("NeuralNetwork/@Name", ref doc) + " Loaded Successfully :)");
            return(nn);
        }
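
        // Example usage (the file name here is hypothetical):
        //   NeuralNetwork nn = NeuralNetwork.Load("trainedNetwork.xml", showNNUI: true);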