public static void ConstructLayers(object[] layersData, ref List <Layer> layers,
                                           ref Dictionary <int, Neuron> neurons, ref int neuronCounter,
                                           ref Dictionary <int, Connection> connections, ref int connectionCounter,
                                           double learningRate,
                                           double weightRescaleFactor)
        {
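            // Builds the layer list front to back. A fully connected layer collapses everything into a single
            // sub-layer; convolutional, RELU and max-pool layers are constructed once per preceding sub-layer
            // (and, for convolutions, once per filter) and merged into one logical Layer via MergeLayer().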
            List <Layer> lastSubLayers = new List <Layer>();
            Layer        lastLayer     = null;

            foreach (object layerData in layersData)
            {
                if (layerData is LayerData.FullyConnected)
                {
                    lastLayer = new Layer(lastLayer, layerData, ref neurons, ref neuronCounter, ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
                    lastSubLayers.Clear();
                    lastSubLayers.Add(lastLayer);
                }
                else if (layerData is LayerData.Convolutional)
                {
                    List <Layer>            currSubLayers = new List <Layer>();
                    LayerData.Convolutional currLayerData = (LayerData.Convolutional)layerData;
                    lastLayer = new Layer(TransferFuncType.LINEAR);
                    foreach (Layer subLayer in lastSubLayers)
                    {
                        foreach (int filter in currLayerData.filters)
                        {
                            LayerData.Convolutional layerDataTmp = new LayerData.Convolutional();
                            layerDataTmp.stride  = currLayerData.stride;
                            layerDataTmp.padding = currLayerData.padding;
                            layerDataTmp.filters = new int[] { filter };

                            Layer layerTmp = new Layer(subLayer, layerDataTmp, ref neurons, ref neuronCounter, ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
                            currSubLayers.Add(layerTmp);
                            lastLayer.MergeLayer(layerTmp);
                        }
                    }
                    lastSubLayers = currSubLayers;
                }
                else if (layerData is LayerData.RELU)
                {
                    List <Layer> currSubLayers = new List <Layer>();
                    lastLayer = new Layer(TransferFuncType.RECTILINEAR);
                    foreach (Layer subLayer in lastSubLayers)
                    {
                        Layer layerTmp = new Layer(subLayer, layerData, ref neurons, ref neuronCounter, ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
                        currSubLayers.Add(layerTmp);
                        lastLayer.MergeLayer(layerTmp);
                    }
                    lastSubLayers = currSubLayers;
                }
                else if (layerData is LayerData.MaxPool)
                {
                    List <Layer> currSubLayers = new List <Layer>();
                    lastLayer = new Layer(TransferFuncType.MAXPOOL);
                    foreach (Layer subLayer in lastSubLayers)
                    {
                        Layer layerTmp = new Layer(subLayer, layerData, ref neurons, ref neuronCounter, ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
                        currSubLayers.Add(layerTmp);
                        lastLayer.MergeLayer(layerTmp);
                    }
                    lastSubLayers = currSubLayers;
                }
                else
                {
                    throw new Exception("Invalid input given to Layer.ConstructLayers!!!!");
                }
                layers.Add(lastLayer);
            }
        }
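        // A minimal usage sketch, not taken from the original sources: the LayerData field names come from this
        // file, while the input size (28 * 28), learning rate and rescale factor are illustrative placeholders.
        // Because convolutional, RELU and max-pool layers build on the preceding sub-layers, the stack is assumed
        // to start with a fully connected layer acting as the input layer:
        //
        //     var layers      = new List<Layer>();
        //     var neurons     = new Dictionary<int, Neuron>();
        //     var connections = new Dictionary<int, Connection>();
        //     int neuronCounter = 0, connectionCounter = 0;
        //
        //     object[] layersData =
        //     {
        //         new LayerData.FullyConnected { cntNeurons = 28 * 28, tFuncType = TransferFuncType.LINEAR },
        //         new LayerData.Convolutional  { stride = 1, padding = true, filters = new int[] { 3 } },
        //         new LayerData.RELU(),
        //         new LayerData.MaxPool        { size = 2, stride = 2 },
        //         new LayerData.FullyConnected { cntNeurons = 10, tFuncType = TransferFuncType.LINEAR }
        //     };
        //     Layer.ConstructLayers(layersData, ref layers, ref neurons, ref neuronCounter,
        //                           ref connections, ref connectionCounter, 0.01, 1.0);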
        public static NeuralNetwork Load(string filePath, bool showNNUI = false)
        {
            XmlDocument doc = new XmlDocument();

            try
            {
                doc.Load(filePath);
            }
            catch
            {
                throw new Exception("Invalid filepath given to NeuralNetwork.Load() !");
            }

            int               layerCnt = 0;
            NeuralNetworkArgs args     = new NeuralNetworkArgs();

            string basePath = "NeuralNetwork/";

            int.TryParse(XPathValue(basePath + "@TrainingDone", ref doc), out trainingCounter);
            int.TryParse(XPathValue(basePath + "@IndendedTrainingCnt", ref doc), out args.intendedTrainingCnt);
            double.TryParse(XPathValue(basePath + "@LearningRate", ref doc), out args.learningRate);
            double.TryParse(XPathValue(basePath + "@Momentum", ref doc), out args.momentum);
            basePath += "Layers/";

            int.TryParse(XPathValue(basePath + "@Count", ref doc), out layerCnt);
            args.layersData = new object[layerCnt];

            basePath += "Layer[@Index='{0}']/@{1}";
            XmlNodeList layerList = doc.SelectNodes("NeuralNetwork/Layers/Layer");

            for (int i = 0; i < layerCnt; i++)
            {
                XmlNode layerNode = layerList[i];
                switch (layerNode.Attributes["Type"].Value)
                {
                case "RELU":
                    LayerData.RELU reluLayer = new LayerData.RELU();
                    args.layersData[i] = reluLayer;
                    break;

                case "MAXPOOL":
                    LayerData.MaxPool maxpoolLayer = new LayerData.MaxPool();
                    int.TryParse(layerNode.Attributes["Size"].Value, out maxpoolLayer.size);
                    int.TryParse(layerNode.Attributes["Stride"].Value, out maxpoolLayer.stride);
                    args.layersData[i] = maxpoolLayer;
                    break;

                case "FULLYCONNECTED":
                    LayerData.FullyConnected fullyLayer = new LayerData.FullyConnected();
                    int.TryParse(layerNode.Attributes["Neurons"].Value, out fullyLayer.cntNeurons);
                    Enum.TryParse <TransferFuncType>(layerNode.Attributes["NeuronType"].Value, out fullyLayer.tFuncType);
                    args.layersData[i] = fullyLayer;
                    break;

                case "CONVOLUTIONAL":
                    LayerData.Convolutional convolutionalLayer = new LayerData.Convolutional();
                    int.TryParse(layerNode.Attributes["Stride"].Value, out convolutionalLayer.stride);
                    bool.TryParse(layerNode.Attributes["ZeroPadding"].Value, out convolutionalLayer.padding);
                    // Select the filter descriptions directly from the layer node.
                    XmlNodeList filterList = layerNode.SelectNodes("Filters/Filter");
                    convolutionalLayer.filters = new int[filterList.Count];
                    int j = 0;
                    foreach (XmlNode filterNode in filterList)
                    {
                        int.TryParse(filterNode.Attributes["Size"].Value, out convolutionalLayer.filters[j++]);
                    }
                    args.layersData[i] = convolutionalLayer;
                    break;

                default:
                    throw new Exception("Invalid Layer Type Entry Found!!!");
                }
            }

            NeuralNetwork nn = new NeuralNetwork(args.intendedTrainingCnt, 1, args.learningRate, args.momentum, showNNUI, args.layersData);

            int.TryParse(XPathValue("NeuralNetwork/@TrainingDone", ref doc), out nn.trainingCounter);

            nn.RegisterOutput("Loading Neurons");
            basePath = "NeuralNetwork/Neurons/Neuron[@Index='{0}']/@Bias";
            int neuronCnt;

            int.TryParse(XPathValue("NeuralNetwork/Neurons/@Count", ref doc), out neuronCnt);
            for (int i = 0; i < neuronCnt; i++)
            {
                double.TryParse(XPathValue(string.Format(basePath, i.ToString()), ref doc), out nn.neurons[i].bias);
            }
            nn.RegisterOutput("Loading Connections");
            basePath = "NeuralNetwork/Connections/Connection[@Index='{0}']/@{1}";

            XmlNodeList connectionList = doc.SelectNodes("NeuralNetwork/Connections/Connection");

            foreach (XmlNode connection in connectionList)
            {
                int idx, src, dest;
                int.TryParse(connection.Attributes["Index"].Value, out idx);
                int.TryParse(connection.Attributes["Source"].Value, out src);
                int.TryParse(connection.Attributes["Destination"].Value, out dest);
                double.TryParse(connection.Attributes["Weight"].Value, out nn.connections[idx].weight);
                double.TryParse(connection.Attributes["PreviousWeightDelta"].Value, out nn.connections[idx].previousWeightDelta);
                double.TryParse(connection.Attributes["LearningRate"].Value, out nn.connections[idx].learningRate);
                nn.connections[idx].srcDest[src] = dest;
            }
            nn.RegisterOutput("Neural Network : " + XPathValue("NeuralNetwork/@Name", ref doc) + "Loaded Successfully : )");
            return nn;
        }
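        // For reference, Load() expects the document layout produced by Save() below, roughly:
        //
        //     <NeuralNetwork Time="..." IndendedTrainingCnt="..." TrainingDone="..." Name="..."
        //                    LearningRate="..." Momentum="...">
        //          <Layers Count="...">
        //               <Layer Index="0" Type="FULLYCONNECTED" Neurons="..." NeuronType="..." />
        //               <Layer Index="1" Type="CONVOLUTIONAL" Stride="..." ZeroPadding="...">
        //                    <Filters Count="..."><Filter Size="..." /></Filters>
        //               </Layer>
        //               <Layer Index="2" Type="RELU" />
        //               <Layer Index="3" Type="MAXPOOL" Size="..." Stride="..." />
        //          </Layers>
        //          <Neurons Count="..."><Neuron Index="..." Bias="..." /></Neurons>
        //          <Connections Count="...">
        //               <Connection Index="..." Source="..." Destination="..." Weight="..."
        //                           LearningRate="..." PreviousWeightDelta="..." />
        //          </Connections>
        //     </NeuralNetwork>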
        public Layer(Layer previousLayer, object layerData,
                     ref Dictionary <int, Neuron> neurons, ref int neuronCounter,
                     ref Dictionary <int, Connection> connections, ref int connectionCounter,
                     double learningRate,
                     double weightRescaleFactor)
        {
            // Form connections based upon layer type
            if (layerData is LayerData.RELU)
            {
                // One-To-One Connections
                neuronIdxs = new List <int>();
                tFuncType  = TransferFuncType.RECTILINEAR;
                foreach (int prevNeuronIdx in previousLayer.neuronIdxs)
                {
                    Neuron neuron = new Neuron(tFuncType);
                    neuron.biasAllowed = false;
                    Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor, false);
                    connection.weight = 1;
                    neuron.Idx        = neuronCounter;
                    connection.srcDest[prevNeuronIdx] = neuron.Idx;
                    neuron.incomingConnection.Add(connection.Idx);
                    neurons[prevNeuronIdx].outgoingConnection.Add(connection.Idx);
                    neurons[neuronCounter++] = neuron;
                    neuronIdxs.Add(neuron.Idx);
                }
            }
            else if (layerData is LayerData.FullyConnected)
            {
                // Cross Connections
                LayerData.FullyConnected currLayerData = (LayerData.FullyConnected)layerData;
                tFuncType  = currLayerData.tFuncType;
                neuronIdxs = new List <int>();
                for (int i = 0; i < currLayerData.cntNeurons; i++)
                {
                    Neuron neuron = new Neuron(tFuncType, previousLayer, ref neurons, ref neuronCounter, ref connections,
                                               ref connectionCounter, learningRate, weightRescaleFactor);
                    neuronIdxs.Add(neuron.Idx);
                }
            }
            else if (layerData is LayerData.Convolutional)
            {
                LayerData.Convolutional currLayerData = (LayerData.Convolutional)layerData;
                tFuncType  = TransferFuncType.LINEAR;
                neuronIdxs = new List <int>();
                int dimIn = (int)Math.Sqrt(previousLayer.cntNeurons);

                // Form connections for each filter
                foreach (int filter in currLayerData.filters)
                {
                    int filterStartIdx, filterEndIdx;
                    if (currLayerData.padding)
                    {
                        filterStartIdx = -filter / 2;
                        filterEndIdx   = filter / 2;
                    }
                    else
                    {
                        filterStartIdx = 0;
                        filterEndIdx   = filter - 1;
                    }
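                    // Create one Connection per filter tap, with weight fixed at 1 and updates disabled;
                    // the spatial loops below reuse these connections for every output position, so all
                    // outputs of this filter share the same weights (weight sharing).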
                    Dictionary <int, int> filterConnections = new Dictionary <int, int>();
                    for (int k1 = filterStartIdx; k1 <= filterEndIdx; k1++)
                    {
                        for (int k2 = filterStartIdx; k2 <= filterEndIdx; k2++)
                        {
                            Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor);
                            int        hashIdx    = (k1 + filter / 2) * filter + (k2 + filter / 2);
                            connection.weight          = 1;
                            connection.updateAllowed   = false;
                            filterConnections[hashIdx] = connection.Idx;
                        }
                    }
                    // Slide the filter across the input. With zero padding enabled, the filter is centred on every
                    // input position and taps that fall outside the input are skipped (GetIndex returns -1), which
                    // is equivalent to padding with zeros. Without padding, the loop stops once the filter no
                    // longer fits inside the input.
                    for (int i = 0; i + (currLayerData.padding ? 0 : filter - 1) < dimIn; i += currLayerData.stride)
                    {
                        for (int j = 0; j + (currLayerData.padding ? 0 : filter - 1) < dimIn; j += currLayerData.stride)
                        {
                            Neuron neuron = new Neuron(tFuncType);
                            neuron.Idx               = neuronCounter;
                            neuron.biasAllowed       = false;
                            neurons[neuronCounter++] = neuron;
                            neuronIdxs.Add(neuron.Idx);
                            for (int k1 = filterStartIdx; k1 <= filterEndIdx; k1++)
                            {
                                for (int k2 = filterStartIdx; k2 <= filterEndIdx; k2++)
                                {
                                    int idx = GetIndex(i + k1, j + k2, dimIn, previousLayer);
                                    if (idx == -1)
                                    {
                                        continue;
                                    }
                                    int hashIdx = (k1 + filter / 2) * filter + (k2 + filter / 2);
                                    int cIdx    = filterConnections[hashIdx];
                                    connections[cIdx].srcDest[idx] = neuron.Idx;
                                    neurons[idx].outgoingConnection.Add(cIdx);
                                    neurons[neuron.Idx].incomingConnection.Add(cIdx);
                                }
                            }
                        }
                    }
                }
            }
            else if (layerData is LayerData.MaxPool)
            {
                LayerData.MaxPool currLayerData = (LayerData.MaxPool)layerData;
                this.tFuncType = TransferFuncType.MAXPOOL;
                neuronIdxs     = new List <int>();
                int dimIn = (int)Math.Sqrt(previousLayer.cntNeurons);

                // Slide the pooling window across the input; window positions that extend past the input
                // edge are handled by GetIndex returning -1, so those taps are simply skipped.
                for (int i = 0; i < dimIn; i += currLayerData.stride)
                {
                    for (int j = 0; j < dimIn; j += currLayerData.stride)
                    {
                        Neuron neuron = new Neuron(tFuncType);
                        neuron.biasAllowed       = false;
                        neuron.Idx               = neuronCounter;
                        neurons[neuronCounter++] = neuron;
                        neuronIdxs.Add(neuron.Idx);

                        Dictionary <int, int> filterConnections = new Dictionary <int, int>();
                        for (int k1 = 0; k1 < currLayerData.size; k1++)
                        {
                            for (int k2 = 0; k2 < currLayerData.size; k2++)
                            {
                                Connection connection = new Connection(ref connections, ref connectionCounter, learningRate, weightRescaleFactor, false);
                                connection.weight = 1;
                                int hashIdx = k1 * currLayerData.size + k2;
                                filterConnections[hashIdx] = connection.Idx;
                            }
                        }

                        // Form new connections
                        for (int k1 = 0; k1 < currLayerData.size; k1++)
                        {
                            for (int k2 = 0; k2 < currLayerData.size; k2++)
                            {
                                int idx = GetIndex(i + k1, j + k2, dimIn, previousLayer);
                                if (idx == -1)
                                {
                                    continue;
                                }
                                int hashIdx = k1 * currLayerData.size + k2;
                                int cIdx    = filterConnections[hashIdx];
                                connections[cIdx].srcDest[idx] = neuron.Idx;
                                neurons[idx].outgoingConnection.Add(cIdx);
                                neurons[neuron.Idx].incomingConnection.Add(cIdx);
                            }
                        }
                    }
                }
            }
            else
            {
                throw new Exception("Invalid LayerConnectionStyle given to Layer.FormConnections  !!!!");
            }
            cntNeurons = neuronIdxs.Count;
        }
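        // Worked example of the shared-weight indexing used above: for a 3x3 filter, filter / 2 == 1, so a tap at
        // offset (k1, k2) hashes to (k1 + 1) * 3 + (k2 + 1). With zero padding, k1 and k2 run over {-1, 0, 1} and
        // the keys are 0..8; without padding they run over {0, 1, 2} and the keys are 4..12. Either way each tap
        // gets a unique key, and the single Connection stored under that key is looked up again for every output
        // position, which is what gives the filter its shared weights.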
        public void Save(string filePath, string networkName)
        {
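            // Serialises the network to XML in the layout shown above Load(): the layer descriptions first,
            // then per-neuron biases, then one <Connection> element per source-destination pair.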
            XmlWriter         writer;
            XmlWriterSettings writerSettings = new XmlWriterSettings
            {
                Indent              = true,
                IndentChars         = "     ",
                NewLineOnAttributes = false,
                OmitXmlDeclaration  = true
            };

            try
            {
                writer = XmlWriter.Create(filePath, writerSettings);
            }
            catch
            {
                throw new Exception("Invalid filepath given to NeuralNetwork.Save() !");
            }

            writer.WriteStartElement("NeuralNetwork");

            writer.WriteAttributeString("Time", DateTime.Now.ToString());
            writer.WriteAttributeString("IndendedTrainingCnt", args.intendedTrainingCnt.ToString());
            writer.WriteAttributeString("TrainingDone", trainingCounter.ToString());
            writer.WriteAttributeString("Name", networkName);
            writer.WriteAttributeString("LearningRate", args.learningRate.ToString());
            writer.WriteAttributeString("Momentum", args.momentum.ToString());

            writer.WriteStartElement("Layers");
            writer.WriteAttributeString("Count", args.layersData.Length.ToString());
            for (int i = 0; i < args.layersData.Length; i++)
            {
                object layerData = args.layersData[i];
                writer.WriteStartElement("Layer");
                writer.WriteAttributeString("Index", i.ToString());
                if (layerData is LayerData.RELU)
                {
                    writer.WriteAttributeString("Type", "RELU");
                }
                else if (layerData is LayerData.MaxPool)
                {
                    writer.WriteAttributeString("Type", "MAXPOOL");
                    writer.WriteAttributeString("Size", ((LayerData.MaxPool)layerData).size.ToString());
                    writer.WriteAttributeString("Stride", ((LayerData.MaxPool)layerData).stride.ToString());
                }
                else if (layerData is LayerData.FullyConnected)
                {
                    LayerData.FullyConnected currLayerData = (LayerData.FullyConnected)layerData;
                    writer.WriteAttributeString("Type", "FULLYCONNECTED");
                    writer.WriteAttributeString("Neurons", currLayerData.cntNeurons.ToString());
                    writer.WriteAttributeString("NeuronType", currLayerData.tFuncType.ToString());
                }
                else if (layerData is LayerData.Convolutional)
                {
                    LayerData.Convolutional currLayerData = (LayerData.Convolutional)layerData;
                    writer.WriteAttributeString("Type", "CONVOLUTIONAL");
                    writer.WriteAttributeString("Stride", currLayerData.stride.ToString());
                    writer.WriteAttributeString("ZeroPadding", currLayerData.padding.ToString());
                    writer.WriteStartElement("Filters");
                    writer.WriteAttributeString("Count", currLayerData.filters.Length.ToString());
                    foreach (int filter in currLayerData.filters)
                    {
                        writer.WriteStartElement("Filter");
                        writer.WriteAttributeString("Size", filter.ToString());
                        writer.WriteEndElement();
                    }
                    writer.WriteEndElement();
                }
                writer.WriteEndElement();
            }
            writer.WriteEndElement(); // Layers

            writer.WriteStartElement("Neurons");
            writer.WriteAttributeString("Count", neurons.Values.Count.ToString());
            foreach (Neuron neuron in neurons.Values)
            {
                writer.WriteStartElement("Neuron");
                writer.WriteAttributeString("Index", neuron.Idx.ToString());
                writer.WriteAttributeString("Bias", neuron.bias.ToString());
                writer.WriteEndElement();
            }
            writer.WriteEndElement(); // Neurons

            writer.WriteStartElement("Connections");
            writer.WriteAttributeString("Count", connections.Values.Count.ToString());
            foreach (Connection connection in connections.Values)
            {
                foreach (int src in connection.srcDest.Keys)
                {
                    writer.WriteStartElement("Connection");
                    writer.WriteAttributeString("Index", connection.Idx.ToString());
                    writer.WriteAttributeString("Source", src.ToString());
                    writer.WriteAttributeString("Destination", connection.srcDest[src].ToString());
                    writer.WriteAttributeString("Weight", connection.weight.ToString());
                    writer.WriteAttributeString("LearningRate", connection.learningRate.ToString());
                    writer.WriteAttributeString("PreviousWeightDelta", connection.previousWeightDelta.ToString());
                    writer.WriteEndElement();
                }
            }
            writer.WriteEndElement(); // Connections

            writer.WriteEndElement(); // NeuralNetwork

            writer.Flush();
            writer.Close();

            this.RegisterOutput("Saved Neural Network : " + networkName);
        }
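        // Round-trip sketch; the file and network names are placeholders and "network" is assumed to be an
        // existing NeuralNetwork instance:
        //
        //     network.Save("digits.xml", "DigitClassifier");
        //     NeuralNetwork restored = NeuralNetwork.Load("digits.xml", showNNUI: false);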