private void LayerTest(NNLayer layer, int neuronsCount, int weightsCount, int connectionsCount)
{
    Assert.AreEqual(neuronsCount, layer.Neurons.Count);
    Assert.AreEqual(weightsCount, layer.Weights.Count);
    foreach (NNNeuron n in layer.Neurons)
    {
        Assert.AreEqual(connectionsCount, n.Connections.Count);
    }
}
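// A hypothetical call of the helper above, with counts matching the first
// convolutional layer built in the last example below (1014 neurons,
// 156 shared weights, 1 bias + 25 kernel taps = 26 connections per neuron):
//
//     LayerTest(net.Layers[1], 1014, 156, 26);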
        public NeuralNetwork Read()
        {
            NeuralNetwork net;

            using (BinaryReader reader = new BinaryReader(new FileStream(fileName, FileMode.Open)))
            {
                reader.BaseStream.Seek(0x18600, SeekOrigin.Begin);   // jump to the fixed offset where the serialized network begins

                double learningRate = reader.ReadDouble();
                net = new NeuralNetwork(learningRate);

                int     layersCount = reader.ReadInt32();
                NNLayer layer       = null;
                for (int i = 0; i < layersCount; i++)
                {
                    string layerName = ReadString(reader);   // the stored layer name is read to advance the stream, but not used
                    layer = new NNLayer(layer);
                    net.Layers.Add(layer);

                    int neuronsCount = reader.ReadInt32();
                    int weightsCount = reader.ReadInt32();

                    for (int j = 0; j < neuronsCount; j++)
                    {
                        string   neuronName = ReadString(reader);   // read and discard the stored neuron name
                        NNNeuron neuron     = new NNNeuron();
                        layer.Neurons.Add(neuron);

                        int connectionsCount = reader.ReadInt32();

                        for (int k = 0; k < connectionsCount; k++)
                        {
                            uint         neuronIndex = reader.ReadUInt32();
                            uint         weightIndex = reader.ReadUInt32();
                            NNConnection connection  = new NNConnection(neuronIndex, weightIndex);
                            neuron.Connections.Add(connection);
                        }
                    }

                    for (int j = 0; j < weightsCount; j++)
                    {
                        string weightName = ReadString(reader);   // read and discard the stored weight name
                        double value      = reader.ReadDouble();

                        NNWeight weight = new NNWeight {
                            Value = value
                        };
                        layer.Weights.Add(weight);
                    }
                }
            }

            return(net);
        }
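Read() depends on a ReadString helper that is not shown in this example. A minimal sketch, assuming each string is stored as a 32-bit length prefix followed by that many ASCII bytes (the actual on-disk format may differ), could look like this:

private string ReadString(BinaryReader reader)
{
    // assumed layout: Int32 length, then 'length' ASCII bytes
    int length = reader.ReadInt32();
    byte[] bytes = reader.ReadBytes(length);
    return System.Text.Encoding.ASCII.GetString(bytes);
}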
Example #3
    // Use this for initialization
    void Start()
    {
        Activate = test;

        NNlayers = new NNLayer[Layers.Length];

        int inputsNum = InputsNum;

        for (int i = 0; i < NNlayers.Length; i++)
        {
            NNlayers [i] = new NNLayer(inputsNum, Layers [i]);
            inputsNum    = Layers [i];
        }
    }
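Each NNLayer here is constructed with the pair (inputs, neurons): the first layer receives InputsNum inputs and every subsequent layer takes the previous layer's neuron count as its input size. A purely illustrative configuration (values invented, assuming Layers holds the neuron count of each layer):

    // with InputsNum = 4 and Layers = new int[] { 8, 6, 2 }
    // the loop creates: new NNLayer(4, 8), new NNLayer(8, 6), new NNLayer(6, 2)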
Example #4
        /// <summary>
        /// Parses the network string and creates the network parameters arranged into network layers.
        /// </summary>
        /// <param name="strNetwork"></param>
        /// <returns></returns>
        public static List <NNLayer> CreateNetworkParameters(string strNetwork)
        {
            try
            {
                //
                var layers = new List <NNLayer>();
                //split the network string into individual layer descriptions
                var strParameters = strNetwork.Split(m_cntkSpearator, StringSplitOptions.RemoveEmptyEntries);

                //in case of custom network model
                if (strParameters.Length == 1 && (strParameters[0].Contains("Custom") || strParameters[0].Contains("custom")))
                {
                    var l = new NNLayer()
                    {
                        Id = 1, Type = LayerType.Custom, Name = "Custom Implemented Network",
                    };
                    layers.Add(l);
                    return(layers);
                }

                for (int i = 0; i < strParameters.Length; i++)
                {
                    //
                    var strLayerValues = strParameters[i];
                    var ind            = strLayerValues.IndexOf(":");
                    var layer          = strLayerValues.Substring(ind + 1);
                    var values         = layer.Split(m_cntkSpearator2, StringSplitOptions.RemoveEmptyEntries);

                    //create layer
                    var l = new NNLayer();
                    l.Type              = (LayerType)Enum.Parse(typeof(LayerType), values[0], true);
                    l.Name              = $"{values[0]} Layer";
                    l.HDimension        = int.Parse(values[1].Trim(' '));
                    l.CDimension        = int.Parse(values[2].Trim(' '));
                    l.Value             = int.Parse(values[3].Trim(' '));
                    l.Activation        = (Activation)Enum.Parse(typeof(Activation), values[4], true);
                    l.SelfStabilization = values[5] == "1";
                    l.Peephole          = values[6] == "1";
                    l.UseActivation     = l.Type != LayerType.Embedding;
                    layers.Add(l);
                }

                return(layers);
            }
            catch (Exception)
            {
                throw;
            }
        }
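Each layer is expected to be encoded as seven space-separated fields after a colon: layer type, HDimension, CDimension, Value, activation, self-stabilization flag and peephole flag; the text before the colon is ignored by the parser. A purely illustrative input (the actual separators m_cntkSpearator and m_cntkSpearator2 are defined elsewhere in the class; '|' and ' ' are assumed here):

    // strNetwork = "|L1:LSTM 128 1 0 TanH 1 1 |L2:Dense 10 1 0 None 0 0"
    // would yield two NNLayer objects:
    //   Type=LSTM,  HDimension=128, CDimension=1, Value=0, Activation=TanH, SelfStabilization=true,  Peephole=true
    //   Type=Dense, HDimension=10,  CDimension=1, Value=0, Activation=None, SelfStabilization=false, Peephole=false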
Example #5
        private void onAddLayer(object sender, ExecutedRoutedEventArgs e)
        {
            try
            {
                //
                var model = ActiveViewModel as MLConfigController;
                //a layer type must be selected in the combo box first
                if (e.Parameter == null)
                {
                    MessageBox.Show("Please select Layer from combo box first!", "ANNdotNET");
                    return;
                }
                var layer = ((ComboBoxItem)e.Parameter).Content as string;
                //check if the network configuration is Custom; custom models cannot be edited
                if (model.Network.Any(x => x.Type == LayerType.Custom))
                {
                    MessageBox.Show("Custom configuration is not allowed to be edited.", "ANNdotNET");
                    return;
                }

                //create layer
                var itm = new NNLayer();
                itm.UseActivation = true;
                itm.Activation    = Activation.None;
                if (layer == "Normalization Layer")
                {
                    itm.Type = LayerType.Normalization;
                }
                else if (layer == "Dense Layer")
                {
                    itm.Type = LayerType.Dense;
                }
                else if (layer == "LSTM Layer")
                {
                    itm.Activation = Activation.TanH;
                    itm.Type       = LayerType.LSTM;
                }
                else if (layer == "Embedding Layer")
                {
                    itm.Type          = LayerType.Embedding;
                    itm.UseActivation = false;
                }
                else if (layer == "Drop Layer")
                {
                    itm.Type = LayerType.Drop;
                }
                else
                {
                    throw new Exception("Unsupported Layer!");
                }
                itm.Name = layer;
                //the normalization layer must be in the first position
                if (itm.Type == LayerType.Normalization)
                {
                    if (model.Network.Where(x => x.Type == LayerType.Normalization).Count() == 0)
                    {
                        model.Network.Insert(0, itm);
                    }
                    else
                    {
                        MessageBox.Show("Only one normalization layer is allowed.");
                    }
                }
                else if (itm.Type == LayerType.LSTM && model.Network.Where(x => x.Type == LayerType.LSTM).Count() > 0)
                {
                    var lastLSTM = model.Network.Where(x => x.Type == LayerType.LSTM).Last();
                    var index    = model.Network.IndexOf(lastLSTM);
                    model.Network.Insert(index + 1, itm);
                }
                else
                {
                    model.Network.Add(itm);
                }
            }
            catch (Exception ex)
            {
                ReportException(ex);
            }
        }
Example #6
        private void onInsertLayer(object sender, ExecutedRoutedEventArgs e)
        {
            try
            {
                //
                if (e.Parameter == null)
                {
                    MessageBox.Show("Please select Layer from combo box first!", "ANNdotNET");
                    return;
                }
                //

                var model = ActiveViewModel as MLConfigController;
                var layer = ((ComboBoxItem)e.Parameter).Content as string;
                //check if the network configuration is Custom; custom models cannot be edited
                if (model.Network.Any(x => x.Type == LayerType.Custom))
                {
                    MessageBox.Show("Custom configuration is not allowed to be edited.", "ANNdotNET");
                    return;
                }

                //create layer
                var itm = new NNLayer();
                itm.UseFParam = true;
                itm.FParam    = Activation.None;
                if (layer == "Normalization")
                {
                    itm.Type = LayerType.Normalization;
                }
                else if (layer == "Dense")
                {
                    itm.Type = LayerType.Dense;
                }
                else if (layer == "Scale")
                {
                    itm.Type = LayerType.Scale;
                }
                else if (layer == "Conv1D")
                {
                    itm.Type = LayerType.Conv1D;
                }
                else if (layer == "Conv2D")
                {
                    itm.Type = LayerType.Conv2D;
                }
                else if (layer == "Pooling1D")
                {
                    itm.Type = LayerType.Pooling1D;
                }
                else if (layer == "Pooling2D")
                {
                    itm.Type = LayerType.Pooling2D;
                }
                else if (layer == "LSTM")
                {
                    itm.FParam = Activation.TanH;
                    itm.Type   = LayerType.LSTM;
                }
                else if (layer == "Embedding")
                {
                    itm.Type      = LayerType.Embedding;
                    itm.UseFParam = false;
                }
                else if (layer == "NALU")
                {
                    itm.Type = LayerType.NALU;
                }
                else if (layer == "DropOut")
                {
                    itm.Type = LayerType.Drop;
                }
                else if (layer == "CudaStackedLSTM")
                {
                    itm.Type = LayerType.CudaStackedLSTM;
                }
                else if (layer == "CudaStackedGRU")
                {
                    itm.Type = LayerType.CudaStackedGRU;
                }
                else
                {
                    throw new Exception("Unsupported Layer!");
                }
                itm.Name = layer;
                //the normalization layer must be in the first position
                if (itm.Type == LayerType.Normalization)
                {
                    if (model.Network.Where(x => x.Type == LayerType.Normalization).Count() == 0)
                    {
                        model.Network.Insert(0, itm);
                    }
                    else
                    {
                        MessageBox.Show("Only one normalization layer is allowed.");
                    }
                }
                //the Scale layer must also be in the first position
                else if (itm.Type == LayerType.Scale)
                {
                    itm.Param1 = 1;
                    itm.Param2 = 255;
                    if (model.Network.Where(x => x.Type == LayerType.Scale).Count() == 0)
                    {
                        model.Network.Insert(0, itm);
                    }
                    else
                    {
                        MessageBox.Show("Only one Scale layer is allowed.");
                    }
                }
                else if (itm.Type == LayerType.LSTM && model.Network.Any(x => x.Type == LayerType.LSTM))
                {
                    itm.Param1 = 5;
                    itm.Param2 = 5;
                    if (model.Network.Any(x => x.Name.StartsWith("CudaStacked")))
                    {
                        MessageBox.Show("CudaStacked like layers cannot be mixed with pure LSTM or GRU layers.");
                        return;
                    }
                    var lastLSTM = model.Network.Where(x => x.Type == LayerType.LSTM).Last();
                    var index    = model.Network.IndexOf(lastLSTM);
                    model.Network.Insert(index + 1, itm);
                }
                else if (itm.Type == LayerType.LSTM)
                {
                    itm.Param1 = 5;
                    itm.Param2 = 5;
                    if (model.Network.Any(x => x.Name.StartsWith("CudaStacked")))
                    {
                        MessageBox.Show("CudaStacked like layers cannot be mixed with pure LSTM or GRU layers.");
                        return;
                    }
                    if (model.SelectedIndex < 0)
                    {
                        model.Network.Add(itm);
                    }
                    else
                    {
                        model.Network.Insert(model.SelectedIndex, itm);
                    }
                }
                else if (layer.StartsWith("CudaStacked"))
                {
                    itm.Param1 = 1;
                    itm.Param2 = 5;
                    if (model.Network.Any(x => x.Name.StartsWith("LSTM")) || model.Network.Any(x => x.Name.StartsWith("GRU")))
                    {
                        MessageBox.Show("CudaStacked like layers cannot be mixed with pure LSTM or GRU layers.");
                        return;
                    }
                    if (model.Network.Any(x => x.Name.StartsWith("CudaStacked")))
                    {
                        MessageBox.Show($"Only one 'CudaStacked*' based layer can be in the network.");
                        return;
                    }
                    //
                    if (model.SelectedIndex < 0)
                    {
                        model.Network.Add(itm);
                    }
                    else
                    {
                        model.Network.Insert(model.SelectedIndex, itm);
                    }
                }
                else
                {
                    itm.Param1 = 5;
                    itm.Param2 = 5;

                    if (model.SelectedIndex < 0)
                    {
                        model.Network.Add(itm);
                    }
                    else
                    {
                        model.Network.Insert(model.SelectedIndex, itm);
                    }
                }
            }
            catch (Exception ex)
            {
                ReportException(ex);
            }
        }
Example #7
        /////////////////////////
        private bool CreateNNNetWork(NeuralNetwork network)
        {
            NNLayer pLayer;

            int    ii, jj, kk;
            int    icNeurons = 0;
            int    icWeights = 0;
            double initWeight;
            String sLabel;
            var    m_rdm = new Random();

            // layer zero, the input layer.
            // Create neurons: exactly the same number of neurons as the input
            // vector of 29x29=841 pixels, and no weights/connections

            pLayer = new NNLayer("Layer00", null);
            network.m_Layers.Add(pLayer);

            for (ii = 0; ii < 841; ii++)
            {
                sLabel = String.Format("Layer00_Neuro{0}_Num{1}", ii, icNeurons);
                pLayer.m_Neurons.Add(new NNNeuron(sLabel));
                icNeurons++;
            }

            //double UNIFORM_PLUS_MINUS_ONE= (double)(2.0 * m_rdm.Next())/Constants.RAND_MAX - 1.0 ;

            // layer one:
            // This layer is a convolutional layer that has 6 feature maps.  Each feature
            // map is 13x13, and each unit in the feature maps is a 5x5 convolutional kernel
            // of the input layer.
            // So, there are 13x13x6 = 1014 neurons, (5x5+1)x6 = 156 weights

            pLayer = new NNLayer("Layer01", pLayer);
            network.m_Layers.Add(pLayer);

            for (ii = 0; ii < 1014; ii++)
            {
                sLabel = String.Format("Layer01_Neuron{0}_Num{1}", ii, icNeurons);
                pLayer.m_Neurons.Add(new NNNeuron(sLabel));
                icNeurons++;
            }

            for (ii = 0; ii < 156; ii++)
            {
                sLabel     = String.Format("Layer01_Weigh{0}_Num{1}", ii, icWeights);
                initWeight = 0.05 * (2.0 * m_rdm.NextDouble() - 1.0);
                pLayer.m_Weights.Add(new NNWeight(sLabel, initWeight));
            }

            // interconnections with previous layer: this is difficult
            // The previous layer is a top-down bitmap image that has been padded to size 29x29.
            // Each neuron in this layer is connected to a 5x5 kernel of that input, and each
            // feature map is itself a top-down bitmap of size 13x13.  We move the kernel by
            // TWO pixels, i.e., we skip every other pixel in the input image

            int[] kernelTemplate = new int[25] {
                0, 1, 2, 3, 4,
                29, 30, 31, 32, 33,
                58, 59, 60, 61, 62,
                87, 88, 89, 90, 91,
                116, 117, 118, 119, 120
            };
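
            // Worked example of the index arithmetic used below (descriptive only):
            // kernelTemplate[k] = 29*row + col for row, col in 0..4, so the template covers a
            // 5x5 patch of the 29x29 input.  A neuron at (ii, jj) of a feature map reads source
            // pixel 2*jj + 58*ii + kernelTemplate[k], i.e. the patch whose top-left corner sits
            // at input row 2*ii, column 2*jj (stride of two in both directions).  For ii = 3,
            // jj = 7 the connections cover input rows 6..10 and columns 14..18; the largest
            // possible index is 2*12 + 58*12 + 120 = 840, the last of the 841 input neurons.
            // Each neuron also gets one bias connection, and all 169 neurons of a feature map
            // share the same block of 26 weights starting at iNumWeight = fm * 26.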

            int iNumWeight;

            int fm;

            for (fm = 0; fm < 6; fm++)
            {
                for (ii = 0; ii < 13; ii++)
                {
                    for (jj = 0; jj < 13; jj++)
                    {
                        iNumWeight = fm * 26;                  // 26 is the number of weights per feature map
                        NNNeuron n = pLayer.m_Neurons[jj + ii * 13 + fm * 169];

                        n.AddConnection((uint)MyDefinations.ULONG_MAX, (uint)iNumWeight++);                    // bias weight

                        for (kk = 0; kk < 25; kk++)
                        {
                            // note: max val of index == 840, corresponding to 841 neurons in prev layer
                            n.AddConnection((uint)(2 * jj + 58 * ii + kernelTemplate[kk]), (uint)iNumWeight++);
                        }
                    }
                }
            }


            // layer two:
            // This layer is a convolutional layer that has 50 feature maps.  Each feature
            // map is 5x5, and each unit in the feature maps is a 5x5 convolutional kernel
            // of corresponding areas of all 6 of the previous layers, each of which is a 13x13 feature map
            // So, there are 5x5x50 = 1250 neurons, (5x5+1)x6x50 = 7800 weights

            pLayer = new NNLayer("Layer02", pLayer);
            network.m_Layers.Add(pLayer);

            for (ii = 0; ii < 1250; ii++)
            {
                sLabel = String.Format("Layer02_Neuron{0}_Num{1}", ii, icNeurons);
                pLayer.m_Neurons.Add(new NNNeuron(sLabel));
                icNeurons++;
            }

            for (ii = 0; ii < 7800; ii++)
            {
                sLabel     = String.Format("Layer02_Weight{0}_Num{1}", ii, icWeights);
                initWeight = 0.05 * (2.0 * m_rdm.NextDouble() - 1.0);
                pLayer.m_Weights.Add(new NNWeight(sLabel, initWeight));
            }

            // Interconnections with previous layer: this is difficult
            // Each feature map in the previous layer is a top-down bitmap image whose size
            // is 13x13, and there are 6 such feature maps.  Each neuron in one 5x5 feature map of this
            // layer is connected to a 5x5 kernel positioned correspondingly in all 6 parent
            // feature maps, and there are individual weights for the six different 5x5 kernels.  As
            // before, we move the kernel by TWO pixels, i.e., we
            // skip every other pixel in the input image.  The result is 50 different 5x5 top-down bitmap
            // feature maps

            int[] kernelTemplate2 = new int[25] {
                0, 1, 2, 3, 4,
                13, 14, 15, 16, 17,
                26, 27, 28, 29, 30,
                39, 40, 41, 42, 43,
                52, 53, 54, 55, 56
            };
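
            // Descriptive note on the wiring below: kernelTemplate2[k] = 13*row + col, so it
            // addresses a 5x5 patch of one 13x13 parent feature map.  The six AddConnection
            // calls per template entry use offsets 0, 169, 338, 507, 676 and 845, i.e. the
            // start indices of the six parent feature maps (169 = 13x13 neurons each).  Each
            // neuron therefore receives 1 bias + 6*25 = 151 connections, drawing on the block
            // of 156 weights reserved for its feature map (iNumWeight = fm * 156).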


            for (fm = 0; fm < 50; fm++)
            {
                for (ii = 0; ii < 5; ii++)
                {
                    for (jj = 0; jj < 5; jj++)
                    {
                        iNumWeight = fm * 156;                  // 156 weights are reserved per feature map
                        NNNeuron n = pLayer.m_Neurons[jj + ii * 5 + fm * 25];

                        n.AddConnection((uint)MyDefinations.ULONG_MAX, (uint)iNumWeight++);                    // bias weight

                        for (kk = 0; kk < 25; kk++)
                        {
                            // note: max val of index == 1013, corresponding to 1014 neurons in prev layer
                            n.AddConnection((uint)(2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(169 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(338 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(507 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(676 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                            n.AddConnection((uint)(845 + 2 * jj + 26 * ii + kernelTemplate2[kk]), (uint)iNumWeight++);
                        }
                    }
                }
            }


            // layer three:
            // This layer is a fully-connected layer with 100 units.  Since it is fully-connected,
            // each of the 100 neurons in the layer is connected to all 1250 neurons in
            // the previous layer.
            // So, there are 100 neurons and 100*(1250+1)=125100 weights

            pLayer = new NNLayer("Layer03", pLayer);
            network.m_Layers.Add(pLayer);

            for (ii = 0; ii < 100; ii++)
            {
                sLabel = String.Format("Layer03_Neuron{0}_Num{1}", ii, icNeurons);
                pLayer.m_Neurons.Add(new NNNeuron(sLabel));
                icNeurons++;
            }

            for (ii = 0; ii < 125100; ii++)
            {
                sLabel     = String.Format("Layer03_Weight{0}_Num{1}", ii, icWeights);
                initWeight = 0.05 * (2.0 * m_rdm.NextDouble() - 1.0);
                pLayer.m_Weights.Add(new NNWeight(sLabel, initWeight));
            }

            // Interconnections with previous layer: fully-connected

            iNumWeight = 0;      // weights are not shared in this layer

            for (fm = 0; fm < 100; fm++)
            {
                NNNeuron n = pLayer.m_Neurons[fm];
                n.AddConnection((uint)MyDefinations.ULONG_MAX, (uint)iNumWeight++);             // bias weight

                for (ii = 0; ii < 1250; ii++)
                {
                    n.AddConnection((uint)ii, (uint)iNumWeight++);
                }
            }



            // layer four, the final (output) layer:
            // This layer is a fully-connected layer with 10 units.  Since it is fully-connected,
            // each of the 10 neurons in the layer is connected to all 100 neurons in
            // the previous layer.
            // So, there are 10 neurons and 10*(100+1)=1010 weights

            pLayer = new NNLayer("Layer04", pLayer);
            network.m_Layers.Add(pLayer);

            for (ii = 0; ii < 10; ii++)
            {
                sLabel = String.Format("Layer04_Neuron{0}_Num{1}", ii, icNeurons);
                pLayer.m_Neurons.Add(new NNNeuron(sLabel));
                icNeurons++;
            }

            for (ii = 0; ii < 1010; ii++)
            {
                sLabel     = String.Format("Layer04_Weight{0}_Num{1}", ii, icWeights);
                initWeight = 0.05 * (2.0 * m_rdm.NextDouble() - 1.0);
                pLayer.m_Weights.Add(new NNWeight(sLabel, initWeight));
            }

            // Interconnections with previous layer: fully-connected

            iNumWeight = 0;      // weights are not shared in this layer

            for (fm = 0; fm < 10; fm++)
            {
                var n = pLayer.m_Neurons[fm];
                n.AddConnection((uint)MyDefinations.ULONG_MAX, (uint)iNumWeight++);           // bias weight

                for (ii = 0; ii < 100; ii++)
                {
                    n.AddConnection((uint)ii, (uint)iNumWeight++);
                }
            }

            return(true);
        }
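
A hypothetical sanity check tying this builder back to the LayerTest helper from the first snippet (an untested sketch; the helper and the builder come from different examples and may target slightly different NNLayer variants, e.g. Neurons vs. m_Neurons; layer00..layer04 are placeholder names for the five layers created above):

    // LayerTest(layer00, 841, 0, 0);          // input layer: 29x29 pixels, no weights
    // LayerTest(layer01, 1014, 156, 26);      // conv layer 1: 1 bias + 25 kernel taps per neuron
    // LayerTest(layer02, 1250, 7800, 151);    // conv layer 2: 1 bias + 6*25 taps per neuron
    // LayerTest(layer03, 100, 125100, 1251);  // fully connected: 1 bias + 1250 inputs per neuron
    // LayerTest(layer04, 10, 1010, 101);      // output layer: 1 bias + 100 inputs per neuron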