Example #1
        private void RefreshListBoxes(NN desired)
        {
            LayerTypes  = new List<string>();
            LayerCounts = new List<int>();
            LayerLB.Items.Clear();

            foreach (iLayer l in desired.Layers)
            {
                //Record the layer's type code, display name, and size
                string name = null;
                int    len  = l.Length;
                if (l is FullyConnectedLayer)
                {
                    LayerTypes.Add("f"); name = "Fully Connected";
                }
                else if (l is ConvolutionLayer)
                {
                    LayerTypes.Add("c"); name = "Convolution"; len = (l as ConvolutionLayer).KernelSize;
                }
                else if (l is PoolingLayer)
                {
                    LayerTypes.Add("p"); name = "Pooling"; len = (l as PoolingLayer).PoolSize;
                }
                LayerCounts.Add(len);
                LayerLB.Items.Add("[" + (LayerCounts.Count - 1).ToString() + "] " + name + ", " + len.ToString());
            }
        }
Example #2
        private void DefaultBtn_Click(object sender, EventArgs e)
        {
            LayerTypes  = DefaultTypes();
            LayerCounts = DefaultCounts();
            NN newnn = ResetNN();

            RefreshListBoxes(newnn);
        }
Example #3
        private void Button3_Click(object sender, EventArgs e)
        {
            //Refuse to reset while a run is in progress
            if (Run)
            {
                MessageBox.Show("Cannot reset while running"); return;
            }
            nn = ResetNN();
        }
Example #4
        private NN ResetNN()
        {
            NN nn = new NN();

            //Fall back to the default architecture if none is defined
            if (LayerTypes is null || LayerTypes.Count == 0)
            {
                LayerTypes  = DefaultTypes();
                LayerCounts = DefaultCounts();
            }
            nn.Init(GenerateLayers());
            //Persist the freshly initialized weights and biases
            IO.Write(nn);
            return nn;
        }
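Note: DefaultTypes() and DefaultCounts() are referenced above but not shown in these examples. As a purely hypothetical sketch of the convention the other snippets rely on, they would simply return parallel lists of one-letter type codes ("f"/"c"/"p", the same codes RefreshListBoxes and IO use) and the matching sizes; the concrete values below are made up for illustration.

        //Hypothetical sketch only; the real defaults live elsewhere in the project
        private List<string> DefaultTypes()
        {
            return new List<string> { "c", "p", "f" };   //convolution, pooling, fully connected
        }

        private List<int> DefaultCounts()
        {
            return new List<int> { 5, 2, 10 };           //kernel size, pool size, neuron count
        }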
Example #5
        /// <summary>
        /// Returns an NN read from the file at WBPath
        /// </summary>
        /// <returns>The deserialized NN</returns>
        public static NN Read()
        {
            NN nn = new NN();

            nn.Layers = new List<iLayer>();
            string[] text;
            using (StreamReader sr = File.OpenText(WBPath))
            {
                text = sr.ReadToEnd().Split(',');
            }
            nn.NumLayers = int.Parse(text[0]);
            int iterator = 1;

            for (int i = 0; i < nn.NumLayers; i++)
            {
                string type = text[iterator]; iterator++;
                //Pooling layer has no weights/biases
                if (type == "p")
                {
                    nn.Layers.Add(new PoolingLayer(int.Parse(text[iterator]), int.Parse(text[iterator + 1])));
                    iterator += 2; continue;
                }

                int LayerCount      = int.Parse(text[iterator]); iterator++;
                int InputLayerCount = int.Parse(text[iterator]); iterator++;

                if (type == "f")
                {
                    nn.Layers.Add(new FullyConnectedLayer(LayerCount, InputLayerCount));
                }
                if (type == "c")
                {
                    nn.Layers.Add(new ConvolutionLayer((int)Math.Sqrt(LayerCount), InputLayerCount));
                }

                //Weights are stored row by row, with a bias after each row for
                //every fully connected layer except the output layer
                for (int j = 0; j < nn.Layers[i].Weights.GetLength(0); j++)
                {
                    for (int jj = 0; jj < nn.Layers[i].Weights.GetLength(1); jj++)
                    {
                        nn.Layers[i].Weights[j, jj] = double.Parse(text[iterator]); iterator++;
                    }
                    if (i != nn.NumLayers - 1 && nn.Layers[i] is FullyConnectedLayer)
                    {
                        (nn.Layers[i] as FullyConnectedLayer).Biases[j] = double.Parse(text[iterator]); iterator++;
                    }
                }
            }
            return nn;
        }
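A minimal usage sketch for Read (it mirrors the Form1 constructor in Example #6 below): since Read throws when the file at WBPath is missing or malformed, callers wrap it in a try/catch and fall back to a freshly built default network.

            NN nn;
            try
            {
                nn = IO.Read();     //parse the saved weights/biases file
            }
            catch
            {
                nn = ResetNN();     //rebuild and re-save a default NN (Example #4)
            }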
Example #6
        public Form1()
        {
            InitializeComponent();

            //Various textboxes
            Batchtxt.Text     = BatchSize.ToString();
            AlphaTxt.Text     = NN.LearningRate.ToString();
            SpecialDecay.Text = NN.RMSDecay.ToString();
            Nesterov.Checked  = NN.UseNesterov;
            ConvStepsTxt.Text = ConvolutionLayer.StepSize.ToString();

            //Special combobox
            Special.Items.Add("None");
            Special.Items.Add("RMSProp");
            Special.Items.Add("Momentum");
            //Special.Items.Add("ADAM");
            Special.SelectedIndex = 1;

            //Layer types combobox
            LayerTypeCB.Items.Add("Fully Connected");
            LayerTypeCB.Items.Add("Convolution");
            LayerTypeCB.Items.Add("Pooling");
            LayerTypeCB.SelectedIndex = 0;

            //NN loading or reset if invalid
            try
            {
                nn = IO.Read();
            }
            catch
            {
                MessageBox.Show("Failed to load data; reset to default");
                nn = ResetNN();
            }
            RefreshListBoxes(nn);
            //If the loaded network contained no layers, fall back to the default one
            if (LayerTypes.Count == 0)
            {
                nn = ResetNN();
            }
        }
Example #7
        /// <summary>
        /// Saves a specified NN to a file
        /// </summary>
        /// <param name="nn">The specified NN</param>
        /// <param name="COG">[C]ritic [O]r [G]enerator</param>
        public static void Write(NN nn)
        {
            //Dispose the writer even if serialization fails part-way through
            using (StreamWriter sw = new StreamWriter(new FileStream(WBPath, FileMode.Create, FileAccess.Write, FileShare.None)))
            {
                sw.Write(nn.NumLayers + ",");
                for (int i = 0; i < nn.NumLayers; i++)
                {
                    //One-letter type code, matching what Read expects
                    string type = "f";
                    if (nn.Layers[i] is ConvolutionLayer)
                    {
                        type = "c";
                    }
                    if (nn.Layers[i] is PoolingLayer)
                    {
                        type = "p";
                    }
                    sw.Write(type + ",");
                    //Pooling layer has no weights
                    if (type == "p")
                    {
                        sw.Write((nn.Layers[i] as PoolingLayer).PoolSize + "," + nn.Layers[i].InputLength + ",");
                        continue;
                    }
                    sw.Write(nn.Layers[i].Length + "," + nn.Layers[i].InputLength + ",");
                    //Weights row by row, with a bias after each row for every
                    //fully connected layer except the output layer
                    for (int j = 0; j < nn.Layers[i].Weights.GetLength(0); j++)
                    {
                        for (int jj = 0; jj < nn.Layers[i].Weights.GetLength(1); jj++)
                        {
                            sw.Write(nn.Layers[i].Weights[j, jj] + ",");
                        }
                        if (i != nn.NumLayers - 1 && nn.Layers[i] is FullyConnectedLayer)
                        {
                            sw.Write((nn.Layers[i] as FullyConnectedLayer).Biases[j] + ",");
                        }
                    }
                }
            }
        }
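A rough round-trip check, assuming nn is an already-initialized NN and the static WBPath field points at a writable location: writing the network and reading it back should reproduce the same layer count.

            IO.Write(nn);                     //serialize the current network (this example)
            NN restored = IO.Read();          //parse it back (Example #5)
            Console.WriteLine(nn.NumLayers == restored.NumLayers);   //expected: True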