예제 #1
0
 /// <summary>
 /// Creates the trainer: builds a three-layer TANH feed-forward network
 /// and initializes the pause semaphore and model service.
 /// </summary>
 /// <param name="modelService">Service used to persist/load trained models.</param>
 public NetTrainer(IModelService modelService)
 {
     // NOTE(review): this network is built, finalized and reset but never
     // stored in a field — it appears to be discarded; confirm intent.
     var network = new Network();
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, DefaultFingerprintSize));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, DefaultHiddenNeuronsCount));
     network.AddLayer(new BasicLayer(new ActivationTANH(), false, OutPutNeurons));
     network.Structure.FinalizeStructure();
     network.Reset();
     this.modelService = modelService;
     // Named semaphore — "PauseSemaphore" is visible system-wide on Windows.
     pauseSem = new Semaphore(0, 1, "PauseSemaphore");
 }
예제 #2
0
        /// <summary>
        /// Assembles the feed-forward network: a 4-neuron hidden layer plus a
        /// single-neuron output layer, with the input and desired-output
        /// arrays bound directly to the network.
        /// </summary>
        public void BuildNetwork()
        {
            _network = new Network(_node);

            // Hidden layer with 4 neurons, then the output layer with 1 neuron.
            // (The original comment incorrectly said "2 neurons".)
            _network.AddLayer(4);
            _network.AddLayer(1);

            // Bind the externally owned input and training arrays.
            _network.BindInputLayer(_input);
            _network.BindTraining(_desired);

            // Standard feed-forward synapses between consecutive layers.
            _network.AutoLinkFeedforward();
        }
예제 #3
0
 /// <summary>
 /// Adds the input layer, the configured number of hidden layers, and the
 /// output layer to the network, each with a descriptive name.
 /// </summary>
 public void AddLayers()
 {
     Network.AddLayer(InputNeuronCount, "Input");
     for (int layerIndex = 0; layerIndex < HiddenLayerCount; ++layerIndex)
     {
         // Hidden layers are named Hidden0, Hidden1, ...
         Network.AddLayer(HiddenNeuronCount, String.Format("Hidden{0}", layerIndex));
     }
     Network.AddLayer(OutputNeuronCount, "Output");
 }
예제 #4
0
        /// <summary>
        /// Menu handler: builds a small fully-connected/max-pool stack
        /// (400 -> 100 -> 4 units), compiles it for a 10x1x4 input, stores it
        /// as the active network and writes it out.
        /// </summary>
        private void финСетьToolStripMenuItem_Click(object sender, EventArgs e)
        {
            var net = new Network();

            // Alternating dense and 2x1 max-pool layers, ReLU activations.
            net.AddLayer(new FullyConnLayar(new Relu(), new NeuralNetwork.Size(1, 1, 400)));
            net.AddLayer(new MaxPool2D(new SimpleFunc(), 2, 1));
            net.AddLayer(new FullyConnLayar(new Relu(), new NeuralNetwork.Size(1, 1, 100)));
            net.AddLayer(new MaxPool2D(new SimpleFunc(), 2, 1));
            net.AddLayer(new FullyConnLayar(new Relu(), new NeuralNetwork.Size(1, 1, 4)));

            // Compile for a 10x1x4 input volume.
            net.Compile(new NeuralNetwork.Size(10, 1, 4));

            NetworkData.network = net;

            WriteNetwork();
        }
예제 #5
0
        /// <summary>
        /// Connect the network: builds the hidden layer (its size depends on
        /// which feature groups are enabled), links input->hidden->output,
        /// and appends the feature-specific neuron groups to the hidden layer.
        /// </summary>
        public virtual void ConnectNetwork()
        {
            Layer input = _network.InitUnboundInputLayer(_numberOfInputs);
            Layer hidden;

            if (_config.All2AllEnable.Value)
            {
                // Hidden size is a quarter of the input count when both line
                // feature groups are enabled, otherwise half — presumably to
                // bound the total neuron count; TODO confirm the rationale.
                if (_config.HorizontalLinesEnable.Value && _config.VerticalLinesEnable.Value)
                {
                    hidden = _network.AddLayer((int)Math.Ceiling(_numberOfInputs / 4d));
                }
                else
                {
                    hidden = _network.AddLayer((int)Math.Ceiling(_numberOfInputs / 2d));
                }
                input.CrossLinkForward();                 //link input to existing hidden neurons
            }
            else
            {
                // No all-to-all connectivity: start with an empty hidden
                // layer; the feature groups below populate it.
                hidden = _network.AddLayer(0);
            }
            _network.AddLayer(_numberOfOutputs);

            // Each enabled feature group contributes its own neurons to the
            // hidden layer (already wired to the input internally).
            if (_config.VerticalLinesEnable.Value)
            {
                hidden.AddRange(AddVerticalLines(input, hidden));
            }
            if (_config.HorizontalLinesEnable.Value)
            {
                hidden.AddRange(AddHorizontalLines(input, hidden));
            }
            if (_config.RingsEnable.Value)
            {
                hidden.AddRange(AddRings(input, hidden));
            }
            if (_config.LittleSquaresEnable.Value)
            {
                hidden.AddRange(AddSquares(input, hidden));
            }

            hidden.CrossLinkForward();             //link hidden to output layer
        }
예제 #6
0
        /// <summary>
        /// Smoke-trains a small convolutional cat classifier on 50x25 bitmaps
        /// and reports whether the batch error decreased during training.
        /// </summary>
        /// <returns>true when the first recorded error exceeds the last one.</returns>
        static bool CatTrain()
        {
            BitmapCatEnumerator enums   = new BitmapCatEnumerator("Sorted", new System.Drawing.Size(50, 25));
            Network             network = new Network();

            // Two conv/pool stages followed by two fully connected layers.
            network.AddLayer(new Conv2D(new Relu(), 7, 7, 32));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));

            network.AddLayer(new Conv2D(new Relu(), 5, 5, 64));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));

            network.AddLayer(new FullyConnLayar(new Relu(), new Size(1, 1, 256)));
            network.AddLayer(new FullyConnLayar(new Sigmoid(), new Size(1, 1, 2)));

            network.Compile(new Size(3, 25, 50), true);

            network.Normalization();

            // GetRandom takes the network by ref and may mutate it, so the
            // call is kept for its side effects. The sampled pair itself was
            // dead code — it only populated an unused OneEnumerator, which
            // has been removed.
            enums.GetRandom(ref network);

            MomentumParallel sgd = new MomentumParallel(network, 0.9, 1e-6);

            double[] errors = sgd.TrainBatch(enums, 32, 1000);

            // Training "succeeds" when the error trend is downward.
            return(errors[0] > errors.Last());
        }
예제 #7
0
        /// <summary>
        /// Round-trips a network through JSON serialization and verifies that
        /// the reloaded copy produces the same output as the original on one
        /// random sample.
        /// </summary>
        /// <returns>true when original and reloaded networks agree.</returns>
        static bool SaveLoad()
        {
            BitmapCatEnumerator enums = new BitmapCatEnumerator("Sorted", new System.Drawing.Size(50, 25));

            Network network = new Network();

            network.AddLayer(new Conv2D(new Relu(), 7, 7, 32));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));

            network.AddLayer(new Conv2D(new Relu(), 5, 5, 64));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));

            network.AddLayer(new FullyConnLayar(new Relu(), new Size(1, 1, 256)));
            network.AddLayer(new FullyConnLayar(new Sigmoid(), new Size(1, 1, 2)));

            network.Compile(new Size(3, 25, 50), true);

            network.Normalization();
            network.Normalization();

            string data = network.SaveJSON();

            Network network2 = new Network();

            network2.LoadJSON(data);

            var pair = enums.GetRandom(ref network);

            var out1 = network.GetOutput(pair.Key);
            // BUG FIX: the second output must come from the reloaded network
            // (network2). Previously both outputs were taken from the
            // original network, so this check could never fail.
            var out2 = network2.GetOutput(pair.Key);

            return(out1[0, 0, 0] == out2[0, 0, 0]);
        }
예제 #8
0
        /// <summary>
        /// Demo window constructor: builds two stacked Conv layers, runs one
        /// forward pass, measures the serialized network size, then exercises
        /// the lower-level Conv/Pool map API on the same input.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            TestInput Input = new TestInput(48, 48, 1);

            Network net = new Network();

            net.AddLayer(new Conv(190, 5, 5, 48, 48, true));
            net.AddLayer(new Conv(260, 5, 5, 44, 44, true));

            net.Process(Input.Value);

            long size = 0;

            // SECURITY NOTE(review): BinaryFormatter is obsolete and unsafe
            // for untrusted data (removed in .NET 9). It is used here only to
            // measure the serialized size of the network; consider
            // System.Text.Json or a manual size estimate instead.
            using (Stream stream = new MemoryStream())
            {
                BinaryFormatter formatter = new BinaryFormatter();
                formatter.Serialize(stream, net);
                size = stream.Length;
            }

            // Lower-level API: a single conv map, a hidden layer object and a
            // pooling stage fed from the conv maps.
            Conv conv1_44x44 = new Conv();

            // NOTE(review): fully_160 is constructed but never used beyond
            // its constructor — confirm whether it can be removed.
            Hidden fully_160 = new Hidden(160, 16, true);
            Pool   pool      = new Pool();

            conv1_44x44.AddMap(new Map(new Kernel(5, 5), 48, 48));

            conv1_44x44.InputValues(Input.Value);

            pool.InputMaps(conv1_44x44.Maps);
            // Removed dead local `var res = 8;` (never read).
        }
예제 #9
0
        /// <summary>
        /// Checks GetError on a trivial 1x1x1 case: with input 1 and target 1
        /// the reported error is expected to be exactly 1.
        /// </summary>
        static bool ErrorTest()
        {
            Network network = new Network();

            network.AddLayer(new FullyConnLayar(new Relu(), new Size(1, 1, 1)));
            network.Compile(new Size(1, 1, 1));

            // Single-element input and target volumes, both set to 1.
            double[,,] input  = new double[1, 1, 1] { { { 1 } } };
            double[,,] target = new double[1, 1, 1] { { { 1 } } };

            double err = network.GetError(input, target);

            return err == 1;
        }
예제 #10
0
        /// <summary>
        /// XOR with a bias-free network: asserts the untrained outputs do not
        /// already match the targets, then trains for up to 1000 epochs or
        /// until the summed error drops below 0.001.
        /// </summary>
        public void XORNoBiasTest()
        {
            double[][] inputs = new double[][]
            {
                new double[] { -1, -1 },
                new double[] { -1, 1 },
                new double[] { 1, -1 },
                new double[] { 1, 1 }
            };
            double[] expectedOutput = new double[] { -1, 1, 1, -1 };

            var network = new Network(
                new IdentityLayer(2),
                new DenseLayerNoBias(2, 1, new IdentityActivation(), new SquaredDistance()));

            network.AddLayer(new DenseLayerNoBias(2, 2, new IdentityActivation(), new SquaredDistance()));
            network.Initialize();

            // Before training the network must not already reproduce XOR.
            for (int sample = 0; sample < inputs.Length; sample++)
            {
                network.Evaluate(inputs[sample]);
                Assert.AreNotEqual(expectedOutput[sample], network.OutputLayer.Output[0]);
            }

            // Train network
            int    epoc  = 0;
            double error = 100;

            while (++epoc < 1000 && error > 0.001)
            {
                error = 0;
                for (int sample = 0; sample < inputs.Length; sample++)
                {
                    error += network.Train(inputs[sample], new double[] { expectedOutput[sample] }, 0.1);
                }
            }

            // NOTE(review): the convergence assertions were disabled by the
            // original author, so this test currently only verifies the
            // pre-training state:
            //Assert.IsTrue(epoc < 1000);
            //for each input: Assert |expected - output| < 0.01 (1% margin)
        }
예제 #11
0
        /// <summary>
        /// Auto-encoder pre-training followed by supervised training of the
        /// cat classifier; checkpoints the network to disk every 5 minutes.
        /// NOTE(review): always returns true, and the 100000-iteration loop
        /// has no early exit — this effectively runs until interrupted.
        /// </summary>
        static bool PreTrain()
        {
            BitmapCatEnumerator enums   = new BitmapCatEnumerator("Sorted", new System.Drawing.Size(24, 12));
            BitmapCatEnumerator val     = new BitmapCatEnumerator("Val", new System.Drawing.Size(24, 12));
            Network             network = new Network();

            //network.LoadJSON(System.IO.File.ReadAllText("pretrained_2.neural"));
            //network.CompileOnlyError();
            // Two conv/pool stages followed by two fully connected layers.
            network.AddLayer(new Conv2D(new Relu(), 3, 3, 10));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));

            network.AddLayer(new Conv2D(new Relu(), 5, 5, 30));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));

            network.AddLayer(new FullyConnLayar(new Relu(), new Size(1, 1, 256)));
            network.AddLayer(new FullyConnLayar(new Sigmoid(), new Size(1, 1, 2)));

            network.Compile(new Size(3, 12, 24), true);

            network.Normalization();
            network.Normalization();

            MomentumParallel sgd = new MomentumParallel(network, 0.9, 1e-4);

            sgd.need_max = false;

            // Unsupervised (auto-encoder style) pre-training pass.
            var pair = PretrainAutoEncoder.Action(network, sgd, enums, val, 2000, 32);

            Console.WriteLine("{0}\n{1}", pair.Key, pair.Value);
            Console.WriteLine("Start train");

            // Fresh optimizer (no momentum, smaller step) for supervised training.
            sgd = new MomentumParallel(network, 0, 1e-5);
            DateTime start = DateTime.Now;

            sgd.TrainBatch(enums, 256, 1);
            for (int i = 0; i < 100000; i++)
            {
                double[] errors = sgd.TrainBatchContinue(enums, 256, 1);
                // Checkpoint at most once every five minutes.
                if ((DateTime.Now - start).TotalMinutes > 5)
                {
                    System.IO.File.WriteAllText("train_" + i + ".neural", network.SaveJSON());
                    start = DateTime.Now;
                    Console.WriteLine("Saved at " + "train_" + i + ".neural");
                }
            }
            return(true);
        }
예제 #12
0
        /// <summary>
        /// Trains a single linear neuron to fit f(x) = 3*x + 5, reporting the
        /// synapse/bias weights and MSE after each of three manual passes,
        /// then auto-trains to an MSE of 1e-9 within at most 1000 epochs.
        /// </summary>
        static void Main(string[] args)
        {
            Controler ffc = new Controler(1);

            Network net = new Network(ffc.Node);

            // One linear input neuron feeding one linear output neuron.
            net.InitUnboundInputLayer(1).BasicConfiguration.ActivationType.Value = EActivationType.Linear;
            net.AddLayer(1, EActivationType.Linear);
            net.AutoLinkFeedforward();

            Neuron  neuron  = net.LastLayer[0];
            Synapse synapse = neuron.SourceSynapses[0];

            ffc.ImportNetwork(net, false);

            BasicConfig config = ffc.NeuralNetwork.BasicConfiguration;

            config.BiasNeuronEnable.Value          = true;
            config.BiasNeuronOutput.Value          = 1.0;
            config.FlatspotEliminationEnable.Value = false;
            config.WeightDecayEnable.Value         = false;
            config.SymmetryPreventionEnable.Value  = false;
            config.ManhattanTrainingEnable.Value   = false;
            config.LearningRate.Value = 0.005;

            // Samples are drawn dynamically from my_func on random x in [0, 10].
            StochasticCoordinateGenerator scg = new StochasticCoordinateGenerator(0, 10, 100);
            //RegularCoordinateGenerator rcg = new RegularCoordinateGenerator(-25, 25, 50);
            DynamicSampleProvider dsp = new DynamicSampleProvider(my_func, scg); //rcg);

            ffc.Provider = dsp;                                                  // new CachedSampleProvider(dsp);

            Console.WriteLine("TARGET FUNCTION:             3*x+5");
            Console.WriteLine("TARGET Synapse Weight      = 3.0");
            Console.WriteLine("TARGET Bias Weight         = 5.0");
            Console.WriteLine("TARGET Mean Squared Error <= 0.000000001");
            Console.WriteLine();

            Console.WriteLine("Synapse Weight: " + synapse.Weight + " - Bias Weight: " + neuron.BiasNeuronWeight);
            Console.WriteLine("Initial MSE: " + ffc.EstimateMeanSquaredError());
            Console.WriteLine();

            // Three manual training passes (previously three copy-pasted
            // blocks); the console output is identical to the original.
            for (int pass = 0; pass < 3; pass++)
            {
                ffc.TrainAllSamplesOnce();
                Console.WriteLine("Synapse Weight: " + synapse.Weight + " - Bias Weight: " + neuron.BiasNeuronWeight);
                Console.WriteLine("Trained MSE: " + ffc.EstimateMeanSquaredError());
                Console.WriteLine();
            }

            Console.WriteLine("Auto Training, maximum 1000 Epochs");
            Console.WriteLine();
            if (ffc.TrainAllSamplesUntil(0.000000001, 1000))
            {
                Console.WriteLine("Synapse Weight: " + synapse.Weight + " - Bias Weight: " + neuron.BiasNeuronWeight);
                Console.ForegroundColor = ConsoleColor.Green;
                Console.WriteLine("SUCCEEDS auto training with MSE: " + ffc.EstimateMeanSquaredError());
                Console.ResetColor();
            }
            else
            {
                Console.WriteLine("Synapse Weight: " + synapse.Weight + " - Bias Weight: " + neuron.BiasNeuronWeight);
                Console.ForegroundColor = ConsoleColor.Red;
                Console.WriteLine("FAILS auto training with MSE: " + ffc.EstimateMeanSquaredError());
                Console.ResetColor();
            }

            Console.ReadKey();
        }
예제 #13
0
        /// <summary>
        /// Demonstrates the unbound workflow: input and training data are
        /// pushed into the network explicitly instead of being bound to
        /// external arrays.
        /// </summary>
        public void RunDemo()
        {
            Console.WriteLine("### BASIC UNBOUND DEMO ###");

            // The Network constructor already creates the input layer; in the
            // unbound scenario we only have to size it.
            Network network = new Network();
            network.InitUnboundInputLayer(3);

            // Hidden layer (4 neurons) and output layer (2 neurons).
            network.AddLayer(4);
            network.AddLayer(2);

            // Simplest possible wiring: feed-forward synapses between layers.
            network.AutoLinkFeedforward();

            // Push one input pattern and its desired output...
            network.PushUnboundInput(new bool[] { false, true, false });
            network.PushUnboundTraining(new bool[] { false, true });

            // ...propagate (internally a two-round process, to correctly
            // handle feedbacks) and show the raw network output.
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());

            // Two backpropagation steps on the current pattern; the output
            // should move closer to (-1, 1) each time.
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // A second pattern, this time using doubles directly
            // (5/1 are the default input/training magnitudes).
            Console.WriteLine("# new pattern:");
            network.PushUnboundInput(new double[] { 5d, -5d, -5d });
            network.PushUnboundTraining(new double[] { 1, 1 });
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // Revisit the first pattern to see how much it drifted.
            Console.WriteLine("# the old pattern again:");
            network.PushUnboundInput(new double[] { -5d, 5d, -5d });
            network.PushUnboundTraining(new double[] { -1, 1 });
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());

            Console.WriteLine("=== COMPLETE ===");
            Console.WriteLine();
        }
예제 #14
0
        /// <summary>
        /// Builds the demo 3-2-2 network (nodes a1..a3, b1..b2, c1..c2) with
        /// fixed 0.5 connector weights and returns it.
        /// </summary>
        /// <returns>The fully wired three-layer network.</returns>
        private Network initializeNetwork()
        {
            // layer 1
            Node  a1     = new Node("a1");
            Node  a2     = new Node("a2");
            Node  a3     = new Node("a3");
            Layer layer1 = new Layer();

            layer1.AddNode(a1);
            layer1.AddNode(a2);
            layer1.AddNode(a3);
            // layer 2
            Node  b1     = new Node("b1");
            Node  b2     = new Node("b2");
            Layer layer2 = new Layer();

            layer2.AddNode(b1);
            layer2.AddNode(b2);
            // layer 3
            Node  c1     = new Node("c1");
            Node  c2     = new Node("c2");
            Layer layer3 = new Layer();

            layer3.AddNode(c1);
            layer3.AddNode(c2);

            // Connectors layer1 -> layer2
            Connector con_a1_b1 = new Connector(a1, b1, 0.5f);

            a1.AddForwardConnector(con_a1_b1);
            // BUG FIX: con_a1_b1 targets b1, but it was registered as a
            // backward connector on b2; register it on b1 instead.
            b1.AddBackwardConnector(con_a1_b1);
            Connector con_a2_b1 = new Connector(a2, b1, 0.5f);
            Connector con_a2_b2 = new Connector(a2, b2, 0.5f);

            a2.AddForwardConnector(con_a2_b1);
            b1.AddBackwardConnector(con_a2_b1);
            a2.AddForwardConnector(con_a2_b2);
            b2.AddBackwardConnector(con_a2_b2);
            Connector con_a3_b2 = new Connector(a3, b2, 0.5f);

            a3.AddForwardConnector(con_a3_b2);
            b2.AddBackwardConnector(con_a3_b2);
            // Connectors layer2 -> layer3
            Connector con_b1_c1 = new Connector(b1, c1, 0.5f);
            Connector con_b1_c2 = new Connector(b1, c2, 0.5f);

            b1.AddForwardConnector(con_b1_c1);
            c1.AddBackwardConnector(con_b1_c1);
            b1.AddForwardConnector(con_b1_c2);
            c2.AddBackwardConnector(con_b1_c2);
            Connector con_b2_c1 = new Connector(b2, c1, 0.5f);
            Connector con_b2_c2 = new Connector(b2, c2, 0.5f);

            b2.AddForwardConnector(con_b2_c1);
            c1.AddBackwardConnector(con_b2_c1);
            b2.AddForwardConnector(con_b2_c2);
            c2.AddBackwardConnector(con_b2_c2);

            // form the network
            Network network = new Network();

            network.AddLayer(layer1);
            network.AddLayer(layer2);
            // BUG FIX: the output layer was fully built and wired above but
            // never added to the network.
            network.AddLayer(layer3);

            return(network);
        }
예제 #15
0
        /// <summary>
        /// Shows how to wire a network's synapses by hand instead of calling
        /// AutoLinkFeedforward: a full input->hidden cross-link, a lateral
        /// feedback inside the hidden layer, and hand-picked hidden->output
        /// synapses, some with explicit start weights.
        /// </summary>
        public void RunDemo()
        {
            Console.WriteLine("### NETWORK STRUCTURE DEMO ###");

            // The constructor already creates the input layer; size it for
            // the unbound scenario, then add hidden (4) and output (2) layers.
            Network network = new Network();
            network.InitUnboundInputLayer(3);
            network.AddLayer(4);
            network.AddLayer(2);

            Layer input  = network.FirstLayer;
            Layer hidden = input.TargetLayer;
            Layer output = network.LastLayer;

            // All input neurons to all hidden neurons — exactly what
            // AutoLinkFeedforward would do, but only for this layer pair.
            input.CrossLinkForward();

            // Lateral feedback within the hidden layer; AutoLinkFeedforward
            // never creates these connections.
            hidden.CrossLinkLayer();

            // Hand-wired hidden->output synapses: hidden neurons 0/1 feed
            // output 0, neurons 2/3 feed output 1; two synapses start with
            // explicit weights.
            hidden[0].ConnectToNeuron(output[0]);
            hidden[1].ConnectToNeuron(output[0], 0.5);
            hidden[2].ConnectToNeuron(output[1]);
            hidden[3].ConnectToNeuron(output[1], -1.5);

            // Exercise the hand-built structure exactly as in the basic
            // unbound demo: one pattern, one forward pass, two train steps.
            network.PushUnboundInput(new bool[] { false, true, false });
            network.PushUnboundTraining(new bool[] { false, true });
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // Useful as a template for custom INetworkStructureFactory
            // implementations (see the default implementation as well).

            Console.WriteLine("=== COMPLETE ===");
            Console.WriteLine();
        }
예제 #16
0
        /// <summary>
        /// Menu handler: builds the standard conv/pool classification model,
        /// sizes the output layer from the shape of a random training target,
        /// compiles and normalizes it, stores it globally and shows a
        /// per-layer summary.
        /// </summary>
        private void загрузитьСтандартнуюМодельToolStripMenuItem_Click(object sender, EventArgs e)
        {
            Network network = new Network();

            network.AddLayer(new Dropout(new SimpleFunc(), 0.1));
            network.AddLayer(new Conv2D(new Relu(), 9, 9, 16));
            network.AddLayer(new MaxPool2D(new SimpleFunc(), 2, 2));
            network.AddLayer(new Conv2D(new Relu(), 5, 5, 32));
            network.AddLayer(new MaxPool2D(new SimpleFunc(), 2, 2));
            network.AddLayer(new Conv2D(new Relu(), 3, 3, 64));
            network.AddLayer(new MaxPool2D(new SimpleFunc(), 2, 2));
            network.AddLayer(new Conv2D(new Relu(), 3, 3, 32));
            network.AddLayer(new MaxPool2D(new SimpleFunc(), 2, 2));
            network.AddLayer(new FullyConnLayar(new Relu(), new NeuralNetwork.Size(1, 1, 128)));

            // The output layer is sized from the shape of one training target.
            var pair    = NetworkData.train.GetRandom(ref network);
            var out_arr = pair.Value;

            network.AddLayer(new FullyConnLayar(new Sigmoid(), new NeuralNetwork.Size(out_arr.GetLength(0), out_arr.GetLength(1), out_arr.GetLength(2))));

            network.Compile(new NeuralNetwork.Size(3, NetworkData.image_size.Height, NetworkData.image_size.Width), true);

            network.Normalization();
            network.Normalization();

            NetworkData.network = network;

            // Build the per-layer summary with StringBuilder instead of
            // string concatenation in a loop (same resulting text).
            var message = new System.Text.StringBuilder();

            for (int i = 0; i < network.layers.Count; i++)
            {
                message.Append(network.layers[i].GetType().Name)
                       .Append(" {")
                       .Append(network.layers[i].output_size[0]).Append(",")
                       .Append(network.layers[i].output_size[1]).Append(",")
                       .Append(network.layers[i].output_size[2])
                       .Append("}\n");
            }

            WriteNetwork();

            MessageBox.Show(message.ToString());
        }
예제 #17
0
        /// <summary>
        /// Loads an image, slices it into tiles, trains a small conv net on
        /// the "Sorted" data set, and periodically overlays per-tile
        /// classification colors (red/green) onto the picture box.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            if (openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                Bitmap map = new Bitmap(openFileDialog1.FileName);

                // Cut the bitmap into a 7x10 grid of boxes and convert each
                // tile (resized to 24x12) to an RGB array for the network.
                BoxerDLL.BoxCut2D   cutter = new BoxCut2D(7, 10, 0.143f, 0.1f);
                List <Bitmap>       maps   = cutter.Cut(map);
                List <double[, , ]> rgbs   = new List <double[, , ]>();
                for (int i = 0; i < maps.Count; i++)
                {
                    maps[i] = new Bitmap(maps[i], new System.Drawing.Size(24, 12));
                    rgbs.Add(ImageDataConverter.GetRGBArray(maps[i]));
                }


                // Screen-space rectangles matching the cut tiles; used later
                // for drawing the classification overlay.
                RectangleF[] rects = cutter.Rects1D(map.Width, map.Height);

                BitmapCatEnumerator enums   = new BitmapCatEnumerator("Sorted", new System.Drawing.Size(24, 12));
                BitmapCatEnumerator val     = new BitmapCatEnumerator("Val", new System.Drawing.Size(24, 12));
                Network             network = new Network();
                // Dropout + two conv/pool stages + two fully connected layers.
                network.AddLayer(new Dropout(new Relu(), 0.05));
                network.AddLayer(new Conv2D(new Relu(), 7, 7, 32));
                network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
                network.AddLayer(new Dropout(new Relu(), 0.05));

                network.AddLayer(new Conv2D(new Relu(), 3, 3, 64));
                network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
                network.AddLayer(new Dropout(new Relu(), 0.05));

                network.AddLayer(new FullyConnLayar(new Sigmoid(), new NeuralNetwork.Size(1, 1, 256)));
                network.AddLayer(new FullyConnLayar(new Sigmoid(), new NeuralNetwork.Size(1, 1, 2)));

                network.Compile(new NeuralNetwork.Size(3, 12, 24), true);

                network.Normalization();

                pictureBox1.Image = map;

                MomentumParallel sgd = new MomentumParallel(network, 0.9, 1e-4);

                // Epoch index of the last overlay refresh.
                int last = 0;

                for (int i = 0; i < 1000; i++)
                {
                    var errors = sgd.TrainBatchPercent(enums, 32, 1);

                    // Refresh the overlay when accuracy exceeds 90% or at
                    // least every 20 epochs.
                    if (errors.Last().Value > 90 || i - last > 20)
                    {
                        last = i;
                        Color[] colors = new Color[rects.Length];
                        Color   plus   = Color.FromArgb(100, Color.Red);
                        Color   minus  = Color.FromArgb(100, Color.Green);

                        // Classify each tile: channel 0 vs channel 1 of the
                        // output decides the overlay color.
                        for (int j = 0; j < maps.Count; j++)
                        {
                            double[,,] output = network.GetOutput(rgbs[j]);

                            colors[j] = output[0, 0, 0] > output[0, 0, 1] ? plus : minus;
                        }

                        pictureBox1.Image = DrawColoredRects(map, rects, colors);

                        var validation = network.GetError(val);
                        label2.Text = "Validation\nError: " + validation.Key + "\nPercent: " + validation.Value;
                    }
                    label1.Text = i + "\nError: " + errors.Last().Key + "\nPercent: " + errors.Last().Value;
                    Update();
                }
            }
        }
예제 #18
0
        /// <summary>
        /// Reads vector samples from data.txt (first line: class names; then
        /// alternating input/output lines), drops samples containing NaN or
        /// Infinity, shows a category visualisation dialog, then trains a
        /// two-layer classifier on an 80/20 train/validation split.
        /// </summary>
        static void LinePredictor()
        {
            StreamReader rd           = new StreamReader("data.txt");
            string       classes_line = rd.ReadLine();

            string[]             classes = classes_line.Split(' ');
            List <double[, , ]>  inputs  = new List <double[, , ]>();
            List <double [, , ]> outputs = new List <double[, , ]>();

            // Samples are stored as alternating input/output lines.
            while (!rd.EndOfStream)
            {
                inputs.Add(LineToArr3(rd.ReadLine()));
                outputs.Add(LineToArr3(rd.ReadLine()));
            }

            int output_len = outputs[0].Length;

            int nancount = 0;

            // Drop any sample whose input contains NaN or Infinity.
            // NOTE(review): indexing uses l[0, 0, j] with j < l.Length, which
            // assumes arrays shaped (1, 1, N) — confirm against LineToArr3.
            for (int i = 0; i < inputs.Count; i++)
            {
                var l = inputs[i];
                for (int j = 0; j < l.Length; j++)
                {
                    if (double.IsNaN(l[0, 0, j]) || double.IsInfinity(l[0, 0, j]))
                    {
                        nancount++;
                        inputs.RemoveAt(i);
                        outputs.RemoveAt(i);
                        i--;
                        break;
                    }
                }
            }


            ArrDataEnumerator enumerator = new ArrDataEnumerator(inputs, outputs);

            // Modal dialog: lets the user inspect the data before training.
            DataCategoryVisualisation visualisation = new DataCategoryVisualisation(enumerator, 2, 6, 600, classes);

            visualisation.ShowDialog();

            Network network = new Network();

            // Two fully connected layers with a max-based loss; the input
            // size is taken from the third dimension of the first sample.
            network.loss = new MaxLoss();
            network.AddLayer(new FullyConnLayar(new LeackyRelu(0.01), new NeuralNetwork.Size(1, 1, 64)));
            network.AddLayer(new FullyConnLayar(new Sigmoid(), new NeuralNetwork.Size(1, 1, output_len)));
            network.Compile(new NeuralNetwork.Size(1, 1, inputs[0].GetLength(2)), true);
            network.AddNoise();

            Optimizer parallel = new MomentumParallel(network, 0.9, 0.0005);

            // 80/20 train/validation split.
            enumerator.SplitTrainVal(0.8, out ArrDataEnumerator train, out ArrDataEnumerator val);
            Learner learner = new Learner()
            {
                optimizer  = parallel,
                train_data = train,
                val_data   = val
            };

            learner.metrics.Add(new ArgMaxMetrics());
            learner.metrics.Add(new ArgmaxCount(0, 0, 4));
            learner.metrics.Add(new ArgmaxTrainCount(0, 0, 4));
            learner.callbacks.Add(new MetricSaveCallback("val_argmax"));

            learner.Learn(8, 50, 10000);
        }
        /// <summary>
        /// Manual-wiring demo: instead of AutoLinkFeedforward, the synapses
        /// are created explicitly — input->hidden cross-link, lateral hidden
        /// feedback, and individually chosen hidden->output connections.
        /// </summary>
        public void RunDemo()
        {
            Console.WriteLine("### NETWORK STRUCTURE DEMO ###");

            // Constructor creates the input layer; size it, then append the
            // hidden (4 neurons) and output (2 neurons) layers.
            Network network = new Network();
            network.InitUnboundInputLayer(3);
            network.AddLayer(4);
            network.AddLayer(2);

            Layer input = network.FirstLayer;
            Layer hidden = input.TargetLayer;
            Layer output = network.LastLayer;

            // Every input neuron to every hidden neuron (the per-layer-pair
            // equivalent of AutoLinkFeedforward).
            input.CrossLinkForward();

            // Lateral feedback inside the hidden layer, which
            // AutoLinkFeedforward would never create.
            hidden.CrossLinkLayer();

            // Explicit hidden->output wiring: neurons 0/1 -> output 0,
            // neurons 2/3 -> output 1, two of them with preset weights.
            hidden[0].ConnectToNeuron(output[0]);
            hidden[1].ConnectToNeuron(output[0], 0.5);
            hidden[2].ConnectToNeuron(output[1]);
            hidden[3].ConnectToNeuron(output[1], -1.5);

            // Run one pattern through the hand-built structure and train it
            // twice, as in the basic unbound demo.
            network.PushUnboundInput(new bool[] { false, true, false });
            network.PushUnboundTraining(new bool[] { false, true });
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // A useful starting point for custom INetworkStructureFactory
            // implementations.

            Console.WriteLine("=== COMPLETE ===");
            Console.WriteLine();
        }
예제 #20
0
        public void RunDemo()
        {
            Console.WriteLine("### BASIC BOUND DEMO ###");

            // These arrays stay bound to the network for its whole
            // lifetime, so mutating their elements later is all that is
            // needed to present a new pattern.
            double[] boundInput = new double[] { -5d, 5d, -5d };
            double[] boundTraining = new double[] { -1, 1 };

            // The Network constructor already creates the input layer.
            Network network = new Network();

            // Bind the input array to the existing input layer, then
            // grow the net: 4 hidden neurons and 2 output neurons.
            network.BindInputLayer(boundInput);
            network.AddLayer(4);
            network.AddLayer(2);
            // The training array must be bound AFTER all layers exist.
            network.BindTraining(boundTraining);

            // Fully connect consecutive layers with synapses — the
            // simplest way to wire a feedforward net.
            network.AutoLinkFeedforward();

            // Forward pass over the bound input, then print the output.
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());

            // Two backpropagation steps on the current pattern; the
            // printed output should move closer to (-1, 1) each time.
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // Present a second pattern by editing the bound arrays.
            Console.WriteLine("# new pattern:");
            boundInput[0] = 5d;
            boundInput[1] = -5d;
            boundTraining[0] = 1;
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // Check how much of the first pattern survived the update.
            Console.WriteLine("# the old pattern again:");
            boundInput[0] = -5d;
            boundInput[1] = 5d;
            boundTraining[0] = -1;
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());

            Console.WriteLine("=== COMPLETE ===");
            Console.WriteLine();
        }
예제 #21
0
        private void RebuildCurrentNetworkStructure()
        {
            // Rebuild the in-memory network from the dataset rows of the
            // currently selected network, refreshing the id -> object maps.
            currentNetwork = new Network();
            currentNeuronMap = new Hashtable();
            currentSynapseMap = new Hashtable();

            // Layers and neurons: the first layer row becomes the
            // (unbound) input layer, the rest are appended in order.
            NeuralDataSet.LayersRow[] layerRows = SelectLayersFromNetwork(currentNetworkRow);
            if (layerRows.Length == 0)
                return;

            NeuralDataSet.NeuronsRow[] neuronRows = SelectNeuronsFromLayer(layerRows[0]);
            currentNetwork.InitUnboundInputLayer(neuronRows.Length);
            Layer[] layers = new Layer[layerRows.Length];
            layers[0] = currentNetwork.FirstLayer;
            AppendNeuronsToNeuronMap(currentNeuronMap, neuronRows, layers[0]);

            for (int layerIndex = 1; layerIndex < layerRows.Length; layerIndex++)
            {
                neuronRows = SelectNeuronsFromLayer(layerRows[layerIndex]);
                layers[layerIndex] = currentNetwork.AddLayer(neuronRows.Length);
                AppendNeuronsToNeuronMap(currentNeuronMap, neuronRows, layers[layerIndex]);
            }

            // Synapses: connect the mapped source neuron to the mapped
            // target neuron and remember the created synapse by its id.
            foreach (NeuralDataSet.SynapsesRow synapseRow in SelectSynapsesFromNetwork(currentNetworkRow))
            {
                Neuron source = (Neuron)currentNeuronMap[synapseRow.syFK_neuronSource];
                Neuron target = (Neuron)currentNeuronMap[synapseRow.syFK_neuronTarget];
                currentSynapseMap.Add(synapseRow.syID, source.ConnectToNeuron(target));
            }
        }
예제 #22
0
        public void BuildNetwork()
        {
            // Build either a fully connected 32-16 feedforward net or a
            // 64-16 net whose hidden neurons are grouped 4-per-output.
            network = new Network(node);

            if (groupedHiddenLayer)
            {
                network.AddLayer(64); //Hidden layer with 64 neurons
                network.AddLayer(16); //Output layer with 16 neuron

                network.BindInputLayer(input); //Bind Input Data

                network.PushUnboundTraining(outFF);

                // Input -> hidden is fully connected ...
                network.FirstLayer.CrossLinkForward();
                // ... but hidden -> output is wired by hand: each block
                // of four consecutive hidden neurons feeds one output.
                Layer hidden = network.FirstLayer.TargetLayer;
                Layer output = network.LastLayer;
                for (int outIndex = 0; outIndex < output.Count; outIndex++)
                {
                    for (int offset = 0; offset < 4; offset++)
                    {
                        hidden[outIndex * 4 + offset].ConnectToNeuron(output[outIndex]);
                    }
                }
            }
            else
            {
                network.AddLayer(32); //Hidden layer with 32 neurons
                network.AddLayer(16); //Output layer with 16 neuron

                network.BindInputLayer(input); //Bind Input Data

                network.PushUnboundTraining(outFF);
                network.AutoLinkFeedforward(); //Create synapses between the layers for typical feedforward networks.
            }
        }
예제 #23
0
        public void RunDemo()
        {
            Console.WriteLine("### BASIC UNBOUND DEMO ###");

            // The Network constructor already creates the input layer.
            Network network = new Network();

            // In the unbound scenario the input layer size must be set
            // explicitly; then add 4 hidden and 2 output neurons.
            network.InitUnboundInputLayer(3);
            network.AddLayer(4);
            network.AddLayer(2);

            // Fully connect consecutive layers with synapses — the
            // simplest way to wire a feedforward net.
            network.AutoLinkFeedforward();

            // Unbound data is pushed per pattern instead of being bound
            // once; booleans are converted to the library's default
            // double values internally.
            network.PushUnboundInput(new bool[] { false, true, false });
            network.PushUnboundTraining(new bool[] { false, true });

            // Forward pass, then print the raw network output.
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());

            // Two backpropagation steps on the current pattern; the
            // printed output should approach the training target.
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // A second pattern, pushed as doubles this time.
            Console.WriteLine("# new pattern:");
            network.PushUnboundInput(new double[] { 5d, -5d, -5d });
            network.PushUnboundTraining(new double[] { 1, 1 });
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // Check how much of the first pattern survived the update.
            Console.WriteLine("# the old pattern again:");
            network.PushUnboundInput(new double[] { -5d, 5d, -5d });
            network.PushUnboundTraining(new double[] { -1, 1 });
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());

            Console.WriteLine("=== COMPLETE ===");
            Console.WriteLine();
        }
예제 #24
0
        public void RunDemo()
        {
            Console.WriteLine("### BASIC BOUND DEMO ###");

            // These arrays remain bound to the network, so later pattern
            // changes are done simply by mutating their elements.
            double[] boundInput = new double[] { -5d, 5d, -5d };
            double[] boundTraining = new double[] { -1, 1 };

            // The Network constructor already creates the input layer.
            Network network = new Network();

            // Bind the input array to the existing input layer, then add
            // a 4-neuron hidden layer and a 2-neuron output layer.
            network.BindInputLayer(boundInput);
            network.AddLayer(4);
            network.AddLayer(2);
            // Training data must always be bound AFTER layer creation.
            network.BindTraining(boundTraining);

            // Fully connect consecutive layers with synapses.
            network.AutoLinkFeedforward();

            // Forward pass over the bound input; the two-round internal
            // propagation also handles feedback connections correctly.
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());

            // Two backpropagation steps; each printout should be closer
            // to the (-1, 1) training target than the previous one.
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // Present a second pattern by editing the bound arrays.
            Console.WriteLine("# new pattern:");
            boundInput[0] = 5d;
            boundInput[1] = -5d;
            boundTraining[0] = 1;
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());
            network.TrainCurrentPattern(false, true);
            App.PrintArray(network.CollectOutput());

            // Check how much of the first pattern survived the update.
            Console.WriteLine("# the old pattern again:");
            boundInput[0] = -5d;
            boundInput[1] = 5d;
            boundTraining[0] = -1;
            network.CalculateFeedforward();
            App.PrintArray(network.CollectOutput());

            Console.WriteLine("=== COMPLETE ===");
            Console.WriteLine();
        }
예제 #25
0
        static void Main(string[] args)
        {
            Controler ffc = new Controler(1);

            // One linear input neuron feeding one linear output neuron:
            // the net can only represent w*x + bias, which is exactly
            // what is needed to fit the target line 3*x+5.
            Network net = new Network(ffc.Node);
            net.InitUnboundInputLayer(1).BasicConfiguration.ActivationType.Value = EActivationType.Linear;
            net.AddLayer(1, EActivationType.Linear);
            net.AutoLinkFeedforward();

            // Keep direct handles on the single output neuron and its
            // single incoming synapse so we can watch the weights learn.
            Neuron neuron = net.LastLayer[0];
            Synapse synapse = neuron.SourceSynapses[0];

            ffc.ImportNetwork(net, false);

            // Plain backpropagation with a bias neuron; every optional
            // training tweak is switched off.
            BasicConfig config = ffc.NeuralNetwork.BasicConfiguration;
            config.BiasNeuronEnable.Value = true;
            config.BiasNeuronOutput.Value = 1.0;
            config.FlatspotEliminationEnable.Value = false;
            config.WeightDecayEnable.Value = false;
            config.SymmetryPreventionEnable.Value = false;
            config.ManhattanTrainingEnable.Value = false;
            config.LearningRate.Value = 0.005;

            // Sample my_func at random coordinates from [0, 10].
            StochasticCoordinateGenerator scg = new StochasticCoordinateGenerator(0, 10, 100);
            DynamicSampleProvider dsp = new DynamicSampleProvider(my_func, scg);
            ffc.Provider = dsp;

            Console.WriteLine("TARGET FUNCTION:             3*x+5");
            Console.WriteLine("TARGET Synapse Weight      = 3.0");
            Console.WriteLine("TARGET Bias Weight         = 5.0");
            Console.WriteLine("TARGET Mean Squared Error <= 0.000000001");
            Console.WriteLine();

            Console.WriteLine("Synapse Weight: " + synapse.Weight + " - Bias Weight: " + neuron.BiasNeuronWeight);
            Console.WriteLine("Initial MSE: " + ffc.EstimateMeanSquaredError());
            Console.WriteLine();

            // Three single epochs, printing the learned parameters and
            // the error after each one.
            for (int epoch = 0; epoch < 3; epoch++)
            {
                ffc.TrainAllSamplesOnce();
                Console.WriteLine("Synapse Weight: " + synapse.Weight + " - Bias Weight: " + neuron.BiasNeuronWeight);
                Console.WriteLine("Trained MSE: " + ffc.EstimateMeanSquaredError());
                Console.WriteLine();
            }

            Console.WriteLine("Auto Training, maximum 1000 Epochs");
            Console.WriteLine();
            bool converged = ffc.TrainAllSamplesUntil(0.000000001, 1000);
            Console.WriteLine("Synapse Weight: " + synapse.Weight + " - Bias Weight: " + neuron.BiasNeuronWeight);
            Console.ForegroundColor = converged ? ConsoleColor.Green : ConsoleColor.Red;
            Console.WriteLine((converged ? "SUCCEEDS" : "FAILS") + " auto training with MSE: " + ffc.EstimateMeanSquaredError());
            Console.ResetColor();

            Console.ReadKey();
        }
예제 #26
0
        private void загрузитьБольшуюМодельToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // Builds the "big" model: a stack of conv/max-pool feature
            // extractors followed by two fully connected sigmoid layers
            // ending in a 2-neuron output, then compiles it for the
            // configured image size, stores it in NetworkData, persists
            // it via WriteNetwork() and shows a per-layer size summary.
            Network network = new Network();

            network.AddLayer(new Conv2D(new Relu(), 5, 5, 16));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
            network.AddLayer(new Conv2D(new Relu(), 5, 5, 20));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
            network.AddLayer(new Conv2D(new Relu(), 3, 3, 32));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
            network.AddLayer(new Conv2D(new Relu(), 3, 3, 54));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
            network.AddLayer(new Conv2D(new Relu(), 3, 3, 80));
            network.AddLayer(new MaxPool2D(new Relu(), 2, 2));
            network.AddLayer(new FullyConnLayar(new Sigmoid(), new NeuralNetwork.Size(1, 1, 100)));
            network.AddLayer(new FullyConnLayar(new Sigmoid(), new NeuralNetwork.Size(1, 1, 2)));

            network.Compile(new NeuralNetwork.Size(3, NetworkData.image_size.Height, NetworkData.image_size.Width), true);

            // NOTE(review): Normalization() is called twice back-to-back;
            // it is unclear whether the second call is intentional or a
            // copy-paste artifact — confirm before removing.
            network.Normalization();
            network.Normalization();

            NetworkData.network = network;

            // Build the layer summary with a StringBuilder instead of
            // repeated string concatenation (the original used += in a
            // loop, which is O(n^2) in the total message length).
            var message = new System.Text.StringBuilder();
            foreach (var layer in network.layers)
            {
                message.Append(layer.GetType().Name)
                       .Append(" {")
                       .Append(layer.output_size[0]).Append(',')
                       .Append(layer.output_size[1]).Append(',')
                       .Append(layer.output_size[2])
                       .Append("}\n");
            }

            WriteNetwork();

            MessageBox.Show(message.ToString());
        }
예제 #27
0
 /// <summary>
 ///   Standard constructor of NetTrainer, should be used in most cases.
 /// </summary>
 /// <param name = "dalManager">Database gateway</param>
 public NetTrainer(DaoGateway dalManager)
 {
     // Three TANH layers sized by the class constants; the boolean flag
     // on each BasicLayer is true for the first two layers and false
     // for the output layer, matching the sibling NetTrainer overload.
     Network network = new Network();
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, DEFAULT_FINGERPRINT_SIZE));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, DEFAULT_HIDDEN_NEURONS_COUNT));
     network.AddLayer(new BasicLayer(new ActivationTANH(), false, OUT_PUT_NEURONS));
     // Allocate the internal structure, then randomize the weights.
     network.Structure.FinalizeStructure();
     network.Reset();
     Init(network, dalManager);
 }