Code Example #1
        private void StartAI()
        {
            BuilderInstance <float> .Volume = new VolumeBuilder(); // Needed for GPU, must be done on AI thread

            // Must be done after setting BuilderInstance<float>.Volume
            this._yhat = SerializationExtensions.Load <float>("FaceDetection", false)[0];

            while (true)
            {
                if (this._queueIn.TryDequeue(out var bitmap))
                {
                    bitmap = BitmapTool.ResizeImage(bitmap, 300, 300);
                    var boxes = new List <BoundingBox>();


                    var allExtracts =
                        BitmapTool.SlideWindow(bitmap, 100, 100 / 9);
                    // .Concat(BitmapTool.SlideWindow(bitmap, 40, 40 / 3));
                    //.Concat(BitmapTool.SlideWindow(bitmap, 50, 50/3));

                    foreach (var extracts in allExtracts.Batch(10))
                    {
                        boxes.AddRange(BitmapTool.Evaluate(extracts.ToList(), this._yhat, IMAGE_WIDTH, IMAGE_HEIGHT, 0.8f));
                    }
                    this._queueOut.Enqueue(boxes);
                }
            }
        }
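
StartAI above runs on a dedicated AI thread and talks to the rest of the application through two queues: frames come in via _queueIn and detected boxes go out via _queueOut. Below is a minimal sketch of the other side of that hand-off, assuming both fields are ConcurrentQueue instances; OnFrameCaptured is a hypothetical handler name, not part of the original sample.

        // Hedged sketch (not in the original example) of the capture/UI side that feeds StartAI.
        // Assumes _queueIn is a ConcurrentQueue<Bitmap> and _queueOut a ConcurrentQueue<List<BoundingBox>>.
        private void OnFrameCaptured(Bitmap frame)
        {
            this._queueIn.Enqueue(frame); // hand the frame to the AI thread

            if (this._queueOut.TryDequeue(out var boxes))
            {
                foreach (var box in boxes)
                {
                    // draw or otherwise consume the detected bounding boxes here
                }
            }
        }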
Code Example #2
        public void GraphMl()
        {
            var a          = new Const <double>(1.0, "one");
            var b          = new Const <double>(2.0, "two");
            var add        = new Add <double>(a, b);
            var activation = new Activation <double>(add, ActivationType.Relu);

            activation.Save("test");

            var result = SerializationExtensions.Load <double>("test", false);
        }
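
The example above saves the graph and reloads it, but never evaluates the reloaded op. Below is a minimal sketch of running the reloaded graph, assuming the Session<T>.Run API used in the training examples further down this page; since the graph contains no placeholders, an empty feed dictionary is passed.

        // Minimal sketch (not part of the original example): evaluating the reloaded graph.
        public void EvaluateReloadedGraph()
        {
            var reloaded = SerializationExtensions.Load <double>("test", false)[0]; // first op returned by Load is the model

            using (var session = new Session <double>())
            {
                var value = session.Run(reloaded, new Dictionary <string, Volume <double> >());
                Console.WriteLine(value.Get(0)); // Relu(1.0 + 2.0) = 3.0
            }
        }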
Code Example #3
        public void GraphMl()
        {
            var cns        = new ConvNetSharp <double>();
            var a          = cns.Const(1.0, "one");
            var b          = cns.Const(2.0, "two");
            var add        = a + b;
            var activation = cns.Relu(add);

            activation.Save("test");

            var result = SerializationExtensions.Load <double>("test", false);
        }
Code Example #4
        public Form1()
        {
            InitializeComponent();

            this.openFileDialog1.InitialDirectory = Directory.GetCurrentDirectory();

            if (!string.IsNullOrEmpty(Settings.Default.LastFilename))
            {
                SetFilename(Settings.Default.LastFilename);
            }

            BuilderInstance <float> .Volume = new VolumeBuilder(); // Needed for GPU, must be done on AI thread

            // Must be done after setting BuilderInstance<float>.Volume
            this._yhat = SerializationExtensions.Load <float>("FaceDetection", false)[0];
        }
Code Example #5
        /// <summary>
        ///     This sample shows how to serialize and deserialize a ConvNetSharp.Flow network
        ///     1) Graph creation
        ///     2) Dummy Training (only use a single data point)
        ///     3) Serialization
        ///     4) Deserialization
        /// </summary>
        private static void Main()
        {
            var cns = new ConvNetSharp <double>();

            // 1) Graph creation
            var input = cns.PlaceHolder("x"); // input

            var dense1  = cns.Dense(input, 20) + cns.Variable(BuilderInstance <double> .Volume.From(new double[20].Populate(0.1), new Shape(20)), "bias1", true);
            var relu    = cns.Relu(dense1);
            var dense2  = cns.Dense(relu, 10) + cns.Variable(new Shape(10), "bias2", true);
            var softmax = cns.Softmax(dense2); // output

            var output = cns.PlaceHolder("y"); // ground truth
            var cost   = new SoftmaxCrossEntropy <double>(cns, softmax, output);

            var x = BuilderInstance <double> .Volume.From(new[] { 0.3, -0.5 }, new Shape(2));

            var y = BuilderInstance <double> .Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(10));

            var dico = new Dictionary <string, Volume <double> > {
                { "x", x }, { "y", y }
            };

            var count     = 0;
            var optimizer = new GradientDescentOptimizer <double>(cns, 0.01);

            using (var session = new Session <double>())
            {
                session.Differentiate(cost); // computes dCost/dW at every node of the graph

                // 2) Dummy Training (only use a single data point)
                double currentCost;
                do
                {
                    currentCost = Math.Abs(session.Run(cost, dico, false).ToArray().Sum());
                    Console.WriteLine($"cost: {currentCost}");

                    session.Run(optimizer, dico);
                    count++;
                } while (currentCost > 1e-2);

                Console.WriteLine($"{count}");

                // Forward pass with original network
                var result = session.Run(softmax, new Dictionary <string, Volume <double> > {
                    { "x", x }
                });
                Console.WriteLine("probability that x is class 0: " + result.Get(0));
            }

            // 3) Serialization
            softmax.Save("MyNetwork");

            // 4) Deserialization
            var deserialized = SerializationExtensions.Load <double>("MyNetwork", false)[0]; // first element is the model (the second element is the cost if it was saved alongside)

            using (var session = new Session <double>())
            {
                // Forward pass with deserialized network
                var result = session.Run(deserialized, new Dictionary <string, Volume <double> > {
                    { "x", x }
                });
                Console.WriteLine("probability that x is class 0: " + result.Get(0)); // This should give exactly the same result as previous network evaluation
            }

            Console.ReadLine();
        }
Code Example #6
File: RnnDemo.cs  Project: zaharPonimash/ConvNetSharp
        public void GenerateText()
        {
            this._output = SerializationExtensions.Load <float>("MyNetwork", false)[0];

            var initState = BuilderInstance <float> .Volume.SameAs(new Shape(this._stateSize, 1, 1, 1));

            Volume <float> temperature  = 1.0f; // increase this for more creativity (and more spelling mistakes)
            Volume <float> dropoutProba = 0.0f;

            var input = new List <Volume <float> >();

            initState.Clear();

            // Seed
            var inputchar = new char[this._numSteps];

            inputchar[0] = 'n';
            inputchar[1] = 'o';

            for (var i = 0; i < this._numSteps; i++)
            {
                Console.Write(inputchar[i]);
                input.Add(BuilderInstance <float> .Volume.SameAs(new Shape(this._data.Vocabulary.Count, 1, 1, 1)));
            }

            do
            {
                using (var session = new Session <float>())
                {
                    for (var i = 0; i < this._numSteps; i++)
                    {
                        input[i].Clear();
                        input[i].Set(this._data.Vocabulary.IndexOf(inputchar[i]), 0, 0, 0, 1);
                    }

                    var dico = new Dictionary <string, Volume <float> >
                    {
                        { "initState", initState },
                        { "temperature", temperature },
                        { "dropoutProba", dropoutProba }
                    };
                    for (var i = 0; i < this._numSteps; i++)
                    {
                        dico["x" + (i + 1)] = input[i];
                    }

                    var result = session.Run(this._output, dico);
                    var c3     = ToChar(3, result);
                    Console.Write(c3);

                    for (var i = 1; i < this._numSteps; i++)
                    {
                        inputchar[i - 1] = inputchar[i];
                    }

                    inputchar[this._numSteps - 1] = c3;

                    initState = session.GetVariableByName(this._output, "initState").Result.Clone(); // re-inject the updated state into the next step
                }
            } while (!Console.KeyAvailable);
        }
Code Example #7
        /// <summary>
        /// Solves y = x * W + b (CPU single version)
        /// for y = 1 and x = -2
        ///
        /// This also demonstrates how to save and load a graph
        /// </summary>
        public static void Example1()
        {
            var cns = new ConvNetSharp <float>();

            // Graph creation
            Op <float> cost;
            Op <float> fun;

            if (File.Exists("test.graphml"))
            {
                Console.WriteLine("Loading graph from disk.");
                var ops = SerializationExtensions.Load <float>("test", true);

                fun  = ops[0];
                cost = ops[1];
            }
            else
            {
                var x = cns.PlaceHolder("x");
                var y = cns.PlaceHolder("y");

                var W = cns.Variable(1.0f, "W", true);
                var b = cns.Variable(2.0f, "b", true);

                fun = x * W + b;

                cost = (fun - y) * (fun - y);
            }


            var optimizer = new AdamOptimizer <float>(cns, 0.01f, 0.9f, 0.999f, 1e-08f);

            using (var session = new Session <float>())
            {
                session.Differentiate(cost); // computes dCost/dW at every node of the graph

                float currentCost;
                do
                {
                    var dico = new Dictionary <string, Volume <float> > {
                        { "x", -2.0f }, { "y", 1.0f }
                    };

                    currentCost = session.Run(cost, dico);
                    Console.WriteLine($"cost: {currentCost}");

                    var result = session.Run(fun, dico);
                    session.Run(optimizer, dico);
                } while (currentCost > 1e-5);

                float finalW = session.GetVariableByName(fun, "W").Result;
                float finalb = session.GetVariableByName(fun, "b").Result;
                Console.WriteLine($"fun = x * {finalW} + {finalb}");

                fun.Save("test", cost);

                // Display graph
                var vm  = new ViewModel <float>(cost);
                var app = new Application();
                app.Run(new GraphControl {
                    DataContext = vm
                });
            }

            Console.ReadKey();
        }
Code Example #8
        private static void FacePresence()
        {
            var batchSize = 1000;
            int width     = 32;
            int height    = 32;

            BuilderInstance <float> .Volume = new VolumeBuilder(); // For GPU

            var imageLoader       = new ImageLoader();
            var randomImageLoader = new ImageLoader(true, 2);

            // Load Dataset - Faces
            var faces1 = LfwCropLoader.LoadDataset(@"..\..\..\Dataset\lfwcrop_grey", width, height);
            var faces2 = imageLoader.LoadDataset(@"..\..\..\Dataset\custom\faces", width, height); // dump your own face images here

            // Load Dataset - Non-faces
            var nonFaces1 = randomImageLoader.LoadDataset(@"..\..\..\Dataset\scene_categories", width, height);
            var nonFaces2 = randomImageLoader.LoadDataset(@"..\..\..\Dataset\TextureDatabase", width, height);
            var nonFaces3 = randomImageLoader.LoadDataset(@"..\..\..\Dataset\cars_brad_bg", width, height);
            var nonFaces4 = randomImageLoader.LoadDataset(@"..\..\..\Dataset\houses", width, height);
            var nonFaces5 = imageLoader.LoadDataset(@"..\..\..\Dataset\custom\non_faces", width, height); // dump your own non-face images here

            var facesDataset = new FaceDetectionDataset(width, height);

            facesDataset.TrainSet.AddRange(faces1);
            facesDataset.TrainSet.AddRange(faces2);
            facesDataset.TrainSet.AddRange(nonFaces1);
            facesDataset.TrainSet.AddRange(nonFaces2);
            facesDataset.TrainSet.AddRange(nonFaces3);
            facesDataset.TrainSet.AddRange(nonFaces4);
            facesDataset.TrainSet.AddRange(nonFaces5);

            Console.WriteLine(" Done.");
            ConvNetSharp <float> cns;

            // Model
            Op <float> softmax = null;

            if (File.Exists("FaceDetection.json"))
            {
                Console.WriteLine("Loading model from disk...");
                softmax = SerializationExtensions.Load <float>("FaceDetection", false)[0]; // first element is the model (the second element is the cost if it was saved alongside)
                cns     = softmax.Graph;                                                   // Deserialization creates its own graph that we have to use. TODO: make it simpler in ConvNetSharp
            }
            else
            {
                cns = new ConvNetSharp <float>();
            }

            var x        = cns.PlaceHolder("x");
            var dropProb = cns.PlaceHolder("dropProb");

            if (softmax == null)
            {
                // Inspired by https://github.com/PCJohn/FaceDetect
                var layer1 = cns.Relu(cns.Conv(x, 5, 5, 4, 2) + cns.Variable(new Shape(1, 1, 4, 1), "bias1", true));
                var layer2 = cns.Relu(cns.Conv(layer1, 3, 3, 16, 2) + cns.Variable(new Shape(1, 1, 16, 1), "bias2", true));
                var layer3 = cns.Relu(cns.Conv(layer2, 3, 3, 32) + cns.Variable(new Shape(1, 1, 32, 1), "bias3", true));

                var flatten = cns.Flatten(layer3);
                var dense1  = cns.Dropout(cns.Relu(cns.Dense(flatten, 600)) + cns.Variable(new Shape(1, 1, 600, 1), "bias4", true), dropProb);
                var dense2  = cns.Dense(dense1, 2) + cns.Variable(new Shape(1, 1, 2, 1), "bias5", true);
                softmax = cns.Softmax(dense2);
            }

            var y = cns.PlaceHolder("y");

            // Cost
            var cost = new SoftmaxCrossEntropy <float>(cns, softmax, y);

            // Optimizer
            var optimizer = new AdamOptimizer <float>(cns, 1e-4f, 0.9f, 0.999f, 1e-16f);

            //if (File.Exists("loss.csv"))
            //{
            //    File.Delete("loss.csv");
            //}

            Volume <float> trainingProb = 0.5f;
            Volume <float> testingProb  = 0.0f;

            // Training
            using (var session = new Session <float>())
            {
                session.Differentiate(cost); // computes dCost/dW at every node of the graph

                var    iteration = 0;
                double currentCost;
                do
                {
                    var batch  = facesDataset.GetBatch(batchSize);
                    var input  = batch.Item1;
                    var output = batch.Item2;

                    var dico = new Dictionary <string, Volume <float> > {
                        { "x", input }, { "y", output }, { "dropProb", trainingProb }
                    };


                    var stopwatch = Stopwatch.StartNew();
                    // session.Run(softmax, dico);
                    Debug.WriteLine(stopwatch.ElapsedMilliseconds);

                    currentCost = session.Run(cost, dico);
                    Console.WriteLine($"cost: {currentCost}");
                    File.AppendAllLines("loss.csv", new[] { currentCost.ToString(CultureInfo.InvariantCulture) });

                    session.Run(optimizer, dico);

                    if (iteration++ % 100 == 0)
                    {
                        // Test on a random batch of 100 pictures
                        var test = facesDataset.GetBatch(100);
                        dico = new Dictionary <string, Volume <float> > {
                            { "x", test.Item1 }, { "dropProb", testingProb }
                        };
                        var result = session.Run(softmax, dico);

                        int correct = 0;
                        for (int i = 0; i < 100; i++)
                        {
                            var class0Prob = result.Get(0, 0, 0, i);
                            var class1Prob = result.Get(0, 0, 1, i);

                            if ((test.Item3[i].IsFace && class1Prob > class0Prob) || (!test.Item3[i].IsFace && class0Prob > class1Prob))
                            {
                                correct++;
                            }
                        }

                        Console.WriteLine($"Test: {correct}%");
                        File.AppendAllLines("accuracy.csv", new[] { correct.ToString() });
                        var filename = test.Item3[0].Filename;

                        softmax.Save("FaceDetection");
                    }
                } while (currentCost > 1e-5 && !Console.KeyAvailable);

                softmax.Save("FaceDetection");
            }
        }
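
For completeness, here is a hedged sketch of single-image inference with the model saved by the training loop above. The placeholder names "x" and "dropProb", the result layout, and the class-1-means-face convention all follow the training code; the helper name IsFace and loading the model inside the method are assumptions for illustration only.

        // Hedged sketch (not in the original example): classifying one 32x32 image volume
        // with the saved model. No dropout is applied at test time.
        private static bool IsFace(Volume <float> image)
        {
            var softmax = SerializationExtensions.Load <float>("FaceDetection", false)[0]; // model only

            using (var session = new Session <float>())
            {
                var dico = new Dictionary <string, Volume <float> > {
                    { "x", image }, { "dropProb", 0.0f }
                };

                var result = session.Run(softmax, dico);
                return result.Get(0, 0, 1, 0) > result.Get(0, 0, 0, 0); // class 1 = face
            }
        }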
Code Example #9
        private static void FaceLocalization()
        {
            BuilderInstance <float> .Volume = new VolumeBuilder(); // For GPU

            // Load Dataset
            var datasetPath = @"C:\Pro\Github\FaceLocalisation\Face\Dataset\Helen"; // contains folders from helen dataset (annotation, helen_1 ,..)
            var batchSize   = 5;                                                    // my GTX 760 cannot take more...
            var dataSet     = HelenLoader.LoadDataset(datasetPath, 256, 256, 50);

            Console.WriteLine(" Done.");
            ConvNetSharp <float> cns;

            // Model
            Op <float> yhat = null;

            if (File.Exists("FaceDetection.json"))
            {
                Console.WriteLine("Loading model from disk...");
                yhat = SerializationExtensions.Load <float>("FaceDetection", false)[0]; // first element is the model (the second element is the cost if it was saved alongside)
                cns  = yhat.Graph;                                                      // Deserialization creates its own graph that we have to use. TODO: make it simpler in ConvNetSharp
            }
            else
            {
                cns = new ConvNetSharp <float>();
            }

            var x = cns.PlaceHolder("x");

            if (yhat == null)
            {
                // Inspired by http://cs231n.stanford.edu/reports/2017/pdfs/222.pdf
                var alpha  = 0.2f;
                var layer1 = cns.Dropout(cns.Pool(cns.LeakyRelu(cns.Conv(x, 3, 3, 32, 1, 1) + cns.Variable(new Shape(1, 1, 32, 1), "bias1", true), alpha), 2, 2, 0, 0, 2, 2), 0.1f);
                var layer2 = cns.Pool(cns.LeakyRelu(cns.Conv(layer1, 3, 3, 64, 1, 1) + cns.Variable(new Shape(1, 1, 64, 1), "bias2", true), alpha), 2, 2, 0, 0, 2, 2);
                var layer3 = cns.Dropout(cns.Pool(cns.LeakyRelu(cns.Conv(layer2, 3, 3, 128, 1, 1) + cns.Variable(new Shape(1, 1, 128, 1), "bias3", true), alpha), 2, 2, 0, 0, 2, 2),
                                         0.1f);
                var layer4 = cns.Pool(cns.LeakyRelu(cns.Conv(layer3, 3, 3, 64, 1, 1) + cns.Variable(new Shape(1, 1, 64, 1), "bias4", true), alpha), 2, 2, 0, 0, 2, 2);
                var layer5 = cns.Pool(cns.LeakyRelu(cns.Conv(layer4, 3, 3, 16, 1, 1) + cns.Variable(new Shape(1, 1, 16, 1), "bias5", true), alpha), 2, 2, 0, 0, 2, 2);

                var flatten = cns.Flatten(layer5);
                var dense1  = cns.Conv(flatten, 1, 1, 128);
                yhat = cns.Conv(dense1, 1, 1, 4);

                //x.Evaluated += (sender, args) => { }; // I use this to place a break point and check Volume dimensions / debug
            }

            var y = cns.PlaceHolder("y");

            // Cost: sum of squared errors over the 4 predicted bounding-box coordinates
            var cost = cns.Sum((yhat - y) * (yhat - y), Shape.From(1));

            // Optimizer
            var optimizer = new AdamOptimizer <float>(cns, 0.01f, 0.9f, 0.999f, 1e-08f);

            if (File.Exists("loss.csv"))
            {
                File.Delete("loss.csv");
            }

            // Training
            using (var session = new Session <float>())
            {
                session.Differentiate(cost); // computes dCost/dW at every node of the graph

                var    iteration = 0;
                double currentCost;
                do
                {
                    var batch  = dataSet.GetBatch(batchSize);
                    var input  = batch.Item1;
                    var output = batch.Item2;

                    var dico = new Dictionary <string, Volume <float> > {
                        { "x", input }, { "y", output }
                    };

                    currentCost = session.Run(cost, dico);
                    Console.WriteLine($"cost: {currentCost}");
                    File.AppendAllLines("loss.csv", new[] { currentCost.ToString(CultureInfo.InvariantCulture) });

                    session.Run(optimizer, dico);

                    if (iteration++ % 100 == 0)
                    {
                        // Test on a random picture
                        var test = dataSet.GetBatch(1);
                        dico = new Dictionary <string, Volume <float> > {
                            { "x", test.Item1 }
                        };
                        var result = session.Run(yhat, dico);

                        var image = (Bitmap)Image.FromFile(test.Item3[0].Filename);
                        BitmapTool.DrawBoundingBox(image, new BoundingBox {
                            x1 = result.Get(0), y1 = result.Get(1), x2 = result.Get(2), y2 = result.Get(3)
                        }, Color.Blue);
                        BitmapTool.DrawBoundingBox(image, test.Item3[0].BoundingBox, Color.Green); // correct answer
                        image.Save($"iteration_{iteration}.jpg");

                        yhat.Save("FaceDetection");
                    }
                } while (currentCost > 1e-5 && !Console.KeyAvailable);

                yhat.Save("FaceDetection");
            }
        }