Example #1
        // end of camera 4 setup
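
The ProcessFrame method below references several class-level fields that are not part of this excerpt: the cascade classifier, the two ConvNetSharp networks, the label arrays, and the per-face score buffers. Their real declarations are not shown here; the following is only a minimal sketch, with names taken from the method and types inferred from how each field is used.

        // Assumed declarations -- types inferred from usage, not part of the original example
        private CascadeClassifier cascade;            // Haar cascade used by DetectMultiScale
        private Image<Gray, byte> result;             // cropped 48x48 face fed to the networks
        private Net<double> fernet;                   // facial-expression network (7 classes)
        private Net<double> frnet;                    // face-recognition network
        private double[] net_output = new double[7];  // softmax confidence per emotion label
        private string[] emotion_labels;              // one text label per emotion class, in network output order
        private string[] fr_labels;                   // one name per known person
        private string emotionstring;                 // label of the most recent emotion prediction
        private double[,] val_net, val_ang, val_dis,  // per-face score history; assumed to be
                          val_fea, val_hap, val_sad,  // initialised to negative values so an
                          val_sur;                    // unused slot can be found with "< 0"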

        private void ProcessFrame(Image<Bgr, Byte> imageproc, int cameranumber)
        {
            var counter   = 0;                                           // next free column in the per-face score arrays
            var numface   = 0;                                           // number of faces detected in this frame
            var status    = 0;                                           // set to 1 once the current face's scores are stored
            var dataShape = new ConvNetSharp.Volume.Shape(48, 48, 1, 1); // 48x48 grayscale input, batch size 1
            var data      = new double[dataShape.TotalLength];

            /* Check to see that there was a frame collected */
            if (imageproc != null)
            {
                var grayframe = imageproc.Convert<Gray, byte>();
                var faces     = cascade.DetectMultiScale(grayframe, 1.1, 10, System.Drawing.Size.Empty); // the actual face detection happens here

                foreach (var face in faces)
                {
                    numface = numface + 1;
                    imageproc.Draw(face, new Bgr(System.Drawing.Color.Blue), 3);                       // each detected face is highlighted by drawing a box around it

                    result = imageproc.Copy(face).Convert<Gray, byte>().Resize(48, 48, Inter.Linear); // the face whose expression will be recognized

                    // get the underlying Mat of the cropped face image
                    Mat matImage = result.Mat;

                    // create volume and fill volume with pixels
                    var emotion = BuilderInstance<double>.Volume.From(data, dataShape);

                    for (var i = 0; i < 48; i++)
                    {
                        for (var j = 0; j < 48; j++)
                        {
                            emotion.Set(i, j, 0, MatExtension.GetValue(matImage, i, j));
                        }
                    }

                    // feed both networks with the face volume (Forward must run before GetPrediction)
                    var results = fernet.Forward(emotion);
                    var c       = fernet.GetPrediction();

                    var frresults = frnet.Forward(emotion);
                    var d         = frnet.GetPrediction();

                    // access the softmax layer of the expression network
                    var softmaxLayer = fernet.Layers[fernet.Layers.Count - 1] as SoftmaxLayer;
                    var activation   = softmaxLayer.OutputActivation;
                    var N            = activation.Shape.GetDimension(3);
                    var C            = activation.Shape.GetDimension(2);

                    // read the confidence level of every emotion label from the softmax output
                    for (var k = 0; k < C; k++)
                    {
                        net_output[k] = Math.Round(activation.Get(0, 0, k, 0) * 100); // 0-based indices into the (1, 1, C, N) activation
                    }

                    // display prediction result
                    // emotion prediction: choose the label and box colour that match the predicted class
                    var emotionColors = new[]
                    {
                        System.Drawing.Color.Blue,    // 0
                        System.Drawing.Color.Red,     // 1
                        System.Drawing.Color.Yellow,  // 2
                        System.Drawing.Color.Green,   // 3
                        System.Drawing.Color.HotPink, // 4
                        System.Drawing.Color.Orange,  // 5
                        System.Drawing.Color.Purple   // 6
                    };

                    if (c[0] >= 0 && c[0] <= 6)
                    {
                        emotionstring = emotion_labels[c[0]];
                        imageproc.Draw(face, new Bgr(emotionColors[c[0]]), 3);
                    }

                    // face recognition prediction: overlay "name-expression" above the face
                    if (d[0] >= 0 && d[0] <= 2)
                    {
                        CvInvoke.PutText(imageproc, fr_labels[d[0]] + "-" + emotionstring, face.Location, FontFace.HersheyComplex, 0.75, new MCvScalar(0, 255, 0));
                    }

                    if (d[0] == 2)
                    {
                        //updatearray(status, numface, counter);
                        // update array: store this face's confidence levels in the first column that is still unused (negative)
                        while (status == 0)
                        {
                            if (val_net[numface - 1, counter] < 0)
                            {
                                val_net[numface - 1, counter] = net_output[0];
                                val_ang[numface - 1, counter] = net_output[1];
                                val_dis[numface - 1, counter] = net_output[2];
                                val_fea[numface - 1, counter] = net_output[3];
                                val_hap[numface - 1, counter] = net_output[4];
                                val_sad[numface - 1, counter] = net_output[5];
                                val_sur[numface - 1, counter] = net_output[6];
                                status = 1;
                            }
                            else
                            {
                                counter = counter + 1; // column already used, try the next one
                            }
                        }

                        chart1(counter);
                    }
                }
            }



            // display the processed frame in the GUI
            if (cameranumber == 1)
            {
                Camera1.Source = BitmapSourceConvert.ToBitmapSource(imageproc);
            }
            else if (cameranumber == 2)
            {
                Camera2.Source = BitmapSourceConvert.ToBitmapSource(imageproc);
            }
            else if (cameranumber == 3)
            {
                Camera3.Source = BitmapSourceConvert.ToBitmapSource(imageproc);
            }
            else if (cameranumber == 4)
            {
                Camera4.Source = BitmapSourceConvert.ToBitmapSource(imageproc);
            }
        }
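
MatExtension.GetValue, used above to copy the resized face into the ConvNetSharp volume, is not part of the Emgu CV API; it has to be supplied as a small helper that reads a single pixel out of a Mat. A minimal sketch, assuming the Mat holds contiguous single-channel 8-bit data:

        // Hypothetical helper -- not part of Emgu CV; assumes a contiguous CV_8UC1 Mat
        public static class MatExtension
        {
            public static double GetValue(Mat mat, int row, int col)
            {
                // copy one byte out of the unmanaged pixel buffer at (row, col)
                var pixel = new byte[1];
                System.Runtime.InteropServices.Marshal.Copy(
                    mat.DataPointer + (row * mat.Cols + col) * mat.ElementSize,
                    pixel, 0, 1);
                return pixel[0];
            }
        }

The helper returns the raw 0-255 grey level, which is what the excerpt feeds to the networks; if the models were trained on normalised input, the value would need to be divided by 255 before being set into the volume.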