public void TestMethod4()
        {
            Debug.WriteLine("[TestMethod4]");

            EmotionDetectionAsset eda = new EmotionDetectionAsset();

            // http://stackoverflow.com/questions/13605013/pass-bitmap-from-c-sharp-to-c
            // https://msdn.microsoft.com/en-us/library/vs/alm/dd183402(v=vs.85).aspx
            //

            eda.Initialize(@".", "shape_predictor_68_face_landmarks.dat");

            eda.ParseRules(File.ReadAllLines(@".\FURIA Fuzzy Logic Rules.txt"));

            if (eda.ProcessImage((Bitmap)Bitmap.FromFile(@".\Kiavash1.jpg")))
            {
                Debug.WriteLine(String.Format("{0} Face(s detected.", eda.Faces.Count));

                if (eda.ProcessFaces())
                {
                    Int32 i = 1;
                    foreach (KeyValuePair<RECT, List<POINT>> kvp in eda.Faces)
                    {
                        Debug.WriteLine(String.Format("{0} Landmark(s) detected in Face {1} at {2}.", kvp.Value.Count, i++, kvp.Key));
                    }

                    if (eda.ProcessLandmarks())
                    {
                        //! Not coded yet.
                    }
                }
            }
        }
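The ProcessLandmarks() branch above is still empty. As a minimal sketch of what it could report, using only members shown elsewhere in these examples (eda.Emotions appears in TestMethod3); per-face emotion scores are asset-specific and omitted:

                    if (eda.ProcessLandmarks())
                    {
                        // Sketch: report how many emotions the parsed rules define;
                        // querying per-face emotion scores is asset-specific.
                        Debug.WriteLine(String.Format("{0} Emotion(s) defined in the rules.", eda.Emotions.Count));
                    }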
Example #2
void Start()
{
    // Enumerate the available webcams and print them for debugging purposes.
    WebCamDevice[] devices = WebCamTexture.devices;
    for (int i = 0; i < devices.Length; i++)
    {
        print("Webcam available: " + devices[i].name);
    }

    // Create a 640x480 WebCamTexture, show it in the UI and start capturing.
    webcam = new WebCamTexture(640, 480);
    rawimage.texture = webcam;
    rawimage.material.mainTexture = webcam;
    webcam.Play();

    // Buffer for one ARGB video frame.
    data = new Color32[webcam.width * webcam.height];

    // Create and initialize the EmotionDetectionAsset, then load the FURIA rules.
    eda = new EmotionDetectionAsset();
    eda.Bridge = new dlib_csharp.Bridge();
    eda.Initialize(@"Assets\", database);

    String[] lines = File.ReadAllLines(furia);
    eda.ParseRules(lines);

    Debug.Log("Emotion detection Ready for Use");
}
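Start() only sets up capture; a companion Update() is needed to pull frames each tick. A minimal sketch, using Unity's standard WebCamTexture.didUpdateThisFrame and GetPixels32(Color32[]) APIs to fill the data buffer allocated above:

void Update()
{
    // Only copy pixels when the webcam actually produced a new frame.
    if (webcam.didUpdateThisFrame)
    {
        webcam.GetPixels32(data);
        // 'data' now holds the current frame; see Example #4 below for a
        // sketch of running the detection chain on it.
    }
}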
        public void TestMethod3()
        {
            Debug.WriteLine("[TestMethod3]");

            EmotionDetectionAsset eda = new EmotionDetectionAsset();

            if (eda.ParseRules(File.ReadAllLines(@"..\..\..\database\FURIA Fuzzy Logic Rules.txt")))
            {
                // Output:
                //Emotions = Happy(CF = 0.97)
                //(V30 in [159.608, 160.424, inf, inf])
                //(V35 in [30.0655, 30.2536, inf, inf])

                //eda.CheckValues(eda.expressions.Skip(0).First(), "V30", new Double[] { 20, 160, 180 });
                //eda.CheckValues(eda.expressions.Skip(0).First(), "V35", new Double[] { 20, 30.0655, 30.0656, 30.1, 30.2535, 30.2536, 32 });
            }

            Debug.Print("{0} Emotions found in Rules", eda.Emotions.Count);
        }
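The commented output above uses FURIA's fuzzy interval notation [a, b, c, d]: membership rises linearly from a to b, equals 1 between b and c, and falls linearly from c to d, with inf marking an unbounded (crisp) side. A small illustrative helper, not part of the asset's API, that evaluates such a trapezoid:

        static Double Trapezoid(Double a, Double b, Double c, Double d, Double v)
        {
            if (v < a || v > d) return 0.0;                                   // outside the support
            if (v < b) return Double.IsInfinity(a) ? 1.0 : (v - a) / (b - a); // rising edge
            if (v <= c) return 1.0;                                          // core
            return Double.IsInfinity(d) ? 1.0 : (d - v) / (d - c);           // falling edge
        }

For V30 in [159.608, 160.424, inf, inf], the commented CheckValues probes would give 0 at 20, roughly 0.48 at 160, and 1 at 180.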
Example #4
    /// <summary>
    /// Use this for initialization.
    /// </summary>
    void Start()
    {
        Debug.Log("체크1" + furia);
        Debug.Log("체크2" + database);
        emo_txt_read();

        em_t    = GameObject.Find("EM_Text");
        em_s    = GameObject.Find("EM_St");
        btns    = GameObject.Find("Bottom_btns");
        picture = GameObject.Find("Picture_btns");



        /*
         * string date = DateTime.Now.ToString("yyyy_MM_dd_HH");
         * Debug.Log(date);
         * if (date == "2020_02_11_20")
         * {
         *  Time.timeScale = 0;
         * }
         */
        //1) Enumerate webcams
        //
        WebCamDevice[] devices = WebCamTexture.devices;

        //2) for debugging purposes, prints available devices to the console
        //
        for (int i = 0; i < devices.Length; i++)
        {
            print("Webcam available: " + devices[i].name);
        }

        //! http://answers.unity3d.com/questions/909967/getting-a-web-cam-to-play-on-ui-texture-image.html
        //WebCamTexture webcam = new WebCamTexture();
        //rawimage.texture = webcam;
        //rawimage.material.mainTexture = webcam;
        //webcamTexture.Play();

        //! https://answers.unity3d.com/questions/1101792/how-to-post-process-a-webcamtexture-in-realtime.html
        //3) Create a WebCamTexture (its size should not be too big)
        webcam = new WebCamTexture(640, 480); // (Screen.width, Screen.height)

        //4) Assign the texture to an image in the UI to see the output (these two lines are not
        //   necessary if you do not want to show the webcam video, but they are handy for debugging)
        rawimage.texture = webcam;
        rawimage.material.mainTexture = webcam;

        //5) Start capturing the webcam.
        //
        webcam.Play();

        //6) Optionally render the processed output via a Texture2D (left disabled here).
        //output = new Texture2D(webcam.width, webcam.height);
        //GetComponent<Renderer>().material.mainTexture = output;

        //7) Create an array to hold the ARGB data of a webcam video frame texture.
        //
        data = new Color32[webcam.width * webcam.height];

        //8) Create an EmotionDetectionAsset
        //
        //   The asset will load the appropriate dlibwrapper depending on the process architecture and OS.
        //   Note that during development Unity tends to use the 32-bit version, while during playing
        //   it uses either the 32-bit or 64-bit version depending on the OS.
        //
        eda = new EmotionDetectionAsset();

        //9) Assign a bridge (no interfaces are required, but ILog is convenient during development).
        //
        eda.Bridge = new dlib_csharp.Bridge();

        //10) Init the EmotionDetectionAsset.
        //    Note this takes a couple of seconds as it needs to read/parse the shape_predictor_68_face_landmarks database.
        //
        eda.Initialize(@"Assets", database);

        //11) Read the fuzzy logic rules and parse them.
        //
        String[] lines = File.ReadAllLines(furia);
        eda.ParseRules(lines);

        Debug.Log("Emotion detection Ready for Use");
    }
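The Start() above only initializes the pipeline; the per-frame detection itself is not shown. A sketch of that remaining step, reusing the chain from TestMethod4 (ProcessImage, ProcessFaces, ProcessLandmarks); the ToBitmap helper converting the captured Color32[] into the Bitmap overload used in the tests is hypothetical and platform-dependent:

    //12) Per frame: capture the pixels and run the detection chain.
    void Update()
    {
        if (!webcam.didUpdateThisFrame) return;

        webcam.GetPixels32(data);

        // ToBitmap is a hypothetical conversion helper; System.Drawing is not
        // available on every Unity platform.
        if (eda.ProcessImage(ToBitmap(data, webcam.width, webcam.height)) &&
            eda.ProcessFaces() &&
            eda.ProcessLandmarks())
        {
            Debug.Log(eda.Faces.Count + " face(s) processed this frame.");
        }
    }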