Example #1
    public CvFrame GetFaceDetectionResults()
    {
        CvFrame cvf;
        int     detectedFaceCount = 0;

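        // Pin _faces so the GC cannot move the array while the native code writes detected circles into it.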
        unsafe
        {
            fixed(CvCircle *outFaces = _faces)
            {
                cvf = OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);
            }
        }

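        // Rescale from detection-image pixels back to camera pixels (DetectionDownScale), normalize to [0..1],
        // and flip Y: OpenCV's origin is top-left, Unity's viewport origin is bottom-left.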
        NormalizedFacePositions.Clear();
        for (int i = 0; i < detectedFaceCount; i++)
        {
            NormalizedFacePositions.Add(new Vector2((_faces[i].X * DetectionDownScale) / CameraResolution.x, 1f - ((_faces[i].Y * DetectionDownScale) / CameraResolution.y)));
        }

        for (int i = 0; i < NormalizedFacePositions.Count; i++)
        {
            float x = NormalizedFacePositions[i].x;
            float y = NormalizedFacePositions[i].y;
            Debug.LogFormat("For face # {0} x = {1} and y = {2}", i, x, y); // output the face positions
        }
        return cvf;
    }
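Every snippet on this page reaches OpenCV through a P/Invoke layer that the examples themselves never show. As a point of reference, a minimal sketch of the declarations Example #1 assumes might look like the following; the plugin name, the struct layouts, and the CvFrame fields are assumptions, not code from these projects:

    using System.Runtime.InteropServices;

    // Sketch only: the actual field layout is whatever the native plugin defines.
    [StructLayout(LayoutKind.Sequential)]
    public struct CvCircle
    {
        public int X, Y, Radius; // center and radius of a detected face, in detection-image pixels
    }

    // Placeholder for the frame handle Example #1 returns; its real definition is not shown in these snippets.
    [StructLayout(LayoutKind.Sequential)]
    public struct CvFrame
    {
        public int Width, Height; // assumed fields
    }

    internal static class OpenCVInterop
    {
        // Writes up to maxOutputFaceCount circles into outFaces and reports how many
        // faces were actually found through outDetectedFaceCount.
        [DllImport("UnityOpenCVPlugin")] // plugin name is an assumption
        internal static extern unsafe CvFrame Detect(CvCircle *outFaces, int maxOutputFaceCount, ref int outDetectedFaceCount);
    }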
Example #2
    /*
     * void OnApplicationQuit()
     * {
     *  if (_ready)
     *  {
     *      Debug.LogWarningFormat("{0}: before closing application", GetType());
     *      OpenCVInterop.Close();
     *      Debug.LogWarningFormat("{0}: after closing application", GetType());
     *  }
     * }*/


    void Update()
    {
        if (!_ready)
        {
            return;
        }

        int detectedObjectCount = 1; // this Detect overload fills a single object, so the count is fixed at one

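        // Pin the object buffer so the native plugin can write position and color into it.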
        unsafe
        {
            fixed(CoOrdinates_Color *outObj = _object)
            {
                OpenCVInterop.Detect(outObj);
            }
        }
        NormalizedObjPositions.Clear();
        objectColor.Clear();
        for (int i = 0; i < detectedObjectCount; i++)
        {
            float objX = (_object[i].X * DetectionDownScale) / CameraResolution.x;
            float objY = 1f - ((_object[i].Y * DetectionDownScale) / CameraResolution.y);

            NormalizedObjPositions.Add(new Vector2(objX, objY));
            objectColor.Add(new Vector3(_object[i].R, _object[i].G, _object[i].B));
        }
    }
Example #3
    void FaceDetection()
    {
        int detectedFaceCount = 0;

        unsafe
        {
            fixed(CvCircle *outFaces = _faces)
            {
                OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);
            }
        }

        NormalizedFacePositions.Clear();
        for (int i = 0; i < detectedFaceCount; i++)
        {
            NormalizedFacePositions.Add(new Vector2((_faces[i].X * DetectionDownScale) / CameraResolution.x, 1f - ((_faces[i].Y * DetectionDownScale) / CameraResolution.y)));
        }
    }
Example #4
    void Update()
    {
        if (!_ready)
        {
            return;
        }

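        // Pin the single TransformData struct; the native tracker fills it in place.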
        unsafe
        {
            fixed(TransformData *outFaces = &faces)
            {
                OpenCVInterop.Detect(outFaces);
            }
        }

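        // Unpack the pose: translation plus what the field names suggest are the rotation's up and forward axes.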
        trans = new Vector3(faces.tX, faces.tY, faces.tZ);
        rot_u = new Vector3(faces.ruX, faces.ruY, faces.ruZ);
        rot_f = new Vector3(faces.rfX, faces.rfY, faces.rfZ);
    }
Example #5
    private void DetectBodies()
    {
        // Unsafe codeblock to retrieve data from OpenCV
        int detectedBodyCount = 0;

        unsafe
        {
            fixed(CvRectangle *outBodies = _bodies)
            {
                OpenCVInterop.Detect(outBodies, _maxTrackCount, ref detectedBodyCount);
            }
        }

        // Record the Normalized Tracking Positions
        NormalizedBodyPositions.Clear();
        for (int i = 0; i < detectedBodyCount; i++)
        {
            NormalizedBodyPositions.Add(new Vector2((_bodies[i].X * DetectionDownScale) / CameraResolution.x, 1f - ((_bodies[i].Y * DetectionDownScale) / CameraResolution.y)));
        }
        patientBody = _tracking[0]; // take the first tracked entry as the patient
    }
Example #6
    // Update is called once per frame
    void Update()
    {
        if (!_ready)
        {
            return;
        }

        int detectedFaceCount = 0;

        unsafe
        {
            fixed(CvCircle *outFaces = _faces)
            {
                OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);
            }
        }
        Debug.Log("detectedFaceCount:" + detectedFaceCount);
        NormalizedFacePositions.Clear();
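        // The 0.5 offset centers the normalized X around zero instead of the left screen edge.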
        for (int i = 0; i < detectedFaceCount; i++)
        {
            NormalizedFacePositions.Add(new Vector2((_faces[i].X * DetectionDownScale) / CameraResolution.x - 0.5f, 1f - ((_faces[i].Y * DetectionDownScale) / CameraResolution.y)));
        }
    }
Example #7
    void Update()
    {
        if (!_ready)
        {
            return;
        }

        int detectedFaceCount = 0;

        unsafe
        {
            fixed(CvCircle *outFaces = _faces)
            {
                OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);
            }
        }

        NormalizedFacePositions.Clear();
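        // The circle radius doubles as a rough depth estimate (a larger face is closer), shaped by DepthDevider and DepthAdder.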
        for (int i = 0; i < detectedFaceCount; i++)
        {
            NormalizedFacePositions.Add(new Vector3((_faces[i].X * DetectionDownScale) / CameraResolution.x, 1f - ((_faces[i].Y * DetectionDownScale) / CameraResolution.y), 1f - (_faces[i].Radius * DetectionDownScale) / DepthDevider + DepthAdder));
        }
    }
Example #8
    // Update is called once per frame
    void Update()
    {
        //OpenCVInterop.TestShow();

        //imgData = OpenCVInterop.Detect("yolo-voc.cfg", "yolo-voc.weights", "data/voc.names", "camera", "", 0.200000003F, false);
        ////Texture2D tex = new Texture2D(640, 480, TextureFormat.RGB24, false);
        //tex.LoadRawTextureData(imgData);
        //GetComponent<MeshRenderer>().material.mainTexture = tex;
        //GetComponent<MeshRenderer>().material.SetTextureScale("_MainTex", new Vector2(-1, 1)); // flip texture horizontally, due to memcpy little endian
        //tex.Apply();

        //Debug.LogError(imgData.Length);

        //IntPtr ptr = OpenCVInterop.Detect("yolo-voc.cfg", "yolo-voc.weights", "data/voc.names", "camera", "", 0.200000003F, false);
        //Marshal.Copy(ptr, imgData, 0, 640 * 480 * 3);

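        // Active path: the native detector processes the camera feed and writes the raw frame bytes into imgData.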
        OpenCVInterop.Detect(imgData, "camera", 0.200000003F, false);
        //for (var i = 0; i < imgData.Length; i += 3)
        //{
        //    var color = new Color(imgData[i + 0], imgData[i + 1], imgData[i + 2], 0);
        //    colorData[i / 3] = color;
        //}
        //tex.SetPixels(colorData);
        tex.LoadRawTextureData(imgData);
        tex.Apply();

        //unsafe
        //{
        //    byte* ptr = (byte*)OpenCVInterop.Detect("yolo-voc.cfg", "yolo-voc.weights", "data/voc.names", "camera", "", 0.200000003F, false);

        //    for (var i = 0; i < 640*480*3; i += 3)
        //    {
        //        var color = new Color((float)ptr[i + 0], (float)ptr[i + 1], (float)ptr[i + 2], 255f);
        //        colorData[i / 3] = color;
        //    }
        //    tex.SetPixels(colorData);

        //    //int offset = 0;
        //    //for (int i = 0; i < 640; i++)
        //    //{
        //    //    for (int j = 0; j < 480; j++)
        //    //    {

        //    //        float b = (float)ptr[offset + 0] / 255.0f;
        //    //        float g = (float)ptr[offset + 1] / 255.0f;
        //    //        float r = (float)ptr[offset + 2] / 255.0f;
        //    //        float a = 255.0f;
        //    //        offset += 4;

        //    //        UnityEngine.Color color = new UnityEngine.Color(r, g, b, a);
        //    //        tex.SetPixel(j, 480 - i, color);
        //    //    }
        //    //}
        //}

        //StartCoroutine(UpdateTexture(imgData));
        //Debug.Log(imgData.Length);
        //for (var i = 0; i < imgData.Length; i += 3)
        //{
        //    var color = new Color(imgData[i + 0], imgData[i + 1], imgData[i + 2], 255);
        //    colorData[i / 3] = color;
        //}
        //tex.SetPixels(colorData);
    }
Example #9
    // Update is called once per frame
    void Update()
    {
        if (Input.GetMouseButtonDown(0))
        {
            SaveImage();

            //Debug.Log("Show Origin image.");
            //OpenCVInterop.Show();

            if (!_ready)
            {
                return;
            }

            int detectedFaceCount = 0;
            // Hoisted out of the fixed block so the result text at the bottom of Update() can reach it.
            string[] labels = { "Angry", "Disgust", "Fear", "Happy", "Sad", "Surprise", "Neutral" };
            unsafe
            {
                fixed(CvCircle *outFaces = _faces)
                {
                    Debug.Log("Detect Start.");
                    //outFaces
                    OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);

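                    // Note: everything below runs while _faces is still pinned; only the Detect call actually needs the fixed scope.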
                    //
                    //Tensorflow code
                    //
                    string PATH      = "cropimg.png";                                        // where the cropped face image is stored
                    var    testImage = Resources.Load(PATH, typeof(Texture2D)) as Texture2D; // load the image

                    var file = "./Assets/cropimg.png";

                    // Load the frozen TensorFlow graph
                    TFSession.Runner runner;

                    TextAsset graphModel = Resources.Load("tf_model_191203_05") as TextAsset;
                    var       graph      = new TFGraph();

                    //graph.Import(new TFBuffer(graphModel.bytes));
                    graph.Import(graphModel.bytes);
                    TFSession session = new TFSession(graph);

                    Debug.Log("loaded freezed graph");

                    // Configure input and output
                    //int inputSize = 48;
                    //Texture2D img_input = testImage;
                    //TFTensor input_tensor = TransformInput(img_input.GetPixels32(), inputSize, inputSize);
                    //SetScreen(testImage.width, testImage.height, rawimage, testImage);

                    var tensor = CreateTensorFromImageData(file);

                    runner = session.GetRunner();
                    runner.AddInput(graph["input_1"][0], tensor);
                    runner.Fetch(graph["predictions/Softmax"][0]);

                    Debug.Log("fetch finish");

                    // Run the graph
                    float[,] results = runner.Run()[0].GetValue() as float[,];

                    Debug.Log("run");

                    float output = 0.0f;


                    for (int i = 0; i < 7; i++)
                    {
                        output = results[0, i];
                        Debug.Log(labels[i] + ":" + output);
                        percent[i] = output * 100;

                        if (output >= result_rate)
                        {
                            result_rate  = output;
                            result_label = i;
                        }
                    }
                }
            }

            webcamTexture.Stop();
            byte[] byteArray = File.ReadAllBytes(@"C:\Users\dqf96\Desktop\NewUnityProject - 복사본\Assets\cropimg.png");
            //create a texture and load byte array to it
            // Texture size does not matter
            Texture2D sampleTexture = new Texture2D(2, 2);
            // the size of the texture will be replaced by image size
            bool isLoaded = sampleTexture.LoadImage(byteArray);
            // apply this texture as per requirement on an image or material
            GameObject image = GameObject.Find("RawImage");
            if (isLoaded)
            {
                image.GetComponent <RawImage>().texture = sampleTexture;
            }

            // Display the results on screen
            slider1         = GameObject.Find("Canvas1").transform.Find("Slider1").GetComponent <Slider>();
            slider1.value   = percent[0];
            textAsset1      = GameObject.Find("Canvas1").transform.Find("result1").GetComponent <Text>();
            textAsset1.text = percent[0] + "%";

            slider2         = GameObject.Find("Canvas1").transform.Find("Slider2").GetComponent <Slider>();
            slider2.value   = percent[1];
            textAsset2      = GameObject.Find("Canvas1").transform.Find("result2").GetComponent <Text>();
            textAsset2.text = percent[1] + "%";

            slider3         = GameObject.Find("Canvas1").transform.Find("Slider3").GetComponent <Slider>();
            slider3.value   = percent[2];
            textAsset3      = GameObject.Find("Canvas1").transform.Find("result3").GetComponent <Text>();
            textAsset3.text = percent[2] + "%";

            slider4         = GameObject.Find("Canvas1").transform.Find("Slider4").GetComponent <Slider>();
            slider4.value   = percent[3];
            textAsset4      = GameObject.Find("Canvas1").transform.Find("result4").GetComponent <Text>();
            textAsset4.text = percent[3] + "%";

            slider5         = GameObject.Find("Canvas1").transform.Find("Slider5").GetComponent <Slider>();
            slider5.value   = percent[4];
            textAsset5      = GameObject.Find("Canvas1").transform.Find("result5").GetComponent <Text>();
            textAsset5.text = percent[4] + "%";

            slider6         = GameObject.Find("Canvas1").transform.Find("Slider6").GetComponent <Slider>();
            slider6.value   = percent[5];
            textAsset6      = GameObject.Find("Canvas1").transform.Find("result6").GetComponent <Text>();
            textAsset6.text = percent[5] + "%";

            slider7         = GameObject.Find("Canvas1").transform.Find("Slider7").GetComponent <Slider>();
            slider7.value   = percent[6];
            textAsset7      = GameObject.Find("Canvas1").transform.Find("result7").GetComponent <Text>();
            textAsset7.text = percent[6] + "%";

            textAsset      = GameObject.Find("Canvas1").transform.Find("result").GetComponent <Text>();
            textAsset.text = labels[result_label] + ":" + percent[result_label] + "%";
        }
    }
Example #10
    void Update()
    {
        if (activate)
        {
            if (!_ready)
            {
                return;
            }

            int detectedFaceCount = 0;
            unsafe
            {
                fixed(CvCircle *outFaces = _faces)
                {
                    OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);
                }
            }
            NormalizedFacePositions.Clear();
            int max = 0;
            int num = -1;
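            // Keep only the largest detected face; "max" tracks the biggest radius seen so far.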

            if (detectedFaceCount == 0 || oldPosition == null)
            {
                reset = true;
            }
            else
            {
                reset = false;
            }

            for (int i = 0; i < detectedFaceCount; i++)
            {
                NormalizedFacePositions.Add(new Vector2((_faces[i].X * DetectionDownScale) / CameraResolution.x, 1f - ((_faces[i].Y * DetectionDownScale) / CameraResolution.y)));
                if (max < _faces[i].Radius)
                {
                    if (!reset)
                    {
                        // a first input filter on the detected radius
                        float distance = _faces[i].Radius;
                        if (distance < 100.0f && distance >= 0.5f)
                        {
                            max         = _faces[i].Radius;
                            positions   = NormalizedFacePositions[NormalizedFacePositions.Count - 1];
                            positions.z = max;
                            num         = i;
                        }
                    }
                    else
                    {
                        max         = _faces[i].Radius;
                        positions   = NormalizedFacePositions[NormalizedFacePositions.Count - 1];
                        positions.z = max;
                        num         = i;
                    }
                }
            }
            taille = max;
            if (num < 0)
            {
                detectedFaceCount = 0;
            }
            else
            {
                oldPosition = new Vector2(positions.x, positions.y);
            }
        }
    }