Example #1
    // Start is called before the first frame update
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            else if (result == -3)
            {
                Debug.LogWarningFormat("[{0}] First Frame Empty.", GetType());
            }
            else if (result == -4)
            {
                Debug.LogWarningFormat("[{0}] No ROI.", GetType());
            }
            return;
        }

        CameraResolution            = new Vector2(camWidth, camHeight);
        _tracking                   = new CvRectangle[_maxTrackingCount];
        NormalizedTrackingPositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
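
All of the examples on this page call into a native plugin through a P/Invoke wrapper that the listing never shows. The sketch below illustrates what such a wrapper typically looks like; the DLL name ("OpenCVPlugin"), the SetScale parameter type, and the CvRectangle field layout are assumptions that vary between projects (CvCircle's X/Y/Radius fields are implied by Example #4 below).

    using System.Runtime.InteropServices;

    // Hypothetical P/Invoke wrapper; entry points must match the C++ plugin.
    internal static class OpenCVInterop
    {
        [DllImport("OpenCVPlugin")]
        internal static extern int Init(ref int outCameraWidth, ref int outCameraHeight);

        [DllImport("OpenCVPlugin")]
        internal static extern void SetScale(float downscale);
    }

    // Blittable structs mirroring the plugin's detection results.
    [StructLayout(LayoutKind.Sequential, Pack = 1)]
    public struct CvCircle { public int X, Y, Radius; }

    [StructLayout(LayoutKind.Sequential, Pack = 1)]
    public struct CvRectangle { public int X, Y, Width, Height; }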
Example #2
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }

            return;
        }

        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector3>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #3
    private bool Initialize()
    {
        cameraState = CameraState.connecting;
        //Debug.Log("Initialize OpenCV Face Detection");
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            lastError   = -4;
            cameraState = CameraState.disconnected;
            return false;
        }
        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        lastError   = 0;
        cameraState = CameraState.connected;
        return true;
    }
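
Because this variant returns a bool and records lastError and cameraState, a caller can retry cleanly. A minimal usage sketch (assumed, not part of the original example; it relies on the usual using System.Collections; import):

    private IEnumerator ConnectRoutine()
    {
        // Keep retrying until the camera connects; Initialize() resets
        // cameraState and lastError on each attempt.
        while (!Initialize())
        {
            Debug.LogFormat("[{0}] Init failed (lastError = {1}), retrying...", GetType(), lastError);
            yield return new WaitForSeconds(1f);
        }
    }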
Example #4
    void ThreadedWork()
    {
        _threadRunning = true;
        bool workDone = false;

        CvCircle[] _faces;

        int result = OpenCVInterop.Init();

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }

            return;
        }

        _faces = new CvCircle[2];
        //NormalizedFacePositions = new List<Vector3>();
        _ready = true;

        // This pattern lets us interrupt the work at a safe point if needed.
        while (_threadRunning && !workDone)
        {
            if (!_ready)
            {
                return;
            }

            unsafe
            {
                fixed (CvCircle* outFaces = _faces)
                {
                    OpenCVInterop.RunServer(outFaces);
                }
            }

            // Map the first detected face into normalized coordinates,
            // assuming a 640x480 capture resolution (hardcoded here).
            NormalizedFacePosition = new Vector3(
                ((float)(640 - _faces[0].X) * DetectionDownScale) / 640f,
                1f - (((float)_faces[0].Y * DetectionDownScale) / 480f),
                _faces[0].Radius);
        }
        _threadRunning = false;
    }
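
ThreadedWork is written to run off Unity's main thread. A sketch of how it might be started and stopped (the _thread field is an assumption; _threadRunning should be declared volatile so the main thread's write is visible to the worker):

    private System.Threading.Thread _thread;

    void OnEnable()
    {
        // Run the detection loop on a background thread.
        _thread = new System.Threading.Thread(ThreadedWork);
        _thread.Start();
    }

    void OnDisable()
    {
        // Signal the loop to exit at its next safe point, then wait for it.
        _threadRunning = false;
        if (_thread != null)
        {
            _thread.Join();
        }
    }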
Example #5
    // Start is called before the first frame update
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            Debug.LogWarningFormat("Failed to initialize DLL");
            return;
        }
        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #6
    void Start()
    {
        // Initialize the OpenCV wrapper; the ref parameters start at the
        // requested 1920x1080 and may be overwritten by the plugin. Note
        // that result is never checked in this example.
        int cam_width = 1920, cam_height = 1080;
        int result = OpenCVInterop.Init(ref cam_width, ref cam_height, detectRatio, camId);

        // Set up the Unity camera to match the physical one. The formula
        // 2 * atan(0.5 * h / w) treats cam_width as the focal length in
        // pixels, giving a vertical FOV of roughly 31.4 degrees at 1920x1080.
        float  vfov = 2.0f * Mathf.Atan(0.5f * cam_height / cam_width) * Mathf.Rad2Deg;
        Camera cam  = Camera.main;

        cam.fieldOfView = vfov;
        cam.aspect      = (float)cam_width / (float)cam_height;

        trans  = new Vector3();
        rot_u  = new Vector3();
        rot_f  = new Vector3();
        _ready = true;
    }
Example #7
    private void initOpenCV()
    {
        int camWidth = 0, camHeight = 0;
        int tracker = (int)trackerTypes;
        int cascade = (int)cascadeTypes;

        // Run the OpenCV Init and log the result.
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight, ref tracker, ref cascade);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            else if (result == -3)
            {
                Debug.LogWarningFormat("[{0}] No Bodies Detected.", GetType());
            }
            else if (result == -4)
            {
                Debug.LogWarningFormat("[{0}] Tracking Error.", GetType());
            }
            return;
        }

        // Prepare all variables and arrays for OpenCV
        CameraResolution            = new Vector2(camWidth, camHeight);
        _bodies                     = new CvRectangle[_maxTrackCount];
        _tracking                   = new CvRectangle[_maxTrackCount];
        patientBody                 = new CvRectangle();
        NormalizedBodyPositions     = new List<Vector2>();
        NormalizedTrackingPositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        DetectBodies();
        frameRate = 0;
        _ready    = true;
    }
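
The trackerTypes and cascadeTypes fields above are enums cast to int before crossing into native code; their definitions are not part of this listing. A hypothetical sketch (names and values assumed, and they must agree with the C++ side):

    // Hypothetical selector enums. OpenCV's tracking module offers trackers
    // like these, and the cascade names map to Haar cascade XML files.
    public enum TrackerTypes { Boosting, MIL, KCF, TLD, MedianFlow, CSRT }
    public enum CascadeTypes { FullBody, UpperBody, LowerBody }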
Example #8
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        Debug.Log(result);

        // Init() returns 1 on success in this variant; bail out otherwise.
        if (result != 1)
        {
            _ready = false;
            return;
        }

        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #9
    //public Renderer rnd;
    //public Color32 myColor;
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            //      rnd = GetComponent<Renderer>();
            _camDistance = Vector3.Distance(Camera.main.transform.position, transform.position);
            return;
        }

        CameraResolution       = new Vector2(camWidth, camHeight);
        _object                = new CoOrdinates_Color[1];
        NormalizedObjPositions = new List<Vector2>();
        objectColor            = new List<Vector3>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #10
    // Start is called before the first frame update
    void Start()
    {
        // Get the available camera devices, in case you have more than one camera.
        WebCamDevice[] camDevices = WebCamTexture.devices;

        // Get the name of the camera used to initialize the WebCamTexture.
        string camName = camDevices[0].name;

        webcamTexture = new WebCamTexture(camName);

        // Render the image on screen (rawimage is a UnityEngine.UI.RawImage
        // assigned in the Inspector).
        rawimage.texture = webcamTexture;
        rawimage.material.mainTexture = webcamTexture;
        webcamTexture.Play();

        int result = OpenCVInterop.Init();

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -3)
            {
                Debug.LogWarningFormat("[{0}] Failed to open image.", GetType());
            }

            return;
        }

        //CameraResolution = new Vector2(camWidth, camHeight);
        _faces = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
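
One thing this example omits is releasing the webcam. A small assumed addition that pairs with the Start() above:

    void OnDestroy()
    {
        // Stop the webcam feed when this component goes away.
        if (webcamTexture != null && webcamTexture.isPlaying)
        {
            webcamTexture.Stop();
        }
    }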
Example #11
    // Start is called before the first frame update
    void Start()
    {
        Debug.Log("Screen Size: " + Screen.width + "x" + Screen.height);

        // The return value of Init() is ignored in this minimal example.
        OpenCVInterop.Init();
    }