Example #1
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }

            return;
        }

        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector3>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
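Note: every variant on this page assumes a static OpenCVInterop class that binds a native OpenCV plugin through P/Invoke, plus blittable structs mirroring the C++ types. None of that is shown in the snippets, and the details vary per project. Below is a minimal sketch of what Example #1 appears to expect; the plugin name "UnityOpenCV", the SetScale parameter type, and the CvCircle field layout are all assumptions:

using System.Runtime.InteropServices;

internal static class OpenCVInterop
{
    // "UnityOpenCV" is a placeholder; each project links against its own native DLL.
    [DllImport("UnityOpenCV")]
    internal static extern int Init(ref int outCameraWidth, ref int outCameraHeight);

    [DllImport("UnityOpenCV")]
    internal static extern int SetScale(float downScale);
}

// Assumed layout: must match the native struct byte-for-byte.
[StructLayout(LayoutKind.Sequential, Size = 12)]
public struct CvCircle
{
    public int X, Y, Radius;
}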
Example #2
    // Start is called before the first frame update
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            else if (result == -3)
            {
                Debug.LogWarningFormat("[{0}] First Frame Empty.", GetType());
            }
            else if (result == -4)
            {
                Debug.LogWarningFormat("[{0}] No ROI.", GetType());
            }
            return;
        }

        CameraResolution            = new Vector2(camWidth, camHeight);
        _tracking                   = new CvRectangle[_maxTrackingCount];
        NormalizedTrackingPositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
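Example #2 stores CvRectangle results rather than CvCircle ones. The struct is not defined in the snippet; a plausible sketch, assuming the native side returns plain integer rectangles:

using System.Runtime.InteropServices;

// Assumed layout: must mirror the native rectangle struct exactly.
[StructLayout(LayoutKind.Sequential, Size = 16)]
public struct CvRectangle
{
    public int X, Y, Width, Height;
}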
Example #3
    private bool Initialize()
    {
        cameraState = CameraState.connecting;
        //Debug.Log("Initialize OpenCV Face Detection");
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            lastError   = result; // keep the actual error code returned by Init
            cameraState = CameraState.disconnected;
            return false;
        }
        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        lastError   = 0;
        cameraState = CameraState.connected;
        return true;
    }
Example #4
    // Start is called before the first frame update
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            Debug.LogWarningFormat("Failed to initialize DLL");
            return;
        }
        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #5
    private void initOpenCV()
    {
        int camWidth = 0, camHeight = 0;
        int tracker = (int)trackerTypes;
        int cascade = (int)cascadeTypes;

        // Run the native OpenCV Init and log any failure
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight, ref tracker, ref cascade);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            else if (result == -3)
            {
                Debug.LogWarningFormat("[{0}] No Bodies Detected.", GetType());
            }
            else if (result == -4)
            {
                Debug.LogWarningFormat("[{0}] Tracking Error.", GetType());
            }
            return;
        }

        // Prepare all variables and arrays for OpenCV
        CameraResolution            = new Vector2(camWidth, camHeight);
        _bodies                     = new CvRectangle[_maxTrackCount];
        _tracking                   = new CvRectangle[_maxTrackCount];
        patientBody                 = new CvRectangle();
        NormalizedBodyPositions     = new List<Vector2>();
        NormalizedTrackingPositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        DetectBodies();
        frameRate = 0;
        _ready    = true;
    }
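Example #5 is the only variant that forwards tracker and cascade selections to the native layer, so it must bind an extended Init export. A sketch of that binding, inferred from the call site; the entry-point name and parameter meanings are assumptions:

using System.Runtime.InteropServices;

internal static class OpenCVInterop
{
    // The native side presumably reads the requested tracker/cascade enum values
    // and writes the opened camera resolution back through the first two refs.
    [DllImport("UnityOpenCV")]
    internal static extern int Init(ref int outCameraWidth, ref int outCameraHeight,
                                    ref int trackerType, ref int cascadeType);
}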
Example #6
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        Debug.Log(result);
        // This plugin signals success with 1; bail out before touching the camera on failure.
        // (The original set _ready here and then unconditionally overwrote it below.)
        if (result != 1)
        {
            _ready = false;
            return;
        }

        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #7
    //public Renderer rnd;
    //public Color32 myColor;
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            //      rnd = GetComponent<Renderer>();
            _camDistance = Vector3.Distance(Camera.main.transform.position, transform.position);
            return;
        }

        CameraResolution       = new Vector2(camWidth, camHeight);
        _object                = new CoOrdinates_Color[1];
        NormalizedObjPositions = new List<Vector2>();
        objectColor            = new List<Vector3>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #8
    // Start is called before the first frame update
    void Start()
    {
        // Get the available camera devices, in case there is more than one camera.
        WebCamDevice[] camDevices = WebCamTexture.devices;

        // Get the name of the camera to use for the WebCamTexture initialization.
        string camName = camDevices[0].name;

        webcamTexture = new WebCamTexture(camName);

        // Render the camera image on screen.
        rawimage.texture = webcamTexture;
        rawimage.material.mainTexture = webcamTexture;
        webcamTexture.Play();

        int result = OpenCVInterop.Init();

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -3)
            {
                Debug.LogWarningFormat("[{0}] Failed to open image.", GetType());
            }

            return;
        }

        //CameraResolution = new Vector2(camWidth, camHeight);
        _faces = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
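None of these Start() methods reads detection results; that typically happens once per frame after _ready is set. A minimal Update() sketch in the style of Examples #1 and #4, assuming a native Detect export that fills the preallocated _faces array; the Detect signature is an assumption the snippets above do not confirm, and the unsafe block requires enabling "Allow 'unsafe' code" in Player Settings:

    void Update()
    {
        if (!_ready)
            return;

        int detectedFaceCount = 0;
        unsafe
        {
            // Assumed export: void Detect(CvCircle* outFaces, int maxOutFacesCount,
            //                             ref int outDetectedFacesCount);
            fixed (CvCircle* outFaces = _faces)
            {
                OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);
            }
        }

        // Convert pixel coordinates into normalized 0..1 positions.
        NormalizedFacePositions.Clear();
        for (int i = 0; i < detectedFaceCount; i++)
        {
            NormalizedFacePositions.Add(new Vector2(
                _faces[i].X / CameraResolution.x,
                _faces[i].Y / CameraResolution.y));
        }
    }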