void ThreadedWork()
    {
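        // Background worker: initialize the native detector once, then repeatedly ask it
        // for the latest detection and publish the result as NormalizedFacePosition.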
        _threadRunning = true;
        // Never set to true in this version, so the loop runs until the thread is stopped.
        bool workDone = false;

        CvCircle[] _faces;

        // Init() loads the cascade definitions and opens the camera stream on the native
        // side; a negative return value tells us which of those steps failed.
        int result = OpenCVInterop.Init();

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }

            return;
        }


        // Buffer the native detector writes its results into (two slots here).
        _faces = new CvCircle[2];
        //NormalizedFacePositions = new List<Vector3>();
        _ready = true;

        // This pattern lets us interrupt the work at a safe point if needed.
        while (_threadRunning && !workDone)
        {
            if (!_ready)
            {
                // Break (rather than return) so _threadRunning is reset below.
                break;
            }

            unsafe
            {
                // Pin the managed array so the native plugin can write the detection
                // results directly into it without the GC moving the buffer.
                fixed (CvCircle* outFaces = _faces)
                {
                    OpenCVInterop.RunServer(outFaces);
                }
            }

            // Convert the detected pixel position into a normalized position (X mirrored,
            // Y flipped), applying DetectionDownScale; the face radius is carried in Z.
            // The 640x480 constants assume the camera frame size.
            NormalizedFacePosition = new Vector3(
                ((float)(640 - _faces[0].X) * DetectionDownScale) / 640f,
                1f - (((float)_faces[0].Y * DetectionDownScale) / 480f),
                _faces[0].Radius);
        }
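        // Loop exited (stop requested): clear the running flag for the owner of the thread.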
        _threadRunning = false;
    }
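
// For reference, a minimal sketch of the native bindings the method above assumes,
// as they might be declared in their own file. Only the entry-point names (Init,
// RunServer) and the CvCircle members used above (X, Y, Radius) come from the code
// itself; the DLL name "UnityOpenCVPlugin", the void return of RunServer, and the
// int-based struct layout are assumptions and must match the actual native plugin.
using System.Runtime.InteropServices;

internal static class OpenCVInterop
{
    // Loads the cascade files and opens the camera; returns a negative code on failure.
    [DllImport("UnityOpenCVPlugin")]
    internal static extern int Init();

    // Called once per loop iteration above; assumed to fill the supplied buffer with
    // the current face detection(s).
    [DllImport("UnityOpenCVPlugin")]
    internal static extern unsafe void RunServer(CvCircle* outFaces);
}

// Blittable struct whose field order and size must match the native circle type.
[StructLayout(LayoutKind.Sequential, Size = 12)]
public struct CvCircle
{
    public int X, Y, Radius;
}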