InvokeOnAppThread() public static method

Executes a callback item on the application thread, optionally blocking until it has run.

public static InvokeOnAppThread ( AppCallbackItem item, bool waitUntilDone ) : void
item AppCallbackItem The callback to execute.
waitUntilDone bool If true, block the calling thread until the item has been executed.
return void
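
For reference, a minimal usage sketch, assuming the UnityEngine.WSA.Application overload used by the HoloLens examples below; the OnBackgroundEvent handler and its message argument are hypothetical, for illustration only:

    using UnityEngine;
    using WSAApplication = UnityEngine.WSA.Application; // alias to the UWP/HoloLens Application class

    public class AppThreadExample : MonoBehaviour
    {
        // Hypothetical handler raised on a non-Unity thread (e.g. a video-frame or network callback).
        private void OnBackgroundEvent(string message)
        {
            // Most UnityEngine APIs may only be used on the app (main) thread, so marshal
            // the work there. Passing false returns immediately instead of blocking this
            // background thread until the lambda has run.
            WSAApplication.InvokeOnAppThread(() =>
            {
                Debug.Log("Handled on app thread: " + message);
            }, false);
        }
    }

Passing true for waitUntilDone instead makes the call block until the callback has run; both examples below pass false so the capture/tracking thread is not stalled.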
Code example #1
    private void trackFilteredObject(ObjectTracker ot, Mat threshold)
    {
        Mat temp = new Mat();

        threshold.copyTo(temp); // work on a copy so the original threshold image is left untouched

        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);

        if (hierarchy.rows() > 0)
        {
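            // Walk the top-level contours: hierarchy.get(0, index)[0] holds the index of the
            // next contour at the same level, or -1 when there are none left (ending the loop)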
            for (int index = 0; index >= 0; index = (int)hierarchy.get(0, index)[0])
            {
                Moments moment = Imgproc.moments(contours[index]);
                double  area   = moment.m00;

                if (area > 10 * 10) // ignore blobs smaller than roughly 10x10 pixels
                {
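                    // Blob centroid from the first-order spatial moments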
                    int x = (int)(moment.get_m10() / area);
                    int y = (int)(moment.get_m01() / area);

                    Vector2 point  = new Vector2(x, y);
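                    // Unproject the pixel centroid into a world-space direction ray using this frame's camera matrices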
                    Vector3 dirRay = LocatableCameraUtils.PixelCoordToWorldCoord(_cameraToWorldMatrix, _projectionMatrix, _resolution, point);

                    // Unity objects may only be touched on the app thread, so marshal this update there (false = don't block the tracking thread)
                    Application.InvokeOnAppThread(() => {
                        ot.Sphere.transform.position = Camera.main.transform.position + new Vector3(0, ot.offset, 0);
                        SphereCollider collider      = ot.Sphere.GetComponent <SphereCollider>();

                        // Start the ray beyond the sphere and cast it back toward the camera, so it can hit the sphere collider from outside
                        Vector3 newPosRay = Camera.main.transform.position + dirRay * (collider.radius * 2);

                        Ray ray = new Ray(newPosRay, -dirRay);
                        RaycastHit hit;

                        if (Physics.Raycast(ray, out hit, collider.radius * 3))
                        {
                            Vector3 pos = hit.point;
                            ot.gameObject.transform.position = pos;
                        }
                    }, false);
                }
            }
        }
    }
Code example #2
    private void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        // Copy the raw frame bytes into a reusable managed buffer
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        // Retrieve the camera-to-world and projection matrices for this frame
        float[] cameraToWorldMatrixAsFloat;
        float[] projectionMatrixAsFloat;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false || sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
        {
            Debug.Log("Failed to get camera to world or projection matrix");
            return;
        }

        _cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        _projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);

        //focal_x = _projectionMatrix[0, 0] * _resolution.width / 2;
        //focal_y = _projectionMatrix[1, 1] * _resolution.height / 2;
        //cx = _resolution.width / 2 + _resolution.width / 2 * _projectionMatrix[0, 2];
        //cy = _resolution.height / 2 + _resolution.height / 2 * _projectionMatrix[1, 2];
        // Debug.Log("focal_x: " + focal_x.ToString("f5") + " focal_y: " + focal_y.ToString("f5") + " cx: " + cx.ToString("f5") + " cy: " + cy.ToString("f5"));

        sample.Dispose(); // release the native frame now that its data has been copied

        // OpenCV conversion: wrap the raw BGRA bytes in a Mat, then convert to RGB
        Mat frameBGRA = new Mat(_resolution.height, _resolution.width, CvType.CV_8UC4);

        //Array.Reverse(_latestImageBytes);
        frameBGRA.put(0, 0, _latestImageBytes);
        //Core.flip(frameBGRA, frameBGRA, 0);

        Mat frameBGR = new Mat(_resolution.height, _resolution.width, CvType.CV_8UC3);

        Imgproc.cvtColor(frameBGRA, frameBGR, Imgproc.COLOR_BGRA2BGR);
        Mat RGB = new Mat();

        Imgproc.cvtColor(frameBGR, RGB, Imgproc.COLOR_BGR2RGB);
        // Marker detection: locate the marker in the webcam image
        Vector4 location = markerDetection.DetectMarkers(RGB);
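        // location is assumed to hold the marker position in webcam space as a homogeneous (x, y, z, 1) vector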

        // Marshal back to the app thread: most Unity APIs may only be called from the main thread, not from this capture-event thread
        Application.InvokeOnAppThread(() =>
        {
            if (MarkerDetection.success) // marker detected in this frame
            {
                Debug.Log("catch!");
                // target: marker -> webcam -> world.
                Vector3 target = _cameraToWorldMatrix * location;
                Debug.Log("target: " + target.ToString("f5"));
                Target.transform.position = target;

                // Render with the right-eye view and projection matrices
                Matrix4x4 V = Camera.main.GetStereoViewMatrix(Camera.StereoscopicEye.Right);
                Matrix4x4 P = GL.GetGPUProjectionMatrix(Camera.main.GetStereoProjectionMatrix(Camera.StereoscopicEye.Right), false); // convert the OpenGL-style projection to the GPU's (DirectX) convention
                Matrix4x4 M = Target.transform.localToWorldMatrix;
                Target.GetComponent <Renderer>().material.SetMatrix("MATRIX_MVP", P * V * M);                                        // render with custom pipeline
                Target.GetComponent <Renderer>().enabled = true;                                                                     // show now
                Vector4 vertex = P * V * M * new Vector4(0f, 0f, 0f, 1f);                                                            // clip-space position of the target's origin

                //Vector3 cam = V.GetColumn(3);

                //// ----------------------------------------------------
                //// collide a cam-target ray with display collider
                //Vector3 camToTarget = target - cam;
                //Ray ray = new Ray(cam, camToTarget);
                //RaycastHit hit;

                //if (Physics.Raycast(ray, out hit, 2f))
                //{
                //    Vector3 pos = hit.point; // physical hit point
                //    Target.transform.position = pos;
                //    Debug.Log("hit pos: " + pos.ToString("f5"));

                //    Matrix4x4 M = Target.transform.localToWorldMatrix;
                //    Target.GetComponent<Renderer>().material.SetMatrix("MATRIX_MVP", P * V * M); // render with custom pipeline
                //    Target.GetComponent<Renderer>().enabled = true; // show now
                //    // Vector4 vertex = P * V * M * new Vector4(0f, 0f, 0f, 1f); // vertex location on the rendering plane
                //}

                // ---------------------------------------------------
                // Get the eye position in eye-camera space
                string[] eyedata  = UDPCommunication.message.Split(',');
                Vector4 eye_pos_e = new Vector4(float.Parse(eyedata[0]) / 1000, float.Parse(eyedata[1]) / 1000, float.Parse(eyedata[2]) / 1000, 1.0f); // reported in mm, converted to meters
                Debug.Log("eye in eyecam: " + eye_pos_e.ToString("f5"));

                // eye: eyecam -> webcam -> world.
                Vector3 eye_pos_w = _cameraToWorldMatrix * _eyecamTowebcam * eye_pos_e;
                Debug.Log("eye in world: " + eye_pos_w.ToString("f5"));

                // ----------------------------------------------------
                // Cast an eye-to-target ray against the display collider
                Vector3 eyeToTarget = target - eye_pos_w;
                Ray ray_revised     = new Ray(eye_pos_w, eyeToTarget);
                RaycastHit hit_revised;

                if (Physics.Raycast(ray_revised, out hit_revised, 2f))
                {
                    Vector4 pos_revised = hit_revised.point; // physical hit point
                    //Revised.transform.position = pos_revised;
                    Debug.Log("hit_revised pos: " + pos_revised.ToString("f5"));
                    pos_revised.w = 1.0f;
                    // Compute the clip-space vertex of the hit point and scale it to the target vertex's depth (w)
                    Vector4 vertex_hit        = P * V * pos_revised;
                    float scale               = vertex.w / vertex_hit.w;
                    Vector4 vertex_hit_scaled = new Vector4(vertex_hit.x * scale, vertex_hit.y * scale, vertex_hit.z, vertex_hit.w * scale);
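                    // Scaling x, y and w by vertex.w / vertex_hit.w keeps the hit point's screen position
                    // after the perspective divide while matching the target vertex's depth.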

                    // retrieve the world location
                    Vector3 pos_scaled         = V.inverse * P.inverse * vertex_hit_scaled;
                    Revised.transform.position = pos_scaled;
                    // position the revised target and render it
                    Matrix4x4 M_revised = Revised.transform.localToWorldMatrix;
                    Revised.GetComponent <Renderer>().material.SetMatrix("MATRIX_MVP", P * V * M_revised);
                    Revised.GetComponent <Renderer>().enabled = true;

                    Debug.Log("webcameraToWorldMatrix:\n" + _cameraToWorldMatrix.ToString("f5"));
                    Debug.Log("WorldToRightMatrix:\n" + V.ToString("f5"));
                    Debug.Log("RightGLProjectionMatrix:\n" + P.ToString("f5"));

                    Debug.Log("detected target location: " + Target.transform.position.ToString("f5"));
                    Debug.Log("revised target location: " + Revised.transform.position.ToString("f5"));

                    Debug.Log("rendering vertex: " + vertex.ToString("f5"));
                    Debug.Log("hit vertex: " + vertex_hit.ToString("f5"));
                    Debug.Log("revised rendering vertex: " + vertex_hit_scaled.ToString("f5"));
                }
            }
            else
            {
                Revised.GetComponent <Renderer>().enabled = false;
                Target.GetComponent <Renderer>().enabled  = false; // hide
            }
        }, false);
    }