Example #1
        void OnFrameSampleAcquired(VideoCaptureSample sample)
        {
            //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
            //You can reuse this byte[] until you need to resize it (for whatever reason).
            if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
            {
                _latestImageBytes = new byte[sample.dataLength];
            }
            sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

            //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
            float[] cameraToWorldMatrix;
            if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrix) == false)
            {
                // Dispose the sample before returning so the frame is released back to the capture pipeline.
                sample.Dispose();
                return;
            }

            //If you need to get the projection matrix for purposes of compositing you can do it like this
            float[] projectionMatrix;
            if (sample.TryGetProjectionMatrix(out projectionMatrix) == false)
            {
                sample.Dispose();
                return;
            }

            //Start sending the image data over the network
            networkController.StartExchange(_latestImageBytes);

            sample.Dispose();
        }
        protected virtual void OnFrameSampleAcquired(VideoCaptureSample sample)
        {
            lock (latestImageBytesLockObject){
                //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
                //You can reuse this byte[] until you need to resize it (for whatever reason).
                if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
                {
                    _latestImageBytes = new byte[sample.dataLength];
                }
                sample.CopyRawImageDataIntoBuffer(_latestImageBytes);
            }

            float[] cameraToWorldMatrixAsFloat;
            if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false)
            {
                sample.Dispose();
                return;
            }

            float[] projectionMatrixAsFloat;
            if (sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
            {
                sample.Dispose();
                return;
            }

            CameraIntrinsics camIntrinsics = sample.GetCameraIntrinsics();

            // Right now we pass things across the pipe as a float array then convert them back into UnityEngine.Matrix using a utility method
            projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);
            cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);

            sample.Dispose();

            didUpdateThisFrame = true;
            didUpdateImageBufferInCurrentFrame = true;

            if (hasInitEventCompleted && frameMatAcquired != null)
            {
                Mat mat = new Mat(cameraParams.cameraResolutionHeight, cameraParams.cameraResolutionWidth, CvType.CV_8UC4);
                Utils.copyToMat <byte> (latestImageBytes, mat);

                if (_rotate90Degree)
                {
                    Mat rotatedFrameMat = new Mat(cameraParams.cameraResolutionWidth, cameraParams.cameraResolutionHeight, CvType.CV_8UC4);
                    Core.rotate(mat, rotatedFrameMat, Core.ROTATE_90_CLOCKWISE);
                    mat.Dispose();

                    FlipMat(rotatedFrameMat, _flipVertical, _flipHorizontal);

                    frameMatAcquired.Invoke(rotatedFrameMat, projectionMatrix, cameraToWorldMatrix, camIntrinsics);
                }
                else
                {
                    FlipMat(mat, _flipVertical, _flipHorizontal);

                    frameMatAcquired.Invoke(mat, projectionMatrix, cameraToWorldMatrix, camIntrinsics);
                }
            }
        }
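The FlipMat helper called above is not included in this example. A minimal sketch, assuming OpenCVForUnity's Core.flip convention (0 = flip around the x-axis, 1 = around the y-axis, -1 = both), could look like this; the real helper may also account for rotation settings.
        protected virtual void FlipMat(Mat mat, bool flipVertical, bool flipHorizontal)
        {
            // Flip in place; Core.flip accepts the same Mat as source and destination.
            if (flipVertical && flipHorizontal)
            {
                Core.flip(mat, mat, -1);
            }
            else if (flipVertical)
            {
                Core.flip(mat, mat, 0);
            }
            else if (flipHorizontal)
            {
                Core.flip(mat, mat, 1);
            }
        }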
Example #3
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
        float[] cameraToWorldMatrix;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrix) == false)
        {
            sample.Dispose();
            return;
        }

        //If you need to get the projection matrix for purposes of compositing you can do it like this
        float[] projectionMatrix;
        if (sample.TryGetProjectionMatrix(out projectionMatrix) == false)
        {
            sample.Dispose();
            return;
        }

        sample.Dispose();

        //This is where we actually use the image data
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            _videoPanelUI.SetBytes(_latestImageBytes);
            //text.text = _resolution.width + "x"+ _resolution.height+"+"+ _latestImageBytes.Length;
            RGBUpdated = true;
        }, false);
    }
    async void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        if (frameProccessed == false)
        {
            cnt_out += 1;
            return;
        }
        cnt_in += 1;
        Debug.Log("cnt : in = " + cnt_in.ToString() + ", out = " + cnt_out);
        frameProccessed = false;
        Debug.Log("Frame sample acquired");
        bool mappable = true;

        float[] cameraToWorldMatrixAsFloat;
        float[] projectionMatrixAsFloat;
        mappable &= sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat);
        mappable &= sample.TryGetProjectionMatrix(out projectionMatrixAsFloat);

        //when copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //you can reuse this byte[] until you need to resize it(for whatever reason).
        byte[] latestImageBytes = null;

        System.Diagnostics.Stopwatch st = new System.Diagnostics.Stopwatch();
        st.Start();
        using (var ms = new InMemoryRandomAccessStream())
        {
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);

            encoder.SetSoftwareBitmap(sample.Bitmap);
            try
            {
                await encoder.FlushAsync();
            }
            catch (Exception err)
            {
                Debug.LogError(err.Message);
                // Reset the guard flag so later frames are not skipped forever after a failed encode.
                frameProccessed = true;
                return;
            }
            // Rewind the stream before reading the encoded JPEG back out.
            ms.Seek(0);
            latestImageBytes = new byte[ms.Size];
            await ms.ReadAsync(latestImageBytes.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
        }
        st.Stop();
        Debug.Log("encoding time " + st.ElapsedMilliseconds.ToString());

        // Right now we pass things across the pipe as a float array then convert them back into UnityEngine.Matrix using a utility method
        if (mappable)
        {
            st.Restart();
            cameraToWorld = CameraStreamHelper.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
            projection    = CameraStreamHelper.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);
            await SocketManager.Instance.SendPhoto(latestImageBytes);

            st.Stop();
            Debug.Log("network time " + st.ElapsedMilliseconds.ToString());
            BoundingBox[] boxes = await SocketManager.Instance.RecvDetections();

            SceneUnderstanding.Instance.RecvDetections(cameraToWorld, projection, boxes, mappable);
        }
        frameProccessed = true;
    }
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }

        // edit the color of each frame
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
        float[] cameraToWorldMatrix;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrix) == false)
        {
            sample.Dispose();
            return;
        }

        //If you need to get the projection matrix for purposes of compositing you can do it like this
        float[] projectionMatrix;
        if (sample.TryGetProjectionMatrix(out projectionMatrix) == false)
        {
            sample.Dispose();
            return;
        }

        sample.Dispose();

        //This is where we actually use the image data
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            int f         = 10000;
            int PixelSize = 4;
            int width     = _resolution.width;
            int height    = _resolution.height;
            int Line      = width * PixelSize;

            // get normal image from mirror image
            for (int i = 0; i < height; ++i)
            {
                for (int j = 0; j + 4 < Line / 2; j += 4)
                {
                    Swap <byte>(ref _latestImageBytes[Line * i + j], ref _latestImageBytes[Line * i + Line - j - 4]);
                    Swap <byte>(ref _latestImageBytes[Line * i + j + 1], ref _latestImageBytes[Line * i + Line - j - 3]);
                    Swap <byte>(ref _latestImageBytes[Line * i + j + 2], ref _latestImageBytes[Line * i + Line - j - 2]);
                    Swap <byte>(ref _latestImageBytes[Line * i + j + 3], ref _latestImageBytes[Line * i + Line - j - 1]);

                    // reduce the alpha
                    //_latestImageBytes[Line * i + j + 3] = _latestImageBytes[Line * i + Line - j - 1] = 100;
                }
            }


            _videoPanelUI.SetBytes(_latestImageBytes);
        }, true);
    }
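The Swap helper used in the mirroring loop above is not shown in the snippet; a minimal generic sketch would be:
    static void Swap <T>(ref T a, ref T b)
    {
        // Exchange the two referenced values in place.
        T tmp = a;
        a = b;
        b = tmp;
    }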
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);
        sample.Dispose();

        _videoPanelUI.SetBytes(_latestImageBytes);
    }
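_videoPanelUI.SetBytes comes from an accompanying VideoPanel component that is not part of this snippet. A rough sketch of such a component, assuming a RawImage backed by a BGRA32 Texture2D created at the camera resolution (all names here are illustrative), could be:
using UnityEngine;
using UnityEngine.UI;

public class VideoPanel : MonoBehaviour
{
    public RawImage rawImage;

    // Hypothetical setup step, called once the camera resolution is known.
    public void SetResolution(int width, int height)
    {
        rawImage.texture = new Texture2D(width, height, TextureFormat.BGRA32, false);
    }

    // Uploads the latest raw frame bytes into the texture.
    public void SetBytes(byte[] image)
    {
        Texture2D texture = rawImage.texture as Texture2D;
        texture.LoadRawTextureData(image);
        texture.Apply();
    }
}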
Example #7
    private void OnFrameSampleAcquired(VideoCaptureSample videocapturesample)
    {
        if (videocapturesample == null)
        {
            return;
        }

        var data = new byte[videocapturesample.dataLength];

        videocapturesample.CopyRawImageDataIntoBuffer(data);

        DoStuffWithData(data);
    }
Example #8
    // Everything above here is boilerplate from the VideoPanelApp.cs project
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
        float[] cameraToWorldMatrixAsFloat;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false)
        {
            return;
        }

        //If you need to get the projection matrix for purposes of compositing you can do it like this
        float[] projectionMatrixAsFloat;
        if (sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
        {
            return;
        }

        // Right now we pass things across the pipe as a float array then convert them back into UnityEngine.Matrix using a utility method
        Matrix4x4 cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        Matrix4x4 projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);

        //This is where we actually use the image data
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            _videoPanelUI.SetBytes(_latestImageBytes);

            Vector3 inverseNormal = LocatableCameraUtils.GetNormalOfPose(cameraToWorldMatrix);
            Vector3 imagePosition = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);

            // Throw out an indicator in the composite space 2 meters in front of us using the corresponding view matrices
            float distanceToMarker      = 2f;
            Vector3 pointOnFaceBoxPlane = imagePosition - inverseNormal * distanceToMarker;
            Plane surfacePlane          = new Plane(inverseNormal, pointOnFaceBoxPlane);

            Vector2 targetPoint = new Vector2(_resolution.width * 0.5f, _resolution.height * 0.5f);
            Vector3 mdPoint     = LocatableCameraUtils.PixelCoordToWorldCoord(cameraToWorldMatrix, projectionMatrix, _resolution, targetPoint, surfacePlane);

            _targetIndicator.SetPosition(mdPoint);
            _targetIndicator.SetText("P");

            _videoPanelUI.gameObject.SetActive(false);
        }, false);
    }
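LocatableCameraUtils.ConvertFloatArrayToMatrix4x4 ships with the HoloLensCameraStream sample utilities and is not reproduced here. A hypothetical stand-in, assuming the 16 floats are stored column by column (the real helper's element order may differ), might look like this:
    static Matrix4x4 FloatArrayToMatrix4x4ColumnMajor(float[] m)
    {
        // Assumes m has 16 elements laid out column-major; adjust if the source is row-major.
        Matrix4x4 result = new Matrix4x4();
        for (int col = 0; col < 4; col++)
        {
            result.SetColumn(col, new Vector4(m[col * 4 + 0], m[col * 4 + 1], m[col * 4 + 2], m[col * 4 + 3]));
        }
        return result;
    }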
Example #9
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
        float[] cameraToWorldMatrixAsFloat;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false)
        {
            return;
        }

        //If you need to get the projection matrix for purposes of compositing you can do it like this
        float[] projectionMatrixAsFloat;
        if (sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
        {
            return;
        }

        // Right now we pass things across the pipe as a float array then convert them back into UnityEngine.Matrix using a utility method
        Matrix4x4 cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        Matrix4x4 projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);

        //This is where we actually use the image data
        //TODO: Create a class like VideoPanel for the next code
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            _videoTexture.LoadRawTextureData(_latestImageBytes);
            _videoTexture.wrapMode = TextureWrapMode.Clamp;
            _videoTexture.Apply();

            _videoPanelUIRenderer.sharedMaterial.SetTexture("_MainTex", _videoTexture);
            _videoPanelUIRenderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", cameraToWorldMatrix.inverse);
            _videoPanelUIRenderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            _videoPanelUIRenderer.sharedMaterial.SetFloat("_VignetteScale", 1.3f);


            Vector3 inverseNormal = -cameraToWorldMatrix.GetColumn(2);
            // Position the canvas object slightly in front of the real world web camera.
            Vector3 imagePosition = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

            _videoPanelUI.gameObject.transform.position = imagePosition;
            _videoPanelUI.gameObject.transform.rotation = Quaternion.LookRotation(inverseNormal, cameraToWorldMatrix.GetColumn(1));
        }, false);
    }
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);
        sample.Dispose();

        //This is where we actually use the image data
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            _videoPanelUI.SetBytes(_latestImageBytes);
        }, false);
    }
    void OnFrameSampleCallback(VideoCaptureSample sample)
    {
        byte[] imageBytes = new byte[sample.dataLength];

        sample.CopyRawImageDataIntoBuffer(imageBytes);

        sample.Dispose();

        //The image is horizontally mirrored!
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            if (HorizontalMirror)
            {
                ImageMirror(imageBytes);
            }
            _videoTexture.LoadRawTextureData(imageBytes);
            _videoTexture.wrapMode = TextureWrapMode.Clamp;
            _videoTexture.Apply();
            UnityChatSDK.Instance.UpdateCustomTexture(_videoTexture);
        }, false);
    }
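ImageMirror is defined elsewhere in that project. A plain sketch of an in-place horizontal mirror for a tightly packed 32-bit (BGRA) frame buffer, assuming _videoTexture matches the frame resolution, could be:
    void ImageMirror(byte[] bytes)
    {
        int width     = _videoTexture.width;
        int height    = _videoTexture.height;
        int pixelSize = 4;
        int stride    = width * pixelSize;

        // Swap whole pixels across the vertical center line of each row.
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width / 2; x++)
            {
                int left  = y * stride + x * pixelSize;
                int right = y * stride + (width - 1 - x) * pixelSize;
                for (int k = 0; k < pixelSize; k++)
                {
                    byte tmp         = bytes[left + k];
                    bytes[left + k]  = bytes[right + k];
                    bytes[right + k] = tmp;
                }
            }
        }
    }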
    private void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        float[] cameraToWorldMatrixAsFloat;
        float[] projectionMatrixAsFloat;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false || sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
        {
            Debug.Log("Failed to get camera to world or projection matrix");
            sample.Dispose();
            return;
        }

        _cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        _projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);

        sample.Dispose();

        Mat frameBGRA = new Mat(_resolution.height, _resolution.width, CvType.CV_8UC4);

        frameBGRA.put(0, 0, _latestImageBytes);
        Mat frameBGR = new Mat(_resolution.height, _resolution.width, CvType.CV_8UC3);

        Imgproc.cvtColor(frameBGRA, frameBGR, Imgproc.COLOR_BGRA2BGR);

        Mat HSV       = new Mat();
        Mat threshold = new Mat();

        // Track objects
        foreach (ObjectTracker ot in _trackers)
        {
            Imgproc.cvtColor(frameBGR, HSV, Imgproc.COLOR_BGR2HSV);
            Core.inRange(HSV, new Scalar(ot.minH, ot.minSaturation, ot.minLight), new Scalar(ot.maxH, 255, 255), threshold);
            morphOps(threshold);
            trackFilteredObject(ot, threshold);
        }
    }
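morphOps and trackFilteredObject are project helpers that are not shown. A minimal sketch of the morphological clean-up step, using OpenCVForUnity's Imgproc calls with illustrative kernel sizes, could be:
    void morphOps(Mat threshold)
    {
        // Erode to remove speckle noise, then dilate to grow the surviving blobs back.
        Mat erodeElement  = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
        Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8));

        Imgproc.erode(threshold, threshold, erodeElement);
        Imgproc.erode(threshold, threshold, erodeElement);

        Imgproc.dilate(threshold, threshold, dilateElement);
        Imgproc.dilate(threshold, threshold, dilateElement);
    }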
Example #13
        private async void OnFrameSampleAcquired(VideoCaptureSample videoCaptureSample)
        {
            if (FrameAquired != null && State == FrameState.ReadyForNextFrame)
            {
                State = FrameState.WaitForReady;
                var buffer = new Windows.Storage.Streams.Buffer((uint)videoCaptureSample.dataLength);
                videoCaptureSample.bitmap.CopyToBuffer(buffer);

                DataReader dataReader = DataReader.FromBuffer(buffer);
                byte[]     bytes      = new byte[buffer.Length];
                dataReader.ReadBytes(bytes);

                var inputStream = new InMemoryRandomAccessStream();

                BitmapEncoder bitmapEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, inputStream);

                bitmapEncoder.SetSoftwareBitmap(videoCaptureSample.bitmap);
                await bitmapEncoder.FlushAsync();

                var encodedData = new Windows.Storage.Streams.Buffer((uint)inputStream.Size);
                await inputStream.ReadAsync(encodedData, (uint)inputStream.Size, InputStreamOptions.None);

                Debug.WriteLine("Size of JPEG Image: " + encodedData.Length);

                DataReader dataReader2  = DataReader.FromBuffer(encodedData);
                byte[]     encodedBytes = new byte[encodedData.Length];
                dataReader2.ReadBytes(encodedBytes);

                dataReader.Dispose();
                dataReader2.Dispose();

                FrameAquired?.Invoke(this, new FrameAquiredEventArgs()
                {
                    BitmapData = bytes, width = _resolution.width, height = _resolution.height, ImageData = encodedBytes
                });
            }
            videoCaptureSample.Dispose();
        }
Example #14
    void OnFrameSampleCallback(VideoCaptureSample sample)
    {
        byte[] imageBytes = new byte[sample.dataLength];

        sample.CopyRawImageDataIntoBuffer(imageBytes);

        sample.Dispose();

        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            if (Application.platform == RuntimePlatform.WSAPlayerX86)
            {
                ImageHorizontalMirror(imageBytes);
            }
            else if (Application.platform == RuntimePlatform.WSAPlayerARM)
            {
                ImageVerticalMirror(imageBytes);
            }
            _videoTexture.LoadRawTextureData(imageBytes);
            _videoTexture.Apply();
            UnityChatSDK.Instance.UpdateCustomTexture(_videoTexture);
        }, false);
    }
Example #15
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        lock (_mainThreadActions)
        {
            if (_mainThreadActions.Count > 2)
            {
                // Drop this frame, but dispose the sample so it is not leaked.
                sample.Dispose();
                return;
            }
        }
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
        float[] cameraToWorldMatrix;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrix) == false)
        {
            sample.Dispose();
            return;
        }

        //If you need to get the projection matrix for purposes of compositing you can do it like this
        float[] projectionMatrix;
        if (sample.TryGetProjectionMatrix(out projectionMatrix) == false)
        {
            sample.Dispose();
            return;
        }

        sample.Dispose();

        //Debug.Log("Got frame: " + sample.FrameWidth + "x" + sample.FrameHeight + " | " + sample.pixelFormat);

        Enqueue(() => _videoPanelUI.SetBytes(_latestImageBytes));
    }
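The Enqueue call and _mainThreadActions queue above imply a simple main-thread dispatcher: the camera callback queues work and Unity's Update drains it. A sketch, assuming _mainThreadActions is a Queue<Action> field (requires using System; and using System.Collections.Generic;), could be:
    // Assumed field: private readonly Queue<Action> _mainThreadActions = new Queue<Action>();

    private void Enqueue(Action action)
    {
        lock (_mainThreadActions)
        {
            _mainThreadActions.Enqueue(action);
        }
    }

    private void Update()
    {
        // Drain queued work on Unity's main thread.
        while (true)
        {
            Action action = null;
            lock (_mainThreadActions)
            {
                if (_mainThreadActions.Count > 0)
                {
                    action = _mainThreadActions.Dequeue();
                }
            }
            if (action == null)
            {
                break;
            }
            action();
        }
    }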
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (latestImageBytes == null || latestImageBytes.Length < sample.dataLength)
        {
            latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(latestImageBytes);

        //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
        float[] outMatrix;
        if (sample.TryGetCameraToWorldMatrix(out outMatrix) == false)
        {
            sample.Dispose();
            return;
        }
        Matrix4x4 cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(outMatrix);

        //If you need to get the projection matrix for purposes of compositing you can do it like this
        if (sample.TryGetProjectionMatrix(out outMatrix) == false)
        {
            sample.Dispose();
            return;
        }
        Matrix4x4 projectionMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(outMatrix);

        sample.Dispose();

        //This is where we actually use the image data
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            int length   = latestImageBytes.Length;
            int size     = Marshal.SizeOf(latestImageBytes[0]) * length;
            IntPtr inPtr = Marshal.AllocHGlobal(size);
            try
            {
                Marshal.Copy(latestImageBytes, 0, inPtr, length);
                IntPtr outPtr = ProcessFrame(inPtr, resolution.width, resolution.height);
                if (outPtr != IntPtr.Zero)
                {
                    int[] cvOutput = new int[4];
                    Marshal.Copy(outPtr, cvOutput, 0, 4);

                    // Get the world coordinates of the keyboard center
                    Vector2 pixelCoordinates = new Vector2(cvOutput[0], cvOutput[1]);
                    Vector3 worldDirection   = LocatableCameraUtils.PixelCoordToWorldCoord(cameraToWorldMatrix, projectionMatrix, resolution, pixelCoordinates);
                    Ray ray = new Ray(cameraToWorldMatrix.GetColumn(3), worldDirection);
                    RaycastHit hitInfo;
                    if (Physics.Raycast(ray, out hitInfo))
                    {
                        keyboardPosition = hitInfo.point;
                    }

                    // Get the world coordinates of the keyboard's eigenvector
                    pixelCoordinates = new Vector2(cvOutput[2], cvOutput[3]);
                    worldDirection   = LocatableCameraUtils.PixelCoordToWorldCoord(cameraToWorldMatrix, projectionMatrix, resolution, pixelCoordinates);
                    ray = new Ray(cameraToWorldMatrix.GetColumn(3), worldDirection);
                    if (Physics.Raycast(ray, out hitInfo))
                    {
                        keyboardOrientation = hitInfo.point - keyboardPosition;
                    }
                }

                /*byte[] outBytes = new byte[length];
                 * Marshal.Copy(outPtr, outBytes, 0, length);
                 * videoPanelUI.SetBytes(outBytes);*/
            }
            finally
            {
                // Clean memory
                Marshal.FreeHGlobal(inPtr);
                FreeMemory();
                GC.Collect();
            }
        }, false);

        if (stopVideoMode)
        {
            videoCapture.StopVideoModeAsync(OnVideoModeStopped);
        }
    }
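LocatableCameraUtils.PixelCoordToWorldCoord belongs to the sample utilities and is not shown here. As an illustration of the idea only, a hypothetical unprojection from a pixel to a world-space ray direction (top-left pixel origin, Unity's -Z forward camera convention) could look like this; the real helper's conventions may differ:
    static Vector3 PixelToWorldDirection(Matrix4x4 cameraToWorldMatrix, Matrix4x4 projectionMatrix,
                                         int imageWidth, int imageHeight, Vector2 pixel)
    {
        // Pixel -> normalized device coordinates in [-1, 1], flipping y so +y points up.
        float ndcX = (pixel.x / imageWidth) * 2f - 1f;
        float ndcY = 1f - (pixel.y / imageHeight) * 2f;

        // Unproject into camera space; MultiplyPoint performs the perspective divide.
        Vector3 dirCamera = projectionMatrix.inverse.MultiplyPoint(new Vector3(ndcX, ndcY, 1f));
        dirCamera.z = -1f;

        // Rotate (not translate) the direction into world space.
        return cameraToWorldMatrix.MultiplyVector(dirCamera.normalized);
    }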
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        m_NumFrames++;

        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
        float[] cameraToWorldMatrixAsFloat;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false)
        {
            return;
        }

        //If you need to get the projection matrix for purposes of compositing you can do it like this
        float[] projectionMatrixAsFloat;
        if (sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
        {
            return;
        }

        // Right now we pass things across the pipe as a float array then convert them back into UnityEngine.Matrix using a utility method
        Matrix4x4 cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        Matrix4x4 projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);

        //This is where we actually use the image data
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            int numFrames = m_NumFrames;

            _videoPanelUI.SetBytes(_latestImageBytes);

            Texture2D tex   = _videoPanelUI.rawImage.texture as Texture2D;
            byte[] jpgBytes = tex.EncodeToJPG();

            /*
             * Vector3 inverseNormal = LocatableCameraUtils.GetNormalOfPose(cameraToWorldMatrix);
             * Vector3 imagePosition = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);
             *
             * // Throw out an indicator in the composite space 2 meters in front of us using the corresponding view matrices
             * float distanceToMarker = 2f;
             * Vector3 pointOnFaceBoxPlane = imagePosition - inverseNormal * distanceToMarker;
             * Plane surfacePlane = new Plane(inverseNormal, pointOnFaceBoxPlane);
             *
             * Vector2 targetPoint = new Vector2(_resolution.width * 0.5f, _resolution.height * 0.5f);
             * Vector3 mdPoint = LocatableCameraUtils.PixelCoordToWorldCoord(cameraToWorldMatrix, projectionMatrix, _resolution, targetPoint, surfacePlane);
             */

            //string infoText = String.Format("Position: {0}\t{1}\t{2}\nRotation: {3}\t{4}\t{5}", position.x, position.y, position.z, eulerAngles.x, eulerAngles.y, eulerAngles.z);
            //Debug.Log(infoText);


            float timestamp = Time.time - m_RecordingStartTime;

            PoseData pose  = new PoseData();
            pose.timestamp = timestamp;
            pose.position  = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);

            Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
            pose.eulerAngles    = rotation.eulerAngles;

            if (PlacingAxisMarkers && AxisPrefab != null)
            {
                Instantiate(AxisPrefab, pose.position, rotation);
            }


            m_CurrentPoses.Add(pose);

            SaveFrame(jpgBytes, numFrames);

            if (m_CurrentRecordingState == RecordingState.Recording)
            {
                this._videoCapture.RequestNextFrameSample(OnFrameSampleAcquired);
            }
        }, false);
    }
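PoseData and SaveFrame are declared elsewhere in that recording project. Plausible shapes, with SaveFrame writing the JPEG to Application.persistentDataPath under a zero-padded name (both are assumptions, not the project's actual code), might be:
    [System.Serializable]
    public class PoseData
    {
        public float   timestamp;
        public Vector3 position;
        public Vector3 eulerAngles;
    }

    void SaveFrame(byte[] jpgBytes, int frameIndex)
    {
        // Store each frame as frame_000000.jpg, frame_000001.jpg, ...
        string path = System.IO.Path.Combine(Application.persistentDataPath,
                                             string.Format("frame_{0:D6}.jpg", frameIndex));
        System.IO.File.WriteAllBytes(path, jpgBytes);
    }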
Example #18
#if UNITY_WSA && !UNITY_EDITOR
    private async void OnFrameSampleAcquired(VideoCaptureSample sample)
#else
    private void OnFrameSampleAcquired(VideoCaptureSample sample)
#endif
    {
        lock (sync)
        {
            if (processingFrame)
            {
                return;
            }

            processingFrame = true;
        }

        // surrounded with try/finally because we need to dispose of the sample
        try
        {
            // Allocate byteBuffer
            if (latestImageBytes == null || latestImageBytes.Length < sample.dataLength)
            {
                latestImageBytes = new byte[sample.dataLength];
            }

            // Fill frame struct
            SampleStruct s = new SampleStruct();
            sample.CopyRawImageDataIntoBuffer(latestImageBytes);
            s.data = latestImageBytes;

            // Get the cameraToWorldMatrix and projectionMatrix
            if (!sample.TryGetCameraToWorldMatrix(out s.camera2WorldMatrix) || !sample.TryGetProjectionMatrix(out s.projectionMatrix))
            {
                return;
            }

            HoloPicture picture = null;

            UnityEngine.WSA.Application.InvokeOnAppThread(() =>
            {
                picture = HoloPicture.CreateHologram(s.data, _resolution, s.camera2WorldMatrix, s.projectionMatrix);
            }, true);


            videoCapture.StopVideoModeAsync(onVideoModeStopped);
            IList <Yolo.YoloBoundingBox> predictions = null;

#if UNITY_WSA && !UNITY_EDITOR
            VideoFrame videoFrame = (VideoFrame)videoFrameInfo.GetValue(sample);

            if (videoFrame?.SoftwareBitmap == null)
            {
                return;
            }
            SoftwareBitmap softwareBitmap = videoFrame.SoftwareBitmap;

            if (softwareBitmap.BitmapAlphaMode != BitmapAlphaMode.Premultiplied)
            {
                softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                videoFrame     = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
            }

            predictions = await ObjectDetector.Instance.AnalyzeImage(videoFrame);

            if (predictions?.Count == 0)
            {
                return;
            }
#endif
            UnityEngine.WSA.Application.InvokeOnAppThread(() =>
            {
                picture.Predictions = predictions;
                SaveHologram(picture);
                picture.DisplayPredictions();
            }, true);
        }
        finally
        {
            sample.Dispose();
        }
    }
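SampleStruct is declared elsewhere in the project; given how it is used above, a plausible definition is simply:
    public struct SampleStruct
    {
        public byte[]  data;                // raw BGRA frame bytes
        public float[] camera2WorldMatrix;  // 16 floats from TryGetCameraToWorldMatrix
        public float[] projectionMatrix;    // 16 floats from TryGetProjectionMatrix
    }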
    private void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        // upload image bytes
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        // transform matrix
        float[] cameraToWorldMatrixAsFloat;
        float[] projectionMatrixAsFloat;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false || sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
        {
            Debug.Log("Failed to get camera to world or projection matrix");
            sample.Dispose();
            return;
        }

        _cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        _projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);

        //focal_x = _projectionMatrix[0, 0] * _resolution.width / 2;
        //focal_y = _projectionMatrix[1, 1] * _resolution.height / 2;
        //cx = _resolution.width / 2 + _resolution.width / 2 * _projectionMatrix[0, 2];
        //cy = _resolution.height / 2 + _resolution.height / 2 * _projectionMatrix[1, 2];
        // Debug.Log("focal_x: " + focal_x.ToString("f5") + " focal_y: " + focal_y.ToString("f5") + " cx: " + cx.ToString("f5") + " cy: " + cy.ToString("f5"));

        sample.Dispose();

        // Opencv mat conversion: to RGB mat
        Mat frameBGRA = new Mat(_resolution.height, _resolution.width, CvType.CV_8UC4);

        //Array.Reverse(_latestImageBytes);
        frameBGRA.put(0, 0, _latestImageBytes);
        //Core.flip(frameBGRA, frameBGRA, 0);

        Mat frameBGR = new Mat(_resolution.height, _resolution.width, CvType.CV_8UC3);

        Imgproc.cvtColor(frameBGRA, frameBGR, Imgproc.COLOR_BGRA2BGR);
        Mat RGB = new Mat();

        Imgproc.cvtColor(frameBGR, RGB, Imgproc.COLOR_BGR2RGB);
        // Target detection: marker location in webcam
        Vector4 location = markerDetection.DetectMarkers(RGB);

        // Dispatch to the app thread, since some operations are only allowed on the main thread, not on the capture event thread
        Application.InvokeOnAppThread(() =>
        {
            if (MarkerDetection.success == true)// if detected
            {
                Debug.Log("catch!");
                // target: marker -> webcam -> world.
                Vector3 target = _cameraToWorldMatrix * location;
                Debug.Log("target: " + target.ToString("f5"));
                Target.transform.position = target;

                // render with the right matrix
                Matrix4x4 V = Camera.main.GetStereoViewMatrix(Camera.StereoscopicEye.Right);
                Matrix4x4 P = GL.GetGPUProjectionMatrix(Camera.main.GetStereoProjectionMatrix(Camera.StereoscopicEye.Right), false); // openGL to DX
                Matrix4x4 M = Target.transform.localToWorldMatrix;
                Target.GetComponent <Renderer>().material.SetMatrix("MATRIX_MVP", P * V * M);                                        // render with custom pipeline
                Target.GetComponent <Renderer>().enabled = true;                                                                     // show now
                Vector4 vertex = P * V * M * new Vector4(0f, 0f, 0f, 1f);                                                            // vertex location on the rendering plane

                //Vector3 cam = V.GetColumn(3);

                //// ----------------------------------------------------
                //// collide a cam-target ray with display collider
                //Vector3 camToTarget = target - cam;
                //Ray ray = new Ray(cam, camToTarget);
                //RaycastHit hit;

                //if (Physics.Raycast(ray, out hit, 2f))
                //{
                //    Vector3 pos = hit.point; // physical hitted point
                //    Target.transform.position = pos;
                //    Debug.Log("hit pos: " + pos.ToString("f5"));

                //    Matrix4x4 M = Target.transform.localToWorldMatrix;
                //    Target.GetComponent<Renderer>().material.SetMatrix("MATRIX_MVP", P * V * M); // render with custom pipeline
                //    Target.GetComponent<Renderer>().enabled = true; // show now
                //    // Vector4 vertex = P * V * M * new Vector4(0f, 0f, 0f, 1f); // vertex location on the rendering plane
                //}

                // ---------------------------------------------------
                // get eye position in eyecam
                string[] eyedata  = UDPCommunication.message.Split(',');
                Vector4 eye_pos_e = new Vector4(float.Parse(eyedata[0]) / 1000, float.Parse(eyedata[1]) / 1000, float.Parse(eyedata[2]) / 1000, 1.0f); // in [m]
                Debug.Log("eye in eyecam: " + eye_pos_e.ToString("f5"));

                // eye: eyecam -> webcam -> world.
                Vector3 eye_pos_w = _cameraToWorldMatrix * _eyecamTowebcam * eye_pos_e;
                Debug.Log("eye in world: " + eye_pos_w.ToString("f5"));

                // ----------------------------------------------------
                // collide a eye-target ray with display collider
                Vector3 eyeToTarget = target - eye_pos_w;
                Ray ray_revised     = new Ray(eye_pos_w, eyeToTarget);
                RaycastHit hit_revised;

                if (Physics.Raycast(ray_revised, out hit_revised, 2f))
                {
                    Vector4 pos_revised = hit_revised.point; // physical hit point
                    //Revised.transform.position = pos_revised;
                    Debug.Log("hit_revised pos: " + pos_revised.ToString("f5"));
                    pos_revised.w = 1.0f;
                    // calculate hitted vertex, scale w to the same depth
                    Vector4 vertex_hit        = P * V * pos_revised;
                    float scale               = vertex.w / vertex_hit.w;
                    Vector4 vertex_hit_scaled = new Vector4(vertex_hit.x * scale, vertex_hit.y * scale, vertex_hit.z, vertex_hit.w * scale);

                    // retrieve the world location
                    Vector3 pos_scaled         = V.inverse * P.inverse * vertex_hit_scaled;
                    Revised.transform.position = pos_scaled;
                    // position the revised target and render it
                    Matrix4x4 M_revised = Revised.transform.localToWorldMatrix;
                    Revised.GetComponent <Renderer>().material.SetMatrix("MATRIX_MVP", P * V * M_revised);
                    Revised.GetComponent <Renderer>().enabled = true;

                    Debug.Log("webcameraToWorldMatrix:\n" + _cameraToWorldMatrix.ToString("f5"));
                    Debug.Log("WorldToRightMatrix:\n" + V.ToString("f5"));
                    Debug.Log("RightGLProjectionMatrix:\n" + P.ToString("f5"));

                    Debug.Log("detected target location: " + Target.transform.position.ToString("f5"));
                    Debug.Log("revised target location: " + Revised.transform.position.ToString("f5"));

                    Debug.Log("rendering vertex: " + vertex.ToString("f5"));
                    Debug.Log("hit vertex: " + vertex_hit.ToString("f5"));
                    Debug.Log("revised rendering vertex: " + vertex_hit_scaled.ToString("f5"));
                }
            }
            else
            {
                Revised.GetComponent <Renderer>().enabled = false;
                Target.GetComponent <Renderer>().enabled  = false; // hide
            }
        }, false);
    }
Example #20
    private void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        // Allocate byteBuffer
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }

        // Fill frame struct
        SampleStruct s = new SampleStruct();

        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);
        s.data = _latestImageBytes;

        // Get the cameraToWorldMatrix and projectionMatrix
        if (!sample.TryGetCameraToWorldMatrix(out s.camera2WorldMatrix) || !sample.TryGetProjectionMatrix(out s.projectionMatrix))
        {
            sample.Dispose();
            return;
        }

        sample.Dispose();

        Matrix4x4 camera2WorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(s.camera2WorldMatrix);
        Matrix4x4 projectionMatrix   = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(s.projectionMatrix);

        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            // Upload bytes to texture
            _pictureTexture.LoadRawTextureData(s.data);
            _pictureTexture.wrapMode = TextureWrapMode.Clamp;
            _pictureTexture.Apply();

            // Set material parameters
            _pictureRenderer.sharedMaterial.SetTexture("_MainTex", _pictureTexture);
            _pictureRenderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", camera2WorldMatrix.inverse);
            _pictureRenderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            _pictureRenderer.sharedMaterial.SetFloat("_VignetteScale", 0f);

            Vector3 inverseNormal = -camera2WorldMatrix.GetColumn(2);
            // Position the canvas object slightly in front of the real world web camera.
            Vector3 imagePosition = camera2WorldMatrix.GetColumn(3) - camera2WorldMatrix.GetColumn(2);

            _picture.transform.position = imagePosition;
            _picture.transform.rotation = Quaternion.LookRotation(inverseNormal, camera2WorldMatrix.GetColumn(1));
        }, false);

        // Stop the video and reproject the 5 pixels
        if (stopVideo)
        {
            _videoCapture.StopVideoModeAsync(onVideoModeStopped);

            // Get the ray directions
            Vector3 imageCenterDirection   = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix, _resolution, new Vector2(_resolution.width / 2, _resolution.height / 2));
            Vector3 imageTopLeftDirection  = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix, _resolution, new Vector2(0, 0));
            Vector3 imageTopRightDirection = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix, _resolution, new Vector2(_resolution.width, 0));
            Vector3 imageBotLeftDirection  = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix, _resolution, new Vector2(0, _resolution.height));
            Vector3 imageBotRightDirection = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix, _resolution, new Vector2(_resolution.width, _resolution.height));

            UnityEngine.WSA.Application.InvokeOnAppThread(() =>
            {
                // Paint the rays on the 3d world
                _laser.shootLaserFrom(camera2WorldMatrix.GetColumn(3), imageCenterDirection, 10f, _centerMaterial);
                _laser.shootLaserFrom(camera2WorldMatrix.GetColumn(3), imageTopLeftDirection, 10f, _topLeftMaterial);
                _laser.shootLaserFrom(camera2WorldMatrix.GetColumn(3), imageTopRightDirection, 10f, _topRightMaterial);
                _laser.shootLaserFrom(camera2WorldMatrix.GetColumn(3), imageBotLeftDirection, 10f, _botLeftMaterial);
                _laser.shootLaserFrom(camera2WorldMatrix.GetColumn(3), imageBotRightDirection, 10f, _botRightMaterial);
            }, false);
        }
    }
 private void OnFrameSampleAcquired(VideoCaptureSample sample)
 {
 }
Example #22
 private void OnFrameSampleAcquiredPolling(VideoCaptureSample sample)
 {
     FrameSampleAcquired?.Invoke(sample);
     VideoCapture.RequestNextFrameSample(OnFrameSampleAcquiredPolling);
 }
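To kick off the polling chain above, the first frame has to be requested once video mode has started; every callback then requests the next one. A sketch, assuming the usual HoloLensCameraStream start callback shape (the result.success check and the OnVideoModeStarted name are assumptions), could be:
 private void OnVideoModeStarted(VideoCaptureResult result)
 {
     if (result.success)
     {
         // Request the first frame; OnFrameSampleAcquiredPolling keeps the chain going.
         VideoCapture.RequestNextFrameSample(OnFrameSampleAcquiredPolling);
     }
 }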
Example #23
 private void OnFrameSampleAcquiredListener(VideoCaptureSample sample)
 {
     FrameSampleAcquired?.Invoke(sample);
 }
    void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        //You can reuse this byte[] until you need to resize it (for whatever reason).
        if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
        {
            _latestImageBytes = new byte[sample.dataLength];
        }
        sample.CopyRawImageDataIntoBuffer(_latestImageBytes);

        //If you need to get the cameraToWorld matrix for purposes of compositing you can do it like this
        float[] cameraToWorldMatrixAsFloat;
        if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false)
        {
            return;
        }

        //If you need to get the projection matrix for purposes of compositing you can do it like this
        float[] projectionMatrixAsFloat;
        if (sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
        {
            return;
        }

        // Right now we pass things across the pipe as a float array then convert them back into UnityEngine.Matrix using a utility method
        Matrix4x4 cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
        Matrix4x4 projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);

        //This is where we actually use the image data
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            Vector3 cameraWorldPosition    = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);
            Quaternion cameraWorldRotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));


            _videoPanelUI.SetBytes(_latestImageBytes);

            Texture2D tex = _videoPanelUI.rawImage.texture as Texture2D;

            Color32[] c        = tex.GetPixels32();
            IntPtr imageHandle = getImageHandle(c);
            newImage(imageHandle);

            // do any detection here...

            int chessX = topDownCameraCalibrationData["chess_x"].AsInt;
            int chessY = topDownCameraCalibrationData["chess_y"].AsInt;
            float chessSquareMeters = topDownCameraCalibrationData["chess_square_size_meters"].AsFloat;
            double cam_mtx_fx       = HoloCam_fx;
            double cam_mtx_fy       = HoloCam_fy;
            double cam_mtx_cx       = HoloCam_cx;
            double cam_mtx_cy       = HoloCam_cy;
            double dist_k1          = HoloCam_k1;
            double dist_k2          = HoloCam_k2;
            double dist_p1          = HoloCam_p1;
            double dist_p2          = HoloCam_p2;
            double dist_k3          = HoloCam_k3;

            bool gotValidPose = findExtrinsics(chessX, chessY, chessSquareMeters, cam_mtx_fx, cam_mtx_fy, cam_mtx_cx, cam_mtx_cy, dist_k1, dist_k2, dist_p1, dist_p2, dist_k3);

            if (gotValidPose)
            {
                Debug.Log("Got valid pose!");

                List <double> rvec = new List <double>();
                rvec.Add(GetRvec0());
                rvec.Add(GetRvec1());
                rvec.Add(GetRvec2());

                List <double> tvec = new List <double>();
                tvec.Add(GetTvec0());
                tvec.Add(GetTvec1());
                tvec.Add(GetTvec2());

                Debug.Log("rvec between HoloLens camera and chessboard is " + rvec[0] + " " + rvec[1] + " " + rvec[2]);
                Debug.Log("tvec between HoloLens camera and chessboard is " + tvec[0] + " " + tvec[1] + " " + tvec[2]);



                // NOTE: At this point, we should STOP the HoloLens frame processing by:
                // 1: setting _processingCameraFrames to FALSE
                // 2: calling StopCameraProcessing()

                // the data we have available is:
                // --- rvec and tvec (the pose data between the HoloLens camera and the chessboard)
                // --- topDownCameraCalibrationData ... the received data from the top-down camera app, which includes:
                // ------ topDownCameraCalibrationData["rvec"] ... the pose between the top-down camera and the chessboard
                // ------ topDownCameraCalibrationData["tvec"]
                // ------ topDownCameraCalibrationData["fx"], fy, cx, cy (the top-down camera matrix)
                // ------ topDownCameraCalibrationData["k1"], k2, p1, p2, k3 (the top-down camera distortion coefficients)
                // --- cameraWorldPosition and cameraWorldRotation (the pose data in Unity's coord system between the HoloLens camera and the world)


                // StopCameraProcessing();
                // _processingCameraFrames = false;
            }


            //// Fetch the processed image and render
            imageHandle = getProcessedImage();
            Marshal.Copy(imageHandle, processedImageData, 0, _resolution.width * _resolution.height * 4);
            tex.LoadRawTextureData(processedImageData);
            tex.Apply();



            if (_processingCameraFrames)
            {
                this._videoCapture.RequestNextFrameSample(OnFrameSampleAcquired);
            }
        }, false);
    }