Example #1
    /// <summary>
    /// Called when the photo has finished capturing: fetch the captured photo and call the Custom Vision API to analyze the image
    /// </summary>
    /// <param name="result">Result of the capture</param>
    /// <param name="photoCaptureFrame">The captured image</param>
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            audioSource.Stop();
            audioSource.clip = captureAudioClip;
            audioSource.Play();

            ModelManager.Instance.SetPhotoImageActive(true);
            ModelManager.Instance.SetWaitingCanvas(false);
            //ModelManager.Instance.SetTipText("正在处理中..."); //changed by yimei
            ModelManager.Instance.SetTipText("正在处理中...\r\nIn processing...");
            if (ConfigurationManager.Instance.GetMode() == CurrentMode.EdittingMode)
            {
                ToolManager.Instance.ShowMenu();
                currentStatus = CurrentStatus.EdittingPhoto;
            }

            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            imageBufferList = FlipVertical(imageBufferList, cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight, 4);

            Texture2D targetTexture = CreateTexture(imageBufferList, cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight);
            Sprite    sprite        = Sprite.Create(targetTexture, new Rect(0, 0, targetTexture.width, targetTexture.height), new Vector2(0.5f, 0.5f));

            ModelManager.Instance.SetPhotoImage(sprite);

            if (ConfigurationManager.Instance.GetMode() == CurrentMode.SimpleMode)
            {
                StartCoroutine(PostToCustomVisionAPI(targetTexture));
            }
            else
            {
                ModelManager.Instance.PlayAnimation("ShowAnimation");
            }
        }
        else
        {
            audioSource.Stop();
            audioSource.clip = failedAudioClip;
            audioSource.Play();

            currentStatus = CurrentStatus.Ready;
            //ModelManager.Instance.SetTipText("点击进行拍照"); //changed by yimei
            ModelManager.Instance.SetTipText("点击进行拍照\r\nAirtap for taking picture");
            ModelManager.Instance.PlayAnimation("IdleAnimation");
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
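
The FlipVertical and CreateTexture helpers called above are not shown in this example. A minimal sketch of what they might look like, assuming a BGRA32 capture buffer (stride of 4 bytes per pixel):

    // Hypothetical helpers (not part of the original source): flip a raw
    // BGRA32 buffer vertically, and wrap one in a Texture2D.
    private List<byte> FlipVertical(List<byte> src, int width, int height, int stride)
    {
        byte[] dst = new byte[src.Count];
        int rowLength = width * stride;
        for (int y = 0; y < height; y++)
        {
            // Copy each source row into the mirrored destination row.
            src.CopyTo(y * rowLength, dst, (height - 1 - y) * rowLength, rowLength);
        }
        return new List<byte>(dst);
    }

    private Texture2D CreateTexture(List<byte> raw, int width, int height)
    {
        // BGRA32 matches the raw PhotoCapture buffer layout assumed above.
        Texture2D tex = new Texture2D(width, height, TextureFormat.BGRA32, false);
        tex.LoadRawTextureData(raw.ToArray());
        tex.Apply();
        return tex;
    }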
Example #2
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            // In this example, we captured the image using the BGRA32 format.
            // So our stride will be 4 since we have a byte for each rgba channel.
            // The raw image data will also be flipped so we access our pixel data
            // in the reverse order.
            int          stride      = 4;
            float        denominator = 1.0f / 255.0f;
            List <Color> colorArray  = new List <Color>();
            Debug.Log(imageBufferList.Count);
            for (int i = imageBufferList.Count - 1; i - 3 >= 0; i -= stride)
            {
                float a = (int)(imageBufferList[i - 0]) * denominator;
                float r = (int)(imageBufferList[i - 1]) * denominator;
                float g = (int)(imageBufferList[i - 2]) * denominator;
                float b = (int)(imageBufferList[i - 3]) * denominator;

                colorArray.Add(new Color(r, g, b, a));
            }

            targetTexture_.SetPixels(colorArray.ToArray());
            targetTexture_.Apply();
            if (_debug)
            {
                if (quadRenderer_ == null)
                {
                    GameObject p = GameObject.CreatePrimitive(PrimitiveType.Quad);
                    quadRenderer_ = p.GetComponent <Renderer>() as Renderer;
                    Debug.Log(quadRenderer_);
                    Debug.Log(Shader.Find("Unlit/Texture"));
                    quadRenderer_.material = new Material(Shader.Find("Unlit/Texture"));

                    p.transform.parent        = this.transform;
                    p.transform.localPosition = new Vector3(0.0f, 0.0f, 1.0f);
                }

                quadRenderer_.material.SetTexture("_MainTex", targetTexture_);
            }
        }
        photoCaptureObject_.StopPhotoModeAsync(OnStoppedPhotoMode);
        // Take another photo
    }
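
If the raw bytes are only needed to fill a texture, the manual per-byte loop above can be skipped: for BGRA32 captures, PhotoCaptureFrame.UploadImageDataToTexture copies the pixels directly into an RGBA32 Texture2D. A sketch, assuming targetTexture_ was created at the capture resolution:

    // Faster alternative to the per-byte conversion loop (BGRA32 captures only):
    photoCaptureFrame.UploadImageDataToTexture(targetTexture_);
    quadRenderer_.material.SetTexture("_MainTex", targetTexture_);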
Example #3
    /// <summary>
    /// Called after the photo has been taken
    /// </summary>
    /// <param name="result">Result</param>
    /// <param name="photoCaptureFrame">Frame</param>
    private void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        Debug.Log("OnProcessFrame");
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            photoCaptureFrame.TryGetCameraToWorldMatrix(out martrix_camera_to_world);
            photoCaptureFrame.TryGetProjectionMatrix(out martrix_projection);
            //photoCaptureFrame.Dispose();
#if WINDOWS_UWP
            SendData(imageBufferList.ToArray());
            //ShowHoloGrams = !ShowHoloGrams;
#endif
            photo_capture.TakePhotoAsync(OnProcessFrame);
        }
    }
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Copy the in-memory image into imageBufferList, which is sent to the Computer Vision API
            List <byte> imageBufferList = new List <byte>();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            // Debug only: displays the outgoing image so you can check what was captured. If it is in the way, the whole photoPanel can be deleted.
            Texture2D debugTexture = new Texture2D(100, 100);
            debugTexture.LoadImage(imageBufferList.ToArray());
            photoPanel.texture = debugTexture;

            StartCoroutine(PostToVisionAPI(imageBufferList.ToArray()));
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
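
The PostToVisionAPI coroutine started above is not shown. A hedged sketch of what such a coroutine typically looks like with UnityWebRequest; the endpoint URL and subscription-key placeholder are assumptions, not values from the original project:

    // Requires: using System.Collections; using UnityEngine.Networking;
    private IEnumerator PostToVisionAPI(byte[] imageBytes)
    {
        // Placeholder endpoint; substitute the real Computer Vision analyze URL.
        string url = "https://<region>.api.cognitive.microsoft.com/vision/v3.2/analyze";
        using (UnityWebRequest request = UnityWebRequest.Put(url, imageBytes))
        {
            request.method = UnityWebRequest.kHttpVerbPOST;
            request.SetRequestHeader("Content-Type", "application/octet-stream");
            request.SetRequestHeader("Ocp-Apim-Subscription-Key", "<your-key>");
            yield return request.SendWebRequest();
            Debug.Log(request.downloadHandler.text); // raw JSON analysis result
        }
    }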
Example #5
        public void Print(PhotoCaptureFrame captured)
        {
            if (mSetupped == false)
            {
                throw new System.Exception("Not set up yet.");
            }


            List <byte> buf = new List <byte>();

            captured.CopyRawImageDataIntoBuffer(buf);

            var b = Serializer.Serialize(
                new Pair <string, byte []>().Set("てすと", buf.ToArray()));

            Print(b.ToArray());
        }
Example #6
    //void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    //{
    //    if (result.success)
    //    {
    //        // Create our Texture2D for use and set the correct resolution
    //        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
    //        Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);
    //        // Copy the raw image data into our target texture
    //        photoCaptureFrame.UploadImageDataToTexture(targetTexture);
    //        // Do as we wish with the texture such as apply it to a material, etc.

    //        _outputMaterial.mainTexture = targetTexture;
    //    }
    //    // Clean up
    //    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    //}

    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            //_visionApiTest.QueryVisionApi("http://www.technewscentral.co.uk/wp-content/uploads/2015/07/sony-tablets1-hands2-lg.jpg"); //tablet
            //_visionApiTest.QueryVisionApi("https://pixabay.com/static/uploads/photo/2012/04/01/18/38/television-23936_960_720.png"); //tv
            //_visionApiTest.QueryVisionApi("https://i5.walmartimages.com/dfw/4ff9c6c9-9356/k2-_c59a878c-3d51-4807-aabd-84b0410de921.v1.jpg"); //phone
            //_visionApiTest.QueryVisionApi("http://core0.staticworld.net/images/article/2015/02/hp-spectre-x360_beauty-100570598-orig.jpg"); //laptop
            _visionApiTest.QueryVisionApi(imageBufferList.ToArray());
        }

        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
        void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            UnityEngine.Debug.Log("++OnProcessFrame");
            if (result.success)
            {
                if (!Const.LOAD_IMAGES)
                {
                    List <byte> imageBufferList = new List <byte>();
                    // Copy the raw IMFMediaBuffer data into our empty byte list.
                    photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

                    photoCaptureFrame.TryGetCameraToWorldMatrix(out _cameraToWorldMatrix);
                    photoCaptureFrame.TryGetProjectionMatrix(out _projectionMatrix);
                    //UnityEngine.Debug.Log(cameraToWorldMatrix);

                    photoCaptureFrame.Dispose();

                    _imageDataRaw   = imageBufferList.ToArray();
                    _frameReadyFlag = true;
                }
                else
                {
                    /*
                     * _indexImageFile = (int)(frameID % _imageFiles.LongCount());
                     * using (IRandomAccessStreamWithContentType stream = await _imageFiles[_indexImageFile].OpenReadAsync())
                     * {
                     *  imageData = new byte[stream.Size];
                     *  using (DataReader reader = new DataReader(stream))
                     *  {
                     *      await reader.LoadAsync((uint)stream.Size);
                     *      reader.ReadBytes(imageData);
                     *  }
                     * }
                     */
                }
            }
            if (Const.HOLO_CAPTURE)
            {
                _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoModeHOLO);
            }
            else
            {
                _isCapturing = false;
            }
        }
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        Debug.Log(photoCaptureFrame.dataLength);
        Debug.Log(photoCaptureFrame.hasLocationData);

        var rgb = new List <byte>();

        photoCaptureFrame.CopyRawImageDataIntoBuffer(rgb);

        //var bytes = File.ReadAllBytes(@"C:\Users\asd14\Pictures\hololens.jpg");
        //var stream = new MemoryStream(bytes);
        var bytes = rgb.ToArray();

        UploadFile(bytes, "https://hologate.tav.cc/image");


        StartCoroutine("DoRequest");
    }
Example #9
 private void OnPhotoCapturedToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame frame)
 {
     if (result.success)
     {
         Debug.Log("Captured image to memory. Sending data to requester.");
         Matrix4x4 calibrationMatrix;
         frame.TryGetProjectionMatrix(out calibrationMatrix);
         List <byte> imageData = new List <byte>();
         frame.CopyRawImageDataIntoBuffer(imageData);
         _currentState.Value = State.READY_TO_TAKE_PHOTO;
         _currentRequester.ReceiveTakenPictureAsBytes(imageData, _cameraParameters.cameraResolutionWidth, _cameraParameters.cameraResolutionHeight, calibrationMatrix);
     }
     else
     {
         Debug.LogError("Failed to capture image to memory.");
         _currentState.Value = State.READY_TO_TAKE_PHOTO;
     }
 }
Example #10
        /// <summary>
        /// Processes the received frame, converts the image to grayscale if requested, and invokes the next photo request.
        /// </summary>
        private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            if (_stopped?.Task != null)
            {
                return;
            }
            if (result.resultType == PhotoCapture.CaptureResultType.UnknownError)
            {
                return;
            }
            if (photoCaptureFrame == null)
            {
                return;
            }
            Size size = new Size(FrameWidth, (double)FrameHeight * 3 / 2); // The NV12 luminance (grayscale) plane takes the full image height; the chrominance plane is stored at half resolution, hence height * 3 / 2. <see href="https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#nv12"/>.

            _image = new Mat(size, CvType.CV_8UC1);
            List <byte> imageBuffer = new List <byte>();

            photoCaptureFrame?.CopyRawImageDataIntoBuffer(imageBuffer);
            MatUtils.copyToMat(imageBuffer.ToArray(), _image);

            if (_format == ColorFormat.Grayscale)
            {
                Imgproc.cvtColor(_image, _image, Imgproc.COLOR_YUV2GRAY_NV12);
            }

            Matrix4x4 cameraToWorldMatrix = Matrix4x4.identity;

            photoCaptureFrame?.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            CameraExtrinsic extrinsic = new CameraExtrinsic(cameraToWorldMatrix);

            Matrix4x4 projectionMatrix = Matrix4x4.identity;

            photoCaptureFrame?.TryGetProjectionMatrix(out projectionMatrix);
            CameraIntrinsic intrinsic = new CameraIntrinsic(projectionMatrix);

            CameraFrame           cameraFrame = new CameraFrame(_image, intrinsic, extrinsic, FrameWidth, FrameHeight, FrameCount++, _format);
            FrameArrivedEventArgs args        = new FrameArrivedEventArgs(cameraFrame);

            FrameArrived?.Invoke(this, args);

            _photoCaptureObject?.TakePhotoAsync(OnCapturedPhotoToMemory);
        }
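
Because the capture format here is NV12, the raw buffer holds a full-resolution luminance (Y) plane followed by a half-resolution interleaved chrominance (UV) plane, i.e. 1.5 bytes per pixel, which is why the Mat above is sized FrameWidth x FrameHeight * 3 / 2. A quick sanity check, using the names from this example:

    // An NV12 frame is width*height luma bytes plus width*height/2 chroma bytes.
    int expectedLength = FrameWidth * FrameHeight * 3 / 2;
    Debug.Assert(imageBuffer.Count == expectedLength,
        $"Unexpected NV12 buffer length: {imageBuffer.Count}, expected {expectedLength}");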
Example #11
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        var photoBuffer = new List <byte>();

        if (photoCaptureFrame.pixelFormat == CapturePixelFormat.JPEG)
        {
            photoCaptureFrame.CopyRawImageDataIntoBuffer(photoBuffer);
        }
        else
        {
            photoBuffer = ConvertAndShowOnDebugPane(photoCaptureFrame);
        }

        Messenger.Instance.Broadcast(
            new PhotoCaptureMessage(photoBuffer, _cameraResolution, CopyCameraTransForm()));

        // Deactivate our camera
        _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
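
ConvertAndShowOnDebugPane is not shown in this example. A hypothetical sketch, assuming it converts a non-JPEG (BGRA32) frame into compressed PNG bytes via a temporary texture; _cameraResolution is the field already used above:

    private List<byte> ConvertAndShowOnDebugPane(PhotoCaptureFrame frame)
    {
        // UploadImageDataToTexture expects an RGBA32 texture and a BGRA32 capture.
        Texture2D tex = new Texture2D(_cameraResolution.width, _cameraResolution.height,
                                      TextureFormat.RGBA32, false);
        frame.UploadImageDataToTexture(tex);
        // Re-encode so the broadcast payload is a compressed image,
        // matching the JPEG branch above.
        return new List<byte>(tex.EncodeToPNG());
    }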
Example #12
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        // Copy the raw image data into the target texture
        lock (imageBuffer)
        {
            imageBuffer.Clear();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBuffer);
            byte[] vs = imageBuffer.ToArray();
            tex.LoadRawTextureData(vs);
            tex.Apply();
            rawImage.texture = tex;
            //IMPORTANT: Dispose the capture frame, or the app will crash after a while with access violation
            photoCaptureFrame.Dispose();
            firstScan = false;
        }

        // Deactivate the camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #13
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        // Copy the raw image data into the target texture
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);
        photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);

        Color32[] pix   = targetTexture.GetPixels32();
        int       width  = targetTexture.width;
        int       height = targetTexture.height;

        int index = MaxRedColor.MaxRedColors(pix);

        int lines  = (int)(index + 1) / 640;
        int stakes = index - (lines * 640);

        if (Variabless.First_x == -1)
        {
            Variabless.First_x = stakes;
            Variabless.First_y = lines;
            print(stakes);
        }
        else if (Variabless.Second_x == -1)
        {
            Variabless.Second_x = stakes;
            Variabless.Second_y = lines;
            print(stakes);
        }
        else if (Variabless.Third_x == -1)
        {
            Variabless.Third_x = stakes;
            Variabless.Third_y = lines;
            print(stakes);
        }
        else if (Variabless.Four_x == -1)
        {
            Variabless.Four_x = stakes;
            Variabless.Four_y = lines;
            print(stakes);
        }

        // Deactivate the camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
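
The row/column math above hard-codes a 640-pixel-wide frame, and the extra +1 before the division shifts the row for the last pixel of each line. For a row-major Color32[] returned by GetPixels32(), the general form is:

    // General index-to-pixel conversion (sketch):
    int y = index / targetTexture.width;  // row    ("lines"  above)
    int x = index % targetTexture.width;  // column ("stakes" above)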
Example #14
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            DebugDisplay.Instance.Log("OnCapturedPhotoToMemory Copy Started ");

            imageBufferList = new List <byte>();
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            DebugDisplay.Instance.Log("OnCapturedPhotoToMemory " + imageBufferList.Count);
        }
        else
        {
            DebugDisplay.Instance.Log("Failed to save Photo to memory");
            photoReadyCallBack(false, imageBufferList);
        }

        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #15
        void OnPhotoCapturedCopyToBytes(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);

            Matrix4x4 projectionMatrix;

            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            List <byte> capturedImg = new List <byte>();

            photoCaptureFrame.CopyRawImageDataIntoBuffer(capturedImg);

            photoCaptureFrame.Dispose();

            _takePhotoActionCopyToBytes?.Invoke(cameraToWorldMatrix, projectionMatrix, capturedImg, _cameraParameters.cameraResolutionHeight, _cameraParameters.cameraResolutionWidth);
            CanTakePhoto = false;
            _takePhotoActionCopyToBytes = null;
        }
Example #16
    void OnCapturePhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        Matrix4x4   cameraToWorldMatrix;
        List <byte> buffer = new List <byte>();

        photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);

        //Check if we can receive the position where the photo was taken
        if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix))
        {
            cameraToWorldMatrix = Matrix4x4.identity;
        }

        if (photoReady != null)
        {
            photoReady(id, buffer.ToArray(), cameraToWorldMatrix, cameraResolution);
        }

        // stop the photo mode
        photoCapture.StopPhotoModeAsync(OnPhotoModeStopped);
    }
Example #17
    void OnCapturedPhotoToMemoryAsync(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Convert photoCaptureFrame to a List<byte>, then to a byte[].
            imageBufferList = new List <byte>();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            // Save the captured content into imageBufferArray
            imageBufferArray = imageBufferList.ToArray();

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            worldToCameraMatrix = cameraToWorldMatrix.inverse;
            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            UnityEngine.Debug.LogFormat(@"The value of cameraToWorld Matrix: {0}{1}{2}{3} ", cameraToWorldMatrix.GetRow(0), cameraToWorldMatrix.GetRow(1), cameraToWorldMatrix.GetRow(2), cameraToWorldMatrix.GetRow(3));

            UnityEngine.Debug.Log("Captured Photo To Memory Succeed! ");
        }

        photoCaptureObj.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #18
        private void onCapturedPhotoToMemoryCallback(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            if (!result.success)
            {
                Debug.LogError("Failed to take photo");
                onPhotoCaptured.OnNext(null);
                photoCapture.StopPhotoModeAsync(onPhotoModeStoppedCallback);
                return;
            }

            var buffer = new List <byte>();

            photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);


            // Keep the parameters for transforming the camera orientation into world coordinates
            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            //var cameraRotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

            Matrix4x4 projectionMatrix;

            photoCaptureFrame.TryGetProjectionMatrix(Camera.main.nearClipPlane, Camera.main.farClipPlane, out projectionMatrix);
            var pixelToCameraMatrix = projectionMatrix.inverse;

            Plan = new ShootingPlan
            {
                Resolution       = Resolution,
                ShootingLocation = Camera.main.transform.position,
                CameraToWorld    = cameraToWorldMatrix,
                PixelToCamera    = pixelToCameraMatrix,
            };

            photoCapture.StopPhotoModeAsync(onPhotoModeStoppedCallback);

            var value = buffer.ToArray();

            onPhotoCaptured.OnNext(value);
        }
Example #19
        void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            //Matrix4x4 cameraToWorldMatrix;

            //photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            //Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

            //Matrix4x4 projectionMatrix;
            //photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            //var texture = new Texture2D(this.cameraParameters.cameraResolutionWidth, this.cameraParameters.cameraResolutionHeight, TextureFormat.ARGB32, false);
            //photoCaptureFrame.UploadImageDataToTexture(texture);
            List <byte> byteses = new List <byte>();

            photoCaptureFrame.CopyRawImageDataIntoBuffer(byteses);
            TakeImageAction?.Invoke(byteses);
            //texture.wrapMode = TextureWrapMode.Clamp;
            photoCaptureFrame.Dispose();
            //texture.Compress(true); // "Compress" here means compressing to the DXT format.
            Resources.UnloadUnusedAssets();
            isCapturingPhoto = false;
        }
    void Publish_to_ROS(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            memory = photoCaptureFrame;

            // Copy the image
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            byte[] ROS_data = new byte[imageBufferList.Count];

            Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
            Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);

            ROS_data = targetTexture.EncodeToJPG(qualityLevel);

            GameObject.Destroy(targetTexture);

            Publish_to_ROS(ROS_data);
        }
    }
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();

            Debug.Log("OnCapturedPhotoToMemory Copy Started");

            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            Debug.Log("OnCapturedPhotoToMemory " + imageBufferList.Count);

            //Execute OCR Coroutine
            ExecuteMCSComputerVisionOCR(imageBufferList, "ocr");
        }
        else
        {
            Debug.Log("Failed to save Photo to memory");
        }

        photoCapture.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #22
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            // In this example, we captured the image using the BGRA32 format.
            // So our stride will be 4 since we have a byte for each rgba channel.
            // The raw image data will also be flipped so we access our pixel data
            // in the reverse order.
            int          stride      = 4;
            float        denominator = 1.0f / 255.0f;
            List <Color> colorArray  = new List <Color>();
            for (int i = imageBufferList.Count - 1; i >= 0; i -= stride)
            {
                float a = (int)(imageBufferList[i - 0]) * denominator;
                float r = (int)(imageBufferList[i - 1]) * denominator;
                float g = (int)(imageBufferList[i - 2]) * denominator;
                float b = (int)(imageBufferList[i - 3]) * denominator;

                colorArray.Add(new Color(r, g, b, a));
            }
            // Now we could do something with the array such as texture.SetPixels() or run image processing on the list
            //Debug.Log(String.Format("imageBufferList={0}", imageBufferList.Count));
            CreateMesh(ref colorArray, photoCaptureFrame);

            // save ?
            if (CaptureCount % 1 == 0)
            {
                SaveFile(PointAll, ColorAll);
                Debug.Log(String.Format("SaveFile={0}", CaptureCount));
            }
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            var cameraToWorldMatrix = new Matrix4x4();
            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);

            cameraPosition = cameraToWorldMatrix.MultiplyPoint3x4(new Vector3(0, 0, -1));
            cameraRotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

            Matrix4x4 projectionMatrix;
            photoCaptureFrame.TryGetProjectionMatrix(Camera.main.nearClipPlane, Camera.main.farClipPlane, out projectionMatrix);
            Matrix4x4 pixelToCameraMatrix = projectionMatrix.inverse;

            status.GetComponent <TextMesh>().text = "Processing";

            StartCoroutine(PostToFaceAPI(imageBufferList.ToArray(), cameraToWorldMatrix, pixelToCameraMatrix));
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #24
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        List <byte> imageBufferList = new List <byte>();

        byte[] imageArray;
        if (photoCaptureFrame.pixelFormat == CapturePixelFormat.JPEG)
        {
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            imageArray = imageBufferList.ToArray();
        }
        else
        {
            // Copy the raw image data into our target texture
            imageArray = ConvertAndShowOnDebugPane(photoCaptureFrame);
        }

        string data      = System.Convert.ToBase64String(imageArray);
        string pictureID = System.Guid.NewGuid().ToString();

        Log("sending picture via MQTT");
        Log("ID: " + pictureID);
        mqttHelper.Publish(outboundTopic, "{\"ID\":\"" + pictureID + "\",\"image\":\"" + data + "\"}");
    }
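
String concatenation works for this payload because base64 text never needs JSON escaping; if the message grows, the same JSON can be built with Unity's JsonUtility. A sketch (the PhotoMessage type is hypothetical):

    [System.Serializable]
    class PhotoMessage
    {
        public string ID;
        public string image;
    }

    // Produces {"ID":"...","image":"..."} like the hand-built string above:
    string json = JsonUtility.ToJson(new PhotoMessage { ID = pictureID, image = data });
    mqttHelper.Publish(outboundTopic, json);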
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            Debug.Log("Success. Changing things now!");
            List <byte> imageBufferList = new List <byte>();

            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            // In this example, we captured the image using the BGRA32 format.
            // So our stride will be 4 since we have a byte for each rgba channel.
            // The raw image data will also be flipped so we access our pixel data
            // in the reverse order.
            int          stride      = 4;
            float        denominator = 1.0f / 255.0f;
            List <Color> colorArray  = new List <Color>();
            // Loop locals renamed pa/pr/pg/pb so they do not collide with the
            // r/g/b fields this method assigns below (C# disallows reusing the names).
            for (int i = imageBufferList.Count - 1; i >= 0; i -= stride)
            {
                float pa = (int)(imageBufferList[i - 0]) * denominator;
                float pr = (int)(imageBufferList[i - 1]) * denominator;
                float pg = (int)(imageBufferList[i - 2]) * denominator;
                float pb = (int)(imageBufferList[i - 3]) * denominator;

                colorArray.Add(new Color(pr, pg, pb, pa));
            }


            // Now we could do something with the array such as texture.SetPixels() or run image processing on the list
            Debug.Log("Color Array" + r.ToString());

            int counter = 0;
            r = 0; g = 0; b = 0;
            for (int w = (width / 2) - 50; w < ((width) / 2) + 50; w++)
            {
                for (int hi = (height / 2) - 50; hi < ((height) / 2) + 50; hi++)
                {
                    int index = w * height + hi;
                    counter++;
                    r += colorArray[index].r;
                    g += colorArray[index].g;
                    b += colorArray[index].b;
                }
            }

            r /= counter;
            g /= counter;
            b /= counter;
            // Now we could do something with the array such as texture.SetPixels() or run image processing on the list

            //r /= counter;
            //g /= counter;
            //b /= counter;

            Color rgbColor;
            rgbColor = new Color(r, g, b, 1);

            //Color.RGBToHSV(rgbColor, out h, out s, out v);
            //Color32[] pix = targetTexture.GetPixels32();
            //for (int i = 0; i < pix.Length; i++)
            //{
            //    pix[i].r = (byte)(r * 255);
            //    pix[i].g = (byte)(g * 255);
            //    pix[i].b = (byte)(b * 255);

            //}
            Debug.Log("Current RGB = (" + r.ToString() + ", " + g.ToString() + ", " + b.ToString() + ")");
            //targetTexture.SetPixels32(pix);
            //targetTexture.Apply();
            //GetComponent<MeshRenderer>().material.mainTexture = targetTexture;
        }
        else
        {
            Debug.Log("Failed!! and F****d ");
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #26
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
#if !UNITY_EDITOR
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
            int        imageWidth       = cameraResolution.width;
            int        imageHeight      = cameraResolution.height;

            ZXing.BarcodeReader qrReader = new ZXing.BarcodeReader();
            var qrResult = qrReader.Decode(imageBufferList.ToArray(), imageWidth, imageHeight, ZXing.BitmapFormat.RGBA32);

            if (qrReader == null)
            {
                Debug.Log("error: BarcodeReader.Decode");
                return;
            }

            Debug.Log(qrResult.Text);

            Matrix4x4 projectionMat;
            if (!photoCaptureFrame.TryGetProjectionMatrix(out projectionMat))
            {
                Debug.Log("error: PhotoCaptureFrame.TryGetProjectionMatrix");
                return;
            }

            Matrix4x4 cameraToWorldMat;
            if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMat))
            {
                Debug.Log("error: PhotoCaptureFrame.TryGetCameraToWorldMatrix");
                return;
            }

            if (qrResult.ResultPoints.Length < 3)
            {
                Debug.Log("error: too few ResultPoints");
                return;
            }

            Vector3[] points = new Vector3[3];

            for (int i = 0; i < 3; ++i)
            {
                var pixelPos            = new Vector2(qrResult.ResultPoints[i].X, qrResult.ResultPoints[i].Y);
                var imagePosZeroToOne   = new Vector2(pixelPos.x / imageWidth, 1 - (pixelPos.y / imageHeight));
                var imagePosProjected   = (imagePosZeroToOne * 2) - new Vector2(1, 1);    // -1 to 1 space
                var cameraSpacePos      = UnProjectVector(projectionMat, new Vector3(imagePosProjected.x, imagePosProjected.y, 1));
                var worldSpaceRayPoint1 = cameraToWorldMat.MultiplyPoint(Vector3.zero);   // camera location in world space
                var worldSpaceRayPoint2 = cameraToWorldMat.MultiplyPoint(cameraSpacePos); // ray point in world space

                RaycastHit hit;
                if (!Physics.Raycast(worldSpaceRayPoint1, worldSpaceRayPoint2 - worldSpaceRayPoint1, out hit, 5, 1 << 31))
                {
                    Debug.Log("error: Physics.Raycast failed");
                    return;
                }

                points[i] = hit.point;
            }

            var worldTopLeft    = points[1];
            var worldTopRight   = points[2];
            var worldBottomLeft = points[0];

            var bottomToTop = worldTopLeft - worldBottomLeft;
            var leftToRight = worldTopRight - worldTopLeft;

            qrcodePlane.transform.forward  = bottomToTop;
            qrcodePlane.transform.position = worldBottomLeft + (bottomToTop + leftToRight) * 0.5f;
            plane.transform.localScale     = new Vector3(leftToRight.magnitude, 1, bottomToTop.magnitude);

            //photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
        }
#endif
    }
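
The UnProjectVector helper used above is not shown here. A sketch matching the helper from Unity's locatable-camera sample, which solves the projection for a point on the image plane at the given depth:

    public static Vector3 UnProjectVector(Matrix4x4 proj, Vector3 to)
    {
        // Solve proj * from = to for x, y, z using the rows of the
        // projection matrix (assumes a standard projective form).
        Vector3 from = new Vector3(0, 0, 0);
        var axsX = proj.GetRow(0);
        var axsY = proj.GetRow(1);
        var axsZ = proj.GetRow(2);
        from.z = to.z / axsZ.z;
        from.y = (to.y - (from.z * axsY.z)) / axsY.y;
        from.x = (to.x - (from.z * axsX.z)) / axsX.x;
        return from;
    }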
        private bool OnCaptureCompleted(PhotoCaptureFrame frame)
        {
            if (this.isProcessingImage)
            {
                return(true);
            }

            this.isProcessingImage = true;

            Matrix4x4 cameraToWorldMatrix;

            frame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            cameraToWorldMatrix = cameraToWorldMatrix * Matrix4x4.Scale(new Vector3(1, 1, -1));

            // Update the preview image with the last image processed
            UpdatePreviewTexture(cameraImageBuffer);
            var res = PVCamManager.Instance.PhotoCapCamResolution;
            var w   = res.width;
            var h   = res.height;

            if (posterLocationHandler == null)
            {
                posterLocationHandler = new PosterLocationHandler(w, h);
            }

            Matrix4x4 cameraClipToWorld;
            Vector3   cameraPos;
            var       camProj = TuneProjectionMatrix(frame);

            GetCameraClipTransformAndWorldPosition(cameraToWorldMatrix, camProj,
                                                   out cameraClipToWorld, out cameraPos);

            posterLocationHandler.UpdateCamera(cameraToWorldMatrix, camProj, cameraPos);

            DoOnSeperateThread(() =>
            {
                // Allocate the buffers to hold the image data
                if ((cameraImageListBuffer == null) || (cameraImageListBuffer.Count != frame.dataLength))
                {
                    cameraImageListBuffer = new List <byte>(frame.dataLength);
                }
                if ((cameraImageBuffer == null) || (cameraImageBuffer.Length != frame.dataLength))
                {
                    cameraImageBuffer    = new byte[frame.dataLength];
                    cameraImageGCHandle  = GCHandle.Alloc(cameraImageBuffer, GCHandleType.Pinned);
                    cameraImagePtrToData = cameraImageGCHandle.AddrOfPinnedObject();
                }

                frame.CopyRawImageDataIntoBuffer(cameraImageListBuffer);
                // Copy the image data from the list into the byte array
                cameraImageListBuffer.CopyTo(cameraImageBuffer);

                PVCamManager.Instance.SignalFinishedWithFrame();

                // Find the poster:
                this.FindPoster(w, h);

                this.isProcessingImage = false;
            });

            return(false);
        }
Example #28
        public CaptureFrameInstance(PhotoCaptureFrame managedFrame)
        {
            // build up image struct, copy it to unmanaged memory
            image_u8 temp = new image_u8();

            temp.width  = WebcamSystem.instance.cameraResolution.width;
            temp.height = WebcamSystem.instance.cameraResolution.height;
            // TASK: implement stride optimization in cases where shape is unoptimized
            // https://docs.microsoft.com/en-us/windows/win32/medfound/image-stride
            temp.stride = WebcamSystem.instance.cameraResolution.width;
            temp.buf    = Marshal.AllocHGlobal(temp.height * temp.stride);
            _bufPtr     = temp.buf; // stored for easy deallocation later

            // Obtain the managed BGRA image bytes, and allocate space for the
            // one-quarter-sized grayscale image bytes (transformedImg)
            List <byte> managedBuffer = new List <byte>();

            managedFrame.CopyRawImageDataIntoBuffer(managedBuffer);
            byte[] transformedImg = new byte[temp.width * temp.height];


            // Do the transformation from BGRA to grayscale, using hardware acceleration if possible
            int  kernelHandle = WebcamSystem.instance.BGRAtoGrayscaleShader.FindKernel("ProcessImage");
            uint groupSize;

            WebcamSystem.instance.BGRAtoGrayscaleShader.GetKernelThreadGroupSizes(kernelHandle, out groupSize, out _, out _);
            if ((temp.width * temp.height) % groupSize == 0)  // TASK: implement stride optimization
            {
                lock (bufLock)
                {
                    if (_inputBuffer == null)
                    {
                        _inputBuffer = new ComputeBuffer(temp.width * temp.height, sizeof(uint));  // bgra values; 4 bytes per pixel
                    }
                    if (_outputBuffer == null)
                    {
                        _outputBuffer = new ComputeBuffer(temp.width * temp.height / 4, sizeof(uint));  // grayscale values; 1 byte per pixel
                    }

                    _inputBuffer.SetData(managedBuffer.ToArray());
                    WebcamSystem.instance.BGRAtoGrayscaleShader.SetBuffer(kernelHandle, "In", _inputBuffer);
                    WebcamSystem.instance.BGRAtoGrayscaleShader.SetBuffer(kernelHandle, "Out", _outputBuffer);
                    // each group processes groupSize "clumps" of four pixels
                    int threadGroupCount = temp.width * temp.height / (int)groupSize / 4;
                    WebcamSystem.instance.BGRAtoGrayscaleShader.Dispatch(kernelHandle, threadGroupCount, 1, 1);
                    _outputBuffer.GetData(transformedImg);
                }
            }
            else
            {
                Debug.LogError("Unusual resolution used-- cannot hardware accelerate! Defaulting to iterative approach...");

                ProcessImage(managedBuffer.ToArray(), temp.width, temp.height, transformedImg);
            }

            // Copy the processed image to unmanaged memory
            Marshal.Copy(transformedImg, 0, temp.buf, transformedImg.Length);
            // Allocate the unmanaged image struct
            unmanagedFrame = Marshal.AllocHGlobal(Marshal.SizeOf(temp));
            Marshal.StructureToPtr <image_u8>(temp, unmanagedFrame, false);
        }
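
The iterative ProcessImage fallback referenced above is not shown. A hypothetical sketch of a matching implementation that collapses each 4-byte BGRA pixel into one grayscale byte (the Rec. 601 luma weights are an assumption; any mapping consistent with the compute shader would do):

    private static void ProcessImage(byte[] bgra, int width, int height, byte[] grayOut)
    {
        for (int px = 0; px < width * height; px++)
        {
            int i = px * 4; // 4 bytes per BGRA pixel
            grayOut[px] = (byte)(0.114f * bgra[i]         // B
                                 + 0.587f * bgra[i + 1]   // G
                                 + 0.299f * bgra[i + 2]); // R
        }
    }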
Example #29
    void OnCapturedPhotoToMemoryBYTE(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            List<byte> imageBufferList = new List<byte>();

            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);


            // In this example, we captured the image using the BGRA32 format.
            // So our stride will be 4 since we have a byte for each rgba channel.
            // The raw image data will also be flipped so we access our pixel data
            // in the reverse order.
            int stride = 4;
            float denominator = 1.0f / 255.0f;
            List<Color> colorArray = new List<Color>();
            for (int i = imageBufferList.Count - 1; i >= 0; i -= stride)
            {
                float a = (int)(imageBufferList[i - 0]) * denominator;
                float r = (int)(imageBufferList[i - 1]) * denominator;
                float g = (int)(imageBufferList[i - 2]) * denominator;
                float b = (int)(imageBufferList[i - 3]) * denominator;

                colorArray.Add(new Color(r, g, b, a));
            }

            colorArray.Reverse();

            // Converting the List<byte> to a byte array
            byte[] imageBufferArray = imageBufferList.ToArray();

            // Now we could do something with the array such as texture.SetPixels() or run image processing on the list

//#if !UNITY_EDITOR
            // Create a barcode reader instance
            BarcodeReader barcodeReader = new BarcodeReader { AutoRotate = true };

            // The Decoder class could also be used to decode the QR code:
            //Decoder.DecodeFromByteArray(imageBufferArray, cameraWidth, cameraHeight);

            try
            {
                // The HoloLens locatable camera uses the BGRA32 format in the MRCManager class
                Result TextResult = barcodeReader.Decode(imageBufferArray, w, h, RGBLuminanceSource.BitmapFormat.BGRA32);
                // If a QR code was detected
                if (TextResult != null)
                {
                    Debug.Log("Result decoding: " + TextResult.Text);
                    Textd.text = "Result decoding: " + TextResult.Text;
                    //
                    // Do what you want with the result here
                    //
                }
                // If no QR code was detected
                else
                {
                    Textd.text = "No QR code detected";
                    Debug.Log("No QR code detected");
                }
                camTexture.Play();
            }
            // If an error occurred while decoding
            catch (System.Exception e)
            {
                Debug.Log("Exception: " + e);
            }
//#endif

        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
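
The w and h passed to Decode above are assumed to hold the capture resolution; other examples on this page derive them like this (requires using System.Linq):

    Resolution cameraResolution = PhotoCapture.SupportedResolutions
                                  .OrderByDescending((res) => res.width * res.height).First();
    int w = cameraResolution.width;
    int h = cameraResolution.height;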