Example No. 1
    /// <summary>
    /// Handle the photo that has been captured to memory
    /// </summary>
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Convert image into image bytes
            Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);                               // Create our Texture2D for use and set the correct resolution
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);                                                              // Copy the raw image data into our target texture, then convert to byte array
            byte[] imageBytes = targetTexture.EncodeToPNG();

            StartCoroutine(GetComponent <ApiManager>().GoogleRequest(imageBytes));                                                  // Begin Google API call

            if (SettingsManager.OCRSetting == OCRRunSetting.Manual)                                                                 // Stop PhotoMode if in manual mode (restart on another call)
            {
                GetComponent <CameraManager>().StopPhotoMode();
            }
        }
    }
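Example No. 1 hands the PNG bytes to an ApiManager.GoogleRequest coroutine that is not shown. The sketch below is a hypothetical version of such an upload coroutine, assuming a placeholder endpoint URL and a raw-bytes POST body; it is not the original ApiManager code and requires UnityEngine.Networking:

    // Hypothetical sketch of an upload coroutine like ApiManager.GoogleRequest (placeholder URL, not the original code)
    private IEnumerator GoogleRequest(byte[] imageBytes)
    {
        string url = "https://example.com/vision/annotate";                                                                        // placeholder endpoint

        using (UnityWebRequest request = new UnityWebRequest(url, UnityWebRequest.kHttpVerbPOST))
        {
            request.uploadHandler   = new UploadHandlerRaw(imageBytes);                                                            // send the PNG bytes as the request body
            request.downloadHandler = new DownloadHandlerBuffer();
            request.SetRequestHeader("Content-Type", "application/octet-stream");

            yield return request.SendWebRequest();

            if (request.isNetworkError || request.isHttpError)
            {
                Debug.LogError("Image upload failed: " + request.error);
            }
            else
            {
                Debug.Log("OCR response: " + request.downloadHandler.text);
            }
        }
    }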
Example No. 2
    //void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    //{
    //    if (result.success)
    //    {
    //        // Create our Texture2D for use and set the correct resolution
    //        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
    //        Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);
    //        // Copy the raw image data into our target texture
    //        photoCaptureFrame.UploadImageDataToTexture(targetTexture);
    //        // Do as we wish with the texture such as apply it to a material, etc.

    //        _outputMaterial.mainTexture = targetTexture;
    //    }
    //    // Clean up
    //    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    //}

    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            //_visionApiTest.QueryVisionApi("http://www.technewscentral.co.uk/wp-content/uploads/2015/07/sony-tablets1-hands2-lg.jpg"); //tablet
            //_visionApiTest.QueryVisionApi("https://pixabay.com/static/uploads/photo/2012/04/01/18/38/television-23936_960_720.png"); //tv
            //_visionApiTest.QueryVisionApi("https://i5.walmartimages.com/dfw/4ff9c6c9-9356/k2-_c59a878c-3d51-4807-aabd-84b0410de921.v1.jpg"); //phone
            //_visionApiTest.QueryVisionApi("http://core0.staticworld.net/images/article/2015/02/hp-spectre-x360_beauty-100570598-orig.jpg"); //laptop
            _visionApiTest.QueryVisionApi(imageBufferList.ToArray());
        }

        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example No. 3
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Copy the in-memory image into imageBufferList to send to Custom Vision
            List <byte> imageBufferList = new List <byte>();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            // Debug-only: display the outgoing image to check what was captured. Remove the photoPanel entirely if it gets in the way.
            Texture2D debugTexture = new Texture2D(100, 100);
            debugTexture.LoadImage(imageBufferList.ToArray());
            photoPanel.texture = debugTexture;

            StartCoroutine(PostToCustomVisionAPI(imageBufferList.ToArray()));
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example No. 4
    /// <summary>
    /// Called when the photo has finished capturing; retrieves the captured image and calls the Custom Vision API to analyze it
    /// </summary>
    /// <param name="result">Result of the capture</param>
    /// <param name="photoCaptureFrame">The captured frame</param>
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            audioSource.Stop();
            audioSource.clip = captureAudioClip;
            audioSource.Play();

            ModelManager.Instance.SetPhotoImageActive(true);
            ModelManager.Instance.SetWaitingCanvas(false);
            ModelManager.Instance.SetTipText("正在处理中...");
            if (ConfigurationManager.Instance.GetMode() == CurrentMode.EdittingMode)
            {
                ToolManager.Instance.ShowMenu();
                currentStatus = CurrentStatus.EdittingPhoto;
            }

            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            imageBufferList = FlipVertical(imageBufferList, cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight, 4);

            Texture2D targetTexture = CreateTexture(imageBufferList, cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight);
            Sprite    sprite        = Sprite.Create(targetTexture, new Rect(0, 0, targetTexture.width, targetTexture.height), new Vector2(0.5f, 0.5f));

            ModelManager.Instance.SetPhotoImage(sprite);

            if (ConfigurationManager.Instance.GetMode() == CurrentMode.SimpleMode)
            {
                StartCoroutine(PostToCustomVisionAPI(targetTexture));
            }
            else
            {
                ModelManager.Instance.PlayAnimation("ShowAnimation");
            }
        }
        else
        {
            audioSource.Stop();
            audioSource.clip = failedAudioClip;
            audioSource.Play();

            currentStatus = CurrentStatus.Ready;
            ModelManager.Instance.SetTipText("点击进行拍照");
            ModelManager.Instance.PlayAnimation("IdleAnimation");
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
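Example No. 4 calls FlipVertical and CreateTexture helpers that are not included in the snippet. The following is a rough reconstruction of what they might look like, inferred from the call sites and assuming a BGRA32 buffer with 4 bytes per pixel; names and signatures are guesses, not the original code:

    // Hypothetical reconstruction of the helpers used above; not the original implementation
    private List<byte> FlipVertical(List<byte> src, int width, int height, int bytesPerPixel)
    {
        byte[] flipped = new byte[src.Count];
        int    stride  = width * bytesPerPixel;

        for (int row = 0; row < height; row++)
        {
            // Copy each source row into the mirrored destination row
            src.CopyTo(row * stride, flipped, (height - 1 - row) * stride, stride);
        }
        return new List<byte>(flipped);
    }

    private Texture2D CreateTexture(List<byte> raw, int width, int height)
    {
        // PhotoCapture delivers BGRA32 data when cameraParameters.pixelFormat is CapturePixelFormat.BGRA32
        Texture2D texture = new Texture2D(width, height, TextureFormat.BGRA32, false);
        texture.LoadRawTextureData(raw.ToArray());
        texture.Apply();
        return texture;
    }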
Example No. 5
    /// <summary>
    /// Called after the photo has been taken
    /// </summary>
    /// <param name="result">Capture result</param>
    /// <param name="photoCaptureFrame">Captured frame</param>
    private void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        Debug.Log("OnProcessFrame");
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            photoCaptureFrame.TryGetCameraToWorldMatrix(out martrix_camera_to_world);
            photoCaptureFrame.TryGetProjectionMatrix(out martrix_projection);
            //photoCaptureFrame.Dispose();
#if WINDOWS_UWP
            SendData(imageBufferList.ToArray());
            //ShowHoloGrams = !ShowHoloGrams;
#endif
            photo_capture.TakePhotoAsync(OnProcessFrame);
        }
    }
Example No. 6
 void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result,
                              PhotoCaptureFrame photoCaptureFrame)
 {
     if (result.success)
     {
         // Create our Texture2D for use and set the correct resolution
         Resolution cameraResolution = PhotoCapture.SupportedResolutions
                                       .OrderByDescending((res) => res.width * res.height).First();
         Texture2D targetTexture = new Texture2D(cameraResolution.width,
                                                 cameraResolution.height);
         // Copy the raw image data into our target texture
         photoCaptureFrame.UploadImageDataToTexture(targetTexture);
         // Do as we wish with the texture such as apply it to a material, etc.
     }
     // Clean up
     photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
 }
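Example No. 6 follows the standard Unity PhotoCapture pattern. For context, here is a minimal sketch of the setup and cleanup code that these callbacks assume: creating the PhotoCapture object, starting photo mode, taking the photo, and the OnStoppedPhotoMode handler that most examples on this page pass to StopPhotoModeAsync. Field names are illustrative; PhotoCapture lives in UnityEngine.Windows.WebCam (UnityEngine.XR.WSA.WebCam in older Unity versions):

 PhotoCapture photoCaptureObject = null;

 void StartCapture()
 {
     PhotoCapture.CreateAsync(false, delegate(PhotoCapture captureObject)
     {
         photoCaptureObject = captureObject;

         // Pick the highest supported resolution, as the examples above do
         Resolution cameraResolution = PhotoCapture.SupportedResolutions
                                       .OrderByDescending((res) => res.width * res.height).First();

         CameraParameters cameraParameters = new CameraParameters();
         cameraParameters.hologramOpacity        = 0.0f;
         cameraParameters.cameraResolutionWidth  = cameraResolution.width;
         cameraParameters.cameraResolutionHeight = cameraResolution.height;
         cameraParameters.pixelFormat            = CapturePixelFormat.BGRA32;

         // Start photo mode, then capture a single photo into memory
         photoCaptureObject.StartPhotoModeAsync(cameraParameters, delegate(PhotoCapture.PhotoCaptureResult result)
         {
             photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
         });
     });
 }

 void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
 {
     // Release the capture resource once photo mode has stopped
     photoCaptureObject.Dispose();
     photoCaptureObject = null;
 }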
Example No. 7
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        // Copy the raw image data into the target texture
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);

        // Get the renderer of the quad to which the texture will be applied

        Renderer quadRenderer = quad.GetComponent <Renderer>() as Renderer;


        quadRenderer.material.SetTexture("_MainTex", targetTexture);

        // Deactivate the camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
        imgData = ImageConversion.EncodeToJPG(targetTexture);
        StartCoroutine(PostImageToServer());
    }
        void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            UnityEngine.Debug.Log("++OnProcessFrame");
            if (result.success)
            {
                if (!Const.LOAD_IMAGES)
                {
                    List <byte> imageBufferList = new List <byte>();
                    // Copy the raw IMFMediaBuffer data into our empty byte list.
                    photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

                    photoCaptureFrame.TryGetCameraToWorldMatrix(out _cameraToWorldMatrix);
                    photoCaptureFrame.TryGetProjectionMatrix(out _projectionMatrix);
                    //UnityEngine.Debug.Log(cameraToWorldMatrix);

                    photoCaptureFrame.Dispose();

                    _imageDataRaw   = imageBufferList.ToArray();
                    _frameReadyFlag = true;
                }
                else
                {
                    /*
                     * _indexImageFile = (int)(frameID % _imageFiles.LongCount());
                     * using (IRandomAccessStreamWithContentType stream = await _imageFiles[_indexImageFile].OpenReadAsync())
                     * {
                     *  imageData = new byte[stream.Size];
                     *  using (DataReader reader = new DataReader(stream))
                     *  {
                     *      await reader.LoadAsync((uint)stream.Size);
                     *      reader.ReadBytes(imageData);
                     *  }
                     * }
                     */
                }
            }
            if (Const.HOLO_CAPTURE)
            {
                _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoModeHOLO);
            }
            else
            {
                _isCapturing = false;
            }
        }
Example No. 9
 private void OnPhotoCapturedToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame frame)
 {
     if (result.success)
     {
         Debug.Log("Captured image to memory. Sending data to requester.");
         Matrix4x4 calibrationMatrix;
         frame.TryGetProjectionMatrix(out calibrationMatrix);
         List <byte> imageData = new List <byte>();
         frame.CopyRawImageDataIntoBuffer(imageData);
         _currentState.Value = State.READY_TO_TAKE_PHOTO;
         _currentRequester.ReceiveTakenPictureAsBytes(imageData, _cameraParameters.cameraResolutionWidth, _cameraParameters.cameraResolutionHeight, calibrationMatrix);
     }
     else
     {
         Debug.LogError("Failed to capture image to memory.");
         _currentState.Value = State.READY_TO_TAKE_PHOTO;
     }
 }
Example No. 10
        void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            // Copy the raw image data into the target texture
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);

            // Create a GameObject to which the texture can be applied
            //GameObject quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
            //Renderer quadRenderer = quad.GetComponent<Renderer>() as Renderer;
            //quadRenderer.material = new Material(Shader.Find("Custom/Unlit/UnlitTexture"));

            //quad.transform.parent = this.transform;
            //quad.transform.localPosition = new Vector3(0.0f, 0.0f, 3.0f);

            //quadRenderer.material.SetTexture("_MainTex", targetTexture);

            // Deactivate the camera
            photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
        }
Example No. 11
    // Smoothing pass
    Texture2D ImageProgress(Texture2D src, PhotoCaptureFrame photoCaptureFrame, Resolution camera)
    {
        RenderTexture resultTex;

        resultTex = new RenderTexture(camera.width, camera.height, 0, RenderTextureFormat.ARGB32);
        resultTex.enableRandomWrite = true;
        resultTex.Create();

        var step1 = imgCS.FindKernel("imageprogress");

        imgCS.SetTexture(step1, "srcTexture", src);
        imgCS.SetTexture(step1, "Result", resultTex);
        imgCS.Dispatch(step1, src.width / 4, src.height / 4, 1);

        Texture2D texture = CreateTexture2D(resultTex);

        return(texture);
    }
Example No. 12
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        Debug.Log(photoCaptureFrame.dataLength);
        Debug.Log(photoCaptureFrame.hasLocationData);

        var rgb = new List <byte>();

        photoCaptureFrame.CopyRawImageDataIntoBuffer(rgb);

        //var bytes = File.ReadAllBytes(@"C:\Users\asd14\Pictures\hololens.jpg");
        //var stream = new MemoryStream(bytes);
        var bytes = rgb.ToArray();

        UploadFile(bytes, "https://hologate.tav.cc/image");


        StartCoroutine("DoRequest");
    }
Example No. 13
        /// <summary>
        /// Processes the received frame, converts the image to grayscale if requested, and invokes the next photo request.
        /// </summary>
        private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            if (_stopped?.Task != null)
            {
                return;
            }
            if (result.resultType == PhotoCapture.CaptureResultType.UnknownError)
            {
                return;
            }
            if (photoCaptureFrame == null)
            {
                return;
            }
            Size size = new Size(FrameWidth, (double)FrameHeight * 3 / 2); // Luminance (grayscale) of the NV12 format requires image height, chrominance is stored in half resolution. <see href="https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#nv12"/>.

            _image = new Mat(size, CvType.CV_8UC1);
            List <byte> imageBuffer = new List <byte>();

            photoCaptureFrame?.CopyRawImageDataIntoBuffer(imageBuffer);
            MatUtils.copyToMat(imageBuffer.ToArray(), _image);

            if (_format == ColorFormat.Grayscale)
            {
                Imgproc.cvtColor(_image, _image, Imgproc.COLOR_YUV2GRAY_NV12);
            }

            Matrix4x4 cameraToWorldMatrix = Matrix4x4.identity;

            photoCaptureFrame?.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            CameraExtrinsic extrinsic = new CameraExtrinsic(cameraToWorldMatrix);

            Matrix4x4 projectionMatrix = Matrix4x4.identity;

            photoCaptureFrame?.TryGetProjectionMatrix(out projectionMatrix);
            CameraIntrinsic intrinsic = new CameraIntrinsic(projectionMatrix);

            CameraFrame           cameraFrame = new CameraFrame(_image, intrinsic, extrinsic, FrameWidth, FrameHeight, FrameCount++, _format);
            FrameArrivedEventArgs args        = new FrameArrivedEventArgs(cameraFrame);

            FrameArrived?.Invoke(this, args);

            _photoCaptureObject?.TakePhotoAsync(OnCapturedPhotoToMemory);
        }
    // When screenshot is captured to memory
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // play photo capture sound
            //Camera.main.GetComponent<AudioSource>().Play();

            // freeing up memory
            Texture.Destroy(_imageAsTextureTmp);

            // save photograph to texture
            _imageAsTextureTmp = new Texture2D(_cameraResolution.width, _cameraResolution.height);
            photoCaptureFrame.UploadImageDataToTexture(_imageAsTextureTmp);

            // position of camera/user at time of capturing screenshot
            photoCaptureFrame.TryGetCameraToWorldMatrix(out _cameraToWorldMatrixTmp);
            photoCaptureFrame.TryGetProjectionMatrix(out _projectionMatrixTmp);

            // measuring captured frames per second
            if (_lastTime == 0)
            {
                _lastTime = Time.time;
            }
            if (Time.time - _lastTime < 1.0f)
            {
                _photoCount++;
            }
            else
            {
                // Debug.LogError("Photos per s: " + _photoCount);
                _lastTime   = Time.time;
                _photoCount = 0;
            }

            // send event if there are subscribers
            var handler = ScreenshotTaken;
            if (handler != null)
            {
                handler.Invoke(this, new EventArgs());
            }
        }

        this._screenshotsTakeable = true;
    }
Example No. 15
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Create our Texture2D for use and set the correct resolution
            Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
            Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);
            // Copy the raw image data into our target texture
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);

            byte[] PNGfile  = targetTexture.EncodeToPNG();
            string filePath = System.IO.Path.Combine(Application.persistentDataPath, "CapturedImage" + viewNumber + ".png");
            Debug.Log("!!!!!!!!!!!!!!!" + filePath);
            File.WriteAllBytes(filePath, PNGfile);//todo: enumerate

            Debug.Log("saved png");


            Matrix4x4 worldTrans;
            Matrix4x4 viewTrans;
            if (photoCaptureFrame.TryGetCameraToWorldMatrix(out worldTrans) && photoCaptureFrame.TryGetProjectionMatrix(out viewTrans))
            {
                filePath = System.IO.Path.Combine(Application.persistentDataPath, "CapturedImage" + viewNumber + ".png.matr");
                File.WriteAllText(filePath, worldTrans + "\n\n" + viewTrans);
                sendModule.addView(worldTrans, viewTrans, filePath);
            }
            else
            {
                Debug.LogError("failed to save matrices");
            }

            AudioSource[] clickSound = GetComponents <AudioSource>();
            clickSound[0].Play();

            if (viewNumber > numberOfPics)
            {
                captureModule.save      = true;
                captureModule.recording = false;
                takePhotos = false;
            }
        }
        // Clean up
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
        void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            if (result.success)
            {
                // Create our Texture2D for use and set the correct resolution
                Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
                Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);
                // Copy the raw image data into our target texture
                photoCaptureFrame.UploadImageDataToTexture(targetTexture);
                // Do as we wish with the texture such as apply it to a material, etc.
                byte[] bytes;

                bytes = targetTexture.EncodeToPNG();
                string encodedImage = System.Convert.ToBase64String(bytes);
                CustomMessages.Instance.SendTexture(encodedImage);
            }
            // Clean up
            photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
        }
Example No. 17
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        // Copy the raw image data into the target texture
        lock (imageBuffer)
        {
            imageBuffer.Clear();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBuffer);
            byte[] vs = imageBuffer.ToArray();
            tex.LoadRawTextureData(vs);
            tex.Apply();
            rawImage.texture = tex;
            //IMPORTANT: Dispose the capture frame, or the app will crash after a while with access violation
            photoCaptureFrame.Dispose();
            firstScan = false;
        }

        // Deactivate the camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example No. 18
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Create our Texture2D for use and set the correct resolution
            Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
            Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);
            // Copy the raw image data into our target texture
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);
            // Do as we wish with the texture such as apply it to a material, etc.
            ImageFrameObject.GetComponent <Renderer>().material.mainTexture = targetTexture;

            StartCoroutine(GetEmotionFromImages2(targetTexture.EncodeToJPG()));
            StartCoroutine(GetVisionData(targetTexture.EncodeToJPG()));
        }
        // Clean up
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
        swCamaraInUse = false;
    }
Example No. 19
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        var photoBuffer = new List <byte>();

        if (photoCaptureFrame.pixelFormat == CapturePixelFormat.JPEG)
        {
            photoCaptureFrame.CopyRawImageDataIntoBuffer(photoBuffer);
        }
        else
        {
            photoBuffer = ConvertAndShowOnDebugPane(photoCaptureFrame);
        }

        Messenger.Instance.Broadcast(
            new PhotoCaptureMessage(photoBuffer, _cameraResolution, CopyCameraTransForm()));

        // Deactivate our camera
        _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example No. 20
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            DebugDisplay.Instance.Log("OnCapturedPhotoToMemory Copy Started ");

            imageBufferList = new List <byte>();
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            DebugDisplay.Instance.Log("OnCapturedPhotoToMemory " + imageBufferList.Count);
        }
        else
        {
            DebugDisplay.Instance.Log("Failed to save Photo to memory");
            photoReadyCallBack(false, imageBufferList);
        }

        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example No. 21
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        // Copy the raw image data into the target texture
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);
        photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);

        Color32[] pix    = targetTexture.GetPixels32();
        int       width  = targetTexture.width;
        int       height = targetTexture.height;

        int index = MaxRedColor.MaxRedColors(pix);

        int lines  = (int)(index + 1) / 640;
        int stakes = index - (lines * 640);

        if (Variabless.First_x == -1)
        {
            Variabless.First_x = stakes;
            Variabless.First_y = lines;
            print(stakes);
        }
        else if (Variabless.Second_x == -1)
        {
            Variabless.Second_x = stakes;
            Variabless.Second_y = lines;
            print(stakes);
        }
        else if (Variabless.Third_x == -1)
        {
            Variabless.Third_x = stakes;
            Variabless.Third_y = lines;
            print(stakes);
        }
        else if (Variabless.Four_x == -1)
        {
            Variabless.Four_x = stakes;
            Variabless.Four_y = lines;
            print(stakes);
        }

        // Deactivate the camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
        void OnCapturedPhotoToMemory(NRPhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            // Copy the raw image data into our target texture
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);

            // Create a gameobject that we can apply our texture to
            GameObject quad         = GameObject.CreatePrimitive(PrimitiveType.Quad);
            Renderer   quadRenderer = quad.GetComponent <Renderer>() as Renderer;

            quadRenderer.material = new Material(Resources.Load <Shader>("Record/Shaders/CaptureScreen"));

            var headTran = NRSessionManager.Instance.NRHMDPoseTracker.centerCamera.transform;

            quad.name = "picture";
            quad.transform.localPosition = headTran.position + headTran.forward * 3f;
            quad.transform.forward       = headTran.forward;
            quad.transform.localScale    = new Vector3(1.6f, 0.9f, 0);
            quadRenderer.material.SetTexture("_MainTex", targetTexture);
        }
Example No. 23
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        // Copy the raw image data into our target texture
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);

        // Create a gameobject that we can apply our texture to
        GameObject quad         = GameObject.CreatePrimitive(PrimitiveType.Quad);
        Renderer   quadRenderer = quad.GetComponent <Renderer>() as Renderer;

        quadRenderer.material = new Material(Shader.Find("Unlit/Texture"));

        quad.transform.parent        = this.transform;
        quad.transform.localPosition = new Vector3(0.0f, 0.0f, 3.0f);

        quadRenderer.material.SetTexture("_MainTex", targetTexture);

        // Deactivate our camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example No. 24
        void OnPhotoCapturedCopyToBytes(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);

            Matrix4x4 projectionMatrix;

            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            List <byte> capturedImg = new List <byte>();

            photoCaptureFrame.CopyRawImageDataIntoBuffer(capturedImg);

            photoCaptureFrame.Dispose();

            _takePhotoActionCopyToBytes?.Invoke(cameraToWorldMatrix, projectionMatrix, capturedImg, _cameraParameters.cameraResolutionHeight, _cameraParameters.cameraResolutionWidth);
            CanTakePhoto = false;
            _takePhotoActionCopyToBytes = null;
        }
Example No. 25
        void OnPhotoCapturedCopyToTexture(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);

            Matrix4x4 projectionMatrix;

            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            var texture = new Texture2D(_cameraParameters.cameraResolutionWidth, _cameraParameters.cameraResolutionHeight, TextureFormat.RGBA32, false);

            photoCaptureFrame.UploadImageDataToTexture(texture);
            texture.wrapMode = TextureWrapMode.Clamp;

            photoCaptureFrame.Dispose();

            _takePhotoActionCopyToTexture2D?.Invoke(cameraToWorldMatrix, projectionMatrix, texture);
            CanTakePhoto = false;
            _takePhotoActionCopyToTexture2D = null;
        }
Example No. 26
    void OnCapturePhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        Matrix4x4   cameraToWorldMatrix;
        List <byte> buffer = new List <byte>();

        photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);

        //Check if we can receive the position where the photo was taken
        if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix))
        {
            cameraToWorldMatrix = Matrix4x4.identity;
        }

        if (photoReady != null)
        {
            photoReady(id, buffer.ToArray(), cameraToWorldMatrix, cameraResolution);
        }

        // stop the photo mode
        photoCapture.StopPhotoModeAsync(OnPhotoModeStopped);
    }
Example No. 27
    void OnCapturedPhotoToMemoryAsync(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Convert photoCaptureFrame into a List<byte>, then into a byte[]
            imageBufferList = new List <byte>();
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            // Store the captured image data in imageBufferArray
            imageBufferArray = imageBufferList.ToArray();

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            worldToCameraMatrix = cameraToWorldMatrix.inverse;
            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            UnityEngine.Debug.LogFormat(@"The value of cameraToWorld Matrix: {0}{1}{2}{3} ", cameraToWorldMatrix.GetRow(0), cameraToWorldMatrix.GetRow(1), cameraToWorldMatrix.GetRow(2), cameraToWorldMatrix.GetRow(3));

            UnityEngine.Debug.Log("Captured Photo To Memory Succeed! ");
        }

        photoCaptureObj.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
    void Publish_to_ROS(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            memory = photoCaptureFrame;

            // Copy the image data into the buffer
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            byte[] ROS_data = new byte[imageBufferList.Count];

            Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
            Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);

            ROS_data = targetTexture.EncodeToJPG(qualityLevel);

            GameObject.Destroy(targetTexture);

            Publish_to_ROS(ROS_data);
        }
    }
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            List <byte> imageBufferList = new List <byte>();

            Debug.Log("OnCapturedPhotoToMemory Copy Started");

            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            Debug.Log("OnCapturedPhotoToMemory " + imageBufferList.Count);

            //Execute OCR Coroutine
            ExecuteMCSComputerVisionOCR(imageBufferList, "ocr");
        }
        else
        {
            Debug.Log("Failed to save Photo to memory");
        }

        photoCapture.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example No. 30
        void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            //Matrix4x4 cameraToWorldMatrix;

            //photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            //Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

            //Matrix4x4 projectionMatrix;
            //photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            //var texture = new Texture2D(this.cameraParameters.cameraResolutionWidth, this.cameraParameters.cameraResolutionHeight, TextureFormat.ARGB32, false);
            //photoCaptureFrame.UploadImageDataToTexture(texture);
            List <byte> byteses = new List <byte>();

            photoCaptureFrame.CopyRawImageDataIntoBuffer(byteses);
            TakeImageAction?.Invoke(byteses);
            //texture.wrapMode = TextureWrapMode.Clamp;
            photoCaptureFrame.Dispose();
            //texture.Compress(true); // Compression here would mean compressing to the DXT format.
            Resources.UnloadUnusedAssets();
            isCapturingPhoto = false;
        }
Example No. 31
 private static void InvokeOnCapturedPhotoToMemoryDelegate(OnCapturedToMemoryCallback callback, long hResult, IntPtr photoCaptureFramePtr)
 {
     PhotoCaptureFrame photoCaptureFrame = null;
     if (photoCaptureFramePtr != IntPtr.Zero)
     {
         photoCaptureFrame = new PhotoCaptureFrame(photoCaptureFramePtr);
     }
     callback(MakeCaptureResult(hResult), photoCaptureFrame);
 }