Example #1
 public void StopCamera()
 {
     if (cameraReady)
     {
         _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
     }
 }
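
Most of the snippets on this page pass an OnStoppedPhotoMode callback that is not shown. A minimal cleanup sketch, assuming the capture object lives in a field named _photoCaptureObject as in the example above:

 void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
 {
     // Free the camera now that photo mode has stopped (field name assumed from the snippet above).
     _photoCaptureObject.Dispose();
     _photoCaptureObject = null;
 }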
Example #2
        void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            if (result.success)
            {
                // Create our Texture2D for use and set the correct resolution
                Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
                Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);
                // Copy the raw image data into our target texture
                photoCaptureFrame.UploadImageDataToTexture(targetTexture);

                Result = Instantiate(Result, Camera.main.transform.position, Camera.main.transform.rotation);
                Result.GetComponent <Renderer>().material.mainTexture = targetTexture;

                // Do as we wish with the texture such as apply it to a material, etc.

                //Result.GetComponent<Renderer>().material.mainTexture = targetTexture;

                this.result = ToBase64DataUrl(targetTexture);
            }



            // Clean up
            photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
        }
Example #3
 private void OnApplicationQuit()
 {
     if (photoCaptureObject != null)
     {
         photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
     }
 }
 void OnCapturedPhotoToDisk(PhotoCapture.PhotoCaptureResult result)
 {
     if (result.success)
     {
         Debug.Log("Saved Photo to disk!");
         photoCapture.StopPhotoModeAsync(OnStoppedPhotoMode);
     }
     else
     {
         Debug.Log("Failed to save Photo to disk");
     }
 }
Example #5
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Create our Texture2D for use and set the correct resolution
            Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
            Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);

            // Copy the raw image data into our target texture
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);

            // Encode as JPEG to send to the Cognitive Services APIs
            var imageBytes = targetTexture.EncodeToJPG();

            // Get information for the image from cognitive services
            GetTagsAndFaces(imageBytes);
            ReadWords(imageBytes);
        }
        else
        {       // show error
            DiagnosticsPanelTyper.TypeText("DIAGNOSTIC\n**************\n\nFailed to take picture.\nError: " + result.hResult);
            InfoPanelTyper.TypeText("ABORT");
        }
        // stop handling the picture
        _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
 /// <summary>
 /// Stop the photo mode.
 /// </summary>
 public void StopCamera()
 {
     if (isReady)
     {
         capture.StopPhotoModeAsync(OnPhotoModeStopped);
     }
 }
Example #7
    void onCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (!result.success)
        {
            Debug.LogError("Error CapturedPhotoToMemory");
            return;
        }

        // Get the captured image
        List <byte> buffer = new List <byte>();

        photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);
        photoCapture.StopPhotoModeAsync(onStoppedPhotoMode);

        // Crop to only the area inside the QR targeting reticle
        List <byte> trimmedBuffer = trimmingQrSight(buffer, 4);

        // Save the image inside the QR targeting reticle
        Texture2D tex = createTexture(trimmedBuffer, cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight);

        saveToFile(tex);

        if (callback != null)
        {
            callback(new List <byte>(trimmedBuffer), cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight);
        }
    }
Example #8
    public LocatableCameraController()
    {
        _keywordRecognizer = new KeywordRecognizer(_keywords);
        _keywordRecognizer.OnPhraseRecognized += OnPhraseRecognized;
        _keywordRecognizer.Start();

        _currentState = new ReactiveProperty <State>(State.NOT_IN_USE);
        _currentState.ObserveOn(Scheduler.MainThread).SubscribeOn(Scheduler.MainThread).Subscribe(state =>
        {
            if (state == State.NOT_IN_USE)
            {
                _currentRequester = null;
            }
            else if (state == State.STARTING_PHOTO_MODE)
            {
                _photoCaptureObject.StartPhotoModeAsync(_cameraParameters, OnPhotoModeStarted);
            }
            else if (state == State.TAKING_PHOTO)
            {
                _photoCaptureObject.TakePhotoAsync(OnPhotoCapturedToMemory);
            }
            else if (state == State.STOPPING_PHOTO_MODE)
            {
                _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
            }
        });

#if !UNITY_EDITOR
        PhotoCapture.CreateAsync(false, OnPhotoCaptureCreated);
#endif
    }
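
The OnPhotoCaptureCreated callback referenced above is not part of this example. A hedged sketch of what it typically does, assuming the _photoCaptureObject and _cameraParameters fields used in the constructor (field names and parameter values are assumptions, not the original author's code):

    private void OnPhotoCaptureCreated(PhotoCapture captureObject)
    {
        // Store the created capture object (assumed field from the constructor above).
        _photoCaptureObject = captureObject;

        // Pick the highest available resolution for the capture parameters.
        Resolution cameraResolution = PhotoCapture.SupportedResolutions
            .OrderByDescending(res => res.width * res.height).First();

        _cameraParameters = new CameraParameters
        {
            hologramOpacity        = 0.0f,
            cameraResolutionWidth  = cameraResolution.width,
            cameraResolutionHeight = cameraResolution.height,
            pixelFormat            = CapturePixelFormat.BGRA32
        };
    }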
Example #9
    void OnCapturedPhotoToDisk(PhotoCapture.PhotoCaptureResult result)
    {
        if (result.success)
        {
            Debug.Log("Saved Photo to disk!");
            photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);

            GameObject photoFrame = GameObject.CreatePrimitive(PrimitiveType.Quad);

            photoFrame.transform.position = Camera.main.transform.position + Camera.main.transform.forward * 3f;
            photoFrame.transform.localScale = new Vector3(.5f, 0.3f, 1f);
            Texture2D texture  = new Texture2D(1, 1);
            Renderer  renderer = photoFrame.GetComponent <Renderer>();
            byte[]    imageData;
            imageData = System.IO.File.ReadAllBytes(filePath);
            texture.LoadImage(imageData);
            renderer.material.mainTexture = texture;

            Billboard b = photoFrame.AddComponent <Billboard>();
            //b.PivotAxis = PivotAxis.Y;

            //TapToPlace t = photoFrame.AddComponent<TapToPlace>();
        }
        else
        {
            Debug.Log("Failed to save Photo to disk");
        }
    }
 private void OnPhotoModeStarted(PhotoCapture.PhotoCaptureResult result)
 {
     if (result.success)
     {
          capturing = true;
          while (capturing)
         {
             try
             {
                 photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
             }
             catch (Exception e)
             {
                 // Do something
                 capturing = false;
                 // Clean up
                 photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
             }
         }
     }
     else
     {
         Debug.LogError("Unable to start photo mode!");
     }
 }
 void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
 {
     //Debug.Log("OnCapturedPhotoToMemory: start");
     if (result.success)
     {
         //Debug.Log("OnCapturedPhotoToMemory: success");
         // Create our Texture2D for use and set the correct resolution
         Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
         //Debug.Log("OnCapturedPhotoToMemory: create texture");
         Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);
         // Copy the raw image data into our target texture
         //Debug.Log("OnCapturedPhotoToMemory: upload image to texture");
         photoCaptureFrame.UploadImageDataToTexture(targetTexture);
         // Do as we wish with the texture such as apply it to a material, etc.
         Debug.Log("OnCapturedPhotoToMemory: put it to the material (adjusted): " + cameraResolution.width + " " + cameraResolution.height);
         Transform photo = transform.Find("PhotoDisplay");
         Debug.Log("OnCapturedPhotoToMemory: old size: " + photo.localScale.x + " " + photo.localScale.y + " " + photo.localScale.z);
         Debug.Log("OnCapturedPhotoToMemory: old pos: " + photo.localPosition.x + " " + photo.localPosition.y + " " + photo.localPosition.z);
         photo.localScale    = new Vector3((cameraResolution.width / 100), (cameraResolution.height / 100), photo.localScale.z);
         photo.localPosition = new Vector3(cameraResolution.width / 200, cameraResolution.height / 200, photo.localPosition.z);
         Debug.Log("OnCapturedPhotoToMemory: new size: " + photo.localScale.x + " " + photo.localScale.y + " " + photo.localScale.z);
         Debug.Log("OnCapturedPhotoToMemory: new pos: " + photo.localPosition.x + " " + photo.localPosition.y + " " + photo.localPosition.z);
         photo.gameObject.GetComponent <Renderer>().material.mainTexture = targetTexture;
         photo.transform.Rotate(new Vector3(0, 0, 180));
     }
     // Clean up
     Debug.Log("OnCapturedPhotoToMemory: clean up");
     photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
 }
        void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            UnityEngine.Debug.Log("++OnProcessFrame");
            if (result.success)
            {
                if (!Const.LOAD_IMAGES)
                {
                    List <byte> imageBufferList = new List <byte>();
                    // Copy the raw IMFMediaBuffer data into our empty byte list.
                    photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

                    photoCaptureFrame.TryGetCameraToWorldMatrix(out _cameraToWorldMatrix);
                    photoCaptureFrame.TryGetProjectionMatrix(out _projectionMatrix);
                    //UnityEngine.Debug.Log(cameraToWorldMatrix);

                    photoCaptureFrame.Dispose();

                    _imageDataRaw   = imageBufferList.ToArray();
                    _frameReadyFlag = true;
                }
            }
            if (Const.HOLO_CAPTURE)
            {
                _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoModeHOLO);
            }
            else
            {
                _isCapturing = false;
            }
        }
Example #13
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            Debug.Log("OnCapturedPhotoToMemory = " + result.success);
            List <byte> buffer = new List <byte>();
            // Create our Texture2D for use and set the correct resolution
            Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width / 8 * res.height / 8).First();
            Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);
            // Copy the raw image data into our target texture
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);
            Renderer renderer = GameObject.FindGameObjectWithTag("DisplayCube").GetComponent <Renderer>();
            renderer.material.mainTexture = targetTexture;
            Debug.Log("Photo Uploaded to Texture");

            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);
            Debug.Log("Raw Image copied into buffer");
            //Check if we can receive the position where the photo was taken
            if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix))
            {
                return;
            }
            Debug.Log("past if");

            //Start a coroutine to handle the server request
            StartCoroutine(UploadAndHandlePhoto(buffer.ToArray(), cameraToWorldMatrix));

            Debug.Log("Photo saved to texture");
        }
        // Clean up
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
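
The UploadAndHandlePhoto coroutine started above is not included in the example. A hypothetical sketch, assuming UnityEngine.Networking is available, the class is a MonoBehaviour, and a serverUrl field holds the endpoint:

    private IEnumerator UploadAndHandlePhoto(byte[] imageData, Matrix4x4 cameraToWorldMatrix)
    {
        // serverUrl is an assumed field; the request body is the raw image buffer from the capture.
        using (UnityWebRequest request = UnityWebRequest.Put(serverUrl, imageData))
        {
            request.method = UnityWebRequest.kHttpVerbPOST;
            request.SetRequestHeader("Content-Type", "application/octet-stream");

            yield return request.SendWebRequest();

            if (request.isNetworkError || request.isHttpError)
            {
                Debug.LogError("Photo upload failed: " + request.error);
                yield break;
            }

            // The response (and cameraToWorldMatrix) could then be used to place results in the scene.
            Debug.Log("Photo upload complete: " + request.downloadHandler.text);
        }
    }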
Example #14
    /// <summary>
    /// Register the full execution of the Photo Capture. If successful, it will begin
    /// the Image Analysis process.
    /// </summary>
    void OnCapturedPhotoToDisk(PhotoCapture.PhotoCaptureResult result)
    {
        // Call StopPhotoMode once the image has successfully captured
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);

        // Debug.Log("on captured photo to disk");
    }
Example #15
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        // Copy the raw image data into the target texture
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);

        // Create a GameObject to which the texture can be applied
        Renderer quadRenderer = gameObject.GetComponent <Renderer>() as Renderer;

        gameObject.transform.Rotate(Vector3.up * Time.deltaTime * 15f);

        quadRenderer.material.SetTexture("_MainTex", targetTexture);

//        int width = Screen.width;
//        int height = Screen.height;
//
//        targetTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0);
//
//        targetTexture.Apply();
//
//        byte[] bytes = targetTexture.EncodeToPNG();

//        StartCoroutine(upload(bytes));

        // Deactivate the camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #16
 void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
 {
     this.debug.PrintPlus("in oncapturephototomemory");
     if (result.success)
     {
         // Create our Texture2D for use and set the correct resolution
         Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
         //create new texture object
         Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);
         // Copy the raw image data into our target texture
         photoCaptureFrame.UploadImageDataToTexture(targetTexture);
         //stop the photo mode
         //photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
         //set the new texture as the sourceimage of the image processor object
         this.debug.PrintPlus("setting imageprocessor texture");
         this._imageProcessor._sourceImage = targetTexture;
         //this.capturedImage = this.targetTexture;
         //this.debug.Print("capturedphoto");
     }
     else
     {
         //an error occurred, so we need to reset processing
         this.debug.PrintPlus("could not capture photo...resetting");
         this.ResetProcessing();
     }
     // finally implement clean up operations
     photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
 }
Example #17
    /*
     * private void OnPhotoModeStarted(PhotoCapture.PhotoCaptureResult result)
     * {
     * if(result.success)
     * {
     *   string filename = string.Format(@"CapturedImage{0}_n.jpg", Time.time);
     *   string filepath = System.IO.Path.Combine(Application.persistentDataPath, filename);
     *   photoCaptureObject.TakePhotoAsync(filepath, PhotoCaptureFileOutputFormat.JPG,OnCapturedPhotoToDisk);
     *   Debug.Log("TakePhoto Succeed!"+filepath);
     * }
     * else
     * {
     *   Debug.LogError("Unable to start photo mode!");
     * }
     *
     * }
     *
     * void OnCapturedPhotoToDisk(PhotoCapture.PhotoCaptureResult result)
     * {
     * if(result.success)
     * {
     *   Debug.Log("Saved Photo to Disk!");
     *   photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
     * }
     * else
     * {
     *   Debug.Log("Failed to save photo to disk!");
     * }
     * }
     */



    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        worldToCameraMatrix = cameraToWorldMatrix.inverse;
        photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

        photoCaptureFrame.UploadImageDataToTexture(targetTexture);

        GameObject quad         = GameObject.CreatePrimitive(PrimitiveType.Quad);
        Renderer   quadRenderer = quad.GetComponent <Renderer>() as Renderer;

        quadRenderer.material = new Material(Shader.Find("Unlit/Texture"));


        Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
        Vector3    position = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);

        Debug.Log("cameraToWorldMatrix: " + cameraToWorldMatrix);
        Debug.Log("Camera Position in World: " + position);

        quad.transform.parent = this.transform;
        //Orienting the quad to face the user fails in the Unity Editor (the rotation cannot be set to face opposite the camera); still to be tried on HoloLens
        //quad.transform.position = position;
        //quad.transform.rotation = rotation;
        quad.transform.localPosition = new Vector3(0.0f, 0.0f, 0.1f);
        quad.transform.rotation      = this.transform.rotation;

        quadRenderer.material.SetTexture("_MainTex", targetTexture);

        photoCaptureObj.StopPhotoModeAsync(OnStoppedPhotoMode);

        Debug.Log("Capture Photo to Memory Succeed!");
    }
        /// <summary>
        /// <see cref="PhotoCapture.StopPhotoModeAsync(OnPhotoModeStoppedCallback)"/> as a task.
        /// </summary>
        /// <param name="camera">
        /// The <see cref="PhotoCapture"/> camera.
        /// </param>
        /// <returns>
        /// A <see cref="Task"/> that represents the operation.
        /// </returns>
        static public Task <PhotoCaptureResult> StopPhotoModeAsync(this PhotoCapture camera)
        {
            // Validate
            if (camera == null)
            {
                throw new ArgumentNullException(nameof(camera));
            }

            // Create a completion source
            var tcs = new TaskCompletionSource <PhotoCaptureResult>();

            // Start the callback version
            camera.StopPhotoModeAsync(stopResult =>
            {
                if (stopResult.success)
                {
                    tcs.SetResult(stopResult);
                }
                else
                {
                    tcs.SetException(Marshal.GetExceptionForHR((int)stopResult.hResult));
                }
            });

            // Return the running task from the completion source
            return tcs.Task;
        }
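
A brief usage sketch for the extension method above (the async caller and an already-created PhotoCapture instance are assumed):

        // Usage sketch: stop photo mode via the awaitable wrapper, then release the camera.
        static async Task ShutDownAsync(PhotoCapture camera)
        {
            // A failed stop surfaces as an exception built from the HRESULT (see the wrapper above).
            await camera.StopPhotoModeAsync();
            camera.Dispose();
        }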
Example #19
    //Get the image, pose of camera
    async void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            //Debug.Log("\n Saving picture \n");
            List <byte> imageBufferList = new List <byte>();

            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            Vector3    position = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);
            Quaternion rotation = Quaternion.LookRotation(cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));


            captureCameras.Add(new CameraItem(position, rotation));

            c.NewMesh(position, rotation);

            UploadImage(imageBufferList.ToArray());

            /*if ( sceneId > 0)
             * {
             * UploadImageToScene(imageBufferList.ToArray(), sceneId);
             * }*/
        }
        // Clean up
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #20
        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
            if (m_PhotoCaptureObj != null)
            {
                m_PhotoCaptureObj.StopPhotoModeAsync(OnStopPhotoMode);
            }

            if (m_GestureRecognizer != null && m_GestureRecognizer.IsCapturingGestures())
            {
                m_GestureRecognizer.StopCapturingGestures();
                #if UNITY_2017_2_OR_NEWER
                m_GestureRecognizer.Tapped -= OnTappedEvent;
                #else
                m_GestureRecognizer.TappedEvent -= OnTappedEvent;
                #endif
                m_GestureRecognizer.Dispose();
            }

            if (rgbaMat != null)
            {
                rgbaMat.Dispose();
            }

            if (grayMat != null)
            {
                grayMat.Dispose();
            }

            if (cascade != null)
            {
                cascade.Dispose();
            }
        }
Example #21
    private static void OnCapturePhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (!result.success)
        {
            return;
        }

        photoCaptureFrame.UploadImageDataToTexture(capturedTexture);

        Message message = new Message()
        {
            hmdPosition  = GetHMDPosition(),
            hmdRotation  = GetHMDRotation(),
            imageRawData = capturedTexture.EncodeToJPG(100)
        };

        if (Client.Instance != null && Client.Instance.IsServerRunning())
        {
            Client.Instance.SendToServer(message);
        }

        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);

        Capturing = false;
    }
Example #22
    /// <summary>
    /// Called when the photo capture completes: gets the captured image and calls the Custom Vision API to analyze it
    /// </summary>
    /// <param name="result">The photo capture result</param>
    /// <param name="photoCaptureFrame">The captured frame</param>
    private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            audioSource.Stop();
            audioSource.clip = captureAudioClip;
            audioSource.Play();

            ModelManager.Instance.SetPhotoImageActive(true);
            ModelManager.Instance.SetTipText("editing...");

            ToolManager.Instance.ShowMenu();
            currentStatus = CurrentStatus.EdittingPhoto;

            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
            imageBufferList = FlipVertical(imageBufferList, cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight, 4);

            Texture2D targetTexture = CreateTexture(imageBufferList, cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight);
            Sprite    sprite        = Sprite.Create(targetTexture, new Rect(0, 0, targetTexture.width, targetTexture.height), new Vector2(0.5f, 0.5f));

            ModelManager.Instance.SetPhotoImage(sprite);
        }
        else
        {
            audioSource.Stop();
            audioSource.clip = failedAudioClip;
            audioSource.Play();

            currentStatus = CurrentStatus.Ready;
            ModelManager.Instance.SetTipText("air tap to take a photo");
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #23
 void OnCapturePhotoToDisk(PhotoCapture.PhotoCaptureResult result)
 {
     if (photoCaptureObject != null)
     {
         photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
     }
 }
    private static void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        VuforiaBehaviour.Instance.enabled = true;
        List <byte> imageBufferList = new List <byte>();

        // Copy the raw IMFMediaBuffer data into our empty byte list.
        photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
        byte[] bytes = imageBufferList.ToArray();

        // Populate image info
        SetImageLabel setImageLabel = GameObject.Find("txtImageInfo").gameObject.GetComponent <SetImageLabel>();

        setImageLabel.image = bytes;

        // Show snapshot
        UnityEngine.UI.Image imgSnapshot = GameObject.Find("imgSnapshot").gameObject.GetComponent <UnityEngine.UI.Image>();
        Texture2D            texture2D   = new Texture2D(2, 2, TextureFormat.RGBA32, false);

        texture2D.LoadImage(bytes);
        Sprite sprite = Sprite.Create(texture2D, new Rect(0, 0, texture2D.width, texture2D.height), new Vector2(1.0f, 1.0f));

        imgSnapshot.sprite = sprite;

        // Deactivate our camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
            if (m_PhotoCaptureObj != null)
            {
                m_PhotoCaptureObj.StopPhotoModeAsync(OnStopPhotoMode);
            }

            if (m_GestureRecognizer != null && m_GestureRecognizer.IsCapturingGestures())
            {
                m_GestureRecognizer.StopCapturingGestures();
                m_GestureRecognizer.TappedEvent -= OnTappedEvent;
                m_GestureRecognizer.Dispose();
            }

            if (rgbaMat != null)
            {
                rgbaMat.Dispose();
            }

            if (grayMat != null)
            {
                grayMat.Dispose();
            }

            if (cascade != null)
            {
                cascade.Dispose();
            }
        }
Example #26
    /// <summary>
    /// TakePhotoAsync callback: verify the captured photo, then stop the photo mode.
    /// </summary>
    /// <param name="result"></param>
    /// <param name="photoCaptureFrame"></param>
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        Debug.Log("8. TakePhotoAsync completed");

        if (!VerifyPhotoResult(result))
        {
            Debug.LogError("VerifyPhotoResult is incorrect");
            return;
        }
        Debug.Log("9. Photo result verified");

        try
        {
            // Copy the raw image data into the target texture
            photoCaptureFrame.UploadImageDataToTexture(_targetTexture);
            Debug.Log("10. Uploaded ImageDataToTexture");


            byte[] imagePngBytes = _targetTexture.EncodeToPNG();
            Debug.Log("11. targetTexture.EncodeToPNG");

            // here you can write the imagePngBytes into a file if you want

            Debug.Log("14. Start StopPhotoModeAsync");
            // Deactivate the camera
            _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
        }
        catch (Exception e)
        {
            Debug.LogError($"EXCEPTION: OnCapturedPhotoToMemory, {e.Message}");
        }
    }
Example #27
 /*
  * private void OnPhotoModeStarted(PhotoCapture.PhotoCaptureResult result)
  * {
  *  if (result.success)
  *  {
  *      string filename = string.Format(@"CapturedImage{0}_n.jpg", Time.time);
  *      string filePath = System.IO.Path.Combine(Application.persistentDataPath, filename);
  *
  *
  *
  *      photoCaptureObject.TakePhotoAsync(filePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
  *      gui.text = "Picture saved to:" + filePath;
  *  }
  *  else
  *  {
  *      Debug.LogError("Unable to start photo mode!");
  *      gui.text = "Unable to start photo mode!";
  *  }
  * }*/
 /*
  * void OnCapturedPhotoToDisk(PhotoCapture.PhotoCaptureResult result)
  * {
  *  if (result.success)
  *  {
  *      Debug.Log("Saved Photo to disk!");
  *      photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
  *  }
  *  else
  *  {
  *      Debug.Log("Failed to save Photo to disk");
  *      gui.text = "Failed to save Photo to disk";
  *  }
  * }
  */
 void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
 {
     if (result.success)
     {
         /* List<byte> imageBufferList = new List<byte>();
          * // Copy the raw IMFMediaBuffer data into our empty byte list.
          * photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
          *
          * // In this example, we captured the image using the BGRA32 format.
          * // So our stride will be 4 since we have a byte for each rgba channel.
          * // The raw image data will also be flipped so we access our pixel data
          * // in the reverse order.
          * int stride = 4;
          * float denominator = 1.0f / 255.0f;
          * List<Color> colorArray = new List<Color>();
          * for (int i = imageBufferList.Count - 1; i >= 0; i -= stride)
          * {
          *   float a = (int)(imageBufferList[i - 0]) * denominator;
          *   float r = (int)(imageBufferList[i - 1]) * denominator;
          *   float g = (int)(imageBufferList[i - 2]) * denominator;
          *   float b = (int)(imageBufferList[i - 3]) * denominator;
          *
          *   colorArray.Add(new Color(r, g, b, a));
          * }*/
         gui.text = "Do Something";
         // Now we could do something with the array such as texture.SetPixels() or run image processing on the list
         photoCaptureFrame.UploadImageDataToTexture(ImageTexture);
         mat.mainTexture = ImageTexture;
     }
     photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
 }
Example #28
        private void OnPhotoCapturedToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            if (result.success)
            {
                photoCaptureFrame.UploadImageDataToTexture(_targetTexture);

                var picture = _targetTexture.EncodeToJPG();

                //_httpRequestService.Post(PhotoUploadUrl, picture);

                StartCoroutine(UploadPhoto(picture));

                //ShowText(string.Format("Photo uploaded to {0}", ParametrizedImageUploadUrl));
                _capturedPhotoObject.StopPhotoModeAsync(OnPhotoModeStopped);
            }
        }
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            Debug.Log("photo captured");
            List <byte> imageBufferList = new List <byte>();
            // Copy the raw IMFMediaBuffer data into our empty byte list.
            photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

            var cameraToWorldMatrix = new Matrix4x4();
            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);

            cameraPosition = cameraToWorldMatrix.MultiplyPoint3x4(new Vector3(0, 0, -1));
            cameraRotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

            Matrix4x4 projectionMatrix;
            photoCaptureFrame.TryGetProjectionMatrix(Camera.main.nearClipPlane, Camera.main.farClipPlane, out projectionMatrix);
            Matrix4x4 pixelToCameraMatrix = projectionMatrix.inverse;

            status.GetComponent <TextMesh>().text = "photo captured, processing...";
            status.transform.position             = cameraPosition;
            status.transform.rotation             = cameraRotation;

            StartCoroutine(PostToFaceAPI(imageBufferList.ToArray(), cameraToWorldMatrix, pixelToCameraMatrix));
        }
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
Example #30
        private void ReleaseCamera()
        {
            if (PhotoCap == null || asyncCameraReleaseStarted)
            {
                return;
            }

            asyncCameraReleaseStarted = true;

            PhotoCap.StopPhotoModeAsync((PhotoCapture.PhotoCaptureResult result) =>
            {
                asyncCameraReleaseStarted = false;

                if (result.success)
                {
                    PhotoCap.Dispose();
                    PhotoCap                = null;
                    cameraReady             = false;
                    asyncCameraSetupStarted = false;
                    this.releaseAttempts    = 0;
                }
                else
                {
                    if (this.releaseAttempts <= this.maxReleaseAttempts)
                    {
                        this.RequestReleaseCamera();
                    }

                    ++this.releaseAttempts;
                }
            });
        }
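
The RequestReleaseCamera helper called on the failure path above is not shown. A hypothetical sketch, assuming a MonoBehaviour context so the retry can be scheduled with Invoke:

        // Hypothetical retry helper; ReleaseCamera itself guards against overlapping attempts.
        private void RequestReleaseCamera()
        {
            // Retry after a short delay (0.5 s is an arbitrary value for this sketch).
            Invoke(nameof(ReleaseCamera), 0.5f);
        }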