/// <summary>
        /// Called when a photo has been captured to memory. If successful,
        /// the photo is copied into the target texture.
        /// </summary>
        /// <param name="result">Result of the photo process</param>
        /// <param name="photoCaptureFrame">Contains the photo information</param>
        //private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result)
        private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            //Debug.Log("################### CameraCaptureHololens -> OnCapturedPhotoToMemory() -> start function!");
            if (result.success)
            {
                //Debug.Log("################### CameraCaptureHololens -> OnCapturedPhotoToMemory() -> success");
                if (targetTexture != null)
                {
                    Destroy(targetTexture);
                }

                targetTexture = new Texture2D(HorizontalCameraResolution, VerticalCameraResolution, TextureFormat.RGB24, false);
                // Copy the raw image data into our target texture
                photoCaptureFrame.UploadImageDataToTexture(targetTexture);

                if (OnFrameCapture != null)
                {
                    // delegated on MarkerDetectionHololens.ProcessImage()
                    OnFrameCapture(targetTexture.GetRawTextureData().ToList(), photoWidth, photoHeight);
                }
            }
            else
            {
                Debug.LogError("Failed to capturing image");
            }

            photoCaptureFrame.Dispose();

            photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);

            //Debug.Log("################### CameraCaptureHololens -> OnCapturedPhotoToMemory() -> end function!");
        }
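
The loop above re-arms itself by calling TakePhotoAsync at the end of the callback, but the initial setup is not shown. A minimal sketch of how such a capture loop is usually started, assuming the same class holds photoCaptureObject and the two resolution fields (StartCapture itself is a hypothetical name):

private void StartCapture()
{
    // Create the PhotoCapture object, start photo mode, then request the first
    // photo so OnCapturedPhotoToMemory keeps the loop running.
    PhotoCapture.CreateAsync(false, captureObject =>
    {
        photoCaptureObject = captureObject;

        CameraParameters cameraParameters = new CameraParameters
        {
            hologramOpacity        = 0.0f,
            cameraResolutionWidth  = HorizontalCameraResolution,
            cameraResolutionHeight = VerticalCameraResolution,
            pixelFormat            = CapturePixelFormat.BGRA32
        };

        photoCaptureObject.StartPhotoModeAsync(cameraParameters, startResult =>
        {
            if (startResult.success)
            {
                photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
            }
        });
    });
}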
Example #4
    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            // Create our Texture2D for use and set the correct resolution
            //Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
            Texture2D targetTexture = new Texture2D(textureWidth, textureHeight);//cameraResolution.width, cameraResolution.height);
            // Copy the raw image data into our target texture
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);

            // Get view and projection matrices from camera, as well as position and lookVector
            Matrix4x4 inverseV;
            Matrix4x4 P;
            Matrix4x4 VP;
            bool      vSuccess         = photoCaptureFrame.TryGetCameraToWorldMatrix(out inverseV);
            bool      pSuccess         = photoCaptureFrame.TryGetProjectionMatrix(out P);
            Vector3   cameraWorldPos   = inverseV.MultiplyPoint(Vector3.zero);
            Vector3   cameraLookVector = inverseV.MultiplyVector(Vector3.forward);
            Debug.Log("RGB Camera View Matrix: " + (vSuccess ? "Found" : "NULL"));
            Debug.Log("RGB Camera Projection Matrix: " + (pSuccess ? "Found" : "NULL"));
            Debug.Log("RGB Camera Position: " + cameraWorldPos);
            Debug.Log("RGB Camera LookVector: " + cameraLookVector);

            //depthCamera.transform.position = (cameraWorldPos - Camera.main.gameObject.transform.position);
            //depthCamera.transform.LookAt(cameraWorldPos + cameraLookVector);
            //Debug.Log("Applied position and lookvector to depth camera");
            //camera.projectionMatrix = P;
            //Debug.Log("Applied projection matrix to depth camera");
            ////camera.worldToCameraMatrix = inverseV.inverse;
            //camera.Render();
            ////camera.RenderWithShader(Shader.Find("DepthOnly"), "depth");
            //Debug.Log("Rendered without depth shader");

            TakeScreenshot(targetTexture);
            Debug.Log("Snapshot Taken");
            // Update snapshot VP matrix before updating the shader
            if (vSuccess && pSuccess)
            {
                VP = P * inverseV.inverse;
                //Snapshot snapshot = snapshots[snapshots.Count - 1];
                //snapshot.vp = VP;
                //snapshot.position = new Vector4(cameraWorldPos.x, cameraWorldPos.y, cameraWorldPos.z, 1);
                //snapshots[snapshots.Count - 1] = snapshot;
                vpArray[numProjectors]    = VP;
                invVPArray[numProjectors] = (inverseV);
                posArray[numProjectors]   = new Vector4(cameraWorldPos.x, cameraWorldPos.y, cameraWorldPos.z, 1);
            }
            Debug.Log("Snapshot Updated");
            UpdateShader();
            Debug.Log("Updated Shader");

            // Free memory
            photoCaptureFrame.Dispose();
        }
        // Clean up
        //photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
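The VP matrix stored in vpArray above is a standard view-projection matrix for projective texturing. A short sketch of how such a matrix maps a world-space point into the photo's normalized image coordinates (WorldToPhotoUV is a hypothetical helper, not part of the snippet):

private static Vector2 WorldToPhotoUV(Matrix4x4 vp, Vector3 worldPos)
{
    // Transform into clip space, keeping the homogeneous w component.
    Vector4 clip = vp * new Vector4(worldPos.x, worldPos.y, worldPos.z, 1.0f);

    // Perspective divide yields normalized device coordinates in [-1, 1].
    Vector2 ndc = new Vector2(clip.x / clip.w, clip.y / clip.w);

    // Remap to [0, 1] texture coordinates.
    return new Vector2(ndc.x * 0.5f + 0.5f, ndc.y * 0.5f + 0.5f);
}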
Example #5
        /// <summary>
        /// Grab worldToCamera matrix and projection matrix from the Hololens
        /// camera for correct texture placement, clip and save the texture as
        /// an image, then properly clean up (dispose of) the PhotoCaptureFrame
        /// storing relevant image data.
        /// </summary>
        /// <param name="result">
        /// Information about the success of taking the picture.
        /// </param>
        /// <param name="photoCaptureFrame">
        /// The information/image associated with the picture taken.
        /// </param>
        static void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            // After the first photo, we want to lock in the current exposure and white balance settings.
            if (lockCameraSettings && currentPhoto == 0)
            {
                //#if WINDOWS_UWP
#if WINDOWS_UWP
                //#if UNITY_WSA_10_0
                unsafe {
                    //This is how you access a COM object.
                    VideoDeviceController vdm = (VideoDeviceController)Marshal.GetObjectForIUnknown(photoCaptureObj.GetUnsafePointerToVideoDeviceController());
                    //Locks current exposure for all future images
                    vdm.ExposureControl.SetAutoAsync(false); // Figure out how to better handle the async call
                    //Locks the current WhiteBalance for all future images
                    vdm.WhiteBalanceControl.SetPresetAsync(ColorTemperaturePreset.Fluorescent);

                    //vdm.WhiteBalanceControl.SetPresetAsync(ColorTemperaturePreset.Manual);
                }
#endif
            }

            // Grab appropriate matrices, and write them to the local storage
            // arrays at the correct index
            Matrix4x4 worldToCameraMatrix;
            Matrix4x4 projectionMatrix;
            bool      matricesExtracted = ExtractCameraMatrices(photoCaptureFrame, out worldToCameraMatrix, out projectionMatrix);
            if (matricesExtracted)
            {
                WriteMatricesToArrays(worldToCameraMatrix, projectionMatrix, currentPhoto);

                // Set up local class texture to save as a picture/texture - Hololens camera requires BGRA32 format
                m_Texture = new Texture2D(m_CameraParameters.cameraResolutionWidth, m_CameraParameters.cameraResolutionHeight, TextureFormat.BGRA32, false);
                photoCaptureFrame.UploadImageDataToTexture(m_Texture);
                m_Texture          = ClipTexture(m_Texture);
                m_Texture.wrapMode = TextureWrapMode.Clamp;
                SaveTexture();

                // Reset displayed message to remind user how to take photos or end texturing process
                TextManager.SetText(Messages.PhotoPrompt + Messages.AppendNumPhotosTaken());
            }
            else
            {
                TextManager.SetText(Messages.MatrixFail);
            }
            // Clean up camera memory
            photoCaptureFrame.Dispose();

            // Automatically shut down the operation if the maximum number of
            // textures is reached.
            if (currentPhoto >= MaxPhotoNum)
            {
                StopTextureCapture();
            }
        }
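
ExtractCameraMatrices is referenced above but not shown. Judging from how its outputs are used, it presumably wraps the two TryGet calls and inverts the camera-to-world matrix; a sketch under that assumption:

static bool ExtractCameraMatrices(PhotoCaptureFrame frame,
                                  out Matrix4x4 worldToCameraMatrix,
                                  out Matrix4x4 projectionMatrix)
{
    // Pull both matrices from the frame; fall back to identity if the view matrix is missing.
    Matrix4x4 cameraToWorldMatrix;
    bool gotView       = frame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    bool gotProjection = frame.TryGetProjectionMatrix(out projectionMatrix);

    worldToCameraMatrix = gotView ? cameraToWorldMatrix.inverse : Matrix4x4.identity;
    return gotView && gotProjection;
}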
    /// <summary>
    /// Serialize the captured image and turn it into a raw request:
    /// 1. Take the photo and get its JPEG-encoded bytes.
    /// 2. Encode the JPEG with Base64 so it is serializable.
    /// 3. Serialize everything as a JSON string.
    /// 4. Serialize the JSON string as a raw request.
    /// </summary>
    private IEnumerator <bool> SerializeRequest(PhotoCaptureFrame photoCapturedFrame)
    {
        yield return(true);

        //Texture2D tex = new Texture2D(ImageCapture.Instance.width,
        //                              ImageCapture.Instance.height);
        //photoCapturedFrame.UploadImageDataToTexture(tex);
        //byte[] jpgEncoded = tex.EncodeToJPG

        List <byte> jpgEncodedList = new List <byte>();

        photoCapturedFrame.CopyRawImageDataIntoBuffer(jpgEncodedList);
        byte[] jpgEncoded = jpgEncodedList.ToArray();



        // server expects an base64 encoded JPG encoded string
        // should have the form {"inputs": [{"b64": <b64encodejpgencodedstring>}]}
        string           b64Encode        = Convert.ToBase64String(jpgEncoded);
        DetectionRequest detectionRequest = new DetectionRequest {
            inputs = new List <B64> {
                new B64 {
                    b64 = b64Encode
                }
            }
        };

        string jsonRequest = JsonConvert.SerializeObject(detectionRequest);

        RequestBufferElem requestBufferElem = new RequestBufferElem()
        {
            rawRequest = Encoding.UTF8.GetBytes(jsonRequest)
        };

        if (!photoCapturedFrame.TryGetCameraToWorldMatrix(out requestBufferElem.cameraToWorld) ||
            !photoCapturedFrame.TryGetProjectionMatrix(out requestBufferElem.projection))
        {
            requestBufferElem.hasWorldData = false;
        }
        else
        {
            requestBufferElem.hasWorldData = true;
        }

        photoCapturedFrame.Dispose();

        rawRequestBuffer.Enqueue(requestBufferElem);
        rawRequestBufferEmpty = false;

        timestamp = stopwatch.ElapsedMilliseconds;
    }
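
DetectionRequest and B64 are not defined in the snippet; based on the usage and the documented payload {"inputs": [{"b64": ...}]}, they are presumably simple DTOs along these lines (an assumption, not the original definitions; requires using System.Collections.Generic):

public class DetectionRequest
{
    // Serialized by JsonConvert as the top-level "inputs" array.
    public List<B64> inputs { get; set; }
}

public class B64
{
    // Base64-encoded JPEG bytes, serialized as the "b64" field.
    public string b64 { get; set; }
}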
Example #7
 // Save to Tex2d
 void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
 {
     if (result.success)
     {
         Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
         Texture2D  targetTexture    = new Texture2D(cameraResolution.width, cameraResolution.height);
         photoCaptureFrame.UploadImageDataToTexture(targetTexture);
         photoCaptureFrame.Dispose();
         Destroy(mat.mainTexture);
         mat.SetTexture("_MainTex", targetTexture);
         MakeDetectRequest(targetTexture);
     }
     //photoCaptureObj.StopPhotoModeAsync(OnStoppedPhotoMode);
 }
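
MakeDetectRequest is not shown above. One plausible shape, assuming the texture is JPEG-encoded and posted to a detection endpoint with UnityWebRequest (the method name, URL parameter, and content type are placeholders; requires using UnityEngine.Networking and System.Collections):

private IEnumerator PostForDetection(Texture2D texture, string url)
{
    // Encode the captured texture and POST the raw bytes to the detection service.
    byte[] jpg = texture.EncodeToJPG();

    using (UnityWebRequest request = new UnityWebRequest(url, UnityWebRequest.kHttpVerbPOST))
    {
        request.uploadHandler   = new UploadHandlerRaw(jpg);
        request.downloadHandler = new DownloadHandlerBuffer();
        request.SetRequestHeader("Content-Type", "application/octet-stream");

        yield return request.SendWebRequest();

        Debug.Log("Detection response: " + request.downloadHandler.text);
    }
}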
        private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            // Copy the raw image data into the target texture
            lock (imageBuffer)
            {
                imageBuffer.Clear();
                photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBuffer);
                //IMPORTANT: Dispose the capture frame, or the app will crash after a while with access violation
                photoCaptureFrame.Dispose();
                firstScan = false;
            }

            // Deactivate the camera
            photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
        }
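
The callback above only fills imageBuffer under a lock; a consumer on another thread would take the same lock before copying the bytes out. A minimal sketch (TryGetLatestImage is a hypothetical helper, not part of the snippet):

private bool TryGetLatestImage(out byte[] imageData)
{
    lock (imageBuffer)
    {
        if (imageBuffer.Count == 0)
        {
            imageData = null;
            return false;
        }

        // Copy under the lock so the capture callback cannot mutate the list mid-read.
        imageData = imageBuffer.ToArray();
        return true;
    }
}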
        void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            UnityEngine.Debug.Log("++OnProcessFrame");
            if (result.success)
            {
                if (!Const.LOAD_IMAGES)
                {
                    List <byte> imageBufferList = new List <byte>();
                    // Copy the raw IMFMediaBuffer data into our empty byte list.
                    photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

                    photoCaptureFrame.TryGetCameraToWorldMatrix(out _cameraToWorldMatrix);
                    photoCaptureFrame.TryGetProjectionMatrix(out _projectionMatrix);
                    //UnityEngine.Debug.Log(cameraToWorldMatrix);

                    photoCaptureFrame.Dispose();

                    _imageDataRaw   = imageBufferList.ToArray();
                    _frameReadyFlag = true;
                }
                else
                {
                    /*
                     * _indexImageFile = (int)(frameID % _imageFiles.LongCount());
                     * using (IRandomAccessStreamWithContentType stream = await _imageFiles[_indexImageFile].OpenReadAsync())
                     * {
                     *  imageData = new byte[stream.Size];
                     *  using (DataReader reader = new DataReader(stream))
                     *  {
                     *      await reader.LoadAsync((uint)stream.Size);
                     *      reader.ReadBytes(imageData);
                     *  }
                     * }
                     */
                }
            }
            if (Const.HOLO_CAPTURE)
            {
                _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoModeHOLO);
            }
            else
            {
                _isCapturing = false;
            }
        }
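
The flag-based handshake above implies a consumer that polls _frameReadyFlag on the main thread, takes the raw bytes and matrices, and clears the flag. A sketch of that consumer (ProcessFrame is a hypothetical method; the field names mirror the snippet):

void Update()
{
    if (_frameReadyFlag)
    {
        _frameReadyFlag = false;
        ProcessFrame(_imageDataRaw, _cameraToWorldMatrix, _projectionMatrix);
    }
}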
Example #10
        void OnPhotoCapturedCopyToBytes(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);

            Matrix4x4 projectionMatrix;

            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            List <byte> capturedImg = new List <byte>();

            photoCaptureFrame.CopyRawImageDataIntoBuffer(capturedImg);

            photoCaptureFrame.Dispose();

            _takePhotoActionCopyToBytes?.Invoke(cameraToWorldMatrix, projectionMatrix, capturedImg, _cameraParameters.cameraResolutionHeight, _cameraParameters.cameraResolutionWidth);
            CanTakePhoto = false;
            _takePhotoActionCopyToBytes = null;
        }
Example #11
        void OnPhotoCapturedCopyToTexture(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);

            Matrix4x4 projectionMatrix;

            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            var texture = new Texture2D(_cameraParameters.cameraResolutionWidth, _cameraParameters.cameraResolutionHeight, TextureFormat.RGBA32, false);

            photoCaptureFrame.UploadImageDataToTexture(texture);
            texture.wrapMode = TextureWrapMode.Clamp;

            photoCaptureFrame.Dispose();

            _takePhotoActionCopyToTexture2D?.Invoke(cameraToWorldMatrix, projectionMatrix, texture);
            CanTakePhoto = false;
            _takePhotoActionCopyToTexture2D = null;
        }
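
Both callbacks above hand their result to a one-shot delegate and then clear it. A sketch of the driving side of that pattern, assuming a public method stores the caller's delegate and requests a photo (TakePhotoToTexture and the _photoCapture field are hypothetical names):

public void TakePhotoToTexture(Action<Matrix4x4, Matrix4x4, Texture2D> onPhoto)
{
    if (!CanTakePhoto)
    {
        return;
    }

    // Store the delegate for OnPhotoCapturedCopyToTexture and request a single photo.
    _takePhotoActionCopyToTexture2D = onPhoto;
    _photoCapture.TakePhotoAsync(OnPhotoCapturedCopyToTexture);
}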
    private void OnCapturedPhotoToMemoryCallback(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (result.success)
        {
            photoCaptureFrame.UploadImageDataToTexture(targetTexture);
            _imageBytes = ImageConversion.EncodeToJPG(targetTexture);

            var filename = string.Format(@"CapturedImage{0}.jpg", captureCount);
            var filePath = Path.Combine(Application.persistentDataPath, filename);
            SaveImage(filePath, _imageBytes);

            if (photoCaptureFrame.hasLocationData)
            {
                Debug.Log("Save matrices");
                photoCaptureFrame.TryGetProjectionMatrix(out projectionMat);
                photoCaptureFrame.TryGetCameraToWorldMatrix(out worldMat);
            }
        }

        photoCaptureFrame.Dispose();
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
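
SaveImage is not shown; presumably it simply writes the JPEG bytes to the path built above, along these lines (requires using System.IO):

private static void SaveImage(string filePath, byte[] imageBytes)
{
    // Persist the encoded JPEG to the app's local storage.
    File.WriteAllBytes(filePath, imageBytes);
    Debug.Log("Saved image to " + filePath);
}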
Example #13
        void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            //Matrix4x4 cameraToWorldMatrix;

            //photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            //Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

            //Matrix4x4 projectionMatrix;
            //photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            //var texture = new Texture2D(this.cameraParameters.cameraResolutionWidth, this.cameraParameters.cameraResolutionHeight, TextureFormat.ARGB32, false);
            //photoCaptureFrame.UploadImageDataToTexture(texture);
            List <byte> byteses = new List <byte>();

            photoCaptureFrame.CopyRawImageDataIntoBuffer(byteses);
            TakeImageAction?.Invoke(byteses);
            //texture.wrapMode = TextureWrapMode.Clamp;
            photoCaptureFrame.Dispose();
            //texture.Compress(true); // Compression here means compressing to the DXT format.
            Resources.UnloadUnusedAssets();
            isCapturingPhoto = false;
        }
Example #14
        /// <summary>
        /// Saves the camera matrix, the projection matrix, and the texture (later this will change to sending the image to a server and saving the converted image).
        /// </summary>
        /// <param name="result"></param>
        /// <param name="photoCaptureFrame"></param>
        void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

            Matrix4x4 projectionMatrix;

            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            this.projectionMatrixList.Add(projectionMatrix);
            this.world2CameraMatrixList.Add(worldToCameraMatrix);

            var texture = new Texture2D(this.cameraParameters.cameraResolutionWidth, this.cameraParameters.cameraResolutionHeight, TextureFormat.ARGB32, false);

            photoCaptureFrame.UploadImageDataToTexture(texture);
            //// From here...
            //var bytesTmp = texture.EncodeToPNG();
            //File.WriteAllBytes(Application.persistentDataPath + "/RoomFull" + (currentPhotoCount + 1) + ".png", bytesTmp);
            //// ...to here: debug code for viewing the image before it is resized.

            texture.wrapMode = TextureWrapMode.Clamp;
            texture          = CropTexture(texture, TEXTURE_WIDTH, TEXTURE_HEIGHT);
            photoCaptureFrame.Dispose();

            //var bytes = texture.EncodeToPNG();
            //text.text += "save photo \n" + Application.persistentDataPath + "/Room" + (currentPhotoCount + 1) + ".png";

            ////write to LocalState folder
            //File.WriteAllBytes(Application.persistentDataPath + "/Room" + (currentPhotoCount + 1) + ".png", bytes);

            texture.Compress(true); // Compression here means compressing to the DXT format.
            Graphics.CopyTexture(texture, 0, 0, texture2DArray, currentPhotoCount, 0);
            currentPhotoCount++;
            UpdateTextureArray();
            Resources.UnloadUnusedAssets();
        }
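
CropTexture is not included in the snippet; a center-crop sketch in the same spirit, assuming the source texture is uncompressed (as it is here, ARGB32) so GetPixels/SetPixels are available:

private static Texture2D CropTexture(Texture2D source, int targetWidth, int targetHeight)
{
    // Clamp the crop window to the source size and center it.
    int w = Mathf.Min(targetWidth,  source.width);
    int h = Mathf.Min(targetHeight, source.height);
    int x = (source.width  - w) / 2;
    int y = (source.height - h) / 2;

    Color[] pixels   = source.GetPixels(x, y, w, h);
    Texture2D result = new Texture2D(w, h, source.format, false);
    result.SetPixels(pixels);
    result.Apply();
    return result;
}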