/*
 * public void OnPhotoKeyWordDetected()
 * {
 *     //if it is capturing photo now, just return
 *     if (isCapturingPhoto)
 *     {
 *         return;
 *     }
 *
 *     isCapturingPhoto = true;
 *     //TextManager.Instance.setText("Taking picture...");
 *
 *     photoCaptureObj.TakePhotoAsync(OnPhotoCaptured);
 * }
 */
// Callback for PhotoCapture.TakePhotoAsync: stores the camera matrices for this shot,
// uploads the captured frame into a texture, saves it as a PNG, and copies the
// compressed texture into a texture array slice for later shader use.
void OnPhotoCaptured(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result, UnityEngine.XR.WSA.WebCam.PhotoCaptureFrame photoCaptureFrame)
{
    //After the first photo, we want to lock in the current exposure and white balance settings.
    if (lockCameraSettings && currentPhoto == 1)
    {
#if WINDOWS_UWP
        unsafe
        {
            //This is how you access a COM object.
            VideoDeviceController vdm = (VideoDeviceController)Marshal.GetObjectForIUnknown(photoCaptureObj.GetUnsafePointerToVideoDeviceController());
            //Locks current exposure for all future images
            // NOTE(review): the returned IAsyncAction is not awaited — the lock may not be
            // in effect before the next capture. Figure out how to better handle the Async.
            vdm.ExposureControl.SetAutoAsync(false);
            //Locks the current WhiteBalance for all future images
            vdm.WhiteBalanceControl.SetPresetAsync(ColorTemperaturePreset.Manual);
        }
#endif
    }
    //temp to store the matrix
    Matrix4x4 cameraToWorldMatrix;
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    // World→camera is what the shader/projection pipeline consumes, so invert here once.
    Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

    Matrix4x4 projectionMatrix;
    // NOTE(review): return value ignored — on failure projectionMatrix stays default. Confirm acceptable.
    photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);
#if UNITY_EDITOR
    // The editor has no real locatable camera; substitute a canned projection matrix.
    projectionMatrix = GetDummyProjectionMatrix();
#endif
    // Matrices are stored in capture order; presumably indexed in parallel with the
    // texture-array slices filled below — TODO confirm the indexing convention.
    projectionMatrixList.Add(projectionMatrix);
    worldToCameraMatrixList.Add(worldToCameraMatrix);

    // Pull the pixels out of the frame at full camera resolution, then shrink to the
    // fixed texture size used by the rest of the pipeline.
    m_Texture = new Texture2D(m_CameraParameters.cameraResolutionWidth, m_CameraParameters.cameraResolutionHeight, TextureFormat.RGBA32, false);
    photoCaptureFrame.UploadImageDataToTexture(m_Texture);
    m_Texture.wrapMode = TextureWrapMode.Clamp;
    m_Texture = ResizeTexture(m_Texture, TEXTURE_WIDTH, TEXTURE_HEIGHT);
    //textureList.Add(m_Texture);

    // The native frame buffer is no longer needed once the pixels live in m_Texture.
    photoCaptureFrame.Dispose();

    //save room to png
    bytes = m_Texture.EncodeToPNG();
    //write to LocalState folder
    File.WriteAllBytes(Application.persistentDataPath + "/Room" + (currentPhoto + 1) + ".png", bytes);

    // Compress (DXT) before copying into the array — the array format must match.
    m_Texture.Compress(true);
    // NOTE(review): destination slice is currentPhoto + 1, not currentPhoto — looks like
    // slice 0 is reserved for something else; verify against the textureArray consumer.
    Graphics.CopyTexture(m_Texture, 0, 0, textureArray, currentPhoto + 1, 0);
    if (OnTextureUpdated != null)
    {
        OnTextureUpdated();
    }
    currentPhoto += 1;
    // Allow the next voice-triggered capture.
    isCapturingPhoto = false;
    // Free the intermediate full-resolution texture created above.
    Resources.UnloadUnusedAssets();
}
// Callback for PhotoCapture.TakePhotoAsync: copies the captured frame to memory,
// crops it to the QR targeting reticle, saves the crop to disk, and hands the
// cropped pixels to the registered callback.
void onCapturedPhotoToMemory(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result, UnityEngine.XR.WSA.WebCam.PhotoCaptureFrame photoCaptureFrame)
{
    // Guard clause: bail out early on a failed capture.
    if (!result.success)
    {
        Debug.LogError("Error CapturedPhotoToMemory");
        return;
    }

    // Grab the raw frame bytes, then release photo mode as soon as we have them.
    var rawPixels = new List <byte>();
    photoCaptureFrame.CopyRawImageDataIntoBuffer(rawPixels);
    photoCapture.StopPhotoModeAsync(onStoppedPhotoMode);

    // Crop to the QR targeting reticle region.
    // NOTE(review): the meaning of the literal 4 (bytes per pixel? margin?) is defined
    // by trimmingQrSight — confirm before changing.
    var qrRegion = trimmingQrSight(rawPixels, 4);

    // Persist the cropped image for debugging/inspection.
    // NOTE(review): the texture is created at full camera resolution even though the
    // buffer was trimmed — verify trimmingQrSight keeps the dimensions intact.
    var qrTexture = createTexture(qrRegion, cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight);
    saveToFile(qrTexture);

    // Deliver a defensive copy of the cropped bytes to the consumer, if any.
    callback?.Invoke(new List <byte>(qrRegion), cameraParameters.cameraResolutionWidth, cameraParameters.cameraResolutionHeight);
}
// Camera setup ends here.
// Callback for PhotoCapture.TakePhotoAsync: on success, shows the captured image and
// kicks off the Vision API request; always stops photo mode afterwards.
private void OnCapturedPhotoToMemory(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result, UnityEngine.XR.WSA.WebCam.PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Copy the raw IMFMediaBuffer data into a managed byte list.
        var rawImageData = new List <byte>();
        photoCaptureFrame.CopyRawImageDataIntoBuffer(rawImageData);

        var imageBytes = rawImageData.ToArray();
        DisplayImage(imageBytes);                           // render the captured image
        StartCoroutine(GetVisionDataFromImages(imageBytes)); // call the Vision API
    }
    // Release the camera whether or not the capture succeeded.
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
// Returns the unmanaged pointer to this frame's underlying pixel buffer by
// delegating to the native interop helper with our native handle.
public IntPtr GetUnsafePointerToBuffer() => PhotoCaptureFrame.GetUnsafePointerToBuffer(m_NativePtr);
// Image capture callback: records the camera pose at the moment of capture,
// shows a status label at that pose, and posts the image to the server together
// with the matrices needed to map pixels back into world space.
void OnCapturedPhotoToMemory(UnityEngine.XR.WSA.WebCam.PhotoCapture.PhotoCaptureResult result, UnityEngine.XR.WSA.WebCam.PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        Debug.Log("photo captured");

        // Pull the raw frame bytes into managed memory.
        var frameBytes = new List <byte>();
        photoCaptureFrame.CopyRawImageDataIntoBuffer(frameBytes);

        // Camera pose at capture time, derived from the camera-to-world transform.
        var cameraToWorldMatrix = new Matrix4x4();
        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        camera_pos_ = cameraToWorldMatrix.MultiplyPoint3x4(new Vector3(0, 0, -1));
        camera_rot_ = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

        // Inverse projection maps pixel coordinates back into camera space.
        Matrix4x4 projectionMatrix;
        photoCaptureFrame.TryGetProjectionMatrix(Camera.main.nearClipPlane, Camera.main.farClipPlane, out projectionMatrix);
        var pixelToCameraMatrix = projectionMatrix.inverse;

        // Park the status label where the photo was taken.
        status.GetComponent <TextMesh>().text = "photo captured, processing...";
        status.transform.position = camera_pos_;
        status.transform.rotation = camera_rot_;

        StartCoroutine(PostToServer(frameBytes.ToArray(), cameraToWorldMatrix, pixelToCameraMatrix));
    }
    // Always release the camera, even when the capture failed.
    photo_capture_.StopPhotoModeAsync(OnStoppedPhotoMode);
}