//Get the image, pose of camera
async void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        //Debug.Log("\n Saving picture \n");
        List<byte> imageBufferList = new List<byte>();
        // Copy the raw IMFMediaBuffer data into our empty byte list.
        photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

        Vector3 position = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);
        Quaternion rotation = Quaternion.LookRotation(cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

        captureCameras.Add(new CameraItem(position, rotation));
        c.NewMesh(position, rotation);
        UploadImage(imageBufferList.ToArray());
        /*
        if (sceneId > 0)
        {
            UploadImageToScene(imageBufferList.ToArray(), sceneId);
        }
        */
    }

    // Clean up
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame) { if (result.success) { Debug.Log("photo captured"); List <byte> imageBufferList = new List <byte>(); // Copy the raw IMFMediaBuffer data into our empty byte list. photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList); var cameraToWorldMatrix = new Matrix4x4(); photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix); cameraPosition = cameraToWorldMatrix.MultiplyPoint3x4(new Vector3(0, 0, -1)); cameraRotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1)); Matrix4x4 projectionMatrix; photoCaptureFrame.TryGetProjectionMatrix(Camera.main.nearClipPlane, Camera.main.farClipPlane, out projectionMatrix); Matrix4x4 pixelToCameraMatrix = projectionMatrix.inverse; status.GetComponent <TextMesh>().text = "photo captured, processing..."; status.transform.position = cameraPosition; status.transform.rotation = cameraRotation; StartCoroutine(PostToFaceAPI(imageBufferList.ToArray(), cameraToWorldMatrix, pixelToCameraMatrix)); } photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode); }
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame) { if (result.success) { Debug.Log("OnCapturedPhotoToMemory = " + result.success); List <byte> buffer = new List <byte>(); // Create our Texture2D for use and set the correct resolution Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width / 8 * res.height / 8).First(); Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height); // Copy the raw image data into our target texture photoCaptureFrame.UploadImageDataToTexture(targetTexture); Renderer renderer = GameObject.FindGameObjectWithTag("DisplayCube").GetComponent <Renderer>(); renderer.material.mainTexture = targetTexture; Debug.Log("Photo Uploaded to Texture"); Matrix4x4 cameraToWorldMatrix; photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer); Debug.Log("Raw Image copied into buffer"); //Check if we can receive the position where the photo was taken if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix)) { return; } Debug.Log("past if"); //Start a coroutine to handle the server request StartCoroutine(UploadAndHandlePhoto(buffer.ToArray(), cameraToWorldMatrix)); Debug.Log("Photo saved to texture"); } // Clean up photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode); }
public CameraLocation(PhotoCaptureFrame frame)
{
    hasLocationData = frame.hasLocationData;
    nearClipPlane = 0.1f;
    farClipPlane = 50.0f;

    // Grab the PhotoCaptureFrame's location data and record it
    if (frame.hasLocationData)
    {
        if (!frame.TryGetCameraToWorldMatrix(out cameraToWorldTransform) ||
            !frame.TryGetProjectionMatrix(nearClipPlane, farClipPlane, out projectionTransform))
        {
            hasLocationData = false;
        }
        else
        {
            worldToCameraTransform = cameraToWorldTransform.inverse;
        }
    }

    // Safeguard against a failure to grab the data
    if (!hasLocationData)
    {
        cameraToWorldTransform = new Matrix4x4();
        projectionTransform = new Matrix4x4();
        worldToCameraTransform = new Matrix4x4();
        if (Constants.DebugStrings.DebugFlag)
        {
            Debug.Log(Constants.ErrorStrings.LocatableCameraLocationNotFound);
        }
    }

    customName = Constants.Names.CameraLocationAutoName;
}
void CreateUI(PictureRecognitionResultItem2 result, PhotoCaptureFrame photoCaptureFrame)
{
    Matrix4x4 cameraToWorldMatrix;
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    Matrix4x4 projectionMatrix;
    photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

    Vector3 headPosition = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);
    var imagePosZeroToOneCenter = new Vector2((result.xmin + result.xmax) / 2, 1 - (result.ymin + result.ymax) / 2);
    var imagePosProjectedCenter = (imagePosZeroToOneCenter * 2) - new Vector2(1, 1);
    var cameraSpacePosCenter = UnProjectVector(projectionMatrix, new Vector3(imagePosProjectedCenter.x, imagePosProjectedCenter.y, 1));
    var worldSpaceBoxPosCenter = cameraToWorldMatrix.MultiplyPoint(cameraSpacePosCenter);

    RaycastHit hit;
    if (Physics.Raycast(headPosition, (worldSpaceBoxPosCenter - headPosition).normalized, out hit, 20f, SpatialMappingManager.Instance.LayerMask))
    {
        GameObject sphere = (GameObject)Resources.Load("SizeChangableSphere");
        AddedObjects.Add(Instantiate(sphere, hit.point, Quaternion.identity));
    }
    else
    {
        GameObject sphere = (GameObject)Resources.Load("SizeChangableSphere");
        AddedObjects.Add(Instantiate(sphere, worldSpaceBoxPosCenter, Quaternion.identity));
    }
}
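The snippet above (and the ImageToWorld example further down) relies on an UnProjectVector helper that is not shown in this listing. A common implementation, adapted from Microsoft's locatable-camera sample, solves the projection for a point on the image plane without a full matrix inverse; treat this as a sketch of what these snippets assume:

public static Vector3 UnProjectVector(Matrix4x4 proj, Vector3 to)
{
    // Solve proj * from = to for 'from', exploiting the structure of a
    // camera projection matrix (no cross terms below the diagonal).
    Vector3 from = new Vector3(0, 0, 0);
    var axsX = proj.GetRow(0);
    var axsY = proj.GetRow(1);
    var axsZ = proj.GetRow(2);
    from.z = to.z / axsZ.z;
    from.y = (to.y - (from.z * axsY.z)) / axsY.y;
    from.x = (to.x - (from.z * axsX.z)) / axsX.x;
    return from;
}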
void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame) { UnityEngine.Debug.Log("++OnProcessFrame"); if (result.success) { if (!Const.LOAD_IMAGES) { List <byte> imageBufferList = new List <byte>(); // Copy the raw IMFMediaBuffer data into our empty byte list. photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList); photoCaptureFrame.TryGetCameraToWorldMatrix(out _cameraToWorldMatrix); photoCaptureFrame.TryGetProjectionMatrix(out _projectionMatrix); //UnityEngine.Debug.Log(cameraToWorldMatrix); photoCaptureFrame.Dispose(); _imageDataRaw = imageBufferList.ToArray(); _frameReadyFlag = true; } } if (Const.HOLO_CAPTURE) { _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoModeHOLO); } else { _isCapturing = false; } }
/// <summary>
/// Extract the worldToCamera matrix (i.e. V of the MVP matrix
/// traditionally used to translate model coordinates to world
/// coordinates to camera coordinates), and the projection matrix
/// (i.e. P of the MVP matrix traditionally used to translate
/// camera/view coordinates to clip space).
/// </summary>
/// <param name="photoCaptureFrame">
/// The PhotoCaptureFrame from which the matrices are to be derived.
/// </param>
/// <param name="worldToCameraMatrix">
/// The matrix translating HoloLens room mesh world coordinates to
/// view/camera space.
/// </param>
/// <param name="projectionMatrix">
/// The matrix translating view/camera space coordinates to clip space.
/// </param>
private static bool ExtractCameraMatrices(PhotoCaptureFrame photoCaptureFrame, out Matrix4x4 worldToCameraMatrix, out Matrix4x4 projectionMatrix)
{
    bool success = true;

    Matrix4x4 cameraToWorldMatrix;
    if (photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix))
    {
        worldToCameraMatrix = cameraToWorldMatrix.inverse;
    }
    else
    {
        TextManager.SetText(Messages.MatrixFail);
        worldToCameraMatrix = new Matrix4x4();
        success = false;
    }

    if (!photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix))
    {
        TextManager.SetText(Messages.MatrixFail);
        projectionMatrix = new Matrix4x4();
        success = false;
    }

    return success;
}
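Given the V and P matrices this helper returns, projecting a world-space point into the photo's normalized device coordinates is a single multiply chain. A minimal usage sketch (worldPoint is a hypothetical input; Matrix4x4.MultiplyPoint performs the perspective divide):

// Sketch: world space -> camera space -> NDC for the captured photo.
Matrix4x4 worldToCameraMatrix, projectionMatrix;
if (ExtractCameraMatrices(photoCaptureFrame, out worldToCameraMatrix, out projectionMatrix))
{
    Vector3 cameraSpace = worldToCameraMatrix.MultiplyPoint(worldPoint);
    Vector3 ndc = projectionMatrix.MultiplyPoint(cameraSpace); // x, y in [-1, 1] if visible
}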
/*
private void OnPhotoModeStarted(PhotoCapture.PhotoCaptureResult result)
{
    if (result.success)
    {
        string filename = string.Format(@"CapturedImage{0}_n.jpg", Time.time);
        string filepath = System.IO.Path.Combine(Application.persistentDataPath, filename);
        photoCaptureObject.TakePhotoAsync(filepath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
        Debug.Log("TakePhoto Succeed!" + filepath);
    }
    else
    {
        Debug.LogError("Unable to start photo mode!");
    }
}

void OnCapturedPhotoToDisk(PhotoCapture.PhotoCaptureResult result)
{
    if (result.success)
    {
        Debug.Log("Saved Photo to Disk!");
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }
    else
    {
        Debug.Log("Failed to save photo to disk!");
    }
}
*/

void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    worldToCameraMatrix = cameraToWorldMatrix.inverse;
    photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);
    photoCaptureFrame.UploadImageDataToTexture(targetTexture);

    GameObject quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
    Renderer quadRenderer = quad.GetComponent<Renderer>() as Renderer;
    quadRenderer.material = new Material(Shader.Find("Unlit/Texture"));

    Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
    Vector3 position = cameraToWorldMatrix.MultiplyPoint(Vector3.zero);
    Debug.Log("cameraToWorldMatrix: " + cameraToWorldMatrix);
    Debug.Log("Camera Position in World: " + position);

    quad.transform.parent = this.transform;
    // Turning the quad to face the user fails in the Unity Editor (the rotation
    // cannot be set to the reverse of the camera's facing direction); still to be
    // verified on the HoloLens itself.
    //quad.transform.position = position;
    //quad.transform.rotation = rotation;
    quad.transform.localPosition = new Vector3(0.0f, 0.0f, 0.1f);
    quad.transform.rotation = this.transform.rotation;
    quadRenderer.material.SetTexture("_MainTex", targetTexture);

    photoCaptureObj.StopPhotoModeAsync(OnStoppedPhotoMode);
    Debug.Log("Capture Photo to Memory Succeed!");
}
/// <summary>
/// On image capture
/// </summary>
/// <param name="result"></param>
/// <param name="photoCaptureFrame"></param>
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    // Copy the raw image data into the target texture
    photoCaptureFrame.UploadImageDataToTexture(targetTexture);
    previewImage.texture = targetTexture;
    try
    {
        byte[] imageData = targetTexture.EncodeToJPG(90);
        //WriteImageToDisk(imageData);

        Matrix4x4 cameraToWorldMatrix;
        Matrix4x4 projectionMatrix;
        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        photoCaptureFrame.TryGetProjectionMatrix(0, 5, out projectionMatrix);
        StartCoroutine(SendImageToServer(imageData, cameraToWorldMatrix, projectionMatrix));
    }
    catch (Exception e)
    {
        DebugManager.Instance.PrintToInfoLog("Error in OnCapturedPhotoToMemory:" + e.ToString());
    }
}
private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Copy the in-memory image into imageBufferList for sending to the Face API
        List<byte> imageBufferList = new List<byte>();
        photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

        // Debug only: displays the outgoing image so you can check what was captured.
        // If it gets in the way, the whole photoPanel can be removed.
        Texture2D debugTexture = new Texture2D(100, 100);
        debugTexture.LoadImage(imageBufferList.ToArray());
        photoPanel.texture = debugTexture;

        // Keep the parameters needed to convert the camera orientation into world coordinates
        var cameraToWorldMatrix = new Matrix4x4();
        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        cameraRotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

        Matrix4x4 projectionMatrix;
        photoCaptureFrame.TryGetProjectionMatrix(Camera.main.nearClipPlane, Camera.main.farClipPlane, out projectionMatrix);
        Matrix4x4 pixelToCameraMatrix = projectionMatrix.inverse;

        StartCoroutine(PostToFaceAPI(imageBufferList.ToArray(), cameraToWorldMatrix, pixelToCameraMatrix));
    }
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
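Several snippets in this listing hand the inverse projection to a PostToFaceAPI-style coroutine as pixelToCameraMatrix. A hedged sketch of how such a matrix is typically applied downstream, assuming a detection point in normalized image coordinates (u, v in 0..1 with v running top-down; the names are illustrative, not from the original):

// Sketch: normalized image point -> world-space ray direction.
Vector2 ndcPoint = new Vector2(u * 2f - 1f, 1f - v * 2f);  // image -> NDC
Vector3 dirCamera = pixelToCameraMatrix.MultiplyPoint(new Vector3(ndcPoint.x, ndcPoint.y, 1f));
Vector3 dirWorld = cameraToWorldMatrix.MultiplyVector(dirCamera.normalized);
// A hologram can then be placed along dirWorld from the capture position.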
private Ray ImageToWorld(PhotoCaptureFrame photoCaptureFrame, Vector2 pos, out Vector3 worldPos)
{
    Matrix4x4 cameraToWorldMatrix;
    Matrix4x4 projectionMatrix;

    if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix))
    {
        cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;
        Debug.Log("Failed to get view matrix from photo");
    }
    if (!photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix))
    {
        projectionMatrix = Camera.main.projectionMatrix;
        Debug.Log("Failed to get projection matrix from photo");
    }

    Vector3 normalizedPos = Normalize(pos);
    Vector4 cameraSpacePos = UnProjectVector(projectionMatrix, normalizedPos);
    Vector3 origin = cameraToWorldMatrix * new Vector4(0, 0, 0, 1);
    worldPos = cameraToWorldMatrix * cameraSpacePos;
    return new Ray(origin, worldPos - origin);
}
private async void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        List<byte> imageBufferList = new List<byte>();
        photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

        // Get the transform matrices
        Matrix4x4 cameraToWorld = new Matrix4x4();
        Matrix4x4 projection = new Matrix4x4();
        bool mappable = true;
        mappable &= photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorld);
        mappable &= photoCaptureFrame.TryGetProjectionMatrix(out projection);

        // Upload the locatable photo & download the detection results
        await SocketManager.Instance.SendPhoto(imageBufferList.ToArray());
        BoundingBox[] boxes = await SocketManager.Instance.RecvDetections();
        SceneUnderstanding.Instance.RecvDetections(cameraToWorld, projection, boxes, mappable);

        isCapturing = false;
        stopCapturing = false;
        cnt += 1;
        if (cnt == 50)
        {
            stopCapturing = true;
        }
    }
}
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Create our Texture2D for use and set the correct resolution
        //Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
        Texture2D targetTexture = new Texture2D(textureWidth, textureHeight); //cameraResolution.width, cameraResolution.height);
        // Copy the raw image data into our target texture
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);

        // Get view and projection matrices from camera, as well as position and lookVector
        Matrix4x4 inverseV;
        Matrix4x4 P;
        Matrix4x4 VP;
        bool vSuccess = photoCaptureFrame.TryGetCameraToWorldMatrix(out inverseV);
        bool pSuccess = photoCaptureFrame.TryGetProjectionMatrix(out P);
        Vector3 cameraWorldPos = inverseV.MultiplyPoint(Vector3.zero);
        Vector3 cameraLookVector = inverseV.MultiplyVector(Vector3.forward);
        Debug.Log("RGB Camera View Matrix: " + (vSuccess ? "Found" : "NULL"));
        Debug.Log("RGB Camera Projection Matrix: " + (pSuccess ? "Found" : "NULL"));
        Debug.Log("RGB Camera Position: " + cameraWorldPos);
        Debug.Log("RGB Camera LookVector: " + cameraLookVector);

        //depthCamera.transform.position = (cameraWorldPos - Camera.main.gameObject.transform.position);
        //depthCamera.transform.LookAt(cameraWorldPos + cameraLookVector);
        //Debug.Log("Applied position and lookvector to depth camera");
        //camera.projectionMatrix = P;
        //Debug.Log("Applied projection matrix to depth camera");
        ////camera.worldToCameraMatrix = inverseV.inverse;
        //camera.Render();
        ////camera.RenderWithShader(Shader.Find("DepthOnly"), "depth");
        //Debug.Log("Rendered without depth shader");

        TakeScreenshot(targetTexture);
        Debug.Log("Snapshot Taken");

        // Update snapshot VP matrix before updating the shader
        if (vSuccess && pSuccess)
        {
            VP = P * inverseV.inverse;
            //Snapshot snapshot = snapshots[snapshots.Count - 1];
            //snapshot.vp = VP;
            //snapshot.position = new Vector4(cameraWorldPos.x, cameraWorldPos.y, cameraWorldPos.z, 1);
            //snapshots[snapshots.Count - 1] = snapshot;
            vpArray[numProjectors] = VP;
            invVPArray[numProjectors] = inverseV;
            posArray[numProjectors] = new Vector4(cameraWorldPos.x, cameraWorldPos.y, cameraWorldPos.z, 1);
        }
        Debug.Log("Snapshot Updated");

        UpdateShader();
        Debug.Log("Updated Shader");

        // Free memory
        photoCaptureFrame.Dispose();
    }
    // Clean up
    //photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (m_Canvas == null)
    {
        m_Canvas = GameObject.CreatePrimitive(PrimitiveType.Quad);
        m_Canvas.name = "PhotoCaptureCanvas";
        m_CanvasRenderer = m_Canvas.GetComponent<Renderer>() as Renderer;
        // m_CanvasRenderer.material = new Material(Shader.Find("AR/HolographicImageBlend"));
        // m_CanvasRenderer.material = new Material(Shader.Find("HolographicImageBlend"));
        //Material m = new Material(Shader.Find("HolographicImageBlend"));
        // m_CanvasRenderer.material = new Material(Shader.Find("Unlit/HolographicImageBlend"));
        Material m = new Material(Shader.Find("Unlit/HolographicImageBlend"));
        if (m != null)
        {
            m_CanvasRenderer.material = m;
        }
        else
        {
            Debug.Log("Shader Material PROBLEM!!!");
        }
    }

    Matrix4x4 cameraToWorldMatrix;
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

    Matrix4x4 projectionMatrix;
    photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

    photoCaptureFrame.UploadImageDataToTexture(m_Texture);
    m_Texture.wrapMode = TextureWrapMode.Clamp;

    m_CanvasRenderer.sharedMaterial.SetTexture("_MainTex", m_Texture);
    m_CanvasRenderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);
    m_CanvasRenderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
    m_CanvasRenderer.sharedMaterial.SetFloat("_VignetteScale", 1.0f);

    // Position the canvas object slightly in front
    // of the real world web camera.
    Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

    // Rotate the canvas object so that it faces the user.
    Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

    m_Canvas.transform.position = position;
    m_Canvas.transform.rotation = rotation;

    Debug.Log("Took picture!");
    m_CapturingPhoto = false;
}
/// <summary>
/// Serialize the image that was taken and turn it into a raw request.
/// 1. Take the photo and get it as a JPEG-encoded byte array
/// 2. Encode the JPEG with base64 so it is serializable
/// 3. Serialize everything as a JSON string
/// 4. Serialize the JSON string as the raw request
/// </summary>
private IEnumerator<bool> SerializeRequest(PhotoCaptureFrame photoCapturedFrame)
{
    yield return true;

    //Texture2D tex = new Texture2D(ImageCapture.Instance.width,
    //                              ImageCapture.Instance.height);
    //photoCapturedFrame.UploadImageDataToTexture(tex);
    //byte[] jpgEncoded = tex.EncodeToJPG();
    List<byte> jpgEncodedList = new List<byte>();
    photoCapturedFrame.CopyRawImageDataIntoBuffer(jpgEncodedList);
    byte[] jpgEncoded = jpgEncodedList.ToArray();

    // The server expects a base64-encoded, JPEG-encoded string.
    // The request should have the form {"inputs": [{"b64": <b64encodedjpgstring>}]}
    string b64Encode = Convert.ToBase64String(jpgEncoded);
    DetectionRequest detectionRequest = new DetectionRequest
    {
        inputs = new List<B64> { new B64 { b64 = b64Encode } }
    };
    string jsonRequest = JsonConvert.SerializeObject(detectionRequest);
    RequestBufferElem requestBufferElem = new RequestBufferElem()
    {
        rawRequest = Encoding.UTF8.GetBytes(jsonRequest)
    };

    if (!photoCapturedFrame.TryGetCameraToWorldMatrix(out requestBufferElem.cameraToWorld) ||
        !photoCapturedFrame.TryGetProjectionMatrix(out requestBufferElem.projection))
    {
        requestBufferElem.hasWorldData = false;
    }
    else
    {
        requestBufferElem.hasWorldData = true;
    }

    photoCapturedFrame.Dispose();
    rawRequestBuffer.Enqueue(requestBufferElem);
    rawRequestBufferEmpty = false;
    timestamp = stopwatch.ElapsedMilliseconds;
}
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Keep imageBufferList for later use
        imageBufferList = new List<byte>();
        photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);

        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        worldToCameraMatrix = cameraToWorldMatrix.inverse;
        photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);
        Debug.LogFormat(@"The value of cameraToWorld Matrix: {0}{1}{2}{3} ", cameraToWorldMatrix.GetRow(0), cameraToWorldMatrix.GetRow(1), cameraToWorldMatrix.GetRow(2), cameraToWorldMatrix.GetRow(3));

        photoCaptureFrame.UploadImageDataToTexture(targetTexture);

        // Create a picture-frame quad and assign the photo material and shader matrices
        GameObject quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
        Renderer quadRenderer = quad.GetComponent<Renderer>() as Renderer;
        quadRenderer.material = new Material(Shader.Find("AR/HolographicImageBlend"));
        quadRenderer.sharedMaterial.SetTexture("_MainTex", targetTexture);
        quadRenderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);
        quadRenderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
        quadRenderer.sharedMaterial.SetFloat("_VignetteScale", 1.0f);

        // Set the position and orientation of the quad holding the photo: the position is
        // where the camera was at capture time, and the quad faces the user.
        // Every object has its own local axes; fixing their orientation both orients and
        // positions the object. LookRotation builds the quaternion representing that
        // orientation from the given axis directions. GetColumn() takes a zero-based
        // column index.
        // Goal of this block: place the quad slightly in front of the HoloLens's real camera.
        Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
        // Columns 0, 1, 2 of cameraToWorldMatrix are the right/up/forward directions. The
        // last row is all zeros except element [3][3], which is 1.
        // Place the quad a bit in front of the camera. To place it exactly at the camera's
        // position, multiply by Vector3.zero instead.
        Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

        quad.transform.parent = this.transform;
        quad.transform.position = position;
        quad.transform.rotation = rotation;
        Debug.Log("Quad's Position: " + quad.transform.position);

        capturingSucceed = true;
        Debug.Log("Capture Photo to Memory Succeed!");
    }
    photoCaptureObj.StopPhotoModeAsync(OnStoppedPhotoMode);
}
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    // Copy the raw image data into the target texture
    //photoCaptureFrame.UploadImageDataToTexture(targetTexture);
    try
    {
        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        photoCaptureFrame.TryGetProjectionMatrix(out ProjectionMatrix);
        // Debug.Log(cameraToWorldMatrix);
        // Debug.Log(ProjectionMatrix);
    }
    catch (Exception)
    {
        // Swallow the exception; the matrices keep their previous values.
    }
}
void UpdateCameraMatricesWithCapturedPhoto(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Get view and projection matrices from camera
        Matrix4x4 inverseV;
        Matrix4x4 P;
        bool vSuccess = photoCaptureFrame.TryGetCameraToWorldMatrix(out inverseV);
        bool pSuccess = photoCaptureFrame.TryGetProjectionMatrix(out P);
        if (vSuccess && pSuccess)
        {
            // Note: this stores P * V^-1. The actual inverse of VP = P * V would be
            // inverseV * P.inverse; downstream code presumably expects this form as-is.
            currentInverseVP = P * inverseV;
        }
    }
}
/// <summary>
/// Called after the photo has been taken
/// </summary>
/// <param name="result">capture result</param>
/// <param name="photoCaptureFrame">captured frame</param>
private void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    Debug.Log("OnProcessFrame");
    if (result.success)
    {
        List<byte> imageBufferList = new List<byte>();
        photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
        photoCaptureFrame.TryGetCameraToWorldMatrix(out martrix_camera_to_world);
        photoCaptureFrame.TryGetProjectionMatrix(out martrix_projection);
        //photoCaptureFrame.Dispose();
#if WINDOWS_UWP
        SendData(imageBufferList.ToArray());
        //ShowHoloGrams = !ShowHoloGrams;
#endif
        photo_capture.TakePhotoAsync(OnProcessFrame);
    }
}
void OnProcessFrame(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame) { UnityEngine.Debug.Log("++OnProcessFrame"); if (result.success) { if (!Const.LOAD_IMAGES) { List <byte> imageBufferList = new List <byte>(); // Copy the raw IMFMediaBuffer data into our empty byte list. photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList); photoCaptureFrame.TryGetCameraToWorldMatrix(out _cameraToWorldMatrix); photoCaptureFrame.TryGetProjectionMatrix(out _projectionMatrix); //UnityEngine.Debug.Log(cameraToWorldMatrix); photoCaptureFrame.Dispose(); _imageDataRaw = imageBufferList.ToArray(); _frameReadyFlag = true; } else { /* * _indexImageFile = (int)(frameID % _imageFiles.LongCount()); * using (IRandomAccessStreamWithContentType stream = await _imageFiles[_indexImageFile].OpenReadAsync()) * { * imageData = new byte[stream.Size]; * using (DataReader reader = new DataReader(stream)) * { * await reader.LoadAsync((uint)stream.Size); * reader.ReadBytes(imageData); * } * } */ } } if (Const.HOLO_CAPTURE) { _photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoModeHOLO); } else { _isCapturing = false; } }
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Create our Texture2D for use and set the correct resolution
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
        Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);
        // Copy the raw image data into our target texture
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);

        byte[] PNGfile = targetTexture.EncodeToPNG();
        string filePath = System.IO.Path.Combine(Application.persistentDataPath, "CapturedImage" + viewNumber + ".png");
        Debug.Log("!!!!!!!!!!!!!!!" + filePath);
        File.WriteAllBytes(filePath, PNGfile); //todo: enumerate
        Debug.Log("saved png");

        Matrix4x4 worldTrans;
        Matrix4x4 viewTrans;
        if (photoCaptureFrame.TryGetCameraToWorldMatrix(out worldTrans) && photoCaptureFrame.TryGetProjectionMatrix(out viewTrans))
        {
            filePath = System.IO.Path.Combine(Application.persistentDataPath, "CapturedImage" + viewNumber + ".png.matr");
            File.WriteAllText(filePath, worldTrans + "\n\n" + viewTrans);
            sendModule.addView(worldTrans, viewTrans, filePath);
        }
        else
        {
            Debug.LogError("failed to save matrices");
        }

        AudioSource[] clickSound = GetComponents<AudioSource>();
        clickSound[0].Play();

        if (viewNumber > numberOfPics)
        {
            captureModule.save = true;
            captureModule.recording = false;
            takePhotos = false;
        }
    }
    // Clean up
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
// When screenshot is captured to memory
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // play photo capture sound
        //Camera.main.GetComponent<AudioSource>().Play();

        // freeing up memory
        Texture.Destroy(_imageAsTextureTmp);

        // save photograph to texture
        _imageAsTextureTmp = new Texture2D(_cameraResolution.width, _cameraResolution.height);
        photoCaptureFrame.UploadImageDataToTexture(_imageAsTextureTmp);

        // position of camera/user at time of capturing screenshot
        photoCaptureFrame.TryGetCameraToWorldMatrix(out _cameraToWorldMatrixTmp);
        photoCaptureFrame.TryGetProjectionMatrix(out _projectionMatrixTmp);

        // measuring captured frames per second
        if (_lastTime == 0)
        {
            _lastTime = Time.time;
        }
        if (Time.time - _lastTime < 1.0f)
        {
            _photoCount++;
        }
        else
        {
            // Debug.LogError("Photos per s: " + _photoCount);
            _lastTime = Time.time;
            _photoCount = 0;
        }

        // send event if there are subscribers
        var handler = ScreenshotTaken;
        if (handler != null)
        {
            handler.Invoke(this, new EventArgs());
        }
    }
    this._screenshotsTakeable = true;
}
/// <summary>
/// Processes the received frame, converts the image to grayscale if requested, and invokes the next photo request.
/// </summary>
private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (_stopped?.Task != null)
    {
        return;
    }
    if (result.resultType == PhotoCapture.CaptureResultType.UnknownError)
    {
        return;
    }
    if (photoCaptureFrame == null)
    {
        return;
    }

    // Luminance (grayscale) of the NV12 format requires the full image height; chrominance is stored at half resolution.
    // <see href="https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#nv12"/>
    Size size = new Size(FrameWidth, (double)FrameHeight * 3 / 2);
    _image = new Mat(size, CvType.CV_8UC1);
    List<byte> imageBuffer = new List<byte>();
    photoCaptureFrame?.CopyRawImageDataIntoBuffer(imageBuffer);
    MatUtils.copyToMat(imageBuffer.ToArray(), _image);

    if (_format == ColorFormat.Grayscale)
    {
        Imgproc.cvtColor(_image, _image, Imgproc.COLOR_YUV2GRAY_NV12);
    }

    Matrix4x4 cameraToWorldMatrix = Matrix4x4.identity;
    photoCaptureFrame?.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    CameraExtrinsic extrinsic = new CameraExtrinsic(cameraToWorldMatrix);

    Matrix4x4 projectionMatrix = Matrix4x4.identity;
    photoCaptureFrame?.TryGetProjectionMatrix(out projectionMatrix);
    CameraIntrinsic intrinsic = new CameraIntrinsic(projectionMatrix);

    CameraFrame cameraFrame = new CameraFrame(_image, intrinsic, extrinsic, FrameWidth, FrameHeight, FrameCount++, _format);
    FrameArrivedEventArgs args = new FrameArrivedEventArgs(cameraFrame);
    FrameArrived?.Invoke(this, args);

    _photoCaptureObject?.TakePhotoAsync(OnCapturedPhotoToMemory);
}
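CameraIntrinsic and CameraExtrinsic here are project-specific types not shown in this listing. If CameraIntrinsic recovers pinhole parameters from the Unity projection matrix, the usual relations are the following (an assumption on our part; sign and offset conventions vary between pipelines, and the original type may differ):

// Hypothetical sketch: pinhole intrinsics in pixels from a Unity projection
// matrix P, for an image of size (width, height).
float fx = P[0, 0] * width / 2f;          // horizontal focal length
float fy = P[1, 1] * height / 2f;         // vertical focal length
float cx = (1f - P[0, 2]) * width / 2f;   // principal point, x
float cy = (1f - P[1, 2]) * height / 2f;  // principal point, y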
void OnPhotoCapturedCopyToBytes(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    Matrix4x4 cameraToWorldMatrix;
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    Matrix4x4 projectionMatrix;
    photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

    List<byte> capturedImg = new List<byte>();
    photoCaptureFrame.CopyRawImageDataIntoBuffer(capturedImg);
    photoCaptureFrame.Dispose();

    _takePhotoActionCopyToBytes?.Invoke(cameraToWorldMatrix, projectionMatrix, capturedImg, _cameraParameters.cameraResolutionHeight, _cameraParameters.cameraResolutionWidth);
    CanTakePhoto = false;
    _takePhotoActionCopyToBytes = null;
}
void OnPhotoCapturedCopyToTexture(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    Matrix4x4 cameraToWorldMatrix;
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    Matrix4x4 projectionMatrix;
    photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

    var texture = new Texture2D(_cameraParameters.cameraResolutionWidth, _cameraParameters.cameraResolutionHeight, TextureFormat.RGBA32, false);
    photoCaptureFrame.UploadImageDataToTexture(texture);
    texture.wrapMode = TextureWrapMode.Clamp;
    photoCaptureFrame.Dispose();

    _takePhotoActionCopyToTexture2D?.Invoke(cameraToWorldMatrix, projectionMatrix, texture);
    CanTakePhoto = false;
    _takePhotoActionCopyToTexture2D = null;
}
void OnCapturePhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    Matrix4x4 cameraToWorldMatrix;
    List<byte> buffer = new List<byte>();
    photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);

    // Check if we can receive the position where the photo was taken
    if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix))
    {
        cameraToWorldMatrix = Matrix4x4.identity;
    }

    if (photoReady != null)
    {
        photoReady(id, buffer.ToArray(), cameraToWorldMatrix, cameraResolution);
    }

    // stop the photo mode
    photoCapture.StopPhotoModeAsync(OnPhotoModeStopped);
}
void OnCapturedPhotoToMemoryAsync(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Convert photoCaptureFrame to a List<byte>, then to a byte[].
        imageBufferList = new List<byte>();
        photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
        // Store the captured image in imageBufferArray
        imageBufferArray = imageBufferList.ToArray();

        photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
        worldToCameraMatrix = cameraToWorldMatrix.inverse;
        photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);
        UnityEngine.Debug.LogFormat(@"The value of cameraToWorld Matrix: {0}{1}{2}{3} ", cameraToWorldMatrix.GetRow(0), cameraToWorldMatrix.GetRow(1), cameraToWorldMatrix.GetRow(2), cameraToWorldMatrix.GetRow(3));

        UnityEngine.Debug.Log("Captured Photo To Memory Succeed! ");
    }
    photoCaptureObj.StopPhotoModeAsync(OnStoppedPhotoMode);
}
private void onCapturedPhotoToMemoryCallback(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (!result.success)
    {
        Debug.LogError("Failed to take photo");
        onPhotoCaptured.OnNext(null);
        photoCapture.StopPhotoModeAsync(onPhotoModeStoppedCallback);
        return;
    }

    var buffer = new List<byte>();
    photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);

    // Keep the parameters needed to convert the camera orientation into world coordinates
    Matrix4x4 cameraToWorldMatrix;
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    //var cameraRotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
    Matrix4x4 projectionMatrix;
    photoCaptureFrame.TryGetProjectionMatrix(Camera.main.nearClipPlane, Camera.main.farClipPlane, out projectionMatrix);
    var pixelToCameraMatrix = projectionMatrix.inverse;

    Plan = new ShootingPlan
    {
        Resolution = Resolution,
        ShootingLocation = Camera.main.transform.position,
        CameraToWorld = cameraToWorldMatrix,
        PixelToCamera = pixelToCameraMatrix,
    };

    photoCapture.StopPhotoModeAsync(onPhotoModeStoppedCallback);
    var value = buffer.ToArray();
    onPhotoCaptured.OnNext(value);
}
private void OnCapturedPhotoToMemoryCallback(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);
        _imageBytes = ImageConversion.EncodeToJPG(targetTexture);
        var filename = string.Format(@"CapturedImage{0}.jpg", captureCount);
        var filePath = Path.Combine(Application.persistentDataPath, filename);
        SaveImage(filePath, _imageBytes);

        if (photoCaptureFrame.hasLocationData)
        {
            Debug.Log("Save matrices");
            photoCaptureFrame.TryGetProjectionMatrix(out projectionMat);
            photoCaptureFrame.TryGetCameraToWorldMatrix(out worldMat);
        }
    }
    photoCaptureFrame.Dispose();
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
/// <summary>
/// Save the camera matrix, projection matrix, and texture (later this will be changed to
/// sending the image to a server and saving the converted result)
/// </summary>
/// <param name="result"></param>
/// <param name="photoCaptureFrame"></param>
void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    Matrix4x4 cameraToWorldMatrix;
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;
    Matrix4x4 projectionMatrix;
    photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

    this.projectionMatrixList.Add(projectionMatrix);
    this.world2CameraMatrixList.Add(worldToCameraMatrix);

    var texture = new Texture2D(this.cameraParameters.cameraResolutionWidth, this.cameraParameters.cameraResolutionHeight, TextureFormat.ARGB32, false);
    photoCaptureFrame.UploadImageDataToTexture(texture);

    //// Debug code (start): dump the pre-resize image to disk to inspect what was captured
    //var bytesTmp = texture.EncodeToPNG();
    //File.WriteAllBytes(Application.persistentDataPath + "/RoomFull" + (currentPhotoCount + 1) + ".png", bytesTmp);
    //// Debug code (end)

    texture.wrapMode = TextureWrapMode.Clamp;
    texture = CropTexture(texture, TEXTURE_WIDTH, TEXTURE_HEIGHT);
    photoCaptureFrame.Dispose();

    //var bytes = texture.EncodeToPNG();
    //text.text += "save photo \n" + Application.persistentDataPath + "/Room" + (currentPhotoCount + 1) + ".png";
    ////write to LocalState folder
    //File.WriteAllBytes(Application.persistentDataPath + "/Room" + (currentPhotoCount + 1) + ".png", bytes);

    texture.Compress(true); // Compress() here means compressing to the DXT format.
    Graphics.CopyTexture(texture, 0, 0, texture2DArray, currentPhotoCount, 0);
    currentPhotoCount++;
    UpdateTextureArray();
    Resources.UnloadUnusedAssets();
}
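CropTexture is a project helper not shown above. A minimal sketch of a center-crop implementation consistent with how it is called (an assumption; the original may crop from a different origin):

private static Texture2D CropTexture(Texture2D src, int width, int height)
{
    // Center-crop the source texture to the requested size.
    int x = Mathf.Max(0, (src.width - width) / 2);
    int y = Mathf.Max(0, (src.height - height) / 2);
    Color[] pixels = src.GetPixels(x, y, width, height);
    Texture2D dst = new Texture2D(width, height, TextureFormat.ARGB32, false);
    dst.SetPixels(pixels);
    dst.Apply();
    return dst;
}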