/// <summary>
/// Photo capture completed: fetch the captured image and show it as a sprite.
/// NOTE(review): the original (Chinese) comment says the image is then analyzed via the
/// Custom Vision API; that call is not visible in this method — presumably it happens
/// while in the EdittingPhoto state. Confirm against the rest of the class.
/// </summary>
/// <param name="result">Result of the capture operation.</param>
/// <param name="photoCaptureFrame">The captured image frame.</param>
private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (!result.success)
    {
        // Capture failed: play the failure sound and return to the ready state.
        audioSource.Stop();
        audioSource.clip = failedAudioClip;
        audioSource.Play();
        currentStatus = CurrentStatus.Ready;
        ModelManager.Instance.SetTipText("air tap to take a photo");
    }
    else
    {
        // Capture succeeded: shutter-sound feedback, then switch UI into editing mode.
        audioSource.Stop();
        audioSource.clip = captureAudioClip;
        audioSource.Play();
        ModelManager.Instance.SetPhotoImageActive(true);
        ModelManager.Instance.SetTipText("editing...");
        ToolManager.Instance.ShowMenu();
        currentStatus = CurrentStatus.EdittingPhoto;

        int photoWidth = cameraParameters.cameraResolutionWidth;
        int photoHeight = cameraParameters.cameraResolutionHeight;

        // Copy the raw pixels out of the frame. The buffer arrives vertically flipped,
        // so mirror it (4 bytes per pixel) before building the display texture.
        photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
        imageBufferList = FlipVertical(imageBufferList, photoWidth, photoHeight, 4);

        var photoTexture = CreateTexture(imageBufferList, photoWidth, photoHeight);
        var photoSprite = Sprite.Create(
            photoTexture,
            new Rect(0, 0, photoTexture.width, photoTexture.height),
            new Vector2(0.5f, 0.5f));
        ModelManager.Instance.SetPhotoImage(photoSprite);
    }

    // Release the camera regardless of the outcome.
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
/// <summary>
/// Called when photo mode has started: hooks up gesture input and logs readiness.
/// Note: the result parameter is not checked here — TODO confirm failure is handled elsewhere.
/// </summary>
/// <param name="result">Result of starting photo mode (currently unused).</param>
void OnStartPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Begin listening for the air-tap gesture that triggers a capture.
    SetupGestureRecognizer();
    Debug.Log("Ready!");
    Debug.Log("Air Tap to take a picture.");
}
/// <summary>
/// Called when photo mode has stopped: releases the capture resource and notifies
/// the MixedRealityCapture manager that the picture cycle is complete.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Shutdown the photo capture resource
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    MixedRealityCapture.Instance.AfterTakeAPicture();
}
/// <summary>
/// Called after the photo has been written to disk. Stops photo mode so the camera
/// resource is released; further processing happens in <c>OnStoppedPhotoMode</c>.
/// </summary>
/// <param name="result">Result of the disk capture (not inspected here — stop is unconditional).</param>
void OnCapturedPhotoToDisk(PhotoCapture.PhotoCaptureResult result)
{
    // Stop photo mode once the capture attempt has finished, success or not,
    // so the capture object is always torn down.
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
/// <summary>
/// Called when photo mode has stopped: disposes the capture object and clears the reference.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Shutdown our photo capture resource
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    Debug.Log("DISPOSED AND STOPPED PHOTO MODE");
}
/// <summary>
/// Called when a photo has been captured to memory. Displays the image on the
/// "DisplayCube" object, copies the raw bytes, and — if the camera pose is available —
/// starts the upload coroutine with the camera-to-world matrix.
/// Fix: the original returned early when <c>TryGetCameraToWorldMatrix</c> failed,
/// skipping <c>StopPhotoModeAsync</c> and leaking the capture resource; the stop call
/// is now unconditional. The resolution ordering key was also <c>width / 8 * height / 8</c>,
/// whose integer division creates ties — replaced with the plain pixel count.
/// </summary>
/// <param name="result">Result of the capture operation.</param>
/// <param name="photoCaptureFrame">The captured image frame.</param>
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        Debug.Log("OnCapturedPhotoToMemory = " + result.success);
        List<byte> buffer = new List<byte>();

        // Create our Texture2D for use and set the correct resolution
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
        Texture2D targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

        // Copy the raw image data into our target texture and show it on the display cube.
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);
        Renderer renderer = GameObject.FindGameObjectWithTag("DisplayCube").GetComponent<Renderer>();
        renderer.material.mainTexture = targetTexture;
        Debug.Log("Photo Uploaded to Texture");

        photoCaptureFrame.CopyRawImageDataIntoBuffer(buffer);
        Debug.Log("Raw Image copied into buffer");

        // Only start the server upload when we can recover the position the photo was
        // taken from; either way, fall through to the cleanup below.
        Matrix4x4 cameraToWorldMatrix;
        if (photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix))
        {
            Debug.Log("past if");
            // Start a coroutine to handle the server request
            StartCoroutine(UploadAndHandlePhoto(buffer.ToArray(), cameraToWorldMatrix));
            Debug.Log("Photo saved to texture");
        }
    }

    // Clean up — always stop photo mode so the capture object is released.
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
/// <summary>
/// Called when photo mode has started. On success, immediately takes a photo to memory;
/// on failure, logs an error. (A large block of commented-out inline-delegate code that
/// duplicated <c>OnCapturedPhotoToMemory</c> has been removed.)
/// </summary>
/// <param name="result">Result of starting photo mode.</param>
private void OnPhotoModeStarted(PhotoCapture.PhotoCaptureResult result)
{
    Debug.Log("entered On Photo mode started");
    if (result.success)
    {
        Debug.Log("OnPhotoModeStarted = " + result.success);
        // Capture a frame; processing continues in OnCapturedPhotoToMemory.
        photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
    }
    else
    {
        Debug.LogError("Unable to start photo mode!");
    }
}
/// <summary>
/// Called when a photo has been captured to memory. Decodes the raw BGRA32 buffer
/// into a list of <see cref="Color"/> values, then releases the camera.
/// The raw data is stored bottom-up, so the buffer is walked from the end toward
/// the start, four bytes (one pixel) at a time.
/// </summary>
/// <param name="result">Result of the capture operation.</param>
/// <param name="photoCaptureFrame">The captured image frame.</param>
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Copy the raw IMFMediaBuffer data into an empty byte list.
        List<byte> rawBytes = new List<byte>();
        photoCaptureFrame.CopyRawImageDataIntoBuffer(rawBytes);

        // BGRA32: four bytes per pixel, normalized to [0, 1] floats.
        int stride = 4;
        float scale = 1.0f / 255.0f;
        List<Color> pixels = new List<Color>();

        // Walk backwards because the raw image rows are flipped.
        for (int last = rawBytes.Count - 1; last >= 0; last -= stride)
        {
            // 'last' points at the alpha byte; blue/green/red precede it in BGRA order.
            float blue = (int)(rawBytes[last - 3]) * scale;
            float green = (int)(rawBytes[last - 2]) * scale;
            float red = (int)(rawBytes[last - 1]) * scale;
            float alpha = (int)(rawBytes[last - 0]) * scale;
            pixels.Add(new Color(red, green, blue, alpha));
        }

        // The pixel list could now feed texture.SetPixels() or further image processing.
    }

    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
/// <summary>
/// Called right after the photo capture process has concluded. Updates the status text
/// on success and always stops photo mode.
/// Fix: the original only called <c>StopPhotoModeAsync</c> inside the success branch,
/// so a failed capture left photo mode running and leaked the capture object.
/// </summary>
/// <param name="result">Result of the disk capture.</param>
void OnCapturedPhotoToDisk(PhotoCapture.PhotoCaptureResult result)
{
    if (result.success)
    {
        FaceRecName.instance.displayText.text = "Picture taken";
    }

    // Release the camera regardless of the capture outcome.
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
/// <summary>
/// Called when a photo was taken. Copies the frame into <c>CurrentPhoto</c> and then
/// deactivates the camera.
/// Fix: the original uploaded the frame without checking <c>result.success</c>; the
/// upload is now guarded so a failed capture does not touch the texture, while the
/// camera is still released on every path.
/// </summary>
/// <param name="result">Result of the capture operation.</param>
/// <param name="photoCaptureFrame">The captured image frame.</param>
private void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // copy to texture
        photoCaptureFrame.UploadImageDataToTexture(CurrentPhoto);
    }

    // decativate camera
    capturer.StopPhotoModeAsync(OnPhotoModeStoppped);
}
/// <summary>
/// Called when a photo has been captured to memory: uploads the frame into
/// <c>targetTexture</c> and then shuts the camera down.
/// Note: <c>result.success</c> is not checked before the upload — TODO confirm
/// failures cannot reach this callback.
/// </summary>
/// <param name="result">Result of the capture operation (currently unused).</param>
/// <param name="photoCaptureFrame">The captured image frame.</param>
private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    // Copy the raw image data into the target texture
    photoCaptureFrame.UploadImageDataToTexture(targetTexture);
    // Deactivate the camera
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
/// <summary>
/// Called when photo mode has stopped: disposes the capture object and kicks off
/// the image analysis coroutine on the VisionManager singleton.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Dispose from the object in memory and request the image analysis
    // to the VisionManager class
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    StartCoroutine(VisionManager.instance.AnalyseLastImageCaptured());
}
/// <summary>
/// Called when a photo has been captured to memory: uploads the frame into
/// <c>targetTexture</c>, hands the texture to <c>mc</c>, and releases the camera.
/// Fix: the original used the frame without checking <c>result.success</c>; the
/// texture work is now guarded, consistent with the other capture callbacks,
/// while the camera is still stopped on every path.
/// </summary>
/// <param name="result">Result of the capture operation.</param>
/// <param name="photoCaptureFrame">The captured image frame.</param>
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);
        mc.GetPhoto(targetTexture);
    }

    // Release the camera regardless of the outcome.
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
/// <summary>
/// Called when photo mode has started: builds a timestamped JPEG path under the
/// persistent data folder and asynchronously captures a photo to that file.
/// Fix: the original attempted the capture even when photo mode had failed to start;
/// an early-return guard on <c>result.success</c> was added.
/// </summary>
/// <param name="result">Result of starting photo mode.</param>
private void OnPhotoModeStarted(PhotoCapture.PhotoCaptureResult result)
{
    // Photo mode did not start — nothing to capture.
    if (!result.success)
    {
        return;
    }

    // Timestamped filename (12-hour clock plus AM/PM designator keeps it unambiguous).
    string file = string.Format(@"Image_{0:yyyy-MM-dd_hh-mm-ss-tt}.jpg", DateTime.Now);
    currentImagePath = System.IO.Path.Combine(Application.persistentDataPath, file);
    m_PhotoCapture.TakePhotoAsync(currentImagePath, PhotoCaptureFileOutputFormat.JPG, OnCapturedPhotoToDisk);
}
/// <summary>
/// Registers the full execution of the Photo Capture. If successful, it will begin
/// the Image Analysis process. (Doc comment moved above the signature — it was
/// misplaced between the parameter list and the body.)
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Release the photo capture resource.
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    // Request image capture analysis
    StartCoroutine(FaceAnalysis.Instance.DetectFacesFromImage());
}
/// <summary>
/// Called when photo mode has started: takes a photo if startup succeeded.
/// Fix: the original checked only that <c>photoCapture</c> was non-null and ignored
/// <c>result.success</c>, so a failed mode start would still attempt a capture.
/// </summary>
/// <param name="result">Result of starting photo mode.</param>
void OnPhotoModeStarted(PhotoCapture.PhotoCaptureResult result)
{
    if (result.success && photoCapture != null)
    {
        //Take a picture
        photoCapture.TakePhotoAsync(OnCapturePhotoToMemory);
    }
}
/// <summary>
/// Called when photo mode has stopped: disposes the capture object and clears the
/// "taking" flag so another capture can begin.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Shutdown the photo capture resource
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    // Allow the next capture to start.
    taking = false;
}
/// <summary>
/// Called when photo mode has stopped: disposes the capture object and logs where
/// captured images were saved.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Release the capture resource.
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    Debug.Log("Captured images have been saved at the following path.");
    Debug.Log(Application.persistentDataPath);
}
/// <summary>
/// Called when photo mode has stopped: releases the capture object and clears the
/// capturing flag. (Original note: when capture ends, capturingPhoto is false and
/// capturingSucceed is true.)
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    photoCaptureObj.Dispose();
    photoCaptureObj = null;
    // Allow the next capture to begin.
    capturingPhoto = false;
    UnityEngine.Debug.Log("Stopped Photo Mode Succeed!");
}
/// <summary>
/// Called when photo mode has stopped: disposes the capture object, marks the
/// camera as not ready, and updates the on-screen status label.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
private void OnPhotoModeStopped(PhotoCapture.PhotoCaptureResult result)
{
    capture.Dispose();
    capture = null;
    isReady = false;
    Info.text = "Camera off";
}
/// <summary>
/// Registers the full execution of the Photo Capture. If successful, it will begin
/// the Image Analysis process.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Release the photo capture resource.
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    // Request image capture analysis
    StartCoroutine(FaceAnalysis.Instance.DetectFacesFromImage());
}
/// <summary>
/// Called when photo mode has stopped: disposes the capture object, marks the
/// camera as not ready, and shows a status message.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
private void OnPhotoModeStopped(PhotoCapture.PhotoCaptureResult result)
{
    _capture.Dispose();
    _capture = null;
    _isCameraReady = false;
    SetStatus("Camera off");
}
/// <summary>
/// Called after the photo has been written to disk: shows the saved path on success
/// and always stops photo mode.
/// Fix: the original only stopped photo mode inside the success branch, so a failed
/// capture left the camera running and leaked the capture object.
/// </summary>
/// <param name="result">Result of the disk capture.</param>
private void OnCapturedToDisk(PhotoCapture.PhotoCaptureResult result)
{
    if (result.success)
    {
        ShowText(string.Format("Photo saved to {0}", _filePath));
    }

    // Release the camera regardless of the capture outcome.
    _capturedPhotoObject.StopPhotoModeAsync(OnPhotoModeStopped);
}
/// <summary>
/// Called when photo mode has stopped: disposes the capture object and logs completion.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Release the capture resource.
    photoCaptureObj.Dispose();
    photoCaptureObj = null;
    Debug.Log("Stopped Photo Mode Succeed!");
}
/// <summary>
/// Called when photo mode has stopped: disposes the capture object, resets the
/// save flag, and re-enables the "take photo" action.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
private void onPhotoModeStoppedCallback(PhotoCapture.PhotoCaptureResult result)
{
    photoCapture.Dispose();
    photoCapture = null;
    canSave = false;
    // Presumably a reactive/bindable flag that unlocks the capture UI — confirm its type.
    canTakePhoto.Value = true;
}
/// <summary>
/// Verifies a <see cref="PhotoCapture.PhotoCaptureResult"/>, logging an error when
/// the operation did not succeed.
/// </summary>
/// <param name="result">The capture result to inspect.</param>
/// <returns><c>true</c> when the result reports success; otherwise <c>false</c>.</returns>
private bool VerifyPhotoResult(PhotoCapture.PhotoCaptureResult result)
{
    // Guard clause: surface failures in the log before reporting them to the caller.
    if (!result.success)
    {
        Debug.LogError("- VerifyPhotoResult - failed");
        return false;
    }

    return true;
}
/// <summary>
/// Called when a photo has been captured to memory. Offloads decoding of the raw
/// BGRA32 buffer to a background task and feeds the resulting Color32 array to the
/// QR-code scanner at the highest supported camera resolution.
/// Fixes: removed a block of commented-out dead code; removed the redundant
/// <c>catch (Exception) { throw; }</c> (a bare rethrow with no handling is a no-op —
/// the finally block runs either way); replaced <c>Task.Factory.StartNew</c> with
/// <c>Task.Run</c>, the recommended API for simple thread-pool offload.
/// NOTE(review): <c>newFrame</c> is assigned the frame *after* it has been disposed
/// by the using block — confirm downstream code only uses it as a marker.
/// </summary>
/// <param name="result">Result of the capture operation.</param>
/// <param name="photoCaptureFrame">The captured image frame.</param>
private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    if (result.success)
    {
        // Highest-pixel-count resolution supported by the camera.
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();

        // Decode and scan off the main thread to keep the frame rate smooth.
        Task.Run(() =>
        {
            var byteList = new List<Byte>();
            using (photoCaptureFrame)
            {
                photoCaptureFrame.CopyRawImageDataIntoBuffer(byteList);
            }
            Debug.Log("Photo taken");

            // BGRA32: repack each 4-byte group into RGBA Color32 values.
            var targetList = new List<Color32>();
            for (int i = 0; i < byteList.Count; i = i + 4)
            {
                var r = byteList[i + 2];
                var g = byteList[i + 1];
                var b = byteList[i];
                var a = byteList[i + 3];
                targetList.Add(new Color32(r, g, b, a));
            }

            var imageData = targetList.ToArray();
            try
            {
                QRCodeReader.Instance.ScanCode(imageData, cameraResolution.width, cameraResolution.height);
            }
            finally
            {
                // Record the frame even if scanning throws.
                newFrame = photoCaptureFrame;
            }
        });
    }
}
/// <summary>
/// Called when photo mode has stopped: releases the capture object, then restarts
/// the Vuforia object tracker and camera that were paused for the still capture.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Shutdown the photo capture resource
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    // Hand the camera back to Vuforia tracking.
    TrackerManager.Instance.GetTracker<ObjectTracker>().Start();
    CameraDevice.Instance.Start();
}
/// <summary>
/// Called when photo mode has stopped: shuts down the capture resource, clears the
/// in-progress flag, and updates the debug label. (The label text itself is a
/// user-facing Japanese string, "photo capture finished", and is left unchanged.)
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
private void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    // Shut down the photo capture resource.
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    // Allow the next capture to begin.
    photoTaking = false;
    debug.text = "写真撮影終了";
}
// When capture finishes, trigger AnalyseLastImageCaptured() to analyze the newest photo.
/// <summary>
/// Called when photo mode has stopped: releases the capture object and starts the
/// Custom Vision analysis coroutine on the most recently saved image.
/// </summary>
/// <param name="result">Result of stopping photo mode (unused).</param>
void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
{
    Debug.Log("Stopped Photo Mode...");
    photoCaptureObject.Dispose();
    photoCaptureObject = null;
    StartCoroutine(CustomVisionAnalyser.Instance.AnalyseLastImageCaptured(filePath));
}