/// <summary>
/// Captures the latest CPU camera frame, encodes it on a worker thread, writes the
/// encoded bytes to a uniquely-named temp file, and queues an upload job in m_Jobs.
/// Declared async void because it is a fire-and-forget virtual entry point (UI/event
/// driven); exceptions thrown here are unobservable by callers.
/// </summary>
public virtual async void Capture()
{
    // Short delay before grabbing the frame — presumably lets the capture UI settle; TODO confirm.
    await Task.Delay(250);

    m_bCaptureRunning = true;
    float captureStartTime = Time.realtimeSinceStartup;
    float uploadStartTime = Time.realtimeSinceStartup;

    XRCpuImage image;
    ARCameraManager cameraManager = m_Sdk.cameraManager;
    var cameraSubsystem = cameraManager.subsystem;

    if (cameraSubsystem != null && cameraSubsystem.TryAcquireLatestCpuImage(out image))
    {
        // FIX: XRCpuImage wraps a native buffer; it must be disposed even when encoding
        // or file I/O throws. Previously Dispose() was only reached on the happy path,
        // leaking the native image on any exception.
        try
        {
            JobCaptureAsync j = new JobCaptureAsync();
            // Clears bit 28 of the run id — presumably a reserved flag bit; confirm against the run-id encoding.
            j.run = (int)(m_ImageRun & 0xEFFFFFFF);
            j.index = m_ImageIndex++;
            j.anchor = false;

            if (AutomaticCaptureLocationProvider.Instance.gpsOn)
            {
                j.latitude = AutomaticCaptureLocationProvider.Instance.latitude;
                j.longitude = AutomaticCaptureLocationProvider.Instance.longitude;
                j.altitude = AutomaticCaptureLocationProvider.Instance.altitude;
            }
            else
            {
                // No GPS fix available — submit a zeroed location.
                j.latitude = j.longitude = j.altitude = 0.0;
            }

            ARHelper.GetIntrinsics(out j.intrinsics);

            // Convert the Unity camera pose to the server's handedness convention.
            Quaternion rot = m_MainCamera.transform.rotation;
            Vector3 pos = m_MainCamera.transform.position;
            ARHelper.GetRotation(ref rot);
            j.rotation = ARHelper.SwitchHandedness(Matrix4x4.Rotate(rot));
            j.position = ARHelper.SwitchHandedness(pos);

            int width = image.width;
            int height = image.height;
            byte[] pixels;
            int channels = 1;

            if (m_RgbCapture)
            {
                ARHelper.GetPlaneDataRGB(out pixels, image);
                channels = 3;
            }
            else
            {
                // Grayscale capture: single plane, one channel.
                ARHelper.GetPlaneData(out pixels, image);
            }

            // 1024 extra bytes of headroom for the encoder's header/metadata.
            byte[] capture = new byte[channels * width * height + 1024];

            // Core.CaptureImage is CPU-bound native encoding — offload it from the main thread.
            Task<icvCaptureInfo> captureTask = Task.Run(() =>
            {
                return Core.CaptureImage(capture, capture.Length, pixels, width, height, channels);
            });

            await captureTask;

            // Persist the encoded image under a unique name; the upload job streams it from disk.
            string path = string.Format("{0}/{1}", tempImagePath, System.Guid.NewGuid());
            using (BinaryWriter writer = new BinaryWriter(File.OpenWrite(path)))
            {
                writer.Write(capture, 0, captureTask.Result.captureSize);
            }

            j.imagePath = path;
            j.encodedImage = "";

            j.OnStart += () =>
            {
                uploadStartTime = Time.realtimeSinceStartup;
            };
            j.OnResult += (SDKImageResult result) =>
            {
                float et = Time.realtimeSinceStartup - uploadStartTime;
                Debug.Log(string.Format("Image uploaded successfully in {0} seconds", et));
                OnImageUploaded?.Invoke();
            };
            j.Progress.ProgressChanged += (s, progress) =>
            {
                int value = (int)(100f * progress);
                Debug.Log(string.Format("Upload progress: {0}%", value));
            };
            // FIX: the original passed a runtime-built string ("Capture error: " + e) as a
            // format string to string.Format, which throws FormatException whenever the
            // error text contains '{' or '}'. Plain concatenation produces identical output
            // without that hazard.
            j.OnError += (e) =>
            {
                Debug.Log("Capture error: " + e);
            };

            m_Jobs.Add(j);
        }
        finally
        {
            image.Dispose();
        }

        float elapsedTime = Time.realtimeSinceStartup - captureStartTime;
        Debug.Log(string.Format("Capture in {0} seconds", elapsedTime));
    }

    m_bCaptureRunning = false;
}
/// <summary>
/// Mapper-mode capture override: grabs the latest CPU camera frame, encodes it on a
/// worker thread, writes it to a temp file, and starts the upload job while driving
/// the mapping UI progress bar. <paramref name="anchor"/> marks the image as an anchor
/// image for the map. Declared async void because it overrides a fire-and-forget
/// event entry point; exceptions here are unobservable by callers.
/// </summary>
/// <param name="anchor">True to flag this capture as the map's anchor image.</param>
protected override async void Capture(bool anchor)
{
    // Short delay before grabbing the frame — presumably lets the capture UI settle; TODO confirm.
    await Task.Delay(250);

    m_bCaptureRunning = true;
    float captureStartTime = Time.realtimeSinceStartup;
    float uploadStartTime = Time.realtimeSinceStartup;

#if PLATFORM_LUMIN
    XRCameraImage image;
#else
    XRCpuImage image;
#endif
    ARCameraManager cameraManager = m_Sdk.cameraManager;
    var cameraSubsystem = cameraManager.subsystem;

#if PLATFORM_LUMIN
    if (cameraSubsystem != null && cameraSubsystem.TryGetLatestImage(out image))
#else
    if (cameraSubsystem != null && cameraSubsystem.TryAcquireLatestCpuImage(out image))
#endif
    {
        // FIX: the CPU image wraps a native buffer; dispose it even when encoding or
        // file I/O throws. Previously Dispose() was only reached on the happy path,
        // leaking the native image on any exception.
        try
        {
            JobCaptureAsync j = new JobCaptureAsync();
            // Clears bit 28 of the run id — presumably a reserved flag bit; confirm against the run-id encoding.
            j.run = (int)(m_ImageRun & 0xEFFFFFFF);
            j.index = m_ImageIndex++;
            j.anchor = anchor;

            if (mapperSettings.useGps)
            {
                j.latitude = m_Latitude;
                j.longitude = m_Longitude;
                j.altitude = m_Altitude;
            }
            else
            {
                // GPS disabled in mapper settings — submit a zeroed location.
                j.latitude = j.longitude = j.altitude = 0.0;
            }

            // Handedness conversion done inline here (unlike the base capture path, which
            // uses ARHelper.SwitchHandedness): negate z/w of the rotation and z of the position.
            Camera cam = this.mainCamera;
            Quaternion _q = cam.transform.rotation;
            Matrix4x4 rot = Matrix4x4.Rotate(new Quaternion(_q.x, _q.y, -_q.z, -_q.w));
            Vector3 _p = cam.transform.position;
            Vector3 pos = new Vector3(_p.x, _p.y, -_p.z);
            j.rotation = rot;
            j.position = pos;
            ARHelper.GetIntrinsics(out j.intrinsics);

            int width = image.width;
            int height = image.height;
            byte[] pixels;
            int channels = 1;

            if (mapperSettings.captureRgb)
            {
                ARHelper.GetPlaneDataRGB(out pixels, image);
                channels = 3;
            }
            else
            {
                // Grayscale capture: single plane, one channel.
                ARHelper.GetPlaneData(out pixels, image);
            }

            // 1024 extra bytes of headroom for the encoder's header/metadata.
            byte[] capture = new byte[channels * width * height + 1024];

            // Core.CaptureImage is CPU-bound native encoding — offload it from the main thread.
            Task<icvCaptureInfo> captureTask = Task.Run(() =>
            {
                return Core.CaptureImage(capture, capture.Length, pixels, width, height, channels);
            });

            await captureTask;

            // Persist the encoded image under a unique name; the upload job streams it from disk.
            string path = string.Format("{0}/{1}", this.tempImagePath, System.Guid.NewGuid());
            using (BinaryWriter writer = new BinaryWriter(File.OpenWrite(path)))
            {
                writer.Write(capture, 0, captureTask.Result.captureSize);
            }

            j.imagePath = path;
            j.encodedImage = "";

            NotifyIfConnected(captureTask.Result);

            if (m_SessionFirstImage)
            {
                m_SessionFirstImage = false;
            }

            j.OnStart += () =>
            {
                uploadStartTime = Time.realtimeSinceStartup;
                mappingUIManager.SetProgress(0);
                mappingUIManager.ShowProgressBar();
            };
            j.OnResult += (SDKImageResult result) =>
            {
                float et = Time.realtimeSinceStartup - uploadStartTime;
                Debug.Log(string.Format("Image uploaded successfully in {0} seconds", et));
                mappingUIManager.HideProgressBar();
            };
            j.Progress.ProgressChanged += (s, progress) =>
            {
                int value = (int)(100f * progress);
                mappingUIManager.SetProgress(value);
            };
            j.OnError += (e) =>
            {
                mappingUIManager.HideProgressBar();
            };

            // Unlike the base Capture(), this path starts the job immediately and
            // tracks the running task rather than the job object itself.
            m_Jobs.Add(j.RunJobAsync());
        }
        finally
        {
            image.Dispose();
        }

        float elapsedTime = Time.realtimeSinceStartup - captureStartTime;
        Debug.Log(string.Format("Capture in {0} seconds", elapsedTime));
    }

    m_bCaptureRunning = false;

    // Re-enable the capture button now that this capture cycle is complete.
    var captureButton = workspaceManager.captureButton.GetComponent<Button>();
    captureButton.interactable = true;
}