public unsafe void RequestLocalization()
{
    LocalizationRequest lr = new LocalizationRequest();
    lr.cloud_Ids = cloudMaps;

    // Hard-coded image size and camera intrinsics for this device/webcam path.
    lr.width = 640;
    lr.height = 480;
    lr.channel = 3;
    lr.Camera_fx = 482.33990478515627f;
    lr.Camera_fy = 482.3245544433594f;
    lr.Camera_px = 322.75787353515627f;
    lr.Camera_py = 237.1666717529297f;
    lr.version = m_Sdk.arwaysdkversion;

    // Capture the camera pose at the moment the frame is grabbed.
    Vector3 camPos = ARCamera.transform.position;
    Quaternion camRot = ARCamera.transform.rotation;

    // Encode the current webcam frame as a Base64 JPEG payload.
    m_Texture = Convert_WebCamTexture_To_Texture2d(tex);
    byte[] _bytesjpg = m_Texture.EncodeToJPG();
    lr.image = Convert.ToBase64String(_bytesjpg);
    lr.timestamp = 0.1;

    // Show request counts.
    loc_attempts_txt.GetComponent<TMP_Text>().enabled = true;

    string output = JsonUtility.ToJson(lr);
    StartCoroutine(sendCameraImages(output, camPos, camRot));
}
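The LocalizationRequest type itself is not shown in these snippets. Below is a minimal sketch of what the serializable payload could look like, with field names taken from the usage above; the exact field types (e.g. string[] for cloud_Ids, double for timestamp) are assumptions. JsonUtility.ToJson only serializes public fields on [Serializable] classes, which is why plain fields are used here.

using System;

// Hypothetical reconstruction of the request payload built above.
// Field names mirror the calling code; the types are assumptions.
[Serializable]
public class LocalizationRequest
{
    public string[] cloud_Ids;   // map identifiers to localize against (assumed string IDs)
    public int width;            // image width in pixels
    public int height;           // image height in pixels
    public int channel;          // number of color channels (3 for RGB)
    public float Camera_fx;      // focal length x
    public float Camera_fy;      // focal length y
    public float Camera_px;      // principal point x
    public float Camera_py;      // principal point y
    public string version;       // SDK version string
    public string image;         // Base64-encoded JPEG bytes
    public double timestamp;     // frame timestamp in seconds
}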
public static ExtendedEntry GetProviderAddress(string serviceName)
{
    var r = new Requestor("getter");
    var lr = new LocalizationRequest(serviceName);

    // Send the marshalled request to the naming service and block until it replies.
    var resp = r.deliver_and_wait_feedback(_namingServiceAddress, Marshaller.MarshallObject(lr));
    var reply = (LocalizationReply)Marshaller.UnMarshallObject(resp);

    if (reply.Request_resolved)
    {
        Console.WriteLine("@localized " + serviceName + ": " + reply.Entry_data);
    }
    else
    {
        Console.WriteLine("@cannot localize " + serviceName);
        return null;
    }

    return reply.Entry_data;
}
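In this snippet, LocalizationRequest and LocalizationReply are naming-service lookup messages rather than AR payloads. Neither message type is shown; the sketch below is inferred only from the members the method reads, and the ExtendedEntry stub is a placeholder for whatever provider-address type the project actually defines.

using System;

// Hypothetical message pair for the naming-service lookup above.
// Member names mirror the calling code; everything else is an assumption.
[Serializable]
public class LocalizationRequest
{
    public string ServiceName;
    public LocalizationRequest(string serviceName) { ServiceName = serviceName; }
}

[Serializable]
public class LocalizationReply
{
    public bool Request_resolved;      // true if the naming service resolved the provider
    public ExtendedEntry Entry_data;   // resolved provider entry (address/endpoint)
}

// Placeholder stub; the real ExtendedEntry is defined elsewhere in the project.
[Serializable]
public class ExtendedEntry
{
    public string Address;
}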
/// <summary>
/// Requests the localization.
/// </summary>
public unsafe void RequestLocalization()
{
    XRCameraIntrinsics intr;
    ARCameraManager cameraManager = m_Sdk.cameraManager;
    var cameraSubsystem = cameraManager.subsystem;

    if (cameraSubsystem != null && cameraSubsystem.TryGetIntrinsics(out intr) && cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
    {
        loaderText.text = "Localizing...";
        loaderPanel.SetActive(true);

        // Build the request from the live camera intrinsics and frame size.
        LocalizationRequest lr = new LocalizationRequest();
        lr.cloud_Ids = cloudMaps;
        lr.width = image.width;
        lr.height = image.height;
        lr.channel = 3;
        lr.Camera_fx = intr.focalLength.x;
        lr.Camera_fy = intr.focalLength.y;
        lr.Camera_px = intr.principalPoint.x;
        lr.Camera_py = intr.principalPoint.y;
        lr.version = m_Sdk.arwaysdkversion;

        // Capture the camera pose at the moment the frame is acquired.
        Vector3 camPos = ARCamera.transform.position;
        Quaternion camRot = ARCamera.transform.rotation;

        var format = TextureFormat.RGB24;
        if (m_Texture == null || m_Texture.width != image.width || m_Texture.height != image.height)
        {
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Convert the image to the target format, flipping it across the Y axis.
        // We could also request a sub-rectangle, but here we convert the full image.
        var conversionParams = new XRCpuImage.ConversionParams(image, format, XRCpuImage.Transformation.MirrorX);

        // Texture2D lets us write directly to the raw texture data,
        // so the conversion happens in place without extra copies.
        var rawTextureData = m_Texture.GetRawTextureData<byte>();
        try
        {
            image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
        }
        finally
        {
            // Dispose of the XRCpuImage once we're finished with it
            // to avoid leaking native resources.
            image.Dispose();
        }

        // Apply the updated texture data to our texture.
        m_Texture.Apply();

        byte[] _bytesjpg = m_Texture.EncodeToJPG();
        lr.image = Convert.ToBase64String(_bytesjpg);
        lr.timestamp = image.timestamp;

        // Show request counts.
        loc_attempts_txt.GetComponent<TMP_Text>().enabled = true;

        string output = JsonUtility.ToJson(lr);
        StartCoroutine(sendCameraImages(output, camPos, camRot));
    }
}
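Both AR variants hand the JSON payload to a sendCameraImages coroutine that is not shown. The sketch below is a hypothetical implementation that posts the JSON with UnityWebRequest; the endpoint URL, headers, and response handling are placeholders, not the SDK's actual call.

using System.Collections;
using UnityEngine;
using UnityEngine.Networking;

// Hypothetical sketch of the sendCameraImages coroutine started above.
// The URL and response handling are placeholders.
IEnumerator sendCameraImages(string jsonBody, Vector3 camPos, Quaternion camRot)
{
    const string url = "https://example.com/localize"; // placeholder endpoint

    using (UnityWebRequest request = UnityWebRequest.Put(url, jsonBody))
    {
        // UnityWebRequest.Put sends a raw body; override the verb to POST for a JSON API.
        request.method = UnityWebRequest.kHttpVerbPOST;
        request.SetRequestHeader("Content-Type", "application/json");

        yield return request.SendWebRequest();

        if (request.result == UnityWebRequest.Result.Success)
        {
            // The pose captured at request time (camPos/camRot) would be combined
            // with the server's pose response here to anchor content in the scene.
            Debug.Log("Localization response: " + request.downloadHandler.text);
        }
        else
        {
            Debug.LogWarning("Localization request failed: " + request.error);
        }
    }
}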