/// <summary>
/// Called each time the Vuforia state is updated. Samples the current camera
/// frame, estimates its average brightness, and drives the scene light's
/// intensity, the ambient light, and the color temperature from it so that
/// virtual content roughly matches the real-world lighting.
/// </summary>
private void OnTrackablesUpdated()
{
    // Bail out unless the camera pixel format was registered and image
    // access is enabled (both flags are set elsewhere in this component).
    if (!mFormatRegistered)
    {
        return;
    }
    if (!mAccessCameraImage)
    {
        return;
    }

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image == null)
    {
        return;
    }

    byte[] pixels = image.Pixels;
    if (pixels != null && pixels.Length > 0)
    {
        // Sum per-pixel luminance with the Rec. 601 weights
        // (0.299 R + 0.587 G + 0.114 B). The step of 4 assumes a
        // 4-bytes-per-pixel format (e.g. RGBA8888); the 4th byte is skipped.
        // BUG FIX: the bound guards p + 2 so a buffer whose length is not a
        // multiple of 4 (e.g. RGB888) can no longer index out of range; for
        // 4-byte formats the iteration is identical to the original.
        double totalLuminance = 0.0;
        for (int p = 0; p + 2 < pixels.Length; p += 4)
        {
            totalLuminance += pixels[p] * 0.299 + pixels[p + 1] * 0.587 + pixels[p + 2] * 0.114;
        }

        // NOTE(review): dividing by the BYTE count (not the pixel count)
        // yields 1/4 of the true average luminance; the hand-tuned constants
        // below were calibrated against that, so it is deliberately kept.
        totalLuminance /= (pixels.Length);

        // Map the averaged luminance into 0..1 for a grayscale light color
        // (0.0255 is an empirical calibration constant).
        ligtColorNum = (float)totalLuminance * 0.0255f;
        lightColor = new Color(ligtColorNum, ligtColorNum, ligtColorNum, 1.0f);

        // Normalize to 0..1 and apply the user-tunable intensity modifier,
        // then drive the affected light.
        totalLuminance /= 255.0;
        totalLuminance *= intesityModifier;
        m_LightToEffect.intensity = (float)totalLuminance;

        // Ambient light is the key part: scene lights alone never make a
        // room go fully dark when the real lights are switched off, so the
        // ambient intensity and color are matched to the camera as well.
        RenderSettings.ambientIntensity = m_LightToEffect.intensity;
        RenderSettings.ambientLight = lightColor;

        // Scale the light's color temperature from the same luminance
        // estimate via the user-tunable temperature modifier.
        colorTemperature = (float?)(totalLuminance * temperatureModifier);
        m_LightToEffect.colorTemperature = (float)colorTemperature;
    }
}
/// <summary>
/// Called each time the Vuforia state is updated. Grabs the current camera
/// frame, logs its format/size/stride, uploads the raw pixel bytes into the
/// reusable texture, and displays it through the RawImage.
/// </summary>
void OnTrackablesUpdated()
{
    // Nothing to do until the pixel format is registered and access enabled.
    if (!mFormatRegistered || !mAccessCameraImage)
    {
        return;
    }

    Vuforia.Image frame = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (frame == null)
    {
        return;
    }

    Debug.Log(
        "\nImage Format: " + frame.PixelFormat +
        "\nImage Size: " + frame.Width + "x" + frame.Height +
        "\nBuffer Size: " + frame.BufferWidth + "x" + frame.BufferHeight +
        "\nImage Stride: " + frame.Stride + "\n"
    );

    byte[] raw = frame.Pixels;
    if (raw == null || raw.Length == 0)
    {
        return;
    }

    // Resize the reusable texture to the frame dimensions, upload the raw
    // bytes, and point both the RawImage and its material at it.
    texture.Resize(frame.Width, frame.Height);
    texture.LoadRawTextureData(raw);
    texture.Apply();
    rawImage.texture = texture;
    rawImage.material.mainTexture = texture;
}
/// <summary>
/// Per-frame tick: roughly every 2 seconds, grabs the current Vuforia camera
/// frame, copies it into a texture, computes its luminance, and publishes the
/// value through m_LuminanceValue (also mirrored to the debug overlay).
/// </summary>
private void Update()
{
    // Nothing useful can happen until the pixel format registration
    // succeeded; keep logging so the failure is visible on-device.
    if (!m_formatRegistered)
    {
        Debugger.Instance.LogLine("Did not register");
        return;
    }

    // Throttle: recompute the luminance at most once every 2 seconds.
    if (Time.time - m_lastCalcTime < 2)
    {
        return;
    }

    Debugger.Instance.Log(Time.time.ToString("0.000"));

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(m_PixelFormat);
    if (image == null)
    {
        // BUG FIX: the original logged this failure but then dereferenced
        // `image` anyway, throwing a NullReferenceException. Skip this pass
        // instead; it will retry on a later Update.
        Debugger.Instance.Log(" Didn't get the image object ");
        return;
    }
    Debugger.Instance.Log(" Got the image object ");

    // NOTE(review): a fresh Texture2D is allocated on every pass and the
    // previous one is never destroyed — Unity textures wrap native memory,
    // so this leaks until the scene unloads. Consider reusing one texture.
    var cameraTexture = new Texture2D(0, 0);
    image.CopyToTexture(cameraTexture);

    if (m_cameraImage != null)
    {
        m_cameraImage.texture = cameraTexture;
    }

    var luminance = GetTextureLuminance(cameraTexture);
    m_LuminanceValue.Value = luminance;
    m_lastCalcTime = Time.time;
    Debugger.Instance.LogLine(luminance.ToString("0.00"));
}
/// <summary>
/// Called each time the Vuforia state is updated. When a capture has been
/// requested (capturing == true), grabs the current camera frame, binarizes
/// it with OpenCV (grayscale + Otsu threshold), and hands the result to
/// Tesseract for OCR. One-shot per request.
/// </summary>
void OnTrackablesUpdated()
{
    // Only process a frame when a capture was requested and camera image
    // access is fully set up.
    if (!capturing || !mFormatRegistered || !mAccessCameraImage)
    {
        return;
    }

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image == null)
    {
        return;
    }

    // Wrap the raw camera bytes in a Mat with the matching channel count.
    // BUG FIX: the original fell through with a null/stale `inputMat` when
    // the format was neither GRAYSCALE nor RGB888 and crashed on put();
    // unsupported formats are now skipped explicitly.
    if (mPixelFormat == Image.PIXEL_FORMAT.GRAYSCALE)
    {
        inputMat = new Mat(image.Height, image.Width, CvType.CV_8UC1);
    }
    else if (mPixelFormat == Image.PIXEL_FORMAT.RGB888)
    {
        inputMat = new Mat(image.Height, image.Width, CvType.CV_8UC3);
    }
    else
    {
        Debug.LogWarning("Unsupported pixel format: " + mPixelFormat);
        return;
    }

    inputMat.put(0, 0, image.Pixels);

    // BUG FIX: cvtColor(COLOR_BGR2GRAY) requires a 3-channel input and
    // throws on an already single-channel Mat, so only the RGB path
    // converts to grayscale.
    if (mPixelFormat == Image.PIXEL_FORMAT.RGB888)
    {
        Imgproc.cvtColor(inputMat, inputMat, Imgproc.COLOR_BGR2GRAY);
    }

    // Otsu picks the binarization threshold automatically; the explicit 0
    // threshold is ignored when THRESH_OTSU is set.
    Imgproc.threshold(inputMat, inputMat, 0, 255, Imgproc.THRESH_OTSU);

    // Lazily create the output texture once; it is reused on later captures.
    if (outputTexture == null)
    {
        outputTexture = new Texture2D(inputMat.cols(), inputMat.rows(), TextureFormat.RGB24, false);
    }
    Utils.matToTexture2D(inputMat, outputTexture);

    mainCamera.orthographicSize = image.Height / 2;

    // Tesseract expects the frame rotated upright; rotateTexture handles it.
    TesseractDemoScript.instance.SendToTesseract(TesseractDemoScript.instance.rotateTexture(outputTexture, true));

    // One-shot: require a new capture request before processing again.
    capturing = false;
}
/// <summary>
/// Coroutine: captures the current Vuforia camera frame, writes it to a
/// temporary JPEG with focal-length/model EXIF tags (Android only), uploads
/// it to the fine-localization server, and parses the returned pose
/// (position / direction / rotation) into the corresponding string fields,
/// then applies it via ReadPosition().
/// </summary>
public IEnumerator TakePhoto()
{
    // Wait so the frame is fully rendered before grabbing the image.
    yield return(new WaitForEndOfFrame());

    count++;

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image == null)
    {
        Debug.Log("No image!");
        yield break;
    }

    // Read the camera focal length and express it as a rational "<n>/100"
    // string, the form the EXIF FocalLength tag expects.
    string FocalLength = "";
    Vuforia.CameraDevice.Instance.GetField("focal-length", out FocalLength);
    // BUG FIX: parse with the invariant culture — on devices whose locale
    // uses ',' as the decimal separator, Convert.ToDecimal(string) would
    // throw or misparse the Vuforia-supplied dotted value.
    decimal fl = System.Convert.ToDecimal(FocalLength, System.Globalization.CultureInfo.InvariantCulture);
    fl = fl * 100;
    int Newfl = System.Convert.ToInt32(fl);
    FocalLength = Newfl.ToString() + "/100";

    string Model = SystemInfo.deviceModel;

    // Copy the camera image into a texture, flip it upright, and JPEG-encode.
    Texture2D photo = new Texture2D(image.Width, image.Height);
    image.CopyToTexture(photo);
    photo.Apply();
    Texture2D flip = FlipTexture(photo);
    // NOTE(review): Compress() converts the texture to a GPU-compressed
    // format before EncodeToJPG, which Unity documents as needing an
    // uncompressed format — confirm this works on the target platform.
    flip.Compress(false);
    byte[] bytes = flip.EncodeToJPG();

    // Timestamp-based filename, e.g. "2018419_153027.jpg".
    System.DateTime now = System.DateTime.Now;
    string filename = string.Format("{0}{1}{2}_{3}{4}{5}.jpg", now.Year, now.Month, now.Day, now.Hour, now.Minute, now.Second);
    // BUG FIX: the original concatenated persistentDataPath + filename with
    // no separator, producing a wrong sibling path such as
    // ".../files20180101_000000.jpg" outside the intended directory.
    string path = System.IO.Path.Combine(Application.persistentDataPath, filename);
    System.IO.File.WriteAllBytes(path, bytes);

    // Stamp EXIF metadata through the Android ExifInterface (Android-only).
    AndroidJavaObject ex = new AndroidJavaObject("android.media.ExifInterface", path);
    ex.Call("setAttribute", "FocalLength", FocalLength);
    ex.Call("setAttribute", "Model", Model);
    ex.Call("saveAttributes");

    // Re-read the tagged file and POST it to the localization endpoint; the
    // temp file is deleted as soon as its bytes are captured in the form.
    byte[] imageData = System.IO.File.ReadAllBytes(path);
    WWWForm locationForm = new WWWForm();
    locationForm.AddField("buildingID", "1");
    locationForm.AddBinaryData("image", imageData);
    locationForm.AddField("useVise", "true");
    UnityWebRequest locationWww = UnityWebRequest.Post("http://crowdsensing.cs.hut.fi:5004/location/fine", locationForm);
    System.IO.File.Delete(path);
    yield return(locationWww.SendWebRequest());

    if (locationWww.isNetworkError || locationWww.isHttpError)
    {
        GameObject.Find("Canvas/Panel/Text").GetComponent <Text>().text = "Error: " + locationWww.downloadHandler.text;
    }
    else
    {
        // The response is parsed positionally: split on ':' and ',' and read
        // fixed indices, trimming the trailing bracket from the last element
        // of each triple. NOTE(review): this is tightly coupled to the exact
        // server response layout — any format change breaks the indices.
        string LocationText = locationWww.downloadHandler.text;
        string[] LocationInfomation = LocationText.Split(new char[2] { ':', ',' });
        PosX = LocationInfomation[2];
        PosY = LocationInfomation[4];
        PosZ = LocationInfomation[6].Substring(0, LocationInfomation[6].Length - 1);
        DirX = LocationInfomation[9];
        DirY = LocationInfomation[11];
        DirZ = LocationInfomation[13].Substring(0, LocationInfomation[13].Length - 1);
        RotX = LocationInfomation[16];
        RotY = LocationInfomation[18];
        RotZ = LocationInfomation[20];
        RotW = LocationInfomation[22].Substring(0, LocationInfomation[22].Length - 1);
        ReadPosition();
    }
}