/// <summary>
/// Grabs the current Vuforia camera frame and saves two screenshots:
/// one unmodified ("HumanVision") and one with a color-blindness filter
/// applied ("DoggyVision").
/// </summary>
void takeScreenshots()
{
    Vuforia.Image cameraImage = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (cameraImage == null)
    {
        Debug.Log("pixel format is not available yet");
        return;
    }

    // Fix: Texture2D takes (width, height); the original passed them swapped,
    // which garbles the frame copied in by CopyToTexture.
    Texture2D texture2DHuman = new Texture2D(cameraImage.Width, cameraImage.Height);
    cameraImage.CopyToTexture(texture2DHuman);
    ScreenshotManager.SaveImage(flipTexture(texture2DHuman), "HumanVision");

    Texture2D texture2DDoggy = new Texture2D(cameraImage.Width, cameraImage.Height);
    cameraImage.CopyToTexture(texture2DDoggy);
    ScreenshotManager.SaveImage(addColorBlindnessToTexture(flipTexture(texture2DDoggy)), "DoggyVision");
}
/// <summary>
/// Copies the cached Vuforia camera frame into a fresh Texture2D and returns it.
/// </summary>
public Texture2D GetImage()
{
    var frameTexture = new Texture2D(image.Width, image.Height);
    image.CopyToTexture(frameTexture);
    return frameTexture;
}
/*
 * Captures the current Vuforia camera frame and returns it cropped to a
 * centered square of capturedImageWidth pixels. Returns null while no
 * valid frame (or no pixel data) is available yet.
 */
private Texture2D TakeTextureSnap()
{
    Vuforia.Image frame = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (frame == null || !frame.IsValid())
    {
        return null;
    }

    byte[] frameBytes = frame.Pixels;
    if (frameBytes == null || frameBytes.Length <= 0)
    {
        return null;
    }

    // Scratch texture that receives the raw camera pixels.
    var scratch = new Texture2D(frame.Width, frame.Height, TextureFormat.RGB24, false);
    frame.CopyToTexture(scratch);

    /*
     * TODO: Change captureImageWidth to be proportional to Screen.width
     */
    return TextureTools.CropWithRect(
        scratch,
        new Rect(0, 0, capturedImageWidth, capturedImageWidth),
        TextureTools.RectOptions.Center,
        0, 0);
}
/// <summary>
/// Called each time the Vuforia state is updated. Saves up to three
/// vertically-flipped camera frames as PNG files under savePath.
/// </summary>
private void OnTrackablesUpdated()
{
    if (!mFormatRegistered || !mAccessCameraImage)
    {
        return;
    }

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image == null || captureCounter >= 3)
    {
        return;
    }

    Texture2D snap = new Texture2D(image.Width, image.Height);
    image.CopyToTexture(snap);
    snap.Apply();

    // Flip vertically by swapping whole rows with GetPixels32/Array.Copy;
    // this produces the same image as the original per-pixel
    // GetPixel/SetPixel loop but is far faster.
    int w = snap.width;
    int h = snap.height;
    Color32[] src = snap.GetPixels32();
    Color32[] dst = new Color32[src.Length];
    for (int row = 0; row < h; row++)
    {
        System.Array.Copy(src, row * w, dst, (h - row - 1) * w, w);
    }

    Texture2D flipped = new Texture2D(w, h);
    flipped.SetPixels32(dst);
    flipped.Apply();

    System.IO.File.WriteAllBytes(savePath + captureCounter.ToString() + ".png", flipped.EncodeToPNG());
    ++captureCounter;
}
/// <summary>
/// Captures the raw grayscale Vuforia camera frame and saves it as a
/// timestamped JPG under the AR_Screen directory, reporting the result
/// through savedText.
/// </summary>
private void doRawShot()
{
    waitScreen = false;

    int width = Screen.width;
    int height = Screen.height;
    Texture2D tex = new Texture2D(width, height); //, TextureFormat.RGB565, false);

    vImg = CameraDevice.Instance.GetCameraImage(Vuforia.Image.PIXEL_FORMAT.GRAYSCALE);
    // Fix: GetCameraImage can return null before the camera is ready;
    // the original dereferenced vImg unconditionally.
    if (vImg == null)
    {
        Debug.Log("camera image not available yet");
        return;
    }
    vImg.CopyToTexture(tex);
    tex.Apply();

    DateTime dt = DateTime.Now;
    string path;
    string imgPath;
    if (Application.isEditor)
    {
        path = "/AR_Screen/";
    }
    else
    {
        // On device, step up out of persistentDataPath into DCIM.
        path = "/../../../../DCIM/AR_Screen/";
    }
    string allDirPath = Application.persistentDataPath + path;
    if (!Directory.Exists(allDirPath))
    {
        Directory.CreateDirectory(allDirPath);
    }

    string str = string.Format("_RawScreenshot_{0:D4}{1:D2}{2:D2}_{3:D2}{4:D2}{5:D2}.jpg",
        dt.Year, dt.Month, dt.Day, dt.Hour, dt.Minute, dt.Second);
    imgPath = allDirPath + str;
    Debug.Log(str);

    byte[] ba = tex.EncodeToJPG();
    File.WriteAllBytes(imgPath, ba);

    if (File.Exists(imgPath))
    {
        Debug.Log("saved " + imgPath);
        savedText.text = "Сохранено : " + imgPath;
    }
    else
    {
        savedText.text = "Не удалось сохранить : " + imgPath;
        Debug.Log("not found " + imgPath);
    }
}
/// <summary>
/// Returns the current camera frame as a 224x224 Color32 array:
/// copy to texture, crop, then bilinear-scale, then read pixels.
/// </summary>
public Color32[] GetImage()
{
    var frameTexture = new Texture2D(image.Width, image.Height);
    image.CopyToTexture(frameTexture);

    var croppedTexture = TextureTools.CropTexture(frameTexture);
    var scaledTexture = TextureTools.scaled(croppedTexture, 224, 224, FilterMode.Bilinear);
    return scaledTexture.GetPixels32();
}
/// <summary>
/// Writes the given Vuforia camera image to persistent storage as a
/// sequentially-numbered PNG and persists the updated counter.
/// </summary>
public void saveImage(Vuforia.Image image)
{
    snap = new Texture2D(image.Width, image.Height);
    image.CopyToTexture(snap);
    snap.Apply();

    string filePath = Application.persistentDataPath + "/image" + num.ToString() + ".png";
    System.IO.File.WriteAllBytes(filePath, snap.EncodeToPNG());

    num++;
    SaveLoad.Save(num);
}
/// <summary>
/// Grabs the current Vuforia camera frame, runs it through the pixel
/// transforms f1 (always) and f2/f3 (portrait only), then sends the
/// resulting texture to the API caller with the stored callback.
/// </summary>
public void CapturePic()
{
    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image != null)
    {
        Texture2D texture2D = new Texture2D(image.Width, image.Height);
        image.CopyToTexture(texture2D);
        // First transform applies in every orientation.
        texture2D.SetPixels32(f1(texture2D.GetPixels32(), image.Width, image.Height));
        if (Screen.orientation == ScreenOrientation.Portrait)
        {
            // NOTE(review): f1 -> f2 -> f3 order matters here. f3's output is
            // written into a texture allocated with width/height swapped,
            // which suggests f3 rotates the pixel data 90 degrees — TODO
            // confirm against the f1/f2/f3 implementations.
            texture2D.SetPixels32(f2(texture2D.GetPixels32(), image.Width, image.Height));
            Color32[] color32s = f3(texture2D.GetPixels32(), image.Width, image.Height);
            texture2D = new Texture2D(image.Height, image.Width);
            texture2D.SetPixels32(color32s);
        }
        System.Action <WWW> action = callback;
        apiCaller.Send(texture2D, action);
    }
}
/// <summary>
/// Coroutine that repeatedly captures the Vuforia camera frame, downscales
/// (and on device rotates) it, and uploads it as a PNG to the companion
/// server until the server reports a found trackable.
/// </summary>
IEnumerator PutCameraImage()
{
    while (!mServerFoundTrackable)
    {
        bool isCameraImageValid = false;
        Vuforia.Image cameraImage = null;
        while (!isCameraImageValid)
        {
            cameraImage = CameraDevice.Instance.GetCameraImage(mPixelFormat);
            // Fix: GetCameraImage can return null before the camera starts;
            // the original dereferenced Width/Height without a null check.
            isCameraImageValid = cameraImage != null && cameraImage.Width > 0 && cameraImage.Height > 0;
            if (!isCameraImageValid)
            {
                yield return new WaitForSeconds(.2f);
            }
        }

        Texture2D cameraTexture = new Texture2D(cameraImage.Width, cameraImage.Height);
        cameraImage.CopyToTexture(cameraTexture, false);
#if UNITY_EDITOR
        // In the editor, downscale by 3 and clamp to 1440x1080.
        cameraTexture.Point(cameraTexture.width / 3 < 1440 ? cameraTexture.width / 3 : 1440,
            cameraTexture.height / 3 < 1080 ? cameraTexture.height / 3 : 1080);
#else
        // On device, halve the resolution and rotate to match orientation.
        cameraTexture.Point(cameraTexture.width / 2, cameraTexture.height / 2);
        cameraTexture.RotateTexture(true);
#endif
        byte[] data = cameraTexture.EncodeToPNG();
        string url = "http://" + mServerIp + ":" + mServerPort + "/app/put";
        // Dispose the request once the upload finishes (the original leaked it).
        using (UnityWebRequest www = UnityWebRequest.Put(url, data))
        {
            yield return www.SendWebRequest();
            if (www.isNetworkError)
            {
                Debug.LogError("Failed to upload camera image");
            }
            else
            {
                Debug.Log("Successfully uploaded camera image");
            }
        }
        yield return new WaitForSeconds(0.2f);
    }
}
/// <summary>
/// Called each time the Vuforia state is updated. Logs frame metadata,
/// copies the frame into a texture, and displays it through m_Image.
/// </summary>
private void OnTrackablesUpdated()
{
    if (!mFormatRegistered || !mAccessCameraImage)
    {
        return;
    }

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image == null)
    {
        return;
    }

    string imageInfo = mPixelFormat + " image: \n";
    imageInfo += " size: " + image.Width + " x " + image.Height + "\n";
    imageInfo += " bufferSize: " + image.BufferWidth + " x " + image.BufferHeight + "\n";
    imageInfo += " stride: " + image.Stride;
    Debug.Log(imageInfo);

    byte[] pixels = image.Pixels;
    if (pixels != null && pixels.Length > 0)
    {
        Debug.Log("Image pixels: " + pixels[0] + "," + pixels[1] + "," + pixels[2] + ",...");
    }

    // Fix: size the texture to the camera frame instead of 1x1, and call
    // Apply() so the copied pixels are uploaded before the sprite renders.
    Texture2D text = new Texture2D(image.Width, image.Height);
    image.CopyToTexture(text);
    text.Apply();
    Debug.Log("Image pixels color: " + text.GetPixel(1, 2));

    Sprite sprite = Sprite.Create(text, new Rect(0, 0, text.width, text.height), new Vector2(.5f, .5f));
    m_Image.sprite = sprite; // m_Image is a defined reference to an image component
}
/// <summary>
/// Called each time the Vuforia state is updated. Samples the camera image
/// at three tracked points (skin/"peau", muzzle/"museau", belly/"ventre"),
/// shows the sampled colors on UI markers, and tints the 3D model's
/// materials with them unless fixedColor is set.
/// </summary>
void OnTrackablesUpdated()
{
    if (!mFormatRegistered || !mAccessCameraImage)
    {
        return;
    }

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image == null)
    {
        return;
    }

    // Read the pixels of the Vuforia video stream into `screenshot`.
#if UNITY_EDITOR
    screenshot.ReadPixels(new Rect(0, 0, 640, 480), 0, 0);
    screenshot.Apply();
#else
    image.CopyToTexture(screenshot);
    screenshot.Apply();
#endif

    // --- Skin sample point ---
    Vector3 point = plane.transform.position + plane.transform.TransformPoint(peauPositionOffset);
    Vector3 screenPos0 = cam.WorldToScreenPoint(point);
    peau.transform.position = new Vector3(screenPos0.x, screenPos0.y, 0);
    int video_x = (int)(screenPos0.x * xRatio);
    // Screen Y is bottom-up, video Y is top-down, hence the inversion.
    int video_y = (int)((Screen.height - screenPos0.y) * yRatio);
    Color peauPixelColor = screenshot.GetPixel(video_x, video_y);

    // --- Muzzle sample point ---
    point = plane.transform.position + plane.transform.TransformPoint(museauPositionOffset);
    Vector3 screenPos1 = cam.WorldToScreenPoint(point);
    museau.transform.position = new Vector3(screenPos1.x, screenPos1.y, 0);
    video_x = (int)(screenPos1.x * xRatio);
    video_y = (int)((Screen.height - screenPos1.y) * yRatio);
    Color museauPixelColor = screenshot.GetPixel(video_x, video_y);

    // --- Belly sample point ---
    point = plane.transform.position + plane.transform.TransformPoint(ventrePositionOffset);
    Vector3 screenPos2 = cam.WorldToScreenPoint(point);
    ventre.transform.position = new Vector3(screenPos2.x, screenPos2.y, 0);
    video_x = (int)(screenPos2.x * xRatio);
    // Fix: the device (#else) branch used screenPos0.y here (copy-paste
    // error), so the belly color was sampled at the skin point's row.
    video_y = (int)((Screen.height - screenPos2.y) * yRatio);
    Color ventrePixelColor = screenshot.GetPixel(video_x, video_y);

    // Green 10x10 debug marker at the skin sample position.
    video_x = (int)(screenPos0.x * xRatio);
    video_y = (int)((Screen.height - screenPos0.y) * yRatio);
    // Fix: the vertical bound was checked against screenshot.width; it must
    // be screenshot.height or SetPixels can run off the texture.
    if (video_x > 0 && video_x + 10 < screenshot.width && video_y > 0 && video_y + 10 < screenshot.height)
    {
        screenshot.SetPixels(video_x, video_y, 10, 10, greenColorBlock);
        screenshot.Apply();
    }

    // Debug thumbnail of the sampled frame.
    screenshotDisplay.texture = screenshot;

    // Apply the sampled colors to the UI markers.
    UnityEngine.UI.Image img = peau.GetComponentsInChildren <UnityEngine.UI.Image>()[0];
    img.color = peauPixelColor;
    img = museau.GetComponentsInChildren <UnityEngine.UI.Image>()[0];
    img.color = museauPixelColor;
    img = ventre.GetComponentsInChildren <UnityEngine.UI.Image>()[0];
    img.color = ventrePixelColor;

    // Tint the 3D model materials unless the user has fixed the colors.
    // Unity appends " (Instance)" to instantiated material names; note the
    // "ventre" comparison intentionally(?) lacks it — TODO confirm.
    if (!fixedColor)
    {
        for (int i = 0; i < Model3Drenderer.materials.Length; i++)
        {
            if (Model3Drenderer.materials[i].name == "corps (Instance)")
            {
                Model3Drenderer.materials[i].color = peauPixelColor;
            }
            if (Model3Drenderer.materials[i].name == "museau (Instance)")
            {
                Model3Drenderer.materials[i].color = museauPixelColor;
            }
            if (Model3Drenderer.materials[i].name == "ventre")
            {
                Model3Drenderer.materials[i].color = ventrePixelColor;
            }
        }
    }
}
/// <summary>
/// Captures the current camera frame, saves it, rotates it so the tracked
/// QR codes are axis-aligned, adjusts the corner positions to match, and
/// saves a cropped version.
/// </summary>
/// <param name="topLeftPosition">Top-left QR corner in image space.</param>
/// <param name="bottomRightPosition">Bottom-right QR corner in image space.</param>
/// <param name="bottomLeftPosition">Bottom-left QR corner in image space.</param>
/// <exception cref="QRCodeNotVisibleException">A QR code needed for the geometry is not visible.</exception>
private void CaptureImage(Vector3 topLeftPosition, Vector3 bottomRightPosition, Vector3 bottomLeftPosition)
{
    if (!mFormatRegistered || !mAccessCameraImage)
    {
        return;
    }

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image == null)
    {
        return;
    }

    Debug.Log(
        "\nImage format: " + image.PixelFormat +
        "\nImage size: " + image.Width + " by " + image.Height +
        "\nBuffer size: " + image.BufferWidth + " by " + image.BufferHeight +
        "\nImage stride: " + image.Stride + "\n"
        );

    byte[] pixels = image.Pixels;
    if (pixels == null || pixels.Length <= 0)
    {
        return;
    }

    Debug.Log(
        "\nImage pixels: " +
        pixels[0] + ", " +
        pixels[1] + ", " +
        pixels[2] + "... \n"
        );

    Texture2D imageTexture = new Texture2D(image.Width, image.Height);
    image.CopyToTexture(imageTexture);

    // Save the raw image to the computer.
    SaveTextureAsPNG(imageTexture);
    pictureIndex--;

    try
    {
        // Angle between the left QR edge and vertical; rotating by -theta
        // makes the codes axis-aligned.
        double theta = -Math.Atan((double)((bottomLeftPosition.x - topLeftPosition.x) / (topLeftPosition.y - bottomLeftPosition.y)));
        Texture2D rotatedTexture = RotateTextureShear(imageTexture, theta);
        pictureFileName = "/Users/AlisonNoyes/Desktop/vuforiapicrot";
        SaveTextureAsPNG(rotatedTexture);
        pictureIndex--;

        // Adjust the locations of the QR codes (rotated around the image
        // center) in order to allow proper distortion and cropping.
        Vector3 center = new Vector3(imageTexture.width / 2, imageTexture.height / 2, 0);
        bottomLeftPosition = rotateVectorAroundPoint(bottomLeftPosition, center, theta);
        topLeftPosition = rotateVectorAroundPoint(topLeftPosition, center, theta);
        bottomRightPosition = rotateVectorAroundPoint(bottomRightPosition, center, theta);
        Debug.Log("top left corner: " + topLeftPosition.x + " " + topLeftPosition.y);
        Debug.Log("bottom right corner: " + bottomRightPosition.x + " " + bottomRightPosition.y);

        Texture2D croppedTexture = CropTexture(rotatedTexture, topLeftPosition, bottomRightPosition);
        pictureFileName = "/Users/AlisonNoyes/Desktop/vuforiapiccrop";
        SaveTextureAsPNG(croppedTexture);
        pictureFileName = "/Users/AlisonNoyes/Desktop/vuforiapic";
    }
    catch (QRCodeNotVisibleException)
    {
        // Fix: rethrow with "throw;" to preserve the original exception and
        // its stack trace instead of constructing a new, detail-less one.
        throw;
    }
}
/// <summary>
/// Called each time the Vuforia state is updated. Every 60th update, grabs
/// the camera frame, encodes it to PNG, and hands the bytes to face
/// detection.
/// </summary>
private void OnTrackablesUpdated()
{
    incrementer++;
    if (!mFormatRegistered || !mAccessCameraImage || incrementer % 60 != 0)
    {
        return;
    }

    Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
    if (image == null)
    {
        return;
    }

    // Built for debugging; currently never logged.
    string imageInfo = mPixelFormat + " image: \n";
    imageInfo += " size: " + image.Width + " x " + image.Height + "\n";
    imageInfo += " bufferSize: " + image.BufferWidth + " x " + image.BufferHeight + "\n";
    imageInfo += " stride: " + image.Stride;

    // In-place reversal of the raw pixel buffer.
    // NOTE(review): the reversed array is never read afterwards — the PNG
    // below comes from CopyToTexture, not from `pixels`. This only has an
    // effect if image.Pixels aliases Vuforia's internal buffer; confirm and
    // delete if it is dead work. (Removed only the unused WebCamTexture
    // local and dead commented-out code.)
    byte[] pixels = image.Pixels;
    for (int i = 0; i < pixels.Length / 2; i++)
    {
        byte tmp = pixels[i];
        pixels[i] = pixels[pixels.Length - i - 1];
        pixels[pixels.Length - i - 1] = tmp;
    }

    Texture2D texture = new Texture2D(image.Width, image.Height);
    cameraHeight = (float)image.Height;
    cameraWidth = (float)image.Width;
    image.CopyToTexture(texture);
    texture.Apply();

    byte[] pixxels = texture.EncodeToPNG();
    if (pixxels != null && pixxels.Length > 0)
    {
        DetectFaces(pixxels);
    }
}