/// <summary>
/// Renders <paramref name="cam"/> into its target texture and writes the
/// frame to <paramref name="path"/> as a PNG.
/// </summary>
/// <param name="cam">Camera with a non-null targetTexture.</param>
/// <param name="path">Destination file path for the PNG.</param>
static void SnapShot(Camera cam, string path)
{
    cam.Render(); // FIX: was called twice; one render is sufficient.

    // FIX: ReadPixels copies from RenderTexture.active, which was never set —
    // the original read whatever RT happened to be active. Make the camera's
    // target active and restore the previous RT afterwards.
    RenderTexture previous = RenderTexture.active;
    RenderTexture.active = cam.targetTexture;

    Texture2D image = new Texture2D(cam.targetTexture.width, cam.targetTexture.height, TextureFormat.RGB24, false);
    image.ReadPixels(new Rect(0, 0, cam.targetTexture.width, cam.targetTexture.height), 0, 0);
    image.Apply();

    RenderTexture.active = previous;

    var bytes = image.EncodeToPNG();
    System.IO.File.WriteAllBytes(path, bytes);
}
/// <summary>
/// Renders a whitelisted set of cameras (the base camera — or Camera.main when
/// null — plus any camera named "SkyBoxCamera") into an off-screen
/// RenderTexture and saves the combined result as a PNG.
/// </summary>
/// <param name="fileName">Destination path for the PNG file.</param>
/// <param name="width">Capture width in pixels.</param>
/// <param name="height">Capture height in pixels.</param>
/// <param name="baseCamera">Camera to capture; defaults to Camera.main when null.</param>
/// <param name="safeMode">When true, each whitelisted camera is cloned and the
/// clone performs the off-screen render, so the live camera is never retargeted.</param>
public static void Capture(string fileName, int width, int height, UnityEngine.Camera baseCamera = null, bool safeMode = false)
{
    // create RenderTexture
    RenderTexture renderTexture = new RenderTexture(width, height, 24, RenderTextureFormat.Default);
    renderTexture.Create();
    RenderTexture currentRT = RenderTexture.active;

    // rendering
    string[] whitelist = new string[] { (baseCamera ?? UnityEngine.Camera.main).name, "SkyBoxCamera" };
    foreach (UnityEngine.Camera c in UnityEngine.Camera.allCameras)
    {
        // Only consider enabled, active, screen-targeting cameras whose name is whitelisted.
        if (Array.Exists(whitelist, i => (i.Equals(c.name))) && c.targetTexture == null && c.enabled && c.gameObject.activeSelf)
        {
            UnityEngine.Camera camera = c;
            if (safeMode)
            {
                // Clone the camera and copy the projection-relevant settings;
                // the clone (not the live camera) does the off-screen render.
                GameObject go = UnityEngine.Object.Instantiate(camera.gameObject);
                camera = go.GetComponent <UnityEngine.Camera>();
                camera.fieldOfView = c.fieldOfView;
                camera.orthographic = c.orthographic;
                camera.nearClipPlane = c.nearClipPlane;
                camera.farClipPlane = c.farClipPlane;
                camera.clearFlags = c.clearFlags;
            }
            RenderTexture currentTargetTexture = camera.targetTexture;
            camera.targetTexture = renderTexture;
            camera.Render();
            camera.targetTexture = currentTargetTexture;
            // Second render with the restored target — presumably to refresh
            // the on-screen image after the off-screen pass; TODO confirm.
            camera.Render();
            if (safeMode)
            {
                GameObject.DestroyImmediate(camera.gameObject);
            }
        }
    }

    // to Texture2D
    Texture2D texture2D = new Texture2D(renderTexture.width, renderTexture.height, TextureFormat.RGB24, false);
    RenderTexture.active = renderTexture;
    texture2D.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
    texture2D.Apply();
    RenderTexture.active = currentRT;
    renderTexture.DiscardContents();
    renderTexture.Release();

    // save PNG
    byte[] bytes = texture2D.EncodeToPNG();
    UnityEngine.Object.Destroy(renderTexture);
    UnityEngine.Object.Destroy(texture2D);
    File.WriteAllBytes(fileName, bytes);
}
/// <summary>
/// Endless capture coroutine: renders the camera, asynchronously reads its
/// target texture back from the GPU, stores the bytes in camData, and
/// optionally saves a raw dump. Paced by UpdatePeriod * adjustCapturingRate.
/// </summary>
private IEnumerator CameraWorker()
{
    var waitForSeconds = new WaitForSeconds(UpdatePeriod * adjustCapturingRate);
    while (true)
    {
        // Enable the camera only for the duration of the render + readback request.
        cam.enabled = true;
        cam.Render();
        var readback = AsyncGPUReadback.Request(cam.targetTexture, 0, readbackDstFormat);
        yield return(new WaitUntil(() => readback.done));
        cam.enabled = false;
        if (readback.hasError)
        {
            // NOTE(review): `continue` skips the waitForSeconds below, so a
            // persistent readback error retries without pacing — confirm intended.
            Debug.LogError("Failed to read GPU texture");
            continue;
        }
        // Debug.Assert(request.done);
        camData.SetTextureData(readback.GetData <byte>());
        if (parameters.save_enabled)
        {
            var saveName = name + "_" + Time.time;
            camData.SaveRawImageData(parameters.save_path, saveName);
            // Debug.LogFormat("{0}|{1} captured", parameters.save_path, saveName);
        }
        yield return(waitForSeconds);
    }
}
/// <summary>
/// Takes a screenshot of the given camera over the given screen region and
/// saves it via CacheFactory. Returns the captured texture.
/// </summary>
/// <param name="camera">Camera to capture.</param>
/// <param name="rect">Screen region to capture.</param>
Texture2D CaptureCamera(Camera camera, Rect rect)
{
    Debug.Log(rect.width + " " + rect.height);

    // Render the camera into a temporary off-screen RenderTexture.
    var renderTex = new RenderTexture((int)rect.width, (int)rect.height, 0);
    camera.targetTexture = renderTex;
    camera.Render();
    // (Rendering additional cameras into renderTex here would composite
    // several cameras into one screenshot.)

    // ReadPixels pulls from RenderTexture.active.
    RenderTexture.active = renderTex;
    var shot = new Texture2D((int)rect.width, (int)rect.height, TextureFormat.RGB24, false);
    shot.ReadPixels(rect, 0, 0);
    shot.Apply();

    // Restore state so the camera renders to the screen again.
    camera.targetTexture = null;
    RenderTexture.active = null; // JC: added to avoid errors
    GameObject.Destroy(renderTex);

    CacheFactory.SaveToPicture(shot, "/ScreenShot.png", CacheFactory.PictureType.JPG);
    return shot;
}
/// <summary>
/// Screenshots the given camera over the given rect and saves the result as
/// Screenshot.png under Application.dataPath. Returns the captured texture.
/// </summary>
Texture2D Capture(Camera camera, Rect rect)
{
    // Render the camera into a temporary off-screen texture.
    var captureRT = new RenderTexture((int)rect.width, (int)rect.height, 0);
    camera.targetTexture = captureRT;
    camera.Render();
    // (Rendering extra cameras into captureRT here would composite them
    // into the same screenshot.)

    // ReadPixels copies from RenderTexture.active.
    RenderTexture.active = captureRT;
    var shot = new Texture2D((int)rect.width, (int)rect.height, TextureFormat.ARGB32, false);
    shot.ReadPixels(rect, 0, 0);
    shot.Apply();

    // Detach everything so the camera renders to the screen again.
    camera.targetTexture = null;
    RenderTexture.active = null; // JC: added to avoid errors
    GameObject.Destroy(captureRT);

    // Encode to PNG and write it next to the project assets.
    byte[] bytes = shot.EncodeToPNG();
    string filename = Application.dataPath + "/Screenshot.png";
    System.IO.File.WriteAllBytes(filename, bytes);
    Debug.Log(string.Format("截屏了一张照片: {0}", filename));

    return shot;
}
// Waits for the end of the current frame, then captures `mCamera` over
// `mRect` and writes the PNG to `mFileName`.
IEnumerator CaptureByCamera(Camera mCamera, Rect mRect, string mFileName)
{
    yield return new WaitForEndOfFrame();

    // Off-screen target for the manual render.
    var target = new RenderTexture((int)mRect.width, (int)mRect.height, 0);
    mCamera.targetTexture = target;
    mCamera.Render();

    // Read the rendered pixels back into a Texture2D.
    RenderTexture.active = target;
    var capture = new Texture2D((int)mRect.width, (int)mRect.height, TextureFormat.RGB24, false);
    capture.ReadPixels(mRect, 0, 0);
    capture.Apply();

    // Release the camera and destroy the render target.
    mCamera.targetTexture = null;
    RenderTexture.active = null;
    GameObject.Destroy(target);

    // Encode and save.
    byte[] bytes = capture.EncodeToPNG();
    System.IO.File.WriteAllBytes(mFileName, bytes);
}
// Called from the unity event thread: renders the currently active camera to
// an off-screen 1280x720 texture and returns the frame as a base64-encoded JPG.
// Returns an empty (status 0) packet when no camera is active.
static private PacketShootReady shoot(PacketHeader packet)
{
    if (activecamera != "")
    {
        int w = 1280;
        int h = 720;
        UnityEngine.GameObject obj = cameras[idnames[activecamera]];
        UnityEngine.Camera camera = obj.GetComponent <UnityEngine.Camera>();
        UnityEngine.Texture2D image = new UnityEngine.Texture2D(w, h, UnityEngine.TextureFormat.RGB24, false);
        UnityEngine.RenderTexture texture = new UnityEngine.RenderTexture(w, h, 0);
        UnityEngine.RenderTexture.active = texture;
        camera.targetTexture = texture;
        camera.Render();
        // ReadPixels copies from RenderTexture.active (set above).
        image.ReadPixels(new UnityEngine.Rect(0, 0, w, h), 0, 0);
        image.Apply();
        byte[] jpg = UnityEngine.ImageConversion.EncodeToJPG(image);
        camera.targetTexture = null;
        UnityEngine.RenderTexture.active = null;
        UnityEngine.Object.Destroy(image);
        // FIX: the RenderTexture was never destroyed, leaking GPU memory on
        // every shot (only the Texture2D was cleaned up).
        UnityEngine.Object.Destroy(texture);
        return(new PacketShootReady(1, Convert.ToBase64String(jpg)));
    }
    return(new PacketShootReady(0, ""));
}
/// <summary>
/// Endless capture coroutine for the camera sensor: enables the cameras,
/// renders, then issues an async GPU readback whose result is delivered to
/// the OnCompleteAsyncReadback callback. Paced by WaitPeriod().
/// </summary>
private IEnumerator CameraWorker()
{
    var waitForSeconds = new WaitForSeconds(WaitPeriod());
    while (true)
    {
        // Enable both camera components just long enough to render.
        universalCamData.enabled = true;
        camSensor.enabled = true;
        // Debug.Log("start render and request ");
        if (camSensor.isActiveAndEnabled)
        {
            camSensor.Render();
        }
        var readback = AsyncGPUReadback.Request(camSensor.targetTexture, 0, readbackDstFormat, OnCompleteAsyncReadback);
        universalCamData.enabled = false;
        camSensor.enabled = false;
        yield return(null);
        // Block until the GPU copy finishes; the data itself is handled in
        // the OnCompleteAsyncReadback callback, not inspected here.
        readback.WaitForCompletion();
        yield return(waitForSeconds);
    }
}
/// <summary>
/// Temporarily activates the capture camera, renders it into an off-screen
/// RenderTexture, and returns the frame as an RGB24 Texture2D.
/// </summary>
public Texture2D ClickScreenShot()
{
    camera.gameObject.SetActive(true);

    // Timestamped file name for the (currently disabled) save-to-disk step.
    string name;
    name = path + System.DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss") + ".png";

    RenderTexture rt = new RenderTexture(resWidth, resHeight, 24);
    camera.targetTexture = rt;
    Texture2D screenShot = new Texture2D(resWidth, resHeight, TextureFormat.RGB24, false);
    camera.Render();
    RenderTexture.active = rt;
    screenShot.ReadPixels(new Rect(0, 0, resWidth, resHeight), 0, 0);
    screenShot.Apply();

    //byte[] bytes = screenShot.EncodeToPNG();
    //File.WriteAllBytes(name, bytes);

    // FIX: restore render state and destroy the RenderTexture — previously it
    // leaked and camera.targetTexture / RenderTexture.active stayed assigned.
    camera.targetTexture = null;
    RenderTexture.active = null;
    UnityEngine.Object.Destroy(rt);

    camera.gameObject.SetActive(false);
    return(screenShot);
}
/// <summary>
/// Regression test: loads a glTF model, renders it with the manifest camera
/// position into a 512x512 texture, and either records the PNG as reference
/// data or compares it pixel-for-pixel against the stored baseline.
/// </summary>
public IEnumerator GLTFScenarios([ValueSource("ModelFilePaths")] string modelPath)
{
    // Load the model and position the main camera from the manifest.
    ActiveGLTFObject = new GameObject();
    GLTFComponent gltfcomponent = ActiveGLTFObject.AddComponent <GLTFComponent>();
    gltfcomponent.GLTFUri = GLTF_ASSETS_PATH + modelPath;
    AssetGenerator.Manifest.Camera cam = cameras[Path.GetFileNameWithoutExtension(modelPath)];
    Camera.main.transform.position = new Vector3(cam.Translation[0], cam.Translation[1], cam.Translation[2]);
    yield return(gltfcomponent.Load());

    //wait one frame for rendering to complete
    yield return(null);

    // Render the scene into an off-screen texture and read it back.
    Camera mainCamera = Camera.main;
    RenderTexture rt = new RenderTexture(512, 512, 24);
    mainCamera.targetTexture = rt;
    Texture2D actualContents = new Texture2D(rt.width, rt.height, TextureFormat.RGB24, false);
    mainCamera.Render();
    RenderTexture.active = rt;
    actualContents.ReadPixels(new Rect(0, 0, 512, 512), 0, 0);
    byte[] pngActualfile = actualContents.EncodeToPNG();

    // FIX: detach the camera and the active RT once the pixels are read —
    // previously the camera kept targeting the leaked rt after the test.
    mainCamera.targetTexture = null;
    RenderTexture.active = null;

    string outputpath = Path.GetDirectoryName(modelPath);
    string outputfullpath = GLTF_SCENARIO_OUTPUT_PATH + outputpath;
    Directory.CreateDirectory(outputfullpath);
    string filename = Path.GetFileNameWithoutExtension(modelPath);
    string expectedFilePath = outputfullpath + "/" + filename + "_EXPECTED.png";
    string actualFilePath = outputfullpath + "/" + filename + "_ACTUAL.png";

    if (GENERATE_REFERENCEDATA)
    {
        // Reference-generation mode: the current render becomes the baseline.
        File.WriteAllBytes(expectedFilePath, pngActualfile);
    }
    else
    {
        if (File.Exists(expectedFilePath))
        {
            byte[] pngActualfileContents = File.ReadAllBytes(expectedFilePath);
            File.WriteAllBytes(actualFilePath, pngActualfile);

            //compare against expected
            Texture2D expectedContents = new Texture2D(rt.width, rt.height, TextureFormat.RGB24, false);
            expectedContents.LoadImage(pngActualfileContents);
            Color[] expectedPixels = expectedContents.GetPixels();
            Color[] actualPixels = actualContents.GetPixels();
            Assert.AreEqual(expectedPixels.Length, actualPixels.Length);
            string errormessage = "\r\nExpectedPath: " + expectedFilePath + "\r\n ActualPath: " + actualFilePath;
            for (int i = 0; i < expectedPixels.Length; i++)
            {
                Assert.AreEqual(expectedPixels[i], actualPixels[i], errormessage);
            }
        }
    }

    // FIX: destroy the RenderTexture (was leaked once per test case).
    UnityEngine.Object.Destroy(rt);
}
// Supersampling pass: re-render the camera into the high-resolution render
// texture, then blit that result down into the destination target.
private void OnRenderImage(RenderTexture source, RenderTexture destination)
{
    var ssTarget = _supersampleRenderTexture;
    cam.targetTexture = ssTarget;
    cam.Render();
    cam.targetTexture = null;
    Graphics.Blit(ssTarget, destination);
}
// Renders `cam` into its own target texture and returns the pixels as a new
// Texture2D. The previously active RenderTexture is restored afterwards.
Texture2D RTImage(Camera cam)
{
    var previous = RenderTexture.active;
    var target = cam.targetTexture;

    // Make the camera's target the active RT so ReadPixels reads from it.
    RenderTexture.active = target;
    cam.Render();

    var shot = new Texture2D(target.width, target.height);
    shot.ReadPixels(new Rect(0, 0, target.width, target.height), 0, 0);
    shot.Apply();

    RenderTexture.active = previous;
    return shot;
}
/*Source: http://raypendergraph.wikidot.com/codesnippet:capturing-a-camera-image-in-unity*/
// Renders the camera and copies width x height pixels from its target
// texture into the reusable `captured` texture, which is returned.
public Texture2D CaptureImage(Camera camera, int width, int height)
{
    camera.Render();

    var target = camera.targetTexture;
    RenderTexture.active = target;
    captured.ReadPixels(new Rect(0, 0, width, height), 0, 0);
    captured.Apply();
    RenderTexture.active = null;

    return captured;
}
// Renders the UI camera once into a freshly created screen-sized
// RenderTexture and returns it. The caller owns the returned texture.
public RenderTexture GetUIRenderTexture()
{
    var target = new RenderTexture(Screen.width, Screen.height, 16, RenderTextureFormat.ARGB32);
    target.Create();

    UIRenderCamera.targetTexture = target;
    UIRenderCamera.Render();
    UIRenderCamera.targetTexture = null;

    return target;
}
// Renders the UI camera once into a new screen-sized RenderTexture and
// returns it; the caller owns the texture's lifetime.
public RenderTexture GetCurrentRt()
{
    var snapshot = new RenderTexture(Screen.width, Screen.height, 16, RenderTextureFormat.ARGB32);
    // Using RenderTexture.GetTemporary(...) here instead would add a pool
    // reference the caller would have to release:
    //RenderTexture rt = RenderTexture.GetTemporary(Screen.width, Screen.height, 16, RenderTextureFormat.ARGB32);

    uiCamera.targetTexture = snapshot;
    uiCamera.Render();
    uiCamera.targetTexture = null;

    return snapshot;
}
// Lua binding for UnityEngine.Camera.Render: invokes Render on the camera
// taken from the Lua stack and pushes `true` as the single return value.
static public int Render(IntPtr l)
{
    try
    {
        var self = (UnityEngine.Camera)checkSelf(l);
        self.Render();
        pushValue(l, true);
        return 1;
    }
    catch (Exception e)
    {
        // Convert the CLR exception into a Lua error.
        return error(l, e);
    }
}
// Lua binding for UnityEngine.Camera.Render: invokes Render on the camera
// taken from the Lua stack; pushes no return values.
static public int Render(IntPtr l)
{
    try
    {
        var self = (UnityEngine.Camera)checkSelf(l);
        self.Render();
        return 0;
    }
    catch (Exception e)
    {
        // Raise the CLR exception as a Lua error.
        LuaDLL.luaL_error(l, e.ToString());
        return 0;
    }
}
// Called from the unity event thread: renders the active camera in depth mode
// and returns a base64 buffer of w*h big-endian float32 depth values (mapped
// onto the camera's near..far range, rows flipped to top-down order).
static private PacketDepthReady depth(PacketHeader packet)
{
    if (activecamera != "")
    {
        int w = 1280;
        int h = 720;
        UnityEngine.GameObject obj = cameras[idnames[activecamera]];
        UnityEngine.Camera camera = obj.GetComponent <UnityEngine.Camera>();
        UnityEngine.Texture2D map = new UnityEngine.Texture2D(w, h, UnityEngine.TextureFormat.RGB24, false);
        UnityEngine.RenderTexture texture = new UnityEngine.RenderTexture(w, h, 0);
        UnityEngine.RenderTexture.active = texture;
        camera.targetTexture = texture;
        // Toggle the companion component's depth flag around the render so the
        // camera outputs encoded depth instead of color for this frame.
        obj.GetComponent <camera>().IsdDepth = true;
        camera.Render();
        obj.GetComponent <camera>().IsdDepth = false;
        map.ReadPixels(new UnityEngine.Rect(0, 0, w, h), 0, 0);
        map.Apply();
        byte[] raw = map.GetRawTextureData();
        byte[] floats = new byte[h * w * 4];
        for (int y = 0; y < h; y++)
        {
            // Texture rows are bottom-up; flip vertically while decoding.
            int y_ = h - y - 1;
            for (int x = 0; x < w; x++)
            {
                // Depth is packed into RGB8 with weights 1/255, 1/255^2, 1/255^3.
                float depth = raw[(y_ * w + x) * 3 + 0] / 255.0f + raw[(y_ * w + x) * 3 + 1] / 65025.0f + raw[(y_ * w + x) * 3 + 2] / 16581375.0f;
                // 0 and 1 are treated as "no depth"; everything else is mapped
                // linearly onto [nearClipPlane, farClipPlane].
                depth = (depth == 0.0f || depth == 1.0f) ? 0.0f : camera.nearClipPlane + (camera.farClipPlane - camera.nearClipPlane) * depth;
                byte[] eb = BitConverter.GetBytes(depth);
                // Emit the float bytes in big-endian order regardless of host endianness.
                floats[(y * w + x) * 4 + 0] = BitConverter.IsLittleEndian ? eb[3] : eb[0];
                floats[(y * w + x) * 4 + 1] = BitConverter.IsLittleEndian ? eb[2] : eb[1];
                floats[(y * w + x) * 4 + 2] = BitConverter.IsLittleEndian ? eb[1] : eb[2];
                floats[(y * w + x) * 4 + 3] = BitConverter.IsLittleEndian ? eb[0] : eb[3];
            }
        }
        camera.targetTexture = null;
        UnityEngine.RenderTexture.active = null;
        // NOTE(review): `texture` (the RenderTexture) is never destroyed here,
        // unlike `map` — looks like a per-call GPU memory leak; verify.
        UnityEngine.Object.Destroy(map);
        return(new PacketDepthReady(1, Convert.ToBase64String(floats)));
    }
    return(new PacketDepthReady(0, ""));
}
// Renders the camera into a temporary 128x128 render texture and returns the
// result as a Texture2D (used for the portrait image).
Texture2D RTImage(Camera cam)
{
    RenderTexture temp = RenderTexture.GetTemporary(128, 128, 16);
    cam.targetTexture = temp;
    cam.Render();

    RenderTexture currentRT = RenderTexture.active;
    RenderTexture.active = temp;
    Texture2D image = new Texture2D(temp.width, temp.height);
    image.ReadPixels(new Rect(0, 0, temp.width, temp.height), 0, 0);
    image.Apply();
    RenderTexture.active = currentRT;

    // FIX: detach the camera and return the temporary RT to the pool —
    // previously GetTemporary was never matched with ReleaseTemporary and the
    // camera kept targeting the leaked texture.
    cam.targetTexture = null;
    RenderTexture.ReleaseTemporary(temp);

    Debug.Log("Sent Image to Portrait");
    return image;
}
/// <summary>
/// Endless capture coroutine: renders the camera, reads its target texture
/// back from the GPU, copies the bytes into the shared image message buffer,
/// and optionally saves a raw dump each cycle. Paced by WaitPeriod().
/// </summary>
private IEnumerator CameraWorker()
{
    var image = imageStamped.Image;
    var waitForSeconds = new WaitForSeconds(WaitPeriod());
    while (true)
    {
        // Enable the camera only long enough to render and issue the readback.
        cam.enabled = true;
        if (cam.isActiveAndEnabled)
        {
            cam.Render();
        }
        var readback = AsyncGPUReadback.Request(cam.targetTexture, 0, readbackDstFormat);
        cam.enabled = false;
        yield return(null);
        readback.WaitForCompletion();
        if (readback.hasError)
        {
            // NOTE(review): `continue` skips waitForSeconds, so errors retry
            // without pacing — confirm intended.
            Debug.LogError("Failed to read GPU texture");
            continue;
        }
        // Debug.Assert(request.done);
        if (readback.done)
        {
            camData.SetTextureBufferData(readback.GetData <byte>());
            // Only publish when the readback size matches the message buffer.
            if (image.Data.Length == camData.GetImageDataLength())
            {
                // Debug.Log(imageStamped.Image.Height + "," + imageStamped.Image.Width);
                image.Data = camData.GetImageData();
                if (GetParameters().save_enabled)
                {
                    var saveName = name + "_" + Time.time;
                    camData.SaveRawImageData(GetParameters().save_path, saveName);
                    // Debug.LogFormat("{0}|{1} captured", GetParameters().save_path, saveName);
                }
            }
        }
        yield return(waitForSeconds);
    }
}
/// <summary>
/// Renders the atlas for one texture set into a temporary RenderTexture using
/// an orthographic camera, and returns the baked Texture2D that the render
/// callback deposited in targTex. Returns null when the baking shader is missing.
/// </summary>
public Texture2D DoRenderAtlas(GameObject gameObject, int width, int height, int padding, Rect[] rss, List<MB3_TextureCombiner.MB_TexSet> textureSetss, int indexOfTexSetToRenders, bool isNormalMap, bool fixOutOfBoundsUVs, MB3_TextureCombiner texCombiner, MB2_LogLevel LOG_LEV)
{
    // Stash the bake parameters in fields for the OnRenderObject callback.
    LOG_LEVEL = LOG_LEV;
    textureSets = textureSetss;
    indexOfTexSetToRender = indexOfTexSetToRenders;
    _padding = padding;
    _isNormalMap = isNormalMap;
    _fixOutOfBoundsUVs = fixOutOfBoundsUVs;
    combiner = texCombiner;
    rs = rss;

    Shader s;
    if (_isNormalMap)
    {
        s = Shader.Find("MeshBaker/NormalMapShader");
    }
    else
    {
        s = Shader.Find("MeshBaker/AlbedoShader");
    }
    if (s == null)
    {
        Debug.LogError("Could not find shader for RenderTexture. Try reimporting mesh baker");
        return null;
    }
    mat = new Material(s);

    _destinationTexture = new RenderTexture(width, height, 24, RenderTextureFormat.ARGB32);
    _destinationTexture.filterMode = FilterMode.Point;

    // Orthographic camera framed so the atlas fills the viewport.
    myCamera = gameObject.GetComponent<Camera>();
    myCamera.orthographic = true;
    myCamera.orthographicSize = height >> 1;
    // FIX: was `width / height` — integer division truncated the aspect ratio
    // (e.g. 0 or 1) for non-square atlases.
    myCamera.aspect = (float)width / height;
    myCamera.targetTexture = _destinationTexture;
    myCamera.clearFlags = CameraClearFlags.Color;

    Transform camTransform = myCamera.GetComponent<Transform>();
    camTransform.localPosition = new Vector3(width / 2.0f, height / 2f, 3);
    camTransform.localRotation = Quaternion.Euler(0, 180, 180);

    _doRenderAtlas = true;
    if (LOG_LEVEL >= MB2_LogLevel.debug)
        Debug.Log(string.Format("Begin Camera.Render destTex w={0} h={1} camPos={2}", width, height, camTransform.localPosition));

    //This triggers the OnRenderObject callback
    myCamera.Render();
    _doRenderAtlas = false;

    MB_Utility.Destroy(mat);
    MB_Utility.Destroy(_destinationTexture);

    if (LOG_LEVEL >= MB2_LogLevel.debug)
        Debug.Log("Finished Camera.Render ");

    // The render callback deposits its result in targTex; hand it off.
    Texture2D tempTex = targTex;
    targTex = null;
    return tempTex;
}
// Captures `cam` restricted to `layerMask` into the shared _result texture
// and returns the PNG bytes. Camera state is restored afterwards.
private byte[] captureCam(Camera cam, int w, int h)
{
    var oldCullMask = cam.cullingMask;
    cam.cullingMask = layerMask;

    RenderTexture rt = RenderTexture.GetTemporary(w, h);
    var oldTarget = cam.targetTexture;
    cam.targetTexture = rt;
    cam.Render();

    var oldActive = RenderTexture.active;
    RenderTexture.active = rt;
    _result.ReadPixels(new Rect(0, 0, w, h), 0, 0, false);

    // FIX: restore the active RT and return the temporary RT to the pool —
    // previously RenderTexture.active stayed pointing at rt and the temporary
    // texture leaked on every capture.
    RenderTexture.active = oldActive;
    cam.targetTexture = oldTarget;
    cam.cullingMask = oldCullMask;
    RenderTexture.ReleaseTemporary(rt);

    return _result.EncodeToPNG();
}
/// <summary>
/// Renders <paramref name="camera"/> at the configured screenshot size and
/// saves the RGB24 capture through SaveTextureAsFile. The camera's previous
/// target texture is restored.
/// </summary>
public static void TakeScreenshot(Camera camera, string folderName, string prefix, string suffix, ScreenshotConfig screenshotConfig)
{
    var scrTexture = new Texture2D(screenshotConfig.Width, screenshotConfig.Height, TextureFormat.RGB24, false);
    var scrRenderTexture = new RenderTexture(scrTexture.width, scrTexture.height, 24);

    var camRenderTexture = camera.targetTexture;
    camera.targetTexture = scrRenderTexture;
    camera.Render();
    camera.targetTexture = camRenderTexture;

    var prevActive = RenderTexture.active;
    RenderTexture.active = scrRenderTexture;
    scrTexture.ReadPixels(new Rect(0, 0, scrTexture.width, scrTexture.height), 0, 0);
    scrTexture.Apply();

    // FIX: restore the previously active RT and destroy the temporary render
    // texture — both were leaked before (active stayed set, RT never freed).
    RenderTexture.active = prevActive;
    UnityEngine.Object.Destroy(scrRenderTexture);

    SaveTextureAsFile(scrTexture, folderName, prefix, suffix, screenshotConfig);
}
// Builds an upscaled snapshot of the "background-1" sprite: assigns the
// sprite, sizes a RenderTexture from the sprite's screen-space bounds (x2 for
// supersampling), frames this camera on the sprite, renders once, and stores
// the capture (cropped 25px on each side) as a new sprite in
// enhancedBackgroundImages. Camera settings are restored afterwards.
void CreateRenderTexture(Sprite sprite)
{
    GameObject.Find("background-1").GetComponent<SpriteRenderer>().sprite = sprite;
    int width = (int)GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.x;
    int height = (int)GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.y;

    // Sprite size converted to screen pixels, then doubled for supersampling.
    Vector3 nsize = Camera.main.WorldToScreenPoint(new Vector3(width, height, 0));
    nsize.x *= 2.0f; //nsize.x *= 1.33f;
    nsize.y *= 2.0f; //nsize.y *= 1.33f;

    tex = new RenderTexture((int)nsize.x, (int)nsize.y, 1);
    tex.useMipMap = false;
    tex.filterMode = FilterMode.Point;
    tex.antiAliasing = 1;
    tex.Create();

    cam = GetComponent<Camera>();
    // Save camera state so it can be restored after the off-screen render.
    float previousOrthoSize = cam.orthographicSize;
    float previousAspect = cam.aspect;
    RenderTexture previousRenderTexture = cam.targetTexture;

    // Destination image is 50px narrower: 25px cropped from each side below.
    Texture2D image = new Texture2D((int)nsize.x - 50, (int)nsize.y);
    cam.targetTexture = tex;

    // Fit the orthographic camera to the sprite's world-space size.
    float scale = 1.0f / (cam.orthographicSize / GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.y / 2.0f);
    cam.orthographicSize = scale;
    cam.aspect = GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.x / GameObject.Find("background-1").GetComponent<Renderer>().bounds.size.y;

    RenderTexture past = RenderTexture.active;
    RenderTexture.active = tex;
    cam.Render();
    image.ReadPixels(new Rect(25, 0, (int)nsize.x - 50, (int)nsize.y), 0, 0);
    image.Apply();

    // Restore camera and active-RT state, then release the GPU texture.
    cam.targetTexture = previousRenderTexture;
    cam.orthographicSize = previousOrthoSize;
    cam.aspect = previousAspect;
    RenderTexture.active = past;
    tex.Release();

    enhancedBackgroundImages.Add(Sprite.Create(image, new Rect(0, 0, image.width, image.height), new Vector2(0.5f, 0.5f)));
}
// Use this for initialization.
// Looks up the main camera by name; in hologram mode it renders the camera
// into a double-height RenderTexture shared by the four "triangle" quads,
// otherwise it resets the camera for normal on-screen rendering.
void Start()
{
    main_camera = GameObject.Find (camera_name).GetComponent<Camera> ();
    if (hologram)
    {
        // One shared texture feeds all four pyramid faces.
        render_texture = new RenderTexture (Constant.WIDTH, Constant.HEIGHT * 2, 24);
        main_camera.targetTexture = render_texture;
        main_camera.Render ();
        RenderTexture.active = render_texture;
        GameObject.Find ("triangle0").GetComponent<Renderer> ().material.mainTexture = render_texture;
        GameObject.Find ("triangle1").GetComponent<Renderer> ().material.mainTexture = render_texture;
        GameObject.Find ("triangle2").GetComponent<Renderer> ().material.mainTexture = render_texture;
        GameObject.Find ("triangle3").GetComponent<Renderer> ().material.mainTexture = render_texture;
    }
    else
    {
        // Normal mode: default depth and identity rotation render to screen.
        main_camera.depth = 0;
        main_camera.transform.rotation = Quaternion.Euler(0, 0, 0);
    }
}
// Coroutine: after the current frame finishes, renders `mCamera` over `mRect`
// off-screen and saves the capture to `mFileName` as a PNG.
private IEnumerator CaptureByCamera(Camera mCamera, Rect mRect, string mFileName)
{
    yield return new WaitForEndOfFrame();

    var rt = new RenderTexture((int)mRect.width, (int)mRect.height, 0);
    mCamera.targetTexture = rt;
    mCamera.Render();

    // Pull the rendered pixels back to the CPU.
    RenderTexture.active = rt;
    var tex = new Texture2D((int)mRect.width, (int)mRect.height, TextureFormat.RGB24, false);
    tex.ReadPixels(mRect, 0, 0);
    tex.Apply();

    // Restore state and free the render target.
    mCamera.targetTexture = null;
    RenderTexture.active = null;
    GameObject.Destroy(rt);

    System.IO.File.WriteAllBytes(mFileName, tex.EncodeToPNG());
}
/// <summary>
/// Renders <paramref name="_camera"/> into a temporary RenderTexture and
/// returns the frame as an RGBA32 Texture2D.
/// </summary>
public static Texture2D TakeScreenshot(UnityEngine.Camera _camera, int _width, int _height)
{
    Rect rect = new Rect(0, 0, _width, _height);
    RenderTexture renderTexture = new RenderTexture(_width, _height, 24);
    Texture2D screenShot = new Texture2D(_width, _height, TextureFormat.RGBA32, false);

    _camera.targetTexture = renderTexture;
    _camera.Render();

    RenderTexture.active = renderTexture;
    screenShot.ReadPixels(rect, 0, 0);
    // FIX: Apply() was missing — without it the pixels read back from the GPU
    // are never uploaded, so the returned texture renders as empty.
    screenShot.Apply();

    _camera.targetTexture = null;
    RenderTexture.active = null;
    Object.Destroy(renderTexture);
    renderTexture = null;

    return(screenShot);
}
// Captures a width x height screenshot from `screenshotCamera` (falling back
// to Camera.main when null) and post-processes it through ChangeScreenshot.
// Returns null for non-positive dimensions.
private Texture2D TakeScreenshot(int width, int height, Camera screenshotCamera)
{
    if (width <= 0 || height <= 0)
    {
        return null;
    }
    if (screenshotCamera == null)
    {
        screenshotCamera = Camera.main;
    }

    var renderTex = new RenderTexture(width, height, 24);
    screenshotCamera.targetTexture = renderTex;
    screenshotCamera.Render();

    RenderTexture.active = renderTex;
    var screenshot = new Texture2D(width, height, TextureFormat.RGB24, false);
    screenshot.ReadPixels(new Rect(0, 0, width, height), 0, 0);
    screenshot.Apply(false);

    // Restore state and free the render target before post-processing.
    screenshotCamera.targetTexture = null;
    RenderTexture.active = null;
    Destroy(renderTex);

    return ChangeScreenshot(screenshot);
}
// Coroutine: at the end of the frame, re-renders the overview camera into its
// own target texture, reads it back, and saves the PNG to `path` + counter.
public IEnumerator TakeScreenShot()
{
    yield return new WaitForEndOfFrame();

    camOV = OVcamera.GetComponent<UnityEngine.Camera>();

    // Make the overview camera's target the active RT and render into it.
    var previousRT = RenderTexture.active;
    RenderTexture.active = camOV.targetTexture;
    camOV.Render();

    var target = camOV.targetTexture;
    var imageOverview = new Texture2D(target.width, target.height, TextureFormat.RGB24, false);
    imageOverview.ReadPixels(new Rect(0, 0, target.width, target.height), 0, 0);
    imageOverview.Apply();

    RenderTexture.active = previousRT;

    // Encode texture into PNG
    byte[] bytes = imageOverview.EncodeToPNG();
    // save in memory
    System.IO.File.WriteAllBytes(path + count + ".png", bytes);
}
// Draws `camera` into `previewRect` during repaint events by temporarily
// overriding the camera's pixel rect, which is restored afterwards.
void DrawCamera(Rect previewRect, Camera camera)
{
    if (Event.current.type != EventType.Repaint)
    {
        return;
    }

    Rect originalRect = camera.pixelRect;
    camera.pixelRect = previewRect;
    camera.Render();
    camera.pixelRect = originalRect;
}
/// <summary>
/// Render brick to texture: builds a throwaway camera + directional light on a
/// dedicated layer, instantiates the brick, frames it by its bounding sphere,
/// renders once, and returns the resulting RenderTexture (caller owns it).
/// </summary>
/// <returns>The rendered preview texture.</returns>
/// <param name="brick">Brick.</param>
/// <param name="size">Texture size (square, in pixels).</param>
public RenderTexture Project(Brick brick, int size)
{
    //create render texture
    RenderTexture texture = new RenderTexture(size, size, 16);

    //create camera
    GameObject cameraObject = new GameObject();
    UnityEngine.Camera camera = cameraObject.AddComponent <UnityEngine.Camera>();
    camera.targetTexture = texture;
    camera.transform.position = cameraPosition;
    camera.clearFlags = CameraClearFlags.Depth;
    camera.allowHDR = true;
    camera.allowMSAA = false;
    // Only render the dedicated preview layer.
    camera.cullingMask = 1 << layer;
    //camera.orthographic = true;
    camera.orthographic = false;
    camera.aspect = 1f;
    camera.fieldOfView = 20;

    //add postprocessing stack to camera
    var posrprocessing = cameraObject.AddComponent <PostProcessingBehaviour>();
    posrprocessing.profile = (PostProcessingProfile)Resources.Load("MeshProjecotorPostProcessingProfile");

    //create lighting
    GameObject lightObject = new GameObject();
    lightObject.transform.rotation = Quaternion.Euler(new Vector3(60, 30, 0));
    lightObject.transform.SetParent(cameraObject.transform);
    lightObject.layer = layer;
    Light light = lightObject.AddComponent <Light>();
    light.type = LightType.Directional;
    light.color = Color.white;
    light.shadows = LightShadows.None;

    //add object
    GameObject brickObject = UnityEngine.Object.Instantiate(brick.gameObject);
    brickObject.transform.rotation = Quaternion.Euler(new Vector3(-16, -43, 18));
    brickObject.transform.position = Vector3.zero;
    SetLayer(brickObject, layer);

    //position object to fit whole frame
    var instanciatedBrick = brickObject.GetComponent <Brick>();
    var bounds = instanciatedBrick.GetBounds();
    var corners = instanciatedBrick.GetBoundCorners();
    // Bounding-sphere radius = farthest corner from the bounds center.
    float boundSphereRadius = corners.Select(x => Vector3.Distance(x, bounds.center)).Max();
    // Distance that fits the bounding sphere in the vertical FOV; the 0.7
    // factor zooms in slightly past an exact fit.
    float fov = Mathf.Deg2Rad * camera.fieldOfView;
    float camDistance = boundSphereRadius * 0.7f / Mathf.Tan(fov / 2f);
    camera.transform.position = new Vector3(
        bounds.center.x,
        bounds.center.y,
        bounds.center.z - camDistance
        );

    //render texture
    camera.Render();

    //cleanup
    camera.targetTexture = null;
    UnityEngine.Object.DestroyImmediate(brickObject);
    UnityEngine.Object.DestroyImmediate(cameraObject);
    return(texture);
}
// Draws the avatar preview (camera render of the skeleton, fog temporarily
// disabled) plus the avatar/animation/part selection controls into rect `r`.
public override void OnPreviewGUI(Rect r, GUIStyle background)
{
    //if (!Application.isPlaying)
    //    return;
    InitPreview();
    var asset = Asset;

    // Disable fog for the preview render; restored after EndAndDrawPreview.
    bool fog = RenderSettings.fog;
    Unsupported.SetRenderSettingsUseFogNoDirty(false);

    previewRender.BeginPreview(r, background);
    previewRender.ambientColor = asset.ambientColor;
    previewRender.lights[0].intensity = asset.lightIntensity;
    Camera camera = previewRender.camera;
    if (skeleton)
    {
        // Orbit the preview camera around the skeleton using angels/height/distance.
        camera.transform.position = skeleton.transform.position + Quaternion.Euler(angels) * skeleton.transform.rotation * new Vector3(0, height, distance);
        camera.transform.LookAt(skeleton.transform.position + new Vector3(0, height * 1f, 0), skeleton.transform.up);
    }
    camera.Render();
    previewRender.EndAndDrawPreview(r);
    Unsupported.SetRenderSettingsUseFogNoDirty(fog);

    Rect itemRect = new Rect(r.x, r.y, selectedWidth, selectedHeight);
    if (avatarRes == null)
    {
        GUI.Label(itemRect, "Avatars Empty");
        return;
    }
    if (GUI.Toggle(new Rect(itemRect.x, itemRect.y, selectedWidth, selectedHeight), combine, "Combine") != combine)
    {
        combine = !combine;
    }
    itemRect.y += itemRect.height + spaceWidth;
    // Avatar selector with previous/next arrows.
    itemRect = GUIOptions(itemRect, avatarRes.name, () => { SelectAvatar(selectedAvatarIndex - 1); }, () => { SelectAvatar(selectedAvatarIndex + 1); });
    // Animation selector ("(None)" when the avatar has no animations).
    itemRect = GUIOptions(itemRect, avatarRes.animationNames.Count > 0 ? avatarRes.animationNames[avatarRes.selectedAnimationIndex] : "(None)", () => { SelectAnimation(avatarRes.selectedAnimationIndex - 1); }, () => { SelectAnimation(avatarRes.selectedAnimationIndex + 1); });
    var parts = avatarRes.awatarParts;
    for (int i = 0; i < parts.Length; i++)
    {
        itemRect = AddCategory(itemRect, i, parts[i].partName);
    }
    OnGUIDrag(r);
    if (Event.current.type == EventType.Repaint)
    {
        if (skeleton)
        {
            // Advance the preview animation and keep repainting while visible.
            UpdateAnimation(skeleton);
            Repaint();
        }
    }
}
// Routes this widget's render texture `tex` through `camera`. Any previous
// camera still targeting `tex` is detached first; the new camera is assigned
// and rendered once only when it actually changes. Always switches the widget
// into render-texture mode (clearing any color-texture state).
public void setupRenderTexture(Camera camera)
{
    if (renderTextureCamera != camera)
    {
        bool previousOwnsTex = renderTextureCamera && renderTextureCamera.targetTexture == tex;
        if (previousOwnsTex)
        {
            renderTextureCamera.targetTexture = null;
        }
        camera.targetTexture = tex;
        renderTextureCamera = camera;
        camera.Render();
    }
    useRenderTexture = true;
    colorTex = Color.white;
    hasColorTex = false;
    placeholder = false;
}
// This is called when it's known that the object will be rendered by some
// camera. We render reflections / refractions and do other updates here.
// Because the script executes in edit mode, reflections for the scene view
// camera will just work!
public void OnWillRenderObject()
{
    if (!enabled || !ourRenderer || !ourRenderer.sharedMaterial || !ourRenderer.enabled)
    {
        return;
    }

    UnityEngine.Camera cam = UnityEngine.Camera.current;
    if (!cam)
    {
        return;
    }

    // Safeguard from recursive water reflections.
    if (s_InsideWater)
    {
        return;
    }
    s_InsideWater = true;

    // Actual water rendering mode depends on both the current setting AND
    // the hardware support. There's no point in rendering refraction textures
    // if they won't be visible in the end.
    m_HardwareWaterSupport = FindHardwareWaterSupport();
    WaterMode mode = GetWaterMode();

    UnityEngine.Camera reflectionCamera, refractionCamera;
    CreateWaterObjects(cam, out reflectionCamera, out refractionCamera);

    // find out the reflection plane: position and normal in world space
    Vector3 pos = transform.position;
    Vector3 normal = transform.up;

    // Optionally disable pixel lights for reflection/refraction
    int oldPixelLightCount = QualitySettings.pixelLightCount;
    if (disablePixelLights)
    {
        QualitySettings.pixelLightCount = 0;
    }

    UpdateCameraModes(cam, reflectionCamera);
    UpdateCameraModes(cam, refractionCamera);

    // Render reflection if needed
    if (mode >= WaterMode.Reflective)
    {
        // Reflect camera around reflection plane
        float d = -Vector3.Dot(normal, pos) - clipPlaneOffset;
        Vector4 reflectionPlane = new Vector4(normal.x, normal.y, normal.z, d);
        Matrix4x4 reflection = Matrix4x4.zero;
        CalculateReflectionMatrix(ref reflection, reflectionPlane);
        Vector3 oldpos = cam.transform.position;
        Vector3 newpos = reflection.MultiplyPoint(oldpos);
        reflectionCamera.worldToCameraMatrix = cam.worldToCameraMatrix * reflection;

        // Setup oblique projection matrix so that near plane is our reflection
        // plane. This way we clip everything below/above it for free.
        Vector4 clipPlane = CameraSpacePlane(reflectionCamera, pos, normal, 1.0f);
        reflectionCamera.projectionMatrix = cam.CalculateObliqueMatrix(clipPlane);
        reflectionCamera.cullingMask = ~(1 << 4) & reflectLayers.value; // never render water layer
        reflectionCamera.targetTexture = m_ReflectionTexture;
        // Mirrored rendering flips triangle winding; invert culling while drawing.
        GL.invertCulling = true;
        reflectionCamera.transform.position = newpos;
        Vector3 euler = cam.transform.eulerAngles;
        reflectionCamera.transform.eulerAngles = new Vector3(-euler.x, euler.y, euler.z);
        reflectionCamera.Render();
        reflectionCamera.transform.position = oldpos;
        GL.invertCulling = false;
        GetComponent <Renderer>().sharedMaterial.SetTexture("_ReflectionTex", m_ReflectionTexture);
    }

    // Render refraction
    if (mode >= WaterMode.Refractive)
    {
        refractionCamera.worldToCameraMatrix = cam.worldToCameraMatrix;

        // Setup oblique projection matrix so that near plane is our reflection
        // plane. This way we clip everything below/above it for free.
        Vector4 clipPlane = CameraSpacePlane(refractionCamera, pos, normal, -1.0f);
        refractionCamera.projectionMatrix = cam.CalculateObliqueMatrix(clipPlane);
        refractionCamera.cullingMask = ~(1 << 4) & refractLayers.value; // never render water layer
        refractionCamera.targetTexture = m_RefractionTexture;
        refractionCamera.transform.position = cam.transform.position;
        refractionCamera.transform.rotation = cam.transform.rotation;
        refractionCamera.Render();
        GetComponent <Renderer>().sharedMaterial.SetTexture("_RefractionTex", m_RefractionTexture);
    }

    // Restore pixel light count
    if (disablePixelLights)
    {
        QualitySettings.pixelLightCount = oldPixelLightCount;
    }

    // Setup shader keywords based on water mode
    switch (mode)
    {
    case WaterMode.Simple:
        Shader.EnableKeyword("WATER_SIMPLE");
        Shader.DisableKeyword("WATER_REFLECTIVE");
        Shader.DisableKeyword("WATER_REFRACTIVE");
        break;
    case WaterMode.Reflective:
        Shader.DisableKeyword("WATER_SIMPLE");
        Shader.EnableKeyword("WATER_REFLECTIVE");
        Shader.DisableKeyword("WATER_REFRACTIVE");
        break;
    case WaterMode.Refractive:
        Shader.DisableKeyword("WATER_SIMPLE");
        Shader.DisableKeyword("WATER_REFLECTIVE");
        Shader.EnableKeyword("WATER_REFRACTIVE");
        break;
    }

    s_InsideWater = false;
}
// Renders the mirrored view of `cam` into reflectCamera's target texture.
// The reflection camera is mirrored about this object's surface plane and
// uses an oblique near plane so geometry below the surface is clipped.
void RenderReflectionFor(Camera cam, Camera reflectCamera)
{
    if (!reflectCamera)
    {
        return;
    }

    if (m_SharedMaterial && !m_SharedMaterial.HasProperty(reflectionSampler))
    {
        return;
    }

    // Never reflect the water layer itself.
    reflectCamera.cullingMask = reflectionMask & ~(1 << LayerMask.NameToLayer("Water"));
    SaneCameraSettings(reflectCamera);
    reflectCamera.backgroundColor = clearColor;
    reflectCamera.clearFlags = reflectSkybox ? CameraClearFlags.Skybox : CameraClearFlags.SolidColor;
    if (reflectSkybox)
    {
        // Mirror the source camera's skybox material onto the reflection camera.
        if (cam.gameObject.GetComponent(typeof(Skybox)))
        {
            Skybox sb = (Skybox)reflectCamera.gameObject.GetComponent(typeof(Skybox));
            if (!sb)
            {
                sb = (Skybox)reflectCamera.gameObject.AddComponent(typeof(Skybox));
            }
            sb.material = ((Skybox)cam.GetComponent(typeof(Skybox))).material;
        }
    }

    // Mirrored rendering flips triangle winding; invert culling while drawing.
    GL.invertCulling = true;

    Transform reflectiveSurface = transform; //waterHeight;
    Vector3 eulerA = cam.transform.eulerAngles;
    reflectCamera.transform.eulerAngles = new Vector3(-eulerA.x, eulerA.y, eulerA.z);
    reflectCamera.transform.position = cam.transform.position;

    // Reflection plane built from the surface position and its up vector.
    Vector3 pos = reflectiveSurface.transform.position;
    pos.y = reflectiveSurface.position.y;
    Vector3 normal = reflectiveSurface.transform.up;
    float d = -Vector3.Dot(normal, pos) - clipPlaneOffset;
    Vector4 reflectionPlane = new Vector4(normal.x, normal.y, normal.z, d);

    Matrix4x4 reflection = Matrix4x4.zero;
    reflection = CalculateReflectionMatrix(reflection, reflectionPlane);
    m_Oldpos = cam.transform.position;
    Vector3 newpos = reflection.MultiplyPoint(m_Oldpos);
    reflectCamera.worldToCameraMatrix = cam.worldToCameraMatrix * reflection;

    // Oblique near plane clips everything below the surface for free.
    Vector4 clipPlane = CameraSpacePlane(reflectCamera, pos, normal, 1.0f);
    Matrix4x4 projection = cam.projectionMatrix;
    projection = CalculateObliqueMatrix(projection, clipPlane);
    reflectCamera.projectionMatrix = projection;

    reflectCamera.transform.position = newpos;
    Vector3 euler = cam.transform.eulerAngles;
    reflectCamera.transform.eulerAngles = new Vector3(-euler.x, euler.y, euler.z);

    reflectCamera.Render();

    GL.invertCulling = false;
}
// Renders the given camera into the supplied RenderTexture, encodes the
// result as a PNG and hands it to ShareByteArray for sharing.
// Android only; on other platforms this just logs and does nothing.
public static void ShareRenderTexture(Camera renderCamera, RenderTexture thisRenderTexture, string subject, string body, bool unique, TextureFormat textureFormat)
{
#if UNITY_ANDROID
    // Validate arguments up front; each failure logs and aborts the share.
    if (renderCamera == null)
    {
        Debug.Log ("Render Camera must be set");
        return;
    }
    if (thisRenderTexture == null)
    {
        Debug.Log ("Render Texture must be set");
        return;
    }
    if (subject == null || body == null)
    {
        Debug.Log("Body and subject cannot be null");
        return;
    }

    // Render into the texture, then copy it back to a CPU-readable Texture2D.
    RenderTexture previousActive = RenderTexture.active;
    RenderTexture.active = thisRenderTexture;
    renderCamera.Render();

    Texture2D capture = new Texture2D(thisRenderTexture.width, thisRenderTexture.height, textureFormat, false);
    capture.ReadPixels(new Rect(0, 0, capture.width, capture.height), 0, 0);
    capture.Apply();

    RenderTexture.active = previousActive;

    ShareByteArray(capture.EncodeToPNG(), subject, body, unique);
#else
    Debug.Log("NOT an Android device");
#endif
}
// Renders the given camera into this component's targetTexture and returns
// the result as a new RGB24 Texture2D.
//
// Fix: the camera's previous targetTexture and the previously active
// RenderTexture are restored afterwards; the original version reset both to
// null, clobbering any render target the caller had configured.
public Texture2D TakeScreenshot(Camera camera)
{
    var width = this.targetTexture.width;
    var height = this.targetTexture.height;
    Texture2D screenshot = new Texture2D(width, height, TextureFormat.RGB24, false);

    // Remember caller state so it can be restored.
    RenderTexture previousTarget = camera.targetTexture;
    RenderTexture previousActive = RenderTexture.active;

    camera.targetTexture = this.targetTexture;
    camera.Render();

    // ReadPixels copies from the currently active render target.
    RenderTexture.active = this.targetTexture;
    screenshot.ReadPixels(new Rect(0, 0, width, height), 0, 0);
    screenshot.Apply(false);

    camera.targetTexture = previousTarget;
    RenderTexture.active = previousActive;
    return screenshot;
}
// Editor helper: draws the given camera's view into the GUI rect.
// On Repaint it either renders the camera directly (Normal mode, honouring
// the currently active RenderTexture) or defers to the internal Handles draw
// paths for the special scene-view draw modes. On every other event it only
// records the camera for the Handles machinery.
internal static void DrawCameraImpl(Rect position, Camera camera, DrawCameraMode drawMode, bool drawGrid, DrawGridParameters gridParam, bool finish)
{
    Event current = Event.current;
    if (current.type == EventType.Repaint)
    {
        if (camera.targetTexture == null)
        {
            // Rendering to the screen: convert the GUI rect (top-left origin)
            // into a screen-space pixel rect (bottom-left origin).
            Rect rect = GUIClip.Unclip(position);
            camera.pixelRect = new Rect(rect.xMin, (float)Screen.height - rect.yMax, rect.width, rect.height);
        }
        else
        {
            // Rendering to a texture: cover the whole target.
            camera.rect = new Rect(0f, 0f, 1f, 1f);
        }
        if (drawMode == DrawCameraMode.Normal)
        {
            // Temporarily point the camera at the active RenderTexture,
            // render, then restore its own target.
            RenderTexture targetTexture = camera.targetTexture;
            camera.targetTexture = RenderTexture.active;
            camera.Render();
            camera.targetTexture = targetTexture;
        }
        else
        {
            if (drawGrid)
            {
                Handles.Internal_DrawCameraWithGrid(camera, (int)drawMode, ref gridParam);
            }
            else
            {
                Handles.Internal_DrawCamera(camera, (int)drawMode);
            }
            if (finish)
            {
                Handles.Internal_FinishDrawingCamera(camera);
            }
        }
    }
    else
    {
        Handles.Internal_SetCurrentCamera(camera);
    }
}
// Builds a blurred mip chain for the given cubemap: mip 0 is left as the
// sharp reflection, and each successively smaller mip is re-rendered through
// the "Custom/MirrorBlurCubemap" shader with an increasing blur power.
void RenderToCubeMap(Cubemap dest, Camera cam)
{
    // get MIP level 0 to be pure reflection
    /* for (int i=0;i<6;i++) { BeginCubeFaceRender(dest, (CubemapFace)i, 0, cam); cam.Render(); EndCubeFaceRender(dest, (CubemapFace)i, 0, cam, false); }*/
    dest.Apply(false);

    // blur each mip level
    Material blurMaterial = new Material(Shader.Find("Custom/MirrorBlurCubemap"));
    blurMaterial.SetTexture("MyCube", dest);
    // Walk mips until the face size reaches zero.
    for (int mip=1; (dest.width>>mip)>0;mip++)
    {
        //specular[mip]
        // blurMaterial.SetFloat("_SpecPwr", specular[mip]); // FIXME what is specular[mip]?
        blurMaterial.SetFloat("_SpecPwr", mip*8);
        // blur each face by rendering a cube that does a tons of samples
        for (int i=0; i<6; i++)
        {
            BeginCubeFaceRender(dest, (CubemapFace)i, mip, cam);
            // FIXME: what is outsideCubeMesh?
            // NOTE(review): presumably a cube mesh defined elsewhere in this
            // class that surrounds the camera — confirm.
            Graphics.DrawMesh(outsideCubeMesh, cam.transform.position, Quaternion.identity, blurMaterial, 1, cam, 0);
            cam.Render();
            EndCubeFaceRender(dest, (CubemapFace)i, mip, cam, false);
        }
    }
    // upload final version
    dest.Apply(false);
}
// Renders an animation sprite sheet: steps the particle systems, animations
// and sprite-packer providers through time, and every timeSamples-th step
// grabs the render camera's target into one cell of the destination sheet.
//
// copyTex      scratch texture, at least cellSizeX x cellSizeY pixels
// renderCamera camera whose targetTexture supplies each cell's pixels
// texture      destination sheet (numberOfColumns x numberOfRows cells)
void RenderSheet(Texture2D copyTex, Camera renderCamera, Texture2D texture)
{
    float timeValue = 0.0f; // NOTE(review): unused — candidate for removal
    int j, k; // Establish a variable for our columns & rows
    int timeSamples = 5; // sub-steps simulated per captured cell

    // Reset all providers / animations / particle systems to t = 0.
    foreach(SpritePackerAnimation a in providers) { a.Init(); a.Sample(0, Vector3.zero); }
    foreach(Animation a in animations) { a.gameObject.SampleAnimation(a.clip, 0); }
    foreach(ParticleSystem e in particleSystems) { e.Clear(true); e.Pause(); }

    for (int i = 0; i < finalCellCount*timeSamples; i++)
    {
        // Small epsilon keeps the very first sample just past t = 0.
        float time = i * (increment/timeSamples) + 0.035f;
        if(rotate)
        {
            systemParent.rotation = Quaternion.AngleAxis(time/finalTime * 360.0f * rotationCycles, rotationAxis.normalized);
        }
        // Advance every animated source to the current time.
        foreach(ParticleSystem e in particleSystems) { e.Simulate (time, true); }
        foreach(Animation a in animations) { a.gameObject.SampleAnimation(a.clip, time*a.clip.length); }
        foreach(SpritePackerAnimation a in providers) { a.Sample(time, renderCamera.transform.position); }

        // SnapShot
        // Capture only on whole multiples of timeSamples.
        if((i/timeSamples)*timeSamples == i)
        {
            int usedI = i/timeSamples;
            j = usedI % numberOfColumns; // ... find the remainder by width to get the column...
            k = usedI / numberOfColumns; // ... divide by width to get the row...
            renderCamera.Render ();
            RenderTexture.active = renderCamera.targetTexture;
            copyTex.ReadPixels (new Rect (0, 0, cellSizeX, cellSizeY), 0, 0);
            Color[] pixels = copyTex.GetPixels (0, 0, cellSizeX, cellSizeY);
            // Rows fill top-down: row k lands k cells below the sheet's top edge.
            texture.SetPixels (j * cellSizeX, ((numberOfRows - k) * cellSizeY) - cellSizeY, cellSizeX, cellSizeY, pixels);
        }
    }

    // Leave everything reset for the next run.
    foreach(ParticleSystem e in particleSystems) { e.Clear(true); e.Pause(); }
    foreach(SpritePackerAnimation a in providers) { a.Sample(0, Vector3.zero); }
}
// Temporarily enables the camera, renders one frame and copies the screen
// image into tex via WriteScreenImageToTexture.
//
// Fix: the camera's previous enabled state is restored afterwards; the
// original version unconditionally disabled it, even for cameras that were
// enabled before the call.
void RenderCamToTexture (Camera cam, Texture2D tex)
{
    bool wasEnabled = cam.enabled;
    cam.enabled = true;
    cam.Render();
    WriteScreenImageToTexture(tex);
    cam.enabled = wasEnabled;
}
// Editor preview renderer: draws the given mesh (and optionally a checkered
// background plane) with the supplied material into a RenderTexture, using
// Unity's internal render-utility custom-lighting hooks via reflection.
// Editor-time shader globals (_EditorTime / _EditorSinTime / _EditorCosTime)
// are fed manually because shader time does not advance in edit mode.
public void Render( Mesh mesh, Vector3 position, Quaternion rotation, Camera camera, IEnumerable<Light> lights, Color ambient, Material material, RenderTexture target, bool drawBackground)
{
    // Save camera state so it can be restored at the end.
    var oldTarget = camera.targetTexture;
    var oldFlags = camera.clearFlags;
    camera.targetTexture = target;

    // Install the preview lights + ambient via the internal utility type.
    _internalRenderUtilityType.InvokeMember(
        "SetCustomLighting",
        BindingFlags.InvokeMethod | BindingFlags.Instance | BindingFlags.Public | BindingFlags.Static,
        null,
        null,
        new object[] { lights.ToArray(), ambient }
    );

    var time = (float)EditorApplication.timeSinceStartup;
    // Mirror the layout of the built-in _Time / _SinTime / _CosTime vectors.
    if( material.HasProperty( "_EditorTime") )
    {
        var vTime = new Vector4( time / 20, time, time*2, time*3);
        material.SetVector( "_EditorTime", vTime );
    }
    if( material.HasProperty( "_EditorSinTime") )
    {
        var sinTime = new Vector4( Mathf.Sin( time / 8f ), Mathf.Sin( time / 4f ), Mathf.Sin( time /2f ), Mathf.Sin( time ) );
        material.SetVector( "_EditorSinTime", sinTime );
    }
    if( material.HasProperty( "_EditorCosTime") )
    {
        var cosTime = new Vector4( Mathf.Cos( time / 8f ), Mathf.Cos( time / 4f ), Mathf.Cos( time /2f ), Mathf.Cos( time ) );
        material.SetVector( "_EditorCosTime", cosTime );
    }

    if( drawBackground )
    {
        // Render the checker plane first, then keep its pixels by switching
        // the camera to CameraClearFlags.Nothing for the mesh pass.
        Graphics.DrawMesh(BackPlane, Matrix4x4.identity, CheckerMat, 1, camera, 0);
        camera.Render();
        camera.clearFlags = CameraClearFlags.Nothing;
    }
    if( mesh != null )
    {
        // Queue every submesh with the same material.
        for (int i = 0; i < mesh.subMeshCount; i++)
        {
            Graphics.DrawMesh(mesh, position, rotation, material, 1, camera, i);
        }
    }
    camera.Render();

    // Restore camera state and remove the custom lighting again.
    camera.clearFlags = oldFlags;
    _internalRenderUtilityType.InvokeMember(
        "RemoveCustomLighting",
        BindingFlags.InvokeMethod | BindingFlags.Instance | BindingFlags.Public | BindingFlags.Static,
        null,
        null,
        null
    );
    camera.targetTexture = oldTarget;
    return;
}
// Integration test: loads each GLTF scene, renders the main camera into a
// 512x512 RenderTexture and compares the result pixel-for-pixel against a
// stored reference PNG (or regenerates the reference images when
// GENERATE_REFERENCEDATA is set).
public IEnumerator GLTFScenarios([ValueSource("SceneFilePaths")] string scenePath)
{
    SceneManager.LoadScene(Path.GetFileNameWithoutExtension(scenePath));
    //wait one frame for loading to complete
    yield return(null);
    // Kick off loading on every GLTF object in the scene.
    var objects = GameObject.FindObjectsOfType(typeof(GameObject));
    foreach (GameObject o in objects)
    {
        if (o.name.Contains("GLTF"))
        {
            GLTFComponent gltfcomponent = o.GetComponent <GLTFComponent>();
            gltfcomponent.Load();
        }
    }
    // wait one frame for textures to load
    // NOTE(review): this yields a single frame, not a full second — confirm
    // GLTFComponent.Load() completes quickly enough for the capture below.
    yield return(null);

    Camera mainCamera = Camera.main;
    Debug.Assert(mainCamera != null, "Make sure you have a main camera");

    // Render the scene into an offscreen texture and read it back.
    RenderTexture rt = new RenderTexture(512, 512, 24);
    mainCamera.targetTexture = rt;
    Texture2D actualContents = new Texture2D(rt.width, rt.height, TextureFormat.RGB24, false);
    mainCamera.Render();
    RenderTexture.active = rt;
    actualContents.ReadPixels(new Rect(0, 0, 512, 512), 0, 0);
    byte[] pngActualfile = actualContents.EncodeToPNG();

    // Output paths mirror the scene's directory under the scenario output root.
    string outputpath = Path.GetDirectoryName(scenePath);
    string outputfullpath = GLTF_SCENARIO_OUTPUT_PATH + outputpath;
    Directory.CreateDirectory(outputfullpath);
    string filename = Path.GetFileNameWithoutExtension(scenePath);
    string expectedFilePath = outputfullpath + "/" + filename + "_EXPECTED.png";
    string actualFilePath = outputfullpath + "/" + filename + "_ACTUAL.png";

    //uncomment to regenerate master images
    if (GENERATE_REFERENCEDATA)
    {
        File.WriteAllBytes(expectedFilePath, pngActualfile);
    }
    else
    {
        if (File.Exists(expectedFilePath))
        {
            byte[] pngActualfileContents = File.ReadAllBytes(expectedFilePath);
            // Persist the actual image so a failing diff can be inspected.
            File.WriteAllBytes(actualFilePath, pngActualfile);

            //compare against expected
            Texture2D expectedContents = new Texture2D(rt.width, rt.height, TextureFormat.RGB24, false);
            expectedContents.LoadImage(pngActualfileContents);
            Color[] expectedPixels = expectedContents.GetPixels();
            Color[] actualPixels = actualContents.GetPixels();
            Assert.AreEqual(expectedPixels.Length, actualPixels.Length);
            string errormessage = "\r\nExpectedPath: " + expectedFilePath + "\r\n ActualPath: " + actualFilePath;
            for (int i = 0; i < expectedPixels.Length; i++)
            {
                Assert.AreEqual(expectedPixels[i], actualPixels[i], errormessage);
            }
        }
    }
}
// Temporarily enables the camera, renders one frame and reads the result
// into tex (screenWidth x screenHeight pixels from the bottom-left corner).
//
// Fixes over the original: RenderTexture.active is pointed at the camera's
// actual render target (null = the screen) before ReadPixels, so the pixels
// read are the ones this camera just produced, and both the active render
// target and the camera's enabled state are restored afterwards.
void RenderToTexture(Camera camera, Texture2D tex)
{
    bool wasEnabled = camera.enabled;
    RenderTexture previousActive = RenderTexture.active;

    camera.enabled = true;
    camera.Render();

    // ReadPixels copies from the currently active render target.
    RenderTexture.active = camera.targetTexture;
    tex.ReadPixels(new Rect(0, 0, screenWidth, screenHeight), 0, 0);
    tex.Apply();

    RenderTexture.active = previousActive;
    camera.enabled = wasEnabled;
}
// Renders this model's blob shadow with blobCam into the BlobShadow texture.
// ComputeBlobSize maps the model bounds to a world-space blob footprint;
// falloff drives the shadow intensity exponent, and blurFactor/textureSize
// control the mip-based blur performed by the BlobBlur component.
public void RenderBlob(Camera blobCam, BlobSizeComputation ComputeBlobSize, float falloff, float blurFactor, float textureSize)
{
    // Resize blob, remember to counter root scale
    Vector2 blobSize = ComputeBlobSize(ModelBounds);
    Vector3 rootScale = transform.localScale;
    BlobRenderer.transform.localScale = new Vector3(blobSize.x / rootScale.x, 1.0f / rootScale.y, blobSize.y / rootScale.z);

    // Resize blob texture (recreated only when the required size changes).
    int width, height;
    float textureDownScale;
    CalcTextureSize(blobSize, textureSize, out width, out height, out textureDownScale);
    if (BlobShadow == null || BlobShadow.width != width || BlobShadow.height != height)
    {
        if (BlobShadow != null)
            Texture2D.DestroyImmediate(BlobShadow);
        BlobShadow = new Texture2D(width, height);
        BlobShadow.filterMode = FilterMode.Trilinear;
        BlobShadow.hideFlags = HideFlags.HideInInspector | HideFlags.DontSave;
        BlobRenderer.sharedMaterial.mainTexture = BlobShadow;
    }

    // Setup blob camera
    RenderTexture targetTex = blobCam.targetTexture;
    Vector2 blobTexScale = new Vector2((float)targetTex.width / (float)BlobShadow.width, (float)targetTex.height / (float)BlobShadow.height);

    // Magic! Can't remember what's going on here anymore. The margin start
    // computes the distance to where the margin should start (I think!) but
    // I've no idea what margin end is. An inverse lerp is used in the
    // shader to compute the intensity.
    Vector2 marginEnd = new Vector2(1.0f - (float)(targetTex.width - BlobShadow.width) / (float)targetTex.width, 1.0f - (float)(targetTex.height - BlobShadow.height) / (float)targetTex.height);
    Vector2 marginStart = blobCam.GetComponent<BlobBlur>().MarginStart = new Vector2(marginEnd.x / blobSize.x * ModelBounds.size.x, marginEnd.y / blobSize.y * ModelBounds.size.z);
    blobCam.GetComponent<BlobBlur>().MarginEnd = new Vector2(Mathf.Max(marginEnd.x, marginStart.x), Mathf.Max(marginEnd.y, marginStart.y));

    // Place camera so it can see the model (just below its lowest point).
    blobCam.transform.position = new Vector3(ModelBounds.center.x, ModelBounds.min.y - 0.01f * ModelBounds.size.y, ModelBounds.center.z);
    blobCam.orthographicSize = blobSize.y * 0.5f * blobTexScale.y;
    blobCam.aspect = 1.0f;

    // Convert texture size to mipmap max offset.
    // If the texture size had to be scaled due to originally exceeding 4096 pixels,
    // the scale is also applied to the texture size to keep the shadow relatively consistent.
    float modelSize = textureSize * Mathf.Max(ModelBounds.size.x, ModelBounds.size.z);
    blobCam.GetComponent<BlobBlur>().MipOffset = Mathf.Log(blurFactor * modelSize * textureDownScale, 2.0f);

    // Render blob
    Shader.SetGlobalFloat("_BlobFalloffExponent", falloff);
    blobCam.Render();

    // Copy from target tex to BlobShadow (centered crop of the camera target).
    RenderTexture oldActive = RenderTexture.active;
    RenderTexture.active = targetTex;
    BlobShadow.ReadPixels(new Rect((targetTex.width - BlobShadow.width) / 2, (targetTex.height - BlobShadow.height) / 2, BlobShadow.width, BlobShadow.height), 0, 0);
    BlobShadow.Apply();
    RenderTexture.active = oldActive;
}
// Legacy UMA atlas bake coroutine: for every atlas it positions one quad
// ("texture module") per non-shared material fragment plus one per overlay,
// renders them with an orthographic camera into a RenderTexture per texture
// channel, optionally reads that back into a Texture2D, and finally builds
// the atlas material from the first fragment's sample material.
// NOTE(review): yields integer values (25/8/6) — presumably frame counts for
// the owning generator's scheduler; confirm against the base class contract.
protected override IEnumerator workerMethod()
{
    for (int atlasIndex = 0; atlasIndex < umaData.atlasList.atlas.Count; atlasIndex++)
    {
        //Rendering Atlas
        int moduleCount = 0;
        //Process all necessary TextureModules
        // One module per non-shared fragment, plus one per overlay it carries.
        for (int i = 0; i < umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions.Count; i++)
        {
            if (!umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[i].isRectShared)
            {
                moduleCount++;
                moduleCount = moduleCount + umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[i].source.overlays.Length;
            }
        }
        // Grow the shared module pool until it can hold this atlas.
        while (umaGenerator.textureMerge.textureModuleList.Count < moduleCount)
        {
            Transform tempModule = UnityEngine.Object.Instantiate(umaGenerator.textureMerge.textureModule, new Vector3(0, 0, 3), Quaternion.identity) as Transform;
            tempModule.gameObject.renderer.sharedMaterial = UnityEngine.Object.Instantiate(umaGenerator.textureMerge.material) as Material;
            umaGenerator.textureMerge.textureModuleList.Add(tempModule);
        }
        textureModuleList = umaGenerator.textureMerge.textureModuleList.ToArray();
        // Parent, flip (Y 180) and enable the modules that will be used.
        for (int i = 0; i < moduleCount; i++)
        {
            textureModuleList[i].localEulerAngles = new Vector3(textureModuleList[i].localEulerAngles.x, 180.0f, textureModuleList[i].localEulerAngles.z);
            textureModuleList[i].parent = umaGenerator.textureMerge.myTransform;
            textureModuleList[i].name = "tempModule";
            textureModuleList[i].gameObject.SetActive(true);
        }
        moduleCount = 0;
        resultingTextures = new Texture[umaGenerator.textureNameList.Length];
        Rect nullRect = new Rect(0, 0, 0, 0);
        // One bake pass per texture channel the sample material actually has.
        for (int textureType = 0; textureType < umaGenerator.textureNameList.Length; textureType++)
        {
            if (umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[0].source.materialSample.HasProperty(umaGenerator.textureNameList[textureType]))
            {
                for (int i = 0; i < umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions.Count; i++)
                {
                    UMAData.AtlasMaterialDefinition atlasElement = umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[i];
                    resolutionScale = umaData.atlasList.atlas[atlasIndex].resolutionScale * umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[i].source.slotData.overlayScale;
                    Vector2 offsetAdjust = new Vector2(umaGenerator.atlasResolution / 1024, umaGenerator.atlasResolution / 1024);
                    if (!atlasElement.isRectShared)
                    {
                        // Base module: scale/position it over the fragment's
                        // atlas region (position only needs setting once, on
                        // the first channel).
                        if (textureType == 0)
                        {
                            textureModuleList[moduleCount].localScale = new Vector3(atlasElement.atlasRegion.width / umaGenerator.atlasResolution, atlasElement.atlasRegion.height / umaGenerator.atlasResolution, 1);
                            textureModuleList[moduleCount].localPosition = new Vector3(Mathf.Lerp(-1, 1, (offsetAdjust.x + atlasElement.atlasRegion.x + atlasElement.atlasRegion.width * 0.5f) / umaGenerator.atlasResolution), Mathf.Lerp(-1, 1, (offsetAdjust.y + atlasElement.atlasRegion.y + atlasElement.atlasRegion.height * 0.5f) / umaGenerator.atlasResolution), 3.0f);
                        }
                        // Material tempMaterial = UnityEngine.Object.Instantiate(umaGenerator.textureMerge.material) as Material;
                        // textureModuleList[moduleCount].renderer.material = tempMaterial;
                        if (atlasElement.source.baseTexture[textureType])
                        {
                            atlasElement.source.baseTexture[textureType].filterMode = FilterMode.Point;
                            atlasElement.source.baseTexture[0].filterMode = FilterMode.Point;
                        }
                        textureModuleList[moduleCount].renderer.sharedMaterial.SetTexture("_MainTex", atlasElement.source.baseTexture[textureType]);
                        textureModuleList[moduleCount].renderer.sharedMaterial.SetTexture("_ExtraTex", atlasElement.source.baseTexture[0]);
                        textureModuleList[moduleCount].renderer.sharedMaterial.SetColor("_Color", atlasElement.source.GetMultiplier(0, textureType));
                        textureModuleList[moduleCount].renderer.sharedMaterial.SetColor("_AdditiveColor", atlasElement.source.GetAdditive(0, textureType));
                        textureModuleList[moduleCount].name = atlasElement.source.baseTexture[textureType].name;
                        Transform tempModule = textureModuleList[moduleCount];
                        moduleCount++;
                        // Overlay modules: stacked slightly in front of the
                        // base module (z decreases by 0.1 per overlay).
                        for (int i2 = 0; i2 < atlasElement.source.overlays.Length; i2++)
                        {
                            if (atlasElement.source.rects[i2] != nullRect)
                            {
                                textureModuleList[moduleCount].localScale = new Vector3((atlasElement.source.rects[i2].width / umaGenerator.atlasResolution) * resolutionScale, (atlasElement.source.rects[i2].height / umaGenerator.atlasResolution) * resolutionScale, 1);
                                textureModuleList[moduleCount].localPosition = new Vector3(Mathf.Lerp(-1, 1, (offsetAdjust.x + atlasElement.atlasRegion.x + atlasElement.source.rects[i2].x * resolutionScale + atlasElement.source.rects[i2].width * 0.5f * resolutionScale) / umaGenerator.atlasResolution), Mathf.Lerp(-1, 1, (offsetAdjust.y + atlasElement.atlasRegion.y + atlasElement.source.rects[i2].y * resolutionScale + atlasElement.source.rects[i2].height * 0.5f * resolutionScale) / umaGenerator.atlasResolution), tempModule.localPosition.z - 0.1f - 0.1f * i2);
                            }
                            else
                            {
                                // Empty rect: overlay covers the whole base module.
                                textureModuleList[moduleCount].localScale = tempModule.localScale;
                                textureModuleList[moduleCount].localPosition = new Vector3(tempModule.localPosition.x, tempModule.localPosition.y, tempModule.localPosition.z - 0.1f - 0.1f * i2);
                            }
                            // Material tempGenMaterial = umaGenerator.textureMerge.GenerateMaterial(umaGenerator.textureMerge.material);
                            // textureModuleList[moduleCount].renderer.material = tempGenMaterial;
                            atlasElement.source.overlays[i2].textureList[textureType].filterMode = FilterMode.Point;
                            atlasElement.source.overlays[i2].textureList[0].filterMode = FilterMode.Point;
                            textureModuleList[moduleCount].renderer.sharedMaterial.SetTexture("_MainTex", atlasElement.source.overlays[i2].textureList[textureType]);
                            textureModuleList[moduleCount].renderer.sharedMaterial.SetTexture("_ExtraTex", atlasElement.source.overlays[i2].textureList[0]);
                            textureModuleList[moduleCount].renderer.sharedMaterial.SetColor("_Color", atlasElement.source.GetMultiplier(i2 + 1, textureType));
                            textureModuleList[moduleCount].renderer.sharedMaterial.SetColor("_AdditiveColor", atlasElement.source.GetAdditive(i2 + 1, textureType));
                            textureModuleList[moduleCount].name = atlasElement.source.overlays[i2].textureList[textureType].name;
                            moduleCount++;
                        }
                        // yield return null;
                    }
                }
                //last element for this textureType
                moduleCount = 0;
                // Render all modules into a RenderTexture cropped to this atlas.
                umaGenerator.textureMerge.gameObject.SetActive(true);
                destinationTexture = new RenderTexture(Mathf.FloorToInt(umaData.atlasList.atlas[atlasIndex].cropResolution.x), Mathf.FloorToInt(umaData.atlasList.atlas[atlasIndex].cropResolution.y), 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default);
                destinationTexture.filterMode = FilterMode.Point;
                renderCamera = umaGenerator.textureMerge.myCamera;
                Vector3 tempPosition = renderCamera.transform.position;
                renderCamera.orthographicSize = umaData.atlasList.atlas[atlasIndex].cropResolution.y / umaGenerator.atlasResolution;
                renderCamera.transform.position = tempPosition + (-Vector3.right * (1 - umaData.atlasList.atlas[atlasIndex].cropResolution.x / umaGenerator.atlasResolution)) + (-Vector3.up * (1 - renderCamera.orthographicSize));
                renderCamera.targetTexture = destinationTexture;
                renderCamera.Render();
                renderCamera.transform.position = tempPosition;
                // NOTE(review): Camera has no 'active' member in current Unity;
                // this presumably compiled against a legacy API — confirm.
                renderCamera.active = false;
                renderCamera.targetTexture = null;
                yield return 25;
                if (umaGenerator.convertRenderTexture)
                {
                    // Read the RenderTexture back into a Texture2D in 512px
                    // blocks, yielding between blocks.
                    Texture2D tempTexture;
                    tempTexture = new Texture2D(destinationTexture.width, destinationTexture.height, TextureFormat.ARGB32, umaGenerator.convertMipMaps);
                    int xblocks = destinationTexture.width / 512;
                    int yblocks = destinationTexture.height / 512;
                    if (xblocks == 0 || yblocks == 0)
                    {
                        // Small texture: read in a single pass.
                        RenderTexture.active = destinationTexture;
                        tempTexture.ReadPixels(new Rect(0, 0, destinationTexture.width, destinationTexture.height), 0, 0, umaGenerator.convertMipMaps);
                        RenderTexture.active = null;
                    }
                    else
                    {
                        // figures that ReadPixels works differently on OpenGL and DirectX, someday this code will break because Unity fixes this bug!
                        if (IsOpenGL())
                        {
                            for (int x = 0; x < xblocks; x++)
                            {
                                for (int y = 0; y < yblocks; y++)
                                {
                                    RenderTexture.active = destinationTexture;
                                    tempTexture.ReadPixels(new Rect(x * 512, y * 512, 512, 512), x * 512, y * 512, umaGenerator.convertMipMaps);
                                    RenderTexture.active = null;
                                    yield return 8;
                                }
                            }
                        }
                        else
                        {
                            // Non-OpenGL: source rows are top-down, so flip Y.
                            for (int x = 0; x < xblocks; x++)
                            {
                                for (int y = 0; y < yblocks; y++)
                                {
                                    RenderTexture.active = destinationTexture;
                                    tempTexture.ReadPixels(new Rect(x * 512, destinationTexture.height - 512 - y * 512, 512, 512), x * 512, y * 512, umaGenerator.convertMipMaps);
                                    RenderTexture.active = null;
                                    yield return 8;
                                }
                            }
                        }
                    }
                    resultingTextures[textureType] = tempTexture as Texture;
                    renderCamera.targetTexture = null;
                    RenderTexture.active = null;
                    destinationTexture.Release();
                    UnityEngine.GameObject.DestroyImmediate(destinationTexture);
                    umaGenerator.textureMerge.gameObject.SetActive(false);
                    yield return 6;
                    tempTexture = resultingTextures[textureType] as Texture2D;
                    tempTexture.Apply();
                    resultingTextures[textureType] = tempTexture;
                }
                else
                {
                    // Keep the RenderTexture itself as the atlas channel.
                    destinationTexture.filterMode = FilterMode.Bilinear;
                    resultingTextures[textureType] = destinationTexture;
                }
                umaGenerator.textureMerge.gameObject.SetActive(false);
            }
            else
            {
            }
        }
        // Hide the pooled modules again; their instanced materials are kept
        // for reuse by the next bake.
        for (int textureModuleIndex = 0; textureModuleIndex < textureModuleList.Length; textureModuleIndex++)
        {
            textureModuleList[textureModuleIndex].gameObject.SetActive(false);
            // UnityEngine.Object.DestroyImmediate(textureModuleList[textureModuleIndex].gameObject.renderer.material);
            // UnityEngine.Object.DestroyImmediate(textureModuleList[textureModuleIndex].gameObject);
        }
        // Build the atlas material from the first fragment's sample material
        // and assign the freshly baked channel textures.
        umaData.atlasList.atlas[atlasIndex].resultingAtlasList = resultingTextures;
        umaData.atlasList.atlas[atlasIndex].materialSample = UnityEngine.Object.Instantiate(umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[0].source.materialSample) as Material;
        umaData.atlasList.atlas[atlasIndex].materialSample.name = umaData.atlasList.atlas[atlasIndex].atlasMaterialDefinitions[0].source.materialSample.name;
        for (int finalTextureType = 0; finalTextureType < umaGenerator.textureNameList.Length; finalTextureType++)
        {
            if (umaData.atlasList.atlas[atlasIndex].materialSample.HasProperty(umaGenerator.textureNameList[finalTextureType]))
            {
                umaData.atlasList.atlas[atlasIndex].materialSample.SetTexture(umaGenerator.textureNameList[finalTextureType], resultingTextures[finalTextureType]);
            }
        }
    }
}
}//GetVirtualCamera()

/// <summary>
/// Save the rectangle from the camera view
/// as a texture.
/// </summary>
/// <param name="camera">Camera to render; its previous target texture is restored afterwards.</param>
/// <param name="rect">Pixel rectangle to capture; its size also sizes the returned texture.</param>
/// <returns>A new ARGB32 Texture2D containing the rendered pixels.</returns>
public static Texture2D Screenshot(Camera camera, Rect rect)
{
    //create temp textures, one for the camera to render
    RenderTexture renderTexture = new RenderTexture((int)rect.width, (int)rect.height, 16, RenderTextureFormat.ARGB32);
    //and one for the file
    Texture2D texture = new Texture2D((int)rect.width, (int)rect.height, TextureFormat.ARGB32, false);

    // Remember caller state so it can be restored. The original version left
    // the camera pointed at the temporary RenderTexture and never released
    // or destroyed it — a per-call leak.
    RenderTexture previousActive = RenderTexture.active;
    RenderTexture previousTarget = camera.targetTexture;

    //render the camera into the temporary texture
    camera.targetTexture = renderTexture;
    camera.Render();

    //read the content of the camera into the texture
    RenderTexture.active = renderTexture;
    texture.ReadPixels(rect, 0, 0);
    texture.Apply();

    //restore previous state and release the temporary render texture
    RenderTexture.active = previousActive;
    camera.targetTexture = previousTarget;
    renderTexture.Release();
    UnityEngine.Object.Destroy(renderTexture);

    return texture;
}//Screenshot()
// UMA atlas bake coroutine (generated-materials pipeline): for each generated
// material it walks the material channels and either bakes Texture/NormalMap
// channels through the TextureMerge camera, copies a MaterialColor, or wires
// TintedTexture fragments straight onto the atlas material.
// NOTE(review): yields integer values (25/8/6) — presumably frame counts for
// the owning generator's scheduler; confirm against the base class contract.
protected override IEnumerator workerMethod()
{
    var textureMerge = umaGenerator.textureMerge;
    for (int atlasIndex = umaData.generatedMaterials.materials.Count-1; atlasIndex >= 0; atlasIndex--)
    {
        var atlas = umaData.generatedMaterials.materials[atlasIndex];
        //Rendering Atlas
        int moduleCount = 0;
        //Process all necessary TextureModules
        // One module per non-shared fragment, plus one per overlay it carries.
        for (int i = 0; i < atlas.materialFragments.Count; i++)
        {
            if (!atlas.materialFragments[i].isRectShared)
            {
                moduleCount++;
                moduleCount = moduleCount + atlas.materialFragments[i].overlays.Length;
            }
        }
        textureMerge.EnsureCapacity(moduleCount);
        var slotData = atlas.materialFragments[0].slotData;
        resultingTextures = new Texture[slotData.asset.material.channels.Length];
        for (int textureType = slotData.asset.material.channels.Length - 1; textureType >= 0; textureType--)
        {
            switch(slotData.asset.material.channels[textureType].channelType )
            {
                case UMAMaterial.ChannelType.Texture:
                case UMAMaterial.ChannelType.NormalMap:
                {
                    // Bake this channel: set up one module per fragment, then
                    // render them all with the merge camera.
                    textureMerge.Reset();
                    for (int i = 0; i < atlas.materialFragments.Count; i++)
                    {
                        textureMerge.SetupModule(atlas, i, textureType);
                    }
                    //last element for this textureType
                    moduleCount = 0;
                    umaGenerator.textureMerge.gameObject.SetActive(true);
                    int width = Mathf.FloorToInt(atlas.cropResolution.x);
                    int height = Mathf.FloorToInt(atlas.cropResolution.y);
                    destinationTexture = new RenderTexture(Mathf.FloorToInt(atlas.cropResolution.x * umaData.atlasResolutionScale), Mathf.FloorToInt(atlas.cropResolution.y * umaData.atlasResolutionScale), 0, slotData.asset.material.channels[textureType].textureFormat, RenderTextureReadWrite.Linear);
                    destinationTexture.filterMode = FilterMode.Point;
                    // Mipmaps come from the RenderTexture unless it will be
                    // converted to a Texture2D below.
                    destinationTexture.useMipMap = umaGenerator.convertMipMaps && !umaGenerator.convertRenderTexture;
                    renderCamera = umaGenerator.textureMerge.myCamera;
                    renderCamera.targetTexture = destinationTexture;
                    // Orthographic camera centered on the uncropped atlas area.
                    renderCamera.orthographicSize = height >> 1;
                    var camTransform = renderCamera.GetComponent<Transform>();
                    camTransform.localPosition = new Vector3(width >> 1, height >> 1, 3);
                    camTransform.localRotation = Quaternion.Euler(0, 180, 180);
                    renderCamera.Render();
                    renderCamera.gameObject.SetActive(false);
                    renderCamera.targetTexture = null;
                    if (umaGenerator.convertRenderTexture)
                    {
                        #region Convert Render Textures
                        yield return 25;
                        // Read the RenderTexture back into a Texture2D in
                        // 512px blocks, yielding between blocks.
                        Texture2D tempTexture;
                        tempTexture = new Texture2D(destinationTexture.width, destinationTexture.height, TextureFormat.ARGB32, umaGenerator.convertMipMaps);
                        int xblocks = destinationTexture.width / 512;
                        int yblocks = destinationTexture.height / 512;
                        if (xblocks == 0 || yblocks == 0)
                        {
                            // Small texture: read in a single pass.
                            RenderTexture.active = destinationTexture;
                            tempTexture.ReadPixels(new Rect(0, 0, destinationTexture.width, destinationTexture.height), 0, 0, umaGenerator.convertMipMaps);
                            RenderTexture.active = null;
                        }
                        else
                        {
                            // figures that ReadPixels works differently on OpenGL and DirectX, someday this code will break because Unity fixes this bug!
                            if (IsOpenGL())
                            {
                                for (int x = 0; x < xblocks; x++)
                                {
                                    for (int y = 0; y < yblocks; y++)
                                    {
                                        RenderTexture.active = destinationTexture;
                                        tempTexture.ReadPixels(new Rect(x * 512, y * 512, 512, 512), x * 512, y * 512, umaGenerator.convertMipMaps);
                                        RenderTexture.active = null;
                                        yield return 8;
                                    }
                                }
                            }
                            else
                            {
                                // Non-OpenGL: source rows are top-down, so flip Y.
                                for (int x = 0; x < xblocks; x++)
                                {
                                    for (int y = 0; y < yblocks; y++)
                                    {
                                        RenderTexture.active = destinationTexture;
                                        tempTexture.ReadPixels(new Rect(x * 512, destinationTexture.height - 512 - y * 512, 512, 512), x * 512, y * 512, umaGenerator.convertMipMaps);
                                        RenderTexture.active = null;
                                        yield return 8;
                                    }
                                }
                            }
                        }
                        resultingTextures[textureType] = tempTexture as Texture;
                        renderCamera.targetTexture = null;
                        RenderTexture.active = null;
                        destinationTexture.Release();
                        UnityEngine.GameObject.DestroyImmediate(destinationTexture);
                        umaGenerator.textureMerge.gameObject.SetActive(false);
                        yield return 6;
                        tempTexture = resultingTextures[textureType] as Texture2D;
                        tempTexture.Apply();
                        tempTexture.wrapMode = TextureWrapMode.Repeat;
                        tempTexture.filterMode = FilterMode.Bilinear;
                        resultingTextures[textureType] = tempTexture;
                        atlas.material.SetTexture(slotData.asset.material.channels[textureType].materialPropertyName, tempTexture);
                        #endregion
                    }
                    else
                    {
                        // Keep the RenderTexture itself as the atlas channel.
                        destinationTexture.filterMode = FilterMode.Bilinear;
                        destinationTexture.wrapMode = TextureWrapMode.Repeat;
                        resultingTextures[textureType] = destinationTexture;
                        atlas.material.SetTexture(slotData.asset.material.channels[textureType].materialPropertyName, destinationTexture);
                    }
                    umaGenerator.textureMerge.gameObject.SetActive(false);
                    break;
                }
                case UMAMaterial.ChannelType.MaterialColor:
                {
                    // Plain color channel: copy the first fragment's base color.
                    atlas.material.SetColor(slotData.asset.material.channels[textureType].materialPropertyName, atlas.materialFragments[0].baseColor);
                    break;
                }
                case UMAMaterial.ChannelType.TintedTexture:
                {
                    // No bake: assign fragment/overlay textures directly and
                    // tint via the material color (channel 0 only).
                    for (int i = 0; i < atlas.materialFragments.Count; i++)
                    {
                        var fragment = atlas.materialFragments[i];
                        if (fragment.isRectShared) continue;
                        for (int j = 0; j < fragment.baseTexture.Length; j++)
                        {
                            if (fragment.baseTexture[j] != null)
                            {
                                atlas.material.SetTexture(slotData.asset.material.channels[j].materialPropertyName, fragment.baseTexture[j]);
                                if (j == 0)
                                {
                                    atlas.material.color = fragment.baseColor;
                                }
                            }
                        }
                        foreach (var overlay in fragment.overlays)
                        {
                            for (int j = 0; j < overlay.textureList.Length; j++)
                            {
                                if (overlay.textureList[j] != null)
                                {
                                    atlas.material.SetTexture(slotData.asset.material.channels[j].materialPropertyName, overlay.textureList[j]);
                                }
                            }
                        }
                    }
                    break;
                }
            }
        }
        atlas.resultingAtlasList = resultingTextures;
    }
}
// Flushes the mirror camera's render target to plain white by rendering once
// with an empty culling mask, then puts every touched camera setting back.
void ClearMirrorTexture(Camera Cam)
{
    // Snapshot the settings this method is about to override.
    CameraClearFlags savedFlags = Cam.clearFlags;
    Color savedBackground = Cam.backgroundColor;
    Rect savedRect = Cam.rect;
    int savedMask = Cam.cullingMask;

    // Solid-white clear over the full target, with nothing rendered on top.
    Cam.clearFlags = CameraClearFlags.SolidColor;
    Cam.backgroundColor = new Color(1.0f, 1.0f, 1.0f, 1.0f);
    Cam.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
    Cam.cullingMask = 0;
    Cam.Render();

    // Restore the caller's configuration.
    Cam.cullingMask = savedMask;
    Cam.rect = savedRect;
    Cam.backgroundColor = savedBackground;
    Cam.clearFlags = savedFlags;
}