/// <summary>
/// Appends the contents of <paramref name="renderBuffer"/> to the internal batch
/// buffer, flushing first if the combined vertex count would approach Unity's
/// 16-bit index limit.
/// </summary>
/// <param name="renderBuffer">Source geometry to append. Left unmodified.</param>
public void Batch(RenderBuffer renderBuffer)
{
    // Skip empty inputs
    if (renderBuffer.IsEmpty())
        return;

    // Lets create a separate batch if the number of vertices is too great
    // (65000 leaves headroom under the 65535 vertex-per-mesh / 16-bit index cap).
    if (m_renderBuffer.Positions.Count + renderBuffer.Positions.Count >= 65000)
    {
        Debug.LogWarning("Too many vertices :O");
        Flush();
    }

    // Triangle indices of the incoming buffer are relative to its own vertex
    // list; shift them by the number of vertices already present.
    int vOffset = m_renderBuffer.Positions.Count;
    if (vOffset != 0)
    {
        // BUG FIX: the original added vOffset to renderBuffer.Triangles IN PLACE,
        // corrupting the caller's buffer for any subsequent use (e.g. batching the
        // same buffer twice would double-offset it). Add the offset while copying
        // into the main buffer instead, leaving the input untouched.
        for (int j = 0; j < renderBuffer.Triangles.Count; j++)
            m_renderBuffer.Triangles.Add(renderBuffer.Triangles[j] + vOffset);
    }
    else
    {
        m_renderBuffer.Triangles.AddRange(renderBuffer.Triangles);
    }

    m_renderBuffer.Positions.AddRange(renderBuffer.Positions);
    m_renderBuffer.Normals.AddRange(renderBuffer.Normals);
    m_renderBuffer.UVs.AddRange(renderBuffer.UVs);
    m_renderBuffer.UV2.AddRange(renderBuffer.UV2);
    m_renderBuffer.Colors.AddRange(renderBuffer.Colors);
}
/// <summary>
/// Creates an empty batcher: no buffered geometry, no draw-call objects,
/// and initially not visible.
/// </summary>
public DrawCallBatcher()
{
    m_visible = false;
    m_drawCallRenderers = new List<Renderer>();
    m_drawCalls = new List<GameObject>();
    m_renderBuffer = new RenderBuffer();
}
// Engine-internal stub: fetches the currently active color buffer into `res`.
// NOTE(review): the out parameter is never assigned in this empty body, which
// plain C# would reject — presumably this is decompiled/extern engine glue whose
// real implementation lives in native code; confirm before modifying.
private static void GetActiveColorBuffer(out RenderBuffer res){}
/// <summary>
/// Editor billboard baker: renders <c>objectToRender</c> with a dedicated ortho
/// camera into two MRT render textures (albedo + normal), blurs each, and writes
/// them to "resource/Billboard_Albedo.png" / "resource/Billboard_Normal.png"
/// next to the blur material's asset path.
/// </summary>
void ConvertToImage() {
    if (objectToRender == null) { return; }
    // Reuse an existing capture camera if one is already in the scene.
    var camObj = GameObject.Find("billboardCam");
    if (camObj) { _captureCam = camObj.GetComponent <Camera>(); }
    if (_captureCam == null) {
        GameObject go = new GameObject("billboardCam"); //create the cameraObject
        //go.hideFlags = HideFlags.HideAndDontSave;
        _captureCam = go.AddComponent <Camera>();
    }
    _captureCam.CopyFrom(Camera.main);
    // Two color targets: [0] albedo, [1] normal (judging from the file names
    // written below). Both share rts[0]'s depth buffer via SetTargetBuffers.
    RenderTexture[] rts = new RenderTexture[2] { new RenderTexture((int)imageWidth, (int)imageHeight, 0), new RenderTexture((int)imageWidth, (int)imageHeight, 0) };
    RenderBuffer[] rbs = new RenderBuffer[2];
    rbs[0] = rts[0].colorBuffer;
    rbs[1] = rts[1].colorBuffer;
    //RenderTexture rt = new RenderTexture((int)imageWidth, (int)imageHeight, 0);
    //_captureCam.targetTexture = rt;
    _captureCam.SetTargetBuffers(rbs, rts[0].depthBuffer);
    _captureCam.orthographic = true;
    _captureCam.clearFlags = CameraClearFlags.Nothing;
    //_captureCam.backgroundColor = new Color(0, 0, 0, 0);
    _captureCam.enabled = true;
    //grab size of object to render - place/size camera to fit
    Bounds bb = objectToRender.GetComponent <Renderer>().bounds;
    float maxSize = Mathf.Max(bb.max.z - bb.min.z, bb.max.x - bb.min.x);
    //place camera looking at centre of object - and backwards down the z-axis from it
    _captureCam.transform.position = bb.center;
    _captureCam.transform.position = new Vector3(_captureCam.transform.position.x + maxSize / 2, _captureCam.transform.position.y, _captureCam.transform.position.z);
    _captureCam.nearClipPlane = 0.0f;
    _captureCam.farClipPlane = maxSize;
    _captureCam.orthographicSize = Mathf.Max(Mathf.Max((bb.max.y - bb.min.y) / 2.0f, (bb.max.x - bb.min.x) / 2.0f), Mathf.Max((bb.max.z - bb.min.z) / 2.0f));
    //_captureCam.transform.LookAt(bb.center);
    _captureCam.transform.LookAt(new Vector3(objectToRender.transform.position.x, bb.center.y, objectToRender.transform.position.z));
    // Compute atlas UV placement from the object's aspect ratio.
    // NOTE(review): 0.3333f presumably corresponds to a 3x3 billboard atlas cell
    // — confirm against the atlas layout used by BillboardBaker.
    var baker = GetComponent <BillboardBaker>();
    baker.width = (bb.max.x - bb.min.x);
    float ratio = (bb.max.y - bb.min.y) / (bb.max.x - bb.min.x);
    if (ratio > 1) { baker.sizeU = 0.3333f / ratio; baker.sizeV = 0.3333f; baker.offsetU = (baker.sizeV - baker.sizeU) / 2; baker.offsetV = 0; }
    else { baker.sizeU = 0.3333f; baker.sizeV = 0.3333f * ratio; baker.offsetU = 0; baker.offsetV = (baker.sizeU - baker.sizeV) / 2; }
    // Clear albedo target to a background green, normal target to "flat" normal.
    RenderTexture.active = rts[0];
    GL.Clear(false, true, new Color(0.2745098f, 0.3019608f, 0.227451f, 0));
    RenderTexture.active = rts[1];
    GL.Clear(false, true, new Color(0.5f, 0.5f, 1, 0));
    // Render to Atlas textures
    renderToTextures();
    RenderTexture desRenderTexture;
    Blur(rts[0], out desRenderTexture);
    var tex = new Texture2D(imageWidth, imageHeight, TextureFormat.ARGB32, false);
    // Output path is derived from the blur material's asset location
    // (the +4 strips the ".mat" extension along with the name).
    string path;
    path = AssetDatabase.GetAssetPath(m_BlurMat);
    path = path.Remove(path.Length - (m_BlurMat.name.Length + 4));
    // Read pixels
    RenderTexture.active = desRenderTexture;
    tex.ReadPixels(new Rect(0, 0, imageWidth, imageHeight), 0, 0);
    tex.Apply();
    // Encode texture into PNG
    byte[] bytes = tex.EncodeToPNG();
    System.IO.File.WriteAllBytes(path + "resource/Billboard_Albedo.png", bytes);
    // Read pixels
    Blur(rts[1], out desRenderTexture);
    RenderTexture.active = desRenderTexture;
    tex.ReadPixels(new Rect(0, 0, imageWidth, imageHeight), 0, 0);
    tex.Apply();
    // Encode texture into PNG
    bytes = tex.EncodeToPNG();
    System.IO.File.WriteAllBytes(path + "resource/Billboard_Normal.png", bytes);
    RenderTexture.active = null;
    _captureCam.enabled = false;
    //SafeDestroy(rt);
    SafeDestroy(tex);
    SafeDestoryArray(rts);
}
// Engine-internal stub: binds/queries the render-target color and depth buffers.
// NOTE(review): both out parameters are never assigned in this empty body —
// presumably decompiled/extern engine glue backed by native code; confirm
// before modifying.
private static void Internal_SetRTBuffer(out RenderBuffer colorBuffer, out RenderBuffer depthBuffer){}
/// <summary>
/// Runs the ping-pong FFT over four MRT groups: m_passes iterations with shader
/// pass 6, then m_passes iterations with shader pass 7, alternating between the
/// two buffer sets each step.
/// </summary>
/// <returns>
/// Index (0 or 1) of the buffer set written last, or -1 when the butterfly
/// lookup table has not been created.
/// </returns>
public int PeformFFT(RenderTexture[] data0, RenderTexture[] data1, RenderTexture[] data2, RenderTexture[] data3)
{
    if (this.m_butterflyLookupTable == null)
        return -1;

    if (SystemInfo.supportedRenderTargetCount < 4)
        throw new InvalidOperationException("System does not support at least 4 render targets");

    // Bind the [0] slots of each group as one MRT set and the [1] slots as the other.
    this.m_pass0RT4[0] = data0[0].colorBuffer;
    this.m_pass0RT4[1] = data1[0].colorBuffer;
    this.m_pass0RT4[2] = data2[0].colorBuffer;
    this.m_pass0RT4[3] = data3[0].colorBuffer;
    this.m_pass1RT4[0] = data0[1].colorBuffer;
    this.m_pass1RT4[1] = data1[1].colorBuffer;
    this.m_pass1RT4[2] = data2[1].colorBuffer;
    this.m_pass1RT4[3] = data3[1].colorBuffer;

    RenderBuffer depth0 = data0[0].depthBuffer;
    RenderBuffer depth1 = data0[1].depthBuffer;

    int writeIdx = 0;   // which buffer set we wrote to last
    int step = 0;       // global step counter across both pass sequences

    // The original code had two identical loops differing only in the shader
    // pass index (6, then 7); they are folded into one loop over the pass.
    foreach (int shaderPass in new[] { 6, 7 })
    {
        for (int i = 0; i < this.m_passes; i++, step++)
        {
            writeIdx = step % 2;
            int readIdx = (step + 1) % 2;

            this.m_fourier.SetTexture("Ceto_ButterFlyLookUp", this.m_butterflyLookupTable[i]);
            this.m_fourier.SetTexture("Ceto_ReadBuffer0", data0[readIdx]);
            this.m_fourier.SetTexture("Ceto_ReadBuffer1", data1[readIdx]);
            this.m_fourier.SetTexture("Ceto_ReadBuffer2", data2[readIdx]);
            this.m_fourier.SetTexture("Ceto_ReadBuffer3", data3[readIdx]);

            if (writeIdx == 0)
                RTUtility.MultiTargetBlit(this.m_pass0RT4, depth0, this.m_fourier, shaderPass);
            else
                RTUtility.MultiTargetBlit(this.m_pass1RT4, depth1, this.m_fourier, shaderPass);
        }
    }

    return writeIdx;
}
/// <summary>
/// Initializes the pipeline with a 3-channel float render target sized to the
/// configured resolution.
/// </summary>
public Pipeline()
{
    var resolution = CRenderSettings.Resolution;
    _bufferSize = resolution;
    _bufferSizeF = (Vector2)resolution;
    RenderTarget = new RenderBuffer <float>(resolution.X, resolution.Y, channelCount: 3);
}
/// <summary>
/// Renders the debug points, lines, and triangles contained in
/// <paramref name="data"/> using the visualization effect's first scene pass.
/// </summary>
private void DrawDebug(RenderBuffer data)
{
    var pass = _visualizationEffect.RenderScenePass0;
    _visualizationEffect.World.SetMatrix(Matrix.Identity);
    _visualizationEffect.View.SetMatrix(this.Camera.View);
    _visualizationEffect.Projection.SetMatrix(this.Camera.Projection);
    this.GraphicsDevice.InputAssembler.SetInputLayout(_inputLayout);
    pass.Apply();

    if (data.NumberOfPoints > 0)
    {
        // One vertex per point.
        var vertices = new VertexPositionColor[data.Points.Length];
        for (int i = 0; i < data.Points.Length; i++)
        {
            var point = data.Points[i];
            // BUG FIX: the original wrote to vertices[i * 2 + 0], overrunning
            // the array (sized Points.Length) once i >= Length / 2 — an
            // apparent copy-paste from the line-list loop below.
            vertices[i] = new VertexPositionColor(point.Point.As<Vector3>(), Color.FromArgb(point.Color));
        }
        DrawVertices(vertices, PrimitiveTopology.PointList);
    }

    if (data.NumberOfLines > 0)
    {
        // Two vertices per line.
        var vertices = new VertexPositionColor[data.Lines.Length * 2];
        for (int x = 0; x < data.Lines.Length; x++)
        {
            DebugLine line = data.Lines[x];
            vertices[x * 2 + 0] = new VertexPositionColor(line.Point0.As<Vector3>(), Color.FromArgb(line.Color0));
            vertices[x * 2 + 1] = new VertexPositionColor(line.Point1.As<Vector3>(), Color.FromArgb(line.Color1));
        }
        DrawVertices(vertices, PrimitiveTopology.LineList);
    }

    if (data.NumberOfTriangles > 0)
    {
        // Three vertices per triangle.
        var vertices = new VertexPositionColor[data.Triangles.Length * 3];
        for (int x = 0; x < data.Triangles.Length; x++)
        {
            DebugTriangle triangle = data.Triangles[x];
            vertices[x * 3 + 0] = new VertexPositionColor(triangle.Point0.As<Vector3>(), Color.FromArgb(triangle.Color0));
            vertices[x * 3 + 1] = new VertexPositionColor(triangle.Point1.As<Vector3>(), Color.FromArgb(triangle.Color1));
            vertices[x * 3 + 2] = new VertexPositionColor(triangle.Point2.As<Vector3>(), Color.FromArgb(triangle.Color2));
        }
        DrawVertices(vertices, PrimitiveTopology.TriangleList);
    }
}
/// <summary>
/// Editor utility: renders the selected object's depth from a top-down
/// orthographic camera into two render textures and saves them as PNGs via a
/// save-file dialog. Shows a dialog if the selection has no MeshFilter.
/// </summary>
private static void catchDeltaDepth()
{
    var obj = Selection.activeGameObject;
    MeshFilter mesh = obj.GetComponent <MeshFilter> ();
    if (mesh != null)
    {
        Renderer _render = obj.GetComponent <Renderer> ();
        Vector3 [] vectors = mesh.sharedMesh.vertices;

        // Temporary top-down camera parented to the object, looking straight down.
        GameObject CameraObj = new GameObject("myCamera");
        Camera cam = CameraObj.AddComponent <Camera>();
        cam.transform.parent = obj.transform;
        cam.transform.localPosition = new Vector3(0, 5, 0);
        cam.transform.localScale = Vector3.one;
        cam.transform.localRotation = Quaternion.Euler(90, 0, 0);
        //cam.depth = ;
        //cam.cullingMask = 1 << CAM_LAYER;
        // cam.gameObject.layer = CAM_LAYER;
        cam.clearFlags = CameraClearFlags.SolidColor;
        cam.orthographic = true;     // projection mode: orthographic
        cam.orthographicSize = 1;    // projection area size (refined below)
        cam.nearClipPlane = 0.01f;   // near plane
        cam.farClipPlane = 10;       // far plane
        cam.rect = new Rect(0, 0, 1f, 1f);
        cam.transform.parent = null;
        cam.transform.localScale = Vector3.one;

        // Project every mesh vertex into camera space to find the extents,
        // then size the ortho frustum to fit.
        float maxX = 0.0f;
        float maxY = 0.0f;
        for (int i = 0, len = vectors.Length; i < len; i++)
        {
            Vector3 w = obj.transform.localToWorldMatrix.MultiplyPoint(vectors [i]);
            Vector3 l = cam.transform.worldToLocalMatrix.MultiplyPoint(w);
            maxX = Mathf.Max(Mathf.Abs(l.x), maxX);
            maxY = Mathf.Max(Mathf.Abs(l.y), maxY);
        }
        cam.orthographicSize = maxY;
        cam.aspect = maxX / maxY;

        var path = EditorUtility.SaveFilePanel(
            "Save texture as PNG",
            "",
            obj.name + "_depth.png",
            "png");
        if (path.Length != 0)
        {
            // Swap in the depth-writing material, render to a color+depth MRT
            // pair, save both targets, then restore everything.
            var m = new Material(Shader.Find("Rolan/WriteDepthToRT2"));
            var old = _render.sharedMaterial;
            _render.sharedMaterial = m;
            RenderTexture tex = new RenderTexture((int)maxY * 100, (int)maxY * 100, 16, RenderTextureFormat.ARGB32);
            RenderTexture tex2 = new RenderTexture(tex.width, tex.height, 0, RenderTextureFormat.ARGB32);
            RenderTexture.active = tex2;
            GL.Clear(false, true, Color.red, 0);
            RenderBuffer [] buff = new RenderBuffer[] { tex.colorBuffer, tex2.colorBuffer };
            cam.SetTargetBuffers(buff, tex.depthBuffer);
            cam.depthTextureMode |= DepthTextureMode.Depth;
            //cam.depthTextureMode = DepthTextureMode.Depth;
            cam.Render();
            cam.Render();
            SaveRenderTextureToPNG(tex, path + "1.png");
            SaveRenderTextureToPNG(tex2, path);
            cam.targetTexture = null;
            GameObject.DestroyImmediate(CameraObj);
            GameObject.DestroyImmediate(tex);
            GameObject.DestroyImmediate(tex2);
            _render.sharedMaterial = old;
            GameObject.DestroyImmediate(m);
            return;
        }
        // BUG FIX: the original called DestroyImmediate(cam) here, which removes
        // only the Camera component and leaks the "myCamera" GameObject in the
        // scene whenever the save dialog is cancelled. Destroy its holder.
        GameObject.DestroyImmediate(CameraObj);
    }
    else
    {
        EditorUtility.DisplayDialog("提示", "请选择一个panel", "确定");
    }
}
/// <summary>
/// Samples <paramref name="source"/> at normalized UV coordinates by scaling
/// them to integer pixel coordinates and delegating to the
/// <c>Vector2Int</c> overload (which applies the configured repeat modes).
/// </summary>
/// <param name="source">Buffer to sample from.</param>
/// <param name="uv">Normalized [0,1] texture coordinates.</param>
public GenericVector <float> Sample(RenderBuffer <float> source, Vector2 uv)
{
    // BUG FIX: the original scaled uv in place and then called
    // Sample(source, uv) with the still-Vector2 argument, which resolved back
    // to this very overload and recursed until stack overflow. Convert to
    // integer pixel coordinates so the Vector2Int overload is chosen.
    int x = (int)(source.Width * uv.X);
    int y = (int)(source.Height * uv.Y);
    return Sample(source, new Vector2Int(x, y));
}
/// <summary>
/// Before the reference camera renders: draws every configured color-mask layer
/// into <c>maskTexture</c> with <c>maskCamera</c> and the color-mask replacement
/// shader, handling single-pass stereo by rendering each eye into one half of
/// the target. Restores the previously active render target when done.
/// </summary>
void OnPreRender()
{
    if (maskCamera != null)
    {
        // Remember the active targets so they can be restored at the end.
        RenderBuffer prevColor = Graphics.activeColorBuffer;
        RenderBuffer prevDepth = Graphics.activeDepthBuffer;
        // single pass not supported in RenderWithShader() as of Unity 2018.1; do multi-pass
        bool singlePassStereo = false;
#if UNITY_2017_2_OR_NEWER
        if (referenceCamera.stereoEnabled)
        {
            singlePassStereo = (XRSettings.eyeTextureDesc.vrUsage == VRTextureUsage.TwoEyes);
            // Mirror the reference camera's per-eye matrices onto the mask camera.
            maskCamera.SetStereoViewMatrix(Camera.StereoscopicEye.Left, referenceCamera.GetStereoViewMatrix(Camera.StereoscopicEye.Left));
            maskCamera.SetStereoViewMatrix(Camera.StereoscopicEye.Right, referenceCamera.GetStereoViewMatrix(Camera.StereoscopicEye.Right));
            maskCamera.SetStereoProjectionMatrix(Camera.StereoscopicEye.Left, referenceCamera.GetStereoProjectionMatrix(Camera.StereoscopicEye.Left));
            maskCamera.SetStereoProjectionMatrix(Camera.StereoscopicEye.Right, referenceCamera.GetStereoProjectionMatrix(Camera.StereoscopicEye.Right));
        }
#endif
        UpdateRenderTextures(singlePassStereo);
        UpdateCameraProperties();
        Graphics.SetRenderTarget(maskTexture);
        GL.Clear(true, true, ClearColor);
#if UNITY_2017_2_OR_NEWER
        // Left eye renders into the left half of the target.
        if (singlePassStereo)
        {
            maskCamera.worldToCameraMatrix = referenceCamera.GetStereoViewMatrix(Camera.StereoscopicEye.Left);
            maskCamera.projectionMatrix = referenceCamera.GetStereoProjectionMatrix(Camera.StereoscopicEye.Left);
            maskCamera.rect = new Rect(0, 0, 0.5f, 1);
        }
#endif
        // One replacement-shader pass per mask layer (mono, or the left eye).
        foreach (var layer in RenderLayers)
        {
            Shader.SetGlobalColor("_COLORMASK_Color", layer.color);
            maskCamera.cullingMask = layer.mask;
            maskCamera.RenderWithShader(colorMaskShader, "RenderType");
        }
#if UNITY_2017_2_OR_NEWER
        // Right eye renders the same layers into the right half of the target.
        if (singlePassStereo)
        {
            maskCamera.worldToCameraMatrix = referenceCamera.GetStereoViewMatrix(Camera.StereoscopicEye.Right);
            maskCamera.projectionMatrix = referenceCamera.GetStereoProjectionMatrix(Camera.StereoscopicEye.Right);
            maskCamera.rect = new Rect(0.5f, 0, 0.5f, 1);
            foreach (var layer in RenderLayers)
            {
                Shader.SetGlobalColor("_COLORMASK_Color", layer.color);
                maskCamera.cullingMask = layer.mask;
                maskCamera.RenderWithShader(colorMaskShader, "RenderType");
            }
        }
#endif
        Graphics.SetRenderTarget(prevColor, prevDepth);
    }
}
/// <summary>
/// Tears down and rebuilds the N-body simulation: destroys any existing native
/// galaxy states, seeds fresh black holes, rebuilds the radius-distribution
/// lookup texture and shader uniforms, runs the position-initialization shader
/// pass into the prev/curr position buffers, and clones the state for the
/// previous-frame snapshot.
/// </summary>
public unsafe void ResetSimulation()
{
    // Free any native state from a previous run.
    if (mainState != null) { NBodyC.DestroyGalaxy(mainState); mainState = null; }
    if (prevState != null) { NBodyC.DestroyGalaxy(prevState); prevState = null; }
    if (_trailState != null) { NBodyC.DestroyGalaxy(_trailState); _trailState = null; }
    _trails.Clear();
    _trailMesh.Clear();
    simulationTime = 0;

    mainState = NBodyC.CreateGalaxy(blackHoleCount);
    mainState->time = 0;
    mainState->frames = 0;
    _initialBlackHoleCount = blackHoleCount;

    // Spawn black holes on a sphere with randomized inward-ish velocities.
    // Each gets a power-of-two id (nextId <<= 1) so ids can act as bit flags.
    {
        Random.InitState(_seed);
        BlackHole *dst = mainState->blackHoles;
        int nextId = 1;
        for (int i = 0; i < blackHoleCount; i++, dst++)
        {
            Vector3 position = Random.onUnitSphere * blackHoleSpawnRadius;
            *dst = new BlackHole()
            {
                position = position,
                velocity = Vector3.Slerp(Vector3.zero - position, Random.onUnitSphere, initialDirVariance).normalized *blackHoleVelocity,
                mass = Random.Range(1 - blackHoleMassVariance, 1 + blackHoleMassVariance),
                id = nextId,
                rotation = Random.rotationUniform
            };
            nextId = nextId << 1;
        }
    }

    // 1D lookup texture: t = (1 - x)^pow, sampled by the simulation shader.
    Texture2D tex = new Texture2D(2048, 1, TextureFormat.RFloat, mipmap: false, linear: true);
    for (int i = 0; i < tex.width; i++)
    {
        float t = 1 - i / 2047f;
        t = Mathf.Pow(t, pow);
        tex.SetPixel(i, 0, new Color(t, 0, 0, 0));
        //tex.SetPixel(i, 0, new Color(radiusDistribution.Evaluate(i / 2048.0f), 0, 0, 0));
    }
    tex.Apply();
    tex.filterMode = FilterMode.Bilinear;
    tex.wrapMode = TextureWrapMode.Clamp;
    simulateMat.SetTexture("_RadiusDistribution", tex);

    updateShaderConstants();

    // Upload per-black-hole velocity vectors.
    {
        BlackHole *src = mainState->blackHoles;
        for (int i = 0; i < mainState->numBlackHoles; i++, src++) { _vectorArray[i] = src->velocity; }
        simulateMat.SetVectorArray("_PlanetVelocities", _vectorArray);
    }

    // Upload per-black-hole densities (mass-weighted by the density knob).
    {
        BlackHole *src = mainState->blackHoles;
        _floatArray.Fill(0);
        for (int i = 0; i < mainState->numBlackHoles; i++, src++) { _floatArray[i] = Mathf.Lerp(1, src->mass, blackHoleMassAffectsDensity); }
        simulateMat.SetFloatArray("_PlanetDensities", _floatArray);
        simulateMat.SetFloat("_TotalDensity", _floatArray.Query().Fold((a, b) => a + b));
    }

    // Upload per-black-hole sizes (mass-weighted by the size knob).
    {
        BlackHole *src = mainState->blackHoles;
        for (int i = 0; i < mainState->numBlackHoles; i++, src++) { _floatArray[i] = Mathf.Lerp(1, src->mass, blackHoleMassAffectsSize); }
        simulateMat.SetFloatArray("_PlanetSizes", _floatArray);
    }

    // Full-screen quad through simulateMat pass 1 initializes both position
    // buffers (prevPos + currPos bound as MRT).
    GL.LoadPixelMatrix(0, 1, 0, 1);
    prevPos.DiscardContents();
    currPos.DiscardContents();
    RenderBuffer[] buffer = new RenderBuffer[2];
    buffer[0] = prevPos.colorBuffer;
    buffer[1] = currPos.colorBuffer;
    Graphics.SetRenderTarget(buffer, prevPos.depthBuffer);
    simulateMat.SetPass(1);
    GL.Begin(GL.QUADS);
    GL.TexCoord2(0, 0); GL.Vertex3(0, 0, 0);
    GL.TexCoord2(1, 0); GL.Vertex3(1, 0, 0);
    GL.TexCoord2(1, 1); GL.Vertex3(1, 1, 0);
    GL.TexCoord2(0, 1); GL.Vertex3(0, 1, 0);
    GL.End();

    // Snapshot the state one reference frame in the past for interpolation.
    prevState = NBodyC.Clone(mainState);
    prevState->time = mainState->time - 1.0f / REFERENCE_FRAMERATE;

    ResetTrails(forceReset: true);
    if (OnReset != null) { OnReset(); }
}
/// <summary>
/// Darken/blur image effect: copies the source, runs a multi-round separable
/// blur (shader passes 1 and 2) with a growing spread, then composites the
/// darkened result into the destination with pass 0.
/// </summary>
/// <returns>false when the effect is disabled or out of its valid LOD; true otherwise.</returns>
public override bool ImageEffect_RenderImage(RenderTexture source, RenderTexture destination, RenderBuffer depthBuffer)
{
    if (!ImageEffects.ImageEffectManager.AdvanceImangeEffectEnabled)
        return false;
    if (!IsOnValidLOD())
        return false;

    RenderTexture temp = RenderTexture.GetTemporary(source.width, source.height);
    //用Bilinear过度更平滑,更好看 (bilinear filtering gives a smoother transition)
    temp.filterMode = FilterMode.Bilinear;
    Graphics.Blit(source, temp);

    mat.SetFloat("_DrakNess", DrakNess);

    int rtWidth = source.width;
    int rtHeight = source.height;
    const int iterations = 2;

    for (int i = 0; i < 6; i++)
    {
        // Spread schedule: rounds 0-1 use 0.5, round 2 uses 0.75, later rounds 1.0.
        float spread;
        if (i == 2)
            spread = 0.75f;
        else if (i > 1)
            spread = 1.0f;
        else
            spread = 0.5f;

        for (int j = 0; j < iterations; j++)
        {
            mat.SetFloat("_BlurSize", (BlurSize * 0.5f + j) * spread);
            // Vertical (pass 1) then horizontal (pass 2) blur.
            RenderTexture rt2 = RenderTexture.GetTemporary(rtWidth, rtHeight);
            rt2.filterMode = FilterMode.Bilinear;
            Graphics.Blit(temp, rt2, mat, 1);
            Graphics.Blit(rt2, temp, mat, 2);
            RenderTexture.ReleaseTemporary(rt2);
        }
    }

    Graphics.Blit(temp, destination, mat, 0);
    RenderTexture.ReleaseTemporary(temp);
    return true;
}
/// <summary>
/// Renders the protein shadow map: binds LOD/scale parameters and the
/// per-instance GPU buffers, targets the eye-depth buffer, and issues an
/// indirect procedural point draw with shader pass 2.
/// </summary>
/// <remarks>
/// NOTE(review): the <paramref name="pass"/> parameter is unused — pass 2 is
/// always selected here. Presumably intentional for the shadow path; confirm.
/// </remarks>
public static void DrawProteinsShadowMap(Material renderProteinsMaterial, Camera camera, RenderBuffer eyeDepthBuffer, RenderBuffer depthBuffer, int pass)
{
    // LOD / scale parameters.
    renderProteinsMaterial.SetInt("_EnableLod", Convert.ToInt32(GlobalProperties.Get.EnableLod));
    renderProteinsMaterial.SetFloat("_Scale", GlobalProperties.Get.Scale);
    renderProteinsMaterial.SetFloat("_FirstLevelBeingRange", GlobalProperties.Get.FirstLevelOffset);
    renderProteinsMaterial.SetVector("_CameraForward", camera.transform.forward);

    // Per-instance and per-atom structured buffers.
    renderProteinsMaterial.SetBuffer("_LodLevelsInfos", GPUBuffers.Get.LodInfo);
    renderProteinsMaterial.SetBuffer("_ProteinInstanceInfo", GPUBuffers.Get.ProteinInstancesInfo);
    renderProteinsMaterial.SetBuffer("_ProteinInstancePositions", GPUBuffers.Get.ProteinInstancePositions);
    renderProteinsMaterial.SetBuffer("_ProteinInstanceRotations", GPUBuffers.Get.ProteinInstanceRotations);
    renderProteinsMaterial.SetBuffer("_ProteinColors", GPUBuffers.Get.IngredientsColors);
    renderProteinsMaterial.SetBuffer("_ProteinAtomPositions", GPUBuffers.Get.ProteinAtoms);
    renderProteinsMaterial.SetBuffer("_ProteinClusterPositions", GPUBuffers.Get.ProteinAtomClusters);
    renderProteinsMaterial.SetBuffer("_ProteinSphereBatchInfos", GPUBuffers.Get.SphereBatches);

    // Draw: indirect args come from the GPU-side argument buffer.
    Graphics.SetRenderTarget(eyeDepthBuffer, depthBuffer);
    renderProteinsMaterial.SetPass(2);
    Graphics.DrawProceduralIndirect(MeshTopology.Points, GPUBuffers.Get.ArgBuffer);
}
/// <summary>
/// Renders protein atoms into the instance-id and atom-id MRT targets: binds
/// LOD/scale parameters, per-instance and per-atom buffers, and the cut-object
/// buffers, then issues an indirect procedural point draw with shader pass 1.
/// </summary>
/// <remarks>
/// NOTE(review): the <paramref name="pass"/> parameter is unused — pass 1 is
/// always selected here; confirm whether it should be forwarded to SetPass.
/// </remarks>
public static void DrawProteinsAtoms(Material renderProteinsMaterial, Camera camera, RenderBuffer instanceId, RenderBuffer atomId, RenderBuffer depthBuffer, int pass)
{
    // LOD / scale parameters.
    renderProteinsMaterial.SetInt("_EnableLod", Convert.ToInt32(GlobalProperties.Get.EnableLod));
    renderProteinsMaterial.SetFloat("_Scale", GlobalProperties.Get.Scale);
    renderProteinsMaterial.SetFloat("_FirstLevelBeingRange", GlobalProperties.Get.FirstLevelOffset);
    renderProteinsMaterial.SetVector("_CameraForward", camera.transform.forward);

    // Per-instance and per-atom structured buffers.
    renderProteinsMaterial.SetBuffer("_LodLevelsInfos", GPUBuffers.Get.LodInfo);
    renderProteinsMaterial.SetBuffer("_ProteinInstanceInfo", GPUBuffers.Get.ProteinInstancesInfo);
    renderProteinsMaterial.SetBuffer("_ProteinInstancePositions", GPUBuffers.Get.ProteinInstancePositions);
    renderProteinsMaterial.SetBuffer("_ProteinInstanceRotations", GPUBuffers.Get.ProteinInstanceRotations);
    renderProteinsMaterial.SetBuffer("_ProteinColors", GPUBuffers.Get.IngredientsColors);
    renderProteinsMaterial.SetBuffer("_ProteinAtomInfo", GPUBuffers.Get.ProteinAtomInfo);
    renderProteinsMaterial.SetBuffer("_ProteinAtomPositions", GPUBuffers.Get.ProteinAtoms);
    renderProteinsMaterial.SetBuffer("_ProteinClusterPositions", GPUBuffers.Get.ProteinAtomClusters);
    renderProteinsMaterial.SetBuffer("_ProteinSphereBatchInfos", GPUBuffers.Get.SphereBatches);

    // Cut-object parameters.
    renderProteinsMaterial.SetInt("_NumCutObjects", SceneManager.Get.NumCutObjects);
    renderProteinsMaterial.SetInt("_NumIngredientTypes", SceneManager.Get.NumAllIngredients);
    renderProteinsMaterial.SetBuffer("_CutInfos", GPUBuffers.Get.CutInfo);
    renderProteinsMaterial.SetBuffer("_CutScales", GPUBuffers.Get.CutScales);
    renderProteinsMaterial.SetBuffer("_CutPositions", GPUBuffers.Get.CutPositions);
    renderProteinsMaterial.SetBuffer("_CutRotations", GPUBuffers.Get.CutRotations);

    // Draw into the two id targets sharing one depth buffer.
    Graphics.SetRenderTarget(new[] { instanceId, atomId }, depthBuffer);
    renderProteinsMaterial.SetPass(1);
    Graphics.DrawProceduralIndirect(MeshTopology.Points, GPUBuffers.Get.ArgBuffer);
}
//### main render methodes #############################################################################################################
//this methode renders vertexIds and barycentric coordinates in uv space for each object
//it's called by Render(...) at the beginning
//Input: context: ScriptableRenderContext
//       camera: Camera in scene
//Output: List<ObjData>, list of all objects with renderer in scene
//NOTE(review): if GetCullingParameters fails mid-loop this returns null,
//leaving camera.cullingMask and the current object's layer modified and the
//already-created RenderTextures unreleased — confirm callers tolerate this.
List <ObjData> UVRenderer(ScriptableRenderContext context, Camera camera)
//initialize list
{
    List <ObjData> sceneObjects = new List <ObjData>(); //used as return value
    //object id
    int id = 0;
    //iterate objects
    foreach (MeshFilter mesh in GameObject.FindObjectsOfType <MeshFilter>())
    {
        //setup: set camera to only render the uv render layer (layer 9)
        int camCullMask = camera.cullingMask;
        camera.cullingMask = 1 << 9;
        context.SetupCameraProperties(camera);
        //setup struct for object
        ObjData obj = new ObjData();
        obj.obj = mesh.gameObject;
        obj.obj.GetComponent <Renderer>().material.SetInt("_ID", id++);
        obj.tileMask = CreateRenderTexture(MAX_TEXTURE_SIZE / TILE_SIZE * 2, MAX_TEXTURE_SIZE / TILE_SIZE, RenderTextureFormat.R8);
        //move object to uv render layer
        int objLayer = obj.obj.layer;
        obj.obj.layer = 9;
        //Initialize RenderTextures for barycentric coordinates / vertex ids
        RenderTexture bayCent = new RenderTexture(MAX_TEXTURE_SIZE, MAX_TEXTURE_SIZE, 0, RenderTextureFormat.ARGB32);
        bayCent.filterMode = FilterMode.Point;
        bayCent.anisoLevel = 0;
        bayCent.Create();
        RenderTexture vertexIds = new RenderTexture(MAX_TEXTURE_SIZE, MAX_TEXTURE_SIZE, 24, RenderTextureFormat.ARGBInt);
        vertexIds.filterMode = FilterMode.Point;
        vertexIds.anisoLevel = 0;
        vertexIds.Create();
        obj.obj.GetComponent <Renderer>().material.SetTexture("_TextureAtlas", CreateRenderTexture(MAX_TEXTURE_SIZE * 2, MAX_TEXTURE_SIZE, RenderTextureFormat.ARGB32));
        //culling
        ScriptableCullingParameters cullingParameters;
        if (!CullResults.GetCullingParameters(camera, out cullingParameters)) { return(null); }
        CullResults.Cull(ref cullingParameters, context, ref cull);
        //set render target: both color targets share bayCent's depth buffer
        RenderBuffer[] cBuffer = new RenderBuffer[2] { bayCent.colorBuffer, vertexIds.colorBuffer };
        camera.SetTargetBuffers(cBuffer, bayCent.depthBuffer);
        //clearing render target
        cameraBuffer.ClearRenderTarget(true, true, new Color(0, 0, 0, 0));
        context.ExecuteCommandBuffer(cameraBuffer);
        cameraBuffer.Clear();
        //drawing: draw unlit opaque materials with the UV-renderer override material
        DrawRendererSettings drawSettings = new DrawRendererSettings(camera, new ShaderPassName("SRPDefaultUnlit"));
        drawSettings.sorting.flags = SortFlags.CommonOpaque;
        FilterRenderersSettings filterSettings = new FilterRenderersSettings(true);
        drawSettings.SetOverrideMaterial(new Material(Shader.Find("Custom/UVRenderer")), 0);
        context.DrawRenderers(cull.visibleRenderers, ref drawSettings, filterSettings);
        context.Submit();
        //finish: move object and camera back to original layer
        obj.obj.layer = objLayer;
        camera.cullingMask = camCullMask;
        //set object properties
        obj.obj.GetComponent <Renderer>().material.SetTexture("_BaycentCoords", bayCent);
        obj.bayCent = bayCent;
        obj.obj.GetComponent <Renderer>().material.SetTexture("_VertexIDs", vertexIds);
        //add object to result list
        sceneObjects.Add(obj);
        //reset render taget
        Graphics.SetRenderTarget(null);
    }
    //return list
    return(sceneObjects);
}
private void GetColorBuffer(out RenderBuffer res){}
/// <summary>
/// Extension helper: blits <paramref name="Buffer"/> into
/// <paramref name="Texture"/> and returns the resulting texture.
/// </summary>
internal static Texture2D RenderTexture2D(this RenderBuffer Buffer, Texture2D Texture)
{
    // DirectBlit takes the texture by ref, so return whatever instance it
    // leaves behind (it may replace the texture).
    TextureFormatConverter.DirectBlit(Buffer, ref Texture);
    return Texture;
}
/// <summary>
/// Draws the contents of a <see cref="RenderBuffer" /> to a <see cref="Bitmap"/>.
/// </summary>
/// <param name="rBuffer">
/// The pixel buffer of a view. You can get this by calling <see cref="WebView.Render" />,
/// when a view is dirty.
/// </param>
/// <param name="b">
/// A reference to the <see cref="Bitmap"/> that is filled with the contents of the specified
/// pixel buffer. This can be a null reference in which case a new bitmap will be created.
/// Keep a reference to this bitmap for subsequent calls to this method. This prevents useless
/// overhead when the size of the pixel buffer has not changed and a creation of a new bitmap
/// is not required.
/// </param>
/// <remarks>
/// You do not need to check for changes to the size of the pixel buffer before calling this method.
/// NOTE(review): the copy loop assumes 32bpp pixels laid out contiguously with
/// no row padding, and a width divisible by 8 (each iteration moves 32 bytes =
/// 8 pixels); trailing pixels of a non-multiple-of-8 width would be skipped —
/// confirm the buffer always satisfies this.
/// </remarks>
/// <seealso cref="WebView.IsDirty"/>
/// <seealso cref="WebView.Render"/>
public static void DrawBuffer( RenderBuffer rBuffer, ref Bitmap b )
{
    // (Re)allocate the bitmap only when missing or the size changed.
    if ( b == null )
        b = new Bitmap( rBuffer.Width, rBuffer.Height, PixelFormat.Format32bppArgb );
    else if ( ( b.Width != rBuffer.Width ) || ( b.Height != rBuffer.Height ) )
    {
        b.Dispose();
        b = new Bitmap( rBuffer.Width, rBuffer.Height, PixelFormat.Format32bppArgb );
    }

    BitmapData bits = b.LockBits(
        new Rectangle( 0, 0, rBuffer.Width, rBuffer.Height ),
        ImageLockMode.ReadWrite, b.PixelFormat );

    unsafe
    {
        UInt64* ptrBase = (UInt64*)( (byte*)bits.Scan0 );
        UInt64* datBase = (UInt64*)rBuffer.Buffer;
        UInt32 lOffset = 0;
        UInt32 lEnd = (UInt32)b.Height * (UInt32)( b.Width / 8 );

        // copy 64 bits at a time, 4 times (since we divided by 8)
        for ( lOffset = 0; lOffset < lEnd; lOffset++ )
        {
            *ptrBase++ = *datBase++;
            *ptrBase++ = *datBase++;
            *ptrBase++ = *datBase++;
            *ptrBase++ = *datBase++;
        }
    }

    b.UnlockBits( bits );
}
/// <summary>
/// Render highlighting to the highlightingBuffer using frameBuffer.depthBuffer.
/// </summary>
/// <param name="frameBuffer">Frame buffer RenderTexture, depthBuffer of which will be used to occlude highlighting. May be null (depth occlusion is then disabled).</param>
public void RenderHighlighting(RenderTexture frameBuffer)
{
    // Release highlightingBuffer if it wasn't released already
    if (highlightingBuffer != null)
    {
        RenderTexture.ReleaseTemporary(highlightingBuffer);
        highlightingBuffer = null;
    }

    if (!isSupported || !enabled || !go.activeInHierarchy) { return; }

    int aa = QualitySettings.antiAliasing;
    if (aa == 0) { aa = 1; }

    bool depthAvailable = true;
    // Check if frameBuffer.depthBuffer is not available, contains garbage (when MSAA is enabled) or doesn't have stencil bits (when depth is 16 or 0)
    if (frameBuffer == null || frameBuffer.depth < 24) { depthAvailable = false; }

    // Reset aa value to 1 in case mainCam is in DeferredLighting Rendering Path
    if (refCam.actualRenderingPath == RenderingPath.DeferredLighting) { aa = 1; }
    // In case MSAA is enabled in forward/vertex lit rendering paths - depth buffer contains garbage
    else if (aa > 1) { depthAvailable = false; }

    // Check if framebuffer depth data availability has changed
    if (isDepthAvailable != depthAvailable)
    {
        isDepthAvailable = depthAvailable;
        // Update ZWrite value for all highlighting shaders correspondingly (isDepthAvailable ? ZWrite Off : ZWrite On)
        Highlighter.SetZWrite(isDepthAvailable ? 0f : 1f);
        if (isDepthAvailable)
        {
            Debug.LogWarning("HighlightingSystem : Framebuffer depth data is available back again and will be used to occlude highlighting. Highlighting occluders disabled.");
        }
        else
        {
            Debug.LogWarning("HighlightingSystem : Framebuffer depth data is not available and can't be used to occlude highlighting. Highlighting occluders enabled.");
        }
    }

    // Set global depth offset properties for highlighting shaders to the values which has this HighlightingBase component
    Highlighter.SetOffsetFactor(offsetFactor);
    Highlighter.SetOffsetUnits(offsetUnits);

    // Set this component as currently active HighlightingBase before enabling Highlighters
    current = this;

    // Turn on highlighting shaders on all highlighter components
    int count = 0;
    for (int i = 0; i < highlighters.Count; i++)
    {
        if (highlighters[i].Highlight()) { count++; }
    }

    // Do nothing in case no Highlighters is currently visible
    if (count == 0) { current = null; return; }

    // If frameBuffer.depthBuffer is not available
    int w = Screen.width;
    int h = Screen.height;
    int depth = 24;    // because stencil will be rendered to the highlightingBuffer.depthBuffer
    // If frameBuffer.depthBuffer is available
    if (isDepthAvailable)
    {
        w = frameBuffer.width;
        h = frameBuffer.height;
        depth = 0;    // because stencil will be rendered to frameBuffer.depthBuffer
    }

    // Setup highlightingBuffer.
    // BUG FIX: the original declared a LOCAL `RenderTexture highlightingBuffer`
    // here, shadowing the class field. The field (released and nulled at the top
    // of this method) therefore never held the temporary, which leaked on every
    // call. Assign the field so the next call's release actually works.
    highlightingBuffer = RenderTexture.GetTemporary(w, h, depth, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default, aa);
    if (!highlightingBuffer.IsCreated())
    {
        highlightingBuffer.filterMode = FilterMode.Point;
        highlightingBuffer.useMipMap = false;
        highlightingBuffer.wrapMode = TextureWrapMode.Clamp;
    }

    // Clear highlightingBuffer colorBuffer and clear depthBuffer only in case frameBuffer depth data is not available
    RenderTexture.active = highlightingBuffer;
    GL.Clear((isDepthAvailable ? false : true), true, Color.clear);

    // Use depth data from frameBuffer in case it is available. Use highlightingBuffer.depthBuffer otherwise
    RenderBuffer depthBuffer = isDepthAvailable ? frameBuffer.depthBuffer : highlightingBuffer.depthBuffer;

    if (!shaderCameraGO)
    {
        shaderCameraGO = new GameObject("HighlightingCamera");
        shaderCameraGO.hideFlags = HideFlags.HideAndDontSave;
        shaderCamera = shaderCameraGO.AddComponent <Camera>();
        shaderCamera.enabled = false;
    }

    shaderCamera.CopyFrom(refCam);
    //shaderCamera.projectionMatrix = mainCam.projectionMatrix; // Uncomment this line if you have problems using Highlighting System with custom projection matrix on your camera
    shaderCamera.cullingMask = layerMask;
    shaderCamera.rect = new Rect(0f, 0f, 1f, 1f);
    shaderCamera.renderingPath = RenderingPath.Forward;
    shaderCamera.depthTextureMode = DepthTextureMode.None;
    shaderCamera.allowHDR = false;
    shaderCamera.useOcclusionCulling = false;
    shaderCamera.backgroundColor = new Color(0, 0, 0, 0);
    shaderCamera.clearFlags = CameraClearFlags.Nothing;
    shaderCamera.SetTargetBuffers(highlightingBuffer.colorBuffer, depthBuffer);

    // Get rid of "Tiled GPU Perf warning" if we're not in debug mode.
    // BUG FIX: frameBuffer may legitimately be null (handled above), so guard
    // the call to avoid a NullReferenceException on that path.
    #if !DEBUG_ENABLED
    if (frameBuffer != null) { frameBuffer.MarkRestoreExpected(); }
    #endif

    shaderCamera.Render();

    // Extinguish all highlighters
    for (int i = 0; i < highlighters.Count; i++)
    {
        highlighters[i].Extinguish();
    }

    // Highlighting buffer rendering finished. Reset currently active HighlightingBase
    current = null;

    // Create two buffers for blurring the image
    int width = highlightingBuffer.width / _downsampleFactor;
    int height = highlightingBuffer.height / _downsampleFactor;
    RenderTexture buffer = RenderTexture.GetTemporary(width, height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default, 1);
    RenderTexture buffer2 = RenderTexture.GetTemporary(width, height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default, 1);
    if (!buffer.IsCreated())
    {
        buffer.useMipMap = false;
        buffer.wrapMode = TextureWrapMode.Clamp;
    }
    if (!buffer2.IsCreated())
    {
        buffer2.useMipMap = false;
        buffer2.wrapMode = TextureWrapMode.Clamp;
    }

    // Copy highlighting buffer to the smaller texture
    Graphics.Blit(highlightingBuffer, buffer, blitMaterial);

    // Blur the small texture
    bool oddEven = true;
    for (int i = 0; i < iterations; i++)
    {
        if (oddEven) { FourTapCone(buffer, buffer2, i); }
        else { FourTapCone(buffer2, buffer, i); }
        oddEven = !oddEven;
    }

    // Upscale blurred texture and cut stencil from it
    Graphics.SetRenderTarget(highlightingBuffer.colorBuffer, depthBuffer);
    cutMaterial.SetTexture(ShaderPropertyID._MainTex, oddEven ? buffer : buffer2);
    DoubleBlit(cutMaterial, 0, cutMaterial, 1);

    // Cleanup
    RenderTexture.ReleaseTemporary(buffer);
    RenderTexture.ReleaseTemporary(buffer2);
}
/// <summary>
/// Applies the saturation color-correction pass from <paramref name="source"/> to
/// <paramref name="destination"/>.
/// </summary>
/// <returns>True when the effect rendered; false when resources are missing or the LOD check fails.</returns>
public override bool ImageEffect_RenderImage(RenderTexture source, RenderTexture destination, RenderBuffer depthBuffer)
{
    // Bail out early when shaders/materials are unavailable or this LOD is inactive.
    bool canRender = CheckResources() && IsOnValidLOD();
    if (!canRender)
    {
        return false;
    }

    ccMaterial.SetFloat("_Saturation", saturation);
    Graphics.Blit(source, destination, ccMaterial);
    return true;
}
/// <summary>
/// Renders <paramref name="tempCam"/> into the given color/depth buffers,
/// clearing nothing beforehand.
/// </summary>
/// <param name="tempCam">Camera used for the off-screen render.</param>
/// <param name="cullingMask">Layer mask selecting which objects are rendered.</param>
/// <param name="colorBuffer">Target color buffer.</param>
/// <param name="depthBuffer">Target depth buffer.</param>
private static void RenderCamera(Camera tempCam, int cullingMask, RenderBuffer colorBuffer, RenderBuffer depthBuffer)
{
    // The decompiled original called the raw property setters
    // (set_clearFlags/set_cullingMask) with a magic value; use normal property
    // syntax and the named enum member ((CameraClearFlags)4 == Nothing).
    tempCam.clearFlags = CameraClearFlags.Nothing;
    tempCam.cullingMask = cullingMask;
    tempCam.SetTargetBuffers(colorBuffer, depthBuffer);
    tempCam.Render();
}
/// <summary>
/// Samples a pixel from <paramref name="source"/> at <paramref name="uv"/>, applying
/// the configured repeat modes to each axis before the lookup.
/// </summary>
public virtual GenericVector <T> Sample <T>(RenderBuffer <T> source, Vector2Int uv) where T : unmanaged
{
    // Resolve each axis independently through its repeat mode, then fetch the pixel.
    var sampleX = _repeatModeX.GetUV(uv.X, source.Width);
    var sampleY = _repeatModeY.GetUV(uv.Y, source.Height);
    return source.GetPixel(sampleX, sampleY);
}
// Depth-peeling order-independent transparency: renders the transparent layer
// several times, peeling one depth layer per pass, then composites the layers
// back-to-front over the opaque scene.
void OnRenderImage(RenderTexture src, RenderTexture dst)
{
    if (transparentMode == TransparentMode.ODT)
    {
        // Not in depth-peeling mode: pass the image through untouched.
        Graphics.Blit(src, dst);
    }
    else
    {
        // Scratch targets: opaque scene (with a 24-bit depth buffer) plus two
        // ping-pong "depth" textures (depth encoded into ARGB32 color targets).
        m_opaqueTex = RenderTexture.GetTemporary(Screen.width, Screen.height, 24, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
        m_depthTexs[0] = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
        m_depthTexs[1] = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
        RenderTexture[] colorTexs = new RenderTexture[layers];
        colorTexs[0] = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
        // First render all opaque objects
        m_transparentCamera.targetTexture = m_opaqueTex;
        m_transparentCamera.backgroundColor = m_camera.backgroundColor;
        m_transparentCamera.clearFlags = m_camera.clearFlags;
        m_transparentCamera.cullingMask = ~(1 << LayerMask.NameToLayer("Transparent"));
        m_transparentCamera.Render();
        // First iteration to render the scene as normal
        // (MRT: color layer 0 + first depth texture, sharing the opaque depth buffer).
        RenderBuffer[] mrtBuffers = new RenderBuffer[2];
        mrtBuffers[0] = colorTexs[0].colorBuffer;
        mrtBuffers[1] = m_depthTexs[0].colorBuffer;
        m_transparentCamera.SetTargetBuffers(mrtBuffers, m_opaqueTex.depthBuffer);
        m_transparentCamera.backgroundColor = new Color(1.0f, 1.0f, 1.0f, 0.0f);
        m_transparentCamera.clearFlags = CameraClearFlags.Color;
        m_transparentCamera.cullingMask = 1 << LayerMask.NameToLayer("Transparent");
        m_transparentCamera.RenderWithShader(initializationShader, null);
        // Peel away the depth
        for (int i = 1; i < layers; i++)
        {
            colorTexs[i] = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
            mrtBuffers[0] = colorTexs[i].colorBuffer;
            // Ping-pong between the two depth textures: write i % 2, read 1 - i % 2.
            mrtBuffers[1] = m_depthTexs[i % 2].colorBuffer;
            m_transparentCamera.SetTargetBuffers(mrtBuffers, m_opaqueTex.depthBuffer);
            m_transparentCamera.backgroundColor = new Color(1.0f, 1.0f, 1.0f, 0.0f);
            m_transparentCamera.cullingMask = 1 << LayerMask.NameToLayer("Transparent");
            Shader.SetGlobalTexture("_PrevDepthTex", m_depthTexs[1 - i % 2]);
            m_transparentCamera.RenderWithShader(depthPeelingShader, null);
        }
        // Blend all the layers, back-to-front, starting from the opaque scene.
        RenderTexture colorAccumTex = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
        Graphics.Blit(m_opaqueTex, colorAccumTex);
        for (int i = layers - 1; i >= 0; i--)
        {
            RenderTexture tmpAccumTex = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
            m_blendMat.SetTexture("_LayerTex", colorTexs[i]);
            Graphics.Blit(colorAccumTex, tmpAccumTex, m_blendMat, 1);
            RenderTexture.ReleaseTemporary(colorAccumTex);
            colorAccumTex = tmpAccumTex;
        }
        Graphics.Blit(colorAccumTex, dst);
        // Release every temporary render texture acquired above.
        RenderTexture.ReleaseTemporary(colorAccumTex);
        RenderTexture.ReleaseTemporary(m_opaqueTex);
        RenderTexture.ReleaseTemporary(m_depthTexs[0]);
        RenderTexture.ReleaseTemporary(m_depthTexs[1]);
        for (int i = 0; i < layers; i++)
        {
            RenderTexture.ReleaseTemporary(colorTexs[i]);
        }
    }
}
/// <summary>
/// HDR depth-of-field pass: clamps the tweakables, computes the normalized focal
/// distance and circle-of-confusion parameters, then blits through the DOF material.
/// </summary>
/// <returns>True when the effect rendered; false when disabled or resources are missing.</returns>
public override bool ImageEffect_RenderImage(RenderTexture source, RenderTexture destination, RenderBuffer depthBuffer)
{
    // Global kill switch for advanced image effects.
    if (!ImageEffects.ImageEffectManager.AdvanceImangeEffectEnabled)
    {
        return(false);
    }
    if (!CheckResources() || !IsOnValidLOD())
    {
        return(false);
    }
    // clamp & prepare values so they make sense
    if (aperture < 0.0f)
    {
        aperture = 0.0f;
    }
    if (maxBlurSize < 0.1f)
    {
        maxBlurSize = 0.1f;
    }
    focalSize = Mathf.Clamp(focalSize, 0.0f, 2.0f);
    internalBlurWidth = Mathf.Max(maxBlurSize, 0.0f);
    // focal & coc calculations
    // If the effect manager has a "hero" object registered for this camera,
    // focus on that object's transform.
    ImageEffectManager result = null;
    ImageEffectManager.ImageEffectManagerTable.TryGetValue(m_Camera, out result);
    if (result != null)
    {
        if (result.hero != null)
        {
            focalTransform = result.hero.transform;
        }
    }
    // Normalized (0..1) focal distance: either the focus transform's viewport depth
    // relative to the far plane, or derived from the explicit focal length.
    focalDistance01 = (focalTransform) ? (m_Camera.WorldToViewportPoint(focalTransform.position)).z / (m_Camera.farClipPlane) : FocalDistance01(focalLength);
    dofHdrMaterial.SetVector("_CurveParams", new Vector4(1.0f, focalSize, aperture / 10.0f, focalDistance01));
    if (ShowFocus)
    {
        // Debug visualization of the focal plane (material pass 2).
        WriteCoc(source, true);
        Graphics.Blit(source, destination, dofHdrMaterial, 2);
    }
    else
    {
        source.filterMode = FilterMode.Bilinear;
        WriteCoc(source, true);
        dofHdrMaterial.SetTexture("_FgOverlap", null);
        dofHdrMaterial.SetVector("_Offsets", Vector4.one * (2 * internalBlurWidth));
        Graphics.Blit(source, destination, dofHdrMaterial, 1);
    }
    return(true);
}
/// <summary>
/// Draws the piece stand corresponding to <paramref name="index"/>.
/// </summary>
/// <param name="index">0 = piece box, 1 = black's stand, 2 = white's stand.</param>
private void AddRenderPieceBox(RenderBuffer renderBuffer, int index)
{
    // Return if the piece texture has not been created yet.
    if (this.pieceTexture == null || this.pieceTexture.TextureName == 0)
    {
        return;
    }
    // The piece box may be hidden, in which case there is nothing to draw.
    if (index == 0 && !IsKomaBoxVisible)
    {
        return;
    }
    // When the board is flipped, swap the apparent black/white stands.
    var viewIndex = (
        ViewSide != BWType.Black
        ? (index == 0 ? 0 : index == 1 ? 2 : 1)
        : index);
    var pieceBoxBounds = this.pieceBoxBounds[viewIndex];
    // Piece-stand background texture.
    renderBuffer.AddRender(
        this.pieceBoxTexture, BlendType.Diffuse,
        pieceBoxBounds, Transform,
        ShogiZOrder.BoardZ, BoardOpacity);
    // Draw the player's name on the piece stand.
    {
        var y = (viewIndex == 2
            ? pieceBoxBounds.Bottom - 5 - 15
            : pieceBoxBounds.Top + 5);
        var bounds = new RectangleF(
            pieceBoxBounds.Left + 5, y,
            pieceBoxBounds.Width - 10, 15);
        // Highlight the name background for the side to move.
        var color = (
            Board.Turn == (BWType)index
            ? TebanPlayerNameBackgroundColor
            : UnTebanPlayerNameBackgroundColor);
        renderBuffer.AddRender(
            BlendType.Diffuse, bounds, Transform,
            color, ShogiZOrder.PostBoardZ);
        // Draw the player's name, truncating overly long names with an ellipsis.
        var name = (
            index == 1 ? BlackPlayerName :
            index == 2 ? WhitePlayerName :
            "駒箱");
        if (name.HankakuLength() > 17)
        {
            name = name.HankakuSubstring(14) + "...";
        }
        bounds.Inflate(-1, -1);
        AddRenderText(
            renderBuffer, name, this.nameFont,
            bounds, ShogiZOrder.PostBoardZ);
    }
    // Draw the total time and elapsed time.
    // The remaining time is hidden while the piece box is visible (e.g. while
    // editing a position).
    if (IsTimeVisible && !IsKomaBoxVisible)
    {
        var y = (viewIndex == 2
            ? pieceBoxBounds.Bottom
            : pieceBoxBounds.Top - 15);
        var bounds = new RectangleF(
            pieceBoxBounds.Left, y,
            pieceBoxBounds.Width, 15);
        renderBuffer.AddRender(
            BlendType.Diffuse, bounds, Transform,
            TimeBackgroundColor, ShogiZOrder.PostBoardZ);
        // Draw the elapsed time; the format is 'elapsed / total'.
        var totalTime = (index == 1 ? BlackTotalTime : WhiteTotalTime);
        var time = (index == 1 ? BlackTime : WhiteTime);
        var str = string.Format(
            "{0:000}:{1:00} / {2:000}:{3:00}",
            (int)time.TotalMinutes, time.Seconds,
            (int)totalTime.TotalMinutes, totalTime.Seconds);
        bounds.Inflate(-4, -1);
        AddRenderText(
            renderBuffer, str, this.timeFont,
            bounds, ShogiZOrder.PostBoardZ);
    }
}
/// <summary>
/// Tone-mapping image effect. The actual tonemapper implementation (Reinhard,
/// Hable, adaptive, user curve, ...) has been disabled: this override always
/// reports that it did not render, so the effect chain falls through.
/// </summary>
/// <returns>Always false (effect intentionally disabled).</returns>
public override bool ImageEffect_RenderImage(RenderTexture source, RenderTexture destination, RenderBuffer depthBuffer)
{
    if (!ImageEffects.ImageEffectManager.AdvanceImangeEffectEnabled)
    {
        return false;
    }
    // The original body (tonemapper selection, adaptive luminance downsample
    // pyramid, etc.) was commented out wholesale and always fell through to this
    // return. The dead commented-out code has been removed; recover it from
    // version control if tone mapping is ever re-enabled.
    return false;
}
/// <summary>
/// Renders the contents of a <see cref="RenderBuffer"/> into <paramref name="b"/> and
/// paints that bitmap onto the given drawing surface after clearing it with
/// <paramref name="color"/>.
/// </summary>
/// <param name="rBuffer">
/// Pixel buffer of a view; obtain it from <see cref="WebView.Render"/> when a view is dirty.
/// </param>
/// <param name="g">
/// Drawing surface to paint on. It is not disposed by this method; dispose it yourself
/// if it did not come from a managed paint event.
/// </param>
/// <param name="color">
/// Background color the entire surface is cleared to before the bitmap is drawn.
/// </param>
/// <param name="b">
/// Bitmap filled with the buffer's contents; may be null, in which case a new bitmap is
/// created. Keep the reference between calls so the bitmap is only recreated when the
/// pixel-buffer size actually changes.
/// </param>
/// <seealso cref="WebView.IsDirty"/>
/// <seealso cref="WebView.Render"/>
public static void DrawBuffer(RenderBuffer rBuffer, Graphics g, Color color, ref Bitmap b)
{
    // Fill (or lazily create) the bitmap, clear the surface, then composite at the origin.
    DrawBuffer(rBuffer, ref b);
    g.Clear(color);
    g.DrawImageUnscaled(b, 0, 0);
}
// Temporal anti-aliasing resolve: blends the current frame with the accumulated
// history texture, applies sharpening, and writes the new history via MRT.
void OnRenderImage(RenderTexture source, RenderTexture destination)
{
    if (camera_.orthographic)
    {
        // Pass through for orthographic cameras.
        Graphics.Blit(source, destination);
        return;
    }
    else if (m_History == null || (m_History.width != source.width || m_History.height != source.height))
    {
        // (Re)create the history buffer when missing or when the source size
        // changed, seeding it with the current frame.
        if (m_History)
        {
            RenderTexture.ReleaseTemporary(m_History);
        }
        m_History = RenderTexture.GetTemporary(source.width, source.height, 0, source.format, RenderTextureReadWrite.Default);
        m_History.filterMode = FilterMode.Bilinear;
        m_History.hideFlags = HideFlags.HideAndDontSave;
        Graphics.Blit(source, m_History);
    }
    material.SetVector("_SharpenParameters", new Vector4(settings.sharpenFilterSettings.amount, 0f, 0f, 0f));
    material.SetVector("_FinalBlendParameters", new Vector4(settings.blendSettings.stationary, settings.blendSettings.moving, 100f * settings.blendSettings.motionAmplification, 0f));
    material.SetTexture("_MainTex", source);
    material.SetTexture("_HistoryTex", m_History);
    // "temporary" receives the new history; the effect writes two targets at once.
    RenderTexture temporary = RenderTexture.GetTemporary(source.width, source.height, 0, source.format, RenderTextureReadWrite.Default);
    temporary.filterMode = FilterMode.Bilinear;
    var effectDestination = destination;
    var doesNeedExtraBlit = false;
    if (destination == null)
    {
        // Rendering to the backbuffer: MRT needs a real texture target, so render
        // into a temporary and blit to the screen afterwards.
        effectDestination = RenderTexture.GetTemporary(source.width, source.height, 0, source.format, RenderTextureReadWrite.Default);
        effectDestination.filterMode = FilterMode.Bilinear;
        doesNeedExtraBlit = true;
    }
    var renderTargets = new RenderBuffer[2];
    renderTargets[0] = effectDestination.colorBuffer;
    renderTargets[1] = temporary.colorBuffer;
    Graphics.SetRenderTarget(renderTargets, effectDestination.depthBuffer);
    RenderFullScreenQuad();
    // Swap: release the old history; keep the freshly written one for next frame.
    RenderTexture.ReleaseTemporary(m_History);
    m_History = temporary;
    if (doesNeedExtraBlit)
    {
        Graphics.Blit(effectDestination, destination);
        RenderTexture.ReleaseTemporary(effectDestination);
    }
    RenderTexture.active = destination;
}
/// <summary>
/// Drives the glow camera for this frame: when the glow camera is missing,
/// <see cref="OnPreCull"/> is invoked (which presumably sets it up — confirm);
/// otherwise the glow pass renders and the source image is copied to the destination.
/// </summary>
/// <returns>Always true.</returns>
public override bool ImageEffect_RenderImage(RenderTexture source, RenderTexture destination, RenderBuffer depthBuffer)
{
    // NOTE: keep Unity's overloaded == null check (it also treats destroyed objects as null).
    if (glowCam == null)
    {
        OnPreCull();
        return true;
    }

    if (glowCam.glowManager.CheckSupport())
    {
        glowCam.screenRt = source;
        glowCam.HostCamera.Render();
        Graphics.Blit(source, destination);
    }

    return true;
}
// Stub with an intentionally empty body (looks like a decompiled engine binding —
// TODO confirm): would set the active render target to multiple color buffers
// plus one depth buffer.
public static void SetRenderTarget(RenderBuffer[] colorBuffers, RenderBuffer depthBuffer){}
/// <summary>
/// Perform fourier transform on four textures.
/// Each dataN array holds two buffers that are ping-ponged between passes;
/// horizontal (X) butterfly passes run first, then vertical (Y) passes.
/// </summary>
/// <returns>-1 when the lookup table is missing; otherwise the index (0 or 1) of the buffer set holding the final result.</returns>
public int PeformFFT(RenderTexture[] data0, RenderTexture[] data1, RenderTexture[] data2, RenderTexture[] data3)
{
    if (m_butterflyLookupTable == null)
    {
        return(-1);
    }
    if (SystemInfo.supportedRenderTargetCount < 4)
    {
        throw new InvalidOperationException("System does not support at least 4 render targets");
    }
    //RenderTexture[] pass0 = new RenderTexture[] { data0[0], data1[0], data2[0], data3[0] };
    //RenderTexture[] pass1 = new RenderTexture[] { data0[1], data1[1], data2[1], data3[1] };
    // Cache the MRT color buffers for the two ping-pong buffer sets.
    m_pass0RT4[0] = data0[0].colorBuffer;
    m_pass0RT4[1] = data1[0].colorBuffer;
    m_pass0RT4[2] = data2[0].colorBuffer;
    m_pass0RT4[3] = data3[0].colorBuffer;
    m_pass1RT4[0] = data0[1].colorBuffer;
    m_pass1RT4[1] = data1[1].colorBuffer;
    m_pass1RT4[2] = data2[1].colorBuffer;
    m_pass1RT4[3] = data3[1].colorBuffer;
    RenderBuffer depth0 = data0[0].depthBuffer;
    RenderBuffer depth1 = data0[1].depthBuffer;
    int i;
    int idx = 0;
    int idx1;
    int j = 0;
    // Horizontal (X) butterfly passes: write set idx, read set idx1.
    for (i = 0; i < m_passes; i++, j++)
    {
        idx = j % 2;
        idx1 = (j + 1) % 2;
        m_fourier.SetTexture("Ceto_ButterFlyLookUp", m_butterflyLookupTable[i]);
        m_fourier.SetTexture("Ceto_ReadBuffer0", data0[idx1]);
        m_fourier.SetTexture("Ceto_ReadBuffer1", data1[idx1]);
        m_fourier.SetTexture("Ceto_ReadBuffer2", data2[idx1]);
        m_fourier.SetTexture("Ceto_ReadBuffer3", data3[idx1]);
        //if (idx == 0)
        //    RTUtility.MultiTargetBlit(pass0, m_fourier, PASS_X_4);
        //else
        //    RTUtility.MultiTargetBlit(pass1, m_fourier, PASS_X_4);
        if (idx == 0)
        {
            RTUtility.MultiTargetBlit(m_pass0RT4, depth0, m_fourier, PASS_X_4);
        }
        else
        {
            RTUtility.MultiTargetBlit(m_pass1RT4, depth1, m_fourier, PASS_X_4);
        }
    }
    // Vertical (Y) butterfly passes; j keeps counting so the ping-pong continues
    // seamlessly from the X passes.
    for (i = 0; i < m_passes; i++, j++)
    {
        idx = j % 2;
        idx1 = (j + 1) % 2;
        m_fourier.SetTexture("Ceto_ButterFlyLookUp", m_butterflyLookupTable[i]);
        m_fourier.SetTexture("Ceto_ReadBuffer0", data0[idx1]);
        m_fourier.SetTexture("Ceto_ReadBuffer1", data1[idx1]);
        m_fourier.SetTexture("Ceto_ReadBuffer2", data2[idx1]);
        m_fourier.SetTexture("Ceto_ReadBuffer3", data3[idx1]);
        //if (idx == 0)
        //    RTUtility.MultiTargetBlit(pass0, m_fourier, PASS_Y_4);
        //else
        //    RTUtility.MultiTargetBlit(pass1, m_fourier, PASS_Y_4);
        if (idx == 0)
        {
            RTUtility.MultiTargetBlit(m_pass0RT4, depth0, m_fourier, PASS_Y_4);
        }
        else
        {
            RTUtility.MultiTargetBlit(m_pass1RT4, depth1, m_fourier, PASS_Y_4);
        }
    }
    // Index of the buffer set written by the last pass.
    return(idx);
}
// Stub for the engine-internal MRT setup call (decompiled binding — TODO confirm).
// The original empty body never assigned the out parameter, which does not
// compile (CS0177); assign a default so the stub is well-formed.
private static void Internal_SetRTBuffers(RenderBuffer[] colorBuffers, out RenderBuffer depthBuffer)
{
    depthBuffer = default(RenderBuffer);
}
/// <summary>
/// Color-grading pass: bakes the LUT when dirty, binds it together with the
/// exposure adjustment to the uber material, and blits the graded image.
/// </summary>
/// <returns>True when the effect rendered; false when disabled or resources are missing.</returns>
public override bool ImageEffect_RenderImage(RenderTexture src, RenderTexture dst, RenderBuffer depthBuffer)
{
    // (The commented-out OnRenderImage that previously preceded this method
    // duplicated the logic below and has been removed.)
    if (!ImageEffects.ImageEffectManager.AdvanceImangeEffectEnabled)
    {
        return false;
    }
    if (!CheckResources() || !IsOnValidLOD())
    {
        return false;
    }
    // Rebuild the baked LUT when settings changed or the cached LUT is invalid.
    if (model.isDirty || !IsLogLutValid(model.bakedLut))
    {
        GenerateLut();
        model.isDirty = false;
    }
    if (QualitySettings.activeColorSpace == ColorSpace.Gamma)
    {
        uberMaterial.EnableKeyword("UNITY_COLORSPACE_GAMMA");
    }
    var bakedLut = model.bakedLut;
    uberMaterial.SetTexture(Uniforms._LogLut, bakedLut);
    uberMaterial.SetVector(Uniforms._LogLut_Params, new Vector3(1f / bakedLut.width, 1f / bakedLut.height, bakedLut.height - 1f));
    // The constant is ln(2), so ev == 2^postExposure (exposure in stops).
    float ev = Mathf.Exp(model.settings.basic.postExposure * 0.69314718055994530941723212145818f);
    uberMaterial.SetFloat(Uniforms._ExposureEV, ev);
    Graphics.Blit(src, dst, uberMaterial, 0);
    return true;
}
// Stub (looks like a decompiled engine binding — TODO confirm) that would return
// the currently active depth buffer. The original empty body never assigned the
// out parameter, which does not compile (CS0177); assign a default.
private static void GetActiveDepthBuffer(out RenderBuffer res)
{
    res = default(RenderBuffer);
}
/// <summary>
/// Blur-style image effect: pushes the sample distance/strength parameters into
/// the material and blits the result to the destination.
/// </summary>
/// <returns>True when the effect rendered; false when the LOD check fails.</returns>
public override bool ImageEffect_RenderImage(RenderTexture source, RenderTexture destination, RenderBuffer depthBuffer)
{
    if (IsOnValidLOD() == false)
    {
        return false;
    }

    // Push the tweakables to the shader before blitting.
    mat.SetFloat("fSampleDist", fSampleDist);
    mat.SetFloat("fSampleStrength", fSampleStrength);

    // Clamp so samples near the edges do not wrap around the screen.
    source.wrapMode = TextureWrapMode.Clamp;
    Graphics.Blit(source, destination, mat, 0);
    return true;
}
// Stub (looks like a decompiled binding — TODO confirm) that would return a depth
// buffer. The original empty body never assigned the out parameter, which does
// not compile (CS0177); assign a default.
private void GetDepthBuffer(out RenderBuffer res)
{
    res = default(RenderBuffer);
}
/// <summary>
/// Vignette + chromatic aberration pass: optionally darkens/blurs the corners in a
/// prepass, then applies chromatic aberration when blitting to the destination.
/// </summary>
/// <returns>True when the effect rendered; false when disabled or resources are missing.</returns>
public override bool ImageEffect_RenderImage(RenderTexture source, RenderTexture destination, RenderBuffer depthBuffer)
{
    if (!ImageEffects.ImageEffectManager.AdvanceImangeEffectEnabled)
    {
        return(false);
    }
    if (CheckResources() == false || !IsOnValidLOD())
    {
        return(false);
    }
    int rtW = source.width;
    int rtH = source.height;
    // The vignette prepass is only needed when blur or intensity is non-zero.
    bool doPrepass = (Mathf.Abs(blur) > 0.0f || Mathf.Abs(intensity) > 0.0f);
    float widthOverHeight = (1.0f * rtW) / (1.0f * rtH);
    float oneOverBaseSize = 1.0f / 512.0f;
    RenderTexture color = null;
    RenderTexture color2a = null;
    RenderTexture color2b = null;
    if (doPrepass)
    {
        color = RenderTexture.GetTemporary(rtW, rtH, 0, source.format);
        // Blur corners
        if (Mathf.Abs(blur) > 0.0f)
        {
            // Downsample 4x, then run two separable blur iterations,
            // ping-ponging between color2a and color2b.
            int downScale = 4;
            color2a = RenderTexture.GetTemporary(rtW / downScale, rtH / downScale, 0, source.format);
            Graphics.Blit(source, color2a, chromAberrationMaterial, 0);
            for (int i = 0; i < 2; i++)
            {
                // maybe make iteration count tweakable
                separableBlurMaterial.SetVector("offsets", new Vector4(0.0f, blurSpread * oneOverBaseSize, 0.0f, 0.0f));
                color2b = RenderTexture.GetTemporary(rtW / downScale, rtH / downScale, 0, source.format);
                Graphics.Blit(color2a, color2b, separableBlurMaterial);
                RenderTexture.ReleaseTemporary(color2a);
                separableBlurMaterial.SetVector("offsets", new Vector4(blurSpread * oneOverBaseSize / widthOverHeight, 0.0f, 0.0f, 0.0f));
                color2a = RenderTexture.GetTemporary(rtW / downScale, rtH / downScale, 0, source.format);
                Graphics.Blit(color2b, color2a, separableBlurMaterial);
                RenderTexture.ReleaseTemporary(color2b);
            }
        }
        vignetteMaterial.SetFloat("_Intensity", intensity);   // intensity for vignette
        vignetteMaterial.SetFloat("_Blur", blur);             // blur intensity
        vignetteMaterial.SetTexture("_VignetteTex", color2a); // blurred texture (may be null when blur == 0)
        Graphics.Blit(source, color, vignetteMaterial, 0);    // prepass blit: darken & blur corners
    }
    chromAberrationMaterial.SetFloat("_ChromaticAberration", chromaticAberration);
    chromAberrationMaterial.SetFloat("_AxialAberration", axialAberration);
    chromAberrationMaterial.SetVector("_BlurDistance", new Vector2(-blurDistance, blurDistance));
    chromAberrationMaterial.SetFloat("_Luminance", 1.0f / Mathf.Max(Mathf.Epsilon, luminanceDependency));
    if (doPrepass)
    {
        color.wrapMode = TextureWrapMode.Clamp;
    }
    else
    {
        source.wrapMode = TextureWrapMode.Clamp;
    }
    Graphics.Blit(doPrepass ? color : source, destination, chromAberrationMaterial, mode == AberrationMode.Advanced ? 2 : 1);
    // NOTE(review): color/color2a may be null here when the prepass was skipped —
    // presumably ReleaseTemporary tolerates null; verify.
    RenderTexture.ReleaseTemporary(color);
    RenderTexture.ReleaseTemporary(color2a);
    return(true);
}
/// <summary>
/// Draws a <see cref="RenderBuffer"/>'s pixels into <paramref name="b"/>, clears the
/// drawing surface with <paramref name="color"/>, then paints the bitmap at the origin.
/// </summary>
/// <param name="rBuffer">
/// The view's pixel buffer, as returned by <see cref="WebView.Render"/> for a dirty view.
/// </param>
/// <param name="g">
/// The target drawing surface. This method does not dispose it.
/// </param>
/// <param name="color">
/// Background color the whole surface is filled with before drawing the bitmap.
/// </param>
/// <param name="b">
/// Bitmap receiving the buffer contents; pass null to have one created. Reuse the same
/// reference across calls to avoid recreating the bitmap while the buffer size is unchanged.
/// </param>
/// <seealso cref="WebView.IsDirty"/>
/// <seealso cref="WebView.Render"/>
public static void DrawBuffer( RenderBuffer rBuffer, Graphics g, Color color, ref Bitmap b )
{
    // Prepare the bitmap first, then clear and composite.
    DrawBuffer( rBuffer, ref b );
    g.Clear( color );
    g.DrawImageUnscaled( b, 0, 0 );
}
// Renders the volumetric light shafts for this light on top of the current frame:
// builds epipolar coordinate/depth buffers, raymarches the light volume, then
// interpolates along rays and blends the result back onto the screen.
public void OnRenderObject()
{
    currentCamera = Camera.current;
    if (!m_MinRequirements || !CheckCamera() || !IsVisible())
    {
        return;
    }
    // Prepare
    // Remember the active targets so the final pass can composite onto them.
    RenderBuffer depthBuffer = Graphics.activeDepthBuffer;
    RenderBuffer colorBuffer = Graphics.activeColorBuffer;
    InitResources();
    Vector4 lightPos = GetLightViewportPos();
    // Select the shader variant depending on whether the light position lies
    // inside the viewport.
    bool lightOnScreen = lightPos.x >= -1 && lightPos.x <= 1 && lightPos.y >= -1 && lightPos.y <= 1;
    SetKeyword(lightOnScreen, "LIGHT_ON_SCREEN", "LIGHT_OFF_SCREEN");
    int width = Screen.width;
    int height = Screen.height;
    // Render the buffers, raymarch, interpolate along rays
    UpdateShadowmap();
    SetKeyword(directional, "DIRECTIONAL_SHAFTS", "SPOT_SHAFTS");
    RenderCoords(width, height, lightPos);
    RenderInterpolationTexture(lightPos);
    Raymarch(width, height, lightPos);
    InterpolateAlongRays(lightPos);
    ShowSamples(width, height, lightPos);
    // Final interpolation and blending onto the screen
    FlipWorkaround();
    SetFrustumRays(m_FinalInterpolationMaterial);
    m_FinalInterpolationMaterial.SetTexture("_InterpolationEpi", m_InterpolationEpi);
    m_FinalInterpolationMaterial.SetTexture("_DepthEpi", m_DepthEpi);
    m_FinalInterpolationMaterial.SetTexture("_Shadowmap", m_Shadowmap);
    m_FinalInterpolationMaterial.SetTexture("_Coord", m_CoordEpi);
    m_FinalInterpolationMaterial.SetTexture("_SamplePositions", m_SamplePositions);
    m_FinalInterpolationMaterial.SetTexture("_RaymarchedLight", m_InterpolateAlongRaysEpi);
    m_FinalInterpolationMaterial.SetVector("_CoordTexDim", new Vector4(m_CoordEpi.width, m_CoordEpi.height, 1.0f / m_CoordEpi.width, 1.0f / m_CoordEpi.height));
    m_FinalInterpolationMaterial.SetVector("_ScreenTexDim", new Vector4(width, height, 1.0f / width, 1.0f / height));
    m_FinalInterpolationMaterial.SetVector("_LightPos", lightPos);
    m_FinalInterpolationMaterial.SetFloat("_DepthThreshold", GetDepthThresholdAdjusted());
    // Directional lights, or frustums crossing the near plane, are drawn as a
    // full-screen quad; otherwise the spot light's frustum geometry is rendered.
    bool renderAsQuad = directional || IntersectsNearPlane();
    m_FinalInterpolationMaterial.SetFloat("_ZTest", (float) (renderAsQuad ? CompareFunction.Always : CompareFunction.Less));
    SetKeyword(renderAsQuad, "QUAD_SHAFTS", "FRUSTUM_SHAFTS");
    Graphics.SetRenderTarget(colorBuffer, depthBuffer);
    m_FinalInterpolationMaterial.SetPass(0);
    if (renderAsQuad)
    {
        RenderQuad();
    }
    else
    {
        RenderSpotFrustum();
    }
    ReleaseResources();
}
// Convenience overload: packs the MRT color targets plus the depth target into a
// RenderTargetSetup and forwards to the core Blit overload.
protected void Blit(Texture mainTex, RenderBuffer[] renderTargets, RenderBuffer depthTarget, Material material, int pass = -1)
{
    Blit(mainTex, new RenderTargetSetup(renderTargets, depthTarget), material, pass);
}
// Convenience overload without a source texture: forwards with mainTex = null.
protected void Blit(RenderBuffer[] renderTargets, RenderBuffer depthTarget, Material material, int pass = -1)
{
    Blit(null, renderTargets, depthTarget, material, pass);
}
/// <summary>
/// Renders the given scene into <paramref name="renderBuffer"/>, using one task per
/// row of pixels in the image.
/// </summary>
/// <param name="scene">The scene to render.</param>
/// <param name="renderBuffer">Target buffer receiving the traced pixel colors.</param>
/// <param name="width">Output width in pixels; -1 uses the stored render size.</param>
/// <param name="height">Output height in pixels; -1 uses the stored render size.</param>
public void RenderSceneToBitmapThreaded(Scene scene, RenderBuffer renderBuffer, int width = -1, int height = -1)
{
    if (width == -1 || height == -1)
    {
        width = renderSize.Width;
        height = renderSize.Height;
    }
    else
    {
        renderSize = new Size(width, height);
    }

    // Stopwatch is the correct tool for elapsed-time measurement — DateTime.UtcNow
    // (used previously) has coarse resolution and is not monotonic.
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();

    // One task per scanline; presize the list to avoid growth reallocations.
    List<Task> tasks = new List<Task>(height);
    for (int yCounter = height - 1; yCounter >= 0; yCounter--)
    {
        // Capture a per-row copy so each task closure sees its own y value.
        var y = yCounter;
        tasks.Add(Task.Run(() =>
        {
            for (int x = 0; x < width; x++)
            {
                // Map pixel coordinates to the [-1, 1] viewport range.
                var viewPortX = ((2 * x) / (float)width) - 1;
                var viewPortY = ((2 * y) / (float)height) - 1;
                var color = TraceRayAgainstScene(GetRay(viewPortX, viewPortY), scene);
                // The buffer's row 0 is at the top, so flip y when writing.
                renderBuffer.SetColor(x, height - y - 1, ref color);
            }
        }));
    }
    Task.WhenAll(tasks).Wait();

    stopwatch.Stop();
    System.Diagnostics.Debug.WriteLine("Total render time: " + stopwatch.Elapsed.TotalMilliseconds + " ms");
}
/// <summary>
/// Renders this <see cref="WebView"/> into an offscreen pixel buffer and clears the
/// dirty state. For best efficiency, call only when <see cref="IsDirty"/> indicates a
/// render is needed — typically from an <see cref="IsDirtyChanged"/> handler.
/// </summary>
/// <returns>
/// The <see cref="RenderBuffer"/> this view was rendered to. The instance may change
/// between renders and may be null if the view has crashed (see <see cref="IsCrashed"/>).
/// </returns>
/// <exception cref="InvalidOperationException">
/// The member is called on an invalid <see cref="WebView"/> instance (see <see cref="IsEnabled"/>).
/// </exception>
public RenderBuffer Render()
{
    VerifyValid();

    var result = new RenderBuffer( awe_webview_render( Instance ) );
    if ( flushAlpha && ( result != null ) )
    {
        result.FlushAlpha();
    }

    return result;
}