AddCommandBuffer() public method

public AddCommandBuffer ( CameraEvent evt, CommandBuffer buffer ) : void
evt CameraEvent The point during the camera's rendering at which the buffer executes.
buffer UnityEngine.Rendering.CommandBuffer The command buffer to execute.
Return void
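
Before the per-project examples below, here is a minimal usage sketch. The component name (MinimalBlitCapture), the property names, and the choice of CameraEvent.AfterEverything are illustrative assumptions; only the Camera and CommandBuffer calls are the actual Unity API. It shows the basic pattern: record commands into a CommandBuffer once, attach it with AddCommandBuffer so the camera replays it at the chosen event every frame, and detach it with RemoveCommandBuffer when the component is disabled.

using UnityEngine;
using UnityEngine.Rendering;

[RequireComponent(typeof(Camera))]
public class MinimalBlitCapture : MonoBehaviour
{
    Camera _camera;
    CommandBuffer _cmd;

    void OnEnable()
    {
        _camera = GetComponent<Camera>();

        _cmd = new CommandBuffer();
        _cmd.name = "MinimalBlitCapture: copy frame buffer";

        // Record once; the camera replays these commands every frame.
        int tmp = Shader.PropertyToID("_MinimalBlitTmp");
        _cmd.GetTemporaryRT(tmp, -1, -1, 0, FilterMode.Bilinear);
        _cmd.Blit(BuiltinRenderTextureType.CurrentActive, tmp);
        _cmd.SetGlobalTexture("_MinimalBlitCopy", tmp);

        // Run the buffer after the camera has finished rendering everything.
        _camera.AddCommandBuffer(CameraEvent.AfterEverything, _cmd);
    }

    void OnDisable()
    {
        // Detach at the same event the buffer was added to, then free it.
        if (_camera != null && _cmd != null)
        {
            _camera.RemoveCommandBuffer(CameraEvent.AfterEverything, _cmd);
            _cmd.Release();
            _cmd = null;
        }
    }
}

Adding the same CommandBuffer to the same event more than once makes it execute more than once, which is why several of the examples below check for an already-registered buffer before calling AddCommandBuffer.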
Example #1
        public void Awake()
        {
            _antennaBuffer = new List<Antenna>(100);
            _cmd           = new CommandBuffer();
            _cmd.name      = "Render Antennae";
            _camera        = GetComponent<UnityEngine.Camera>();
            _camera.AddCommandBuffer(CameraEvent.BeforeLighting, _cmd);
            _sphere            = Resources.Load<Mesh>("Meshes/Sphere");
            _antennaMaterial   = new Material(Shader.Find("Hidden/Antenna"));
            _receptionMaterial = new Material(Shader.Find("Hidden/Reception"));
            _receptionMaterial.SetTexture("_Heatmap", CreateHeatmapTex());

            this.Subscribe<AddAntennaEvent>(OnAddAntenna);
            this.Subscribe<RemoveAntennaEvent>(OnRemoveAntenna);
        }
Example #2
 // Lua binding wrapper: reads the Camera instance, the CameraEvent and the
 // CommandBuffer from the Lua stack, calls AddCommandBuffer, and pushes a success flag.
 static public int AddCommandBuffer(IntPtr l)
 {
     try {
         UnityEngine.Camera self = (UnityEngine.Camera)checkSelf(l);
         UnityEngine.Rendering.CameraEvent a1;
         checkEnum(l, 2, out a1);
         UnityEngine.Rendering.CommandBuffer a2;
         checkType(l, 3, out a2);
         self.AddCommandBuffer(a1, a2);
         pushValue(l, true);
         return(1);
     }
     catch (Exception e) {
         return(error(l, e));
     }
 }
Example #3
 // Variant of the binding above: on failure it raises a Lua error via luaL_error
 // instead of returning an error value, and it pushes no return value on success.
 static public int AddCommandBuffer(IntPtr l)
 {
     try {
         UnityEngine.Camera self = (UnityEngine.Camera)checkSelf(l);
         UnityEngine.Rendering.CameraEvent a1;
         checkEnum(l, 2, out a1);
         UnityEngine.Rendering.CommandBuffer a2;
         checkType(l, 3, out a2);
         self.AddCommandBuffer(a1, a2);
         return(0);
     }
     catch (Exception e) {
         LuaDLL.luaL_error(l, e.ToString());
         return(0);
     }
 }
Example #4
    void OnEnable()
    {
        System.IO.Directory.CreateDirectory(m_output_directory);
        m_cam = GetComponent<Camera>();
        m_quad = FrameCapturerUtils.CreateFullscreenQuad();
        m_mat_copy = new Material(m_sh_copy);
        if (m_cam.targetTexture != null)
        {
            m_mat_copy.EnableKeyword("OFFSCREEN");
        }

        if (m_capture_framebuffer)
        {
            int tid = Shader.PropertyToID("_TmpFrameBuffer");
            m_cb = new CommandBuffer();
            m_cb.name = "ExrCapturer: copy frame buffer";
            m_cb.GetTemporaryRT(tid, -1, -1, 0, FilterMode.Point);
            m_cb.Blit(BuiltinRenderTextureType.CurrentActive, tid);
            // tid is intentionally not released here
            m_cam.AddCommandBuffer(CameraEvent.AfterEverything, m_cb);

            m_frame_buffer = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, RenderTextureFormat.ARGBHalf);
            m_frame_buffer.wrapMode = TextureWrapMode.Repeat;
            m_frame_buffer.Create();
        }

        // Disable G-buffer capture unless the camera effectively renders deferred
        // (either set explicitly or inherited from the player settings).
        if (m_capture_gbuffer &&
            m_cam.renderingPath != RenderingPath.DeferredShading &&
            !(m_cam.renderingPath == RenderingPath.UsePlayerSettings && PlayerSettings.renderingPath == RenderingPath.DeferredShading))
        {
            Debug.Log("ExrCapturer: Rendering path must be deferred to use capture_gbuffer mode.");
            m_capture_gbuffer = false;
        }
        if(m_capture_gbuffer)
        {
            m_gbuffer = new RenderTexture[4];
            m_rt_gbuffer = new RenderBuffer[4];
            for (int i = 0; i < m_gbuffer.Length; ++i)
            {
                m_gbuffer[i] = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, RenderTextureFormat.ARGBHalf);
                m_gbuffer[i].filterMode = FilterMode.Point;
                m_gbuffer[i].Create();
                m_rt_gbuffer[i] = m_gbuffer[i].colorBuffer;
            }
            {
                RenderTextureFormat format = m_depth_format == DepthFormat.Half ? RenderTextureFormat.RHalf : RenderTextureFormat.RFloat;
                m_depth = new RenderTexture(m_cam.pixelWidth, m_cam.pixelHeight, 0, format);
                m_depth.filterMode = FilterMode.Point;
                m_depth.Create();
            }
        }

        FrameCapturer.fcExrConfig conf;
        conf.max_active_tasks = m_max_active_tasks;
        m_exr = FrameCapturer.fcExrCreateContext(ref conf);
    }
Example #5
    private void AddCommandBuffersToCamera(Camera setCamera, CommandBuffer normalBuffer) {
        // Need the depth texture for the depth-aware upsample
        setCamera.depthTextureMode |= DepthTextureMode.Depth;

        if (m_copyTransmission != null && !HasCommandBuffer(setCamera, CameraEvent.AfterGBuffer, c_copyTransmissionBufferName)) {
            setCamera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_copyTransmission);
        }

        if (normalBuffer != null && !HasCommandBuffer(setCamera, CameraEvent.BeforeLighting, c_normalBufferName)) {
            setCamera.AddCommandBuffer(CameraEvent.BeforeLighting, normalBuffer);
        }

        if (m_releaseDeferredPlus != null && !HasCommandBuffer(setCamera, CameraEvent.AfterLighting, c_releaseDeferredBuffer)) {
            setCamera.AddCommandBuffer(CameraEvent.AfterLighting, m_releaseDeferredPlus);
        }

        RefreshProperties();
    }
Example #6
    void OnPreCullEvent(Camera camera)
    {
        #if UNITY_EDITOR
        if (Array.IndexOf<Camera>(SceneView.GetAllSceneCameras(), camera) >= 0) {
            // Scene view cameras: honor the drawInSceneView flag
            if (this.drawInSceneView == false) {
                return;
            }
        } else if (Camera.current.isActiveAndEnabled == false) {
            // Exclude editor cameras other than the scene view
            return;
        }
        #endif
        RenderPath path;
        if (renderPaths.ContainsKey(camera)) {
            // Use the existing render path if there is one
            path = renderPaths[camera];
        } else {
            // Otherwise create a new one
            path = new RenderPath();
            path.renderId = renderPaths.Count;
            path.cameraEvent = cameraEvent;
            // Create the command buffer that issues the plugin rendering
            path.commandBuffer = new CommandBuffer();
            path.commandBuffer.IssuePluginEvent(Plugin.EffekseerGetRenderFunc(), path.renderId);
            // Register the command buffer with the camera
            camera.AddCommandBuffer(path.cameraEvent, path.commandBuffer);
            renderPaths.Add(camera, path);
        }

        // Update the view-related matrices
        Plugin.EffekseerSetProjectionMatrix(path.renderId, Utility.Matrix2Array(
            GL.GetGPUProjectionMatrix(camera.projectionMatrix, false)));
        Plugin.EffekseerSetCameraMatrix(path.renderId, Utility.Matrix2Array(
            camera.worldToCameraMatrix));
    }
Example #7
	public void RefreshComBufs(Camera cam, bool isSceneCam) {
		if (cam && combufPreLight!=null && combufPostLight!=null) {
            CommandBuffer[] combufsPreLight = cam.GetCommandBuffers(CameraEvent.BeforeReflections);
            bool found = false;
            foreach (CommandBuffer cbuf in combufsPreLight)
            {
                // NOTE: instance comparison (commented out below) does not work here, so compare by buffer name instead
                //if (cbuf == combufPreLight)
                if (cbuf.name == combufPreLight.name)
                {
                    // got it already in command buffers
                    found = true;
                    break;
                }
            }
            if (!found)
            {
                cam.AddCommandBuffer(CameraEvent.BeforeReflections, combufPreLight);
                cam.AddCommandBuffer(CameraEvent.AfterLighting, combufPostLight);
                if (isSceneCam)
                {
                    sceneCamsWithBuffer.Add(cam);
                }
            }
		}
	}
Example #8
		// 
		protected virtual void OnEnable()
		{
			if (!CheckInstance()) { return; }

			Initialize();

			isSupported = CheckSupported();
			if (!isSupported)
			{
				enabled = false;
				Debug.LogWarning("HighlightingSystem : Highlighting System has been disabled due to unsupported Unity features on the current platform!");
				return;
			}

			blurMaterial = new Material(materials[BLUR]);
			
			// Set initial intensity in blur material
			blurMaterial.SetFloat(ShaderPropertyID._Intensity, _blurIntensity);

			renderBuffer = new CommandBuffer();
			renderBuffer.name = renderBufferName;

			cam = GetComponent<Camera>();
			UpdateHighlightingBuffer();

			// Force-rebuild renderBuffer
			isDirty = true;

			cam.AddCommandBuffer(queue, renderBuffer);
		}
Example #9
    /// @cond
    /// <summary>
    /// Initialize the AR Screen.
    /// </summary>
    public void Start()
    {
        m_camera = GetComponent<Camera>();

        TangoApplication tangoApplication = FindObjectOfType<TangoApplication>();
        m_arCameraPostProcess = gameObject.GetComponent<ARCameraPostProcess>();
        if (tangoApplication != null)
        {
            // Subscribe and register only once the TangoApplication is known to be non-null.
            tangoApplication.OnDisplayChanged += _OnDisplayChanged;
            tangoApplication.Register(this);

            // If already connected to a service, then do initialization now.
            if (tangoApplication.IsServiceConnected)
            {
                OnTangoServiceConnected();
            }

            CommandBuffer buf = VideoOverlayProvider.CreateARScreenCommandBuffer();
            m_camera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, buf);
            m_camera.AddCommandBuffer(CameraEvent.BeforeGBuffer, buf);
        }

        if (m_enableOcclusion) 
        {
            TangoPointCloud pointCloud = FindObjectOfType<TangoPointCloud>();
            if (pointCloud != null)
            {
                Renderer renderer = pointCloud.GetComponent<Renderer>();
                renderer.enabled = true;
                renderer.material.shader = m_occlusionShader;
                pointCloud.m_updatePointsMesh = true;
            }
            else
            {
                Debug.Log("Point Cloud data is not available, occlusion is not possible.");
            }
        }
    }
Example #10
        //
        protected virtual void OnEnable()
        {
            Initialize();

            if (!CheckSupported(true))
            {
                enabled = false;
                Debug.LogError("HighlightingSystem : Highlighting System has been disabled due to unsupported Unity features on the current platform!");
                return;
            }

            blurMaterial = new Material(materials[BLUR]);
            cutMaterial = new Material(materials[CUT]);
            compMaterial = new Material(materials[COMP]);

            // Set initial intensity in blur material
            blurMaterial.SetFloat(ShaderPropertyID._Intensity, _blurIntensity);

            renderBuffer = new CommandBuffer();
            renderBuffer.name = renderBufferName;

            cam = GetComponent<Camera>();

            cameras.Add(cam);

            cam.AddCommandBuffer(queue, renderBuffer);

            if (_blitter != null)
            {
                _blitter.Register(this);
            }
        }
Example #11
        /// <summary>
        /// Return a new command buffer.
        /// This will be called the first time
        /// the mesh is rendered for each camera 
        /// that renders the ocean.
        /// </summary>
        public override CommandBuffer Create(Camera cam)
        {
            CommandBuffer cmd = new CommandBuffer();
            cmd.name = "Ceto DepthGrab Cmd: " + cam.name;

            //int width = cam.pixelWidth;
            //int height = cam.pixelHeight;

            //int scale = ResolutionToNumber(Resolution);
            //width /= scale;
            //height /= scale;

            RenderTextureFormat format;

            //screen grab currently disabled.
            /*
            if (cam.hdr)
                format = RenderTextureFormat.ARGBHalf;
            else
                format = RenderTextureFormat.ARGB32;

            //Copy screen into temporary RT.
            int grabID = Shader.PropertyToID("Ceto_GrabCopyTexture");
            cmd.GetTemporaryRT(grabID, width, height, 0, FilterMode.Bilinear, format, RenderTextureReadWrite.Default);
            cmd.Blit(BuiltinRenderTextureType.CurrentActive, grabID);
            cmd.SetGlobalTexture(GrabName, grabID);

            */
            if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RFloat))
                format = RenderTextureFormat.RFloat;
            else
                format = RenderTextureFormat.RHalf;

            //Copy depths into temporary RT.
            int depthID = Shader.PropertyToID("Ceto_DepthCopyTexture");
            cmd.GetTemporaryRT(depthID, cam.pixelWidth, cam.pixelHeight, 0, FilterMode.Point, format, RenderTextureReadWrite.Linear);
            cmd.Blit(BuiltinRenderTextureType.CurrentActive, depthID, m_copyDepthMat, 0);
            cmd.SetGlobalTexture(DepthName, depthID);

            cam.AddCommandBuffer(Event, cmd);

            CommandData data = new CommandData();

            data.command = cmd;
            data.width = cam.pixelWidth;
            data.height = cam.pixelHeight;

            if (m_data.ContainsKey(cam))
                m_data.Remove(cam);

            m_data.Add(cam, data);

            return cmd;
        }
Example #12
    void OnEnable()
    {
        m_cam = GetComponent<Camera>();
        m_quad = FrameCapturerUtils.CreateFullscreenQuad();
        m_mat_copy = new Material(m_sh_copy);
        if (m_cam.targetTexture != null)
        {
            m_mat_copy.EnableKeyword("OFFSCREEN");
        }

        {
            int tid = Shader.PropertyToID("_TmpFrameBuffer");
            m_cb = new CommandBuffer();
            m_cb.name = "GifCapturer: copy frame buffer";
            m_cb.GetTemporaryRT(tid, -1, -1, 0, FilterMode.Point);
            m_cb.Blit(BuiltinRenderTextureType.CurrentActive, tid);
            // tid is intentionally not released here
            m_cam.AddCommandBuffer(CameraEvent.AfterEverything, m_cb);
        }
        ResetRecordingState();
    }
Example #13
    void ReconstructLightBuffers(Camera camera, bool toCull)
    {
        CommandBuffer cameraBuffer = null;
        buffers.TryGetValue (camera, out cameraBuffer);
        if (cameraBuffer != null) {
            cameraBuffer.Clear ();
        } else {
            cameraBuffer = new CommandBuffer ();
            cameraBuffer.name = "Deferred custom lights";
            camera.AddCommandBuffer (CameraEvent.BeforeImageEffectsOpaque, cameraBuffer);
            buffers.Add (camera, cameraBuffer);
        }
        var system = PipLightSystem.instance;
        Bounds bounds = new Bounds ();
        Plane[] frustrumPlanes = null;

        if (toCull) {
            frustrumPlanes = GeometryUtility.CalculateFrustumPlanes (camera);
        }
        foreach (var light in system.m_Lights) {
            bool toRenderThisLight = true;
            light.UpdateLOD ();
            if (toCull) {
                bounds.center = light.transform.position;
                bounds.extents = Vector3.one * light.range;
                toRenderThisLight = GeometryUtility.TestPlanesAABB (frustrumPlanes, bounds);
            }
            if (toRenderThisLight) {
                light.UpdateIfNeeded ();
                cameraBuffer.DrawMesh (
                    lightSphereMesh,
                    Matrix4x4.TRS (light.transform.position, Quaternion.identity, Vector3.one * light.range * 2f),
                    GetMaterial (light),
                    0,
                    0,
                    light.GetMaterialPropertyBlock ()
                );
            }
        }
    }
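
Examples #5 and #7 both guard against registering the same buffer twice when a camera is refreshed. Example #5 calls a HasCommandBuffer helper whose body is not shown above; the following is only a sketch of what such a helper might look like. It compares by buffer name because, as the comment in Example #7 notes, comparing the CommandBuffer instances returned by GetCommandBuffers is unreliable.

using UnityEngine;
using UnityEngine.Rendering;

public static class CommandBufferUtil
{
    // True if the camera already has a buffer with this name registered at this event.
    public static bool HasCommandBuffer(Camera cam, CameraEvent evt, string bufferName)
    {
        foreach (CommandBuffer cb in cam.GetCommandBuffers(evt))
        {
            if (cb.name == bufferName)
            {
                return true;
            }
        }
        return false;
    }
}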