RenderTexture.Create() public method

Actually creates the RenderTexture. The RenderTexture constructor only sets up the texture's description; Create() allocates the hardware resource up front (otherwise it is created lazily the first time the texture is set active) and returns true if creation succeeded.

public bool Create()
Returns: bool
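A minimal usage sketch (illustrative only; the CreateExample class below is not taken from the samples on this page): configure the texture before calling Create(), check the boolean result, and Release() the hardware resource when finished.

    using UnityEngine;

    public class CreateExample : MonoBehaviour
    {
        RenderTexture rt;

        void Start()
        {
            rt = new RenderTexture(256, 256, 0, RenderTextureFormat.ARGB32);
            rt.filterMode = FilterMode.Point; // configure before Create()
            if (!rt.Create()) // allocates the hardware texture now instead of on first use
            {
                Debug.LogError("RenderTexture.Create() failed");
            }
        }

        void OnDestroy()
        {
            if (rt != null)
            {
                rt.Release(); // frees the hardware resource
            }
        }
    }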
Code Example #1
    /// <summary>
    /// Uses the GPU to generate a RenderTexture where the pixels in the texture represent noise.
    /// Set the octaves variable before calling this to a desired value.
    /// </summary>
    /// <returns>RenderTexture</returns>
    /// <param name="width"> Width of the texture to generate. </param>
    /// <param name="height"> Height of the texture to generate. </param>
    /// <param name="noiseOffsetX"> X Coordinate of the offset for the noise on the texture. </param>
    /// <param name="noiseOffsetY"> Y Coordinate of the offset for the noise on the texture. </param>
    /// <param name="noiseScale"> Value to scale the noise coordinates by. </param>
    /// <param name="normalize"> Whether or not to remap the noise from (-1, 1) to (0, 1). </param>
    public static UnityEngine.RenderTexture GetNoiseRenderTexture(int width, int height, float noiseOffsetX = 0, float noiseOffsetY = 0, float noiseScale = 0.01f, bool normalize = true)
    {
        UnityEngine.RenderTexture retTex = new UnityEngine.RenderTexture(width, height, 0);
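        // enableRandomWrite must be set before Create() so the texture is allocated with UAV access for the compute shader below.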
        retTex.enableRandomWrite = true;
        retTex.Create();

        UnityEngine.ComputeShader shader = UnityEngine.Resources.Load(shaderPath) as UnityEngine.ComputeShader;
        if (shader == null)
        {
            UnityEngine.Debug.LogError(noShaderMsg);
            return(null);
        }

        int[] resInts = { width, height };

        int kernel = shader.FindKernel("ComputeNoise");

        shader.SetTexture(kernel, "Result", retTex);
        SetShaderVars(shader, new UnityEngine.Vector2(noiseOffsetX, noiseOffsetY), normalize, noiseScale);
        shader.SetInts("reses", resInts);

        UnityEngine.ComputeBuffer permBuffer = new UnityEngine.ComputeBuffer(perm.Length, 4);
        permBuffer.SetData(perm);
        shader.SetBuffer(kernel, "perm", permBuffer);

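        // Each dispatched thread group covers a 14x15 block of pixels here; the group counts should match the kernel's [numthreads] declaration.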
        shader.Dispatch(kernel, width / 14, height / 15, 1);

        permBuffer.Release();

        return(retTex);
    }
Code Example #2
File: Interface.cs Project: leon196/TributeToMyst
	void Start () 
	{
		mouseLook = GetComponent<MouseLook>();
		
		textureUI = new RenderTexture((int)Screen.width, (int)Screen.height, 24, RenderTextureFormat.ARGB32);
		textureUI.Create();
		cameraUI.targetTexture = textureUI;
		Shader.SetGlobalTexture("_UITexture", textureUI);

		textureNextPanorama = new RenderTexture((int)Screen.width, (int)Screen.height, 24, RenderTextureFormat.ARGB32);
		textureNextPanorama.Create();
		cameraNextPanorama.targetTexture = textureNextPanorama;
		Shader.SetGlobalTexture("_PanoramaNextTexture", textureNextPanorama);
		
		textureNone = new Texture2D(1, 1);
		textureNone.SetPixel(0, 0, new Color(1,0,0,0));
		textureNone.Apply();

		cursorRender = GetComponentInChildren<Renderer>();
		cursorRender.material.mainTexture = textureNone;

		Gate[] gateArray = GameObject.FindObjectsOfType<Gate>();
		List<Vector3> positionList = new List<Vector3>();
		for (int i = 0; i < gateArray.Length; ++i) {
			Gate gate = gateArray[i];
			if (gate.anotherGate != null && gate.transitionType == Gate.TransitionType.Fade) {
				positionList.Add(gate.transform.position);
			}
		}
		Vector3[] positionArray = positionList.ToArray();
		GameObject.FindObjectOfType<ParticleCloud>().SetPositionArray(positionArray);
	}
Code Example #3
    public void CreateTextures()
    {
        if (texW == 0) throw new System.Exception("texW must be > 0");
        if (texH == 0) throw new System.Exception("texH must be > 0");
        texelSize = new Vector2(1f / texW, 1f / texH);
        maxTexelDistance = texelSize.magnitude;

        //Create textures
        texCur = new RenderTexture(texW, texH, 0, RenderTextureFormat.RFloat);
        texCur.enableRandomWrite = true;
        texCur.generateMips = false;
        texCur.filterMode = FilterMode.Point;
        texCur.Create();
        texDest = new RenderTexture(texW, texH, 0, RenderTextureFormat.RFloat);
        texDest.enableRandomWrite = true;
        texDest.generateMips = false;
        texDest.Create();

        //Clear textures
        Graphics.SetRenderTarget(texCur);
        GL.Clear(true, true, Color.white);
        Graphics.SetRenderTarget(texDest);
        GL.Clear(true, true, Color.white);
        Graphics.SetRenderTarget(null);

        //Instantiate material
        renderMaterial = new Material(visibilityOverlayMaterial);
        renderMaterial.SetTexture("_Mask", texCur);
    }
Code Example #4
 // Called after all rendering is complete, to apply postprocessing effects.
 protected void OnRenderImage(RenderTexture sourceTexture, RenderTexture destTexture)
 {
     RenderTexture tex1 = new RenderTexture(Screen.width, Screen.height, 24, RenderTextureFormat.ARGB32);
     tex1.Create();
     Graphics.Blit(sourceTexture, tex1, Mat1);
     Graphics.Blit(tex1, destTexture);
     tex1.Release(); // release each frame; allocating a new RenderTexture per frame leaks GPU memory otherwise
 }
Code Example #5
File: CactEyeCamera.cs Project: belug23/CactEye-2
        /*
         * Constructor
         * Input: The owning part's transform.
         * Purpose: This constructor will start up the owning part's camera object. The idea behind this
         * was to allow for multiple telescopes on the same craft.
         */
        public CactEyeCamera(Transform Position)
        {
            this.CameraTransform = Position;

            CameraWidth = (int)(Screen.width*0.4f);
            CameraHeight = (int)(Screen.height*0.4f);

            ScopeRenderTexture = new RenderTexture(CameraWidth, CameraHeight, 24);
            ScopeRenderTexture.Create();

            FullResolutionTexture = new RenderTexture(Screen.width, Screen.height, 24);
            FullResolutionTexture.Create();

            ScopeTexture2D = new Texture2D(CameraWidth, CameraHeight);
            FullTexture2D = new Texture2D(Screen.width, Screen.height);

            CameraSetup(0, "GalaxyCamera"); //As of KSP 1.0, the GalaxyCamera object was added. Thanks to MOARDv for figuring this one out.
            CameraSetup(1, "Camera ScaledSpace");
            CameraSetup(2, "Camera 01");
            CameraSetup(3, "Camera 00");
            CameraSetup(4, "Camera VE Underlay");
            CameraSetup(5, "Camera VE Overlay");

            skyboxRenderers = (from Renderer r in (FindObjectsOfType(typeof(Renderer)) as IEnumerable<Renderer>) where (r.name == "XP" || r.name == "XN" || r.name == "YP" || r.name == "YN" || r.name == "ZP" || r.name == "ZN") select r).ToArray<Renderer>();
            if (skyboxRenderers == null)
            {
                Debug.Log("CactEye 2: Logical Error: skyboxRenderers is null!");
            }

            scaledSpaceFaders = FindObjectsOfType(typeof(ScaledSpaceFader)) as ScaledSpaceFader[];
            if (scaledSpaceFaders == null)
            {
                Debug.Log("CactEye 2: Logical Error: scaledSpaceFaders is null!");
            }
        }
Code Example #6
    // Use this for initialization
    void Start () {

        destTexture = new RenderTexture( srcTexture.width, srcTexture.height, 0, RenderTextureFormat.ARGB32 );
        destTexture.enableRandomWrite = true;
        destTexture.Create();

    }
Code Example #7
	// Use this for initialization
	void Start ()
	{		
//		storedScreenWidth = Screen.width;
//		storedScreenHeight = Screen.height;
//		myRT = new RenderTexture(Screen.width, Screen.height, 24);
		
		storedScreenWidth = 1024;
		storedScreenHeight = 1024;
		myRT = new RenderTexture(1024, 1024, 24);
		
		myRT.format = RenderTextureFormat.ARGB32;
		myRT.filterMode = FilterMode.Point;
		myRT.isPowerOfTwo = false;
		myRT.isCubemap = false;
		myRT.Create();
		
		mat = new Material (
		"Shader \"Hidden/Invert\" {" +
		"SubShader {" +
		" Pass {" +
		" ZTest Always Cull Off ZWrite Off" +
		" SetTexture [_RenderTexy] { combine texture }" +
		" }" +
		"}" +
		"}"
		);
	}
Code Example #8
        public static VideoStreamTrack CaptureStreamTrack(this Camera cam, int width, int height, int bitrate,
                                                          RenderTextureDepth depth = RenderTextureDepth.DEPTH_24)
        {
            switch (depth)
            {
            case RenderTextureDepth.DEPTH_16:
            case RenderTextureDepth.DEPTH_24:
            case RenderTextureDepth.DEPTH_32:
                break;

            default:
                throw new InvalidEnumArgumentException(nameof(depth), (int)depth, typeof(RenderTextureDepth));
            }

            if (width == 0 || height == 0)
            {
                throw new ArgumentException("width and height should be greater than zero.");
            }

            int depthValue = (int)depth;
            var format     = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            var rt         = new UnityEngine.RenderTexture(width, height, depthValue, format);

            rt.Create();
            cam.targetTexture = rt;
            return(new VideoStreamTrack(cam.name, rt));
        }
Code Example #9
    void Update()
    {
        if (m_maincamera == null) m_maincamera = Camera.main;
        if (m_maincamera == null) return;

        Camera cam = GetComponent<Camera>();
        if(m_rt==null || m_resolution_scale!=m_resolution_scale_prev)
        {
            if (m_resolution_scale.x == 0.0f || m_resolution_scale.y==0.0f)
            {
                return;
            }

            m_resolution_scale_prev = m_resolution_scale;
            if(m_rt!=null)
            {
                m_rt.Release();
                m_rt = null;
            }
            m_rt = new RenderTexture(
                (int)(cam.pixelWidth * m_resolution_scale.x),
                (int)(cam.pixelHeight * m_resolution_scale.y),
                32,
                m_maincamera.hdr ? RenderTextureFormat.ARGBHalf : RenderTextureFormat.ARGB32);
            m_rt.filterMode = FilterMode.Trilinear;
            m_rt.Create();
            m_maincamera.targetTexture = m_rt;
            Debug.Log("resolution changed: " + m_rt.width + ", " + m_rt.height);
        }
    }
Code Example #10
	public void createOrResizeRenderTexture(){
		if(!testCameraExists()){
			return;
		}

		//if the render texture exists already, release it before replacing it.
		if(customRenderTexture != null){
			RenderTexture.active = null;
			customRenderTexture.Release();
		}
		customRenderTexture = new RenderTexture(renderWidth, renderHeight, 0, RenderTextureFormat.ARGB32);
		customRenderTexture.filterMode = FilterMode.Point;
		customRenderTexture.Create();
		Syphon.SafeMaterial.SetPass(0);
		RenderTexture.active = customRenderTexture;
		GL.Clear(false, true, new Color(0, 0, 0, 0));
		RenderTexture.active = null;

		cameraInstance.targetTexture = customRenderTexture;
	}
Code Example #11
File: drawing.cs Project: Blueteak/GGJ2016
    IEnumerator Start () 
    {
        //disable Eraser on start
        isEraser=0;

        //Initialise brush size on start
        erasersize=pencilsize=currSize=2;

        //Initialise brush color
        currColor = Color.red;

        //Initialise three points list
        three_point = new List<Vector3>();

        colorCircleRect = new Rect(Screen.width - 230f, Screen.height - 200f, 230f, 200f);
        UIRect = new Rect(0f, Screen.height - 100f,400f, 100f);
      //  GL.Clear(false, true, new Color(0.0f, 0.0f, 0.0f, 0.0f));
        //Create render texture and assign it to the camera.
        Cam.targetTexture = null;
      
        rt =new  RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.Default);
      
        yield return rt.Create();
        Debug.Log((int)img.rectTransform.sizeDelta.x);
        Texture2D t = new Texture2D((int)img.rectTransform.sizeDelta.x, (int)img.rectTransform.sizeDelta.y);
        Graphics.Blit(t, rt);
        img.sprite = Sprite.Create(t, new Rect(Vector2.zero, new Vector2(t.width,t.height)), Vector2.one/2);
        yield return 0;
        Cam.targetTexture = rt;
        mCurr = CreatePlaneMesh(currSize *0.12f);
       // antialising.enabled = true;
        //bg.SetActive(false);
    }
Code Example #12
    public void ResetRenderTexture()
    {
        if (renderTexture != null)
            renderTexture.Release(); // free the old texture's GPU memory before replacing it

        renderTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.Default);

        renderTexture.generateMips = false;
        renderTexture.useMipMap = false;
        renderTexture.wrapMode = TextureWrapMode.Clamp;
        //renderTexture.antiAliasing = 1;
        renderTexture.filterMode = FilterMode.Point;
        renderTexture.Create();

        //Graphics.SetRenderTarget (renderTexture);
        //GL.Clear (true, true, Color.clear);
        // XXX: Workaround unity issue, without this trick CameraClearFlags.Nothing won't work.
        renderTextureCamera.clearFlags = CameraClearFlags.SolidColor;
        renderTextureCamera.targetTexture = renderTexture;
        renderTextureCamera.Render();
        renderTextureCamera.clearFlags = CameraClearFlags.Nothing;
        renderTextureCamera.enabled = false;

        backgroundObject.GetComponent<MeshRenderer>().material.mainTexture = renderTexture;
    }
Code Example #13
        public IEnumerator VideoStreamAddTrackAndRemoveTrack()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var stream = new MediaStream();
            var track  = new VideoStreamTrack("video", rt);

            // wait for the end of the initialization for encoder on the render thread.
            yield return(0);

            Assert.AreEqual(TrackKind.Video, track.Kind);
            Assert.AreEqual(0, stream.GetVideoTracks().Count());
            Assert.True(stream.AddTrack(track));
            Assert.AreEqual(1, stream.GetVideoTracks().Count());
            Assert.NotNull(stream.GetVideoTracks().First());
            Assert.True(stream.RemoveTrack(track));
            Assert.AreEqual(0, stream.GetVideoTracks().Count());
            track.Dispose();
            // wait for disposing video track.
            yield return(0);

            stream.Dispose();
            Object.DestroyImmediate(rt);
        }
Code Example #14
        public IEnumerator MediaStreamAddTrack()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var stream = new MediaStream();
            var track  = new VideoStreamTrack("video", rt);

            yield return(new WaitForSeconds(0.1f));

            Assert.AreEqual(TrackKind.Video, track.Kind);
            Assert.AreEqual(0, stream.GetVideoTracks().Count());
            Assert.True(stream.AddTrack(track));
            Assert.AreEqual(1, stream.GetVideoTracks().Count());
            Assert.NotNull(stream.GetVideoTracks().First());
            Assert.True(stream.RemoveTrack(track));
            Assert.AreEqual(0, stream.GetVideoTracks().Count());
            track.Dispose();
            yield return(new WaitForSeconds(0.1f));

            stream.Dispose();
            Object.DestroyImmediate(rt);
        }
Code Example #15
    public void CreateRendTexture(GameObject item)
    {
        RenderTexture rendTexture = new RenderTexture(256, 256, 16);
        rendTexture.Create();
        GameObject rendCamera = Instantiate(Resources.Load("InventoryCam")) as GameObject;
        rendCamera.transform.parent = position.transform;
        if(nextPos != null)
            rendCamera.transform.position = new Vector3(nextPos.transform.position.x+10f, nextPos.transform.position.y,
                                                        nextPos.transform.position.z);
        else
            rendCamera.transform.position = new Vector3(position.transform.position.x+10f, position.transform.position.y,
                                                  	  position.transform.position.z);

        nextPos = rendCamera;
        GameObject itemClone = Instantiate(item) as GameObject;
        itemClone.transform.parent = nextPos.transform;
        itemClone.transform.position = new Vector3(nextPos.transform.position.x, nextPos.transform.position.y,
                                              nextPos.transform.position.z+0.5f);

        itemClone.transform.rotation = new Quaternion(itemClone.transform.rotation.x, itemClone.transform.rotation.y,
                                                      itemClone.transform.rotation.z, itemClone.transform.rotation.w);

        itemClone.GetComponent<Renderer>().enabled = true;
        rendCamera.GetComponent<Camera>().targetTexture = rendTexture;
        GameObject inventoryImage = new GameObject();
        rendTexture.name = item.name+"texture";
        inventoryImage.name = item.name;
        itemClone.layer = 8;
        inventoryImage.transform.parent = canvasPos.transform;
        inventoryImage.AddComponent<RawImage>();
        inventoryImage.GetComponent<RawImage>().texture = rendTexture;
        itemClick.ItemClick (inventoryImage, true);
        itemClick.ItemClick (itemClone, false);
        rendTextCameras.Add(rendCamera.GetComponent<Camera>());
    }
Code Example #16
        /// <summary>
        /// Generates a texture containing the given graph's noise output.
        /// If this is being called very often, create a permanent render target and material and
        ///     use the other version of this method instead for much better performance.
        /// If an error occurred, outputs to the Unity debug console and returns "null".
        /// </summary>
        /// <param name="outputComponents">
        /// The texture output.
        /// For example, pass "rgb" or "xyz" to output the noise into the red, green, and blue channels
        ///     but not the alpha channel.
        /// </param>
        /// <param name="defaultColor">
        /// The color (generally 0-1) of the color components which aren't set by the noise.
        /// </param>
        public static Texture2D GenerateToTexture(Graph g, GraphParamCollection c, int width, int height,
												  string outputComponents, float defaultColor,
												  TextureFormat format = TextureFormat.RGBAFloat)
        {
            //Generate a shader from the graph and have Unity compile it.
            string shaderPath = Path.Combine(Application.dataPath, "gpuNoiseShaderTemp.shader");
            Shader shader = SaveShader(g, shaderPath, "TempGPUNoiseShader", outputComponents, defaultColor);
            if (shader == null)
            {
                return null;
            }

            //Render the shader's output into a render texture and copy the data to a Texture2D.
            RenderTexture target = new RenderTexture(width, height, 16, RenderTextureFormat.ARGBFloat);
            target.Create();
            Texture2D resultTex = new Texture2D(width, height, format, false, true);

            //Create the material and set its parameters.
            Material mat = new Material(shader);
            c.SetParams(mat);

            GraphUtils.GenerateToTexture(target, mat, resultTex);

            //Clean up.
            target.Release();
            if (!AssetDatabase.DeleteAsset(StringUtils.GetRelativePath(shaderPath, "Assets")))
            {
                Debug.LogError("Unable to delete temp file: " + shaderPath);
            }

            return resultTex;
        }
Code Example #17
    void UpdateReflection()
    {
        if(!rtex)
        {
            rtex = new RenderTexture(textureSize, textureSize, 16);
            rtex.hideFlags = HideFlags.HideAndDontSave;
            rtex.isPowerOfTwo = true;
            rtex.isCubemap = true;
            rtex.useMipMap = false;
            rtex.Create();

            reflectingMaterial.SetTexture("_Cube", rtex);
        }

        if(!cam)
        {
            GameObject go = new GameObject("CubemapCamera", typeof(Camera));
            go.hideFlags = HideFlags.HideAndDontSave;
            cam = go.camera;
            // cam.nearClipPlane = 0.05f;
            cam.farClipPlane = 150f;
            cam.enabled = false;
            cam.cullingMask = mask;
        }

        cam.transform.position = Camera.main.transform.position;
        cam.transform.rotation = Camera.main.transform.rotation;

        cam.RenderToCubemap(rtex, 63);
    }
Code Example #18
File: RenderToTexture.cs Project: leon196/UnityVJ
 void Start()
 {
     texture = new RenderTexture((int)Screen.width, (int)Screen.height, 24, RenderTextureFormat.ARGB32);
     texture.Create();
     texture.filterMode = FilterMode.Point;
     GetComponent<Camera>().targetTexture = texture;
     Shader.SetGlobalTexture(uniformName, texture);
 }
Code Example #19
File: HistEq.cs Project: miberen/P7-VGIS
    void Start()
    {
        frame = new NPFrame2("Stuff", 3);

        donePow2 = new RenderTexture(Screen.width, Screen.height, 0, frame.GetTextureFormat, RenderTextureReadWrite.Linear);
        donePow2.enableRandomWrite = true;
        donePow2.Create();
    }
Code Example #20
    RenderTexture CreateRenderTexture(int w, int h)
    {
        RenderTexture rt = new RenderTexture(w, h, 16, RenderTextureFormat.ARGB32);
        rt.enableRandomWrite = true;
        rt.Create();

        return rt;
    }
Code Example #21
        private void Awake()
        {
            texture = new RenderTexture(36, 36, 0, RenderTextureFormat.ARGB32);
            texture.filterMode = FilterMode.Point;
            texture.Create();

            image.texture = texture;
        }
Code Example #22
File: CameraToTexture.cs Project: leon196/UnityVJ
 void Start()
 {
     buffer = new RenderTexture(Screen.width, Screen.height, 24, RenderTextureFormat.ARGB32);
     buffer.antiAliasing = 2;
     buffer.Create();
     GetComponent<Camera>().targetTexture = buffer;
     Shader.SetGlobalTexture(textureName, buffer);
 }
Code Example #23
        private static UnityEngine.RenderTexture CreateRenderTexture(int width, int height,
                                                                     UnityEngine.RenderTextureFormat format)
        {
            var tex = new UnityEngine.RenderTexture(width, height, 0, format);

            tex.Create();
            return(tex);
        }
Code Example #24
File: LightDetector.cs Project: JonECG/Spoopy
    void Start()
    {
        RenderTexture r = new RenderTexture(16, 16, 16);
        r.Create();
        GetComponent<Camera>().targetTexture = r;

        blink = FindObjectOfType<Blinker>();
    }
Code Example #25
 static RenderTexture CreateDataTexture(int w, int h, RenderTextureFormat f)
 {
     RenderTexture r = new RenderTexture(w, h, 0, f);
     r.filterMode = FilterMode.Point;
     r.useMipMap = false;
     r.generateMips = false;
     r.Create();
     return r;
 }
Code Example #26
        public IEnumerator OnAddTrackDelegatesWithEvent()
        {
            var camObj      = new GameObject("Camera");
            var cam         = camObj.AddComponent <Camera>();
            var videoStream = cam.CaptureStream(1280, 720, 1000000);

            yield return(new WaitForSeconds(0.1f));

            var test = new MonoBehaviourTest <SignalingPeers>();

            test.component.SetStream(videoStream);
            yield return(test);

            test.component.CoroutineUpdate();
            yield return(new WaitForSeconds(0.1f));

            bool isCalledOnAddTrack    = false;
            bool isCalledOnRemoveTrack = false;

            videoStream.OnAddTrack = e =>
            {
                Assert.That(e.Track, Is.Not.Null);
                isCalledOnAddTrack = true;
            };
            videoStream.OnRemoveTrack = e =>
            {
                Assert.That(e.Track, Is.Not.Null);
                isCalledOnRemoveTrack = true;
            };

            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var track2 = new VideoStreamTrack("video2", rt);

            videoStream.AddTrack(track2);
            var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);

            yield return(op1);

            videoStream.RemoveTrack(track2);
            var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);

            yield return(op2);

            test.component.Dispose();
            track2.Dispose();
            // wait for disposing video track.
            yield return(0);

            videoStream.Dispose();
            Object.DestroyImmediate(camObj);
            Object.DestroyImmediate(rt);
        }
Code Example #27
File: RenderEffect.cs Project: sugi-cho/drawTex
	void CreateOutput (RenderTexture s)
	{
		if (output != null) {
			Destroy (output);
		}
		output = new RenderTexture (s.width, s.height, s.depth, s.format);
		output.wrapMode = wrapmode;
		output.Create ();
		Graphics.Blit (s, output);
	}
Code Example #28
    // Use this for initialization
    void Awake()
    {
        // create render texture
        var rect = GetComponent<RectTransform>();
        renderTexture = new RenderTexture((int)rect.rect.width, (int)rect.rect.height, 0, RenderTextureFormat.Default);
        renderTexture.Create();

        var camera = transform.GetComponentInChildren<Camera>();
        camera.targetTexture = renderTexture;
    }
Code Example #29
 RenderTexture CreateGBufferRT(RenderTextureFormat format, int depth = 0)
 {
     var ret = new RenderTexture(m_camera.pixelWidth, m_camera.pixelHeight, depth, format);
     ret.filterMode = FilterMode.Point;
     ret.useMipMap = false;
     ret.generateMips = false;
     ret.enableRandomWrite = m_enable_uav;
     ret.Create();
     return ret;
 }
Code Example #30
 // Use this for initialization
 void Start()
 {
     Camera camera = GetComponent<Camera>();
     renderTexture = new RenderTexture(Screen.width/2, Screen.height/2, 0);
     renderTexture.isPowerOfTwo = false; // set before Create(), otherwise it has no effect
     renderTexture.Create();
     camera.targetTexture = renderTexture;
     camera.depth = -10; // force draw earlier than main camera
     GameObject staticArrow = GameObject.Find("Cluster/StaticArrow");
     staticArrow.GetComponent<Renderer>().material.SetTexture("_Detail", renderTexture);
 }
Code Example #31
    // Use this for initialization
    void Start()
    {
        testing = new RenderTexture(128, 128, 0); // a compute-shader target needs no depth buffer (valid depths are 0, 16, and 24)
        testing.enableRandomWrite = true;
        testing.Create();

        // Fill genome with random..
        int kernel = shader.FindKernel("FillRandom");
        shader.SetTexture(kernel, "Result", testing);
        shader.Dispatch(kernel, 32, 32, 1);
    }
Code Example #32
File: FreeProbe.cs Project: keyward/EnemyOfMyEnemy
		private void UpdateFaceTexture() {
			if(_targetCube == null) return;
			if(faceTexture == null || faceTexture.width != _targetCube.width) {
				if(faceTexture) Texture2D.DestroyImmediate(faceTexture);
				faceTexture = new Texture2D(_targetCube.width, _targetCube.width, TextureFormat.ARGB32, true, false);
				
				//attempt to make an HDR render texture for RGBM capture
				RT = RenderTexture.GetTemporary(_targetCube.width, _targetCube.width, 24, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
				RT.Release();
				RT.isCubemap = false;
				RT.useMipMap = false;
				RT.generateMips = false;
				RT.Create();
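				// Create() returns false on failure; the check below retries once and falls back to LDR capture when the HDR format can't be allocated.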
				if(!RT.IsCreated() && !RT.Create()) {
					Debug.LogWarning("Failed to create HDR RenderTexture, capturing in LDR mode.");
					RenderTexture.ReleaseTemporary(RT);
					RT = null;
				}		
			}
		}
Code Example #33
    void Start()
    {
        silhouetteTexture = new RenderTexture(Screen.width, Screen.height, 16, RenderTextureFormat.ARGB32);
        silhouetteTexture.Create();

        camera.targetTexture = silhouetteTexture;

        foreach (GameObject obj in GameObject.FindGameObjectsWithTag("Silhouette Receiver")) {
            obj.renderer.material.SetTexture("_Silhouette",silhouetteTexture as Texture);
        }
    }
Code Example #34
        /// <summary>
        /// Create the RenderTexture that will be used by both cameras.
        /// </summary>
        private void CreateRenderTexture()
        {
            screenX = Screen.width;
            screenY = Screen.height;

            mainRenderTexture = new RenderTexture(screenX, screenY, renderTextureDepth, renderTextureFormat);
            mainRenderTexture.Create();

            worldCamera.targetTexture = mainRenderTexture;
            fpvCamera.targetTexture = mainRenderTexture;
        }
Code Example #35
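 // Lua binding wrapper (appears to be SLua-generated): fetches the RenderTexture from the Lua stack, calls Create(), and pushes the bool result back to Lua.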
 static public int Create(IntPtr l)
 {
     try {
         UnityEngine.RenderTexture self = (UnityEngine.RenderTexture)checkSelf(l);
         var ret = self.Create();
         pushValue(l, ret);
         return(1);
     }
     catch (Exception e) {
         return(error(l, e));
     }
 }
Code Example #36
 public void initPixelShader(int X,int Y)
 {
     genTex = new RenderTexture (X,Y, 0,RenderTextureFormat.RFloat);
     cShader.SetTexture (mKernel, "inTex", imgCrater.getTexture());
     genTex.enableRandomWrite = true;
     genTex.Create ();
     cShader.SetTexture (mKernel, "genTex", genTex);
     // set vertices
     verticesBuffer = new ComputeBuffer(X*Y/64, sizeof(float)*3, ComputeBufferType.Default);
     gridGenerated.verticesBuffer = verticesBuffer;
     cShader.SetBuffer(mKernel, "vertices",verticesBuffer);
 }
Code Example #37
    public void RunShader() {
        int kernelHandle = shader.FindKernel("CSMain");

        RenderTexture tex = new RenderTexture(256, 256, 24);
        tex.enableRandomWrite = true;
        tex.Create();

        shader.SetTexture(kernelHandle, "Result", tex);
        shader.Dispatch(kernelHandle, 256 / 8, 256 / 8, 1);

        GetComponent<MeshRenderer>().material.mainTexture = tex;
    }
Code Example #38
 static public int Create(IntPtr l)
 {
     try{
         UnityEngine.RenderTexture self = (UnityEngine.RenderTexture)checkSelf(l);
         System.Boolean            ret  = self.Create();
         pushValue(l, ret);
         return(1);
     }
     catch (Exception e) {
         LuaDLL.luaL_error(l, e.ToString());
         return(0);
     }
 }
Code Example #39
        public void VideoStreamTrackDisposeImmediately()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var track = new VideoStreamTrack("video", rt);

            track.Dispose();
            Object.DestroyImmediate(rt);
        }
Code Example #40
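 // The same generated Lua-binding pattern as above, with machine-mangled identifier names.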
 static int QPYX_Create_YXQP(IntPtr L_YXQP)
 {
     try
     {
         ToLua.CheckArgsCount(L_YXQP, 1);
         UnityEngine.RenderTexture QPYX_obj_YXQP = (UnityEngine.RenderTexture)ToLua.CheckObject <UnityEngine.RenderTexture>(L_YXQP, 1);
         bool QPYX_o_YXQP = QPYX_obj_YXQP.Create();
         LuaDLL.lua_pushboolean(L_YXQP, QPYX_o_YXQP);
         return(1);
     }
     catch (Exception e_YXQP)                {
         return(LuaDLL.toluaL_exception(L_YXQP, e_YXQP));
     }
 }
Code Example #41
    void Start()
    {
        // Get view component
        var view = GetComponent <NoesisView>();

        // Find the rectangle where texture will be drawn
        var rect = (Rectangle)view.Content.FindName("rtRect");

        // Create render texture
        UnityEngine.RenderTexture renderTexture = new UnityEngine.RenderTexture(
            512, 512, 1, UnityEngine.RenderTextureFormat.Default);
        UnityEngine.RenderTexture.active = renderTexture;

        // Set render texture as camera target
        this._offscreenCamera.targetTexture = renderTexture;
        this._offscreenCamera.aspect        = 1;

        // Create Noesis texture
        renderTexture.Create();
        var tex = Noesis.Texture.WrapTexture(renderTexture, renderTexture.GetNativeTexturePtr(),
                                             renderTexture.width, renderTexture.height, 1);

        // Create brush to store render texture and assign it to the rectangle
        rect.Fill = new ImageBrush()
        {
            ImageSource = new TextureSource(tex),
            Stretch     = Stretch.UniformToFill,
            Opacity     = 0.9f
        };

        // Title bar drag to move
        this._titleBar = (Border)view.Content.FindName("titleBar");
        this._titleBar.MouseLeftButtonDown += this.OnTitleBarMouseDown;
        this._titleBar.MouseLeftButtonUp   += this.OnTitleBarMouseUp;
        this._titleBar.MouseMove           += this.OnTitleBarMouseMove;

        var panel = (Panel)view.Content.FindName("panel");

        this._panelPosition = (TranslateTransform)panel.RenderTransform;

        // Model rotation
        var rotateLeft = (RepeatButton)view.Content.FindName("rotateLeft");

        rotateLeft.Click += this.OnRotateLeft;

        var rotateRight = (RepeatButton)view.Content.FindName("rotateRight");

        rotateRight.Click += this.OnRotateRight;
    }
Code Example #42
        public IEnumerator VideoStreamAddTrackAndRemoveTrack()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var stream = new MediaStream();
            var track  = new VideoStreamTrack(rt);

            bool isCalledOnAddTrack    = false;
            bool isCalledOnRemoveTrack = false;

            stream.OnAddTrack = e =>
            {
                Assert.That(e.Track, Is.EqualTo(track));
                isCalledOnAddTrack = true;
            };
            stream.OnRemoveTrack = e =>
            {
                Assert.That(e.Track, Is.EqualTo(track));
                isCalledOnRemoveTrack = true;
            };

            // wait for the end of the initialization for encoder on the render thread.
            yield return(0);

            Assert.That(track.Kind, Is.EqualTo(TrackKind.Video));
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
            Assert.That(stream.AddTrack(track), Is.True);
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
            Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);
            Assert.That(stream.RemoveTrack(track), Is.True);
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));

            var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);

            yield return(op1);

            var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);

            yield return(op2);

            track.Dispose();

            stream.Dispose();
            Object.DestroyImmediate(rt);
        }
Code Example #43
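 // Generated tolua# binding: checks the argument count, calls Create() on the RenderTexture argument, and returns the bool result to Lua.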
 static int Create(IntPtr L)
 {
     try
     {
         ToLua.CheckArgsCount(L, 1);
         UnityEngine.RenderTexture obj = (UnityEngine.RenderTexture)ToLua.CheckObject(L, 1, typeof(UnityEngine.RenderTexture));
         bool o = obj.Create();
         LuaDLL.lua_pushboolean(L, o);
         return(1);
     }
     catch (Exception e)
     {
         return(LuaDLL.toluaL_exception(L, e));
     }
 }
Code Example #44
        public void AddTransceiver()
        {
            var peer   = new RTCPeerConnection();
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();

            var track = new VideoStreamTrack(rt);

            Assert.That(peer.GetTransceivers(), Is.Empty);
            var transceiver = peer.AddTransceiver(track);

            Assert.That(transceiver, Is.Not.Null);
            Assert.That(transceiver.Mid, Is.Null);
            Assert.That(transceiver.CurrentDirection, Is.Null);
            RTCRtpSender sender = transceiver.Sender;

            Assert.That(sender, Is.Not.Null);
            Assert.That(track, Is.EqualTo(sender.Track));

            RTCRtpSendParameters parameters = sender.GetParameters();

            Assert.That(parameters, Is.Not.Null);
            Assert.That(parameters.encodings, Is.Empty);
            Assert.That(parameters.transactionId, Is.Not.Empty);
            Assert.That(peer.GetTransceivers(), Has.Count.EqualTo(1));
            Assert.That(peer.GetTransceivers().First(), Is.Not.Null);
            Assert.That(parameters.codecs, Is.Empty);
            Assert.That(parameters.rtcp, Is.Not.Null);

            // Some platforms return an empty list
            Assert.That(parameters.headerExtensions, Is.Not.Null);
            foreach (var extension in parameters.headerExtensions)
            {
                Assert.That(extension, Is.Not.Null);
                Assert.That(extension.uri, Is.Not.Empty);
            }

            track.Dispose();
            peer.Dispose();
            Object.DestroyImmediate(rt);
        }
Code Example #45
        public void GetTransceiversReturnsNotEmptyAfterCallingRemoveTrack()
        {
            // Note: the `RTCPeerConnection.AddTrack` and `RTCPeerConnection.RemoveTrack` methods are not intuitive.
            var peer   = new RTCPeerConnection();
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var track  = new VideoStreamTrack(rt);
            var sender = peer.AddTrack(track);

            Assert.That(peer.GetTransceivers(), Has.Count.EqualTo(1));
            Assert.That(peer.RemoveTrack(sender), Is.EqualTo(RTCErrorType.None));
            Assert.That(peer.GetTransceivers(), Has.Count.EqualTo(1));
            peer.Dispose();
        }
Code Example #46
	static int Create(IntPtr L)
	{
#if UNITY_EDITOR
        ToluaProfiler.AddCallRecord("UnityEngine.RenderTexture.Create");
#endif
		try
		{
			ToLua.CheckArgsCount(L, 1);
			UnityEngine.RenderTexture obj = (UnityEngine.RenderTexture)ToLua.CheckObject<UnityEngine.RenderTexture>(L, 1);
			bool o = obj.Create();
			LuaDLL.lua_pushboolean(L, o);
			return 1;
		}
		catch (Exception e)
		{
			return LuaDLL.toluaL_exception(L, e);
		}
	}
Code Example #47
    private System.Collections.IEnumerator _takeSS(GO caller, int sceneIdx)
    {
        AsyncOp op;

        SceneMng.sceneLoaded += this.sceneLoaded;
        op = SceneMng.LoadSceneAsync(sceneIdx, SceneMode.Additive);
        yield return(op);

        SceneMng.sceneLoaded -= this.sceneLoaded;

        TexBuffer tb = new TexBuffer(this.thumbWidth, this.thumbHeight,
                                     ScreenshotLevel.depth, ScreenshotLevel.fmt,
                                     ScreenshotLevel.texMode);

        tb.name         = $"{sceneIdx}_screenshot.tex";
        tb.filterMode   = ScreenshotLevel.filterMode;
        tb.anisoLevel   = ScreenshotLevel.anisoLevel;
        tb.antiAliasing = ScreenshotLevel.antiAliasing;
        tb.wrapMode     = ScreenshotLevel.wrapMode;
        tb.depth        = ScreenshotLevel.depth;
        tb.Create();

        this.bbCamera.targetTexture = tb;
        this.bbCamera.enabled       = true;
        yield return(null);

        Material mat = new Material(this.shader);

        mat.mainTexture = tb;
        mat.name        = $"{sceneIdx}_screenshot.mat";

        this.bbCamera.enabled       = false;
        this.bbCamera.targetTexture = null;

        op = SceneMng.UnloadSceneAsync(sceneIdx);
        yield return(op);

        this.issueEvent <ScreenshotLevelController>(
            (x, y) => x.OnSSTaken(tb, mat), caller);

        this.running = false;
    }
Code Example #48
    public UnityEngine.RenderTexture GetOrCreateRenderTexture(int key, int width, int height, int depth)
    {
        lock (padlock)
        {
            if (this.cachedRenderTextures == null)
            {
                this.cachedRenderTextures = new Dictionary <int, UnityEngine.RenderTexture>();
            }

            if (!this.cachedRenderTextures.ContainsKey(key))
            {
                UnityEngine.RenderTexture renderTexture = new UnityEngine.RenderTexture(width, height, depth);
                renderTexture.Create();

                this.cachedRenderTextures.Add(key, renderTexture);
            }

            return(this.cachedRenderTextures[key]);
        }
    }
Code Example #49
        public IEnumerator VideoStreamTrackInstantiateMultiple()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt1    = new UnityEngine.RenderTexture(width, height, 0, format);

            rt1.Create();
            var track1 = new VideoStreamTrack("video1", rt1);

            var rt2 = new UnityEngine.RenderTexture(width, height, 0, format);

            rt2.Create();
            var track2 = new VideoStreamTrack("video2", rt2);

            // wait for initialization encoder on render thread.
            yield return(new WaitForSeconds(0.1f));

            track1.Dispose();
            track2.Dispose();
            Object.DestroyImmediate(rt1);
            Object.DestroyImmediate(rt2);
        }