Example #1
    public void Destroy()
    {
        appendBuffer.Release();
        argBuffer.Release();

        // Also free the temporary readback resources AsyncTextureReader allocated for these buffers.
        AsyncTextureReader.ReleaseTempResources(appendBuffer);
        AsyncTextureReader.ReleaseTempResources(argBuffer);
    }
Example #2
 public void SwitchResolution(int width, int height)
 {
     videoWidth  = width;
     videoHeight = height;
     renderCam.targetTexture.Release();
     renderCam.targetTexture = new RenderTexture(videoWidth, videoHeight, renderCam.targetTexture.depth, renderCam.targetTexture.format, RenderTextureReadWrite.Default);
     Reader = new AsyncTextureReader(renderCam.targetTexture);
 }
Example #3
    private void Awake()
    {
        var videoWidth  = 1920;
        var videoHeight = 1080;
        var rtDepth     = 24;
        var rtFormat    = RenderTextureFormat.ARGB32;
        var rtReadWrite = RenderTextureReadWrite.Linear;

        RenderTexture activeRT = new RenderTexture(videoWidth, videoHeight, rtDepth, rtFormat, rtReadWrite)
        {
            dimension       = UnityEngine.Rendering.TextureDimension.Tex2D,
            antiAliasing    = 1,
            useMipMap       = false,
            useDynamicScale = false,
            wrapMode        = TextureWrapMode.Clamp,
            filterMode      = FilterMode.Bilinear
        };

        activeRT.name = "GroundTruthHD";
        activeRT.Create();
        groundTruthCamera.targetTexture = activeRT;

        Reader = new AsyncTextureReader <byte>(groundTruthCamera.targetTexture);
        GetComponentInParent <CameraSettingsManager>().AddCamera(groundTruthCamera);
        AddUIElement(groundTruthCamera);

        // Derive the horizontal FOV from the camera's vertical FOV and aspect ratio.
        radVFOV = groundTruthCamera.fieldOfView * Mathf.Deg2Rad;
        radHFOV = 2 * Mathf.Atan(Mathf.Tan(radVFOV / 2) * groundTruthCamera.aspect);
        degVFOV = groundTruthCamera.fieldOfView;
        degHFOV = Mathf.Rad2Deg * radHFOV;

        // Size the detection trigger box to roughly cover the camera frustum out to maxDistance.
        float width  = 2 * Mathf.Tan(radHFOV / 2) * maxDistance;
        float height = 3f;
        float depth  = maxDistance;

        BoxCollider camBoxCollider = cameraRangeTrigger.GetComponent <BoxCollider>();

        camBoxCollider.center = new Vector3(0, 0, depth / 2f);
        camBoxCollider.size   = new Vector3(width, height, depth);

        detectedObjects         = new List <Ros.Detection2D>();
        cameraDetectedColliders = new Dictionary <Collider, Ros.Detection2D>();
        cameraPredictedObjects  = new List <Ros.Detection2D>();
        cameraPredictedVisuals  = new List <Ros.Detection2D>();
        cameraRangeTrigger.SetCallback(OnCameraObjectDetected);

        backgroundTexture = Texture2D.whiteTexture;
        textureStyle      = new GUIStyle {
            normal = new GUIStyleState {
                background = backgroundTexture
            }
        };

        if (targetCamera != null)
        {
            targetCameraPreview = targetCamera.GetComponent <VideoToROS>().cameraPreview;
        }
    }
Example #4
    // Use this for initialization
    void Start()
    {
        AsyncTextureReader.InitDebugLogs();

        Pixels = new byte[DebugTexture.width * DebugTexture.height * 4];


        Debug.Log("Request Status: " + AsyncTextureReader.RequestTextureData(DebugTexture));
        Debug.Log("Retrieve Status: " + AsyncTextureReader.RetrieveTextureData(DebugTexture, Pixels));
    }
Example #5
    public void Destroy()
    {
        appendBuffer.Release();
        argBuffer.Release();

#if !UNITY_ASYNC
        if (needToFree)
        {
            AsyncTextureReader.ReleaseTempResources(appendBuffer);
            AsyncTextureReader.ReleaseTempResources(argBuffer);
        }
#endif
    }
Example #6
    void Start()
    {
        renderCam   = GetComponent <Camera>();
        videoWidth  = renderCam.targetTexture.width;
        videoHeight = renderCam.targetTexture.height;

        int depth  = renderCam.targetTexture.depth;
        var format = renderCam.targetTexture.format;

        renderCam.targetTexture.Release();
        renderCam.targetTexture = new RenderTexture(videoWidth, videoHeight, depth, format, RenderTextureReadWrite.Default);

        Reader = new AsyncTextureReader(renderCam.targetTexture);
    }
Example #7
    public void PathCompute()
    {
        float startTime = Time.realtimeSinceStartup;

        unfulfilledData[0] = 0;
        unfulfilledBuffer.SetData(unfulfilledData);

        int runsThisPass = 0;

        // Ping-pong between the two path buffers until either the per-pass time
        // budget (pathshare) or the run cap (MaxRunsPerPass) is exhausted.
        do
        {
            if (AtoB)
            {
                shader.SetBuffer(PathSolverHandle, "pathBufferFrom", pathBufferA);
                shader.SetBuffer(PathSolverHandle, "pathBufferTo", pathBufferB);
            }
            else
            {
                shader.SetBuffer(PathSolverHandle, "pathBufferFrom", pathBufferB);
                shader.SetBuffer(PathSolverHandle, "pathBufferTo", pathBufferA);
            }

            shader.Dispatch(PathSolverHandle, flowWidth / 8, flowHeight / 8, 1);
            currentRuns++;
            runsThisPass++;

            AtoB = !AtoB;
        } while (Time.realtimeSinceStartup - startTime < pathshare && runsThisPass < MaxRunsPerPass);

        // After enough total runs, asynchronously read back the "unfulfilled"
        // counter to check whether the flow field has converged.
        if (currentRuns > runsBeforeCheck)
        {
            if (!waitingForRetrieval)
            {
                AsyncTextureReader.RequestBufferData(unfulfilledBuffer);
                waitingForRetrieval = true;
            }
            else
            {
                AsyncTextureReader.Status status = AsyncTextureReader.RetrieveBufferData(unfulfilledBuffer, unfulfilledData);
                if (status == AsyncTextureReader.Status.Succeeded)
                {
                    waitingForRetrieval = false;
                    if (unfulfilledData[0] == 0)
                    {
                        fulfilled = true;
                    }
                }
            }
        }
    }
Example #8
        public bool StartGeneratingNoise(NoiseGenerationRequest NoiseRequest)
        {
            if (Processing)
            {
                return(false);
            }

            Datas        = new float[NoiseRequest.Lods][];
            UsedTextures = new RenderTexture[NoiseRequest.Lods];
            int kernelHandle = CShader.FindKernel("CSMain");

            for (int i = 0; i < NoiseRequest.Lods; i++)
            {
                Datas[i] = new float[65 * 65 * 65];
                RenderTexture tex = new RenderTexture(65, 65, 24);
                tex.dimension         = UnityEngine.Rendering.TextureDimension.Tex3D;
                tex.volumeDepth       = 65;
                tex.enableRandomWrite = true;

                Util.NoiseInfo data = new Util.NoiseInfo();

                // The offset doubles per LOD: LOD1 -> -32, -32; LOD2 -> -64, -64; ...
                float LODOffset = Mathf.Pow(2, 5 + i);

                data.offset    = new Vector3(-LODOffset, -LODOffset, -LODOffset) + NoiseRequest.Center;
                data.frequency = 1.0f / Mathf.Max(i, 1);   // guard i == 0, which would otherwise divide by zero

                Util.NoiseInfo[] arrdata = new Util.NoiseInfo[1];
                arrdata[0] = data;

                ComputeBuffer buffer = new ComputeBuffer(1, 16);
                buffer.SetData(arrdata);

                CShader.SetBuffer(kernelHandle, "dataBuffer", buffer);
                CShader.SetTexture(kernelHandle, "Result", tex);
                CShader.Dispatch(kernelHandle, 16, 16, 16);

                // Kick off an async readback of the generated 3D noise texture via its native pointer.
                //AsyncTextureReader.RequestTextureData(tex.GetNativeTexturePtr());
                AsyncTextureReader.RequestTexture3DDataWPtr(tex.GetNativeTexturePtr());
            }

            return(true);
        }
Example #9
    // Update is called once per frame
    void Update()
    {
        if (Pixels == null)
        {
            return;
        }

        AsyncTextureReader.Status status = AsyncTextureReader.RetrieveTextureData(DebugTexture, Pixels);
        Debug.Log("Retrieve Status: " + status);
        if (status == AsyncTextureReader.Status.Succeeded)
        {
            // print RGBA of first pixel
            Debug.LogFormat("Pixel RGBA: {0}; {1}; {2}; {3}", Pixels[0], Pixels[1], Pixels[2], Pixels[3]);
            Pixels = null;
        }
    }
Example #10
    private void GetPixels()
    {
        if (Pixels == null)
        {
            return;
        }

        AsyncTextureReader.Status status = AsyncTextureReader.RetrieveTextureData(DebugTexture, Pixels);
        Debug.LogFormat("Frame: {0}; Retrieve Status: {1}", Time.frameCount, status);
        if (status == AsyncTextureReader.Status.Succeeded)
        {
            // print RGBA of first pixel
            Debug.LogFormat("Pixel RGBA: {0}; {1}; {2}; {3}", Pixels[0], Pixels[1], Pixels[2], Pixels[3]);
            Pixels = null;
        }
    }
Example #11
    // Use this for initialization
    void Start()
    {
        _buffer = new ComputeBuffer(4, sizeof(float));
        _buffer.SetData(new float[] { 1, 2, 3, 4 });

        AsyncTextureReader.InitDebugLogs();

        Pixels = new byte[DebugTexture.width * DebugTexture.height * 4];

        Debug.LogFormat("Frame: {0}; Request Status: {1}", Time.frameCount, AsyncTextureReader.RequestTextureData(DebugTexture));
        Debug.LogFormat("Frame: {0}; Retrieve Status: {1}", Time.frameCount, AsyncTextureReader.RetrieveTextureData(DebugTexture, Pixels));

#if UNITY_5_5_OR_NEWER
        AsyncTextureReader.RequestBufferData(_buffer);
#endif
    }
Example #12
    private void GetData()
    {
#if UNITY_5_5_OR_NEWER
        if (_floats == null)
        {
            return;
        }

        AsyncTextureReader.Status status = AsyncTextureReader.RetrieveBufferData(_buffer, _floats);
        //Debug.LogFormat("Frame: {0}; Retrieve Buffer Status: {1}", Time.frameCount, status);
        if (status == AsyncTextureReader.Status.Succeeded)
        {
            Debug.LogFormat("Buffer Data: {0}; {1}; {2}; {3}", _floats[0], _floats[1], _floats[2], _floats[3]);
            _floats = null;
        }
#endif
    }
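Examples #11 and #12 issue a single request in Start() and retrieve it once later. When a texture needs to be read back continuously, the same two static calls are usually paired in Update(): request while nothing is pending, then poll until the status comes back Succeeded. The sketch below is a minimal illustration of that loop, not code from the library's samples; the ContinuousReadback class name, the DebugTexture field and the requestPending flag are assumptions, and only RequestTextureData, RetrieveTextureData and Status.Succeeded are taken from the examples above.

    using UnityEngine;

    // Hypothetical helper: continuously reads DebugTexture back to the CPU with
    // the static AsyncTextureReader API shown in Examples #11 and #12.
    public class ContinuousReadback : MonoBehaviour
    {
        public Texture DebugTexture;      // assumed to be assigned in the Inspector

        private byte[] Pixels;
        private bool   requestPending;

        void Start()
        {
            Pixels = new byte[DebugTexture.width * DebugTexture.height * 4];
        }

        void Update()
        {
            if (!requestPending)
            {
                // Kick off a new asynchronous readback of the texture.
                AsyncTextureReader.RequestTextureData(DebugTexture);
                requestPending = true;
            }
            else
            {
                // Poll until the data has arrived; Succeeded means Pixels is now valid.
                AsyncTextureReader.Status status = AsyncTextureReader.RetrieveTextureData(DebugTexture, Pixels);
                if (status == AsyncTextureReader.Status.Succeeded)
                {
                    requestPending = false;
                    Debug.LogFormat("Pixel RGBA: {0}; {1}; {2}; {3}", Pixels[0], Pixels[1], Pixels[2], Pixels[3]);
                }
            }
        }
    }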
Example #13
    private IEnumerator AsyncLoadImage(string filePath, TextureFormat textureFormat)
    {
        this.rawImage.texture = this.defaultTexture;
        this.uiController.SetButtonInteractable(false);

        System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
        sw.Start();

        for (int i = 0; i < TRIAL_COUNT; i++)
        {
            AsyncTextureReader.Read(
                new AsyncTextureReader.TextureData
                {
                    filePath = filePath,
                    format   = textureFormat,
                    width    = 1000,
                    height   = 1000
                },
                texture =>
                {
                    this.rawImage.texture = texture;
                    this.rawImage.SetNativeSize();
                    this.uiController.SetProgressText(
                        AsyncTextureReader.Progress.totalCount,
                        AsyncTextureReader.Progress.processedCount);
                });
        }

        WaitUntil waitUntil = new WaitUntil(() => !AsyncTextureReader.Progress.IsRuntime);

        yield return(waitUntil);

        sw.Stop();
        this.uiController.SetProcessedTime(sw.Elapsed.ToString());

        this.uiController.SetButtonInteractable(true);
        Resources.UnloadUnusedAssets();
        GC.Collect();
    }
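For reference, this benchmark coroutine would be started with StartCoroutine in the usual Unity way; the button handler name, file path and texture format below are placeholders, not part of the original sample.

    // Hypothetical call site for the coroutine above.
    public void OnLoadButtonClicked()
    {
        string path = System.IO.Path.Combine(Application.streamingAssetsPath, "sample.png");   // placeholder path
        StartCoroutine(AsyncLoadImage(path, TextureFormat.RGBA32));
    }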
Example #14
    public void Launch(ChunkRequest cr)
    {
        free             = false;
        forgetNextResult = false;
        retrievedData    = false;
        retrievedCount   = false;
        cur = cr;   // save current chunk request

        BlitNoise();

        DispatchMC();

#if UNITY_ASYNC
        // When UNITY_ASYNC is defined, use Unity's built-in AsyncGPUReadback API.
        appendRequest = AsyncGPUReadback.Request(appendBuffer);
        argRequest    = AsyncGPUReadback.Request(argBuffer);
#else
        // Otherwise fall back to the AsyncTextureReader plugin.
        // Note: the request itself might fail; its return status should probably be checked here.
        AsyncTextureReader.RequestBufferData(appendBuffer);
        AsyncTextureReader.RequestBufferData(argBuffer);
        needToFree = true;
#endif
    }
Example #15
    public void Launch(Texture density)
    {
        // can be either Texture3D or RenderTexture
        Debug.Assert(density.dimension == UnityEngine.Rendering.TextureDimension.Tex3D);
        // set compute shader references
        Graphics.ClearRandomWriteTargets(); // not sure if needed anymore
        appendBuffer.SetCounterValue(0);

        MarchingCubesCS.SetBuffer(kernelMC, ShaderProps.trianglesRW, appendBuffer);
        MarchingCubesCS.SetTexture(kernelMC, ShaderProps.densityTexture, density);

        MarchingCubesCS.Dispatch(kernelMC, resolution / 8, resolution / 8, resolution / 8);

        argBuffer.SetData(defaultArgs);

        // copy the counter variables from first buffer into second
        ComputeBuffer.CopyCount(appendBuffer, argBuffer, 0);

        AsyncTextureReader.RequestBufferData(appendBuffer);
        AsyncTextureReader.RequestBufferData(argBuffer);

        working = true;
        // launch coroutine to wait and update
    }
Example #16
    public void Update()
    {
        if (free)
        {
            return;
        }

#if UNITY_ASYNC
        if (!retrievedData)
        {
            if (appendRequest.hasError)
            {
                Debug.Log("Append Request Error");
            }
            else if (appendRequest.done)
            {
                data          = appendRequest.GetData <float>();
                retrievedData = true;
            }
        }

        if (!retrievedCount)
        {
            if (argRequest.hasError)
            {
                Debug.Log("Arg Request Error");
            }
            else if (argRequest.done)
            {
                count          = argRequest.GetData <int>();
                retrievedCount = true;
            }
        }
#else
        if (!retrievedData)
        {
            AsyncTextureReader.Status status = AsyncTextureReader.RetrieveBufferData(appendBuffer, data);
            if (status == AsyncTextureReader.Status.Succeeded)
            {
                retrievedData = true;
            }
        }
        if (!retrievedCount)
        {
            AsyncTextureReader.Status status = AsyncTextureReader.RetrieveBufferData(argBuffer, count);
            if (status == AsyncTextureReader.Status.Succeeded)
            {
                retrievedCount = true;
            }
        }
#endif

        if (retrievedData && retrievedCount)
        {
            bool lastCheck = cur.lastCheck();
            if (!forgetNextResult && lastCheck)
            {
                cur.callback(BuildMeshData(), cur.id);
            }
            if (forgetNextResult)
            {
                Debug.Log("forgot last");
            }
            if (!lastCheck)
            {
                //Debug.Log("last check fail post");
            }

            free = true;
        }
    }
Example #17
    bool RenderLasers(int count, float angleStart, float angleUse)
    {
        bool pointCloudUpdated = false;

#if UNITY_EDITOR
        UnityEngine.Profiling.Profiler.BeginSample("Render Lasers");
#endif

        AsyncTextureReader <Vector2> reader = null;
        // Reuse a pooled reader when one is available; otherwise create a new
        // render texture and a reader for it.
        if (Available.Count == 0)
        {
            var texture = new RenderTexture(RenderTextureWidth, RenderTextureHeight, 24, RenderTextureFormat.RGFloat, RenderTextureReadWrite.Linear);
            reader = new AsyncTextureReader <Vector2>(texture);
        }
        else
        {
            reader = Available.Pop();
        }

        Camera.targetTexture = reader.Texture;
        Camera.RenderWithShader(Shader, "RenderType");
        reader.Start();

        var pos = Camera.transform.position;

        var topLeft     = Camera.ViewportPointToRay(new Vector3(0, 0, 1)).direction;
        var topRight    = Camera.ViewportPointToRay(new Vector3(1, 0, 1)).direction;
        var bottomLeft  = Camera.ViewportPointToRay(new Vector3(0, 1, 1)).direction;
        var bottomRight = Camera.ViewportPointToRay(new Vector3(1, 1, 1)).direction;

        int maxRayCount = (int)(2.0f * MaxAngle * RayCount / FieldOfView);
        var deltaX      = (topRight - topLeft) / count;
        var deltaY      = (bottomLeft - topLeft) / maxRayCount;

        int startRay = 0;
        var start    = topLeft;
        if (CenterAngle < 0.0f)
        {
            startRay = maxRayCount - RayCount;
        }

#if VISUALIZE_LIDAR_CAMERA_BOUNDING_BOX
        var a = start + deltaY * startRay;
        var b = a + deltaX * count;

        Debug.DrawLine(pos, pos + MaxDistance * a, Color.yellow, 1.0f, true);
        Debug.DrawLine(pos, pos + MaxDistance * b, Color.yellow, 1.0f, true);
        Debug.DrawLine(pos + MaxDistance * a, pos + MaxDistance * b, Color.yellow, 1.0f, true);

        a = start + deltaY * (startRay + RayCount);
        b = a + deltaX * count;

        Debug.DrawLine(pos, pos + MaxDistance * a, Color.magenta, 1.0f, true);
        Debug.DrawLine(pos, pos + MaxDistance * b, Color.magenta, 1.0f, true);
        Debug.DrawLine(pos + MaxDistance * a, pos + MaxDistance * b, Color.magenta, 1.0f, true);
#endif

        var req = new ReadRequest()
        {
            Reader      = reader,
            Count       = count,
            MaxRayCount = maxRayCount,
            StartRay    = startRay,
            Origin      = pos,
            Start       = start,
            DeltaX      = deltaX,
            DeltaY      = deltaY,
        };

        // Poll the reader once right away; if the readback already finished,
        // consume it now, otherwise keep the request in the Active list.
        req.Reader.Update();
        if (req.Reader.Status == AsyncTextureReaderStatus.Finished)
        {
            pointCloudUpdated = true;
            ReadLasers(req);
            Available.Push(req.Reader);
        }
        else
        {
            Active.Add(req);
        }
#if UNITY_EDITOR
        UnityEngine.Profiling.Profiler.EndSample();
#endif
        return(pointCloudUpdated);
    }
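The requests pushed onto Active above are presumably polled on later frames until their readbacks finish. A minimal sketch of that companion loop follows, reusing only members this example already touches (Reader.Update(), Reader.Status, ReadLasers, Available, Active); the method name, the List/Stack types and the backwards iteration are assumptions.

    // Hypothetical companion poll, assuming Active is a List<ReadRequest> and
    // Available is a Stack<AsyncTextureReader<Vector2>> as used by RenderLasers above.
    void PollActiveRequests()
    {
        for (int i = Active.Count - 1; i >= 0; i--)
        {
            var req = Active[i];
            req.Reader.Update();
            if (req.Reader.Status == AsyncTextureReaderStatus.Finished)
            {
                // Readback finished: consume the lasers and return the reader to the pool.
                ReadLasers(req);
                Available.Push(req.Reader);
                Active.RemoveAt(i);
            }
        }
    }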
Example #18
    public void Init()
    {
        switch (captureType)
        {
        case CaptureType.Capture:
            rtFormat    = RenderTextureFormat.ARGB32;
            rtReadWrite = RenderTextureReadWrite.sRGB;
            rtDepth     = 24;
            break;

        case CaptureType.Segmentation:
            rtFormat    = RenderTextureFormat.ARGB32;
            rtReadWrite = RenderTextureReadWrite.sRGB;
            rtDepth     = 24;
            break;

        case CaptureType.Depth:
            rtFormat    = RenderTextureFormat.ARGB32;
            rtReadWrite = RenderTextureReadWrite.Linear;
            rtDepth     = 24;
            break;

        default:
            break;
        }

        switch (resolutionType)
        {
        case ResolutionType.SD:
            videoWidth  = 640;
            videoHeight = 480;
            break;

        case ResolutionType.HD:
            videoWidth  = 1920;
            videoHeight = 1080;
            break;

        default:
            break;
        }

        RenderTexture activeRT = new RenderTexture(videoWidth, videoHeight, rtDepth, rtFormat, rtReadWrite)
        {
            dimension       = UnityEngine.Rendering.TextureDimension.Tex2D,
            antiAliasing    = 1,
            useMipMap       = false,
            useDynamicScale = false,
            wrapMode        = TextureWrapMode.Clamp,
            filterMode      = FilterMode.Bilinear
        };

        activeRT.name = captureType.ToString() + resolutionType.ToString();
        activeRT.Create();

        renderCam = GetComponent <Camera>();
        renderCam.targetTexture = activeRT;

        if (captureType == CaptureType.Segmentation)
        {
            SegmentColorer segColorer = FindObjectOfType <SegmentColorer>();
            if (segColorer != null)
            {
                renderCam.SetReplacementShader(segColorer.Shader, "SegmentColor"); // TODO needs to be local ref or manager?
                renderCam.backgroundColor = segColorer.SkyColor;                   // TODO needs to be local ref or manager?
                renderCam.clearFlags      = CameraClearFlags.SolidColor;
                renderCam.renderingPath   = RenderingPath.Forward;
            }
        }
        Reader = new AsyncTextureReader <byte>(renderCam.targetTexture);

        GetComponentInParent <CameraSettingsManager>().AddCamera(renderCam);

        // TODO better way
        if (sensorName == "Main Camera")
        {
            GetComponentInParent <RobotSetup>().MainCam = renderCam;
        }

        addUIElement();
    }
Example #19
    public void Init()
    {
        switch (captureType)
        {
        case CaptureType.Capture:
            rtFormat    = RenderTextureFormat.ARGB32;
            rtReadWrite = RenderTextureReadWrite.sRGB;
            rtDepth     = 24;
            break;

        case CaptureType.Segmentation:
            rtFormat    = RenderTextureFormat.ARGB32;
            rtReadWrite = RenderTextureReadWrite.sRGB;
            rtDepth     = 24;
            break;

        case CaptureType.Depth:
            rtFormat    = RenderTextureFormat.ARGB32;
            rtReadWrite = RenderTextureReadWrite.Linear;
            rtDepth     = 24;
            break;

        default:
            break;
        }

        switch (resolutionType)
        {
        case ResolutionType.SD:
            videoWidth  = 640;
            videoHeight = 480;
            break;

        case ResolutionType.HD:
            videoWidth  = 1920;
            videoHeight = 1080;
            break;

        default:
            break;
        }

        RenderTexture activeRT = new RenderTexture(videoWidth, videoHeight, rtDepth, rtFormat, rtReadWrite)
        {
            dimension       = UnityEngine.Rendering.TextureDimension.Tex2D,
            antiAliasing    = 1,
            useMipMap       = false,
            useDynamicScale = false,
            wrapMode        = TextureWrapMode.Clamp,
            filterMode      = FilterMode.Bilinear
        };

        activeRT.name = captureType.ToString() + resolutionType.ToString();
        activeRT.Create();

        renderCam = GetComponent <Camera>();
        renderCam.targetTexture = activeRT;

        Reader = new AsyncTextureReader <byte>(renderCam.targetTexture);

        GetComponentInParent <CameraSettingsManager>().AddCamera(renderCam);

        // TODO better way
        if (sensorName == "Main Camera")
        {
            GetComponentInParent <AgentSetup>().MainCam = renderCam;
        }

        addUIElement();
    }
Example #20
    // Update is called once per frame
    void Update()
    {
        if (operating)
        {
            if (!fulfilled)
            {
                PathCompute();
            }
            // The path solve has converged; asynchronously read back the active path buffer.
            else if (!pathreturn)
            {
                if (!waitingForRetrieval)
                {
                    if (AtoB)
                    {
                        AsyncTextureReader.RequestBufferData(pathBufferB);
                    }
                    else
                    {
                        AsyncTextureReader.RequestBufferData(pathBufferA);
                    }
                    waitingForRetrieval = true;
                }
                else
                {
                    AsyncTextureReader.Status status;

                    if (AtoB)
                    {
                        status = AsyncTextureReader.RetrieveBufferData(pathBufferB, pathDataRaw);
                    }
                    else
                    {
                        status = AsyncTextureReader.RetrieveBufferData(pathBufferA, pathDataRaw);
                    }

                    if (status == AsyncTextureReader.Status.Succeeded)
                    {
                        Debug.Log(Time.realtimeSinceStartup - computestarttime);
                        waitingForRetrieval = false;
                        pathreturn          = true;

                        for (int i = 0; i < pathData.Length; i++)
                        {
                            pathData[i] = new pathInfo(System.BitConverter.ToUInt32(pathDataRaw, i * 16), System.BitConverter.ToInt32(pathDataRaw, i * 16 + 4), System.BitConverter.ToInt32(pathDataRaw, i * 16 + 8), System.BitConverter.ToUInt32(pathDataRaw, i * 16 + 12));
                            //debugArray[i].transform.localScale = new Vector3(1.0f, 1.0f, pathData[i].cost / 10.0f);
                        }

                        if (activePath == 1)
                        {
                            activePath = 0;
                        }
                        else
                        {
                            activePath = 1;
                        }

                        shader.SetInt("activePath", activePath);

                        operating = false;
                        //Debug.Log("hey now");
                    }
                }
            }
        }
        else
        {
            // No path job is running: apply any queued map edits, re-upload the
            // difficulty data, then restart path generation toward the last target.
            if (edits.Count > 0)
            {
                while (edits.Count > 0)
                {
                    for (int i = 0; i < edits[0].blocks.Count; i++)
                    {
                        if (edits[0].blocks[i].x > 0 && edits[0].blocks[i].x < flowWidth && edits[0].blocks[i].y > 0 && edits[0].blocks[i].y < flowHeight)
                        {
                            difficultyData[edits[0].blocks[i].x + edits[0].blocks[i].y * flowWidth] = edits[0].newVal;
                            obstacles[edits[0].blocks[i].x, edits[0].blocks[i].y].SetActive(edits[0].newVal == 0u);
                        }
                    }
                    edits.RemoveAt(0);
                }

                difficultyBuffer.SetData(difficultyData);

                NewDest(lastTarget.x, lastTarget.y);
            }
        }
    }