Example #1
 public void Enqueue(Texture source, int mipIndex, TextureFormat dstFormat, int layer = 0, Action <NativeArray <Color32> > callback = null)
 {
     if (EnqueueCheck())
     {
         Entries.Enqueue(new AsyncGPUReadbackRequestEntry <Color32>(AsyncGPUReadback.Request(source, mipIndex, dstFormat), layer, callback));
     }
 }
Example #2
        private IEnumerator WriteTex(RenderTexture rt, bool alpha)
        {
            //Pull texture off of GPU
            var req = AsyncGPUReadback.Request(rt, 0, 0, rt.width, 0, rt.height, 0, 1, alpha ? TextureFormat.RGBA32 : TextureFormat.RGBAFloat);

            while (!req.done)
            {
                yield return(null);
            }

            RenderTexture.ReleaseTemporary(rt);
            string path = GetCaptureFilename();

            LogScreenshotMessage("Writing rendered screenshot to " + path.Substring(Paths.GameRootPath.Length));

            //Write raw pixel data to a file
            //Uses pngcs Unity fork: https://github.com/andrew-raphael-lukasik/pngcs
            if (alpha)
            {
                using (var buffer = req.GetData <Color32>())
                    yield return(PNG.WriteAsync(buffer.ToArray(), req.width, req.height, 8, true, false, path));
            }
            else
            {
                using (var buffer = req.GetData <Color>())
                    yield return(PNG.WriteAsync(buffer.ToArray(), req.width, req.height, 8, false, false, path));
            }
        }
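
The coroutine above polls req.done but never checks req.hasError. A minimal sketch of the same polling pattern with an error check added (a hypothetical helper, not part of the original project):

        private IEnumerator ReadbackWithErrorCheck(RenderTexture rt)
        {
            // Hypothetical variant of the polling pattern above: bail out on a failed
            // readback before touching the data.
            var req = AsyncGPUReadback.Request(rt, 0, TextureFormat.RGBA32);

            while (!req.done)
            {
                yield return(null);
            }

            if (req.hasError)
            {
                Debug.LogError("AsyncGPUReadback request failed.");
                yield break;
            }

            // The NativeArray returned by GetData is only valid until the request is recycled.
            var pixels = req.GetData <Color32>();
            // ... consume 'pixels' here (e.g. copy with ToArray() before yielding again) ...
        }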
Example #3
        /// <summary>
        /// Request the current contents of the camera's shape texture. Queue pattern inspired by: https://github.com/keijiro/AsyncCaptureTest
        /// </summary>
        void EnqueueReadbackRequest(RenderTexture target, int lodIndex, LodTransform.RenderData renderData, float previousFrameTime)
        {
            if (!_doReadback)
            {
                return;
            }

            var lodData = _perLodData[renderData._texelWidth];

            // only queue up requests while time is advancing
            if (previousFrameTime <= lodData._resultData._time)
            {
                return;
            }

            if (lodData._requests.Count < MAX_REQUESTS)
            {
                lodData._requests.Enqueue(
                    new ReadbackRequest
                {
                    _request    = AsyncGPUReadback.Request(target, 0, 0, target.width, 0, target.height, lodIndex, 1),
                    _renderData = renderData,
                    _time       = previousFrameTime,
                }
                    );
            }
        }
Example #4
        public AsyncGPUReadbackRequest FromCubeMapAsync(Cubemap cubemap, System.Action <Vector4[]> callback)
        {
            var shcBuffer = new ComputeBuffer(GROUP_X * GROUP_Y * SHC_COUNT, 16);

            computeShader.SetTexture(0, "CubeMap", cubemap);
            computeShader.SetBuffer(0, "shcBuffer", shcBuffer);
            computeShader.SetInts("SampleSize", SAMPLE_SIZE_X, SAMPLE_SIZE_Y);
            computeShader.Dispatch(0, GROUP_X, GROUP_Y, 1);
            return(AsyncGPUReadback.Request(shcBuffer, (req) => {
                if (req.hasError)
                {
                    Debug.LogError("sh project with gpu error");
                    shcBuffer.Release();
                    callback(null);
                    return;
                }
                var groupShc = req.GetData <Vector4>();
                var count = groupShc.Length / SHC_COUNT;
                var shc = new Vector4[SHC_COUNT];
                for (var i = 0; i < count; i++)
                {
                    for (var offset = 0; offset < SHC_COUNT; offset++)
                    {
                        shc[offset] += groupShc[i * SHC_COUNT + offset];
                    }
                }
                shcBuffer.Release();
                callback(shc);
            }));
        }
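
A hedged usage sketch for the method above; the projector and reflectionCubemap names are hypothetical stand-ins for a component exposing FromCubeMapAsync and a cubemap asset:

        // Hypothetical call site: project a cubemap to SH coefficients and consume them
        // in the callback. 'shc' is null when the GPU readback reported an error.
        projector.FromCubeMapAsync(reflectionCubemap, shc => {
            if (shc == null)
            {
                Debug.LogError("SH projection failed.");
                return;
            }
            Debug.Log("First SH coefficient: " + shc[0]);
        });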
Example #5
    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        if (!_isRecording.Value)
        {
            Graphics.Blit(source, destination);
            return;
        }

        _time += Time.unscaledDeltaTime;
        if (_time >= _timePerFrame)
        {
            _time -= _timePerFrame;

            if (_requests.Count < 8)
            {
                _requests.Enqueue(AsyncGPUReadback.Request(source));
            }
            else
            {
                Debug.Log("Too many requests.");
            }
            _frameCount++;
        }
        Graphics.Blit(source, destination);
    }
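
OnRenderImage only enqueues readback requests; a sketch of an assumed companion Update loop (not shown in the original class) that drains _requests in completion order:

    private void Update()
    {
        // Requests complete in submission order, so only the front of the queue needs checking.
        while (_requests.Count > 0)
        {
            var req = _requests.Peek();

            if (req.hasError)
            {
                Debug.LogWarning("GPU readback error.");
                _requests.Dequeue();
            }
            else if (req.done)
            {
                var pixels = req.GetData <Color32>();
                // ... copy or encode 'pixels' here before the request is recycled ...
                _requests.Dequeue();
            }
            else
            {
                break; // stop at the first request that is still in flight
            }
        }
    }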
Example #6
        void ReleaseSenderObjects()
        {
            // Total synchronization: This may cause a frame hiccup, but it's
            // needed to dispose the readback buffers safely.
            AsyncGPUReadback.WaitAllRequests();

            // Game view capture method: Leave the sender instance without
            // disposing (we're not the owner) but synchronize it. It's needed to
            // dispose the readback buffers safely too.
            if (SharedInstance.IsGameViewSend(_send))
            {
                _send.SendVideoAsync(); // Sync by null-send
                _send = null;
            }

            // Private object disposal
            _send?.Dispose();
            _send = null;

            _pool?.Dispose();
            _pool = null;

            _converter?.Dispose();
            _converter = null;

            // We don't dispose _onReadback because it's reusable.
        }
Example #7
    /// <summary>
    /// Callback-based approach
    /// </summary>
    /// <returns></returns>
    IEnumerator Test()
    {
        Debug.LogError("one");
        while (true)
        {
            //yield return new WaitForSeconds(1);
            yield return(new WaitForEndOfFrame());

            var rt = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32);
            Camera.main.targetTexture = rt;
            RenderTexture.active      = rt;
            //ScreenCapture.CaptureScreenshotIntoRenderTexture(rt);
            RequestTime = new System.Diagnostics.Stopwatch();
            RequestTime.Start();
            ResponseTime = new System.Diagnostics.Stopwatch();
            ResponseTimeList.Add(ResponseTime);
            ResponseTime.Start();
            AsyncGPUReadback.Request(rt, 0, TextureFormat.ARGB32, OnCompleteReadback);
            RequestTime.Stop();
            Debug.LogError("当前帧" + curRequestIndex + "reqeust time :" + RequestTime.ElapsedTicks / 10000.0f + "ms");
            RenderTexture.active = null;
            if (curRequestIndex == 99)
            {
                Camera.main.targetTexture = null;
            }
            RenderTexture.ReleaseTemporary(rt);
            curRequestIndex++;
            if (curRequestIndex == 100)
            {
                yield break;
            }
        }
    }
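
The OnCompleteReadback callback is not shown; a hypothetical implementation consistent with the Request call above (the stopwatch bookkeeping is assumed):

    void OnCompleteReadback(AsyncGPUReadbackRequest request)
    {
        // Hypothetical callback: read the pixels once the request completes and log the
        // response time recorded for this frame.
        if (request.hasError)
        {
            Debug.LogError("GPU readback error detected.");
            return;
        }
        var pixels = request.GetData <Color32>();
        // ... stop the matching stopwatch in ResponseTimeList and log its elapsed time ...
    }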
Example #8
 private void UpdateMesh()
 {
     SeaComputeShader.Dispatch(_updateKernelHandle, _verticesPerSide, _verticesPerSide, 1);
     AsyncGPUReadback.Request(_verticesBuffer, SetPoints);
     _mesh.RecalculateNormals();
     _normals = _mesh.normals;
 }
Example #9
        /// <summary>
        /// Perform async read back from the provided render texture.
        /// </summary>
        /// <param name="src">RenderTexture to capture.</param>
        /// <param name="functor">Completion functor for handling the captured data. The object passed is a byte[] of the captured data.</param>
        /// <returns>AsyncRequest&lt;object&gt;</returns>
        public static AsyncRequest <object> Capture(RenderTexture src, Func <AsyncRequest <object>, AsyncRequest <object> .Result> functor = null)
        {
            var req = Manager.Instance.CreateRequest <AsyncRequest <object> >();

#if !UNITY_2019_2_OR_NEWER && (PLATFORM_STANDALONE_OSX || UNITY_EDITOR)
            req.data = GraphicsUtilities.GetPixelsSlow(src as RenderTexture);
            req.Enqueue(functor);
            req.Execute();
#else
            if (GraphicsUtilities.SupportsAsyncReadback())
            {
                AsyncGPUReadback.Request(src, 0, (AsyncGPUReadbackRequest request) =>
                {
                    if (request.hasError)
                    {
                        req.error = true;
                    }
                    else
                    {
                        req.data = request.GetData <byte>().ToArray();
                        req.Enqueue(functor);
                        req.Execute();
                    }
                });
            }
            else
            {
                req.data = GraphicsUtilities.GetPixelsSlow(src as RenderTexture);
                req.Enqueue(functor);
                req.Execute();
            }
#endif
            return(req);
        }
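
A hedged usage sketch for the method above; the render texture and output path are hypothetical, and Result.Completed is assumed to be this framework's success value for the functor:

            // Hypothetical call site: capture a RenderTexture and write the raw bytes to
            // disk once the request carries data (requires System.IO for File).
            // Result.Completed is assumed to exist on this framework's Result type.
            var captureRequest = Capture(myRenderTexture, r =>
            {
                File.WriteAllBytes("capture.raw", (byte[])r.data);
                return AsyncRequest <object> .Result.Completed;
            });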
Example #10
        /// <summary>
        /// Perform async read back from the provided source texture.
        /// </summary>
        /// <param name="src">Texture source to be used for the read back.</param>
        /// <param name="mipIndex">Index of the mipmap to be fetched.</param>
        /// <param name="functor">Functor that will be invoked after the async read back request is complete.</param>
        /// <typeparam name="T">Type for the destination data buffer.</typeparam>
        /// <returns>Returns an AsyncRequest</returns>
        public static AsyncRequest <object> Capture <T>(Texture src, int mipIndex = 0, Func <AsyncRequest <object>, AsyncRequest <object> .Result> functor = null) where T : struct
        {
            var req = Manager.Instance.CreateRequest <AsyncRequest <object> >();

            if (GraphicsUtilities.SupportsAsyncReadback())
            {
                AsyncGPUReadback.Request(src, mipIndex, (AsyncGPUReadbackRequest request) =>
                {
                    req.error = request.hasError;
                    if (!request.hasError)
                    {
                        req.data = request.GetData <T>().ToArray();
                        req.Enqueue(functor);
                        req.Execute();
                    }
                });
            }
            else
            {
                req.data = GraphicsUtilities.GetPixelsSlow(src as RenderTexture);
                req.Enqueue(functor);
                req.Execute();
            }

            return(req);
        }
Example #11
 public void IssueRead()
 {
     if (this.asyncRequests.Count < 10)
     {
         this.asyncRequests.Enqueue(AsyncGPUReadback.Request(this.resultTexture, 0, null));
     }
 }
Example #12
        /// <summary>
        /// Perform async read back from the provided compute buffer with size and offset.
        /// </summary>
        /// <param name="src">Compute buffer source to be used for the read back.</param>
        /// <param name="size">Size in bytes of the data to be retrieved from the ComputeBuffer.</param>
        /// <param name="offset">Offset in bytes in the ComputeBuffer.</param>
        /// <param name="functor">Functor that will be invoked after the async read back request is complete.</param>
        /// <typeparam name="T">Type for the destination data buffer.</typeparam>
        /// <returns>Returns an AsyncRequest</returns>
        public static AsyncRequest <object> Capture <T>(ComputeBuffer src, int size, int offset, Func <AsyncRequest <object>, AsyncRequest <object> .Result> functor = null) where T : struct
        {
            var req = Manager.Instance.CreateRequest <AsyncRequest <object> >();

            if (GraphicsUtilities.SupportsAsyncReadback())
            {
                AsyncGPUReadback.Request(src, size, offset, (AsyncGPUReadbackRequest request) =>
                {
                    req.error = request.hasError;
                    if (!request.hasError)
                    {
                        req.data = request.GetData <T>().ToArray();
                        req.Enqueue(functor);
                        req.Execute();
                    }
                });
            }
            else
            {
                T[] dst = new T[size];
                src.GetData(dst, offset, offset, size);

                req.data = dst;
                req.Enqueue(functor);
                req.Execute();
            }

            return(req);
        }
Example #13
        public void Capture(CaptureTask task)
        {
            switch (task.BlendMode)
            {
            case BlendMode.RGBOnly:
                CommitResult(m_RGBOrigin, task);
                break;

            case BlendMode.VirtualOnly:
                m_Requests.Enqueue(AsyncGPUReadback.Request(VirtualTexture));
                m_Tasks.Enqueue(task);
                break;

            case BlendMode.Blend:
                m_Requests.Enqueue(AsyncGPUReadback.Request(BlendTexture));
                m_Tasks.Enqueue(task);
                break;

            case BlendMode.WidescreenBlend:
                // Not supported yet.
                Debug.Log("WidescreenBlend mode is not supported yet.");
                break;

            default:
                break;
            }
        }
Example #14
 public void Enqueue(Texture source, int mipIndex, int x, int width, int y, int height, int z, int depth, int layer = 0, Action <NativeArray <Color32> > callback = null)
 {
     if (EnqueueCheck())
     {
         Entries.Enqueue(new AsyncGPUReadbackRequestEntry <Color32>(AsyncGPUReadback.Request(source, mipIndex, x, width, y, height, z, depth), layer, callback));
     }
 }
Example #15
    void CheckCapture()
    {
        if (Time.time >= NextCaptureTime)
        {
            //print("Camera Render");

            SensorCamera.Render();

            NativeArray <byte> gpuData;
            while (AvailableGpuDataArrays.TryTake(out gpuData) && gpuData.Length != Width * Height * 4)
            {
                gpuData.Dispose();
            }
            if (!gpuData.IsCreated)
            {
                gpuData = new NativeArray <byte>(Width * Height * 4, Allocator.Persistent);
            }

            var capture = new CameraCapture()
            {
                GpuData     = gpuData,
                CaptureTime = Time.time,
            };
            capture.Request = AsyncGPUReadback.RequestIntoNativeArray(ref capture.GpuData, SensorCamera.targetTexture, 0, TextureFormat.RGBA32);
            // TODO: Replace above AsyncGPUReadback.Request with following AsyncGPUReadback.RequestIntoNativeArray when we upgrade to Unity 2020.1
            // See https://issuetracker.unity3d.com/issues/asyncgpureadback-dot-requestintonativearray-crashes-unity-when-trying-to-request-a-copy-to-the-same-nativearray-multiple-times
            // for the details of the bug in Unity.
            //capture.Request = AsyncGPUReadback.RequestIntoNativeArray(ref capture.GpuData, Distorted ? DistortedTexture : SensorCamera.targetTexture, 0, TextureFormat.RGBA32);
            CaptureQueue.Enqueue(capture);

            NextCaptureTime = Time.time + (1.0f / Frequency);
        }
    }
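
A sketch of an assumed consumer for CaptureQueue (the original processing code is not shown) that recycles the NativeArrays once their readbacks finish; AvailableGpuDataArrays is assumed to support Add for returning buffers to the pool:

    void ProcessCaptureQueue()
    {
        // Hypothetical consumer: requests complete in order, so process the front of the
        // queue until an unfinished readback is reached.
        while (CaptureQueue.Count > 0 && CaptureQueue.Peek().Request.done)
        {
            var capture = CaptureQueue.Dequeue();
            if (capture.Request.hasError)
            {
                Debug.LogWarning("GPU readback failed, dropping frame.");
            }
            else
            {
                // capture.GpuData now holds Width * Height * 4 bytes of RGBA32 pixels.
                // ... hand the frame off to the encoder or publisher here ...
            }
            AvailableGpuDataArrays.Add(capture.GpuData); // return the buffer to the pool
        }
    }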
Example #16
    public void Generate()
    {
        int sizeOfChunk = chunkSize + 2;

        pointsBuffer.SetCounterValue(0);
        faceInfoBuffer.SetCounterValue(0);
        kernel = shader.FindKernel("Basic");

        faceInfoBuffer.SetData(clearArray);

        shader.SetBuffer(kernel, "points", pointsBuffer);
        shader.SetInt("size_of_chunk", sizeOfChunk);
        shader.SetInt("height_of_chunk", chunkHeight + 2);
        shader.SetVector("offset", new Vector3(chunkOffset.x, chunkOffset.y, chunkOffset.z));
        var groups      = Mathf.CeilToInt(sizeOfChunk / 8.0f);
        var heightGroup = Mathf.CeilToInt(chunkHeight / 8.0f);

        shader.Dispatch(kernel, groups, heightGroup, groups);
        kernel = geomShader.FindKernel("Gen");
        geomShader.SetBuffer(kernel, "face_info", faceInfoBuffer);
        geomShader.SetBuffer(kernel, "points", pointsBuffer);
        geomShader.SetInt("size_of_chunk", sizeOfChunk);
        geomShader.SetInt("height_of_chunk", chunkHeight + 2);
        geomShader.Dispatch(kernel, groups, heightGroup, groups);

        request = AsyncGPUReadback.Request(faceInfoBuffer);
        state   = State.Loading;

        if (destroyed)
        {
            ClearBuffers();
        }
    }
Example #17
        void QueueFrame(RenderTexture source)
        {
            if (_frameQueue.Count > 3)
            {
                Debug.LogWarning("Too many GPU readback requests.");
                return;
            }

            // Return the old render texture to the pool.
            if (_converted != null)
            {
                RenderTexture.ReleaseTemporary(_converted);
            }

            // Allocate a new render texture.
            _converted = RenderTexture.GetTemporary(
                source.width / 2, (_alphaSupport ? 3 : 2) * source.height / 2, 0,
                RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear
                );

            // Apply the conversion shader.
            Graphics.Blit(source, _converted, _material, _alphaSupport ? 1 : 0);

            // Request readback.
            _frameQueue.Enqueue(new Frame {
                width    = source.width, height = source.height,
                alpha    = _alphaSupport,
                readback = AsyncGPUReadback.Request(_converted)
            });
        }
Example #18
 public void QueueTexture(Texture source)
 {
     this.frameQueue.Enqueue(new FrameData()
     {
         readback = AsyncGPUReadback.Request(source)
     });
 }
Example #19
        public IEnumerator SemanticSegmentationPass_WithEmptyFrame_ProducesSky([Values(false, true)] bool showVisualizations)
        {
            int timesSegmentationImageReceived = 0;
            var expectedPixelValue             = k_SkyValue;

            void OnSegmentationImageReceived(NativeArray <Color32> data)
            {
                timesSegmentationImageReceived++;
                CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data.ToArray());
            }

            var cameraObject = SetupCameraSemanticSegmentation(a => OnSegmentationImageReceived(a.data), showVisualizations, expectedPixelValue);

            //TestHelper.LoadAndStartRenderDocCapture(out var gameView);
            yield return(null);

            var segLabeler = (SemanticSegmentationLabeler)cameraObject.GetComponent <PerceptionCamera>().labelers[0];
            var request    = AsyncGPUReadback.Request(segLabeler.targetTexture, callback: r =>
            {
                CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, segLabeler.targetTexture.width * segLabeler.targetTexture.height), r.GetData <Color32>());
            });

            AsyncGPUReadback.WaitAllRequests();

            //RenderDoc.EndCaptureRenderDoc(gameView);

            //request.WaitForCompletion();
            Assert.IsTrue(request.done);
            Assert.IsFalse(request.hasError);

            //destroy the object to force all pending segmented image readbacks to finish and events to be fired.
            DestroyTestObject(cameraObject);
            Assert.AreEqual(1, timesSegmentationImageReceived);
        }
Example #20
 public void QueueBuffer(ComputeBuffer source)
 {
     this.frameQueue.Enqueue(new FrameData()
     {
         readback = AsyncGPUReadback.Request(source)
     });
 }
Example #21
        /// <summary>
        /// Capture frame coroutine implementation.
        /// </summary>
        private IEnumerator CaptureFrameAsync()
        {
            isCapturingFrame = true;

            yield return(new WaitForEndOfFrame());

#if UNITY_2018_3_OR_NEWER
            if (supportsAsyncGPUReadback)
            {
                // use async GPU readback if possible
                requestQueue.Enqueue(AsyncGPUReadback.Request(outputTexture));
            }
            else
            {
                CopyFrameTexture();
                yield return(null);

                EnqueueFrameTexture();
            }
#else
            CopyFrameTexture();
            yield return(null);

            EnqueueFrameTexture();
#endif

            if (screenshotStarted && !screenshotSequence)
            {
                screenshotStarted = false;
            }
            isCapturingFrame = false;
        }
Example #22
        protected override bool ProcessNode(CommandBuffer cmd)
        {
            if (texture == null)
            {
                return(false);
            }

            int pixelX = (int)(texture.width * uv.x);
            int pixelY = (int)(texture.height * uv.y);

            pixelX = Mathf.Clamp(pixelX, 0, texture.width - 1);
            pixelY = Mathf.Clamp(pixelY, 0, texture.height - 1);


            if (texture is Texture2D t)
            {
                output = t.GetPixel(pixelX, pixelY);
            }
            else if (texture is RenderTexture rt)
            {
                // TODO: command buffer read pixels
                // This does not seem to work while the CRTs are being processed :(
                int depth   = texture.dimension == TextureDimension.Cube ? 6 : 1;
                var request = AsyncGPUReadback.Request(texture, 0, 0, texture.width, 0, texture.height, 0, depth, (r) => {
                    ReadPixel(r);
                });

                request.Update();

                request.WaitForCompletion();
            }
            // TODO: texture 3D and cubemaps with GPU async readback

            return(true);
        }
Example #23
    public void Start()
    {
        Debug.Assert(Status != AsyncTextureReaderStatus.Reading);

        if (Type == ReadType.None)
        {
            return;
        }
        else if (Type == ReadType.Native)
        {
            NativeReadRequest = AsyncGPUReadback.Request(Texture, 0, NativeReadFormat);
        }
        else if (Type == ReadType.LinuxOpenGL)
        {
            if (LinuxId >= 0)
            {
                unsafe
                {
                    AsyncTextureReaderImports.AsyncTextureReaderStart(LinuxId, new IntPtr(Data.GetUnsafePtr()));
                }
                GL.IssuePluginEvent(LinuxUpdate, LinuxId);
            }
        }

        Status = AsyncTextureReaderStatus.Reading;
    }
Example #24
        public static void SaveTexture2DToDisk(string path, Texture2D texture)
        {
            Assert.ArgumentNotNullOrEmptry (path, nameof (path));
            Assert.ArgumentNotNull (texture, nameof (texture));
            Assert.ArgumentTrue (!path.IsDirectory (), $"Path ({path}) is directory");

            var extension = Path.GetExtension (path);

            Assert.ArgumentTrue (IsSupportedExtension (extension),
                                 $"Unsupported extension: {extension}. Full path: {path}\nSupported Extensions: .png, .jpg, .jpeg, .tga");

            var request = AsyncGPUReadback.Request (texture, 0, TextureFormat.RGBA32);
            request.WaitForCompletion ();

            var buffer = request.GetData<Color32> ();
            var newTexture = new Texture2D (texture.width, texture.height, TextureFormat.RGBA32, false);
            newTexture.SetPixels32 (buffer.ToArray());
                    
            byte[] data = null;

            if ( extension == ".tga" )
                data = newTexture.EncodeToTGA ();
            else if ( extension == ".jpeg" || extension == ".jpg" )
                data = newTexture.EncodeToJPG (100);
            else if ( extension == ".png" )
                data = newTexture.EncodeToPNG ();

            File.WriteAllBytes (path, data);
        }
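
A short hypothetical call for the helper above; the path and texture names are placeholders. Note that the request uses WaitForCompletion, so the call blocks the main thread until the readback finishes:

            // Hypothetical usage: synchronously capture and encode a texture to disk.
            SaveTexture2DToDisk ("Screenshots/capture.png", myTexture);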
Example #25
    private void DispatchConvertFrame(Av1Frame av1Frame, GpuJob gpuJob, Texture2D texture2D)
    {
        _stopwatch.Restart();
        Marshal.Copy(av1Frame.Picture._data[0], _lumaBytes, 0, 1920 * 1080);
        gpuJob.LumaBuffer.SetData(_lumaBytes);
        computeShader.SetBuffer(_kernel, "lumaBuffer", gpuJob.LumaBuffer);

        Marshal.Copy(av1Frame.Picture._data[1], _uBytes, 0, 1920 * 1080 / 4);
        gpuJob.UBuffer.SetData(_uBytes);
        computeShader.SetBuffer(_kernel, "uBuffer", gpuJob.UBuffer);

        Marshal.Copy(av1Frame.Picture._data[2], _vBytes, 0, 1920 * 1080 / 4);
        gpuJob.VBuffer.SetData(_vBytes);
        computeShader.SetBuffer(_kernel, "vBuffer", gpuJob.VBuffer);

        _stopwatch.Stop();
        Debug.Log($"Copy {_stopwatch.ElapsedMilliseconds}");
        _stopwatch.Restart();

        computeShader.SetBuffer(_kernel, "rgbaBuffer", gpuJob.RgbaBuffer);

        _stopwatch.Stop();
        Debug.Log($"Copy {_stopwatch.ElapsedMilliseconds}");
        _stopwatch.Restart();

        computeShader.Dispatch(_kernel, 1920 / 4 / 8, 1080 / 8, 1);

        _stopwatch.Stop();
        Debug.Log($"Dispatch {_stopwatch.ElapsedMilliseconds}");
        _stopwatch.Restart();

        gpuJob.request   = AsyncGPUReadback.Request(gpuJob.RgbaBuffer);
        gpuJob.texture2D = texture2D;
        _workingGpuJobs.Add(gpuJob);
    }
Example #26
    public async UniTask <VideoPacket> GetVideo()
    {
        await UniTask.WaitForEndOfFrame();

        var request = AsyncGPUReadback.Request(localRenderTexture);
        await UniTask.WaitUntil(() => request.done);

        rawData = request.GetData <Color32>().ToArray();

        await UniTask.SwitchToThreadPool();

        await UniTask.Run(() =>
        {
            jpgData = ImageConversion.EncodeArrayToJPG(rawData, GraphicsFormat.R8G8B8A8_UNorm, width, height);
        });

        byte[] data = jpgData;

        VideoPacket packet = new VideoPacket();

        packet.Id        = ConfigManager.LOCAL_ID;
        packet.Width     = width;
        packet.Height    = height;
        packet.Timestamp = ConvertDateTimeToLong(DateTime.Now);
        packet.Data      = data;

        return(packet);
    }
Example #27
        public void ReadPixels(Texture2D texture, Action <Texture2D> postReadAction)
        {
            var renderTarget = GetRenderTarget(texture);

            AsyncGPUReadback.Request(src: renderTarget, mipIndex: 0, dstFormat: TextureFormat.ARGB32, callback: request => {
                if (request.hasError)
                {
                    Debug.LogError("Error reading pixels from the RenderTexture");
                }
                else
                {
                    var requestedData = request.GetData <byte>();

                    var rawTexture = texture.GetRawTextureData <byte>();

                    rawTexture.CopyFrom(requestedData);

                    texture.Apply();
                    texture.Compress(false);

                    postReadAction(texture);
                }

                IsReady = true;
            });
        }
Example #28
        void QueueFrame(Texture source)
        {
            if (_readbackQueue.Count > 6)
            {
                Debug.LogWarning("Too many GPU readback requests.");
                return;
            }

            // Lazy initialization of the preprocessing blit shader
            if (_blitMaterial == null)
            {
                var shader = Shader.Find("Hidden/FFmpegOut/Preprocess");
                _blitMaterial = new Material(shader);
            }

            // Blit to a temporary texture and request readback on it.
            var rt = RenderTexture.GetTemporary
                         (source.width, source.height, 0, RenderTextureFormat.ARGB32);

            Graphics.Blit(source, rt, _blitMaterial, 0);

            var platform = UnityEngine.Application.platform;

            if (platform == UnityEngine.RuntimePlatform.OSXPlayer || platform == UnityEngine.RuntimePlatform.LinuxPlayer)
            {
                _readbackQueue.Add(AsyncGPUReadback.Request(rt, 0, TextureFormat.ARGB32));
            }
            else
            {
                _readbackQueue.Add(AsyncGPUReadback.Request(rt));
            }

            RenderTexture.ReleaseTemporary(rt);
        }
Example #29
        /// <summary>
        /// Waits in a coroutine for the GPU to complete uploading the timelapse image, and
        /// then starts a background task to save it.
        /// </summary>
        /// <param name="rt">The texture where the timelapse image was rendered.</param>
        /// <param name="savePath">The path to save the image.</param>
        /// <param name="preview">true if the image is a colony preview.</param>
        private static System.Collections.IEnumerator TimelapseCoroutine(RenderTexture rt,
                                                                         string savePath, bool preview)
        {
            int width = rt.width, height = rt.height;

            if (width > 0 && height > 0)
            {
                var request = AsyncGPUReadback.Request(rt, 0);
                // Wait for texture to be read back from the GPU
                while (!request.done)
                {
                    yield return(null);
                }
                if (request.hasError)
                {
                    PUtil.LogWarning("Error saving background timelapse image!");
                    var oldRT = RenderTexture.active;
                    RenderTexture.active = rt;
                    Game.Instance.timelapser.WriteToPng(rt);
                    RenderTexture.active = oldRT;
                }
                else
                {
                    byte[] rawARGB = request.GetData <byte>().ToArray();
                    if (rawARGB != null)
                    {
                        BackgroundTimelapser.Instance.Start(savePath, TextureToPNG(rawARGB,
                                                                                   width, height), preview);
                    }
                }
            }
        }
Example #30
        private IEnumerator CameraWorker()
        {
            var waitForSeconds = new WaitForSeconds(UpdatePeriod * adjustCapturingRate);

            while (true)
            {
                cam.enabled = true;

                cam.Render();

                var readback = AsyncGPUReadback.Request(cam.targetTexture, 0, readbackDstFormat);

                yield return(new WaitUntil(() => readback.done));

                cam.enabled = false;

                if (readback.hasError)
                {
                    Debug.LogError("Failed to read GPU texture");
                    continue;
                }
                // Debug.Assert(request.done);

                camData.SetTextureData(readback.GetData <byte>());

                if (parameters.save_enabled)
                {
                    var saveName = name + "_" + Time.time;
                    camData.SaveRawImageData(parameters.save_path, saveName);
                    // Debug.LogFormat("{0}|{1} captured", parameters.save_path, saveName);
                }

                yield return(waitForSeconds);
            }
        }