/// <summary>
/// Captures the current camera view to a JPG file and, on success, logs a
/// JSON-serialized map of label instance counts for the frame.
/// </summary>
/// <param name="num">Frame number appended to the camera name to form the image name.</param>
private void Capture(int num)
{
    string imageName = _camera.name + "_" + num;

    // Call Screen Capture; the lambda runs when the async request completes.
    var screen = CaptureCamera.Capture(_camera, request =>
    {
        string path = screenCapturePath + "/" + imageName + ".jpg";

        // Convert the screen capture to an encoded JPG byte array (640x480, RGBA8).
        Array image = CaptureImageEncoder.Encode(request.data.colorBuffer as Array, 640, 480,
            GraphicsFormat.R8G8B8A8_UNorm, CaptureImageEncoder.ImageFormat.Jpg, true);

        // Write the screen capture to a file.
        var result = FileProducer.Write(path, image);

        // Wait for async screen capture request to return and then log data point.
        if (result)
        {
            // Build the label map inside the callback so a re-invoked callback can
            // never hit Dictionary.Add's duplicate-key ArgumentException (the original
            // captured a shared dictionary and called Add on every invocation).
            var labelInstances = new Dictionary<string, int>
            {
                ["Cube"] = 100,
                ["Sphere"] = 111,
                ["Cylinder"] = 131,
            };

            string temp = JsonConvert.SerializeObject(labelInstances);
            InstanceCount instanceCount = new InstanceCount(imageName, temp);

            // Log data point to file.
            dataLogger.Log(instanceCount);
            return AsyncRequest.Result.Completed;
        }
        return AsyncRequest.Result.Error;
    });
}
/// <summary>
/// Parametric test coroutine: captures color and depth from a test camera with the
/// given depth precision and render texture format, then hands the completed
/// request to <paramref name="validator"/> for inspection.
/// </summary>
/// <param name="depthBpp">Depth buffer bits per pixel.</param>
/// <param name="renderTextureFormat">Color render texture format to capture.</param>
/// <param name="validator">Callback that asserts on the finished capture request.</param>
public IEnumerator CaptureTest_CaptureColorAndDepthParametric(int depthBpp, GraphicsFormat renderTextureFormat, Action<AsyncRequest<CaptureCamera.CaptureState>> validator)
{
    Debug.Assert(GraphicsUtilities.SupportsRenderTextureFormat(renderTextureFormat), "GraphicsFormat not supported");

    var cam = SetupCameraTestWithMaterial(depthBpp, renderTextureFormat, new Vector3(0, 0, 1.0f));

    // Neither channel's data is inspected here; DontCare discards the buffers.
    var captureRequest = CaptureCamera.Capture
    (
        cam,
        colorFunctor: AsyncRequest<CaptureCamera.CaptureState>.DontCare,
        depthFunctor: AsyncRequest<CaptureCamera.CaptureState>.DontCare,
        depthFormat: GraphicsUtilities.DepthFormatForDepth(depthBpp),
        forceFlip: ForceFlip.None
    );

    cam.Render();

    // Spin until the async capture finishes.
    while (!captureRequest.completed)
    {
        yield return null;
    }

    Debug.Assert(captureRequest.error == false, "Capture request had an error");
    validator.Invoke(captureRequest);
}
/// <summary>
/// Captures the current camera view to a JPG file and, on success, logs a
/// data point describing the cube's rotation at the time of capture.
/// </summary>
/// <param name="num">Frame number appended to the camera name to form the image name.</param>
private void Capture(int num)
{
    string imageName = _camera.name + "_" + num;

    // The data point is created up front, outside the async call, so the
    // callback only has to log it once the file write succeeds.
    DataPoint dataPoint = new DataPoint(_cube.name, _cube.transform.rotation, simElapsed, imageName);

    // Kick off the asynchronous screen capture.
    var screen = CaptureCamera.Capture(_camera, request =>
    {
        string path = screenCapturePath + "/" + imageName + ".jpg";

        // Encode the captured color buffer as a 640x480 RGBA8 JPG.
        Array encoded = CaptureImageEncoder.Encode(request.data.colorBuffer as Array, 640, 480,
            GraphicsFormat.R8G8B8A8_UNorm, CaptureImageEncoder.ImageFormat.Jpg, true);

        // Persist the image; only log the data point if the write succeeded.
        if (DXFile.Write(path, encoded))
        {
            dataLogger.Log(dataPoint);
            return AsyncRequest.Result.Completed;
        }
        return AsyncRequest.Result.Error;
    });
}
/// <summary>
/// Captures one frame from <paramref name="camera"/> to a JPG file and writes a
/// corresponding entry to <paramref name="logger"/>, returning a wrapper around
/// the pending capture request.
/// </summary>
/// <param name="camera">Camera to capture from.</param>
/// <param name="logger">Log sink for the per-frame data point.</param>
/// <param name="screenCapturePath">Directory the image file is written into.</param>
/// <param name="frameFileNameRoot">Base name shared by all frames of this sequence.</param>
/// <param name="frameIndex">Index of this frame; suffixes the file name.</param>
private AsyncRequestWrapper CaptureFrameWithLog(
    Camera camera,
    Unity.Simulation.Logger logger,
    string screenCapturePath,
    string frameFileNameRoot,
    int frameIndex
)
{
    // Build the output file name for the image.
    string baseName = $"{frameFileNameRoot}_{frameIndex}";
    string imagePath = $"{screenCapturePath}{Path.DirectorySeparatorChar}{baseName}.jpg";

    void LogData() => logger.Log(new CaptureFrameWithLogDataPoint(baseName));

    // When the execution context is threaded the request callback runs
    // asynchronously, so the log line must be written here, before the capture.
    bool threaded = IsExecutionContextThreaded();
    if (threaded)
    {
        LogData();
    }

    var req = CaptureCamera.Capture(
        camera,
        request =>
        {
            // Non-threaded contexts execute callbacks sequentially, so it is
            // safe to write the log entry from inside the callback instead.
            if (!threaded)
            {
                LogData();
            }

            // Pull the raw color buffer and encode it as a 32x32 RGBA8 JPG.
            byte[] colorBuffer = (byte[])request.data.colorBuffer;
            byte[] jpgBytes = (byte[])CaptureImageEncoder.EncodeArray(
                colorBuffer,
                32,
                32,
                GraphicsFormat.R8G8B8A8_UNorm,
                CaptureImageEncoder.ImageFormat.Jpg
            );

            // Attempt to write the image file to disk.
            return FileProducer.Write(imagePath, jpgBytes)
                ? AsyncRequest.Result.Completed
                : AsyncRequest.Result.Error;
        },
        flipY: false
    );

    return new AsyncRequestWrapper(req, frameIndex);
}
/// <summary>
/// Starts a capture on <paramref name="camera"/> with a trivial completion functor
/// attached only to the requested <paramref name="channel"/>; the other channels
/// receive a null functor and are not processed.
/// </summary>
/// <param name="camera">Camera to capture from.</param>
/// <param name="channel">Which channel (color, depth, or motion) to request.</param>
/// <param name="colorFormat">Graphics format for the color channel.</param>
/// <param name="depthFormat">Graphics format for the depth channel.</param>
/// <param name="motionFormat">Graphics format for the motion vectors channel.</param>
/// <returns>The in-flight capture request.</returns>
AsyncRequest<CaptureCamera.CaptureState> CaptureDataForChannel(Camera camera, Channel channel, GraphicsFormat colorFormat, GraphicsFormat depthFormat, GraphicsFormat motionFormat)
{
    // The capture itself is the point; the delivered data is simply acknowledged.
    Func<AsyncRequest<CaptureCamera.CaptureState>, AsyncRequest<CaptureCamera.CaptureState>.Result> done =
        r => AsyncRequest.Result.Completed;

    return CaptureCamera.Capture
    (
        camera,
        channel == Channel.Color ? done : null,
        colorFormat,
        channel == Channel.Depth ? done : null,
        depthFormat,
        channel == Channel.Motion ? done : null,
        motionFormat,
        forceFlip: ForceFlip.None
    );
}
/// <summary>
/// Captures the RGB frame from <paramref name="cam"/>, writes it to a PNG under the
/// RGB output directory, and reports the capture (with sensor spatial data and any
/// persistent sensor annotations) to the perception sensor handle.
/// </summary>
/// <param name="cam">Camera whose last rendered frame is captured.</param>
void CaptureRgbData(Camera cam)
{
    // Guard BEFORE BeginSample: the original called Profiler.BeginSample first and
    // then returned early, leaving an unbalanced BeginSample/EndSample pair.
    if (!captureRgbImages)
    {
        return;
    }

    Profiler.BeginSample("CaptureDataFromLastFrame");

    var captureFilename = Path.Combine(Manager.Instance.GetDirectoryFor(RgbDirectory), $"{s_RgbFilePrefix}{Time.frameCount}.png");
    var dxRootPath = Path.Combine(RgbDirectory, $"{s_RgbFilePrefix}{Time.frameCount}.png");

    // Report the capture with the ego/sensor spatial data and any persistent annotations.
    SensorHandle.ReportCapture(dxRootPath,
        SensorSpatialData.FromGameObjects(m_EgoMarker == null ? null : m_EgoMarker.gameObject, gameObject),
        m_PersistentSensorData.Select(kvp => (kvp.Key, kvp.Value)).ToArray());

    Func<AsyncRequest<CaptureCamera.CaptureState>, AsyncRequest.Result> colorFunctor;
    var width = cam.pixelWidth;
    var height = cam.pixelHeight;
    var flipY = ShouldFlipY(cam);

    colorFunctor = r =>
    {
        using (s_WriteFrame.Auto())
        {
            var dataColorBuffer = (byte[])r.data.colorBuffer;

            // Some platforms deliver the buffer upside down; flip in place if needed.
            if (flipY)
            {
                FlipImageY(dataColorBuffer, height);
            }

            byte[] encodedData;
            using (s_EncodeAndSave.Auto())
            {
                encodedData = ImageConversion.EncodeArrayToPNG(dataColorBuffer, GraphicsFormat.R8G8B8A8_UNorm, (uint)width, (uint)height);
            }

            return !FileProducer.Write(captureFilename, encodedData)
                ? AsyncRequest.Result.Error
                : AsyncRequest.Result.Completed;
        }
    };

    CaptureCamera.Capture(cam, colorFunctor);

    Profiler.EndSample();
}