private void Capture(int num)
{
    // Per-capture image name: "<cameraName>_<num>".
    string imageName = $"{_camera.name}_{num}";
    Dictionary<string, int> labelInstances = new Dictionary<string, int>();

    // Kick off the asynchronous screen capture for this camera.
    var screen = CaptureCamera.Capture(_camera, request =>
    {
        string path = $"{screenCapturePath}/{imageName}.jpg";

        // Encode the raw color buffer into a JPG byte array.
        Array image = CaptureImageEncoder.Encode(
            request.data.colorBuffer as Array,
            640, 480,
            GraphicsFormat.R8G8B8A8_UNorm,
            CaptureImageEncoder.ImageFormat.Jpg,
            true);

        // Persist the encoded image to disk.
        var result = FileProducer.Write(path, image);

        // Bail out early if the image could not be written.
        if (!result)
        {
            return AsyncRequest.Result.Error;
        }

        // Capture succeeded: record the per-label instance counts for this frame
        // and log them alongside the image name.
        labelInstances.Add("Cube", 100);
        labelInstances.Add("Sphere", 111);
        labelInstances.Add("Cylinder", 131);
        string serialized = JsonConvert.SerializeObject(labelInstances);
        InstanceCount instanceCount = new InstanceCount(imageName, serialized);
        dataLogger.Log(instanceCount);
        return AsyncRequest.Result.Completed;
    });
}
private void Capture(int num)
{
    // Per-capture image name: "<cameraName>_<num>".
    string imageName = $"{_camera.name}_{num}";

    // The data point is built outside the async callback so the cube's state is
    // sampled at capture time rather than when the request completes.
    DataPoint dataPoint = new DataPoint(_cube.name, _cube.transform.rotation, simElapsed, imageName);

    // Kick off the asynchronous screen capture for this camera.
    var screen = CaptureCamera.Capture(_camera, request =>
    {
        string path = $"{screenCapturePath}/{imageName}.jpg";

        // Encode the raw color buffer into a JPG byte array.
        Array image = CaptureImageEncoder.Encode(
            request.data.colorBuffer as Array,
            640, 480,
            GraphicsFormat.R8G8B8A8_UNorm,
            CaptureImageEncoder.ImageFormat.Jpg,
            true);

        // Persist the encoded image to disk.
        var result = DXFile.Write(path, image);

        // Bail out early if the image could not be written.
        if (!result)
        {
            return AsyncRequest.Result.Error;
        }

        // Capture succeeded: log the pre-built data point for this frame.
        dataLogger.Log(dataPoint);
        return AsyncRequest.Result.Completed;
    });
}
private AsyncRequestWrapper CaptureFrameWithLog(
    Camera camera,
    Unity.Simulation.Logger logger,
    string screenCapturePath,
    string frameFileNameRoot,
    int frameIndex
)
{
    // Compose the per-frame base name and the full image output path.
    string frameFileBaseName = $"{frameFileNameRoot}_{frameIndex}";
    string frameFilePath = $"{screenCapturePath}{Path.DirectorySeparatorChar}{frameFileBaseName}.jpg";

    // Writes this frame's entry to the log.
    void LogData() => logger.Log(new CaptureFrameWithLogDataPoint(frameFileBaseName));

    // Threaded execution contexts run the capture callback asynchronously, so the
    // log line must be written up front, before the request is issued.
    if (IsExecutionContextThreaded())
    {
        LogData();
    }

    var req = CaptureCamera.Capture(
        camera,
        request =>
        {
            // Non-threaded contexts execute callbacks sequentially, so logging
            // inside the callback keeps the log ordered with the capture.
            if (!IsExecutionContextThreaded())
            {
                LogData();
            }

            // Convert the captured color buffer into an encoded JPG payload.
            byte[] imgColorData = (byte[])request.data.colorBuffer;
            byte[] imgFileData = (byte[])CaptureImageEncoder.EncodeArray(
                imgColorData,
                32, 32,
                GraphicsFormat.R8G8B8A8_UNorm,
                CaptureImageEncoder.ImageFormat.Jpg
            );

            // Report success only if the image file could be written to disk.
            if (FileProducer.Write(frameFilePath, imgFileData))
            {
                return AsyncRequest.Result.Completed;
            }
            return AsyncRequest.Result.Error;
        },
        flipY: false
    );

    return new AsyncRequestWrapper(req, frameIndex);
}
// Verifies that captured color data is oriented bottom-to-top: the first pixel
// (bottom row) should be black and the last pixel (top row) should be red.
// Always dumps the capture to `path` (suffixed with "_FAILED" on mismatch) so
// failures can be inspected visually.
void Test_Color_Orientation(Camera camera, GraphicsFormat format, byte[] data, string label, string path)
{
    var colors = ArrayUtilities.Cast <Color32>(data);
    var length = ArrayUtilities.Count <Color32>(colors);

    // Buffer is laid out left-to-right, bottom-to-top: index 0 is in the bottom
    // row, the last pixel is in the top row.
    var top = (Color)colors[length - 1];
    var bot = (Color)colors[0];

    // Evaluate each comparison exactly once (the original evaluated them twice:
    // once for the failure log and again inside the asserts).
    bool topOk, botOk;
    var pipeline = SRPSupport.GetCurrentPipelineRenderingType();
    if (pipeline == RenderingPipelineType.URP || pipeline == RenderingPipelineType.HDRP)
    {
        // HDRP (and URP) apply effects that alter color significantly, so only
        // check that each pixel is closer to its expected color than to the other.
        topOk = ColorCloser(top, Color.red, Color.black);
        botOk = ColorCloser(bot, Color.black, Color.red);
    }
    else
    {
        // Built-in pipeline: colors should match within a small tolerance.
        const float tolerance = 0.01f;
        topOk = CompareColors(top, Color.red, tolerance);
        botOk = CompareColors(bot, Color.black, tolerance);
    }

    var failed = !topOk || !botOk;
    if (failed)
    {
        Log.I($"{label} : Failed: Colors top {top.ToString()} bottom {bot.ToString()}");
    }

    // Dump the capture for visual inspection regardless of outcome.
    var texture = ConvertToRGBATexture(format, data, camera.pixelWidth, camera.pixelHeight);
    File.WriteAllBytes(
        CaptureImageEncoder.EnforceFileExtension(failed ? path + "_FAILED" : path, CaptureImageEncoder.ImageFormat.Jpg),
        texture.EncodeToJPG());

    Assert.True(topOk);
    Assert.True(botOk);
}
// Verifies that 16-bit depth data is oriented bottom-to-top and contains real
// depth values: the top row must be closer (smaller average depth) than the
// bottom row, and neither row may be fully near- or far-clipped. Always dumps
// the capture to `path` (suffixed with "_FAILED" on mismatch) for inspection.
void Test_Depth_Orientation_ushort(Camera camera, GraphicsFormat format, byte[] data, string label, string path)
{
    var depth = ArrayUtilities.Cast <ushort>(data);
    // NOTE: the original also computed ArrayUtilities.Count<ushort>(depth) into an
    // unused local; that dead statement has been removed.

    // Depth is laid out left to right, bottom to top: row 0 is the bottom line,
    // row (pixelHeight - 1) is the top line.
    var top = CalculateAverageDepthForLine(depth, camera.pixelWidth, camera.pixelHeight - 1);
    var bot = CalculateAverageDepthForLine(depth, camera.pixelWidth, 0);

    // Top should be closer, and both planes should have depth info
    // (strictly between 0 and ushort.MaxValue).
    var condition = top < bot
        && top > 0 && top < ushort.MaxValue
        && bot > 0 && bot < ushort.MaxValue;

    // Dump the capture for visual inspection regardless of outcome.
    var texture = ConvertToRGBATexture(format, data, camera.pixelWidth, camera.pixelHeight);
    File.WriteAllBytes(
        CaptureImageEncoder.EnforceFileExtension(condition ? path : path + "_FAILED", CaptureImageEncoder.ImageFormat.Jpg),
        texture.EncodeToJPG());

    Log.I($"{label} : Average depth top({top}) bottom({bot})");
    Assert.True(condition);
}