// Captures the current camera view to a JPEG file and, on success, logs a
// JSON-serialized map of per-label instance counts for this frame.
private void Capture(int num)
{
    string imageName = _camera.name + "_" + num;
    Dictionary<string, int> labelInstances = new Dictionary<string, int>();

    // Kick off the asynchronous screen capture; the callback runs when the
    // color buffer readback completes.
    var screen = CaptureCamera.Capture(_camera, request =>
    {
        string path = screenCapturePath + "/" + imageName + ".jpg";

        // Encode the raw color buffer into a JPEG byte array.
        Array image = CaptureImageEncoder.Encode(
            request.data.colorBuffer as Array,
            640,
            480,
            GraphicsFormat.R8G8B8A8_UNorm,
            CaptureImageEncoder.ImageFormat.Jpg,
            true);

        // Persist the encoded image; bail out with an error if the write fails.
        var result = FileProducer.Write(path, image);
        if (!result)
        {
            return(AsyncRequest.Result.Error);
        }

        // Record the (hard-coded) instance counts and log the data point.
        labelInstances.Add("Cube", 100);
        labelInstances.Add("Sphere", 111);
        labelInstances.Add("Cylinder", 131);
        string temp = JsonConvert.SerializeObject(labelInstances);
        dataLogger.Log(new InstanceCount(imageName, temp));
        return(AsyncRequest.Result.Completed);
    });
}
// Parametric capture test: requests both color and depth for the supplied
// formats, waits for completion, then hands the finished request to the
// caller-provided validator for format-specific assertions.
public IEnumerator CaptureTest_CaptureColorAndDepthParametric(int depthBpp, GraphicsFormat renderTextureFormat, Action <AsyncRequest <CaptureCamera.CaptureState> > validator)
{
    Debug.Assert(GraphicsUtilities.SupportsRenderTextureFormat(renderTextureFormat), "GraphicsFormat not supported");

    var testCamera = SetupCameraTestWithMaterial(depthBpp, renderTextureFormat, new Vector3(0, 0, 1.0f));

    // Neither channel functor inspects the data here — validation happens
    // through the validator delegate after completion.
    var captureRequest = CaptureCamera.Capture(
        testCamera,
        colorFunctor: AsyncRequest <CaptureCamera.CaptureState> .DontCare,
        depthFunctor: AsyncRequest <CaptureCamera.CaptureState> .DontCare,
        depthFormat: GraphicsUtilities.DepthFormatForDepth(depthBpp),
        forceFlip: ForceFlip.None);

    testCamera.Render();

    // Spin until the async readback finishes.
    while (!captureRequest.completed)
    {
        yield return(null);
    }

    Debug.Assert(captureRequest.error == false, "Capture request had an error");
    validator.Invoke(captureRequest);
}
// Captures the current camera view to a JPEG file and, on success, logs a
// pre-built data point describing the cube's rotation at capture time.
private void Capture(int num)
{
    string imageName = _camera.name + "_" + num;

    // Build the data point before the async call so it snapshots the cube's
    // state at request time, not at callback time.
    DataPoint dataPoint = new DataPoint(_cube.name, _cube.transform.rotation, simElapsed, imageName);

    // Kick off the asynchronous screen capture.
    var screen = CaptureCamera.Capture(_camera, request =>
    {
        string path = screenCapturePath + "/" + imageName + ".jpg";

        // Encode the raw color buffer into a JPEG byte array.
        Array image = CaptureImageEncoder.Encode(
            request.data.colorBuffer as Array,
            640,
            480,
            GraphicsFormat.R8G8B8A8_UNorm,
            CaptureImageEncoder.ImageFormat.Jpg,
            true);

        // Persist the encoded image; only log the data point if the write succeeded.
        var result = DXFile.Write(path, image);
        if (!result)
        {
            return(AsyncRequest.Result.Error);
        }

        dataLogger.Log(dataPoint);
        return(AsyncRequest.Result.Completed);
    });
}
// Captures one camera frame to a JPEG file and writes a matching log entry.
// The log line is emitted before the capture when requests run on a thread
// (asynchronously), or inside the callback when they run sequentially, so the
// log order stays deterministic either way.
private AsyncRequestWrapper CaptureFrameWithLog(
    Camera camera,
    Unity.Simulation.Logger logger,
    string screenCapturePath,
    string frameFileNameRoot,
    int frameIndex
)
{
    // Build the image file name and full output path.
    string frameFileBaseName = $"{frameFileNameRoot}_{frameIndex}";
    string frameFilePath = $"{screenCapturePath}{Path.DirectorySeparatorChar}{frameFileBaseName}.jpg";

    // Shared log-emission helper used by both execution paths below.
    void WriteLogEntry()
    {
        logger.Log(new CaptureFrameWithLogDataPoint(frameFileBaseName));
    }

    // Threaded requests execute asynchronously, so log up-front to keep ordering.
    bool threaded = IsExecutionContextThreaded();
    if (threaded)
    {
        WriteLogEntry();
    }

    var req = CaptureCamera.Capture(
        camera,
        request =>
        {
            // Sequential execution: safe to log inside the callback.
            if (!threaded)
            {
                WriteLogEntry();
            }

            // Convert the raw color buffer into an encoded JPEG.
            byte[] imgColorData = (byte[])request.data.colorBuffer;
            byte[] imgFileData = (byte[])CaptureImageEncoder.EncodeArray(
                imgColorData,
                32,
                32,
                GraphicsFormat.R8G8B8A8_UNorm,
                CaptureImageEncoder.ImageFormat.Jpg
            );

            // Report success only if the image actually reached disk.
            bool fileWritten = FileProducer.Write(frameFilePath, imgFileData);
            return(fileWritten ? AsyncRequest.Result.Completed : AsyncRequest.Result.Error);
        },
        flipY: false
    );

    return(new AsyncRequestWrapper(req, frameIndex));
}
// Custom inspector for CaptureCamera: draws the inheritance toggle,
// resolution selection, and anti-aliasing selection.
public override void OnInspectorGUI()
{
    CaptureCamera captureCamera = target as CaptureCamera;

    // Inheritance is only meaningful when a parent object with a Camera exists;
    // otherwise force it off and disable the toggle.
    bool disabled = false;
    if (captureCamera.transform.parent == null || captureCamera.transform.parent.GetComponent<Camera>() == null)
    {
        disabled = true;
        captureCamera.inheritance = false;
    }

    EditorGUI.BeginDisabledGroup(disabled);
    captureCamera.inheritance = EditorGUILayout.Toggle("Inheritance", captureCamera.inheritance);
    EditorGUI.EndDisabledGroup();

    // User-facing messages are intentionally left in Japanese (runtime strings).
    if (disabled)
    {
        // "No parent camera exists."
        EditorGUILayout.HelpBox("親カメラが存在しません。", MessageType.Warning);
    }
    else if (captureCamera.inheritance)
    {
        // "Inherits the parameters of the parent camera (<name>)."
        EditorGUILayout.HelpBox("親カメラ(" + captureCamera.transform.parent.name + ")のパラメータを継承します。", MessageType.Info);
    }

    // Resolution setting (index 4 = 8K, index 5 = Free/custom size).
    captureCamera.resolution = EditorGUILayout.Popup(
        "CaptureResolution",
        captureCamera.resolution,
        new string[] { "SD (720x480)", "HD (1280x720)", "FullHD (1920x1080)", "4K (4096x2160)", "8K (7680x4320)", "Free" }
    );
    if (captureCamera.resolution == 4)
    {
        // "At 8K resolution, Unity may crash at runtime depending on PC specs."
        EditorGUILayout.HelpBox("8K解像度の場合、PCのスペックによっては実行時にUnityが強制終了する場合があります。", MessageType.Warning);
    }
    else if (captureCamera.resolution == 5)
    {
        // Output size after export (read-only display of the current width/height).
        // NOTE(review): "UNITY_2017_OR_NEWER" is not a standard Unity version
        // define (built-ins look like UNITY_2017_1_OR_NEWER) — confirm this
        // symbol is defined somewhere, otherwise the #else branch always runs.
#if UNITY_2017_OR_NEWER
        EditorGUILayout.Vector2IntField("Screen Size", new Vector2Int(captureCamera.width, captureCamera.height));
#else
        EditorGUILayout.Vector2Field("Screen Size", new Vector2(captureCamera.width, captureCamera.height));
#endif
        EditorGUILayout.Space();
    }

    // Anti-aliasing setting (field name "antiAiliasing" is an existing typo in
    // the serialized API; renaming would break saved scenes/prefabs).
    captureCamera.antiAiliasing = EditorGUILayout.Popup(
        "Anti-Ailiasing",
        captureCamera.antiAiliasing,
        new string[] { "Disabled", "2 samples", "4 samples", "8 samples" }
    );
}
// Verifies that a 16-bit depth capture can be written to a TGA file without error.
public IEnumerator CaptureTest_CaptureDepth16ToFile()
{
    var testCamera = SetupCameraTestWithMaterial(16, GraphicsFormat.R8G8B8A8_UNorm, new Vector3(0, 0, 500.5f), 0.1f, 1000);

    // Write the depth image under the persistent data path.
    var depthPath = Path.Combine(Application.persistentDataPath, "depth16.tga");
    var captureRequest = CaptureCamera.CaptureDepthToFile(testCamera, GraphicsFormat.R16_UNorm, depthPath);

    testCamera.Render();

    // Wait for the async file write to finish before asserting.
    while (!captureRequest.completed)
    {
        yield return(null);
    }

    Debug.Assert(captureRequest.error == false, "Capture request had an error");
}
// Resets session state and initializes the core singletons in dependency
// order, then shows the login screen.
public void Initialize()
{
    sessionIsActive = false;

    // Initialize managers in the same order as their declaration dependencies.
    MotionManager.instance.Initialize();

    MoleculeManager molecule = MoleculeManager.instance;
    molecule.Initialize();

    CameraRig.instance.Initialize();

    // Hide the molecule editor UI and present the login screen at startup.
    molecule.editorUI.gameObject.SetActive(false);
    loginScreen.gameObject.SetActive(true);

    CaptureCamera.instance.Initialize();
}
// Renders kNumFramesToRender frames across all test cameras, capturing the
// color channel of each to a JPEG file, then waits for every async request to
// complete and asserts none of them errored.
public IEnumerator CaptureScreenshotsNew_ColorOnly()
{
    var requests = new List <AsyncRequest <CaptureCamera.CaptureState> >();

    SetupTest(1000, 3);
    yield return(null);

    for (int i = 0; i < kNumFramesToRender; ++i)
    {
        for (int c = 0; c < _cameras.Length; ++c)
        {
            var camera = _cameras[c];
            var r = CaptureCamera.CaptureColorToFile(
                camera,
                GraphicsFormat.R8G8B8A8_UNorm,
                string.Format("test_capture_{0}_camera_{1}_color.jpg", i, c));
            requests.Add(r);
            camera.Render();
        }
        yield return(null);
    }

    Debug.Log("Finally, wait for any remaining requests to complete.");
    // BUGFIX: the original used "if", which yields only a single frame and does
    // not guarantee outstanding requests have completed; loop until all finish.
    while (requests.Exists(r => r.completed == false))
    {
        yield return(null);
    }

    // Report every failed request individually before the aggregate assert.
    for (var i = 0; i < requests.Count; ++i)
    {
        if (requests[i].error)
        {
            Debug.Log(string.Format("Request {0} returned error.", i));
        }
    }
    Debug.Assert(!requests.Exists(r => r.error == true));
    Debug.Log("CaptureScreenshotsNew_ColorOnly elapsed " + Elapsed());
}
// Issues a capture request with a completion functor attached only to the
// requested channel (color, depth, or motion); the other channels get null
// functors and are skipped.
AsyncRequest <CaptureCamera.CaptureState> CaptureDataForChannel(Camera camera, Channel channel, GraphicsFormat colorFormat, GraphicsFormat depthFormat, GraphicsFormat motionFormat)
{
    // Trivial functor that just marks the request as completed.
    Func <AsyncRequest <CaptureCamera.CaptureState>, AsyncRequest <CaptureCamera.CaptureState> .Result> functor =
        r => AsyncRequest.Result.Completed;

    var colorFunctor  = channel == Channel.Color  ? functor : null;
    var depthFunctor  = channel == Channel.Depth  ? functor : null;
    var motionFunctor = channel == Channel.Motion ? functor : null;

    return(CaptureCamera.Capture(
        camera,
        colorFunctor,
        colorFormat,
        depthFunctor,
        depthFormat,
        motionFunctor,
        motionFormat,
        forceFlip: ForceFlip.None
    ));
}
// Renders a 2x2 scene split into a black bottom half and a red top half, saves
// it to PNG via the capture pipeline (fast async readback or slow path per
// fastPath), then reloads the file and asserts the image is upright — i.e. not
// vertically flipped by the readback path.
IEnumerator CaptureColorAndEnsureUpright(bool fastPath)
{
    var camera = SetupCameraWithRenderTexture(2, 2, GraphicsFormat.R8G8B8A8_UNorm);
    var imagePath = Path.Combine(Application.persistentDataPath, "upright.png");

    // Save and override the global readback option so the test exercises the
    // requested path; restored after the request completes.
    var useAsyncReadbackIfSupported = CaptureOptions.useAsyncReadbackIfSupported;
    CaptureOptions.useAsyncReadbackIfSupported = fastPath;

    // The request is created before the scene is arranged; the capture happens
    // when camera.Render() runs below.
    var request = CaptureCamera.CaptureColorToFile(camera, GraphicsFormat.R8G8B8A8_UNorm, imagePath, CaptureImageEncoder.ImageFormat.Png);

    var plane1 = CreatePlaneInFrontOfCamera(Color.black, .1f);
    //position on the bottom half of the screen, in front of plane2
    plane1.transform.localPosition = new Vector3(0, -.5f, .5f);
    plane1.transform.localScale = new Vector3(1, -1f, .1f);

    // Red plane fills the background (top half remains visible behind plane1).
    var plane2 = CreatePlaneInFrontOfCamera(Color.red);
    plane2.transform.localPosition = new Vector3(0, 0, 1f);
    plane2.transform.localScale = new Vector3(1, -1f, 1f);

    // Don't clear so the planes are the only content drawn.
    camera.clearFlags = CameraClearFlags.Nothing;
    camera.Render();

    // Wait for the async capture/encode/write to finish.
    while (!request.completed)
    {
        yield return(null);
    }

    // Restore the global readback option before asserting.
    CaptureOptions.useAsyncReadbackIfSupported = useAsyncReadbackIfSupported;
    Assert.True(request.error == false);

    // Reload the written PNG and check orientation: bottom row (y=0) must be
    // black, top row (y=1) must be red — a flipped image would fail these.
    var texture = new Texture2D(2, 2, TextureFormat.RGB24, false);
    texture.LoadImage(File.ReadAllBytes(imagePath));
    Assert.True(CompareColors(texture.GetPixel(0, 0), Color.black));
    Assert.True(CompareColors(texture.GetPixel(1, 0), Color.black));
    Assert.True(CompareColors(texture.GetPixel(0, 1), Color.red));
    Assert.True(CompareColors(texture.GetPixel(1, 1), Color.red));
}
// Captures the camera's RGB frame asynchronously, reports the capture to the
// sensor, and writes the image out as a PNG (flipping vertically if needed).
void CaptureRgbData(Camera cam)
{
    // BUGFIX: check the flag before opening the profiler sample — the original
    // returned early after Profiler.BeginSample without a matching EndSample,
    // leaving the sample unbalanced.
    if (!captureRgbImages)
    {
        return;
    }

    Profiler.BeginSample("CaptureDataFromLastFrame");

    // Absolute path for the file write; dxRootPath is the relative path
    // reported to the sensor capture record.
    var captureFilename = Path.Combine(Manager.Instance.GetDirectoryFor(RgbDirectory), $"{s_RgbFilePrefix}{Time.frameCount}.png");
    var dxRootPath = Path.Combine(RgbDirectory, $"{s_RgbFilePrefix}{Time.frameCount}.png");
    SensorHandle.ReportCapture(dxRootPath, SensorSpatialData.FromGameObjects(m_EgoMarker == null ? null : m_EgoMarker.gameObject, gameObject), m_PersistentSensorData.Select(kvp => (kvp.Key, kvp.Value)).ToArray());

    Func <AsyncRequest <CaptureCamera.CaptureState>, AsyncRequest.Result> colorFunctor;

    // Snapshot camera parameters now; the functor may run after state changes.
    var width = cam.pixelWidth;
    var height = cam.pixelHeight;
    var flipY = ShouldFlipY(cam);

    colorFunctor = r =>
    {
        using (s_WriteFrame.Auto())
        {
            var dataColorBuffer = (byte[])r.data.colorBuffer;
            if (flipY)
            {
                FlipImageY(dataColorBuffer, height);
            }

            byte[] encodedData;
            using (s_EncodeAndSave.Auto())
            {
                encodedData = ImageConversion.EncodeArrayToPNG(dataColorBuffer, GraphicsFormat.R8G8B8A8_UNorm, (uint)width, (uint)height);
            }

            return(!FileProducer.Write(captureFilename, encodedData)
                ? AsyncRequest.Result.Error
                : AsyncRequest.Result.Completed);
        }
    };

    CaptureCamera.Capture(cam, colorFunctor);
    Profiler.EndSample();
}
// Renders kNumFramesToRender frames across all test cameras, capturing both
// color (JPEG) and depth (R32 float) to files, then waits for every async
// request to complete and asserts none of them errored.
public IEnumerator CaptureScreenshotsNew_NewColorAndDepth()
{
    var requests = new List <AsyncRequest <CaptureCamera.CaptureState> >();

    SetupTest(1000, 3);
    yield return(null);

    for (int i = 0; i < kNumFramesToRender; ++i)
    {
        for (int c = 0; c < _cameras.Length; ++c)
        {
            var camera = _cameras[c];
            var r = CaptureCamera.CaptureColorAndDepthToFile(
                camera,
                GraphicsFormat.R8G8B8A8_UNorm,
                string.Format("test_capture_{0}_camera_{1}_color.jpg", i, c),
                default(CaptureImageEncoder.ImageFormat),
                GraphicsFormat.R32_SFloat,
                string.Format("test_capture_{0}_camera_{1}_depth.jpg", i, c));
            requests.Add(r);
            camera.Render();
        }
        yield return(null);
    }

    Debug.Log("Finally, wait for any remaining requests to complete.");
    // BUGFIX: the original used "if", which yields only a single frame and does
    // not guarantee outstanding requests have completed; loop until all finish.
    while (requests.Exists(r => r.completed == false))
    {
        yield return(null);
    }

    Debug.Assert(!requests.Exists(r => r.error == true), "one or more requests returned an error");
    Debug.Log("CaptureScreenshotsNew_NewColorAndDepth elapsed " + Elapsed());
}
void Update()
{
    #region Object capture (translated from: 对象捕捉)
    // Ctrl+Alt+C toggles capture mode on and off.
    if (Input.GetKey(KeyCode.LeftControl) && Input.GetKey(KeyCode.LeftAlt) && Input.GetKeyDown(KeyCode.C))
    {
        CaptureRun = !CaptureRun;
    }

    if (CaptureRun == true && SuperTools.UICapture == true)
    {
        // Lazily fall back to the main camera if none was assigned.
        if (CaptureCamera == null)
        {
            CaptureCamera = Camera.main;
        }

        // Raycast from the mouse position into the scene to find the object
        // under the cursor (max distance 90000).
        RaycastHit hit = new RaycastHit();
        Physics.Raycast(CaptureCamera.ScreenPointToRay(Input.mousePosition), out hit, 90000);
        if (hit.transform != null)
        {
            CaptureObjct = hit.transform.gameObject;

            // Build the full hierarchy path (root\...\object) for the hit object.
            var path = CaptureObjct.name;
            var pTransform = CaptureObjct.transform;
            while (pTransform.parent != null)
            {
                pTransform = pTransform.parent;
                path = pTransform.name + "\\" + path;
            }

            if (CapturePath != path)
            {
                // A different path means a different object was hit, so
                // refresh the cached path and component list.
                // (translated from: 地址不一样说明换了一个东西,所以要刷新)
                CapturePath = path;
                CaptureComponents = CaptureObjct.GetComponents <Component>().ToList();
            }
        }
    }
    #endregion
}
// Ensures the capture camera has a Recorder component, adding one (and
// disabling the post-process final blit) when none exists. removeRecorder
// tracks whether we created the component and should therefore remove it later.
private void AddRecorderToCamera()
{
    // Nothing to do if a recorder is already cached or no capture camera exists.
    if (_recorder != null || !CaptureCamera)
    {
        return;
    }

    _recorder = CaptureCamera.GetComponent <Recorder>();
    if (_recorder != null)
    {
        // Recorder was already present on the camera — we don't own it.
        removeRecorder = false;
        return;
    }

    // No recorder found: add and initialize our own.
    _recorder = CaptureCamera.gameObject.AddComponent <Recorder>();
    _recorder.Init();

    // Disable the final blit to the camera target while recording, remembering
    // the original setting so it can be restored later.
    PostProcessLayer pp = Camera.main.GetComponent <PostProcessLayer>();
    if (pp != null)
    {
        originalFinalBlitToCameraTarget = pp.finalBlitToCameraTarget;
        pp.finalBlitToCameraTarget = false;
    }
    removeRecorder = true;
}
// Creates an image server listening on the given port, wired to the supplied
// capture element and backed by a freshly constructed capture camera.
public ImageServer(int port, CaptureElement captureElement) : base(port)
{
    camera = new CaptureCamera();
    this.captureElement = captureElement;
}