/// <summary>
/// Renders a scene preview into <paramref name="textures"/> from the pose of
/// <paramref name="origin"/>, using a temporary preview-root prefab instance.
/// </summary>
public static void RenderScenePreview(Transform origin, PreviewTextures textures)
{
    // Capture the pose before the pipeline is torn down and rebuilt.
    var originPosition = origin.position;
    var originRotation = origin.rotation;

    ReinitializeRenderPipeline();

    var rootPrefab = AssetDatabase.LoadAssetAtPath<GameObject>("Assets/Prefabs/ScenePreviewRoot.prefab");
    var root = Object.Instantiate(rootPrefab);
    root.transform.rotation = originRotation;
    root.transform.position = originPosition;

    var camera = root.GetComponentInChildren<Camera>();

    // This will trigger HDCamera.Update, which must be done before calling HDCamera.GetOrCreate
    // Otherwise m_AdditionalCameraData will not be set and HDCamera will be discarded after first frame
    camera.Render();

    var additionalCameraData = camera.GetComponent<HDAdditionalCameraData>();
    additionalCameraData.hasPersistentHistory = true;
    var hdCamera = HDCamera.GetOrCreate(camera);
    var volume = root.GetComponentInChildren<Volume>();

    // Point cloud renderers are not part of the regular render loop - update them explicitly.
    foreach (var pointCloudRenderer in Object.FindObjectsOfType<NodeTreeRenderer>())
    {
        pointCloudRenderer.UpdateImmediate(camera);
    }

    Render(hdCamera, textures, volume);
    Object.DestroyImmediate(root);
}
/// <summary>
/// Rebuilds the ray tracing acceleration structure (RTAS) every frame from the
/// tracked <c>gameObjects</c> list and assigns it to this object's camera.
/// Does nothing when HDRP is not the active render pipeline.
/// </summary>
void Update()
{
    // Idiom fix: the original declared an 'hdrp' local via a ternary cast but never
    // used it beyond the null check. A simple type test expresses the guard directly.
    if (!(RenderPipelineManager.currentPipeline is HDRenderPipeline))
        return;

    // Get the HDCamera for the current camera
    var hdCamera = HDCamera.GetOrCreate(GetComponent<Camera>());

    // Evaluate the effect params
    HDEffectsParameters hdEffectParams = HDRenderPipeline.EvaluateEffectsParameters(hdCamera, true, false);

    // Clear the rtas from the previous frame (native resource - must be disposed explicitly)
    if (rtas != null)
    {
        rtas.Dispose();
    }

    // Create the RTAS
    rtas = new RayTracingAccelerationStructure();

    // Add all the objects individually
    int numGameObjects = gameObjects.Count;
    for (int i = 0; i < numGameObjects; ++i)
    {
        HDRenderPipeline.AddInstanceToRAS(rtas, gameObjects[i].GetComponent<Renderer>(), hdEffectParams, ref hdCamera.transformsDirty, ref hdCamera.materialsDirty);
    }

    // Build the RTAS centered on this object's position
    rtas.Build(transform.position);

    // Assign it to the camera
    hdCamera.rayTracingAccelerationStructure = rtas;
}
/// <summary>
/// Pushes the BasicWind volume settings (taken from the main camera's volume stack)
/// into global shader parameters, optionally reading direction/speed from a WindZone.
/// </summary>
void ApplySettings()
{
    var volumeStack = HDCamera.GetOrCreate(Camera.main).volumeStack;
    var wind = volumeStack.GetComponent<BasicWind>();
    if (wind == null)
        return;

    // Lazily resolve the WindZone; direction/speed are only refreshed when one exists.
    if (windZone == null)
        windZone = gameObject.GetComponent<WindZone>();
    if (windZone != null)
        GetDirectionAndSpeed();

    Shader.SetGlobalTexture(BasicWindShaderIDs.TexNoise, wind.noiseTexture.value);
    Shader.SetGlobalTexture(BasicWindShaderIDs.TexGust, wind.gustMaskTexture.value);
    // w component packs the speed (scaled by 0.2777, i.e. roughly km/h -> m/s).
    Shader.SetGlobalVector(BasicWindShaderIDs.WorldDirectionAndSpeed, new Vector4(windDirection.x, windDirection.y, windDirection.z, windSpeed * 0.2777f));
    // World-size values are inverted into scales; clamped to avoid division by ~zero.
    Shader.SetGlobalFloat(BasicWindShaderIDs.FlexNoiseScale, 1.0f / Mathf.Max(0.01f, wind.flexNoiseWorldSize.value));
    Shader.SetGlobalFloat(BasicWindShaderIDs.ShiverNoiseScale, 1.0f / Mathf.Max(0.01f, wind.shiverNoiseWorldSize.value));
    Shader.SetGlobalFloat(BasicWindShaderIDs.Turbulence, windSpeed * windTurbulence);
    Shader.SetGlobalFloat(BasicWindShaderIDs.GustSpeed, wind.gustSpeed.value);
    Shader.SetGlobalFloat(BasicWindShaderIDs.GustScale, wind.gustScale.value);
    Shader.SetGlobalFloat(BasicWindShaderIDs.GustWorldScale, 1.0f / Mathf.Max(0.01f, wind.gustWorldSize.value));
    Shader.SetGlobalFloat(BasicWindShaderIDs.Attenuation, wind.attenuation.value);
}
/// <summary>
/// Coroutine that waits for end-of-frame, resets TAA history on the first captured
/// frame (HDRP only), then signals that a frame is ready for the recorder.
/// </summary>
IEnumerator FrameRequest()
{
    yield return new WaitForEndOfFrame();

    if (currentState == State.WaitingForFirstFrame)
    {
#if HDRP_AVAILABLE
        if (UnityHelpers.UsingHDRP())
        {
            // Reset temporal history on every TAA camera so the first frame is clean.
            foreach (var cam in Camera.allCameras)
            {
                var hdCamera = HDCamera.GetOrCreate(cam);
                var additionalData = cam.GetComponent<HDAdditionalCameraData>();
                if (hdCamera != null && additionalData != null && additionalData.antialiasing == HDAdditionalCameraData.AntialiasingMode.TemporalAntialiasing)
                {
                    hdCamera.Reset();
                }
            }
        }
#endif
        // We need to wait one more frame to overcome the GameView resolution change
        // REC-589
        yield return new WaitForEndOfFrame();
    }

    FrameReady();

    if (currentState == State.WaitingForFirstFrame)
        EnterRunningState();

    frameProducedCount++;
}
/// <summary>
/// Invalidates all temporal history (exposure, volumetrics, color pyramid) on the
/// virtual camera so stale data does not bleed into subsequent frames.
/// </summary>
public override void Execute(GameObject instigator = null)
{
    var hdCamera = HDCamera.GetOrCreate(Manager.Get<VirtualCameraManager>().Camera);
    hdCamera.Reset();
    hdCamera.volumetricHistoryIsValid = false;
    hdCamera.colorPyramidHistoryIsValid = false;
}
/// <summary>
/// Overrides the HD camera's material animation time.
/// </summary>
/// <param name="ct">Current material animation time.</param>
/// <param name="lt">Previous-frame material animation time.</param>
void SetOverrideTime(float ct, float lt)
{
    // Lazily resolve the HDCamera once a camera is available.
    if (m_CameraHD == null && m_Camera != null)
    {
        m_CameraHD = HDCamera.GetOrCreate(m_Camera);
    }

    // FIX: the original dereferenced m_CameraHD unconditionally, throwing a
    // NullReferenceException whenever m_Camera was still null. Bail out instead.
    if (m_CameraHD == null)
        return;

    m_CameraHD.animateMaterialsTime = ct;
    m_CameraHD.animateMaterialsTimeLast = lt;
}
/// <summary>
/// Drops temporal history (volumetrics, color pyramid) for every managed camera.
/// </summary>
private void ResetCameras()
{
    foreach (var camera in cameras)
    {
        var hdCamera = HDCamera.GetOrCreate(camera);
        hdCamera.Reset();
        hdCamera.volumetricHistoryIsValid = false;
        hdCamera.colorPyramidHistoryIsValid = false;
    }
}
/// <summary>
/// Performance test: loads the described scene and measures all profiling sampler
/// markers for the test camera.
/// </summary>
public IEnumerator Counters([ValueSource(nameof(GetCounterTests))] CounterTestDescription testDescription)
{
    yield return LoadScene(testDescription.sceneData.scene, testDescription.assetData.asset);

    var settings = SetupTestScene();
    // We don't support XR for now, so always request the non-XR (multipassId 0) HDCamera.
    var hdCamera = HDCamera.GetOrCreate(settings.testCamera, 0);

    yield return MeasureProfilingSamplers(GetAllMarkers(hdCamera), WarmupCount, settings.measurementCount);
}
/// <summary>
/// Renders a vehicle preview into <paramref name="textures"/>: instantiates the
/// vehicle prefab from <paramref name="vehicleAssetFile"/> under the scene's
/// "VehicleParent" (if any) and renders it with the scene's "PreviewCamera".
/// Logs an error and returns when the camera or volume is missing.
/// </summary>
public static void RenderVehiclePreview(string vehicleAssetFile, PreviewTextures textures)
{
    ReinitializeRenderPipeline();

    var cameraObj = GameObject.Find("PreviewCamera");
    var camera = cameraObj == null ? null : cameraObj.GetComponent<Camera>();
    if (camera == null)
    {
        Debug.LogError("Camera for vehicle preview was not found. Preview won't be available.");
        return;
    }

    var volume = Object.FindObjectOfType<Volume>();
    if (volume == null)
    {
        Debug.LogError("Volume for vehicle preview was not found. Preview won't be available.");
        return;
    }

    var vehiclePrefab = AssetDatabase.LoadAssetAtPath<GameObject>(vehicleAssetFile);
    var vehicleParent = GameObject.Find("VehicleParent");
    var vehicle = vehicleParent != null ? Object.Instantiate(vehiclePrefab, vehicleParent.transform) : Object.Instantiate(vehiclePrefab);
    vehicle.transform.localRotation = Quaternion.identity;
    vehicle.transform.localPosition = Vector3.zero;

    // adjust camera distance based on hit distance
    RaycastHit hit;
    var start = cameraObj.transform.position;
    var end = vehicle.transform.position;
    var direction = (end - start);
    Ray cameraRay = new Ray(start, direction);
    // BUG FIX: the layer mask was previously passed into the float 'maxDistance'
    // parameter of Physics.Raycast(Ray, out RaycastHit, float) - the int implicitly
    // converted to float, so the "Agent" mask was never applied and the ray length was
    // silently limited to the mask's integer value. Pass an explicit max distance so
    // the mask lands in the layerMask parameter.
    if (Physics.Raycast(cameraRay, out hit, Mathf.Infinity, LayerMask.GetMask("Agent")))
    {
        cameraObj.transform.position = hit.point + ((cameraObj.transform.position - hit.point).normalized) * 3f;
    }

    // This will trigger HDCamera.Update, which must be done before calling HDCamera.GetOrCreate
    // Otherwise m_AdditionalCameraData will not be set and HDCamera will be discarded after first frame
    camera.Render();

    var hdSettings = camera.GetComponent<HDAdditionalCameraData>();
    hdSettings.hasPersistentHistory = true;
    var hd = HDCamera.GetOrCreate(camera);
    Render(hd, textures, volume);
    Object.DestroyImmediate(vehicle);
}
/// <summary>
/// Scene-view overlay: draws the selected probe's capture texture scaled to the
/// preview height, applying the scene view's exposure, plus a FOV readout.
/// </summary>
void OnOverlayGUI(Object target, SceneView sceneView)
{
    // Get the exposure texture used in this scene view
    if (!(RenderPipelineManager.currentPipeline is HDRenderPipeline hdrp))
        return;

    var hdCamera = HDCamera.GetOrCreate(sceneView.camera, new XRPass());
    var exposureTex = hdrp.GetExposureTexture(hdCamera);

    var targetIndex = Array.IndexOf(m_TypedTargets, target);
    if (targetIndex == -1)
        return;

    var probe = m_TypedTargets[targetIndex];
    if (probe.texture == null)
        return;

    var scale = k_PreviewHeight / probe.texture.height;
    var previewSize = new Rect(probe.texture.width * scale, k_PreviewHeight, 0, 0);

    if (Event.current.type == EventType.Layout || !firstDraw && Event.current.type == EventType.Repaint)
    {
        // Get and reserve rect
        //this can cause the following issue if calls on a repaint before a layout:
        //ArgumentException: Getting control 0's position in a group with only 0 controls when doing repaint
        var cameraRect = GUILayoutUtility.GetRect(previewSize.x, previewSize.y);
        firstDraw = false;

        var drawRect = new Rect(cameraRect)
        {
            width = probe.texture.width * scale,
            height = k_PreviewHeight
        };

        // Setup the material to draw the quad with the exposure texture
        var material = GUITextureBlit2SRGBMaterial;
        material.SetTexture("_Exposure", exposureTex);
        Graphics.DrawTexture(drawRect, probe.texture, new Rect(0, 0, 1, 1), 0, 0, 0, 0, GUI.color, material, -1);

        var fovRect = new Rect(drawRect.x + 5, drawRect.y + 2, drawRect.width - 10, EditorGUIUtility.singleLineHeight);
        GUI.TextField(fovRect, $"FOV: {probe.renderData.fieldOfView:F2}°");
    }
}
/// <summary>
/// Initializes the lens distortion pass: warms up the shader variant, (re)creates
/// the output render texture to match the screen size, and grabs the LensDistortion
/// volume component from the active pipeline's volume stack.
/// </summary>
public override void Setup()
{
    base.Setup();
    m_LensDistortionShader = Shader.Find(k_ShaderName);

    // Idiom fix: 'new' never yields null, so the original's null checks around the
    // freshly constructed ShaderVariantCollection were dead code and are removed.
    var shaderVariantCollection = new ShaderVariantCollection();
    shaderVariantCollection.Add(new ShaderVariantCollection.ShaderVariant(m_LensDistortionShader, PassType.ScriptableRenderPipeline));
    m_LensDistortionMaterial = new Material(m_LensDistortionShader);
    shaderVariantCollection.WarmUp();

    // Set up a new texture whenever the screen size changed (release the old one first).
    if (m_DistortedTexture == null || m_DistortedTexture.width != Screen.width || m_DistortedTexture.height != Screen.height)
    {
        if (m_DistortedTexture != null)
        {
            m_DistortedTexture.Release();
        }
        m_DistortedTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.ARGBFloat, RenderTextureReadWrite.Linear);
        m_DistortedTexture.enableRandomWrite = true;
        m_DistortedTexture.filterMode = FilterMode.Point;
        m_DistortedTexture.Create();
    }

    // Grab the lens distortion
#if HDRP_PRESENT
    // Grab the Lens Distortion from Perception Camera stack
    var hdCamera = HDCamera.GetOrCreate(targetCamera);
    var stack = hdCamera.volumeStack;
    m_LensDistortion = stack.GetComponent<LensDistortion>();
#elif URP_PRESENT
    var stack = VolumeManager.instance.stack;
    m_LensDistortion = stack.GetComponent<LensDistortion>();
#endif
    m_Initialized = true;
}
/// <summary>
/// Scene-view overlay: reserves layout space every event and, on repaint, draws the
/// selected probe's capture texture with the scene view's exposure plus a FOV readout.
/// </summary>
void OnOverlayGUI(Object target, SceneView sceneView)
{
    // Get the exposure texture used in this scene view
    if (!(RenderPipelineManager.currentPipeline is HDRenderPipeline hdrp))
        return;

    var hdCamera = HDCamera.GetOrCreate(sceneView.camera);
    var exposureTex = hdrp.GetExposureTexture(hdCamera);

    var index = Array.IndexOf(m_TypedTargets, target);
    if (index == -1)
        return;

    var probe = m_TypedTargets[index];
    if (probe.texture == null)
        return;

    var factor = k_PreviewHeight / probe.texture.height;
    var previewSize = new Rect(probe.texture.width * factor, k_PreviewHeight, 0, 0);

    // Get and reserve rect (on every event, so layout stays consistent)
    var cameraRect = GUILayoutUtility.GetRect(previewSize.x, previewSize.y);
    if (Event.current.type != EventType.Repaint)
        return;

    var drawRect = new Rect(cameraRect)
    {
        width = probe.texture.width * factor,
        height = k_PreviewHeight
    };

    // Setup the material to draw the quad with the exposure texture
    var material = GUITextureBlit2SRGBMaterial;
    material.SetTexture("_Exposure", exposureTex);
    Graphics.DrawTexture(drawRect, probe.texture, new Rect(0, 0, 1, 1), 0, 0, 0, 0, GUI.color, material, -1);

    var fovRect = new Rect(drawRect.x + 5, drawRect.y + 2, drawRect.width - 10, EditorGUIUtility.singleLineHeight);
    GUI.TextField(fovRect, $"FOV: {probe.renderData.fieldOfView:F2}°");
}
/// <summary>
/// Renders the sensor camera (cubemap or 2D), optionally applies lens distortion and
/// sensor post-processing, then blits the result to the final 2D render target.
/// </summary>
protected void RenderCamera()
{
    var commandBuffer = CommandBufferPool.Get();
    var hdCamera = HDCamera.GetOrCreate(SensorCamera);

    if (renderTarget.IsCube && !HDAdditionalCameraData.hasCustomRender)
    {
        // HDRP renders cubemap as multiple separate images, each with different exposure.
        // Locking exposure will force it to use the same value for all faces, removing inconsistencies.
        hdCamera.LockExposure();
        SensorCamera.stereoSeparation = 0f;
        SensorCamera.RenderToCubemap(renderTarget, faceMask, Camera.MonoOrStereoscopicEye.Left);
        hdCamera.UnlockExposure();
    }
    else
    {
        SensorCamera.Render();
    }

    if (Distorted)
    {
        // Apply the configured lens model, then run the late sensor post-process chain.
        if (Fisheye)
            LensDistortion.UnifiedProjectionDistort(commandBuffer, renderTarget, DistortedHandle);
        else
            LensDistortion.PlumbBobDistort(commandBuffer, renderTarget, DistortedHandle);

        commandBuffer.SetGlobalVector(ScreenSizeProperty, new Vector4(Width, Height, 1.0f / Width, 1.0f / Height));
        var ctx = new PostProcessPassContext(commandBuffer, hdCamera, DistortedHandle);
        SimulatorManager.Instance.Sensors.PostProcessSystem.RenderLateForSensor(ctx, this);
    }

    FinalRenderTarget.BlitTo2D(commandBuffer, hdCamera);
    HDRPUtilities.ExecuteAndClearCommandBuffer(commandBuffer);
    CommandBufferPool.Release(commandBuffer);
}
// Graphics regression test: loads the test scene, waits for it to settle, then compares the
// camera's output against the reference image. When the scene is an HDRP_ShaderGraph test
// (compareSGtoBI), it instead renders the Shader Graph objects and the built-in-material
// objects separately and reports which (if either) diverged from the reference.
// Set timeout to 5 minutes to handle complex scenes with many shaders (default timeout is 3 minutes)
[Timeout(300 * 1000)]
public IEnumerator Run(GraphicsTestCase testCase)
{
    SceneManager.LoadScene(testCase.ScenePath);

    // Arbitrary wait for 5 frames for the scene to load, and other stuff to happen (like Realtime GI to appear ...)
    for (int i = 0; i < 5; ++i)
    {
        yield return(null);
    }

    // Load the test settings
    var settings = GameObject.FindObjectOfType <HDRP_TestSettings>();

    // Prefer the tagged MainCamera; fall back to any camera in the scene.
    var camera = GameObject.FindGameObjectWithTag("MainCamera").GetComponent <Camera>();
    if (camera == null)
    {
        camera = GameObject.FindObjectOfType <Camera>();
    }
    if (camera == null)
    {
        Assert.Fail("Missing camera for graphic tests.");
    }

    Time.captureFramerate = settings.captureFramerate;

    if (XRGraphicsAutomatedTests.enabled)
    {
        if (settings.xrCompatible)
        {
            XRGraphicsAutomatedTests.running = true;

            // Increase tolerance to account for slight changes due to float precision
            settings.ImageComparisonSettings.AverageCorrectnessThreshold *= settings.xrThresholdMultiplier;
            settings.ImageComparisonSettings.PerPixelCorrectnessThreshold *= settings.xrThresholdMultiplier;

            // Increase number of volumetric slices to compensate for initial half-resolution due to XR single-pass optimization
            foreach (var volume in GameObject.FindObjectsOfType <Volume>())
            {
                if (volume.profile.TryGet <Fog>(out Fog fog))
                {
                    fog.volumeSliceCount.value *= 2;
                }
            }
        }
        else
        {
            Assert.Ignore("Test scene is not compatible with XR and will be skipped.");
        }
    }

    // Optional per-scene setup hook configured on the test settings asset.
    if (settings.doBeforeTest != null)
    {
        settings.doBeforeTest.Invoke();

        // Wait again one frame, to be sure.
        yield return(null);
    }

    // Reset temporal effects on hdCamera
    HDCamera.GetOrCreate(camera).Reset();

    for (int i = 0; i < settings.waitFrames; ++i)
    {
        yield return(null);
    }

    var settingsSG = (GameObject.FindObjectOfType <HDRP_TestSettings>() as HDRP_ShaderGraph_TestSettings);
    if (settingsSG == null || !settingsSG.compareSGtoBI)
    {
        // Standard Test
        ImageAssert.AreEqual(testCase.ReferenceImage, camera, settings?.ImageComparisonSettings);

        // For some reason, tests on mac os have started failing with render graph enabled by default.
        // Some tests have 400+ gcalloc in them. Unfortunately it's not reproductible outside of command line so it's impossible to debug.
        // That's why we don't test on macos anymore.
        if (settings.checkMemoryAllocation && SystemInfo.graphicsDeviceType != GraphicsDeviceType.Metal)
        {
            // Does it allocate memory when it renders what's on camera?
            bool allocatesMemory = false;
            try
            {
                // GC alloc from Camera.CustomRender (case 1206364)
                int gcAllocThreshold = 2;

                ImageAssert.AllocatesMemory(camera, settings?.ImageComparisonSettings, gcAllocThreshold);
            }
            catch (AssertionException)
            {
                allocatesMemory = true;
            }
            if (allocatesMemory)
            {
                Assert.Fail("Allocated memory when rendering what is on camera");
            }
        }
    }
    else
    {
        // Shader Graph vs built-in comparison mode: both object sets must be present.
        if (settingsSG.sgObjs == null)
        {
            Assert.Fail("Missing Shader Graph objects in test scene.");
        }
        if (settingsSG.biObjs == null)
        {
            Assert.Fail("Missing comparison objects in test scene.");
        }
        settingsSG.sgObjs.SetActive(true);
        settingsSG.biObjs.SetActive(false);
        yield return(null); // Wait a frame
        yield return(null);
        bool sgFail = false;
        bool biFail = false;

        // First test: Shader Graph
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null)?settings.ImageComparisonSettings:null);
        }
        catch (AssertionException)
        {
            sgFail = true;
        }

        settingsSG.sgObjs.SetActive(false);
        settingsSG.biObjs.SetActive(true);
        settingsSG.biObjs.transform.position = settingsSG.sgObjs.transform.position; // Move to the same location.
        yield return(null); // Wait a frame
        yield return(null);

        // Second test: HDRP/Lit Materials
        try
        {
            ImageAssert.AreEqual(testCase.ReferenceImage, camera, (settings != null)?settings.ImageComparisonSettings:null);
        }
        catch (AssertionException)
        {
            biFail = true;
        }

        // Informs which ImageAssert failed, if any.
        if (sgFail && biFail)
        {
            Assert.Fail("Both Shader Graph and Non-Shader Graph Objects failed to match the reference image");
        }
        else if (sgFail)
        {
            Assert.Fail("Shader Graph Objects failed.");
        }
        else if (biFail)
        {
            Assert.Fail("Non-Shader Graph Objects failed to match Shader Graph objects.");
        }
    }
}
/// <summary>
/// Renders a scene preview into <paramref name="textures"/> from the pose of
/// <paramref name="origin"/>. Picks the Volume affecting the origin (local volume
/// containing the point, else the first global one, else the preview root's own),
/// and temporarily disables indirect specular lighting when reflection probes exist,
/// because first-frame CullingResults lack probe data.
/// </summary>
public static void RenderScenePreview(Transform origin, PreviewTextures textures, bool forcePreview)
{
    var pos = origin.position;
    var rot = origin.rotation;
    ReinitializeRenderPipeline();

    var hasReflectionProbes = Object.FindObjectOfType<ReflectionProbe>() != null;
    var volumes = Object.FindObjectsOfType<Volume>();
    Volume volume = null;
    IndirectLightingController indirectLightingController = null;

    // Prefer a local volume whose collider contains the origin; remember the first
    // global volume as a fallback.
    foreach (var vol in volumes)
    {
        if (vol.isGlobal && volume == null)
        {
            volume = vol;
            continue;
        }
        var collider = vol.GetComponent<Collider>();
        // FIX: a local (non-global) volume without a Collider previously caused a
        // NullReferenceException on 'collider.bounds'; skip such volumes instead.
        if (collider != null && collider.bounds.Contains(pos))
        {
            volume = vol;
            break;
        }
    }

    var previewRootPrefab = AssetDatabase.LoadAssetAtPath<GameObject>("Assets/Prefabs/ScenePreviewRoot.prefab");
    var previewRoot = Object.Instantiate(previewRootPrefab, pos, rot);
    var camera = previewRoot.GetComponentInChildren<Camera>();

    if (forcePreview)
    {
        // Keep the preview root upright (yaw only), but aim the detached camera
        // with the full requested rotation.
        previewRoot.transform.SetPositionAndRotation(pos, Quaternion.Euler(new Vector3(0f, rot.eulerAngles.y, 0f)));
        camera.transform.SetParent(null);
        camera.transform.SetPositionAndRotation(pos, rot);
    }

    // This will trigger HDCamera.Update, which must be done before calling HDCamera.GetOrCreate
    // Otherwise m_AdditionalCameraData will not be set and HDCamera will be discarded after first frame
    camera.Render();

    var hdSettings = camera.GetComponent<HDAdditionalCameraData>();
    hdSettings.hasPersistentHistory = true;
    var hd = HDCamera.GetOrCreate(camera);

    if (volume == null)
    {
        volume = previewRoot.GetComponentInChildren<Volume>();
    }

    // CullingResults in first frame after loading scene does not contain any data about reflection probes.
    // Light loop will use further options, which usually means skybox indirect reflections. This ignores
    // occlusion and will break interior lighting. Due to lack of other options, just disable indirect specular
    // lighting for preview rendering.
    indirectLightingController = volume.profile.components.FirstOrDefault(x => x is IndirectLightingController) as IndirectLightingController;
    var indirectControllerAdded = hasReflectionProbes && indirectLightingController == null;
    if (indirectControllerAdded)
    {
        indirectLightingController = volume.profile.Add<IndirectLightingController>();
    }

    var indirectMultiplier = indirectLightingController == null ? 0f : indirectLightingController.reflectionLightingMultiplier.value;
    if (hasReflectionProbes && indirectLightingController != null)
    {
        indirectLightingController.reflectionLightingMultiplier.value = 0f;
    }

    // Point cloud renderers are not part of the regular render loop - update them explicitly.
    var pointCloudRenderers = Object.FindObjectsOfType<NodeTreeRenderer>();
    foreach (var pointCloudRenderer in pointCloudRenderers)
    {
        pointCloudRenderer.UpdateImmediate(camera);
    }

    Render(hd, textures, volume);

    // Restore the scene state that was modified for the preview.
    if (hasReflectionProbes && indirectLightingController != null)
    {
        indirectLightingController.reflectionLightingMultiplier.value = indirectMultiplier;
    }
    if (indirectControllerAdded && indirectLightingController != null)
    {
        volume.profile.Remove<IndirectLightingController>();
    }

    Object.DestroyImmediate(previewRoot);
}
// Scene-view overlay for a planar reflection probe: draws the captured texture
// (exposure-corrected, restretched to the capture aspect ratio) plus the capture's
// field of view and aspect. NOTE(review): relies on the IMGUI layout/repaint event
// pairing and in-place Rect mutation order - do not reorder statements.
void OnOverlayGUI(Object target, SceneView sceneView)
{
    // Draw a preview of the captured texture from the planar reflection
    // Get the exposure texture used in this scene view
    if (!(RenderPipelineManager.currentPipeline is HDRenderPipeline hdrp))
    {
        return;
    }

    var hdCamera = HDCamera.GetOrCreate(sceneView.camera);
    var exposureTex = hdrp.GetExposureTexture(hdCamera);

    var index = Array.IndexOf(m_TypedTargets, target);
    if (index == -1)
    {
        return;
    }
    var p = m_TypedTargets[index];
    if (p.texture == null)
    {
        return;
    }

    // Reserve a square preview area plus one line for the FoV/aspect readouts.
    var previewWidth = k_PreviewHeight;
    var previewSize = new Rect(previewWidth, k_PreviewHeight + EditorGUIUtility.singleLineHeight + 2, 0, 0);

    if (Event.current.type == EventType.Layout || !firstDraw && Event.current.type == EventType.Repaint)
    {
        // Get and reserve rect
        //this can cause the following issue if calls on a repaint before a layout:
        //ArgumentException: Getting control 0's position in a group with only 0 controls when doing repaint
        var cameraRect = GUILayoutUtility.GetRect(previewSize.x, previewSize.y);
        firstDraw = false;

        // The aspect ratio of the capture texture may not be the aspect of the texture
        // So we need to stretch back the texture to the aspect used during the capture
        // to give users a non distorded preview of the capture.
        // Here we compute a centered rect that has the correct aspect for the texture preview.
        var c = new Rect(cameraRect);
        c.y += EditorGUIUtility.singleLineHeight + 2;
        if (p.renderData.aspect > 1)
        {
            // Wider than tall: full width, reduced height, centered vertically.
            c.width = k_PreviewHeight;
            c.height = k_PreviewHeight / p.renderData.aspect;
            c.y += (k_PreviewHeight - c.height) * 0.5f;
        }
        else
        {
            // Taller than wide: full height, reduced width, centered horizontally.
            c.width = k_PreviewHeight * p.renderData.aspect;
            c.height = k_PreviewHeight;
            c.x += (k_PreviewHeight - c.width) * 0.5f;
        }

        // Setup the material to draw the quad with the exposure texture
        var material = GUITextureBlit2SRGBMaterial;
        material.SetTexture("_Exposure", exposureTex);
        Graphics.DrawTexture(c, p.texture, new Rect(0, 0, 1, 1), 0, 0, 0, 0, GUI.color, material, -1);

        // We now display the FoV and aspect used during the capture of the planar reflection
        var fovRect = new Rect(cameraRect);
        fovRect.x += 5;
        fovRect.y += 2;
        fovRect.width -= 10;
        fovRect.height = EditorGUIUtility.singleLineHeight;
        var width = fovRect.width;
        fovRect.width = width * 0.5f;
        GUI.TextField(fovRect, $"F: {p.renderData.fieldOfView:F2}°");
        fovRect.x += width * 0.5f;
        fovRect.width = width * 0.5f;
        GUI.TextField(fovRect, $"A: {p.renderData.aspect:F2}");
    }
}
// Spawns an agent (Ego vehicle, NPC, or Pedestrian) from an API request and reports the
// resulting uid (or an error) back through the ApiManager.
// NOTE(review): 'async void' is used deliberately here - exceptions are caught inside and
// routed to api.SendError rather than being observed by a Task.
// We have to lock api.ActionsSemaphore before the first continuation (await)
// to make sure API calls are executed one after the other
public async void Execute(JSONNode args)
{
    var sim = SimulatorManager.Instance;
    var api = ApiManager.Instance;

    // instead of relying on ApiMAnager's exception handling,
    // we wrap the whole method since we are async
    try
    {
        if (sim == null)
        {
            throw new Exception("SimulatorManager not found! Is scene loaded?");
        }

        // Decode the common request fields shared by all agent types.
        var name = args["name"].Value;
        var type = args["type"].AsInt;
        var position = args["state"]["transform"]["position"].ReadVector3();
        var rotation = args["state"]["transform"]["rotation"].ReadVector3();
        var velocity = args["state"]["velocity"].ReadVector3();
        var angular_velocity = args["state"]["angular_velocity"].ReadVector3();

        string uid;
        var argsUid = args["uid"];
        if (argsUid == null)
        {
            uid = System.Guid.NewGuid().ToString();
            // Add uid key to arguments, as it will be distributed to the clients' simulations
            if (Loader.Instance.Network.IsMaster)
            {
                args.Add("uid", uid);
            }
        }
        else
        {
            uid = argsUid.Value;
        }

        if (type == (int)AgentType.Ego)
        {
            var agents = SimulatorManager.Instance.AgentManager;
            GameObject agentGO = null;
            // Resolve the vehicle description from the connection API (remote lookup).
            VehicleDetailData vehicleData = await ConnectionManager.API.GetByIdOrName<VehicleDetailData>(name);
            var config = new AgentConfig(vehicleData.ToVehicleData());
            config.Position = position;
            config.Rotation = Quaternion.Euler(rotation);
            // Reuse a cached prefab when available; otherwise download and load the bundle.
            if (ApiManager.Instance.CachedVehicles.ContainsKey(vehicleData.Name))
            {
                config.Prefab = ApiManager.Instance.CachedVehicles[vehicleData.Name];
            }
            else
            {
                var assetModel = await DownloadManager.GetAsset(BundleConfig.BundleTypes.Vehicle, vehicleData.AssetGuid, vehicleData.Name);
                config.Prefab = Loader.LoadVehicleBundle(assetModel.LocalPath);
            }
            if (config.Prefab == null)
            {
                throw new Exception($"failed to acquire ego prefab");
            }

            // Collect the distinct sensor plugins (by AssetGuid) plus the bridge plugin,
            // then download them all in parallel.
            var downloads = new List<Task>();
            List<SensorData> sensorsToDownload = new List<SensorData>();
            ConcurrentDictionary<Task, string> assetDownloads = new ConcurrentDictionary<Task, string>();
            if (config.Sensors != null)
            {
                foreach (var plugin in config.Sensors)
                {
                    if (plugin.Plugin.AssetGuid != null && sensorsToDownload.FirstOrDefault(s => s.Plugin.AssetGuid == plugin.Plugin.AssetGuid) == null)
                    {
                        sensorsToDownload.Add(plugin);
                    }
                }
            }
            if (config.BridgeData != null)
            {
                var pluginTask = DownloadManager.GetAsset(BundleConfig.BundleTypes.Bridge, config.BridgeData.AssetGuid, config.BridgeData.Name);
                downloads.Add(pluginTask);
                assetDownloads.TryAdd(pluginTask, config.BridgeData.Type);
            }
            foreach (var sensor in sensorsToDownload)
            {
                var pluginTask = DownloadManager.GetAsset(BundleConfig.BundleTypes.Sensor, sensor.Plugin.AssetGuid, sensor.Name);
                downloads.Add(pluginTask);
                assetDownloads.TryAdd(pluginTask, sensor.Type);
            }
            await Task.WhenAll(downloads);
            foreach (var download in downloads)
            {
                assetDownloads.TryRemove(download, out _);
            }

            agentGO = agents.SpawnAgent(config);
            // First agent becomes the active one; lock exposure adaptation on the
            // simulator camera so the view does not re-adapt on switch.
            if (agents.ActiveAgents.Count == 1)
            {
                agents.SetCurrentActiveAgent(agentGO);
                var hdCamera = HDCamera.GetOrCreate(SimulatorManager.Instance.CameraManager.SimulatorCamera);
                hdCamera.RequestExposureAdaptationLock();
            }
            var rb = agentGO.GetComponent<Rigidbody>();
            if (rb != null)
            {
                rb.velocity = velocity;
                rb.angularVelocity = angular_velocity;
            }
            Debug.Assert(agentGO != null);
            api.Agents.Add(uid, agentGO);
            api.AgentUID.Add(agentGO, uid);

            // Assign a uid to every sensor on the agent.
            var sensors = agentGO.GetComponentsInChildren<SensorBase>(true);
            foreach (var sensor in sensors)
            {
                var sensorUid = System.Guid.NewGuid().ToString();
                if (SimulatorManager.InstanceAvailable)
                {
                    SimulatorManager.Instance.Sensors.AppendUid(sensor, sensorUid);
                }
            }

            api.SendResult(this, new JSONString(uid));
        }
        else if (type == (int)AgentType.Npc)
        {
            var colorData = args["color"].ReadVector3();
            var template = sim.NPCManager.NPCVehicles.Find(obj => obj.Prefab.name == name);
            if (template.Prefab == null)
            {
                throw new Exception($"Unknown '{name}' NPC name");
            }
            // (-1,-1,-1) is the sentinel for "no color requested" -> pick a weighted random one.
            var spawnData = new NPCManager.NPCSpawnData
            {
                Active = true,
                GenId = uid,
                Template = template,
                Position = position,
                Rotation = Quaternion.Euler(rotation),
                Color = colorData == new Vector3(-1, -1, -1) ? sim.NPCManager.GetWeightedRandomColor(template.NPCType) : new Color(colorData.x, colorData.y, colorData.z),
                Seed = sim.NPCManager.NPCSeedGenerator.Next(),
            };
            var npcController = SimulatorManager.Instance.NPCManager.SpawnNPC(spawnData);
            npcController.IsUserSpecified = true;
            npcController.SetBehaviour<NPCManualBehaviour>();

            var body = npcController.GetComponent<Rigidbody>();
            body.velocity = velocity;
            body.angularVelocity = angular_velocity;
            uid = npcController.name;
            api.Agents.Add(uid, npcController.gameObject);
            api.AgentUID.Add(npcController.gameObject, uid);
            api.SendResult(this, new JSONString(uid));

            // Override the color argument as NPCController may change the NPC color
            if (Loader.Instance.Network.IsMaster)
            {
                var colorVector = new Vector3(npcController.NPCColor.r, npcController.NPCColor.g, npcController.NPCColor.b);
                args["color"].WriteVector3(colorVector);
            }
        }
        else if (type == (int)AgentType.Pedestrian)
        {
            var pedManager = SimulatorManager.Instance.PedestrianManager;
            if (!pedManager.gameObject.activeSelf)
            {
                var sceneName = SceneManager.GetActiveScene().name;
                throw new Exception($"{sceneName} is missing Pedestrian NavMesh");
            }
            var model = sim.PedestrianManager.PedestrianData.Find(obj => obj.Name == name).Prefab;
            if (model == null)
            {
                throw new Exception($"Unknown '{name}' pedestrian name");
            }
            var spawnData = new PedestrianManager.PedSpawnData
            {
                Active = true,
                API = true,
                GenId = uid,
                Model = model,
                Position = position,
                Rotation = Quaternion.Euler(rotation),
                Seed = sim.PedestrianManager.PEDSeedGenerator.Next(),
            };
            var pedController = pedManager.SpawnPedestrian(spawnData);
            if (pedController == null)
            {
                throw new Exception($"Pedestrian controller error for '{name}'");
            }
            api.Agents.Add(uid, pedController.gameObject);
            api.AgentUID.Add(pedController.gameObject, uid);
            api.SendResult(this, new JSONString(uid));
        }
        else
        {
            throw new Exception($"Unsupported '{args["type"]}' type");
        }
    }
    catch (Exception e)
    {
        // Report the failure back through the API instead of letting the async void throw.
        Debug.LogException(e);
        api.SendError(this, e.Message);
    }
    finally
    {
        // Always signal completion, success or failure.
        Executed?.Invoke(this);
    }
}