/// <summary>
/// Forwards to <see cref="SetModel"/> with the same arguments.
/// </summary>
/// <param name="behaviorName">The identifier of the behavior.</param>
/// <param name="model">The model to use for inference.</param>
/// <param name="inferenceDevice">Device on which the model will be run.</param>
public void GiveModel(
    string behaviorName,
    NNModel model,
    InferenceDevice inferenceDevice = InferenceDevice.CPU)
    => SetModel(behaviorName, model, inferenceDevice);
/// <summary>
/// Initializes the Brain with the Model that it will use when selecting actions for
/// the agents
/// </summary>
/// <param name="model"> The Barracuda model to load </param>
/// <param name="actionSpec"> Description of the actions for the Agent.</param>
/// <param name="inferenceDevice"> Inference execution device. CPU is the fastest
/// option for most of ML Agents models. </param>
/// <param name="seed"> The seed that will be used to initialize the RandomNormal
/// and Multinomial objects used when running inference.</param>
/// <exception cref="UnityAgentsException">Throws an error when the model is null
/// </exception>
public ModelRunner(
    NNModel model,
    ActionSpec actionSpec,
    InferenceDevice inferenceDevice,
    int seed = 0)
{
    Model barracudaModel;
    m_Model = model;
    // NOTE(review): `model` is dereferenced here BEFORE the null check below, so a
    // null model throws NullReferenceException rather than the documented
    // UnityAgentsException — confirm callers never pass null.
    m_ModelName = model.name;
    m_InferenceDevice = inferenceDevice;
    m_TensorAllocator = new TensorCachingAllocator();
    if (model != null)
    {
#if BARRACUDA_VERBOSE
        m_Verbose = true;
#endif
        D.logEnabled = m_Verbose;
        barracudaModel = ModelLoader.Load(model);
        // Map the public InferenceDevice enum onto a Barracuda worker backend.
        WorkerFactory.Type executionDevice;
        switch (inferenceDevice)
        {
            case InferenceDevice.CPU:
                executionDevice = WorkerFactory.Type.CSharp;
                break;
            case InferenceDevice.GPU:
                executionDevice = WorkerFactory.Type.ComputePrecompiled;
                break;
            case InferenceDevice.Burst:
                executionDevice = WorkerFactory.Type.CSharpBurst;
                break;
            case InferenceDevice.Default: // fallthrough
            default:
                // Default maps to the Burst backend.
                executionDevice = WorkerFactory.Type.CSharpBurst;
                break;
        }
        m_Engine = WorkerFactory.CreateWorker(executionDevice, barracudaModel, m_Verbose);
    }
    else
    {
        // NOTE(review): this branch leaves barracudaModel null, so the calls
        // below would throw; it is effectively unreachable given the
        // model.name dereference above.
        barracudaModel = null;
        m_Engine = null;
    }
    // Cache the model's I/O layout and build the tensor plumbing around it.
    m_InferenceInputs = barracudaModel.GetInputTensors();
    m_OutputNames = barracudaModel.GetOutputNames();
    m_TensorGenerator = new TensorGenerator(
        seed, m_TensorAllocator, m_Memories, barracudaModel);
    m_TensorApplier = new TensorApplier(
        actionSpec, seed, m_TensorAllocator, m_Memories, barracudaModel);
    m_InputsByName = new Dictionary<string, Tensor>();
    m_InferenceOutputs = new List<TensorProxy>();
}
/// <summary>
/// Updates the model and related details for this behavior.
/// </summary>
/// <param name="newBehaviorName">New name for the behavior.</param>
/// <param name="model">New neural network model for this behavior.</param>
/// <param name="inferenceDevice">New inference device for this behavior.</param>
public void GiveModel(
    string newBehaviorName,
    NNModel model,
    InferenceDevice inferenceDevice = InferenceDevice.CPU)
{
    m_BehaviorName = newBehaviorName;
    m_Model = model;
    m_InferenceDevice = inferenceDevice;
}
/// <inheritdoc />
public BarracudaPolicy(
    BrainParameters brainParameters,
    NNModel model,
    InferenceDevice inferenceDevice)
{
    // Runners are pooled by the Academy, one per (model, device) pair.
    m_ModelRunner = Academy.Instance.GetOrCreateModelRunner(
        model, brainParameters, inferenceDevice);
}
/// <summary>
/// Updates the Model for the agent. Any model currently assigned to the
/// agent will be replaced with the provided one. If the arguments are
/// identical to the current parameters of the agent, the model will
/// remain unchanged.
/// </summary>
/// <param name="behaviorName">The identifier of the behavior. This
/// will categorize the agent when training.</param>
/// <param name="model">The model to use for inference.</param>
/// <param name="inferenceDevice">Device on which the model will be run.</param>
public void GiveModel(
    string behaviorName,
    NNModel model,
    InferenceDevice inferenceDevice = InferenceDevice.CPU)
{
    m_PolicyFactory.GiveModel(behaviorName, model, inferenceDevice);
    // Release the current policy before generating a replacement from the
    // updated factory settings.
    m_Brain?.Dispose();
    m_Brain = m_PolicyFactory.GeneratePolicy(Heuristic);
}
/// <inheritdoc />
public BarracudaPolicy(
    ActionSpec actionSpec,
    NNModel model,
    InferenceDevice inferenceDevice)
{
    m_ActionSpec = actionSpec;
    // Runners are pooled by the Academy, one per (model, device) pair.
    m_ModelRunner = Academy.Instance.GetOrCreateModelRunner(
        model, actionSpec, inferenceDevice);
}
/// <inheritdoc />
public BarracudaPolicy(
    ActionSpec actionSpec,
    NNModel model,
    InferenceDevice inferenceDevice)
{
    m_ModelRunner = Academy.Instance.GetOrCreateModelRunner(
        model, actionSpec, inferenceDevice);
    // Hybrid (mixed continuous/discrete) action specs are not supported here.
    actionSpec.CheckNotHybrid();
    if (actionSpec.NumContinuousActions > 0)
    {
        m_SpaceType = SpaceType.Continuous;
    }
    else
    {
        m_SpaceType = SpaceType.Discrete;
    }
}
/// <summary>
/// Creates or retrieves an existing ModelRunner that uses the same
/// NNModel and the InferenceDevice as provided.
/// </summary>
/// <param name="model">The NNModel the ModelRunner must use.</param>
/// <param name="actionSpec"> Description of the actions for the Agent.</param>
/// <param name="inferenceDevice">
/// The inference device (CPU or GPU) the ModelRunner will use.
/// </param>
/// <returns> The ModelRunner compatible with the input settings.</returns>
internal ModelRunner GetOrCreateModelRunner(
    NNModel model, ActionSpec actionSpec, InferenceDevice inferenceDevice)
{
    var existing = m_ModelRunners.Find(r => r.HasModel(model, inferenceDevice));
    if (existing != null)
    {
        return existing;
    }
    // No compatible runner cached yet: build one, and bump the seed so each
    // runner gets distinct inference randomness.
    var created = new ModelRunner(model, actionSpec, inferenceDevice, m_InferenceSeed);
    m_ModelRunners.Add(created);
    m_InferenceSeed++;
    return created;
}
/// <inheritdoc />
public BarracudaPolicy(
    BrainParameters brainParameters,
    NNModel model,
    InferenceDevice inferenceDevice)
{
    // Locate the scene's Academy and make sure it is initialized before
    // asking it for a runner.
    var academy = Object.FindObjectOfType<Academy>();
    academy.LazyInitialization();
    m_ModelRunner = academy.GetOrCreateModelRunner(
        model, brainParameters, inferenceDevice);
}
/// <summary>
/// Generate an InferenceEvent for the model.
/// </summary>
/// <param name="nnModel">The Barracuda model asset to describe.</param>
/// <param name="behaviorName">Behavior name; hashed before being stored.</param>
/// <param name="inferenceDevice">Device the model will execute on.</param>
/// <param name="sensors">Sensors attached to the agent.</param>
/// <param name="actionSpec">Action specification for the behavior.</param>
/// <param name="actuators">Actuators attached to the agent.</param>
/// <returns>A populated InferenceEvent describing the model, observations,
/// actions, and actuators.</returns>
internal static InferenceEvent GetEventForModel(
    NNModel nnModel,
    string behaviorName,
    InferenceDevice inferenceDevice,
    IList<ISensor> sensors,
    ActionSpec actionSpec,
    IList<IActuator> actuators
)
{
    var barracudaModel = ModelLoader.Load(nnModel);
    var inferenceEvent = new InferenceEvent();

    // Hash the behavior name so that there's no concern about PII or "secret" data being leaked.
    inferenceEvent.BehaviorName = AnalyticsUtils.Hash(behaviorName);

    inferenceEvent.BarracudaModelSource = barracudaModel.IrSource;
    inferenceEvent.BarracudaModelVersion = barracudaModel.IrVersion;
    inferenceEvent.BarracudaModelProducer = barracudaModel.ProducerName;
    // NOTE(review): assumes the MemorySize tensor is always present in the
    // model; a model without it would throw here — confirm upstream validation.
    inferenceEvent.MemorySize = (int)barracudaModel.GetTensorByName(TensorNames.MemorySize)[0];
    inferenceEvent.InferenceDevice = (int)inferenceDevice;

    if (barracudaModel.ProducerName == "Script")
    {
        // .nn files don't have these fields set correctly. Assign some placeholder values.
        inferenceEvent.BarracudaModelSource = "NN";
        inferenceEvent.BarracudaModelProducer = "tensorflow_to_barracuda.py";
    }

#if UNITY_EDITOR
    // Report the installed Barracuda package version (only resolvable in-editor).
    var barracudaPackageInfo = UnityEditor.PackageManager.PackageInfo.FindForAssembly(typeof(Tensor).Assembly);
    inferenceEvent.BarracudaPackageVersion = barracudaPackageInfo.version;
#else
    inferenceEvent.BarracudaPackageVersion = null;
#endif

    inferenceEvent.ActionSpec = EventActionSpec.FromActionSpec(actionSpec);
    inferenceEvent.ObservationSpecs = new List<EventObservationSpec>(sensors.Count);
    foreach (var sensor in sensors)
    {
        inferenceEvent.ObservationSpecs.Add(EventObservationSpec.FromSensor(sensor));
    }

    inferenceEvent.ActuatorInfos = new List<EventActuatorInfo>(actuators.Count);
    foreach (var actuator in actuators)
    {
        inferenceEvent.ActuatorInfos.Add(EventActuatorInfo.FromActuator(actuator));
    }

    inferenceEvent.TotalWeightSizeBytes = GetModelWeightSize(barracudaModel);
    inferenceEvent.ModelHash = GetModelHash(barracudaModel);
    return(inferenceEvent);
}
/// <inheritdoc />
public BarracudaPolicy(
    BrainParameters brainParameters,
    NNModel model,
    InferenceDevice inferenceDevice,
    string behaviorName
)
{
    m_BehaviorName = behaviorName;
    m_BrainParameters = brainParameters;
    // Runners are pooled by the Academy, one per (model, device) pair.
    m_ModelRunner = Academy.Instance.GetOrCreateModelRunner(
        model, brainParameters, inferenceDevice);
}
/// <summary>
/// Creates or retrieves an existing ModelRunner that uses the same
/// NNModel and the InferenceDevice as provided.
/// </summary>
/// <param name="model"> The NNModel the ModelRunner must use </param>
/// <param name="brainParameters"> The brainParameters used to create
/// the ModelRunner </param>
/// <param name="inferenceDevice"> The inference device (CPU or GPU)
/// the ModelRunner will use </param>
/// <returns> The ModelRunner compatible with the input settings</returns>
internal ModelRunner GetOrCreateModelRunner(
    NNModel model, BrainParameters brainParameters, InferenceDevice inferenceDevice)
{
    var runner = m_ModelRunners.Find(r => r.HasModel(model, inferenceDevice));
    if (runner == null)
    {
        // No cached runner for this (model, device) pair — create and cache one.
        runner = new ModelRunner(model, brainParameters, inferenceDevice);
        m_ModelRunners.Add(runner);
    }
    return runner;
}
/// <summary>
/// Instantiate a BarracudaPolicy with the necessary objects for it to run.
/// </summary>
/// <param name="actionSpec">The action spec of the behavior.</param>
/// <param name="actuators">The actuators used for this behavior.</param>
/// <param name="model">The Neural Network to use.</param>
/// <param name="inferenceDevice">Which device Barracuda will run on.</param>
/// <param name="behaviorName">The name of the behavior.</param>
public BarracudaPolicy(
    ActionSpec actionSpec,
    IList<IActuator> actuators,
    NNModel model,
    InferenceDevice inferenceDevice,
    string behaviorName
)
{
    m_BehaviorName = behaviorName;
    m_ActionSpec = actionSpec;
    m_Actuators = actuators;
    // Runners are pooled by the Academy, one per (model, device) pair.
    m_ModelRunner = Academy.Instance.GetOrCreateModelRunner(
        model, actionSpec, inferenceDevice);
}
/// <summary>
/// Updates the Model for the agent. Any model currently assigned to the
/// agent will be replaced with the provided one. If the arguments are
/// identical to the current parameters of the agent, the model will
/// remain unchanged.
/// </summary>
/// <param name="behaviorName">The identifier of the behavior. This
/// will categorize the agent when training.</param>
/// <param name="model">The model to use for inference.</param>
/// <param name="inferenceDevice">Define on what device the model
/// will be run.</param>
public void SetModel(
    string behaviorName,
    NNModel model,
    InferenceDevice inferenceDevice = InferenceDevice.CPU)
{
    var unchanged =
        behaviorName == m_PolicyFactory.behaviorName &&
        model == m_PolicyFactory.model &&
        inferenceDevice == m_PolicyFactory.inferenceDevice;
    if (unchanged)
    {
        // If everything is the same, don't make any changes.
        return;
    }
    // Swapping behavior parameters ends the current episode for this agent.
    NotifyAgentDone(DoneReason.Disabled);
    m_PolicyFactory.model = model;
    m_PolicyFactory.inferenceDevice = inferenceDevice;
    m_PolicyFactory.behaviorName = behaviorName;
    ReloadPolicy();
}
/// <summary>
/// Initializes the Brain with the Model that it will use when selecting actions for
/// the agents
/// </summary>
/// <param name="model"> The Barracuda model to load </param>
/// <param name="brainParameters"> The parameters of the Brain used to generate the
/// placeholder tensors </param>
/// <param name="inferenceDevice"> Inference execution device. CPU is the fastest
/// option for most of ML Agents models. </param>
/// <param name="seed"> The seed that will be used to initialize the RandomNormal
/// and Multinomial objects used when running inference.</param>
/// <exception cref="UnityAgentsException">Throws an error when the model is null
/// </exception>
public ModelRunner(
    NNModel model,
    BrainParameters brainParameters,
    InferenceDevice inferenceDevice = InferenceDevice.CPU,
    int seed = 0)
{
    Model barracudaModel;
    m_Model = model;
    m_InferenceDevice = inferenceDevice;
    m_TensorAllocator = new TensorCachingAllocator();
    if (model != null)
    {
#if BARRACUDA_VERBOSE
        m_Verbose = true;
#endif
        D.logEnabled = m_Verbose;
        barracudaModel = ModelLoader.Load(model);
        // GPU runs on the precompiled compute backend; everything else falls
        // back to the C# reference backend.
        var executionDevice = inferenceDevice == InferenceDevice.GPU
            ? WorkerFactory.Type.ComputePrecompiled
            : WorkerFactory.Type.CSharp;
        m_Engine = WorkerFactory.CreateWorker(executionDevice, barracudaModel, m_Verbose);
    }
    else
    {
        // NOTE(review): with a null model, the param-loader calls below
        // receive a null barracudaModel — presumably they tolerate it; confirm.
        barracudaModel = null;
        m_Engine = null;
    }
    m_InferenceInputs = BarracudaModelParamLoader.GetInputTensors(barracudaModel);
    m_OutputNames = BarracudaModelParamLoader.GetOutputNames(barracudaModel);
    m_TensorGenerator = new TensorGenerator(
        seed, m_TensorAllocator, m_Memories, barracudaModel);
    m_TensorApplier = new TensorApplier(
        brainParameters, seed, m_TensorAllocator, m_Memories, barracudaModel);
}
/// <summary>
/// Reports an analytics event describing the model the first time that model is
/// set; repeated calls with the same NNModel are deduplicated via s_SentModels.
/// </summary>
/// <param name="nnModel">The model being assigned.</param>
/// <param name="behaviorName">Behavior name (hashed downstream before sending).</param>
/// <param name="inferenceDevice">Device the model will execute on.</param>
/// <param name="sensors">Sensors attached to the agent.</param>
/// <param name="actionSpec">Action specification for the behavior.</param>
/// <param name="actuators">Actuators attached to the agent.</param>
public static void InferenceModelSet(
    NNModel nnModel,
    string behaviorName,
    InferenceDevice inferenceDevice,
    IList<ISensor> sensors,
    ActionSpec actionSpec,
    IList<IActuator> actuators
)
{
    // The event shouldn't be able to report if this is disabled but if we know we're not going to report
    // Lets early out and not waste time gathering all the data
    if (!IsAnalyticsEnabled())
    {
        return;
    }

    if (!EnableAnalytics())
    {
        return;
    }

    var added = s_SentModels.Add(nnModel);

    if (!added)
    {
        // We previously added this model. Exit so we don't resend.
        return;
    }
    var data = GetEventForModel(nnModel, behaviorName, inferenceDevice, sensors, actionSpec, actuators);
    // Note - to debug, use JsonUtility.ToJson on the event.
    // Debug.Log(JsonUtility.ToJson(data, true));
#if UNITY_EDITOR && MLA_UNITY_ANALYTICS_MODULE
    if (AnalyticsUtils.s_SendEditorAnalytics)
    {
        EditorAnalytics.SendEventWithLimit(k_EventName, data, k_EventVersion);
    }
#else
    // NOTE(review): outside the editor `data` is gathered but never sent, and
    // this trailing return is a no-op kept for symmetry with the #if branch.
    return;
#endif
}
/// <summary>
/// Whether this runner was created for the given model on the given device.
/// </summary>
/// <param name="other">Model to compare against.</param>
/// <param name="otherInferenceDevice">Device to compare against.</param>
/// <returns>True when both the model and the device match.</returns>
public bool HasModel(NNModel other, InferenceDevice otherInferenceDevice)
    => m_Model == other && m_InferenceDevice == otherInferenceDevice;
/// <summary>
/// Initializes the Brain with the Model that it will use when selecting actions for
/// the agents
/// </summary>
/// <param name="model"> The Barracuda model to load </param>
/// <param name="actionSpec"> Description of the actions for the Agent.</param>
/// <param name="inferenceDevice"> Inference execution device. CPU is the fastest
/// option for most of ML Agents models. </param>
/// <param name="seed"> The seed that will be used to initialize the RandomNormal
/// and Multinomial objects used when running inference.</param>
/// <param name="deterministicInference"> Inference only: set to true if the action selection from model should be
/// deterministic. </param>
/// <exception cref="UnityAgentsException">Throws an error when the model is null
/// </exception>
public ModelRunner(
    NNModel model,
    ActionSpec actionSpec,
    InferenceDevice inferenceDevice,
    int seed = 0,
    bool deterministicInference = false)
{
    // Fail fast with the documented exception type. The previous code
    // dereferenced `model` (model.name) before its null check and then called
    // into a null barracudaModel after the else branch, so a null model
    // produced a NullReferenceException instead of UnityAgentsException.
    if (model == null)
    {
        throw new UnityAgentsException("ModelRunner requires a non-null model.");
    }
    m_Model = model;
    m_ModelName = model.name;
    m_InferenceDevice = inferenceDevice;
    m_DeterministicInference = deterministicInference;
    m_TensorAllocator = new TensorCachingAllocator();
#if BARRACUDA_VERBOSE
    m_Verbose = true;
#endif
    D.logEnabled = m_Verbose;
    Model barracudaModel = ModelLoader.Load(model);
    // Reject models whose trainer/format version is incompatible with this runtime.
    var failedCheck = BarracudaModelParamLoader.CheckModelVersion(barracudaModel);
    if (failedCheck != null)
    {
        if (failedCheck.CheckType == BarracudaModelParamLoader.FailedCheck.CheckTypeEnum.Error)
        {
            throw new UnityAgentsException(failedCheck.Message);
        }
    }
    // Map the public InferenceDevice enum onto a Barracuda worker backend.
    WorkerFactory.Type executionDevice;
    switch (inferenceDevice)
    {
        case InferenceDevice.CPU:
            executionDevice = WorkerFactory.Type.CSharp;
            break;
        case InferenceDevice.GPU:
            executionDevice = WorkerFactory.Type.ComputePrecompiled;
            break;
        case InferenceDevice.Burst:
            executionDevice = WorkerFactory.Type.CSharpBurst;
            break;
        case InferenceDevice.Default: // fallthrough
        default:
            executionDevice = WorkerFactory.Type.CSharpBurst;
            break;
    }
    m_Engine = WorkerFactory.CreateWorker(executionDevice, barracudaModel, m_Verbose);
    // Cache the model's I/O layout and build the tensor plumbing around it.
    m_InferenceInputs = barracudaModel.GetInputTensors();
    m_OutputNames = barracudaModel.GetOutputNames(m_DeterministicInference);
    m_TensorGenerator = new TensorGenerator(
        seed, m_TensorAllocator, m_Memories, barracudaModel, m_DeterministicInference);
    m_TensorApplier = new TensorApplier(
        actionSpec, seed, m_TensorAllocator, m_Memories, barracudaModel, m_DeterministicInference);
    m_InputsByName = new Dictionary<string, Tensor>();
    m_InferenceOutputs = new List<TensorProxy>();
}