/// <summary>
/// Factory for the ModelParamLoader : Creates a ModelParamLoader and runs the checks
/// on it.
/// </summary>
/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="sensors">Attached sensor components</param>
/// <param name="actuatorComponents">Attached actuator components</param>
/// <param name="observableAttributeTotalSize">Sum of the sizes of all ObservableAttributes.</param>
/// <param name="behaviorType">BehaviorType or the Agent to check.</param>
/// <returns>A IEnumerable of the checks that failed</returns>
public static IEnumerable<FailedCheck> CheckModel(
    Model model,
    BrainParameters brainParameters,
    ISensor[] sensors,
    ActuatorComponent[] actuatorComponents,
    int observableAttributeTotalSize = 0,
    BehaviorType behaviorType = BehaviorType.Default
)
{
    var checks = new List<FailedCheck>();

    // No model assigned at all: report (Info for trainable setups, since training
    // can still proceed without a model) and stop — nothing else can be checked.
    if (model == null)
    {
        var message = "There is no model for this Brain; cannot run inference. ";
        message += behaviorType == BehaviorType.InferenceOnly
            ? "Either assign a model, or change to a different Behavior Type."
            : "(But can still train)";
        checks.Add(FailedCheck.Info(message));
        return checks;
    }

    // Structural sanity: all tensors the loader expects must exist in the model.
    // CheckExpectedTensors records its own failures into the list.
    if (!model.CheckExpectedTensors(checks))
    {
        return checks;
    }

    // Reject models trained with an API version outside the supported range;
    // no further checks are meaningful against an incompatible model.
    var apiVersion = model.GetVersion();
    if (apiVersion < (int)ModelApiVersion.MinSupportedVersion ||
        apiVersion > (int)ModelApiVersion.MaxSupportedVersion)
    {
        checks.Add(FailedCheck.Warning(
            $"Version of the trainer the model was trained with ({apiVersion}) " +
            $"is not compatible with the current range of supported versions: " +
            $"({(int)ModelApiVersion.MinSupportedVersion} to {(int)ModelApiVersion.MaxSupportedVersion})."
        ));
        return checks;
    }

    // The MemorySize tensor is required; -1 marks it as absent.
    var memorySize = (int)model.GetTensorByName(TensorNames.MemorySize)[0];
    if (memorySize == -1)
    {
        checks.Add(FailedCheck.Warning(
            $"Missing node in the model provided : {TensorNames.MemorySize}"
        ));
        return checks;
    }

    // Input-tensor checks differ per model API version; other versions inside
    // the supported range get no version-specific input checks.
    switch (apiVersion)
    {
        case (int)ModelApiVersion.MLAgents1_0:
            checks.AddRange(
                CheckInputTensorPresenceLegacy(model, brainParameters, memorySize, sensors)
            );
            checks.AddRange(
                CheckInputTensorShapeLegacy(model, brainParameters, sensors, observableAttributeTotalSize)
            );
            break;
        case (int)ModelApiVersion.MLAgents2_0:
            checks.AddRange(
                CheckInputTensorPresence(model, brainParameters, memorySize, sensors)
            );
            checks.AddRange(
                CheckInputTensorShape(model, brainParameters, sensors, observableAttributeTotalSize)
            );
            break;
    }

    // Output checks are version-independent.
    checks.AddRange(
        CheckOutputTensorShape(model, brainParameters, actuatorComponents)
    );
    checks.AddRange(
        CheckOutputTensorPresence(model, memorySize)
    );
    return checks;
}
/// <summary>
/// Factory for the ModelParamLoader : Creates a ModelParamLoader and runs the checks
/// on it.
/// </summary>
/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="sensors">Attached sensor components</param>
/// <param name="actuatorComponents">Attached actuator components</param>
/// <param name="observableAttributeTotalSize">Sum of the sizes of all ObservableAttributes.</param>
/// <param name="behaviorType">BehaviorType or the Agent to check.</param>
/// <param name="deterministicInference"> Inference only: set to true if the action selection from model should be
/// deterministic. </param>
/// <returns>A IEnumerable of the checks that failed</returns>
public static IEnumerable<FailedCheck> CheckModel(
    Model model,
    BrainParameters brainParameters,
    ISensor[] sensors,
    ActuatorComponent[] actuatorComponents,
    int observableAttributeTotalSize = 0,
    BehaviorType behaviorType = BehaviorType.Default,
    bool deterministicInference = false
)
{
    var results = new List<FailedCheck>();

    // No model assigned at all: report (Info for trainable setups, since training
    // can still proceed without a model) and stop — nothing else can be checked.
    if (model == null)
    {
        var message = "There is no model for this Brain; cannot run inference. ";
        message += behaviorType == BehaviorType.InferenceOnly
            ? "Either assign a model, or change to a different Behavior Type."
            : "(But can still train)";
        results.Add(FailedCheck.Info(message));
        return results;
    }

    // Structural sanity: all tensors the loader expects (including the
    // deterministic-action outputs when requested) must exist in the model.
    // CheckExpectedTensors records its own failures into the list.
    if (!model.CheckExpectedTensors(results, deterministicInference))
    {
        return results;
    }

    // Version compatibility is reported but non-fatal here: the remaining
    // checks still run so the user sees every problem at once.
    var modelApiVersion = model.GetVersion();
    var versionCheck = CheckModelVersion(model);
    if (versionCheck != null)
    {
        results.Add(versionCheck);
    }

    // The MemorySize tensor is required; -1 marks it as absent.
    var memorySize = (int)model.GetTensorByName(TensorNames.MemorySize)[0];
    if (memorySize == -1)
    {
        results.Add(FailedCheck.Warning(
            $"Missing node in the model provided : {TensorNames.MemorySize}"
        ));
        return results;
    }

    // Input-tensor checks differ per model API version; unrecognized versions
    // get no version-specific input checks.
    switch (modelApiVersion)
    {
        case (int)ModelApiVersion.MLAgents1_0:
            results.AddRange(
                CheckInputTensorPresenceLegacy(model, brainParameters, memorySize, sensors)
            );
            results.AddRange(
                CheckInputTensorShapeLegacy(model, brainParameters, sensors, observableAttributeTotalSize)
            );
            break;
        case (int)ModelApiVersion.MLAgents2_0:
            results.AddRange(
                CheckInputTensorPresence(model, brainParameters, memorySize, sensors, deterministicInference)
            );
            results.AddRange(
                CheckInputTensorShape(model, brainParameters, sensors, observableAttributeTotalSize)
            );
            break;
    }

    // Output checks are version-independent.
    results.AddRange(
        CheckOutputTensorShape(model, brainParameters, actuatorComponents)
    );
    results.AddRange(
        CheckOutputTensorPresence(model, memorySize, deterministicInference)
    );
    return results;
}