/// <summary>
/// Dispatches the side effects associated with a command received from the
/// external process.
/// </summary>
/// <param name="command">The command to act upon.</param>
void SendCommandEvent(CommandProto command)
{
    if (command == CommandProto.Quit)
    {
        NotifyQuitAndShutDownChannel();
    }
    else if (command == CommandProto.Reset)
    {
        // Drop every queued decision request before signaling the reset.
        foreach (var pendingAgents in m_OrderedAgentsRequestingDecisions.Values)
        {
            pendingAgents.Clear();
        }
        ResetCommandReceived?.Invoke();
    }
    // Any other command is intentionally ignored.
}
/// <summary>
/// Sends the academy parameters through the Communicator.
/// Is used by the academy to send the AcademyParameters to the communicator.
/// </summary>
/// <returns>The External Initialization Parameters received.</returns>
/// <param name="academyParameters">The Unity Initialization Parameters to be sent.</param>
/// <exception cref="UnityAgentsException">
/// Thrown when the communicator fails to establish a connection with the
/// external process.
/// </exception>
public UnityRLInitializationInput SendAcademyParameters(
    UnityRLInitializationOutput academyParameters)
{
    UnityInput input;
    // Declared unassigned: either Initialize() assigns it, or the catch
    // block throws — the original code allocated a throwaway UnityInput here.
    UnityInput initializationInput;
    try
    {
        initializationInput = m_communicator.Initialize(
            new UnityOutput
            {
                RlInitializationOutput = academyParameters
            },
            out input);
    }
    catch
    {
        // NOTE(review): the underlying exception is discarded here; if
        // UnityAgentsException exposes an inner-exception constructor,
        // forwarding it would make connection failures easier to diagnose —
        // confirm against its declaration.
        throw new UnityAgentsException(
            "The Communicator was unable to connect. Please make sure the External " +
            "process is ready to accept communication with Unity.");
    }
    // Cache the state carried by the very first RL input so the academy can
    // act on it before the regular exchange loop starts.
    var firstRlInput = input.RlInput;
    m_command = firstRlInput.Command;
    m_environmentParameters = firstRlInput.EnvironmentParameters;
    m_isTraining = firstRlInput.IsTraining;
    return initializationInput.RlInitializationInput;
}
/// <summary>
/// Helper method that sends the current UnityRLOutput, receives the next UnityInput and
/// applies the appropriate AgentAction to the agents.
/// </summary>
void SendBatchedMessageHelper()
{
    var input = m_communicator.Exchange(
        new UnityOutput
        {
            RlOutput = m_currentUnityRLOutput
        });
    m_messagesReceived += 1;

    // The batched agent infos have been sent; clear them so the next step
    // starts from an empty batch.
    foreach (var k in m_currentUnityRLOutput.AgentInfos.Keys)
    {
        m_currentUnityRLOutput.AgentInfos[k].Value.Clear();
    }

    // A null exchange result (or a missing RL input) means the external
    // process is gone: record Quit and bail out.
    if (input == null)
    {
        m_command = CommandProto.Quit;
        return;
    }

    var rlInput = input.RlInput;
    if (rlInput == null)
    {
        m_command = CommandProto.Quit;
        return;
    }

    m_command = rlInput.Command;
    m_environmentParameters = rlInput.EnvironmentParameters;
    m_isTraining = rlInput.IsTraining;

    if (rlInput.AgentActions == null)
    {
        return;
    }

    foreach (var brainName in rlInput.AgentActions.Keys)
    {
        // Hoist both lookups: the original re-indexed these dictionaries up
        // to three times per brain inside the loop.
        var agents = m_currentAgents[brainName];
        if (!agents.Any())
        {
            continue;
        }
        var actions = rlInput.AgentActions[brainName].Value;
        if (!actions.Any())
        {
            continue;
        }
        // Actions arrive in the same order the agents were batched in, so a
        // positional pairing between the two lists is the intended mapping.
        // (Count property instead of the Enumerable.Count() extension —
        // the collection is indexer-backed.)
        for (var i = 0; i < agents.Count; i++)
        {
            var agent = agents[i];
            var action = actions[i];
            agent.UpdateVectorAction(
                action.VectorActions.ToArray());
            agent.UpdateMemoriesAction(
                action.Memories.ToList());
            agent.UpdateTextAction(
                action.TextActions);
        }
    }
}
/// <summary>
/// Registers a command under the given name along with a human-readable
/// description.
/// </summary>
/// <param name="name">Key the command is looked up by.</param>
/// <param name="command">The command to associate with the name.</param>
/// <param name="description">Help text describing the command.</param>
public void RegisterCommand(string name, CommandProto command, string description)
{
    // Indexer assignment inserts or overwrites, so re-registering a name
    // silently replaces both entries.
    allDescriptions[name] = description;
    allCommands[name] = command;
}