/// <summary>
/// Sends the academy parameters through the Communicator.
/// Is used by the academy to send the AcademyParameters to the communicator.
/// </summary>
/// <returns>The External Initialization Parameters received.</returns>
/// <param name="academyParameters">The Unity Initialization Parameters to be sent.</param>
/// <exception cref="UnityAgentsException">
/// Thrown when the Communicator fails to connect to the external process.
/// </exception>
public CommunicatorObjects.UnityRLInitializationInput SendAcademyParameters(
    CommunicatorObjects.UnityRLInitializationOutput academyParameters)
{
    CommunicatorObjects.UnityInput input;
    var initializationInput = new CommunicatorObjects.UnityInput();
    try
    {
        initializationInput = m_communicator.Initialize(
            new CommunicatorObjects.UnityOutput
            {
                RlInitializationOutput = academyParameters
            },
            out input);
    }
    catch (System.Exception ex)
    {
        // Preserve the underlying failure reason in the rethrown message;
        // the previous bare `catch` discarded all connection diagnostics.
        throw new UnityAgentsException(
            "The Communicator was unable to connect. Please make sure the External " +
            "process is ready to accept communication with Unity. Original error: " +
            ex.Message);
    }

    // Apply the first command/parameters received during the handshake.
    var firstRlInput = input.RlInput;
    m_command = firstRlInput.Command;
    m_environmentParameters = firstRlInput.EnvironmentParameters;
    m_isTraining = firstRlInput.IsTraining;
    return initializationInput.RlInitializationInput;
}
/// <summary>
/// Helper method that sends the current UnityRLOutput, receives the next UnityInput and
/// applies the appropriate AgentAction to the agents.
/// </summary>
void SendBatchedMessageHelper()
{
    var input = m_communicator.Exchange(
        new CommunicatorObjects.UnityOutput
        {
            RlOutput = m_currentUnityRLOutput
        });
    m_messagesReceived += 1;

    // Clear the per-brain AgentInfo batches so the next step starts empty.
    // (Only the stored lists are mutated; the dictionary itself is not
    // modified during enumeration.)
    foreach (string k in m_currentUnityRLOutput.AgentInfos.Keys)
    {
        m_currentUnityRLOutput.AgentInfos[k].Value.Clear();
    }

    // A null exchange result or a missing RLInput both mean the external
    // process is shutting down: record Quit and stop processing.
    if (input == null)
    {
        m_command = CommunicatorObjects.CommandProto.Quit;
        return;
    }
    CommunicatorObjects.UnityRLInput rlInput = input.RlInput;
    if (rlInput == null)
    {
        m_command = CommunicatorObjects.CommandProto.Quit;
        return;
    }

    m_command = rlInput.Command;
    m_environmentParameters = rlInput.EnvironmentParameters;
    m_isTraining = rlInput.IsTraining;

    if (rlInput.AgentActions == null)
    {
        return;
    }

    foreach (var brainName in rlInput.AgentActions.Keys)
    {
        // Guard against actions sent for a brain that has no registered
        // agents this step (the unguarded indexer previously threw
        // KeyNotFoundException for unknown brain names).
        if (!m_currentAgents.ContainsKey(brainName))
        {
            continue;
        }
        // Hoist the per-brain lookups out of the inner loop to avoid
        // repeated dictionary indexer accesses.
        var agents = m_currentAgents[brainName];
        if (!agents.Any())
        {
            continue;
        }
        var actions = rlInput.AgentActions[brainName].Value;
        if (!actions.Any())
        {
            continue;
        }
        // NOTE(review): assumes the trainer sends at least one action per
        // registered agent (actions.Count >= agents.Count) — TODO confirm
        // against the external process contract.
        for (var i = 0; i < agents.Count; i++)
        {
            var agent = agents[i];
            var action = actions[i];
            agent.UpdateVectorAction(action.VectorActions.ToArray());
            agent.UpdateMemoriesAction(action.Memories.ToList());
            agent.UpdateTextAction(action.TextActions);
            agent.UpdateValueAction(action.Value);
            agent.UpdateCustomAction(action.CustomAction);
        }
    }
}