/// <summary>
/// Sends the current <c>UnityRLOutput</c> over the communicator, receives the
/// next <c>UnityInput</c>, and applies the returned agent actions to the agents
/// tracked in <c>m_CurrentAgents</c>. Also updates <c>m_Command</c>,
/// <c>m_EnvironmentParameters</c> and <c>m_IsTraining</c> from the received input.
/// If the exchange returns no usable input, sets <c>m_Command</c> to Quit.
/// </summary>
void SendBatchedMessageHelper()
{
    // Blocking round-trip: ship the batched RL output, receive the next input.
    var input = m_Communicator.Exchange(
        new CommunicatorObjects.UnityOutput
        {
            RlOutput = m_CurrentUnityRlOutput
        });
    m_MessagesReceived += 1;

    // Reset the per-brain agent-info lists so the next batch starts empty.
    // Clearing a value does not mutate the dictionary's key set, so
    // enumerating Keys while doing this is safe.
    foreach (var k in m_CurrentUnityRlOutput.AgentInfos.Keys)
    {
        m_CurrentUnityRlOutput.AgentInfos[k].Value.Clear();
    }

    // A null exchange result or a missing RL input means the trainer side
    // is gone (or sent nothing usable): signal Quit and stop processing.
    if (input == null || input.RlInput == null)
    {
        m_Command = CommunicatorObjects.CommandProto.Quit;
        return;
    }

    var rlInput = input.RlInput;
    m_Command = rlInput.Command;
    m_EnvironmentParameters = rlInput.EnvironmentParameters;
    m_IsTraining = rlInput.IsTraining;

    // No actions at all in this input; nothing further to apply.
    if (rlInput.AgentActions == null)
    {
        return;
    }

    foreach (var brainName in rlInput.AgentActions.Keys)
    {
        // Hoist the per-brain dictionary lookups out of the inner loop:
        // one lookup each instead of one per agent per access.
        var agents = m_CurrentAgents[brainName];
        var actions = rlInput.AgentActions[brainName].Value;

        // Skip brains with no live agents or no actions delivered.
        if (!agents.Any() || !actions.Any())
        {
            continue;
        }

        // NOTE(review): assumes actions arrive in the same order (and at
        // least the same count) as m_CurrentAgents[brainName] — protocol
        // invariant upheld by the trainer; an out-of-range index here would
        // indicate a protocol mismatch.
        for (var i = 0; i < agents.Count; i++)
        {
            var agent = agents[i];
            var action = actions[i];
            agent.UpdateVectorAction(action.VectorActions.ToArray());
            agent.UpdateMemoriesAction(action.Memories.ToList());
            agent.UpdateTextAction(action.TextActions);
            agent.UpdateValueAction(action.Value);
            agent.UpdateCustomAction(action.CustomAction);
        }
    }
}