/// <summary>
/// Generates the Tensor inputs that are expected to be present in the Model.
/// </summary>
/// <param name="model">
/// The Barracuda engine model for loading static parameters.
/// </param>
/// <returns>TensorProxy IEnumerable with the expected Tensor inputs.</returns>
public static IReadOnlyList<TensorProxy> GetInputTensors(Model model)
{
    var tensors = new List<TensorProxy>();
    if (model == null)
    {
        return tensors;
    }

    // One float input tensor per declared model input, shape copied verbatim.
    foreach (var input in model.inputs)
    {
        tensors.Add(new TensorProxy
        {
            name = input.name,
            valueType = TensorProxy.TensorType.FloatingPoint,
            data = null,
            shape = input.shape.Select(i => (long)i).ToArray()
        });
    }

    // Recurrent models also expose their memory inputs.
    foreach (var mem in model.memories)
    {
        tensors.Add(new TensorProxy
        {
            name = mem.input,
            valueType = TensorProxy.TensorType.FloatingPoint,
            data = null,
            shape = TensorUtils.TensorShapeFromBarracuda(mem.shape)
        });
    }

    // Use a culture-invariant comparison so the input ordering is stable across
    // machine locales (the instance CompareTo is culture-sensitive); this also
    // matches the extension-method overload of GetInputTensors.
    tensors.Sort((el1, el2) => string.Compare(el1.name, el2.name, StringComparison.InvariantCulture));
    return tensors;
}
/// <summary>
/// Writes the observation of the sensor at m_SensorIndex for every agent into
/// its row of the batch tensor. Rows belonging to done agents are zero-filled
/// instead, since their sensor references may be stale.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized to batchSize first.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Per-agent info/sensor pairs, one per batch row.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IList<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

    var batchIndex = 0;
    foreach (var pair in infos)
    {
        var sensor = pair.sensors[m_SensorIndex];
        if (pair.agentInfo.done)
        {
            // A done agent may hold a stale reference to its sensors
            // (e.g. a dependent object might have been disposed), so fill
            // the row with zeroes instead of calling sensor.Write.
            TensorUtils.FillTensorBatch(tensorProxy, batchIndex, 0.0f);
        }
        else
        {
            m_ObservationWriter.SetTarget(tensorProxy, batchIndex, 0);
            sensor.Write(m_ObservationWriter);
        }
        batchIndex++;
    }
}
/// <summary>
/// Writes the observations of every sensor in m_SensorIndices, back to back,
/// into each agent's row of the batch tensor. Rows belonging to done agents
/// are zero-filled instead, since their sensor references may be stale.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized to batchSize first.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Per-agent info/sensor pairs, one per batch row.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

    var batchIndex = 0;
    foreach (var pair in infos)
    {
        if (pair.agentInfo.done)
        {
            // A done agent may hold stale sensor references (e.g. a dependent
            // object might have been disposed), so zero-fill the row instead
            // of calling sensor.Write.
            TensorUtils.FillTensorBatch(tensorProxy, batchIndex, 0.0f);
        }
        else
        {
            // Write each sensor consecutively into the same batch row,
            // advancing the offset by however much each one wrote.
            var offset = 0;
            foreach (var sensorIndex in m_SensorIndices)
            {
                m_ObservationWriter.SetTarget(tensorProxy, batchIndex, offset);
                offset += pair.sensors[sensorIndex].Write(m_ObservationWriter);
            }
        }
        batchIndex++;
    }
}
/// <summary>
/// Replaces the contents of m_InferenceOutputs with TensorProxy wrappers for
/// the named outputs peeked from the Barracuda engine.
/// </summary>
/// <param name="names">Output tensor names to fetch from m_Engine.</param>
void FetchBarracudaOutputs(string[] names)
{
    m_InferenceOutputs.Clear();
    foreach (var outputName in names)
    {
        var peeked = m_Engine.PeekOutput(outputName);
        m_InferenceOutputs.Add(TensorUtils.TensorProxyFromBarracuda(peeked, outputName));
    }
}
/// <summary>
/// Builds and returns a list of TensorProxy wrappers for the named outputs
/// peeked from the Barracuda engine, in the order the names were given.
/// </summary>
/// <param name="names">Output tensor names to fetch from m_Engine.</param>
/// <returns>One TensorProxy per requested output name.</returns>
List<TensorProxy> FetchBarracudaOutputs(string[] names)
{
    var outputs = new List<TensorProxy>(names.Length);
    foreach (var outputName in names)
    {
        var peeked = m_Engine.PeekOutput(outputName);
        outputs.Add(TensorUtils.TensorProxyFromBarracuda(peeked, outputName));
    }
    return outputs;
}
/// <summary>
/// Fills the discrete-action-mask tensor for each agent: 1.0 where an action
/// is allowed, 0.0 where it is masked. A null mask list means no action is
/// masked for that agent.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized to batchSize first.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Per-agent info/sensor pairs, one per batch row.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
    // The mask length is the last dimension of the tensor shape.
    var maskSize = tensorProxy.shape[tensorProxy.shape.Length - 1];

    var batchIndex = 0;
    foreach (var pair in infos)
    {
        var masks = pair.agentInfo.discreteActionMasks;
        for (var actionIndex = 0; actionIndex < maskSize; actionIndex++)
        {
            // A true entry in the mask list marks the action as forbidden.
            var allowed = masks == null || !masks[actionIndex];
            tensorProxy.data[batchIndex, actionIndex] = allowed ? 1.0f : 0.0f;
        }
        batchIndex++;
    }
}
/// <summary>
/// Generates the Tensor inputs that are expected to be present in the Model.
/// </summary>
/// <param name="model">
/// The Barracuda engine model for loading static parameters.
/// </param>
/// <returns>TensorProxy IEnumerable with the expected Tensor inputs.</returns>
public static IReadOnlyList<TensorProxy> GetInputTensors(this Model model)
{
    if (model == null)
    {
        return new List<TensorProxy>();
    }

    // One float input tensor per declared model input, shape copied verbatim.
    var tensors = model.inputs
        .Select(input => new TensorProxy
        {
            name = input.name,
            valueType = TensorProxy.TensorType.FloatingPoint,
            data = null,
            shape = input.shape.Select(dim => (long)dim).ToArray()
        })
        .ToList();

    // Memory inputs are only listed explicitly for models exported before the
    // MLAgents 2.0 model API version.
    if (model.GetVersion() < (int)BarracudaModelParamLoader.ModelApiVersion.MLAgents2_0)
    {
        foreach (var mem in model.memories)
        {
            tensors.Add(new TensorProxy
            {
                name = mem.input,
                valueType = TensorProxy.TensorType.FloatingPoint,
                data = null,
                shape = TensorUtils.TensorShapeFromBarracuda(mem.shape)
            });
        }
    }

    // Culture-invariant sort keeps the input ordering stable across locales.
    tensors.Sort((a, b) => string.Compare(a.name, b.name, StringComparison.InvariantCulture));
    return tensors;
}
/// <summary>
/// Concatenates the observations of every sensor in m_SensorIndices into one
/// flat vector per agent, zero-filling rows for done agents, and asserts that
/// the total written length equals the tensor's observation dimension.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized to batchSize first.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Per-agent info/sensor pairs, one per batch row.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IList<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
    // Expected flat observation length is the last dimension of the shape.
    var expectedObsSize = tensorProxy.shape[tensorProxy.shape.Length - 1];

    var batchIndex = 0;
    foreach (var pair in infos)
    {
        if (pair.agentInfo.done)
        {
            // A done agent may hold stale sensor references (e.g. a dependent
            // object might have been disposed), so zero-fill the row instead
            // of calling sensor.Write.
            TensorUtils.FillTensorBatch(tensorProxy, batchIndex, 0.0f);
        }
        else
        {
            // Write each sensor consecutively into the same batch row.
            var offset = 0;
            foreach (var sensorIndex in m_SensorIndices)
            {
                m_ObservationWriter.SetTarget(tensorProxy, batchIndex, offset);
                offset += pair.sensors[sensorIndex].Write(m_ObservationWriter);
            }
            Debug.AssertFormat(
                offset == expectedObsSize,
                "mismatch between vector observation size ({0}) and number of observations written ({1})",
                expectedObsSize, offset
            );
        }
        batchIndex++;
    }
}
/// <summary>
/// Copies each agent's recurrent memory slice into its row of the batch
/// tensor. The stored memory list is treated as consecutive chunks of
/// memorySize floats and this generator reads the chunk at m_MemoryIndex.
/// Rows for agents with no stored memory (including agents whose episode
/// just ended, whose memory is removed first) are zero-filled.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized to batchSize first.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Per-agent info/sensor pairs, one per batch row.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IList<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
    var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1];
    var agentIndex = 0;
    for (var infoIndex = 0; infoIndex < infos.Count; infoIndex++)
    {
        var infoSensorPair = infos[infoIndex];
        var info = infoSensorPair.agentInfo;
        var offset = memorySize * m_MemoryIndex;
        List<float> memory;
        if (info.done)
        {
            // Episode ended: drop the stored memory so the next episode
            // starts from a zeroed state.
            m_Memories.Remove(info.episodeId);
        }
        if (!m_Memories.TryGetValue(info.episodeId, out memory))
        {
            for (var j = 0; j < memorySize; j++)
            {
                tensorProxy.data[agentIndex, j] = 0;
            }
            agentIndex++;
            continue;
        }
        for (var j = 0; j < memorySize; j++)
        {
            // The bounds check must include the chunk offset: the previous
            // check (j >= memory.Count) could still index past the end of
            // the list via memory[j + offset] whenever m_MemoryIndex > 0
            // and the stored memory is shorter than expected.
            if (j + offset >= memory.Count)
            {
                break;
            }
            tensorProxy.data[agentIndex, j] = memory[j + offset];
        }
        agentIndex++;
    }
}
/// <summary>
/// Writes each agent's previously taken discrete actions into its row of the
/// batch tensor. Rows for agents whose stored discrete actions are empty are
/// left untouched.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized to batchSize first.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Per-agent info/sensor pairs, one per batch row.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
    // The per-agent action count is the last dimension of the tensor shape.
    var actionSize = tensorProxy.shape[tensorProxy.shape.Length - 1];

    var batchIndex = 0;
    foreach (var pair in infos)
    {
        var previousActions = pair.agentInfo.storedVectorActions.DiscreteActions;
        if (!previousActions.IsEmpty())
        {
            for (var actionIndex = 0; actionIndex < actionSize; actionIndex++)
            {
                tensorProxy.data[batchIndex, actionIndex] = previousActions[actionIndex];
            }
        }
        batchIndex++;
    }
}
/// <summary>
/// Copies each agent's stored recurrent memory into its row of the tensor
/// (indexed as data[agent, 0, j, 0], with the memory length taken from
/// tensorProxy.data.width). Rows for agents with no stored memory are
/// zero-filled, and a done agent's memory is removed first so the next
/// episode starts from a zeroed state.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized to batchSize first.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Per-agent info/sensor pairs, one per batch row.</param>
public void Generate(
    TensorProxy tensorProxy, int batchSize, IList<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
    var memorySize = tensorProxy.data.width;
    var agentIndex = 0;
    for (var infoIndex = 0; infoIndex < infos.Count; infoIndex++)
    {
        var infoSensorPair = infos[infoIndex];
        var info = infoSensorPair.agentInfo;
        if (info.done)
        {
            m_Memories.Remove(info.episodeId);
        }
        List<float> memory;
        if (!m_Memories.TryGetValue(info.episodeId, out memory))
        {
            for (var j = 0; j < memorySize; j++)
            {
                tensorProxy.data[agentIndex, 0, j, 0] = 0;
            }
            agentIndex++;
            continue;
        }
        // Hoist the loop-invariant bound; the old inner
        // `if (j >= memory.Count) break;` was unreachable (the loop bound
        // already clamped to memory.Count) and has been removed.
        var count = Math.Min(memorySize, memory.Count);
        for (var j = 0; j < count; j++)
        {
            tensorProxy.data[agentIndex, 0, j, 0] = memory[j];
        }
        agentIndex++;
    }
}
/// <summary>
/// Writes each agent's previously stored vector actions into its row of the
/// batch tensor. Rows for agents whose stored actions are null are left
/// untouched.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized to batchSize first.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Per-agent info/sensor pairs, one per batch row.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IList<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
    // The per-agent action count is the last dimension of the tensor shape.
    var actionSize = tensorProxy.shape[tensorProxy.shape.Length - 1];

    var batchIndex = 0;
    foreach (var pair in infos)
    {
        var previousActions = pair.agentInfo.storedVectorActions;
        if (previousActions != null)
        {
            for (var actionIndex = 0; actionIndex < actionSize; actionIndex++)
            {
                tensorProxy.data[batchIndex, actionIndex] = previousActions[actionIndex];
            }
        }
        batchIndex++;
    }
}
/// <summary>
/// Resizes the tensor to the batch size and fills it with samples drawn from
/// m_RandomNormal. The agent infos are not consulted.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized then filled.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Unused; present to satisfy the generator interface.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IList<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
    TensorUtils.FillTensorWithRandomNormal(tensorProxy, m_RandomNormal);
}
/// <summary>
/// Resizes the tensor to the batch size without writing any data into it.
/// The agent infos are not consulted.
/// </summary>
/// <param name="tensorProxy">Destination tensor; resized only.</param>
/// <param name="batchSize">Number of agent rows to allocate.</param>
/// <param name="infos">Unused; present to satisfy the generator interface.</param>
public void Generate(TensorProxy tensorProxy, int batchSize, IList<AgentInfoSensorsPair> infos)
{
    TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
}