Code example #1
        /// <summary>
        /// Generates the Tensor inputs that are expected to be present in the Model.
        /// </summary>
        /// <param name="model">
        /// The Barracuda engine model for loading static parameters.
        /// </param>
        /// <returns>TensorProxy IEnumerable with the expected Tensor inputs.</returns>
        public static IReadOnlyList<TensorProxy> GetInputTensors(Model model)
        {
            var tensors = new List<TensorProxy>();

            if (model == null)
            {
                return tensors;
            }

            foreach (var input in model.inputs)
            {
                tensors.Add(new TensorProxy
                {
                    name      = input.name,
                    valueType = TensorProxy.TensorType.FloatingPoint,
                    data      = null,
                    shape     = input.shape.Select(i => (long)i).ToArray()
                });
            }

            foreach (var mem in model.memories)
            {
                tensors.Add(new TensorProxy
                {
                    name      = mem.input,
                    valueType = TensorProxy.TensorType.FloatingPoint,
                    data      = null,
                    shape     = TensorUtils.TensorShapeFromBarracuda(mem.shape)
                });
            }

            tensors.Sort((el1, el2) => el1.name.CompareTo(el2.name));

            return tensors;
        }
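A minimal usage sketch for this helper follows; it is not part of the original source. The hosting class name (ModelTensorUtil) and the modelAsset variable are assumptions for illustration, and Barracuda's ModelLoader.Load is used to obtain the Model instance.

        // Usage sketch (hypothetical call site): load a Barracuda model asset and list the inputs it expects.
        // "ModelTensorUtil" stands in for whatever class hosts GetInputTensors; "modelAsset" is an NNModel
        // reference assigned elsewhere.
        var model = ModelLoader.Load(modelAsset);
        IReadOnlyList<TensorProxy> inputs = ModelTensorUtil.GetInputTensors(model);
        foreach (var t in inputs)
        {
            Debug.Log($"{t.name}: [{string.Join(", ", t.shape)}]");
        }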
Code example #2
        // Peeks the named output tensors from the Barracuda engine and wraps each one as a TensorProxy.
        List<TensorProxy> FetchBarracudaOutputs(string[] names)
        {
            var outputs = new List<TensorProxy>();

            foreach (var n in names)
            {
                var output = m_Engine.PeekOutput(n);
                outputs.Add(TensorUtils.TensorProxyFromBarracuda(output, n));
            }

            return outputs;
        }
Code example #3
        // Writes each agent's discrete action mask into the batch: 1.0f marks an available action, 0.0f a masked one.
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var maskSize   = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var agentInfo = infoSensorPair.agentInfo;
                var maskList  = agentInfo.discreteActionMasks;
                for (var j = 0; j < maskSize; j++)
                {
                    var isUnmasked = (maskList != null && maskList[j]) ? 0.0f : 1.0f;
                    tensorProxy.data[agentIndex, j] = isUnmasked;
                }
                agentIndex++;
            }
        }
Code example #4
        // Copies each agent's previously stored vector actions into the batch; agents without stored actions are skipped.
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var actionSize = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var info       = infoSensorPair.agentInfo;
                var pastAction = info.storedVectorActions;
                if (pastAction != null)
                {
                    for (var j = 0; j < actionSize; j++)
                    {
                        tensorProxy.data[agentIndex, j] = pastAction[j];
                    }
                }

                agentIndex++;
            }
        }
Code example #5
        // Writes the observation produced by a single sensor (m_SensorIndex) for each agent into the batch.
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var sensor = infoSensorPair.sensors[m_SensorIndex];
                if (infoSensorPair.agentInfo.done)
                {
                    // If the agent is done, we might have a stale reference to the sensors
                    // e.g. a dependent object might have been disposed.
                    // To avoid this, just fill observation with zeroes instead of calling sensor.Write.
                    TensorUtils.FillTensorBatch(tensorProxy, agentIndex, 0.0f);
                }
                else
                {
                    m_WriteAdapter.SetTarget(tensorProxy, agentIndex, 0);
                    sensor.Write(m_WriteAdapter);
                }
                agentIndex++;
            }
        }
Code example #6
        // Writes one slice of each agent's recurrent memory into the batch; agents whose episode ended or that
        // have no stored memory get zeroes.
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);

            var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex = 0;

            foreach (var infoSensorPair in infos)
            {
                var info = infoSensorPair.agentInfo;
                var offset = memorySize * m_MemoryIndex;
                List<float> memory;
                if (info.done)
                {
                    m_Memories.Remove(info.episodeId);
                }
                if (!m_Memories.TryGetValue(info.episodeId, out memory))
                {
                    for (var j = 0; j < memorySize; j++)
                    {
                        tensorProxy.data[agentIndex, j] = 0;
                    }
                    agentIndex++;
                    continue;
                }
                for (var j = 0; j < memorySize; j++)
                {
                    // Guard against reading past the stored memory: the element accessed below is memory[j + offset].
                    if (offset + j >= memory.Count)
                    {
                        break;
                    }

                    tensorProxy.data[agentIndex, j] = memory[j + offset];
                }
                agentIndex++;
            }
        }
Code example #7
        // Concatenates the observations from several sensors (m_SensorIndices) into one vector observation per agent.
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            var vecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1];
            var agentIndex  = 0;

            foreach (var info in infos)
            {
                if (info.agentInfo.done)
                {
                    // If the agent is done, we might have a stale reference to the sensors
                    // e.g. a dependent object might have been disposed.
                    // To avoid this, just fill observation with zeroes instead of calling sensor.Write.
                    TensorUtils.FillTensorBatch(tensorProxy, agentIndex, 0.0f);
                }
                else
                {
                    var tensorOffset = 0;
                    // Write each sensor consecutively to the tensor
                    foreach (var sensorIndex in m_SensorIndices)
                    {
                        var sensor = info.sensors[sensorIndex];
                        m_WriteAdapter.SetTarget(tensorProxy, agentIndex, tensorOffset);
                        var numWritten = sensor.Write(m_WriteAdapter);
                        tensorOffset += numWritten;
                    }
                    Debug.AssertFormat(
                        tensorOffset == vecObsSizeT,
                        "mismatch between vector observation size ({0}) and number of observations written ({1})",
                        vecObsSizeT, tensorOffset
                        );
                }

                agentIndex++;
            }
        }
Code example #8
        // Fills the tensor with samples drawn from a random normal distribution.
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
            TensorUtils.FillTensorWithRandomNormal(tensorProxy, m_RandomNormal);
        }
Code example #9
        // Only resizes the tensor to the batch size; no per-agent values are written.
        public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
        }
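All of these Generate overloads share one signature, so they are presumably dispatched through a common interface keyed by the input tensor's name. The sketch below illustrates that pattern under that assumption; the interface name IGenerator, the field m_Generators, and the GenerateTensors method are hypothetical and not taken from the examples above.

        // Hypothetical dispatch pattern: one generator per expected input tensor, looked up by tensor name.
        interface IGenerator
        {
            void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable<AgentInfoSensorsPair> infos);
        }

        Dictionary<string, IGenerator> m_Generators;

        void GenerateTensors(IEnumerable<TensorProxy> inputs, int batchSize, IEnumerable<AgentInfoSensorsPair> infos)
        {
            foreach (var tensor in inputs)
            {
                if (!m_Generators.TryGetValue(tensor.name, out var generator))
                {
                    throw new ArgumentException($"No generator registered for input tensor: {tensor.name}");
                }
                generator.Generate(tensor, batchSize, infos);
            }
        }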